author     GitLab Bot <gitlab-bot@gitlab.com>  2020-01-21 14:21:10 +0000
committer  GitLab Bot <gitlab-bot@gitlab.com>  2020-01-21 14:21:10 +0000
commit     cb0d23c455b73486fd1015f8ca9479b5b7e3585d (patch)
tree       d7dc129a407fd74266d2dc561bebf24665197c2f /spec
parent     c3e911be175c0aabfea1eb030f9e0ef23f5f3887 (diff)
download   gitlab-ce-cb0d23c455b73486fd1015f8ca9479b5b7e3585d.tar.gz

Add latest changes from gitlab-org/gitlab@12-7-stable-ee
Diffstat (limited to 'spec')
-rw-r--r--spec/controllers/admin/sessions_controller_spec.rb4
-rw-r--r--spec/controllers/admin/users_controller_spec.rb1
-rw-r--r--spec/controllers/application_controller_spec.rb46
-rw-r--r--spec/controllers/autocomplete_controller_spec.rb4
-rw-r--r--spec/controllers/concerns/confirm_email_warning_spec.rb2
-rw-r--r--spec/controllers/concerns/continue_params_spec.rb1
-rw-r--r--spec/controllers/concerns/controller_with_cross_project_access_check_spec.rb4
-rw-r--r--spec/controllers/concerns/group_tree_spec.rb2
-rw-r--r--spec/controllers/concerns/internal_redirect_spec.rb1
-rw-r--r--spec/controllers/concerns/lfs_request_spec.rb2
-rw-r--r--spec/controllers/concerns/metrics_dashboard_spec.rb2
-rw-r--r--spec/controllers/concerns/renders_commits_spec.rb2
-rw-r--r--spec/controllers/concerns/routable_actions_spec.rb2
-rw-r--r--spec/controllers/concerns/sourcegraph_decorator_spec.rb (renamed from spec/controllers/concerns/sourcegraph_gon_spec.rb)4
-rw-r--r--spec/controllers/concerns/static_object_external_storage_spec.rb2
-rw-r--r--spec/controllers/groups/group_links_controller_spec.rb96
-rw-r--r--spec/controllers/groups/group_members_controller_spec.rb6
-rw-r--r--spec/controllers/groups/milestones_controller_spec.rb13
-rw-r--r--spec/controllers/groups/uploads_controller_spec.rb16
-rw-r--r--spec/controllers/health_check_controller_spec.rb8
-rw-r--r--spec/controllers/metrics_controller_spec.rb8
-rw-r--r--spec/controllers/omniauth_callbacks_controller_spec.rb28
-rw-r--r--spec/controllers/profiles/preferences_controller_spec.rb3
-rw-r--r--spec/controllers/projects/branches_controller_spec.rb9
-rw-r--r--spec/controllers/projects/ci/lints_controller_spec.rb2
-rw-r--r--spec/controllers/projects/clusters_controller_spec.rb1
-rw-r--r--spec/controllers/projects/deployments_controller_spec.rb2
-rw-r--r--spec/controllers/projects/environments/prometheus_api_controller_spec.rb34
-rw-r--r--spec/controllers/projects/environments/sample_metrics_controller_spec.rb15
-rw-r--r--spec/controllers/projects/error_tracking/projects_controller_spec.rb122
-rw-r--r--spec/controllers/projects/error_tracking/stack_traces_controller_spec.rb91
-rw-r--r--spec/controllers/projects/error_tracking_controller_spec.rb219
-rw-r--r--spec/controllers/projects/find_file_controller_spec.rb3
-rw-r--r--spec/controllers/projects/forks_controller_spec.rb80
-rw-r--r--spec/controllers/projects/git_http_controller_spec.rb72
-rw-r--r--spec/controllers/projects/issues_controller_spec.rb1
-rw-r--r--spec/controllers/projects/jobs_controller_spec.rb6
-rw-r--r--spec/controllers/projects/merge_requests_controller_spec.rb17
-rw-r--r--spec/controllers/projects/pages_controller_spec.rb11
-rw-r--r--spec/controllers/projects/performance_monitoring/dashboards_controller_spec.rb132
-rw-r--r--spec/controllers/projects/pipelines_controller_spec.rb47
-rw-r--r--spec/controllers/projects/prometheus/metrics_controller_spec.rb2
-rw-r--r--spec/controllers/projects/raw_controller_spec.rb13
-rw-r--r--spec/controllers/projects/serverless/functions_controller_spec.rb4
-rw-r--r--spec/controllers/projects/snippets_controller_spec.rb60
-rw-r--r--spec/controllers/projects/tags_controller_spec.rb2
-rw-r--r--spec/controllers/projects/tree_controller_spec.rb28
-rw-r--r--spec/controllers/projects/uploads_controller_spec.rb15
-rw-r--r--spec/controllers/projects/wikis_controller_spec.rb1
-rw-r--r--spec/controllers/projects_controller_spec.rb30
-rw-r--r--spec/controllers/registrations_controller_spec.rb19
-rw-r--r--spec/controllers/search_controller_spec.rb36
-rw-r--r--spec/controllers/snippets_controller_spec.rb52
-rw-r--r--spec/controllers/users_controller_spec.rb1
-rw-r--r--spec/db/schema_spec.rb3
-rw-r--r--spec/factories/analytics/cycle_analytics/project_stages.rb2
-rw-r--r--spec/factories/aws/roles.rb2
-rw-r--r--spec/factories/badge.rb4
-rw-r--r--spec/factories/chat_names.rb2
-rw-r--r--spec/factories/chat_teams.rb2
-rw-r--r--spec/factories/ci/bridge.rb2
-rw-r--r--spec/factories/ci/build_need.rb2
-rw-r--r--spec/factories/ci/build_trace_chunks.rb2
-rw-r--r--spec/factories/ci/build_trace_section_names.rb2
-rw-r--r--spec/factories/ci/builds.rb14
-rw-r--r--spec/factories/ci/group_variables.rb2
-rw-r--r--spec/factories/ci/job_artifacts.rb2
-rw-r--r--spec/factories/ci/job_variables.rb2
-rw-r--r--spec/factories/ci/pipeline_schedule.rb2
-rw-r--r--spec/factories/ci/pipeline_schedule_variables.rb2
-rw-r--r--spec/factories/ci/pipeline_variables.rb2
-rw-r--r--spec/factories/ci/pipelines.rb2
-rw-r--r--spec/factories/ci/resource.rb11
-rw-r--r--spec/factories/ci/resource_group.rb8
-rw-r--r--spec/factories/ci/runner_projects.rb2
-rw-r--r--spec/factories/ci/runners.rb2
-rw-r--r--spec/factories/ci/sources/pipelines.rb2
-rw-r--r--spec/factories/ci/stages.rb4
-rw-r--r--spec/factories/ci/trigger_requests.rb2
-rw-r--r--spec/factories/ci/triggers.rb2
-rw-r--r--spec/factories/ci/variables.rb2
-rw-r--r--spec/factories/clusters/applications/helm.rb19
-rw-r--r--spec/factories/clusters/clusters.rb2
-rw-r--r--spec/factories/clusters/kubernetes_namespaces.rb2
-rw-r--r--spec/factories/clusters/platforms/kubernetes.rb2
-rw-r--r--spec/factories/clusters/projects.rb2
-rw-r--r--spec/factories/clusters/providers/aws.rb2
-rw-r--r--spec/factories/clusters/providers/gcp.rb2
-rw-r--r--spec/factories/commit_statuses.rb8
-rw-r--r--spec/factories/container_expiration_policies.rb20
-rw-r--r--spec/factories/deployments.rb2
-rw-r--r--spec/factories/dev_ops_score_metrics.rb2
-rw-r--r--spec/factories/environments.rb2
-rw-r--r--spec/factories/error_tracking/detailed_error.rb9
-rw-r--r--spec/factories/error_tracking/error.rb2
-rw-r--r--spec/factories/error_tracking/error_event.rb2
-rw-r--r--spec/factories/error_tracking/project.rb2
-rw-r--r--spec/factories/events.rb2
-rw-r--r--spec/factories/gitaly/commit.rb2
-rw-r--r--spec/factories/gitaly/commit_author.rb2
-rw-r--r--spec/factories/gitaly/tag.rb2
-rw-r--r--spec/factories/grafana_integrations.rb2
-rw-r--r--spec/factories/groups.rb2
-rw-r--r--spec/factories/import_states.rb2
-rw-r--r--spec/factories/labels.rb2
-rw-r--r--spec/factories/namespace/aggregation_schedules.rb2
-rw-r--r--spec/factories/namespace/root_storage_statistics.rb2
-rw-r--r--spec/factories/notes.rb20
-rw-r--r--spec/factories/project_error_tracking_settings.rb2
-rw-r--r--spec/factories/project_metrics_settings.rb2
-rw-r--r--spec/factories/projects.rb11
-rw-r--r--spec/factories/prometheus_metrics.rb2
-rw-r--r--spec/factories/releases.rb9
-rw-r--r--spec/factories/releases/link.rb2
-rw-r--r--spec/factories/resource_weight_events.rb8
-rw-r--r--spec/factories/sentry_issue.rb4
-rw-r--r--spec/factories/serverless/domain_cluster.rb2
-rw-r--r--spec/factories/services.rb2
-rw-r--r--spec/factories/terms.rb2
-rw-r--r--spec/factories/todos.rb2
-rw-r--r--spec/features/admin/admin_broadcast_messages_spec.rb30
-rw-r--r--spec/features/admin/admin_groups_spec.rb10
-rw-r--r--spec/features/admin/admin_hooks_spec.rb4
-rw-r--r--spec/features/admin/admin_projects_spec.rb4
-rw-r--r--spec/features/admin/admin_runners_spec.rb24
-rw-r--r--spec/features/boards/boards_spec.rb10
-rw-r--r--spec/features/boards/modal_filter_spec.rb2
-rw-r--r--spec/features/boards/sidebar_spec.rb4
-rw-r--r--spec/features/clusters/installing_applications_shared_examples.rb3
-rw-r--r--spec/features/cycle_analytics_spec.rb2
-rw-r--r--spec/features/dashboard/instance_statistics_spec.rb22
-rw-r--r--spec/features/dashboard/issues_filter_spec.rb6
-rw-r--r--spec/features/dashboard/issues_spec.rb2
-rw-r--r--spec/features/dashboard/merge_requests_spec.rb4
-rw-r--r--spec/features/dashboard/projects_spec.rb28
-rw-r--r--spec/features/dashboard/snippets_spec.rb1
-rw-r--r--spec/features/groups/issues_spec.rb2
-rw-r--r--spec/features/groups/members/manage_groups_spec.rb89
-rw-r--r--spec/features/groups/members/manage_members_spec.rb8
-rw-r--r--spec/features/groups/members/search_members_spec.rb2
-rw-r--r--spec/features/groups/merge_requests_spec.rb2
-rw-r--r--spec/features/import/manifest_import_spec.rb10
-rw-r--r--spec/features/instance_statistics/cohorts_spec.rb2
-rw-r--r--spec/features/issuables/issuable_list_spec.rb4
-rw-r--r--spec/features/issues/filtered_search/dropdown_assignee_spec.rb201
-rw-r--r--spec/features/issues/filtered_search/dropdown_author_spec.rb169
-rw-r--r--spec/features/issues/filtered_search/dropdown_base_spec.rb58
-rw-r--r--spec/features/issues/filtered_search/dropdown_emoji_spec.rb156
-rw-r--r--spec/features/issues/filtered_search/dropdown_hint_spec.rb193
-rw-r--r--spec/features/issues/filtered_search/dropdown_label_spec.rb285
-rw-r--r--spec/features/issues/filtered_search/dropdown_milestone_spec.rb253
-rw-r--r--spec/features/issues/filtered_search/dropdown_release_spec.rb30
-rw-r--r--spec/features/issues/filtered_search/filter_issues_spec.rb216
-rw-r--r--spec/features/issues/filtered_search/recent_searches_spec.rb4
-rw-r--r--spec/features/issues/filtered_search/search_bar_spec.rb4
-rw-r--r--spec/features/issues/filtered_search/visual_tokens_spec.rb219
-rw-r--r--spec/features/issues/rss_spec.rb37
-rw-r--r--spec/features/issues/user_comments_on_issue_spec.rb6
-rw-r--r--spec/features/issues/user_creates_issue_by_email_spec.rb46
-rw-r--r--spec/features/issues/user_creates_issue_spec.rb126
-rw-r--r--spec/features/issues/user_edits_issue_spec.rb279
-rw-r--r--spec/features/issues/user_filters_issues_spec.rb39
-rw-r--r--spec/features/issues/user_resets_their_incoming_email_token_spec.rb32
-rw-r--r--spec/features/issues/user_sees_breadcrumb_links_spec.rb20
-rw-r--r--spec/features/issues/user_sees_empty_state_spec.rb51
-rw-r--r--spec/features/issues/user_sees_live_update_spec.rb52
-rw-r--r--spec/features/issues/user_sorts_issues_spec.rb187
-rw-r--r--spec/features/issues_spec.rb828
-rw-r--r--spec/features/labels_hierarchy_spec.rb6
-rw-r--r--spec/features/markdown/markdown_spec.rb18
-rw-r--r--spec/features/merge_request/maintainer_edits_fork_spec.rb2
-rw-r--r--spec/features/merge_request/user_comments_on_diff_spec.rb3
-rw-r--r--spec/features/merge_request/user_creates_image_diff_notes_spec.rb3
-rw-r--r--spec/features/merge_request/user_expands_diff_spec.rb3
-rw-r--r--spec/features/merge_request/user_posts_diff_notes_spec.rb3
-rw-r--r--spec/features/merge_request/user_posts_notes_spec.rb4
-rw-r--r--spec/features/merge_request/user_resolves_conflicts_spec.rb3
-rw-r--r--spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb3
-rw-r--r--spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb3
-rw-r--r--spec/features/merge_request/user_sees_deployment_widget_spec.rb4
-rw-r--r--spec/features/merge_request/user_sees_diff_spec.rb3
-rw-r--r--spec/features/merge_request/user_sees_mr_with_deleted_source_branch_spec.rb3
-rw-r--r--spec/features/merge_request/user_sees_versions_spec.rb29
-rw-r--r--spec/features/merge_request/user_suggests_changes_on_diff_spec.rb6
-rw-r--r--spec/features/merge_request/user_toggles_whitespace_changes_spec.rb3
-rw-r--r--spec/features/merge_request/user_views_diffs_spec.rb3
-rw-r--r--spec/features/merge_requests/filters_generic_behavior_spec.rb2
-rw-r--r--spec/features/merge_requests/user_filters_by_assignees_spec.rb6
-rw-r--r--spec/features/merge_requests/user_filters_by_labels_spec.rb6
-rw-r--r--spec/features/merge_requests/user_filters_by_milestones_spec.rb10
-rw-r--r--spec/features/merge_requests/user_filters_by_multiple_criteria_spec.rb4
-rw-r--r--spec/features/merge_requests/user_filters_by_target_branch_spec.rb6
-rw-r--r--spec/features/profiles/active_sessions_spec.rb27
-rw-r--r--spec/features/profiles/user_visits_profile_preferences_page_spec.rb17
-rw-r--r--spec/features/projects/badges/coverage_spec.rb2
-rw-r--r--spec/features/projects/blobs/edit_spec.rb4
-rw-r--r--spec/features/projects/environments/environment_metrics_spec.rb2
-rw-r--r--spec/features/projects/environments/environment_spec.rb69
-rw-r--r--spec/features/projects/features_visibility_spec.rb2
-rw-r--r--spec/features/projects/files/user_browses_files_spec.rb15
-rw-r--r--spec/features/projects/files/user_browses_lfs_files_spec.rb15
-rw-r--r--spec/features/projects/fork_spec.rb83
-rw-r--r--spec/features/projects/jobs_spec.rb15
-rw-r--r--spec/features/projects/members/list_spec.rb4
-rw-r--r--spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb4
-rw-r--r--spec/features/projects/pages_spec.rb6
-rw-r--r--spec/features/projects/pipelines/pipeline_spec.rb140
-rw-r--r--spec/features/projects/raw/user_interacts_with_raw_endpoint_spec.rb2
-rw-r--r--spec/features/projects/serverless/functions_spec.rb5
-rw-r--r--spec/features/projects/settings/project_settings_spec.rb20
-rw-r--r--spec/features/projects/settings/registry_settings_spec.rb37
-rw-r--r--spec/features/projects/settings/user_manages_project_members_spec.rb2
-rw-r--r--spec/features/projects/settings/user_renames_a_project_spec.rb4
-rw-r--r--spec/features/projects/snippets/create_snippet_spec.rb4
-rw-r--r--spec/features/projects/sourcegraph_csp_spec.rb98
-rw-r--r--spec/features/projects/tree/create_directory_spec.rb2
-rw-r--r--spec/features/projects/tree/create_file_spec.rb2
-rw-r--r--spec/features/projects/view_on_env_spec.rb3
-rw-r--r--spec/features/projects/wiki/user_creates_wiki_page_spec.rb18
-rw-r--r--spec/features/projects/wiki/users_views_asciidoc_page_with_includes_spec.rb79
-rw-r--r--spec/features/task_lists_spec.rb69
-rw-r--r--spec/features/triggers_spec.rb92
-rw-r--r--spec/features/users/signup_spec.rb134
-rw-r--r--spec/finders/branches_finder_spec.rb2
-rw-r--r--spec/finders/clusters/knative_services_finder_spec.rb4
-rw-r--r--spec/finders/deployments_finder_spec.rb40
-rw-r--r--spec/finders/environments_finder_spec.rb28
-rw-r--r--spec/finders/events_finder_spec.rb26
-rw-r--r--spec/finders/group_members_finder_spec.rb61
-rw-r--r--spec/finders/issues_finder_spec.rb4
-rw-r--r--spec/finders/keys_finder_spec.rb34
-rw-r--r--spec/finders/merge_requests_finder_spec.rb32
-rw-r--r--spec/finders/pipelines_finder_spec.rb13
-rw-r--r--spec/finders/projects/serverless/functions_finder_spec.rb6
-rw-r--r--spec/finders/sentry_issue_finder_spec.rb46
-rw-r--r--spec/finders/todos_finder_spec.rb2
-rw-r--r--spec/fixtures/api/schemas/cluster_status.json2
-rw-r--r--spec/fixtures/api/schemas/entities/issue_board.json3
-rw-r--r--spec/fixtures/api/schemas/error_tracking/error_detailed.json44
-rw-r--r--spec/fixtures/api/schemas/error_tracking/update_issue.json16
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/issue.json5
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/label_basic.json2
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/merge_request.json7
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/service.json24
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/services.json4
-rw-r--r--spec/fixtures/emails/envelope_to_header.eml32
-rw-r--r--spec/fixtures/emails/forwarded_new_issue.eml16
-rw-r--r--spec/fixtures/gitlab/sample_metrics/sample_metric_query_result.yml477
-rw-r--r--spec/fixtures/lib/gitlab/import_export/complex/project.json199
-rw-r--r--spec/fixtures/lib/gitlab/import_export/group/project.json61
-rw-r--r--spec/fixtures/markdown.md.erb8
-rw-r--r--spec/fixtures/not_a_png.png bin 0 -> 72642 bytes
-rw-r--r--spec/fixtures/referees/metrics_referee.json.gz bin 0 -> 568 bytes
-rw-r--r--spec/fixtures/referees/network_referee.json.gz bin 0 -> 568 bytes
-rw-r--r--spec/fixtures/sentry/issue_latest_event_no_stack_sample_response.json300
-rw-r--r--spec/fixtures/sentry/issue_latest_event_sample_response.json5299
-rw-r--r--spec/fixtures/sentry/issue_link_sample_response.json7
-rw-r--r--spec/fixtures/sentry/issue_sample_response.json311
-rw-r--r--spec/fixtures/sentry/repos_sample_response.json15
-rw-r--r--spec/frontend/__mocks__/@gitlab/ui.js19
-rw-r--r--spec/frontend/admin/statistics_panel/components/app_spec.js1
-rw-r--r--spec/frontend/api_spec.js15
-rw-r--r--spec/frontend/behaviors/bind_in_out_spec.js204
-rw-r--r--spec/frontend/behaviors/markdown/paste_markdown_table_spec.js113
-rw-r--r--spec/frontend/boards/components/issue_time_estimate_spec.js2
-rw-r--r--spec/frontend/boards/issue_card_spec.js10
-rw-r--r--spec/frontend/bootstrap_jquery_spec.js (renamed from spec/javascripts/bootstrap_jquery_spec.js)25
-rw-r--r--spec/frontend/branches/branches_delete_modal_spec.js (renamed from spec/javascripts/branches/branches_delete_modal_spec.js)2
-rw-r--r--spec/frontend/branches/components/__snapshots__/divergence_graph_spec.js.snap6
-rw-r--r--spec/frontend/clusters/components/applications_spec.js63
-rw-r--r--spec/frontend/clusters/components/knative_domain_editor_spec.js15
-rw-r--r--spec/frontend/clusters/components/remove_cluster_confirmation_spec.js1
-rw-r--r--spec/frontend/clusters/components/uninstall_application_confirmation_modal_spec.js7
-rw-r--r--spec/frontend/clusters/services/crossplane_provider_stack_spec.js4
-rw-r--r--spec/frontend/clusters/services/mock_data.js4
-rw-r--r--spec/frontend/clusters/stores/clusters_store_spec.js13
-rw-r--r--spec/frontend/commit/commit_pipeline_status_component_spec.js1
-rw-r--r--spec/frontend/confidential_merge_request/components/__snapshots__/project_form_group_spec.js.snap12
-rw-r--r--spec/frontend/confidential_merge_request/components/project_form_group_spec.js10
-rw-r--r--spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap8
-rw-r--r--spec/frontend/contributors/component/contributors_spec.js7
-rw-r--r--spec/frontend/create_cluster/components/cluster_form_dropdown_spec.js (renamed from spec/frontend/create_cluster/eks_cluster/components/cluster_form_dropdown_spec.js)88
-rw-r--r--spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js26
-rw-r--r--spec/frontend/create_cluster/eks_cluster/components/service_credentials_form_spec.js13
-rw-r--r--spec/frontend/create_cluster/gke_cluster/components/gke_machine_type_dropdown_spec.js135
-rw-r--r--spec/frontend/create_cluster/gke_cluster/components/gke_network_dropdown_spec.js143
-rw-r--r--spec/frontend/create_cluster/gke_cluster/components/gke_project_id_dropdown_spec.js138
-rw-r--r--spec/frontend/create_cluster/gke_cluster/components/gke_subnetwork_dropdown_spec.js113
-rw-r--r--spec/frontend/create_cluster/gke_cluster/mock_data.js75
-rw-r--r--spec/frontend/create_cluster/store/cluster_dropdown/actions_spec.js (renamed from spec/frontend/create_cluster/eks_cluster/store/cluster_dropdown/actions_spec.js)6
-rw-r--r--spec/frontend/create_cluster/store/cluster_dropdown/mutations_spec.js (renamed from spec/frontend/create_cluster/eks_cluster/store/cluster_dropdown/mutations_spec.js)6
-rw-r--r--spec/frontend/cycle_analytics/limit_warning_component_spec.js2
-rw-r--r--spec/frontend/cycle_analytics/stage_nav_item_spec.js4
-rw-r--r--spec/frontend/diffs/components/compare_versions_spec.js8
-rw-r--r--spec/frontend/diffs/components/diff_content_spec.js1
-rw-r--r--spec/frontend/diffs/components/diff_discussion_reply_spec.js1
-rw-r--r--spec/frontend/diffs/components/diff_file_header_spec.js22
-rw-r--r--spec/frontend/diffs/components/diff_gutter_avatars_spec.js18
-rw-r--r--spec/frontend/diffs/components/diff_stats_spec.js12
-rw-r--r--spec/frontend/diffs/components/edit_button_spec.js18
-rw-r--r--spec/frontend/diffs/components/hidden_files_warning_spec.js5
-rw-r--r--spec/frontend/diffs/components/no_changes_spec.js2
-rw-r--r--spec/frontend/diffs/components/settings_dropdown_spec.js (renamed from spec/javascripts/diffs/components/settings_dropdown_spec.js)17
-rw-r--r--spec/frontend/droplab/constants_spec.js39
-rw-r--r--spec/frontend/droplab/plugins/ajax_filter_spec.js (renamed from spec/javascripts/droplab/plugins/ajax_filter_spec.js)10
-rw-r--r--spec/frontend/droplab/plugins/ajax_spec.js (renamed from spec/javascripts/droplab/plugins/ajax_spec.js)8
-rw-r--r--spec/frontend/environments/environment_item_spec.js77
-rw-r--r--spec/frontend/environments/environment_monitoring_spec.js4
-rw-r--r--spec/frontend/environments/environment_pin_spec.js46
-rw-r--r--spec/frontend/environments/environment_rollback_spec.js4
-rw-r--r--spec/frontend/environments/environment_stop_spec.js4
-rw-r--r--spec/frontend/environments/environment_terminal_button_spec.js4
-rw-r--r--spec/frontend/environments/mock_data.js5
-rw-r--r--spec/frontend/error_tracking/components/error_details_spec.js135
-rw-r--r--spec/frontend/error_tracking/components/error_tracking_list_spec.js188
-rw-r--r--spec/frontend/error_tracking/components/stacktrace_entry_spec.js4
-rw-r--r--spec/frontend/error_tracking/store/actions_spec.js78
-rw-r--r--spec/frontend/error_tracking/store/details/actions_spec.js2
-rw-r--r--spec/frontend/error_tracking/store/list/actions_spec.js20
-rw-r--r--spec/frontend/error_tracking_settings/components/app_spec.js4
-rw-r--r--spec/frontend/error_tracking_settings/components/project_dropdown_spec.js4
-rw-r--r--spec/frontend/error_tracking_settings/store/actions_spec.js8
-rw-r--r--spec/frontend/feature_highlight/feature_highlight_options_spec.js (renamed from spec/javascripts/feature_highlight/feature_highlight_options_spec.js)18
-rw-r--r--spec/frontend/filtered_search/components/recent_searches_dropdown_content_spec.js (renamed from spec/javascripts/filtered_search/components/recent_searches_dropdown_content_spec.js)4
-rw-r--r--spec/frontend/filtered_search/dropdown_user_spec.js (renamed from spec/javascripts/filtered_search/dropdown_user_spec.js)18
-rw-r--r--spec/frontend/filtered_search/filtered_search_token_keys_spec.js2
-rw-r--r--spec/frontend/fixtures/issues.rb9
-rw-r--r--spec/frontend/fixtures/static/mock-video.mp4 bin 0 -> 383631 bytes
-rw-r--r--spec/frontend/frequent_items/components/frequent_items_search_input_spec.js (renamed from spec/javascripts/frequent_items/components/frequent_items_search_input_spec.js)22
-rw-r--r--spec/frontend/gl_field_errors_spec.js (renamed from spec/javascripts/gl_field_errors_spec.js)62
-rw-r--r--spec/frontend/gpg_badges_spec.js (renamed from spec/javascripts/gpg_badges_spec.js)4
-rw-r--r--spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap24
-rw-r--r--spec/frontend/header_spec.js (renamed from spec/javascripts/header_spec.js)2
-rw-r--r--spec/frontend/helpers/class_spec_helper_spec.js (renamed from spec/javascripts/helpers/class_spec_helper_spec.js)10
-rw-r--r--spec/frontend/helpers/diffs_helper_spec.js113
-rw-r--r--spec/frontend/helpers/stub_children.js3
-rw-r--r--spec/frontend/ide/components/branches/search_list_spec.js1
-rw-r--r--spec/frontend/ide/components/commit_sidebar/editor_header_spec.js82
-rw-r--r--spec/frontend/ide/components/error_message_spec.js10
-rw-r--r--spec/frontend/ide/components/file_templates/dropdown_spec.js5
-rw-r--r--spec/frontend/ide/components/ide_status_list_spec.js3
-rw-r--r--spec/frontend/ide/components/jobs/__snapshots__/stage_spec.js.snap2
-rw-r--r--spec/frontend/ide/components/jobs/detail/scroll_button_spec.js (renamed from spec/javascripts/ide/components/jobs/detail/scroll_button_spec.js)2
-rw-r--r--spec/frontend/ide/components/jobs/list_spec.js2
-rw-r--r--spec/frontend/ide/components/jobs/stage_spec.js10
-rw-r--r--spec/frontend/ide/components/merge_requests/list_spec.js1
-rw-r--r--spec/frontend/ide/components/panes/collapsible_sidebar_spec.js167
-rw-r--r--spec/frontend/ide/components/panes/right_spec.js151
-rw-r--r--spec/frontend/ide/components/pipelines/__snapshots__/list_spec.js.snap2
-rw-r--r--spec/frontend/ide/components/pipelines/list_spec.js1
-rw-r--r--spec/frontend/ide/components/preview/clientside_spec.js1
-rw-r--r--spec/frontend/ide/stores/actions/file_spec.js (renamed from spec/javascripts/ide/stores/actions/file_spec.js)364
-rw-r--r--spec/frontend/ide/stores/modules/pane/actions_spec.js76
-rw-r--r--spec/frontend/ide/stores/mutations/file_spec.js233
-rw-r--r--spec/frontend/image_diff/helpers/init_image_diff_spec.js (renamed from spec/javascripts/image_diff/helpers/init_image_diff_spec.js)4
-rw-r--r--spec/frontend/image_diff/init_discussion_tab_spec.js (renamed from spec/javascripts/image_diff/init_discussion_tab_spec.js)22
-rw-r--r--spec/frontend/import_projects/components/import_projects_table_spec.js1
-rw-r--r--spec/frontend/import_projects/components/imported_project_table_row_spec.js1
-rw-r--r--spec/frontend/import_projects/components/provider_repo_table_row_spec.js1
-rw-r--r--spec/frontend/issuable_suggestions/components/app_spec.js22
-rw-r--r--spec/frontend/issuable_suggestions/components/item_spec.js4
-rw-r--r--spec/frontend/issuables_list/components/__snapshots__/issuables_list_app_spec.js.snap2
-rw-r--r--spec/frontend/issuables_list/components/issuable_spec.js10
-rw-r--r--spec/frontend/issuables_list/components/issuables_list_app_spec.js9
-rw-r--r--spec/frontend/issue_show/components/edit_actions_spec.js (renamed from spec/javascripts/issue_show/components/edit_actions_spec.js)8
-rw-r--r--spec/frontend/issue_show/components/fields/description_spec.js (renamed from spec/javascripts/issue_show/components/fields/description_spec.js)2
-rw-r--r--spec/frontend/issue_show/components/fields/title_spec.js (renamed from spec/javascripts/issue_show/components/fields/title_spec.js)2
-rw-r--r--spec/frontend/issue_show/components/pinned_links_spec.js8
-rw-r--r--spec/frontend/issue_show/index_spec.js (renamed from spec/javascripts/issue_show/index_spec.js)2
-rw-r--r--spec/frontend/issue_spec.js (renamed from spec/javascripts/issue_spec.js)94
-rw-r--r--spec/frontend/jobs/components/erased_block_spec.js2
-rw-r--r--spec/frontend/jobs/components/job_log_controllers_spec.js (renamed from spec/javascripts/jobs/components/job_log_controllers_spec.js)8
-rw-r--r--spec/frontend/jobs/components/log/collapsible_section_spec.js6
-rw-r--r--spec/frontend/jobs/components/log/duration_badge_spec.js1
-rw-r--r--spec/frontend/jobs/components/log/line_header_spec.js5
-rw-r--r--spec/frontend/jobs/components/log/line_number_spec.js1
-rw-r--r--spec/frontend/jobs/components/log/line_spec.js1
-rw-r--r--spec/frontend/jobs/components/log/log_spec.js1
-rw-r--r--spec/frontend/jobs/components/log/mock_data.js2
-rw-r--r--spec/frontend/lib/utils/datetime_utility_spec.js56
-rw-r--r--spec/frontend/lib/utils/poll_until_complete_spec.js89
-rw-r--r--spec/frontend/lib/utils/text_utility_spec.js19
-rw-r--r--spec/frontend/monitoring/components/__snapshots__/empty_state_spec.js.snap6
-rw-r--r--spec/frontend/monitoring/components/__snapshots__/group_empty_state_spec.js.snap14
-rw-r--r--spec/frontend/monitoring/components/charts/anomaly_spec.js1
-rw-r--r--spec/frontend/monitoring/components/charts/column_spec.js8
-rw-r--r--spec/frontend/monitoring/components/charts/empty_chart_spec.js8
-rw-r--r--spec/frontend/monitoring/components/charts/single_stat_spec.js8
-rw-r--r--spec/frontend/monitoring/components/charts/time_series_spec.js39
-rw-r--r--spec/frontend/monitoring/components/dashboard_spec.js553
-rw-r--r--spec/frontend/monitoring/components/dashboard_time_url_spec.js51
-rw-r--r--spec/frontend/monitoring/components/dashboard_time_window_spec.js68
-rw-r--r--spec/frontend/monitoring/components/dashboards_dropdown_spec.js249
-rw-r--r--spec/frontend/monitoring/components/date_time_picker/date_time_picker_input_spec.js6
-rw-r--r--spec/frontend/monitoring/components/date_time_picker/date_time_picker_spec.js99
-rw-r--r--spec/frontend/monitoring/components/duplicate_dashboard_form_spec.js153
-rw-r--r--spec/frontend/monitoring/components/graph_group_spec.js (renamed from spec/javascripts/monitoring/components/graph_group_spec.js)8
-rw-r--r--spec/frontend/monitoring/init_utils.js57
-rw-r--r--spec/frontend/monitoring/mock_data.js144
-rw-r--r--spec/frontend/monitoring/panel_type_spec.js4
-rw-r--r--spec/frontend/monitoring/shared/prometheus_header_spec.js (renamed from spec/javascripts/monitoring/shared/prometheus_header_spec.js)2
-rw-r--r--spec/frontend/monitoring/store/actions_spec.js86
-rw-r--r--spec/frontend/monitoring/store/mutations_spec.js12
-rw-r--r--spec/frontend/mr_popover/__snapshots__/mr_popover_spec.js.snap12
-rw-r--r--spec/frontend/mr_popover/mr_popover_spec.js13
-rw-r--r--spec/frontend/namespace_select_spec.js (renamed from spec/javascripts/namespace_select_spec.js)10
-rw-r--r--spec/frontend/new_branch_spec.js (renamed from spec/javascripts/new_branch_spec.js)68
-rw-r--r--spec/frontend/notes/components/__snapshots__/discussion_jump_to_next_button_spec.js.snap3
-rw-r--r--spec/frontend/notes/components/comment_form_spec.js2
-rw-r--r--spec/frontend/notes/components/diff_discussion_header_spec.js9
-rw-r--r--spec/frontend/notes/components/discussion_actions_spec.js6
-rw-r--r--spec/frontend/notes/components/discussion_filter_note_spec.js (renamed from spec/javascripts/notes/components/discussion_filter_note_spec.js)6
-rw-r--r--spec/frontend/notes/components/discussion_jump_to_next_button_spec.js11
-rw-r--r--spec/frontend/notes/components/discussion_notes_replies_wrapper_spec.js5
-rw-r--r--spec/frontend/notes/components/discussion_notes_spec.js45
-rw-r--r--spec/frontend/notes/components/discussion_reply_placeholder_spec.js10
-rw-r--r--spec/frontend/notes/components/discussion_resolve_button_spec.js13
-rw-r--r--spec/frontend/notes/components/note_app_spec.js11
-rw-r--r--spec/frontend/notes/components/note_edited_text_spec.js6
-rw-r--r--spec/frontend/notes/components/note_header_spec.js (renamed from spec/javascripts/notes/components/note_header_spec.js)2
-rw-r--r--spec/frontend/notes/stores/getters_spec.js (renamed from spec/javascripts/notes/stores/getters_spec.js)2
-rw-r--r--spec/frontend/notes/stores/mutation_spec.js (renamed from spec/javascripts/notes/stores/mutation_spec.js)4
-rw-r--r--spec/frontend/operation_settings/components/external_dashboard_spec.js4
-rw-r--r--spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap14
-rw-r--r--spec/frontend/pages/admin/users/components/__snapshots__/user_operation_confirmation_modal_spec.js.snap4
-rw-r--r--spec/frontend/pages/admin/users/components/delete_user_modal_spec.js1
-rw-r--r--spec/frontend/pages/admin/users/components/user_modal_manager_spec.js9
-rw-r--r--spec/frontend/pages/admin/users/components/user_operation_confirmation_modal_spec.js1
-rw-r--r--spec/frontend/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js (renamed from spec/javascripts/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js)6
-rw-r--r--spec/frontend/performance_bar/components/add_request_spec.js12
-rw-r--r--spec/frontend/pipelines/graph/action_component_spec.js6
-rw-r--r--spec/frontend/pipelines/graph/job_item_spec.js16
-rw-r--r--spec/frontend/pipelines/graph/linked_pipeline_spec.js67
-rw-r--r--spec/frontend/pipelines/graph/linked_pipelines_mock_data.js4
-rw-r--r--spec/frontend/pipelines/nav_controls_spec.js (renamed from spec/javascripts/pipelines/nav_controls_spec.js)2
-rw-r--r--spec/frontend/pipelines/pipeline_triggerer_spec.js2
-rw-r--r--spec/frontend/pipelines/pipeline_url_spec.js6
-rw-r--r--spec/frontend/pipelines/pipelines_table_row_spec.js1
-rw-r--r--spec/frontend/polyfills/element_spec.js46
-rw-r--r--spec/frontend/profile/add_ssh_key_validation_spec.js (renamed from spec/javascripts/profile/add_ssh_key_validation_spec.js)10
-rw-r--r--spec/frontend/project_select_combo_button_spec.js140
-rw-r--r--spec/frontend/registry/list/components/__snapshots__/project_empty_state_spec.js.snap9
-rw-r--r--spec/frontend/registry/list/components/app_spec.js13
-rw-r--r--spec/frontend/registry/list/components/collapsible_container_spec.js38
-rw-r--r--spec/frontend/registry/list/components/project_empty_state_spec.js2
-rw-r--r--spec/frontend/registry/list/components/table_registry_spec.js69
-rw-r--r--spec/frontend/registry/settings/components/__snapshots__/registry_settings_app_spec.js.snap14
-rw-r--r--spec/frontend/registry/settings/components/__snapshots__/settings_form_spec.js.snap181
-rw-r--r--spec/frontend/registry/settings/components/registry_settings_app_spec.js37
-rw-r--r--spec/frontend/registry/settings/components/settings_form_spec.js169
-rw-r--r--spec/frontend/registry/settings/mock_data.js12
-rw-r--r--spec/frontend/registry/settings/store/actions_spec.js124
-rw-r--r--spec/frontend/registry/settings/store/mutations_spec.js58
-rw-r--r--spec/frontend/registry/settings/stores/actions_spec.js20
-rw-r--r--spec/frontend/registry/settings/stores/mutations_spec.js21
-rw-r--r--spec/frontend/releases/detail/components/app_spec.js4
-rw-r--r--spec/frontend/releases/list/components/evidence_block_spec.js18
-rw-r--r--spec/frontend/releases/list/components/release_block_footer_spec.js1
-rw-r--r--spec/frontend/releases/list/components/release_block_milestone_info_spec.js3
-rw-r--r--spec/frontend/releases/list/components/release_block_spec.js5
-rw-r--r--spec/frontend/reports/components/report_item_spec.js5
-rw-r--r--spec/frontend/repository/components/__snapshots__/directory_download_links_spec.js.snap16
-rw-r--r--spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap46
-rw-r--r--spec/frontend/repository/components/breadcrumbs_spec.js8
-rw-r--r--spec/frontend/repository/components/last_commit_spec.js64
-rw-r--r--spec/frontend/repository/components/preview/__snapshots__/index_spec.js.snap4
-rw-r--r--spec/frontend/repository/components/preview/index_spec.js8
-rw-r--r--spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap4
-rw-r--r--spec/frontend/repository/components/table/index_spec.js8
-rw-r--r--spec/frontend/repository/components/table/parent_row_spec.js10
-rw-r--r--spec/frontend/repository/components/table/row_spec.js75
-rw-r--r--spec/frontend/repository/components/tree_content_spec.js4
-rw-r--r--spec/frontend/repository/utils/readme_spec.js49
-rw-r--r--spec/frontend/self_monitor/components/__snapshots__/self_monitor_spec.js.snap72
-rw-r--r--spec/frontend/self_monitor/components/self_monitor_spec.js83
-rw-r--r--spec/frontend/self_monitor/store/actions_spec.js255
-rw-r--r--spec/frontend/self_monitor/store/mutations_spec.js64
-rw-r--r--spec/frontend/sentry_error_stack_trace/components/sentry_error_stack_trace_spec.js87
-rw-r--r--spec/frontend/serverless/components/area_spec.js1
-rw-r--r--spec/frontend/serverless/components/environment_row_spec.js20
-rw-r--r--spec/frontend/serverless/components/function_details_spec.js4
-rw-r--r--spec/frontend/serverless/components/function_row_spec.js4
-rw-r--r--spec/frontend/serverless/components/functions_spec.js5
-rw-r--r--spec/frontend/serverless/components/missing_prometheus_spec.js1
-rw-r--r--spec/frontend/serverless/components/pod_box_spec.js1
-rw-r--r--spec/frontend/serverless/components/url_spec.js1
-rw-r--r--spec/frontend/shared/popover_spec.js (renamed from spec/javascripts/shared/popover_spec.js)38
-rw-r--r--spec/frontend/sidebar/__snapshots__/todo_spec.js.snap2
-rw-r--r--spec/frontend/sidebar/assignees_spec.js8
-rw-r--r--spec/frontend/sidebar/components/assignees/assignee_avatar_link_spec.js4
-rw-r--r--spec/frontend/sidebar/components/assignees/assignee_avatar_spec.js1
-rw-r--r--spec/frontend/sidebar/components/assignees/collapsed_assignee_list_spec.js4
-rw-r--r--spec/frontend/sidebar/components/assignees/collapsed_assignee_spec.js1
-rw-r--r--spec/frontend/sidebar/components/assignees/uncollapsed_assignee_list_spec.js2
-rw-r--r--spec/frontend/sidebar/confidential_issue_sidebar_spec.js29
-rw-r--r--spec/frontend/sidebar/sidebar_store_spec.js168
-rw-r--r--spec/frontend/sidebar/todo_spec.js5
-rw-r--r--spec/frontend/snippets/components/app_spec.js5
-rw-r--r--spec/frontend/snippets/components/snippet_header_spec.js5
-rw-r--r--spec/frontend/snippets/components/snippet_title_spec.js71
-rw-r--r--spec/frontend/syntax_highlight_spec.js (renamed from spec/javascripts/syntax_highlight_spec.js)26
-rw-r--r--spec/frontend/task_list_spec.js (renamed from spec/javascripts/task_list_spec.js)30
-rw-r--r--spec/frontend/test_setup.js5
-rw-r--r--spec/frontend/version_check_image_spec.js42
-rw-r--r--spec/frontend/vue_mr_widget/components/artifacts_list_app_spec.js3
-rw-r--r--spec/frontend/vue_mr_widget/components/artifacts_list_spec.js7
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_collapsible_extension_spec.js1
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_container_spec.js7
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_icon_spec.js8
-rw-r--r--spec/frontend/vue_mr_widget/components/states/commit_edit_spec.js13
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js1
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_commit_message_dropdown_spec.js11
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_commits_header_spec.js12
-rw-r--r--spec/frontend/vue_mr_widget/deployment/deployment_spec.js2
-rw-r--r--spec/frontend/vue_mr_widget/deployment/deployment_view_button_spec.js41
-rw-r--r--spec/frontend/vue_shared/components/__snapshots__/expand_button_spec.js.snap13
-rw-r--r--spec/frontend/vue_shared/components/__snapshots__/memory_graph_spec.js.snap2
-rw-r--r--spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap14
-rw-r--r--spec/frontend/vue_shared/components/callout_spec.js7
-rw-r--r--spec/frontend/vue_shared/components/changed_file_icon_spec.js12
-rw-r--r--spec/frontend/vue_shared/components/clipboard_button_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/commit_spec.js19
-rw-r--r--spec/frontend/vue_shared/components/dropdown/dropdown_search_input_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/expand_button_spec.js42
-rw-r--r--spec/frontend/vue_shared/components/file_icon_spec.js1
-rw-r--r--spec/frontend/vue_shared/components/gl_modal_vuex_spec.js (renamed from spec/javascripts/vue_shared/components/gl_modal_vuex_spec.js)20
-rw-r--r--spec/frontend/vue_shared/components/gl_toggle_vuex_spec.js1
-rw-r--r--spec/frontend/vue_shared/components/issue/issue_assignees_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/issue/issue_milestone_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/issue/related_issuable_item_spec.js17
-rw-r--r--spec/frontend/vue_shared/components/loading_button_spec.js100
-rw-r--r--spec/frontend/vue_shared/components/markdown/field_spec.js68
-rw-r--r--spec/frontend/vue_shared/components/markdown/header_spec.js18
-rw-r--r--spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js9
-rw-r--r--spec/frontend/vue_shared/components/markdown/suggestion_diff_row_spec.js57
-rw-r--r--spec/frontend/vue_shared/components/markdown/suggestion_diff_spec.js (renamed from spec/javascripts/vue_shared/components/markdown/suggestion_diff_spec.js)2
-rw-r--r--spec/frontend/vue_shared/components/modal_copy_button_spec.js16
-rw-r--r--spec/frontend/vue_shared/components/notes/system_note_spec.js7
-rw-r--r--spec/frontend/vue_shared/components/notes/timeline_entry_item_spec.js5
-rw-r--r--spec/frontend/vue_shared/components/paginated_list_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/pagination_links_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/recaptcha_modal_spec.js1
-rw-r--r--spec/frontend/vue_shared/components/resizable_chart_container_spec.js1
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select/base_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_value_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/slot_switch_spec.js1
-rw-r--r--spec/frontend/vue_shared/components/split_button_spec.js15
-rw-r--r--spec/frontend/vue_shared/components/table_pagination_spec.js1
-rw-r--r--spec/frontend/vue_shared/components/time_ago_tooltip_spec.js7
-rw-r--r--spec/frontend/vue_shared/components/user_avatar/user_avatar_image_spec.js11
-rw-r--r--spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js8
-rw-r--r--spec/frontend/vue_shared/components/user_avatar/user_avatar_list_spec.js (renamed from spec/javascripts/vue_shared/components/user_avatar/user_avatar_list_spec.js)16
-rw-r--r--spec/frontend/vue_shared/components/user_popover/user_popover_spec.js149
-rw-r--r--spec/frontend/vue_shared/directives/track_event_spec.js14
-rw-r--r--spec/frontend/vue_shared/droplab_dropdown_button_spec.js10
-rw-r--r--spec/frontend/vue_shared/mixins/gl_feature_flags_mixin_spec.js5
-rw-r--r--spec/frontend/vuex_shared/bindings_spec.js79
-rw-r--r--spec/graphql/gitlab_schema_spec.rb16
-rw-r--r--spec/graphql/resolvers/projects/grafana_integration_resolver_spec.rb29
-rw-r--r--spec/graphql/types/environment_type_spec.rb17
-rw-r--r--spec/graphql/types/error_tracking/sentry_detailed_error_type_spec.rb2
-rw-r--r--spec/graphql/types/grafana_integration_type_spec.rb22
-rw-r--r--spec/graphql/types/group_type_spec.rb6
-rw-r--r--spec/graphql/types/project_type_spec.rb45
-rw-r--r--spec/graphql/types/query_type_spec.rb11
-rw-r--r--spec/helpers/application_helper_spec.rb9
-rw-r--r--spec/helpers/application_settings_helper_spec.rb64
-rw-r--r--spec/helpers/broadcast_messages_helper_spec.rb20
-rw-r--r--spec/helpers/container_expiration_policies_helper_spec.rb6
-rw-r--r--spec/helpers/environments_helper_spec.rb5
-rw-r--r--spec/helpers/gitlab_routing_helper_spec.rb23
-rw-r--r--spec/helpers/markup_helper_spec.rb22
-rw-r--r--spec/helpers/projects/error_tracking_helper_spec.rb18
-rw-r--r--spec/helpers/projects_helper_spec.rb4
-rw-r--r--spec/helpers/users_helper_spec.rb14
-rw-r--r--spec/initializers/database_config_spec.rb23
-rw-r--r--spec/initializers/lograge_spec.rb2
-rw-r--r--spec/javascripts/behaviors/bind_in_out_spec.js192
-rw-r--r--spec/javascripts/breakpoints_spec.js27
-rw-r--r--spec/javascripts/create_cluster/gke_cluster/components/gke_machine_type_dropdown_spec.js109
-rw-r--r--spec/javascripts/create_cluster/gke_cluster/components/gke_project_id_dropdown_spec.js115
-rw-r--r--spec/javascripts/diffs/components/app_spec.js184
-rw-r--r--spec/javascripts/diffs/components/compare_versions_dropdown_spec.js1
-rw-r--r--spec/javascripts/diffs/components/diff_discussions_spec.js1
-rw-r--r--spec/javascripts/diffs/store/actions_spec.js15
-rw-r--r--spec/javascripts/diffs/store/getters_spec.js8
-rw-r--r--spec/javascripts/diffs/store/mutations_spec.js68
-rw-r--r--spec/javascripts/diffs/store/utils_spec.js33
-rw-r--r--spec/javascripts/droplab/constants_spec.js39
-rw-r--r--spec/javascripts/droplab/drop_down_spec.js13
-rw-r--r--spec/javascripts/dropzone_input_spec.js29
-rw-r--r--spec/javascripts/filtered_search/dropdown_utils_spec.js14
-rw-r--r--spec/javascripts/filtered_search/filtered_search_dropdown_manager_spec.js43
-rw-r--r--spec/javascripts/filtered_search/filtered_search_manager_spec.js16
-rw-r--r--spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js88
-rw-r--r--spec/javascripts/filtered_search/issues_filtered_search_token_keys_spec.js148
-rw-r--r--spec/javascripts/filtered_search/visual_token_value_spec.js10
-rw-r--r--spec/javascripts/fly_out_nav_spec.js4
-rw-r--r--spec/javascripts/frequent_items/components/frequent_items_list_item_spec.js1
-rw-r--r--spec/javascripts/frequent_items/utils_spec.js18
-rw-r--r--spec/javascripts/helpers/filtered_search_spec_helper.js17
-rw-r--r--spec/javascripts/ide/components/commit_sidebar/form_spec.js38
-rw-r--r--spec/javascripts/ide/components/commit_sidebar/list_item_spec.js1
-rw-r--r--spec/javascripts/ide/components/commit_sidebar/list_spec.js1
-rw-r--r--spec/javascripts/ide/components/commit_sidebar/stage_button_spec.js46
-rw-r--r--spec/javascripts/ide/components/commit_sidebar/unstage_button_spec.js39
-rw-r--r--spec/javascripts/ide/components/new_dropdown/modal_spec.js43
-rw-r--r--spec/javascripts/ide/components/repo_editor_spec.js12
-rw-r--r--spec/javascripts/ide/components/repo_tab_spec.js4
-rw-r--r--spec/javascripts/ide/lib/editor_spec.js1
-rw-r--r--spec/javascripts/ide/stores/actions/merge_request_spec.js9
-rw-r--r--spec/javascripts/ide/stores/actions/project_spec.js46
-rw-r--r--spec/javascripts/ide/stores/actions/tree_spec.js9
-rw-r--r--spec/javascripts/ide/stores/actions_spec.js618
-rw-r--r--spec/javascripts/jobs/components/manual_variables_form_spec.js1
-rw-r--r--spec/javascripts/lib/utils/common_utils_spec.js5
-rw-r--r--spec/javascripts/merge_request_tabs_spec.js1
-rw-r--r--spec/javascripts/monitoring/components/dashboard_resize_spec.js141
-rw-r--r--spec/javascripts/monitoring/components/dashboard_spec.js729
-rw-r--r--spec/javascripts/monitoring/helpers.js8
-rw-r--r--spec/javascripts/notes/components/discussion_resolve_with_issue_button_spec.js1
-rw-r--r--spec/javascripts/notes/components/note_actions/reply_button_spec.js1
-rw-r--r--spec/javascripts/notes/components/note_actions_spec.js1
-rw-r--r--spec/javascripts/notes/components/note_form_spec.js1
-rw-r--r--spec/javascripts/notes/components/noteable_discussion_spec.js60
-rw-r--r--spec/javascripts/notes/components/noteable_note_spec.js1
-rw-r--r--spec/javascripts/pipelines/graph/graph_component_spec.js1
-rw-r--r--spec/javascripts/pipelines/graph/linked_pipelines_column_spec.js1
-rw-r--r--spec/javascripts/pipelines/header_component_spec.js15
-rw-r--r--spec/javascripts/pipelines/linked_pipelines_mock.json3
-rw-r--r--spec/javascripts/polyfills/element_spec.js36
-rw-r--r--spec/javascripts/project_select_combo_button_spec.js124
-rw-r--r--spec/javascripts/projects/project_import_gitlab_project_spec.js46
-rw-r--r--spec/javascripts/projects/project_new_spec.js30
-rw-r--r--spec/javascripts/related_merge_requests/components/related_merge_requests_spec.js1
-rw-r--r--spec/javascripts/sidebar/participants_spec.js17
-rw-r--r--spec/javascripts/sidebar/sidebar_store_spec.js162
-rw-r--r--spec/javascripts/version_check_image_spec.js35
-rw-r--r--spec/javascripts/vue_mr_widget/components/mr_widget_alert_message_spec.js1
-rw-r--r--spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_container_spec.js1
-rw-r--r--spec/javascripts/vue_mr_widget/components/review_app_link_spec.js5
-rw-r--r--spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js44
-rw-r--r--spec/javascripts/vue_mr_widget/components/states/mr_widget_squash_before_merge_spec.js1
-rw-r--r--spec/javascripts/vue_shared/components/header_ci_component_spec.js17
-rw-r--r--spec/javascripts/vue_shared/components/issue/related_issuable_mock_data.js13
-rw-r--r--spec/javascripts/vue_shared/components/loading_button_spec.js111
-rw-r--r--spec/javascripts/vue_shared/components/pagination/graphql_pagination_spec.js1
-rw-r--r--spec/javascripts/vue_shared/components/project_selector/project_list_item_spec.js1
-rw-r--r--spec/javascripts/vue_shared/components/project_selector/project_selector_spec.js1
-rw-r--r--spec/javascripts/vue_shared/components/tooltip_on_truncate_spec.js1
-rw-r--r--spec/lib/api/helpers/pagination_spec.rb70
-rw-r--r--spec/lib/api/helpers/pagination_strategies_spec.rb97
-rw-r--r--spec/lib/banzai/filter/abstract_reference_filter_spec.rb24
-rw-r--r--spec/lib/banzai/filter/plantuml_filter_spec.rb2
-rw-r--r--spec/lib/banzai/filter/repository_link_filter_spec.rb (renamed from spec/lib/banzai/filter/relative_link_filter_spec.rb)174
-rw-r--r--spec/lib/banzai/filter/upload_link_filter_spec.rb221
-rw-r--r--spec/lib/banzai/pipeline/post_process_pipeline_spec.rb26
-rw-r--r--spec/lib/banzai/pipeline/wiki_pipeline_spec.rb2
-rw-r--r--spec/lib/banzai/reference_parser/base_parser_spec.rb6
-rw-r--r--spec/lib/expand_variables_spec.rb2
-rw-r--r--spec/lib/feature_spec.rb7
-rw-r--r--spec/lib/gitlab/app_json_logger_spec.rb18
-rw-r--r--spec/lib/gitlab/app_logger_spec.rb18
-rw-r--r--spec/lib/gitlab/app_text_logger_spec.rb25
-rw-r--r--spec/lib/gitlab/application_context_spec.rb104
-rw-r--r--spec/lib/gitlab/asciidoc_spec.rb27
-rw-r--r--spec/lib/gitlab/auth/auth_finders_spec.rb87
-rw-r--r--spec/lib/gitlab/auth/request_authenticator_spec.rb51
-rw-r--r--spec/lib/gitlab/auth_spec.rb15
-rw-r--r--spec/lib/gitlab/background_migration/activate_prometheus_services_for_shared_cluster_applications_spec.rb75
-rw-r--r--spec/lib/gitlab/background_migration/migrate_fingerprint_sha256_within_keys_spec.rb93
-rw-r--r--spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb320
-rw-r--r--spec/lib/gitlab/background_migration_spec.rb11
-rw-r--r--spec/lib/gitlab/backtrace_cleaner_spec.rb50
-rw-r--r--spec/lib/gitlab/badge/coverage/report_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/build/policy/refs_spec.rb28
-rw-r--r--spec/lib/gitlab/ci/config/entry/cache_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/config/entry/job_spec.rb36
-rw-r--r--spec/lib/gitlab/ci/config/entry/release/assets/link_spec.rb79
-rw-r--r--spec/lib/gitlab/ci/config/entry/release/assets/links_spec.rb67
-rw-r--r--spec/lib/gitlab/ci/config/entry/release/assets_spec.rb69
-rw-r--r--spec/lib/gitlab/ci/config/entry/release_spec.rb114
-rw-r--r--spec/lib/gitlab/ci/config/entry/reports_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/config/entry/root_spec.rb48
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/config/content_spec.rb201
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/validate/abilities_spec.rb41
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/build/resource_group_spec.rb46
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/build_spec.rb9
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/deployment_spec.rb9
-rw-r--r--spec/lib/gitlab/ci/status/external/factory_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/status/factory_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/status/pipeline/factory_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/status/stage/factory_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/status/waiting_for_resource_spec.rb29
-rw-r--r--spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb50
-rw-r--r--spec/lib/gitlab/ci/trace_spec.rb62
-rw-r--r--spec/lib/gitlab/ci/yaml_processor_spec.rb101
-rw-r--r--spec/lib/gitlab/closing_issue_extractor_spec.rb11
-rw-r--r--spec/lib/gitlab/config/entry/attributable_spec.rb2
-rw-r--r--spec/lib/gitlab/cycle_analytics/production_stage_spec.rb2
-rw-r--r--spec/lib/gitlab/danger/changelog_spec.rb12
-rw-r--r--spec/lib/gitlab/danger/commit_linter_spec.rb315
-rw-r--r--spec/lib/gitlab/danger/emoji_checker_spec.rb38
-rw-r--r--spec/lib/gitlab/danger/helper_spec.rb13
-rw-r--r--spec/lib/gitlab/data_builder/note_spec.rb2
-rw-r--r--spec/lib/gitlab/database/migration_helpers_spec.rb67
-rw-r--r--spec/lib/gitlab/database/rename_reserved_paths_migration/v1_spec.rb4
-rw-r--r--spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb164
-rw-r--r--spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb92
-rw-r--r--spec/lib/gitlab/database_importers/self_monitoring/project/delete_service_spec.rb51
-rw-r--r--spec/lib/gitlab/database_spec.rb14
-rw-r--r--spec/lib/gitlab/dependency_linker/cargo_toml_linker_spec.rb69
-rw-r--r--spec/lib/gitlab/dependency_linker_spec.rb8
-rw-r--r--spec/lib/gitlab/diff/file_spec.rb10
-rw-r--r--spec/lib/gitlab/email/attachment_uploader_spec.rb2
-rw-r--r--spec/lib/gitlab/email/handler/create_note_handler_spec.rb13
-rw-r--r--spec/lib/gitlab/email/receiver_spec.rb19
-rw-r--r--spec/lib/gitlab/experimentation_spec.rb4
-rw-r--r--spec/lib/gitlab/file_detector_spec.rb23
-rw-r--r--spec/lib/gitlab/file_finder_spec.rb6
-rw-r--r--spec/lib/gitlab/file_hook_spec.rb (renamed from spec/lib/gitlab/plugin_spec.rb)34
-rw-r--r--spec/lib/gitlab/git/branch_spec.rb4
-rw-r--r--spec/lib/gitlab/git/commit_spec.rb2
-rw-r--r--spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb77
-rw-r--r--spec/lib/gitlab/git_spec.rb29
-rw-r--r--spec/lib/gitlab/gitaly_client/blob_service_spec.rb2
-rw-r--r--spec/lib/gitlab/gitaly_client/remote_service_spec.rb2
-rw-r--r--spec/lib/gitlab/gitaly_client_spec.rb57
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb174
-rw-r--r--spec/lib/gitlab/gpg_spec.rb2
-rw-r--r--spec/lib/gitlab/grape_logging/loggers/exception_logger_spec.rb2
-rw-r--r--spec/lib/gitlab/graphql/connections/externally_paginated_array_connection_spec.rb87
-rw-r--r--spec/lib/gitlab/graphql/connections/filterable_array_connection_spec.rb4
-rw-r--r--spec/lib/gitlab/graphql/connections/keyset/connection_spec.rb4
-rw-r--r--spec/lib/gitlab/group_search_results_spec.rb6
-rw-r--r--spec/lib/gitlab/health_checks/puma_check_spec.rb2
-rw-r--r--spec/lib/gitlab/health_checks/unicorn_check_spec.rb2
-rw-r--r--spec/lib/gitlab/highlight_spec.rb2
-rw-r--r--spec/lib/gitlab/import/merge_request_helpers_spec.rb3
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml48
-rw-r--r--spec/lib/gitlab/import_export/base_object_builder_spec.rb53
-rw-r--r--spec/lib/gitlab/import_export/base_relation_factory_spec.rb145
-rw-r--r--spec/lib/gitlab/import_export/group_project_object_builder_spec.rb53
-rw-r--r--spec/lib/gitlab/import_export/import_failure_service_spec.rb107
-rw-r--r--spec/lib/gitlab/import_export/import_test_coverage_spec.rb132
-rw-r--r--spec/lib/gitlab/import_export/project_relation_factory_spec.rb (renamed from spec/lib/gitlab/import_export/relation_factory_spec.rb)116
-rw-r--r--spec/lib/gitlab/import_export/project_tree_restorer_spec.rb124
-rw-r--r--spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb4
-rw-r--r--spec/lib/gitlab/import_export/safe_model_attributes.yml34
-rw-r--r--spec/lib/gitlab/kubernetes/helm/api_spec.rb11
-rw-r--r--spec/lib/gitlab/kubernetes/kube_client_spec.rb37
-rw-r--r--spec/lib/gitlab/kubernetes/namespace_spec.rb14
-rw-r--r--spec/lib/gitlab/metrics/dashboard/processor_spec.rb18
-rw-r--r--spec/lib/gitlab/metrics/samplers/influx_sampler_spec.rb4
-rw-r--r--spec/lib/gitlab/middleware/request_context_spec.rb78
-rw-r--r--spec/lib/gitlab/multi_destination_logger_spec.rb59
-rw-r--r--spec/lib/gitlab/pages_spec.rb22
-rw-r--r--spec/lib/gitlab/pagination/keyset/page_spec.rb11
-rw-r--r--spec/lib/gitlab/pagination/keyset/pager_spec.rb54
-rw-r--r--spec/lib/gitlab/pagination/keyset/request_context_spec.rb4
-rw-r--r--spec/lib/gitlab/pagination/keyset_spec.rb16
-rw-r--r--spec/lib/gitlab/patch/action_dispatch_journey_formatter_spec.rb33
-rw-r--r--spec/lib/gitlab/phabricator_import/conduit/user_spec.rb6
-rw-r--r--spec/lib/gitlab/phabricator_import/user_finder_spec.rb2
-rw-r--r--spec/lib/gitlab/profiler_spec.rb47
-rw-r--r--spec/lib/gitlab/project_search_results_spec.rb3
-rw-r--r--spec/lib/gitlab/prometheus/adapter_spec.rb (renamed from spec/services/prometheus/adapter_service_spec.rb)9
-rw-r--r--spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb3
-rw-r--r--spec/lib/gitlab/quick_actions/dsl_spec.rb2
-rw-r--r--spec/lib/gitlab/repository_cache_spec.rb2
-rw-r--r--spec/lib/gitlab/repository_set_cache_spec.rb2
-rw-r--r--spec/lib/gitlab/request_context_spec.rb79
-rw-r--r--spec/lib/gitlab/runtime_spec.rb163
-rw-r--r--spec/lib/gitlab/sidekiq_logging/exception_handler_spec.rb2
-rw-r--r--spec/lib/gitlab/sidekiq_logging/json_formatter_spec.rb53
-rw-r--r--spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb65
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb113
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/correlation_injector_spec.rb49
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/correlation_logger_spec.rb35
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb (renamed from spec/lib/gitlab/sidekiq_middleware/metrics_spec.rb)4
-rw-r--r--spec/lib/gitlab/sidekiq_middleware_spec.rb8
-rw-r--r--spec/lib/gitlab/slash_commands/command_spec.rb2
-rw-r--r--spec/lib/gitlab/slash_commands/deploy_spec.rb2
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb20
-rw-r--r--spec/lib/gitlab/utils/lazy_attributes_spec.rb70
-rw-r--r--spec/lib/gitlab/utils_spec.rb35
-rw-r--r--spec/lib/prometheus/pid_provider_spec.rb18
-rw-r--r--spec/lib/quality/helm_client_spec.rb2
-rw-r--r--spec/lib/quality/kubernetes_client_spec.rb2
-rw-r--r--spec/lib/sentry/api_urls_spec.rb85
-rw-r--r--spec/lib/sentry/client/event_spec.rb73
-rw-r--r--spec/lib/sentry/client/issue_link_spec.rb41
-rw-r--r--spec/lib/sentry/client/issue_spec.rb299
-rw-r--r--spec/lib/sentry/client/projects_spec.rb19
-rw-r--r--spec/lib/sentry/client/repo_spec.rb39
-rw-r--r--spec/lib/sentry/client_spec.rb218
-rw-r--r--spec/migrations/20190924152703_migrate_issue_trackers_data_spec.rb56
-rw-r--r--spec/migrations/20191204114127_delete_legacy_triggers_spec.rb23
-rw-r--r--spec/migrations/20200107172020_add_timestamp_softwarelicensespolicy_spec.rb23
-rw-r--r--spec/migrations/add_temporary_partial_index_on_project_id_to_services_spec.rb22
-rw-r--r--spec/migrations/backfill_operations_feature_flags_active_spec.rb54
-rw-r--r--spec/migrations/drop_project_ci_cd_settings_merge_trains_enabled_spec.rb21
-rw-r--r--spec/migrations/fix_max_pages_size_spec.rb19
-rw-r--r--spec/migrations/patch_prometheus_services_for_shared_cluster_applications_spec.rb134
-rw-r--r--spec/migrations/update_fingerprint_sha256_within_keys_spec.rb30
-rw-r--r--spec/models/active_session_spec.rb52
-rw-r--r--spec/models/application_setting_spec.rb12
-rw-r--r--spec/models/blob_spec.rb1
-rw-r--r--spec/models/blob_viewer/changelog_spec.rb1
-rw-r--r--spec/models/blob_viewer/composer_json_spec.rb1
-rw-r--r--spec/models/blob_viewer/gemspec_spec.rb1
-rw-r--r--spec/models/blob_viewer/gitlab_ci_yml_spec.rb1
-rw-r--r--spec/models/blob_viewer/license_spec.rb1
-rw-r--r--spec/models/blob_viewer/package_json_spec.rb2
-rw-r--r--spec/models/blob_viewer/podspec_json_spec.rb1
-rw-r--r--spec/models/blob_viewer/podspec_spec.rb1
-rw-r--r--spec/models/blob_viewer/readme_spec.rb1
-rw-r--r--spec/models/blob_viewer/route_map_spec.rb1
-rw-r--r--spec/models/board_spec.rb27
-rw-r--r--spec/models/ci/build_spec.rb104
-rw-r--r--spec/models/ci/build_trace_chunk_spec.rb2
-rw-r--r--spec/models/ci/pipeline_config_spec.rb10
-rw-r--r--spec/models/ci/pipeline_spec.rb217
-rw-r--r--spec/models/ci/processable_spec.rb55
-rw-r--r--spec/models/ci/resource_group_spec.rb88
-rw-r--r--spec/models/ci/resource_spec.rb28
-rw-r--r--spec/models/ci/runner_spec.rb2
-rw-r--r--spec/models/ci/stage_spec.rb33
-rw-r--r--spec/models/ci/trigger_spec.rb59
-rw-r--r--spec/models/clusters/applications/elastic_stack_spec.rb56
-rw-r--r--spec/models/clusters/applications/helm_spec.rb1
-rw-r--r--spec/models/clusters/applications/ingress_spec.rb10
-rw-r--r--spec/models/clusters/applications/jupyter_spec.rb5
-rw-r--r--spec/models/clusters/applications/knative_spec.rb1
-rw-r--r--spec/models/clusters/applications/prometheus_spec.rb13
-rw-r--r--spec/models/commit_spec.rb31
-rw-r--r--spec/models/commit_status_spec.rb66
-rw-r--r--spec/models/concerns/atomic_internal_id_spec.rb26
-rw-r--r--spec/models/concerns/cache_markdown_field_spec.rb2
-rw-r--r--spec/models/concerns/each_batch_spec.rb2
-rw-r--r--spec/models/concerns/has_status_spec.rb24
-rw-r--r--spec/models/concerns/ignorable_columns_spec.rb2
-rw-r--r--spec/models/concerns/issuable_spec.rb189
-rw-r--r--spec/models/concerns/loaded_in_group_list_spec.rb1
-rw-r--r--spec/models/concerns/milestoneable_spec.rb243
-rw-r--r--spec/models/concerns/prometheus_adapter_spec.rb1
-rw-r--r--spec/models/concerns/resolvable_note_spec.rb1
-rw-r--r--spec/models/concerns/safe_url_spec.rb14
-rw-r--r--spec/models/concerns/schedulable_spec.rb74
-rw-r--r--spec/models/concerns/token_authenticatable_spec.rb1
-rw-r--r--spec/models/container_expiration_policy_spec.rb34
-rw-r--r--spec/models/cycle_analytics/code_spec.rb12
-rw-r--r--spec/models/cycle_analytics/group_level_spec.rb10
-rw-r--r--spec/models/cycle_analytics/issue_spec.rb13
-rw-r--r--spec/models/cycle_analytics/plan_spec.rb13
-rw-r--r--spec/models/cycle_analytics/production_spec.rb19
-rw-r--r--spec/models/cycle_analytics/project_level_spec.rb10
-rw-r--r--spec/models/cycle_analytics/review_spec.rb6
-rw-r--r--spec/models/cycle_analytics/staging_spec.rb18
-rw-r--r--spec/models/cycle_analytics/test_spec.rb26
-rw-r--r--spec/models/deployment_metrics_spec.rb20
-rw-r--r--spec/models/deployment_spec.rb70
-rw-r--r--spec/models/diff_note_spec.rb128
-rw-r--r--spec/models/diff_viewer/base_spec.rb28
-rw-r--r--spec/models/environment_spec.rb83
-rw-r--r--spec/models/error_tracking/project_error_tracking_setting_spec.rb122
-rw-r--r--spec/models/external_issue_spec.rb1
-rw-r--r--spec/models/global_milestone_spec.rb1
-rw-r--r--spec/models/group_group_link_spec.rb8
-rw-r--r--spec/models/group_spec.rb52
-rw-r--r--spec/models/hooks/web_hook_log_spec.rb3
-rw-r--r--spec/models/import_failure_spec.rb28
-rw-r--r--spec/models/instance_configuration_spec.rb2
-rw-r--r--spec/models/internal_id_spec.rb3
-rw-r--r--spec/models/issue_spec.rb6
-rw-r--r--spec/models/key_spec.rb1
-rw-r--r--spec/models/merge_request_spec.rb46
-rw-r--r--spec/models/namespace_spec.rb7
-rw-r--r--spec/models/project_deploy_token_spec.rb1
-rw-r--r--spec/models/project_feature_spec.rb84
-rw-r--r--spec/models/project_services/chat_message/base_message_spec.rb34
-rw-r--r--spec/models/project_services/chat_message/wiki_page_message_spec.rb19
-rw-r--r--spec/models/project_services/emails_on_push_service_spec.rb72
-rw-r--r--spec/models/project_services/external_wiki_service_spec.rb30
-rw-r--r--spec/models/project_services/microsoft_teams_service_spec.rb1
-rw-r--r--spec/models/project_spec.rb47
-rw-r--r--spec/models/readme_blob_spec.rb1
-rw-r--r--spec/models/release_spec.rb6
-rw-r--r--spec/models/repository_spec.rb8
-rw-r--r--spec/models/resource_weight_event_spec.rb75
-rw-r--r--spec/models/sent_notification_spec.rb13
-rw-r--r--spec/models/sentry_issue_spec.rb12
-rw-r--r--spec/models/snippet_spec.rb1
-rw-r--r--spec/models/trending_project_spec.rb8
-rw-r--r--spec/models/uploads/fog_spec.rb2
-rw-r--r--spec/models/uploads/local_spec.rb2
-rw-r--r--spec/models/user_interacted_project_spec.rb1
-rw-r--r--spec/models/user_preference_spec.rb6
-rw-r--r--spec/models/user_spec.rb44
-rw-r--r--spec/policies/ci/trigger_policy_spec.rb54
-rw-r--r--spec/policies/project_policy_spec.rb28
-rw-r--r--spec/presenters/ci/build_runner_presenter_spec.rb78
-rw-r--r--spec/presenters/project_presenter_spec.rb330
-rw-r--r--spec/requests/api/appearance_spec.rb142
-rw-r--r--spec/requests/api/deployments_spec.rb86
-rw-r--r--spec/requests/api/discussions_spec.rb12
-rw-r--r--spec/requests/api/environments_spec.rb2
-rw-r--r--spec/requests/api/error_tracking_spec.rb79
-rw-r--r--spec/requests/api/events_spec.rb15
-rw-r--r--spec/requests/api/graphql/mutations/snippets/mark_as_spam_spec.rb4
-rw-r--r--spec/requests/api/graphql/project/error_tracking/sentry_detailed_error_request_spec.rb1
-rw-r--r--spec/requests/api/graphql/project/grafana_integration_spec.rb64
-rw-r--r--spec/requests/api/groups_spec.rb2
-rw-r--r--spec/requests/api/internal/base_spec.rb34
-rw-r--r--spec/requests/api/issues/get_group_issues_spec.rb27
-rw-r--r--spec/requests/api/issues/get_project_issues_spec.rb20
-rw-r--r--spec/requests/api/issues/issues_spec.rb11
-rw-r--r--spec/requests/api/issues/post_projects_issues_spec.rb10
-rw-r--r--spec/requests/api/jobs_spec.rb2
-rw-r--r--spec/requests/api/keys_spec.rb30
-rw-r--r--spec/requests/api/merge_requests_spec.rb103
-rw-r--r--spec/requests/api/notes_spec.rb69
-rw-r--r--spec/requests/api/pipelines_spec.rb4
-rw-r--r--spec/requests/api/projects_spec.rb151
-rw-r--r--spec/requests/api/remote_mirrors_spec.rb43
-rw-r--r--spec/requests/api/runner_spec.rb88
-rw-r--r--spec/requests/api/services_spec.rb33
-rw-r--r--spec/requests/api/triggers_spec.rb16
-rw-r--r--spec/requests/api/wikis_spec.rb2
-rw-r--r--spec/requests/self_monitoring_project_spec.rb224
-rw-r--r--spec/routing/admin_routing_spec.rb14
-rw-r--r--spec/routing/project_routing_spec.rb14
-rw-r--r--spec/routing/routing_spec.rb6
-rw-r--r--spec/routing/uploads_routing_spec.rb8
-rw-r--r--spec/rubocop/cop/migration/add_column_with_default_spec.rb66
-rw-r--r--spec/rubocop/cop/rspec/have_gitlab_http_status_spec.rb102
-rw-r--r--spec/serializers/deploy_key_entity_spec.rb1
-rw-r--r--spec/serializers/deployment_entity_spec.rb2
-rw-r--r--spec/serializers/environment_status_entity_spec.rb2
-rw-r--r--spec/serializers/issue_board_entity_spec.rb12
-rw-r--r--spec/serializers/pipeline_entity_spec.rb20
-rw-r--r--spec/serializers/review_app_setup_entity_spec.rb54
-rw-r--r--spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb2
-rw-r--r--spec/services/auto_merge_service_spec.rb4
-rw-r--r--spec/services/award_emojis/add_service_spec.rb6
-rw-r--r--spec/services/award_emojis/destroy_service_spec.rb6
-rw-r--r--spec/services/award_emojis/toggle_service_spec.rb6
-rw-r--r--spec/services/boards/issues/move_service_spec.rb16
-rw-r--r--spec/services/boards/list_service_spec.rb1
-rw-r--r--spec/services/ci/create_pipeline_service/custom_config_content_spec.rb29
-rw-r--r--spec/services/ci/create_pipeline_service_spec.rb144
-rw-r--r--spec/services/ci/ensure_stage_service_spec.rb4
-rw-r--r--spec/services/ci/expire_pipeline_cache_service_spec.rb6
-rw-r--r--spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb91
-rw-r--r--spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb12
-rw-r--r--spec/services/ci/pipeline_processing/legacy_processing_service_spec.rb12
-rw-r--r--spec/services/ci/pipeline_processing/shared_processing_service.rb940
-rw-r--r--spec/services/ci/prepare_build_service_spec.rb4
-rw-r--r--spec/services/ci/process_pipeline_service_spec.rb894
-rw-r--r--spec/services/ci/register_job_service_spec.rb6
-rw-r--r--spec/services/ci/resource_groups/assign_resource_from_resource_group_service_spec.rb64
-rw-r--r--spec/services/ci/retry_build_service_spec.rb28
-rw-r--r--spec/services/ci/retry_pipeline_service_spec.rb2
-rw-r--r--spec/services/ci/run_scheduled_build_service_spec.rb12
-rw-r--r--spec/services/clusters/applications/check_installation_progress_service_spec.rb6
-rw-r--r--spec/services/clusters/applications/create_service_spec.rb34
-rw-r--r--spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb43
-rw-r--r--spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb12
-rw-r--r--spec/services/container_expiration_policy_service_spec.rb31
-rw-r--r--spec/services/create_snippet_service_spec.rb117
-rw-r--r--spec/services/deployments/after_create_service_spec.rb48
-rw-r--r--spec/services/deployments/link_merge_requests_service_spec.rb30
-rw-r--r--spec/services/error_tracking/list_issues_service_spec.rb4
-rw-r--r--spec/services/error_tracking/list_projects_service_spec.rb4
-rw-r--r--spec/services/external_pull_requests/create_pipeline_service_spec.rb4
-rw-r--r--spec/services/git/branch_push_service_spec.rb6
-rw-r--r--spec/services/groups/auto_devops_service_spec.rb4
-rw-r--r--spec/services/issues/referenced_merge_requests_service_spec.rb16
-rw-r--r--spec/services/issues/reorder_service_spec.rb6
-rw-r--r--spec/services/issues/zoom_link_service_spec.rb4
-rw-r--r--spec/services/merge_requests/conflicts/list_service_spec.rb4
-rw-r--r--spec/services/merge_requests/create_from_issue_service_spec.rb4
-rw-r--r--spec/services/merge_requests/create_pipeline_service_spec.rb4
-rw-r--r--spec/services/merge_requests/get_urls_service_spec.rb7
-rw-r--r--spec/services/merge_requests/merge_service_spec.rb4
-rw-r--r--spec/services/merge_requests/merge_to_ref_service_spec.rb4
-rw-r--r--spec/services/merge_requests/push_options_handler_service_spec.rb12
-rw-r--r--spec/services/merge_requests/rebase_service_spec.rb9
-rw-r--r--spec/services/metrics/dashboard/clone_dashboard_service_spec.rb197
-rw-r--r--spec/services/metrics/dashboard/custom_metric_embed_service_spec.rb6
-rw-r--r--spec/services/metrics/dashboard/default_embed_service_spec.rb6
-rw-r--r--spec/services/metrics/dashboard/dynamic_embed_service_spec.rb6
-rw-r--r--spec/services/metrics/dashboard/project_dashboard_service_spec.rb6
-rw-r--r--spec/services/metrics/dashboard/system_dashboard_service_spec.rb6
-rw-r--r--spec/services/metrics/sample_metrics_service_spec.rb9
-rw-r--r--spec/services/milestones/promote_service_spec.rb4
-rw-r--r--spec/services/milestones/transfer_service_spec.rb4
-rw-r--r--spec/services/namespaces/statistics_refresher_service_spec.rb9
-rw-r--r--spec/services/notes/create_service_spec.rb20
-rw-r--r--spec/services/notes/destroy_service_spec.rb4
-rw-r--r--spec/services/notes/resolve_service_spec.rb4
-rw-r--r--spec/services/notification_service_spec.rb20
-rw-r--r--spec/services/pages_domains/create_acme_order_service_spec.rb26
-rw-r--r--spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb6
-rw-r--r--spec/services/projects/container_repository/cleanup_tags_service_spec.rb6
-rw-r--r--spec/services/projects/container_repository/delete_tags_service_spec.rb6
-rw-r--r--spec/services/projects/container_repository/destroy_service_spec.rb4
-rw-r--r--spec/services/projects/create_service_spec.rb4
-rw-r--r--spec/services/projects/detect_repository_languages_service_spec.rb4
-rw-r--r--spec/services/projects/fork_service_spec.rb13
-rw-r--r--spec/services/projects/gitlab_projects_import_service_spec.rb2
-rw-r--r--spec/services/projects/housekeeping_service_spec.rb2
-rw-r--r--spec/services/projects/import_export/export_service_spec.rb8
-rw-r--r--spec/services/projects/lfs_pointers/lfs_import_service_spec.rb12
-rw-r--r--spec/services/projects/open_merge_requests_count_service_spec.rb2
-rw-r--r--spec/services/projects/operations/update_service_spec.rb4
-rw-r--r--spec/services/projects/participants_service_spec.rb28
-rw-r--r--spec/services/projects/update_pages_service_spec.rb66
-rw-r--r--spec/services/prometheus/proxy_service_spec.rb10
-rw-r--r--spec/services/prometheus/proxy_variable_substitution_service_spec.rb146
-rw-r--r--spec/services/quick_actions/interpret_service_spec.rb5
-rw-r--r--spec/services/releases/update_service_spec.rb38
-rw-r--r--spec/services/resource_events/change_labels_service_spec.rb4
-rw-r--r--spec/services/resource_events/merge_into_notes_service_spec.rb10
-rw-r--r--spec/services/resource_events/synthetic_label_notes_builder_service_spec.rb21
-rw-r--r--spec/services/snippets/create_service_spec.rb170
-rw-r--r--spec/services/snippets/destroy_service_spec.rb77
-rw-r--r--spec/services/snippets/update_service_spec.rb123
-rw-r--r--spec/services/spam/mark_as_spam_service_spec.rb52
-rw-r--r--spec/services/spam_service_spec.rb4
-rw-r--r--spec/services/suggestions/apply_service_spec.rb31
-rw-r--r--spec/services/system_note_service_spec.rb119
-rw-r--r--spec/services/system_notes/commit_service_spec.rb6
-rw-r--r--spec/services/system_notes/issuables_service_spec.rb8
-rw-r--r--spec/services/system_notes/time_tracking_service_spec.rb129
-rw-r--r--spec/services/task_list_toggle_service_spec.rb44
-rw-r--r--spec/services/template_engines/liquid_service_spec.rb126
-rw-r--r--spec/services/update_snippet_service_spec.rb80
-rw-r--r--spec/services/users/activity_service_spec.rb4
-rw-r--r--spec/services/users/destroy_service_spec.rb16
-rw-r--r--spec/services/users/update_service_spec.rb7
-rw-r--r--spec/spec_helper.rb6
-rw-r--r--spec/support/cycle_analytics_helpers/test_generation.rb6
-rw-r--r--spec/support/features/discussion_comments_shared_example.rb4
-rw-r--r--spec/support/helpers/filter_spec_helper.rb11
-rw-r--r--spec/support/helpers/filtered_search_helpers.rb17
-rw-r--r--spec/support/helpers/graphql_helpers.rb24
-rw-r--r--spec/support/helpers/kubernetes_helpers.rb127
-rw-r--r--spec/support/helpers/metrics_dashboard_helpers.rb50
-rw-r--r--spec/support/helpers/query_recorder.rb2
-rw-r--r--spec/support/helpers/sentry_client_helpers.rb4
-rw-r--r--spec/support/helpers/test_env.rb5
-rw-r--r--spec/support/import_export/common_util.rb4
-rw-r--r--spec/support/import_export/configuration_helper.rb4
-rw-r--r--spec/support/matchers/eq_uri.rb19
-rw-r--r--spec/support/matchers/graphql_matchers.rb20
-rw-r--r--spec/support/matchers/markdown_matchers.rb17
-rw-r--r--spec/support/migrations_helpers/prometheus_service_helpers.rb35
-rw-r--r--spec/support/prometheus/additional_metrics_shared_examples.rb5
-rw-r--r--spec/support/redis/redis_shared_examples.rb10
-rw-r--r--spec/support/shared_contexts/upload_type_check_shared_context.rb33
-rw-r--r--spec/support/shared_examples/controllers/error_tracking_shared_examples.rb7
-rw-r--r--spec/support/shared_examples/email_shared_examples.rb9
-rw-r--r--spec/support/shared_examples/features/issuables_user_dropdown_behaviors_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/master_manages_access_requests_shared_example.rb2
-rw-r--r--spec/support/shared_examples/graphql/connection_paged_nodes.rb4
-rw-r--r--spec/support/shared_examples/graphql/failure_to_find_anything.rb17
-rw-r--r--spec/support/shared_examples/lib/gitlab/import_export/import_failure_service_shared_examples.rb32
-rw-r--r--spec/support/shared_examples/lib/sentry/client_shared_examples.rb10
-rw-r--r--spec/support/shared_examples/logging_application_context_shared_examples.rb24
-rw-r--r--spec/support/shared_examples/merge_requests_rendering_a_single_diff_version.rb21
-rw-r--r--spec/support/shared_examples/migration_helpers_examples.rb23
-rw-r--r--spec/support/shared_examples/models/cluster_application_initial_status.rb24
-rw-r--r--spec/support/shared_examples/models/cluster_application_status_shared_examples.rb10
-rw-r--r--spec/support/shared_examples/models/diff_note_after_commit_shared_examples.rb46
-rw-r--r--spec/support/shared_examples/pages_size_limit_shared_examples.rb30
-rw-r--r--spec/support/shared_examples/requests/api/diff_discussions.rb21
-rw-r--r--spec/support/shared_examples/requests/api/status_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/requests/self_monitoring_shared_examples.rb132
-rw-r--r--spec/support/shared_examples/services/boards/boards_list_service.rb17
-rw-r--r--spec/support/shared_examples/services/metrics/dashboard_shared_examples.rb52
-rw-r--r--spec/support/shared_examples/unique_ip_check_shared_examples.rb7
-rw-r--r--spec/support/shared_examples/uploaders/upload_type_shared_examples.rb65
-rw-r--r--spec/support/shared_examples/workers/concerns/reenqueuer_shared_examples.rb116
-rw-r--r--spec/support/shared_examples/workers/self_monitoring_shared_examples.rb28
-rw-r--r--spec/tasks/gitlab/generate_sample_prometheus_data_spec.rb2
-rw-r--r--spec/tasks/gitlab/import_export/import_rake_spec.rb36
-rw-r--r--spec/uploaders/avatar_uploader_spec.rb12
-rw-r--r--spec/uploaders/favicon_uploader_spec.rb24
-rw-r--r--spec/uploaders/upload_type_check_spec.rb124
-rw-r--r--spec/validators/qualified_domain_array_validator_spec.rb15
-rw-r--r--spec/views/profiles/preferences/show.html.haml_spec.rb10
-rw-r--r--spec/views/projects/ci/lints/show.html.haml_spec.rb5
-rw-r--r--spec/views/projects/commit/branches.html.haml_spec.rb4
-rw-r--r--spec/views/projects/diffs/_viewer.html.haml_spec.rb12
-rw-r--r--spec/views/projects/edit.html.haml_spec.rb27
-rw-r--r--spec/views/projects/issues/show.html.haml_spec.rb22
-rw-r--r--spec/views/search/_results.html.haml_spec.rb2
-rw-r--r--spec/workers/chat_notification_worker_spec.rb35
-rw-r--r--spec/workers/ci/archive_traces_cron_worker_spec.rb9
-rw-r--r--spec/workers/ci/resource_groups/assign_resource_from_resource_group_worker_spec.rb34
-rw-r--r--spec/workers/concerns/gitlab/github_import/rescheduling_methods_spec.rb6
-rw-r--r--spec/workers/concerns/reenqueuer_spec.rb179
-rw-r--r--spec/workers/container_expiration_policy_worker_spec.rb57
-rw-r--r--spec/workers/delete_merged_branches_worker_spec.rb4
-rw-r--r--spec/workers/deployments/finished_worker_spec.rb14
-rw-r--r--spec/workers/expire_build_artifacts_worker_spec.rb4
-rw-r--r--spec/workers/file_hook_worker_spec.rb27
-rw-r--r--spec/workers/git_garbage_collect_worker_spec.rb4
-rw-r--r--spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb12
-rw-r--r--spec/workers/gitlab_shell_worker_spec.rb4
-rw-r--r--spec/workers/gitlab_usage_ping_worker_spec.rb4
-rw-r--r--spec/workers/hashed_storage/migrator_worker_spec.rb4
-rw-r--r--spec/workers/hashed_storage/rollbacker_worker_spec.rb4
-rw-r--r--spec/workers/import_issues_csv_worker_spec.rb4
-rw-r--r--spec/workers/new_release_worker_spec.rb4
-rw-r--r--spec/workers/pipeline_update_worker_spec.rb2
-rw-r--r--spec/workers/plugin_worker_spec.rb27
-rw-r--r--spec/workers/repository_import_worker_spec.rb23
-rw-r--r--spec/workers/self_monitoring_project_create_worker_spec.rb16
-rw-r--r--spec/workers/self_monitoring_project_delete_worker_spec.rb19
-rw-r--r--spec/workers/stage_update_worker_spec.rb2
1122 files changed, 33716 insertions, 11175 deletions
diff --git a/spec/controllers/admin/sessions_controller_spec.rb b/spec/controllers/admin/sessions_controller_spec.rb
index bd0bb0bd81f..be996aee1d2 100644
--- a/spec/controllers/admin/sessions_controller_spec.rb
+++ b/spec/controllers/admin/sessions_controller_spec.rb
@@ -122,7 +122,7 @@ describe Admin::SessionsController, :do_not_mock_admin_mode do
describe '#destroy' do
context 'for regular users' do
it 'shows error page' do
- get :destroy
+ post :destroy
expect(response).to have_gitlab_http_status(404)
expect(controller.current_user_mode.admin_mode?).to be(false)
@@ -139,7 +139,7 @@ describe Admin::SessionsController, :do_not_mock_admin_mode do
post :create, params: { password: user.password }
expect(controller.current_user_mode.admin_mode?).to be(true)
- get :destroy
+ post :destroy
expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(root_path)
diff --git a/spec/controllers/admin/users_controller_spec.rb b/spec/controllers/admin/users_controller_spec.rb
index f11b5e798c9..ebdfbe14dec 100644
--- a/spec/controllers/admin/users_controller_spec.rb
+++ b/spec/controllers/admin/users_controller_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
describe Admin::UsersController do
let(:user) { create(:user) }
+
let_it_be(:admin) { create(:admin) }
before do
diff --git a/spec/controllers/application_controller_spec.rb b/spec/controllers/application_controller_spec.rb
index e72ab16f62a..0c299dcda34 100644
--- a/spec/controllers/application_controller_spec.rb
+++ b/spec/controllers/application_controller_spec.rb
@@ -895,4 +895,50 @@ describe ApplicationController do
end
end
end
+
+ context '#set_current_context' do
+ controller(described_class) do
+ def index
+ Labkit::Context.with_context do |context|
+ render json: context.to_h
+ end
+ end
+ end
+
+ let_it_be(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ end
+
+ it 'does not break anything when no group or project method is defined' do
+ get :index
+
+ expect(response).to have_gitlab_http_status(:success)
+ end
+
+ it 'sets the username in the context when signed in' do
+ get :index
+
+ expect(json_response['meta.user']).to eq(user.username)
+ end
+
+ it 'sets the group if it was available' do
+ group = build_stubbed(:group)
+ controller.instance_variable_set(:@group, group)
+
+ get :index, format: :json
+
+ expect(json_response['meta.root_namespace']).to eq(group.path)
+ end
+
+ it 'sets the project if one was available' do
+ project = build_stubbed(:project)
+ controller.instance_variable_set(:@project, project)
+
+ get :index, format: :json
+
+ expect(json_response['meta.project']).to eq(project.full_path)
+ end
+ end
end
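
Note for readers of this diff: the `controller(described_class) do ... end` block in the new `#set_current_context` examples is rspec-rails's anonymous-controller helper, which the concern specs further down also rely on. A minimal illustrative sketch, assuming a Rails app with rspec-rails and this suite's `spec_helper`; the `Auditable` concern is invented for the example and is not part of this commit:

  require 'spec_helper'

  # Made-up concern, defined inline so the sketch is self-contained.
  module Auditable
    def audit!
      head :ok
    end
  end

  describe Auditable do
    controller(ApplicationController) do
      include Auditable # mixed into an anonymous controller just for the spec

      def index
        audit!
      end
    end

    it 'routes the request through the anonymous controller' do
      get :index

      expect(response).to have_http_status(:ok)
    end
  end
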
diff --git a/spec/controllers/autocomplete_controller_spec.rb b/spec/controllers/autocomplete_controller_spec.rb
index 4227a4453a3..51f20bae880 100644
--- a/spec/controllers/autocomplete_controller_spec.rb
+++ b/spec/controllers/autocomplete_controller_spec.rb
@@ -112,9 +112,7 @@ describe AutocompleteController do
context 'limited users per page' do
before do
- 25.times do
- create(:user)
- end
+ create_list(:user, 25)
sign_in(user)
get(:users)
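
The `create_list(:user, 25)` change replaces a manual `25.times { create(:user) }` loop with FactoryBot's list helpers. A self-contained sketch of the same helper family using `build_list`, a plain Ruby struct, and an invented `:widget` factory (no database or Rails required):

  require 'factory_bot'
  require 'rspec/autorun'

  Widget = Struct.new(:name, keyword_init: true)

  FactoryBot.define do
    factory :widget do
      sequence(:name) { |n| "widget-#{n}" }
    end
  end

  describe 'FactoryBot list helpers' do
    include FactoryBot::Syntax::Methods

    it 'builds many objects in a single call' do
      widgets = build_list(:widget, 25)

      expect(widgets.size).to eq(25)
      expect(widgets.map(&:name).uniq.size).to eq(25)
    end
  end
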
diff --git a/spec/controllers/concerns/confirm_email_warning_spec.rb b/spec/controllers/concerns/confirm_email_warning_spec.rb
index 25429cdd149..56a6efab8ed 100644
--- a/spec/controllers/concerns/confirm_email_warning_spec.rb
+++ b/spec/controllers/concerns/confirm_email_warning_spec.rb
@@ -10,7 +10,7 @@ describe ConfirmEmailWarning do
controller(ApplicationController) do
# `described_class` is not available in this context
- include ConfirmEmailWarning # rubocop:disable RSpec/DescribedClass
+ include ConfirmEmailWarning
def index
head :ok
diff --git a/spec/controllers/concerns/continue_params_spec.rb b/spec/controllers/concerns/continue_params_spec.rb
index b4b62cbe1e3..6af01aa837c 100644
--- a/spec/controllers/concerns/continue_params_spec.rb
+++ b/spec/controllers/concerns/continue_params_spec.rb
@@ -12,6 +12,7 @@ describe ContinueParams do
end
end
end
+
subject(:controller) { controller_class.new }
def strong_continue_params(params)
diff --git a/spec/controllers/concerns/controller_with_cross_project_access_check_spec.rb b/spec/controllers/concerns/controller_with_cross_project_access_check_spec.rb
index 7a56f7203b0..e47f1650b1f 100644
--- a/spec/controllers/concerns/controller_with_cross_project_access_check_spec.rb
+++ b/spec/controllers/concerns/controller_with_cross_project_access_check_spec.rb
@@ -22,7 +22,7 @@ describe ControllerWithCrossProjectAccessCheck do
describe '#requires_cross_project_access' do
controller(ApplicationController) do
# `described_class` is not available in this context
- include ControllerWithCrossProjectAccessCheck # rubocop:disable RSpec/DescribedClass
+ include ControllerWithCrossProjectAccessCheck
requires_cross_project_access :index, show: false,
unless: -> { unless_condition },
@@ -81,7 +81,7 @@ describe ControllerWithCrossProjectAccessCheck do
describe '#skip_cross_project_access_check' do
controller(ApplicationController) do
# `described_class` is not available in this context
- include ControllerWithCrossProjectAccessCheck # rubocop:disable RSpec/DescribedClass
+ include ControllerWithCrossProjectAccessCheck
requires_cross_project_access
diff --git a/spec/controllers/concerns/group_tree_spec.rb b/spec/controllers/concerns/group_tree_spec.rb
index 835c3d9b3af..543f0170be0 100644
--- a/spec/controllers/concerns/group_tree_spec.rb
+++ b/spec/controllers/concerns/group_tree_spec.rb
@@ -8,7 +8,7 @@ describe GroupTree do
controller(ApplicationController) do
# `described_class` is not available in this context
- include GroupTree # rubocop:disable RSpec/DescribedClass
+ include GroupTree
def index
render_group_tree GroupsFinder.new(current_user).execute
diff --git a/spec/controllers/concerns/internal_redirect_spec.rb b/spec/controllers/concerns/internal_redirect_spec.rb
index e5e50cfd55e..cc6422f2817 100644
--- a/spec/controllers/concerns/internal_redirect_spec.rb
+++ b/spec/controllers/concerns/internal_redirect_spec.rb
@@ -12,6 +12,7 @@ describe InternalRedirect do
end
end
end
+
subject(:controller) { controller_class.new }
describe '#safe_redirect_path' do
diff --git a/spec/controllers/concerns/lfs_request_spec.rb b/spec/controllers/concerns/lfs_request_spec.rb
index 823b9a50434..584448e68f9 100644
--- a/spec/controllers/concerns/lfs_request_spec.rb
+++ b/spec/controllers/concerns/lfs_request_spec.rb
@@ -7,7 +7,7 @@ describe LfsRequest do
controller(Projects::GitHttpClientController) do
# `described_class` is not available in this context
- include LfsRequest # rubocop:disable RSpec/DescribedClass
+ include LfsRequest
def show
storage_project
diff --git a/spec/controllers/concerns/metrics_dashboard_spec.rb b/spec/controllers/concerns/metrics_dashboard_spec.rb
index ff2b6fbb8ec..389d264bed3 100644
--- a/spec/controllers/concerns/metrics_dashboard_spec.rb
+++ b/spec/controllers/concerns/metrics_dashboard_spec.rb
@@ -16,7 +16,7 @@ describe MetricsDashboard do
end
controller(::ApplicationController) do
- include MetricsDashboard # rubocop:disable RSpec/DescribedClass
+ include MetricsDashboard
end
let(:json_response) do
diff --git a/spec/controllers/concerns/renders_commits_spec.rb b/spec/controllers/concerns/renders_commits_spec.rb
index 79350847383..c43ceb6b795 100644
--- a/spec/controllers/concerns/renders_commits_spec.rb
+++ b/spec/controllers/concerns/renders_commits_spec.rb
@@ -9,7 +9,7 @@ describe RendersCommits do
controller(ApplicationController) do
# `described_class` is not available in this context
- include RendersCommits # rubocop:disable RSpec/DescribedClass
+ include RendersCommits
def index
@merge_request = MergeRequest.find(params[:id])
diff --git a/spec/controllers/concerns/routable_actions_spec.rb b/spec/controllers/concerns/routable_actions_spec.rb
index 59d48c68b9c..a11f4d2a154 100644
--- a/spec/controllers/concerns/routable_actions_spec.rb
+++ b/spec/controllers/concerns/routable_actions_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
describe RoutableActions do
controller(::ApplicationController) do
- include RoutableActions # rubocop:disable RSpec/DescribedClass
+ include RoutableActions
before_action :routable
diff --git a/spec/controllers/concerns/sourcegraph_gon_spec.rb b/spec/controllers/concerns/sourcegraph_decorator_spec.rb
index 4fb7e37d148..f1f3f0489c6 100644
--- a/spec/controllers/concerns/sourcegraph_gon_spec.rb
+++ b/spec/controllers/concerns/sourcegraph_decorator_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe SourcegraphGon do
+describe SourcegraphDecorator do
let_it_be(:enabled_user) { create(:user, sourcegraph_enabled: true) }
let_it_be(:disabled_user) { create(:user, sourcegraph_enabled: false) }
let_it_be(:public_project) { create(:project, :public) }
@@ -17,7 +17,7 @@ describe SourcegraphGon do
let(:project) { internal_project }
controller(ApplicationController) do
- include SourcegraphGon # rubocop:disable RSpec/DescribedClass
+ include SourcegraphDecorator
def index
head :ok
diff --git a/spec/controllers/concerns/static_object_external_storage_spec.rb b/spec/controllers/concerns/static_object_external_storage_spec.rb
index 3a0219ddaa1..ddd1a95427e 100644
--- a/spec/controllers/concerns/static_object_external_storage_spec.rb
+++ b/spec/controllers/concerns/static_object_external_storage_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
describe StaticObjectExternalStorage do
controller(Projects::ApplicationController) do
- include StaticObjectExternalStorage # rubocop:disable RSpec/DescribedClass
+ include StaticObjectExternalStorage
before_action :redirect_to_external_storage, if: :static_objects_external_storage_enabled?
diff --git a/spec/controllers/groups/group_links_controller_spec.rb b/spec/controllers/groups/group_links_controller_spec.rb
index 8f04822fee6..04f2e33b26a 100644
--- a/spec/controllers/groups/group_links_controller_spec.rb
+++ b/spec/controllers/groups/group_links_controller_spec.rb
@@ -111,4 +111,100 @@ describe Groups::GroupLinksController do
end
end
end
+
+ describe '#update' do
+ let!(:link) do
+ create(:group_group_link, { shared_group: shared_group,
+ shared_with_group: shared_with_group })
+ end
+
+ let(:expiry_date) { 1.month.from_now.to_date }
+
+ subject do
+ post(:update, params: { group_id: shared_group,
+ id: link.id,
+ group_link: { group_access: Gitlab::Access::GUEST,
+ expires_at: expiry_date } })
+ end
+
+ context 'when user has admin access to the shared group' do
+ before do
+ shared_group.add_owner(user)
+ end
+
+ it 'updates existing link' do
+ expect(link.group_access).to eq(Gitlab::Access::DEVELOPER)
+ expect(link.expires_at).to be_nil
+
+ subject
+
+ link.reload
+
+ expect(link.group_access).to eq(Gitlab::Access::GUEST)
+ expect(link.expires_at).to eq(expiry_date)
+ end
+ end
+
+ context 'when user does not have admin access to the shared group' do
+ it 'renders 404' do
+ subject
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(share_group_with_group: false)
+ end
+
+ it 'renders 404' do
+ subject
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+
+ describe '#destroy' do
+ let!(:link) do
+ create(:group_group_link, { shared_group: shared_group,
+ shared_with_group: shared_with_group })
+ end
+
+ subject do
+ post(:destroy, params: { group_id: shared_group,
+ id: link.id })
+ end
+
+ context 'when user has admin access to the shared group' do
+ before do
+ shared_group.add_owner(user)
+ end
+
+ it 'deletes existing link' do
+ expect { subject }.to change(GroupGroupLink, :count).by(-1)
+ end
+ end
+
+ context 'when user does not have admin access to the shared group' do
+ it 'renders 404' do
+ subject
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(share_group_with_group: false)
+ end
+
+ it 'renders 404' do
+ subject
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
end
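
The `#destroy` examples above combine a named `subject` with RSpec's `change` matcher (`expect { subject }.to change(GroupGroupLink, :count).by(-1)`). A minimal, framework-free sketch of that matcher; the `Registry` class is invented for the example:

  require 'rspec/autorun'

  class Registry
    @items = []

    class << self
      def add(item)
        @items << item
      end

      def remove(item)
        @items.delete(item)
      end

      def count
        @items.size
      end
    end
  end

  describe Registry do
    subject(:remove_item) { described_class.remove('a') }

    before { described_class.add('a') }

    it 'removes the existing entry' do
      expect { remove_item }.to change(described_class, :count).by(-1)
    end
  end
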
diff --git a/spec/controllers/groups/group_members_controller_spec.rb b/spec/controllers/groups/group_members_controller_spec.rb
index a144d9e0786..1c8a2bd160d 100644
--- a/spec/controllers/groups/group_members_controller_spec.rb
+++ b/spec/controllers/groups/group_members_controller_spec.rb
@@ -31,6 +31,12 @@ describe Groups::GroupMembersController do
expect(assigns(:invited_members).map(&:invite_email)).to match_array(invited.map(&:invite_email))
end
+ it 'assigns skip groups' do
+ get :index, params: { group_id: group }
+
+ expect(assigns(:skip_groups)).to match_array(group.related_group_ids)
+ end
+
it 'restricts search to one email' do
get :index, params: { group_id: group, search_invited: invited.first.invite_email }
diff --git a/spec/controllers/groups/milestones_controller_spec.rb b/spec/controllers/groups/milestones_controller_spec.rb
index 4f4f9e5143b..8fb9f0c516c 100644
--- a/spec/controllers/groups/milestones_controller_spec.rb
+++ b/spec/controllers/groups/milestones_controller_spec.rb
@@ -148,6 +148,19 @@ describe Groups::MilestonesController do
expect(response).to have_gitlab_http_status(200)
expect(response.content_type).to eq 'application/json'
end
+
+ context 'for a subgroup' do
+ let(:subgroup) { create(:group, parent: group) }
+
+ it 'includes ancestor group milestones' do
+ get :index, params: { group_id: subgroup.to_param }, format: :json
+
+ milestones = json_response
+
+ expect(milestones.count).to eq(1)
+ expect(milestones.first['title']).to eq('group milestone')
+ end
+ end
end
context 'external authorization' do
diff --git a/spec/controllers/groups/uploads_controller_spec.rb b/spec/controllers/groups/uploads_controller_spec.rb
index 60342bf8e3d..8abebd04e8b 100644
--- a/spec/controllers/groups/uploads_controller_spec.rb
+++ b/spec/controllers/groups/uploads_controller_spec.rb
@@ -19,6 +19,22 @@ describe Groups::UploadsController do
let(:uploader_class) { NamespaceFileUploader }
end
+ context 'with a moved group' do
+ let!(:upload) { create(:upload, :issuable_upload, :with_file, model: model) }
+ let(:group) { model }
+ let(:old_path) { group.to_param + 'old' }
+ let!(:redirect_route) { model.redirect_routes.create(path: old_path) }
+ let(:upload_path) { File.basename(upload.path) }
+
+ it 'redirects to a file with the proper extension' do
+ get :show, params: { group_id: old_path, filename: upload_path, secret: upload.secret }
+
+ expect(response.location).to eq(show_group_uploads_url(group, upload.secret, upload_path))
+ expect(response.location).to end_with(upload.path)
+ expect(response).to have_gitlab_http_status(:redirect)
+ end
+ end
+
def post_authorize(verified: true)
request.headers.merge!(workhorse_internal_api_request_header) if verified
diff --git a/spec/controllers/health_check_controller_spec.rb b/spec/controllers/health_check_controller_spec.rb
index b48b7dc86e0..cbcda5d0dc7 100644
--- a/spec/controllers/health_check_controller_spec.rb
+++ b/spec/controllers/health_check_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe HealthCheckController do
+describe HealthCheckController, :request_store do
include StubENV
let(:xml_response) { Hash.from_xml(response.body)['hash'] }
@@ -18,7 +18,7 @@ describe HealthCheckController do
describe 'GET #index' do
context 'when services are up but accessed from outside whitelisted ips' do
before do
- allow(Gitlab::RequestContext).to receive(:client_ip).and_return(not_whitelisted_ip)
+ allow(Gitlab::RequestContext.instance).to receive(:client_ip).and_return(not_whitelisted_ip)
end
it 'returns a not found page' do
@@ -48,7 +48,7 @@ describe HealthCheckController do
context 'when services are up and accessed from whitelisted ips' do
before do
- allow(Gitlab::RequestContext).to receive(:client_ip).and_return(whitelisted_ip)
+ allow(Gitlab::RequestContext.instance).to receive(:client_ip).and_return(whitelisted_ip)
end
it 'supports successful plaintext response' do
@@ -95,7 +95,7 @@ describe HealthCheckController do
before do
allow(HealthCheck::Utils).to receive(:process_checks).with(['standard']).and_return('The server is on fire')
allow(HealthCheck::Utils).to receive(:process_checks).with(['email']).and_return('Email is on fire')
- allow(Gitlab::RequestContext).to receive(:client_ip).and_return(whitelisted_ip)
+ allow(Gitlab::RequestContext.instance).to receive(:client_ip).and_return(whitelisted_ip)
end
it 'supports failure plaintext response' do
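
The stubs above switch from `Gitlab::RequestContext` itself to `Gitlab::RequestContext.instance`, so `client_ip` is now stubbed on an instance rather than on the class. A self-contained sketch of stubbing a method through an object's `.instance`, with an invented `RequestInfo` class standing in for the real one:

  require 'singleton'
  require 'rspec/autorun'

  class RequestInfo
    include Singleton

    def client_ip
      '127.0.0.1' # placeholder; a real implementation would read the request
    end
  end

  describe RequestInfo do
    it 'can be stubbed through its singleton instance' do
      allow(described_class.instance).to receive(:client_ip).and_return('10.0.0.1')

      expect(described_class.instance.client_ip).to eq('10.0.0.1')
    end
  end
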
diff --git a/spec/controllers/metrics_controller_spec.rb b/spec/controllers/metrics_controller_spec.rb
index 1d378b9b9dc..331eafba0d3 100644
--- a/spec/controllers/metrics_controller_spec.rb
+++ b/spec/controllers/metrics_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe MetricsController do
+describe MetricsController, :request_store do
include StubENV
let(:metrics_multiproc_dir) { @metrics_multiproc_dir }
@@ -53,7 +53,7 @@ describe MetricsController do
context 'accessed from whitelisted ip' do
before do
- allow(Gitlab::RequestContext).to receive(:client_ip).and_return(whitelisted_ip)
+ allow(Gitlab::RequestContext.instance).to receive(:client_ip).and_return(whitelisted_ip)
end
it_behaves_like 'endpoint providing metrics'
@@ -61,7 +61,7 @@ describe MetricsController do
context 'accessed from ip in whitelisted range' do
before do
- allow(Gitlab::RequestContext).to receive(:client_ip).and_return(ip_in_whitelisted_range)
+ allow(Gitlab::RequestContext.instance).to receive(:client_ip).and_return(ip_in_whitelisted_range)
end
it_behaves_like 'endpoint providing metrics'
@@ -69,7 +69,7 @@ describe MetricsController do
context 'accessed from not whitelisted ip' do
before do
- allow(Gitlab::RequestContext).to receive(:client_ip).and_return(not_whitelisted_ip)
+ allow(Gitlab::RequestContext.instance).to receive(:client_ip).and_return(not_whitelisted_ip)
end
it 'returns the expected error response' do
diff --git a/spec/controllers/omniauth_callbacks_controller_spec.rb b/spec/controllers/omniauth_callbacks_controller_spec.rb
index 6c5f36804e8..8b92976252c 100644
--- a/spec/controllers/omniauth_callbacks_controller_spec.rb
+++ b/spec/controllers/omniauth_callbacks_controller_spec.rb
@@ -287,6 +287,34 @@ describe OmniauthCallbacksController, type: :controller, do_not_mock_admin_mode:
request.env['omniauth.auth'] = Rails.application.env_config['omniauth.auth']
end
+ context 'sign up' do
+ before do
+ user.destroy
+ end
+
+ it 'denies login if sign up is enabled, but block_auto_created_users is set' do
+ post :saml, params: { SAMLResponse: mock_saml_response }
+
+ expect(flash[:alert]).to start_with 'Your account has been blocked.'
+ end
+
+ it 'accepts login if sign up is enabled' do
+ stub_omniauth_setting(block_auto_created_users: false)
+
+ post :saml, params: { SAMLResponse: mock_saml_response }
+
+ expect(request.env['warden']).to be_authenticated
+ end
+
+ it 'denies login if sign up is not enabled' do
+ stub_omniauth_setting(allow_single_sign_on: false, block_auto_created_users: false)
+
+ post :saml, params: { SAMLResponse: mock_saml_response }
+
+ expect(flash[:alert]).to start_with 'Signing in using your saml account without a pre-existing GitLab account is not allowed.'
+ end
+ end
+
context 'with GitLab initiated request' do
before do
post :saml, params: { SAMLResponse: mock_saml_response }
diff --git a/spec/controllers/profiles/preferences_controller_spec.rb b/spec/controllers/profiles/preferences_controller_spec.rb
index e0e6d78bdcd..77e7b32af25 100644
--- a/spec/controllers/profiles/preferences_controller_spec.rb
+++ b/spec/controllers/profiles/preferences_controller_spec.rb
@@ -46,7 +46,8 @@ describe Profiles::PreferencesController do
dashboard: 'stars',
theme_id: '2',
first_day_of_week: '1',
- preferred_language: 'jp'
+ preferred_language: 'jp',
+ render_whitespace_in_code: 'true'
}.with_indifferent_access
expect(user).to receive(:assign_attributes).with(ActionController::Parameters.new(prefs).permit!)
diff --git a/spec/controllers/projects/branches_controller_spec.rb b/spec/controllers/projects/branches_controller_spec.rb
index 4f8ab6a5def..ac39ac626c7 100644
--- a/spec/controllers/projects/branches_controller_spec.rb
+++ b/spec/controllers/projects/branches_controller_spec.rb
@@ -35,6 +35,7 @@ describe Projects::BranchesController do
context "valid branch name, valid source" do
let(:branch) { "merge_branch" }
let(:ref) { "master" }
+
it 'redirects' do
expect(subject)
.to redirect_to("/#{project.full_path}/tree/merge_branch")
@@ -44,6 +45,7 @@ describe Projects::BranchesController do
context "invalid branch name, valid ref" do
let(:branch) { "<script>alert('merge');</script>" }
let(:ref) { "master" }
+
it 'redirects' do
expect(subject)
.to redirect_to("/#{project.full_path}/tree/alert('merge');")
@@ -53,18 +55,21 @@ describe Projects::BranchesController do
context "valid branch name, invalid ref" do
let(:branch) { "merge_branch" }
let(:ref) { "<script>alert('ref');</script>" }
+
it { is_expected.to render_template('new') }
end
context "invalid branch name, invalid ref" do
let(:branch) { "<script>alert('merge');</script>" }
let(:ref) { "<script>alert('ref');</script>" }
+
it { is_expected.to render_template('new') }
end
context "valid branch name with encoded slashes" do
let(:branch) { "feature%2Ftest" }
let(:ref) { "<script>alert('ref');</script>" }
+
it { is_expected.to render_template('new') }
it { project.repository.branch_exists?('feature/test') }
end
@@ -586,7 +591,7 @@ describe Projects::BranchesController do
params: {
namespace_id: project.namespace,
project_id: project,
- names: ['fix', 'add-pdf-file', 'branch-merged']
+ names: %w[fix add-pdf-file branch-merged]
}
expect(response).to have_gitlab_http_status(200)
@@ -634,7 +639,7 @@ describe Projects::BranchesController do
params: {
namespace_id: project.namespace,
project_id: project,
- names: ['fix', 'add-pdf-file', 'branch-merged']
+ names: %w[fix add-pdf-file branch-merged]
}
expect(response).to have_gitlab_http_status(200)
diff --git a/spec/controllers/projects/ci/lints_controller_spec.rb b/spec/controllers/projects/ci/lints_controller_spec.rb
index 3d8f287f999..8fb39f734b6 100644
--- a/spec/controllers/projects/ci/lints_controller_spec.rb
+++ b/spec/controllers/projects/ci/lints_controller_spec.rb
@@ -103,7 +103,7 @@ describe Projects::Ci::LintsController do
end
it 'assigns errors' do
- expect(assigns[:error]).to eq('root config contains unknown keys: rubocop')
+ expect(assigns[:errors]).to eq(['root config contains unknown keys: rubocop'])
end
end
diff --git a/spec/controllers/projects/clusters_controller_spec.rb b/spec/controllers/projects/clusters_controller_spec.rb
index ab8bfc0cabe..642932e2935 100644
--- a/spec/controllers/projects/clusters_controller_spec.rb
+++ b/spec/controllers/projects/clusters_controller_spec.rb
@@ -26,6 +26,7 @@ describe Projects::ClustersController do
let(:project) { create(:project) }
let!(:enabled_cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
let!(:disabled_cluster) { create(:cluster, :disabled, :provided_by_gcp, :production_environment, projects: [project]) }
+
it 'lists available clusters' do
go
diff --git a/spec/controllers/projects/deployments_controller_spec.rb b/spec/controllers/projects/deployments_controller_spec.rb
index 66112c95742..b360319c6b1 100644
--- a/spec/controllers/projects/deployments_controller_spec.rb
+++ b/spec/controllers/projects/deployments_controller_spec.rb
@@ -6,7 +6,7 @@ describe Projects::DeploymentsController do
include ApiHelpers
let(:user) { create(:user) }
- let(:project) { create(:project) }
+ let(:project) { create(:project, :repository) }
let(:environment) { create(:environment, name: 'production', project: project) }
before do
diff --git a/spec/controllers/projects/environments/prometheus_api_controller_spec.rb b/spec/controllers/projects/environments/prometheus_api_controller_spec.rb
index 0940fccb431..793c10f0b21 100644
--- a/spec/controllers/projects/environments/prometheus_api_controller_spec.rb
+++ b/spec/controllers/projects/environments/prometheus_api_controller_spec.rb
@@ -78,6 +78,40 @@ describe Projects::Environments::PrometheusApiController do
end
end
end
+
+ context 'with variables' do
+ let(:pod_name) { "pod1" }
+
+ before do
+ expected_params[:query] = %{up{pod_name="#{pod_name}"}}
+ expected_params[:variables] = ['pod_name', pod_name]
+ end
+
+ it 'replaces variables with values' do
+ get :proxy, params: environment_params.merge(
+ query: 'up{pod_name="{{pod_name}}"}', variables: ['pod_name', pod_name]
+ )
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(Prometheus::ProxyService).to have_received(:new)
+ .with(environment, 'GET', 'query', expected_params)
+ end
+
+ context 'with invalid variables' do
+ let(:params_with_invalid_variables) do
+ environment_params.merge(
+ query: 'up{pod_name="{{pod_name}}"}', variables: ['a']
+ )
+ end
+
+ it 'returns 400' do
+ get :proxy, params: params_with_invalid_variables
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(Prometheus::ProxyService).not_to receive(:new)
+ end
+ end
+ end
end
context 'with nil result' do
diff --git a/spec/controllers/projects/environments/sample_metrics_controller_spec.rb b/spec/controllers/projects/environments/sample_metrics_controller_spec.rb
index 4faa3ecb567..19b07a2ccc4 100644
--- a/spec/controllers/projects/environments/sample_metrics_controller_spec.rb
+++ b/spec/controllers/projects/environments/sample_metrics_controller_spec.rb
@@ -9,17 +9,6 @@ describe Projects::Environments::SampleMetricsController do
let_it_be(:environment) { create(:environment, project: project) }
let_it_be(:user) { create(:user) }
- before(:context) do
- RSpec::Mocks.with_temporary_scope do
- stub_env('USE_SAMPLE_METRICS', 'true')
- Rails.application.reload_routes!
- end
- end
-
- after(:context) do
- Rails.application.reload_routes!
- end
-
before do
project.add_reporter(user)
sign_in(user)
@@ -58,7 +47,9 @@ describe Projects::Environments::SampleMetricsController do
id: environment.id.to_s,
namespace_id: project.namespace.full_path,
project_id: project.name,
- identifier: 'sample_metric_query_result'
+ identifier: 'sample_metric_query_result',
+ start: '2019-12-02T23:31:45.000Z',
+ end: '2019-12-03T00:01:45.000Z'
}.merge(params)
end
end
diff --git a/spec/controllers/projects/error_tracking/projects_controller_spec.rb b/spec/controllers/projects/error_tracking/projects_controller_spec.rb
new file mode 100644
index 00000000000..1737528b597
--- /dev/null
+++ b/spec/controllers/projects/error_tracking/projects_controller_spec.rb
@@ -0,0 +1,122 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Projects::ErrorTracking::ProjectsController do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ project.add_maintainer(user)
+ end
+
+ describe 'GET #index' do
+ context 'with insufficient permissions' do
+ before do
+ project.add_guest(user)
+ end
+
+ it 'returns 404' do
+ get :index, params: list_projects_params
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'with an anonymous user' do
+ before do
+ sign_out(user)
+ end
+
+ it 'redirects to sign-in page' do
+ get :index, params: list_projects_params
+
+ expect(response).to have_gitlab_http_status(:redirect)
+ end
+ end
+
+ context 'with authorized user' do
+ let(:list_projects_service) { spy(:list_projects_service) }
+ let(:sentry_project) { build(:error_tracking_project) }
+
+ let(:query_params) do
+ list_projects_params.slice(:api_host, :token)
+ end
+
+ before do
+ allow(ErrorTracking::ListProjectsService)
+ .to receive(:new).with(project, user, query_params)
+ .and_return(list_projects_service)
+ end
+
+ context 'service result is successful' do
+ before do
+ expect(list_projects_service).to receive(:execute)
+ .and_return(status: :success, projects: [sentry_project])
+ end
+
+ it 'returns a list of projects' do
+ get :index, params: list_projects_params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('error_tracking/list_projects')
+ expect(json_response['projects']).to eq([sentry_project].as_json)
+ end
+ end
+
+ context 'service result is erroneous' do
+ let(:error_message) { 'error message' }
+
+ context 'without http_status' do
+ before do
+ expect(list_projects_service).to receive(:execute)
+ .and_return(status: :error, message: error_message)
+ end
+
+ it 'returns 400 with message' do
+ get :index, params: list_projects_params
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to eq(error_message)
+ end
+ end
+
+ context 'with explicit http_status' do
+ let(:http_status) { :no_content }
+
+ before do
+ expect(list_projects_service).to receive(:execute).and_return(
+ status: :error,
+ message: error_message,
+ http_status: http_status
+ )
+ end
+
+ it 'returns http_status with message' do
+ get :index, params: list_projects_params
+
+ expect(response).to have_gitlab_http_status(http_status)
+ expect(json_response['message']).to eq(error_message)
+ end
+ end
+ end
+ end
+
+ private
+
+ def list_projects_params(opts = {})
+ project_params(
+ format: :json,
+ api_host: 'gitlab.com',
+ token: 'token'
+ )
+ end
+ end
+
+ private
+
+ def project_params(opts = {})
+ opts.reverse_merge(namespace_id: project.namespace, project_id: project)
+ end
+end
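
The new spec above follows a common service-object pattern: stub the service's constructor to return a test double, then assert on what the controller renders. A standalone sketch of that rspec-mocks pattern outside a controller, with an inline stand-in class rather than GitLab's real `ErrorTracking::ListProjectsService`:

  require 'rspec/autorun'

  class ListProjectsService
    def initialize(params)
      @params = params
    end

    def execute
      { status: :success, projects: [] }
    end
  end

  describe 'stubbing a service object' do
    let(:service) { instance_double(ListProjectsService, execute: { status: :success, projects: [] }) }

    before do
      allow(ListProjectsService).to receive(:new).and_return(service)
    end

    it 'returns whatever the stubbed service returns' do
      result = ListProjectsService.new(api_host: 'gitlab.com', token: 'token').execute

      expect(result[:status]).to eq(:success)
      expect(ListProjectsService).to have_received(:new).with(api_host: 'gitlab.com', token: 'token')
    end
  end
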
diff --git a/spec/controllers/projects/error_tracking/stack_traces_controller_spec.rb b/spec/controllers/projects/error_tracking/stack_traces_controller_spec.rb
new file mode 100644
index 00000000000..75e1c817baa
--- /dev/null
+++ b/spec/controllers/projects/error_tracking/stack_traces_controller_spec.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Projects::ErrorTracking::StackTracesController do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ project.add_maintainer(user)
+ end
+
+ describe 'GET #index' do
+ let(:issue_id) { 1234 }
+ let(:issue_stack_trace_service) { spy(:issue_stack_trace_service) }
+
+ subject(:get_stack_trace) do
+ get :index, params: { namespace_id: project.namespace, project_id: project, issue_id: issue_id, format: :json }
+ end
+
+ before do
+ expect(ErrorTracking::IssueLatestEventService)
+ .to receive(:new).with(project, user, issue_id: issue_id.to_s)
+ .and_return(issue_stack_trace_service)
+ expect(issue_stack_trace_service).to receive(:execute).and_return(service_response)
+
+ get_stack_trace
+ end
+
+ context 'awaiting data' do
+ let(:service_response) { { status: :error, http_status: :no_content }}
+
+ it 'responds with no data' do
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+
+ it_behaves_like 'sets the polling header'
+ end
+
+ context 'service result is successful' do
+ let(:service_response) { { status: :success, latest_event: error_event } }
+ let(:error_event) { build(:error_tracking_error_event) }
+
+ it 'responds with success' do
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'responds with error' do
+ expect(response).to match_response_schema('error_tracking/issue_stack_trace')
+ end
+
+ it 'highlights stack trace source code' do
+ expect(json_response['error']).to eq(
+ Gitlab::ErrorTracking::StackTraceHighlightDecorator.decorate(error_event).as_json
+ )
+ end
+
+ it_behaves_like 'sets the polling header'
+ end
+
+ context 'service result is erroneous' do
+ let(:error_message) { 'error message' }
+
+ context 'without http_status' do
+ let(:service_response) { { status: :error, message: error_message } }
+
+ it 'responds with bad request' do
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'responds with error message' do
+ expect(json_response['message']).to eq(error_message)
+ end
+ end
+
+ context 'with explicit http_status' do
+ let(:http_status) { :no_content }
+ let(:service_response) { { status: :error, message: error_message, http_status: http_status } }
+
+ it 'responds with custom http status' do
+ expect(response).to have_gitlab_http_status(http_status)
+ end
+
+ it 'responds with error message' do
+ expect(json_response['message']).to eq(error_message)
+ end
+ end
+ end
+ end
+end
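
Several of the examples above delegate to `it_behaves_like 'sets the polling header'`, an RSpec shared example group defined elsewhere in the suite (see spec/support/shared_examples/controllers/error_tracking_shared_examples.rb in the diffstat). A minimal self-contained sketch of how shared examples are declared and reused; the 'returns a status hash' group is invented for the example:

  require 'rspec/autorun'

  shared_examples 'returns a status hash' do
    it 'includes a :status key' do
      expect(subject).to include(:status)
    end
  end

  describe 'a successful service response' do
    subject { { status: :success, projects: [] } }

    it_behaves_like 'returns a status hash'
  end

  describe 'an erroneous service response' do
    subject { { status: :error, message: 'error message' } }

    it_behaves_like 'returns a status hash'
  end
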
diff --git a/spec/controllers/projects/error_tracking_controller_spec.rb b/spec/controllers/projects/error_tracking_controller_spec.rb
index e5585d7b52d..588c4b05528 100644
--- a/spec/controllers/projects/error_tracking_controller_spec.rb
+++ b/spec/controllers/projects/error_tracking_controller_spec.rb
@@ -91,13 +91,13 @@ describe Projects::ErrorTrackingController do
.and_return(status: :success, issues: [error], pagination: {})
expect(list_issues_service).to receive(:external_url)
.and_return(external_url)
+
+ get :index, params: params
end
let(:error) { build(:error_tracking_error) }
it 'returns a list of errors' do
- get :index, params: params
-
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('error_tracking/index')
expect(json_response).to eq(
@@ -106,6 +106,8 @@ describe Projects::ErrorTrackingController do
'external_url' => external_url
)
end
+
+ it_behaves_like 'sets the polling header'
end
end
@@ -179,113 +181,6 @@ describe Projects::ErrorTrackingController do
end
end
- describe 'POST #list_projects' do
- context 'with insufficient permissions' do
- before do
- project.add_guest(user)
- end
-
- it 'returns 404' do
- post :list_projects, params: list_projects_params
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'with an anonymous user' do
- before do
- sign_out(user)
- end
-
- it 'redirects to sign-in page' do
- post :list_projects, params: list_projects_params
-
- expect(response).to have_gitlab_http_status(:redirect)
- end
- end
-
- context 'with authorized user' do
- let(:list_projects_service) { spy(:list_projects_service) }
- let(:sentry_project) { build(:error_tracking_project) }
-
- let(:permitted_params) do
- ActionController::Parameters.new(
- list_projects_params[:error_tracking_setting]
- ).permit!
- end
-
- before do
- allow(ErrorTracking::ListProjectsService)
- .to receive(:new).with(project, user, permitted_params)
- .and_return(list_projects_service)
- end
-
- context 'service result is successful' do
- before do
- expect(list_projects_service).to receive(:execute)
- .and_return(status: :success, projects: [sentry_project])
- end
-
- it 'returns a list of projects' do
- post :list_projects, params: list_projects_params
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to match_response_schema('error_tracking/list_projects')
- expect(json_response['projects']).to eq([sentry_project].as_json)
- end
- end
-
- context 'service result is erroneous' do
- let(:error_message) { 'error message' }
-
- context 'without http_status' do
- before do
- expect(list_projects_service).to receive(:execute)
- .and_return(status: :error, message: error_message)
- end
-
- it 'returns 400 with message' do
- get :list_projects, params: list_projects_params
-
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['message']).to eq(error_message)
- end
- end
-
- context 'with explicit http_status' do
- let(:http_status) { :no_content }
-
- before do
- expect(list_projects_service).to receive(:execute).and_return(
- status: :error,
- message: error_message,
- http_status: http_status
- )
- end
-
- it 'returns http_status with message' do
- get :list_projects, params: list_projects_params
-
- expect(response).to have_gitlab_http_status(http_status)
- expect(json_response['message']).to eq(error_message)
- end
- end
- end
- end
-
- private
-
- def list_projects_params(opts = {})
- project_params(
- format: :json,
- error_tracking_setting: {
- api_host: 'gitlab.com',
- token: 'token'
- }
- )
- end
- end
-
describe 'GET #issue_details' do
let_it_be(:issue_id) { 1234 }
@@ -308,30 +203,40 @@ describe Projects::ErrorTrackingController do
before do
expect(issue_details_service).to receive(:execute)
.and_return(status: :error, http_status: :no_content)
+ get :details, params: issue_params(issue_id: issue_id, format: :json)
end
it 'returns no data' do
- get :details, params: issue_params(issue_id: issue_id, format: :json)
-
expect(response).to have_gitlab_http_status(:no_content)
end
+
+ it_behaves_like 'sets the polling header'
end
context 'service result is successful' do
before do
expect(issue_details_service).to receive(:execute)
.and_return(status: :success, issue: error)
+
+ get :details, params: issue_params(issue_id: issue_id, format: :json)
end
let(:error) { build(:detailed_error_tracking_error) }
it 'returns an error' do
- get :details, params: issue_params(issue_id: issue_id, format: :json)
+ expected_error = error.as_json.except('first_release_version').merge(
+ {
+ 'gitlab_commit' => nil,
+ 'gitlab_commit_path' => nil
+ }
+ )
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('error_tracking/issue_detailed')
- expect(json_response['error']).to eq(error.as_json)
+ expect(json_response['error']).to eq(expected_error)
end
+
+ it_behaves_like 'sets the polling header'
end
context 'service result is erroneous' do
@@ -373,97 +278,53 @@ describe Projects::ErrorTrackingController do
end
end
- describe 'GET #stack_trace' do
- let_it_be(:issue_id) { 1234 }
-
- let(:issue_stack_trace_service) { spy(:issue_stack_trace_service) }
-
+ describe 'PUT #update' do
+ let(:issue_id) { 1234 }
+ let(:issue_update_service) { spy(:issue_update_service) }
let(:permitted_params) do
ActionController::Parameters.new(
- { issue_id: issue_id.to_s }
+ { issue_id: issue_id.to_s, status: 'resolved' }
).permit!
end
- subject(:get_stack_trace) do
- get :stack_trace, params: issue_params(issue_id: issue_id, format: :json)
+ subject(:update_issue) do
+ put :update, params: issue_params(issue_id: issue_id, status: 'resolved', format: :json)
end
before do
- expect(ErrorTracking::IssueLatestEventService)
+ expect(ErrorTracking::IssueUpdateService)
.to receive(:new).with(project, user, permitted_params)
- .and_return(issue_stack_trace_service)
+ .and_return(issue_update_service)
end
describe 'format json' do
- context 'awaiting data' do
- before do
- expect(issue_stack_trace_service).to receive(:execute)
- .and_return(status: :error, http_status: :no_content)
- end
-
- it 'returns no data' do
- get_stack_trace
-
- expect(response).to have_gitlab_http_status(:no_content)
- end
- end
-
- context 'service result is successful' do
+ context 'update result is successful' do
before do
- expect(issue_stack_trace_service).to receive(:execute)
- .and_return(status: :success, latest_event: error_event)
+ expect(issue_update_service).to receive(:execute)
+ .and_return(status: :success, updated: true)
- get_stack_trace
+ update_issue
end
- let(:error_event) { build(:error_tracking_error_event) }
-
- it 'returns an error' do
+ it 'returns a success' do
expect(response).to have_gitlab_http_status(:ok)
- expect(response).to match_response_schema('error_tracking/issue_stack_trace')
- end
-
- it 'highlights stack trace source code' do
- expect(json_response['error']).to eq(
- Gitlab::ErrorTracking::StackTraceHighlightDecorator.decorate(error_event).as_json
- )
+ expect(response).to match_response_schema('error_tracking/update_issue')
end
end
- context 'service result is erroneous' do
+ context 'update result is erroneous' do
let(:error_message) { 'error message' }
- context 'without http_status' do
- before do
- expect(issue_stack_trace_service).to receive(:execute)
- .and_return(status: :error, message: error_message)
- end
-
- it 'returns 400 with message' do
- get_stack_trace
+ before do
+ expect(issue_update_service).to receive(:execute)
+ .and_return(status: :error, message: error_message)
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['message']).to eq(error_message)
- end
+ update_issue
end
- context 'with explicit http_status' do
- let(:http_status) { :no_content }
-
- before do
- expect(issue_stack_trace_service).to receive(:execute).and_return(
- status: :error,
- message: error_message,
- http_status: http_status
- )
- end
-
- it 'returns http_status with message' do
- get_stack_trace
-
- expect(response).to have_gitlab_http_status(http_status)
- expect(json_response['message']).to eq(error_message)
- end
+ it 'returns 400 with message' do
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to eq(error_message)
end
end
end
diff --git a/spec/controllers/projects/find_file_controller_spec.rb b/spec/controllers/projects/find_file_controller_spec.rb
index a493985f8a0..4d8933f3aaf 100644
--- a/spec/controllers/projects/find_file_controller_spec.rb
+++ b/spec/controllers/projects/find_file_controller_spec.rb
@@ -28,11 +28,13 @@ describe Projects::FindFileController do
context "valid branch" do
let(:id) { 'master' }
+
it { is_expected.to respond_with(:success) }
end
context "invalid branch" do
let(:id) { 'invalid-branch' }
+
it { is_expected.to respond_with(:not_found) }
end
end
@@ -50,6 +52,7 @@ describe Projects::FindFileController do
context "valid branch" do
let(:id) { 'master' }
+
it 'returns an array of file path list' do
go
diff --git a/spec/controllers/projects/forks_controller_spec.rb b/spec/controllers/projects/forks_controller_spec.rb
index 80b5eb9a7ee..e351fb2b1f6 100644
--- a/spec/controllers/projects/forks_controller_spec.rb
+++ b/spec/controllers/projects/forks_controller_spec.rb
@@ -12,6 +12,21 @@ describe Projects::ForksController do
group.add_owner(user)
end
+ shared_examples 'forking disabled' do
+ let(:project) { create(:project, :private, :repository, :forking_disabled) }
+
+ before do
+ project.add_developer(user)
+ sign_in(user)
+ end
+
+    it 'responds with 404' do
+ subject
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+
describe 'GET index' do
def get_forks(search: nil)
get :index,
@@ -138,19 +153,19 @@ describe Projects::ForksController do
end
describe 'GET new' do
- def get_new
+ subject do
get :new,
- params: {
- namespace_id: project.namespace,
- project_id: project
- }
+ params: {
+ namespace_id: project.namespace,
+ project_id: project
+ }
end
context 'when user is signed in' do
it 'responds with status 200' do
sign_in(user)
- get_new
+ subject
expect(response).to have_gitlab_http_status(200)
end
@@ -160,21 +175,26 @@ describe Projects::ForksController do
it 'redirects to the sign-in page' do
sign_out(user)
- get_new
+ subject
expect(response).to redirect_to(new_user_session_path)
end
end
+
+ it_behaves_like 'forking disabled'
end
describe 'POST create' do
- def post_create(params = {})
- post :create,
- params: {
- namespace_id: project.namespace,
- project_id: project,
- namespace_key: user.namespace.id
- }.merge(params)
+ let(:params) do
+ {
+ namespace_id: project.namespace,
+ project_id: project,
+ namespace_key: user.namespace.id
+ }
+ end
+
+ subject do
+ post :create, params: params
end
context 'when user is signed in' do
@@ -183,18 +203,34 @@ describe Projects::ForksController do
end
it 'responds with status 302' do
- post_create
+ subject
expect(response).to have_gitlab_http_status(302)
expect(response).to redirect_to(namespace_project_import_path(user.namespace, project))
end
- it 'passes continue params to the redirect' do
- continue_params = { to: '/-/ide/project/path', notice: 'message' }
- post_create continue: continue_params
+ context 'continue params' do
+ let(:params) do
+ {
+ namespace_id: project.namespace,
+ project_id: project,
+ namespace_key: user.namespace.id,
+ continue: continue_params
+ }
+ end
+ let(:continue_params) do
+ {
+ to: '/-/ide/project/path',
+ notice: 'message'
+ }
+ end
- expect(response).to have_gitlab_http_status(302)
- expect(response).to redirect_to(namespace_project_import_path(user.namespace, project, continue: continue_params))
+ it 'passes continue params to the redirect' do
+ subject
+
+ expect(response).to have_gitlab_http_status(302)
+ expect(response).to redirect_to(namespace_project_import_path(user.namespace, project, continue: continue_params))
+ end
end
end
@@ -202,10 +238,12 @@ describe Projects::ForksController do
it 'redirects to the sign-in page' do
sign_out(user)
- post_create
+ subject
expect(response).to redirect_to(new_user_session_path)
end
end
+
+ it_behaves_like 'forking disabled'
end
end
diff --git a/spec/controllers/projects/git_http_controller_spec.rb b/spec/controllers/projects/git_http_controller_spec.rb
index b756dd5662d..4df53121aaa 100644
--- a/spec/controllers/projects/git_http_controller_spec.rb
+++ b/spec/controllers/projects/git_http_controller_spec.rb
@@ -3,10 +3,19 @@
require 'spec_helper'
describe Projects::GitHttpController do
+ include GitHttpHelpers
+
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let(:project_params) do
+ {
+ namespace_id: project.namespace.to_param,
+ project_id: project.path + '.git'
+ }
+ end
+ let(:params) { project_params }
+
describe 'HEAD #info_refs' do
it 'returns 403' do
- project = create(:project, :public, :repository)
-
head :info_refs, params: { namespace_id: project.namespace.to_param, project_id: project.path + '.git' }
expect(response.status).to eq(403)
@@ -14,18 +23,39 @@ describe Projects::GitHttpController do
end
describe 'GET #info_refs' do
+ let(:params) { project_params.merge(service: 'git-upload-pack') }
+
it 'returns 401 for unauthenticated requests to public repositories when http protocol is disabled' do
stub_application_setting(enabled_git_access_protocol: 'ssh')
- project = create(:project, :public, :repository)
- get :info_refs, params: { service: 'git-upload-pack', namespace_id: project.namespace.to_param, project_id: project.path + '.git' }
+ get :info_refs, params: params
expect(response.status).to eq(401)
end
- context 'with exceptions' do
- let(:project) { create(:project, :public, :repository) }
+ context 'with authorized user' do
+ let(:user) { project.owner }
+
+ before do
+ request.headers.merge! auth_env(user.username, user.password, nil)
+ end
+
+ it 'returns 200' do
+ get :info_refs, params: params
+
+ expect(response.status).to eq(200)
+ end
+
+ it 'updates the user activity' do
+ expect_next_instance_of(Users::ActivityService) do |activity_service|
+ expect(activity_service).to receive(:execute)
+ end
+
+ get :info_refs, params: params
+ end
+ end
+ context 'with exceptions' do
before do
allow(controller).to receive(:verify_workhorse_api!).and_return(true)
end
@@ -33,7 +63,7 @@ describe Projects::GitHttpController do
it 'returns 503 with GRPC Unavailable' do
allow(controller).to receive(:access_check).and_raise(GRPC::Unavailable)
- get :info_refs, params: { service: 'git-upload-pack', namespace_id: project.namespace.to_param, project_id: project.path + '.git' }
+ get :info_refs, params: params
expect(response.status).to eq(503)
end
@@ -41,11 +71,37 @@ describe Projects::GitHttpController do
it 'returns 503 with timeout error' do
allow(controller).to receive(:access_check).and_raise(Gitlab::GitAccess::TimeoutError)
- get :info_refs, params: { service: 'git-upload-pack', namespace_id: project.namespace.to_param, project_id: project.path + '.git' }
+ get :info_refs, params: params
expect(response.status).to eq(503)
expect(response.body).to eq 'Gitlab::GitAccess::TimeoutError'
end
end
end
+
+ describe 'POST #git_upload_pack' do
+ before do
+ allow(controller).to receive(:authenticate_user).and_return(true)
+ allow(controller).to receive(:verify_workhorse_api!).and_return(true)
+ allow(controller).to receive(:access_check).and_return(nil)
+ end
+
+ after do
+ post :git_upload_pack, params: params
+ end
+
+ context 'on a read-only instance' do
+ before do
+ allow(Gitlab::Database).to receive(:read_only?).and_return(true)
+ end
+
+ it 'does not update project statistics' do
+ expect(ProjectDailyStatisticsWorker).not_to receive(:perform_async)
+ end
+ end
+
+ it 'updates project statistics' do
+ expect(ProjectDailyStatisticsWorker).to receive(:perform_async)
+ end
+ end
end
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index f64e928098d..945a56365c8 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -1357,6 +1357,7 @@ describe Projects::IssuesController do
describe 'GET #discussions' do
let!(:discussion) { create(:discussion_note_on_issue, noteable: issue, project: issue.project) }
+
context 'when authenticated' do
before do
project.add_developer(user)
diff --git a/spec/controllers/projects/jobs_controller_spec.rb b/spec/controllers/projects/jobs_controller_spec.rb
index edef24f6595..53c40683a5b 100644
--- a/spec/controllers/projects/jobs_controller_spec.rb
+++ b/spec/controllers/projects/jobs_controller_spec.rb
@@ -556,6 +556,12 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
expect(json_response['status']).to eq job.status
expect(json_response['lines']).to eq [{ 'content' => [{ 'text' => 'BUILD TRACE' }], 'offset' => 0 }]
end
+
+ it 'sets being-watched flag for the job' do
+ expect(response).to have_gitlab_http_status(:ok)
+
+ expect(job.trace.being_watched?).to be(true)
+ end
end
context 'when job has no traces' do
diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb
index 3b7d8adb8e5..d5b1bfe0ac4 100644
--- a/spec/controllers/projects/merge_requests_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests_controller_spec.rb
@@ -1289,19 +1289,6 @@ describe Projects::MergeRequestsController do
get_ci_environments_status(environment_target: 'merge_commit')
end
- context 'when the deployment_merge_requests_widget feature flag is disabled' do
- it 'uses the deployments retrieved using CI builds' do
- stub_feature_flags(deployment_merge_requests_widget: false)
-
- expect(EnvironmentStatus)
- .to receive(:after_merge_request)
- .with(merge_request, user)
- .and_call_original
-
- get_ci_environments_status(environment_target: 'merge_commit')
- end
- end
-
def get_ci_environments_status(extra_params = {})
params = {
namespace_id: merge_request.project.namespace.to_param,
@@ -1389,7 +1376,7 @@ describe Projects::MergeRequestsController do
end
def expect_rebase_worker_for(user)
- expect(RebaseWorker).to receive(:perform_async).with(merge_request.id, user.id)
+ expect(RebaseWorker).to receive(:perform_async).with(merge_request.id, user.id, false)
end
context 'successfully' do
@@ -1425,7 +1412,7 @@ describe Projects::MergeRequestsController do
post_rebase
expect(response.status).to eq(409)
- expect(json_response['merge_error']).to eq(MergeRequest::REBASE_LOCK_MESSAGE)
+ expect(json_response['merge_error']).to eq('Failed to enqueue the rebase operation, possibly due to a long-lived transaction. Try again later.')
end
end
diff --git a/spec/controllers/projects/pages_controller_spec.rb b/spec/controllers/projects/pages_controller_spec.rb
index f80bbf0d78f..c07619465bf 100644
--- a/spec/controllers/projects/pages_controller_spec.rb
+++ b/spec/controllers/projects/pages_controller_spec.rb
@@ -115,5 +115,16 @@ describe Projects::PagesController do
patch :update, params: request_params
end
+
+ context 'when update_service returns an error message' do
+ let(:update_service) { double(execute: { status: :error, message: 'some error happened' }) }
+
+ it 'adds an error message' do
+ patch :update, params: request_params
+
+ expect(response).to redirect_to(project_pages_path(project))
+ expect(flash[:alert]).to eq('some error happened')
+ end
+ end
end
end
diff --git a/spec/controllers/projects/performance_monitoring/dashboards_controller_spec.rb b/spec/controllers/projects/performance_monitoring/dashboards_controller_spec.rb
new file mode 100644
index 00000000000..1c29b68dc24
--- /dev/null
+++ b/spec/controllers/projects/performance_monitoring/dashboards_controller_spec.rb
@@ -0,0 +1,132 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Projects::PerformanceMonitoring::DashboardsController do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:namespace) { create(:namespace) }
+ let!(:project) { create(:project, :repository, name: 'dashboard-project', namespace: namespace) }
+ let(:repository) { project.repository }
+ let(:branch) { double(name: branch_name) }
+ let(:commit_message) { 'test' }
+ let(:branch_name) { "#{Time.current.to_i}_dashboard_new_branch" }
+ let(:dashboard) { 'config/prometheus/common_metrics.yml' }
+ let(:file_name) { 'custom_dashboard.yml' }
+ let(:params) do
+ {
+ namespace_id: namespace,
+ project_id: project,
+ dashboard: dashboard,
+ file_name: file_name,
+ commit_message: commit_message,
+ branch: branch_name,
+ format: :json
+ }
+ end
+
+ describe 'POST #create' do
+ context 'authenticated user' do
+ before do
+ sign_in(user)
+ end
+
+ context 'project with repository feature' do
+ context 'with rights to push to the repository' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ context 'valid parameters' do
+ it 'delegates cloning to ::Metrics::Dashboard::CloneDashboardService' do
+ allow(controller).to receive(:repository).and_return(repository)
+ allow(repository).to receive(:find_branch).and_return(branch)
+ dashboard_attrs = {
+ dashboard: dashboard,
+ file_name: file_name,
+ commit_message: commit_message,
+ branch: branch_name
+ }
+
+ service_instance = instance_double(::Metrics::Dashboard::CloneDashboardService)
+ expect(::Metrics::Dashboard::CloneDashboardService).to receive(:new).with(project, user, dashboard_attrs).and_return(service_instance)
+ expect(service_instance).to receive(:execute).and_return(status: :success, http_status: :created, dashboard: { path: 'dashboard/path' })
+
+ post :create, params: params
+ end
+
+ context 'request format json' do
+ it 'returns services response' do
+ allow(::Metrics::Dashboard::CloneDashboardService).to receive(:new).and_return(double(execute: { status: :success, dashboard: { path: ".gitlab/dashboards/#{file_name}" }, http_status: :created }))
+ allow(controller).to receive(:repository).and_return(repository)
+ allow(repository).to receive(:find_branch).and_return(branch)
+
+ post :create, params: params
+
+ expect(response).to have_gitlab_http_status :created
+ expect(response).to set_flash[:notice].to eq("Your dashboard has been copied. You can <a href=\"/-/ide/project/#{namespace.path}/#{project.name}/edit/#{branch_name}/-/.gitlab/dashboards/#{file_name}\">edit it here</a>.")
+ expect(json_response).to eq('status' => 'success', 'dashboard' => { 'path' => ".gitlab/dashboards/#{file_name}" })
+ end
+
+ context 'Metrics::Dashboard::CloneDashboardService failure' do
+ it 'returns json with failure message', :aggregate_failures do
+ allow(::Metrics::Dashboard::CloneDashboardService).to receive(:new).and_return(double(execute: { status: :error, message: 'something went wrong', http_status: :bad_request }))
+
+ post :create, params: params
+
+ expect(response).to have_gitlab_http_status :bad_request
+ expect(json_response).to eq('error' => 'something went wrong')
+ end
+ end
+
+ %w(commit_message file_name dashboard).each do |param|
+ context "param #{param} is missing" do
+ let(param.to_s) { nil }
+
+ it 'responds with bad request status and error message', :aggregate_failures do
+ post :create, params: params
+
+ expect(response).to have_gitlab_http_status :bad_request
+ expect(json_response).to eq('error' => "Request parameter #{param} is missing.")
+ end
+ end
+ end
+
+ context "param branch_name is missing" do
+ let(:branch_name) { nil }
+
+ it 'responds with bad request status and error message', :aggregate_failures do
+ post :create, params: params
+
+ expect(response).to have_gitlab_http_status :bad_request
+ expect(json_response).to eq('error' => "Request parameter branch is missing.")
+ end
+ end
+ end
+ end
+ end
+
+ context 'without rights to push to repository' do
+ before do
+ project.add_guest(user)
+ end
+
+ it 'responds with :forbidden status code' do
+ post :create, params: params
+
+ expect(response).to have_gitlab_http_status :forbidden
+ end
+ end
+ end
+
+ context 'project without repository feature' do
+ let!(:project) { create(:project, name: 'dashboard-project', namespace: namespace) }
+
+ it 'responds with :not_found status code' do
+ post :create, params: params
+
+ expect(response).to have_gitlab_http_status :not_found
+ end
+ end
+ end
+ end
+end
diff --git a/spec/controllers/projects/pipelines_controller_spec.rb b/spec/controllers/projects/pipelines_controller_spec.rb
index 902a84a843b..4cc5b3cba7c 100644
--- a/spec/controllers/projects/pipelines_controller_spec.rb
+++ b/spec/controllers/projects/pipelines_controller_spec.rb
@@ -740,4 +740,51 @@ describe Projects::PipelinesController do
expect(response).to have_gitlab_http_status(404)
end
end
+
+ describe 'DELETE #destroy' do
+ let!(:project) { create(:project, :private, :repository) }
+ let!(:pipeline) { create(:ci_pipeline, :failed, project: project) }
+ let!(:build) { create(:ci_build, :failed, pipeline: pipeline) }
+
+ context 'when user has ability to delete pipeline' do
+ before do
+ sign_in(project.owner)
+ end
+
+ it 'deletes pipeline and redirects' do
+ delete_pipeline
+
+ expect(response).to have_gitlab_http_status(303)
+
+ expect(Ci::Build.exists?(build.id)).to be_falsy
+ expect(Ci::Pipeline.exists?(pipeline.id)).to be_falsy
+ end
+
+ context 'and builds are disabled' do
+ let(:feature) { ProjectFeature::DISABLED }
+
+ it 'fails to delete pipeline' do
+ delete_pipeline
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+
+ context 'when user has no privileges' do
+ it 'fails to delete pipeline' do
+ delete_pipeline
+
+ expect(response).to have_gitlab_http_status(403)
+ end
+ end
+
+ def delete_pipeline
+ delete :destroy, params: {
+ namespace_id: project.namespace,
+ project_id: project,
+ id: pipeline.id
+ }
+ end
+ end
end
diff --git a/spec/controllers/projects/prometheus/metrics_controller_spec.rb b/spec/controllers/projects/prometheus/metrics_controller_spec.rb
index afdb8bbc983..157948de29d 100644
--- a/spec/controllers/projects/prometheus/metrics_controller_spec.rb
+++ b/spec/controllers/projects/prometheus/metrics_controller_spec.rb
@@ -85,7 +85,7 @@ describe Projects::Prometheus::MetricsController do
end
it 'calls prometheus adapter service' do
- expect_next_instance_of(::Prometheus::AdapterService) do |instance|
+ expect_next_instance_of(::Gitlab::Prometheus::Adapter) do |instance|
expect(instance).to receive(:prometheus_adapter)
end
diff --git a/spec/controllers/projects/raw_controller_spec.rb b/spec/controllers/projects/raw_controller_spec.rb
index bdf1c1a84d3..a570db12d94 100644
--- a/spec/controllers/projects/raw_controller_spec.rb
+++ b/spec/controllers/projects/raw_controller_spec.rb
@@ -56,10 +56,13 @@ describe Projects::RawController do
stub_application_setting(raw_blob_request_limit: 5)
end
- it 'prevents from accessing the raw file' do
- execute_raw_requests(requests: 6, project: project, file_path: file_path)
+    it 'prevents access to the raw file', :request_store do
+ execute_raw_requests(requests: 5, project: project, file_path: file_path)
+
+ expect { execute_raw_requests(requests: 1, project: project, file_path: file_path) }
+ .to change { Gitlab::GitalyClient.get_request_count }.by(0)
- expect(flash[:alert]).to eq(_('You cannot access the raw file. Please wait a minute.'))
+ expect(response.body).to eq(_('You cannot access the raw file. Please wait a minute.'))
expect(response).to have_gitlab_http_status(429)
end
@@ -109,7 +112,7 @@ describe Projects::RawController do
execute_raw_requests(requests: 3, project: project, file_path: modified_path)
- expect(flash[:alert]).to eq(_('You cannot access the raw file. Please wait a minute.'))
+ expect(response.body).to eq(_('You cannot access the raw file. Please wait a minute.'))
expect(response).to have_gitlab_http_status(429)
end
end
@@ -137,7 +140,7 @@ describe Projects::RawController do
# Accessing downcase version of readme
execute_raw_requests(requests: 6, project: project, file_path: file_path)
- expect(flash[:alert]).to eq(_('You cannot access the raw file. Please wait a minute.'))
+ expect(response.body).to eq(_('You cannot access the raw file. Please wait a minute.'))
expect(response).to have_gitlab_http_status(429)
# Accessing upcase version of readme
diff --git a/spec/controllers/projects/serverless/functions_controller_spec.rb b/spec/controllers/projects/serverless/functions_controller_spec.rb
index 33d66f4ac5a..7e98ded88a9 100644
--- a/spec/controllers/projects/serverless/functions_controller_spec.rb
+++ b/spec/controllers/projects/serverless/functions_controller_spec.rb
@@ -7,9 +7,9 @@ describe Projects::Serverless::FunctionsController do
include ReactiveCachingHelpers
let(:user) { create(:user) }
- let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+ let(:project) { create(:project, :repository) }
+ let(:cluster) { create(:cluster, :project, :provided_by_gcp, projects: [project]) }
let(:service) { cluster.platform_kubernetes }
- let(:project) { cluster.project }
let(:environment) { create(:environment, project: project) }
let!(:deployment) { create(:deployment, :success, environment: environment, cluster: cluster) }
let(:knative_services_finder) { environment.knative_services_finder }
diff --git a/spec/controllers/projects/snippets_controller_spec.rb b/spec/controllers/projects/snippets_controller_spec.rb
index d372a94db56..ee145a62b57 100644
--- a/spec/controllers/projects/snippets_controller_spec.rb
+++ b/spec/controllers/projects/snippets_controller_spec.rb
@@ -445,4 +445,64 @@ describe Projects::SnippetsController do
end
end
end
+
+ describe 'DELETE #destroy' do
+ let!(:snippet) { create(:project_snippet, :private, project: project, author: user) }
+
+ let(:params) do
+ {
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ id: snippet.to_param
+ }
+ end
+
+ context 'when current user has ability to destroy the snippet' do
+ before do
+ sign_in(user)
+ end
+
+ it 'removes the snippet' do
+ delete :destroy, params: params
+
+ expect { snippet.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+
+      context 'when snippet is successfully destroyed' do
+ it 'redirects to the project snippets page' do
+ delete :destroy, params: params
+
+ expect(response).to redirect_to(project_snippets_path(project))
+ end
+ end
+
+ context 'when snippet is not destroyed' do
+ before do
+ allow(snippet).to receive(:destroy).and_return(false)
+ controller.instance_variable_set(:@snippet, snippet)
+ end
+
+ it 'renders the snippet page with errors' do
+ delete :destroy, params: params
+
+ expect(flash[:alert]).to eq('Failed to remove snippet.')
+ expect(response).to redirect_to(project_snippet_path(project, snippet))
+ end
+ end
+ end
+
+ context 'when current_user does not have ability to destroy the snippet' do
+ let(:another_user) { create(:user) }
+
+ before do
+ sign_in(another_user)
+ end
+
+ it 'responds with status 404' do
+ delete :destroy, params: params
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
end
diff --git a/spec/controllers/projects/tags_controller_spec.rb b/spec/controllers/projects/tags_controller_spec.rb
index 7e5237facf6..15ef1c65c53 100644
--- a/spec/controllers/projects/tags_controller_spec.rb
+++ b/spec/controllers/projects/tags_controller_spec.rb
@@ -29,11 +29,13 @@ describe Projects::TagsController do
context "valid tag" do
let(:id) { 'v1.0.0' }
+
it { is_expected.to respond_with(:success) }
end
context "invalid tag" do
let(:id) { 'latest' }
+
it { is_expected.to respond_with(:not_found) }
end
end
diff --git a/spec/controllers/projects/tree_controller_spec.rb b/spec/controllers/projects/tree_controller_spec.rb
index c0c11db5dd6..7c9abdf700b 100644
--- a/spec/controllers/projects/tree_controller_spec.rb
+++ b/spec/controllers/projects/tree_controller_spec.rb
@@ -89,6 +89,34 @@ describe Projects::TreeController do
end
end
+ describe "GET show" do
+ context 'lfs_blob_ids instance variable' do
+ let(:id) { 'master' }
+
+ context 'with vue tree view enabled' do
+ before do
+ get(:show, params: { namespace_id: project.namespace.to_param, project_id: project, id: id })
+ end
+
+ it 'is not set' do
+ expect(assigns[:lfs_blob_ids]).to be_nil
+ end
+ end
+
+ context 'with vue tree view disabled' do
+ before do
+ stub_feature_flags(vue_file_list: false)
+
+ get(:show, params: { namespace_id: project.namespace.to_param, project_id: project, id: id })
+ end
+
+ it 'is set' do
+ expect(assigns[:lfs_blob_ids]).not_to be_nil
+ end
+ end
+ end
+ end
+
describe 'GET show with whitespace in ref' do
render_views
diff --git a/spec/controllers/projects/uploads_controller_spec.rb b/spec/controllers/projects/uploads_controller_spec.rb
index cd6a9886f72..a70669e86a6 100644
--- a/spec/controllers/projects/uploads_controller_spec.rb
+++ b/spec/controllers/projects/uploads_controller_spec.rb
@@ -25,6 +25,21 @@ describe Projects::UploadsController do
end
end
+ context 'with a moved project' do
+ let!(:upload) { create(:upload, :issuable_upload, :with_file, model: model) }
+ let(:project) { model }
+ let(:upload_path) { File.basename(upload.path) }
+ let!(:redirect_route) { project.redirect_routes.create(path: project.full_path + 'old') }
+
+ it 'redirects to a file with the proper extension' do
+ get :show, params: { namespace_id: project.namespace, project_id: project.to_param + 'old', filename: File.basename(upload.path), secret: upload.secret }
+
+ expect(response.location).to eq(show_project_uploads_url(project, upload.secret, upload_path))
+ expect(response.location).to end_with(upload.path)
+ expect(response).to have_gitlab_http_status(:redirect)
+ end
+ end
+
context "when exception occurs" do
before do
allow(FileUploader).to receive(:workhorse_authorize).and_raise(SocketError.new)
diff --git a/spec/controllers/projects/wikis_controller_spec.rb b/spec/controllers/projects/wikis_controller_spec.rb
index 3100aa2cb96..bfa555aab4c 100644
--- a/spec/controllers/projects/wikis_controller_spec.rb
+++ b/spec/controllers/projects/wikis_controller_spec.rb
@@ -213,6 +213,7 @@ describe Projects::WikisController do
describe 'PATCH #update' do
let(:new_title) { 'New title' }
let(:new_content) { 'New content' }
+
subject do
patch(:update,
params: {
diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb
index 5259c612bbd..9ae1277de26 100644
--- a/spec/controllers/projects_controller_spec.rb
+++ b/spec/controllers/projects_controller_spec.rb
@@ -289,6 +289,36 @@ describe ProjectsController do
.not_to exceed_query_limit(2).for_query(expected_query)
end
end
+
+ context 'lfs_blob_ids instance variable' do
+ let(:project) { create(:project, :public, :repository) }
+
+ before do
+ sign_in(user)
+ end
+
+ context 'with vue tree view enabled' do
+ before do
+ get :show, params: { namespace_id: project.namespace, id: project }
+ end
+
+ it 'is not set' do
+ expect(assigns[:lfs_blob_ids]).to be_nil
+ end
+ end
+
+ context 'with vue tree view disabled' do
+ before do
+ stub_feature_flags(vue_file_list: false)
+
+ get :show, params: { namespace_id: project.namespace, id: project }
+ end
+
+ it 'is set' do
+ expect(assigns[:lfs_blob_ids]).not_to be_nil
+ end
+ end
+ end
end
describe 'GET edit' do
diff --git a/spec/controllers/registrations_controller_spec.rb b/spec/controllers/registrations_controller_spec.rb
index da36148ba85..214eb35ec9d 100644
--- a/spec/controllers/registrations_controller_spec.rb
+++ b/spec/controllers/registrations_controller_spec.rb
@@ -306,6 +306,23 @@ describe RegistrationsController do
expect(subject.current_user).not_to be_nil
end
+
+ context 'with the experimental signup flow enabled and the user is part of the experimental group' do
+ before do
+ stub_experiment(signup_flow: true)
+ stub_experiment_for_user(signup_flow: true)
+ end
+
+ let(:base_user_params) { { first_name: 'First', last_name: 'Last', username: 'new_username', email: 'new@user.com', password: 'Any_password' } }
+
+ it 'sets name from first and last name' do
+ post :create, params: { new_user: base_user_params }
+
+ expect(User.last.first_name).to eq(base_user_params[:first_name])
+ expect(User.last.last_name).to eq(base_user_params[:last_name])
+ expect(User.last.name).to eq("#{base_user_params[:first_name]} #{base_user_params[:last_name]}")
+ end
+ end
end
describe '#destroy' do
@@ -395,7 +412,7 @@ describe RegistrationsController do
label: anything,
property: 'experimental_group'
)
- patch :update_registration, params: { user: { name: 'New name', role: 'software_developer', setup_for_company: 'false' } }
+ patch :update_registration, params: { user: { role: 'software_developer', setup_for_company: 'false' } }
end
end
end
diff --git a/spec/controllers/search_controller_spec.rb b/spec/controllers/search_controller_spec.rb
index 3dcafae295a..ca7b8a4036a 100644
--- a/spec/controllers/search_controller_spec.rb
+++ b/spec/controllers/search_controller_spec.rb
@@ -92,6 +92,7 @@ describe SearchController do
end
context 'global search' do
+ using RSpec::Parameterized::TableSyntax
render_views
it 'omits pipeline status from load' do
@@ -102,6 +103,41 @@ describe SearchController do
expect(assigns[:search_objects].first).to eq project
end
+
+ context 'check search term length' do
+ let(:search_queries) do
+ char_limit = SearchService::SEARCH_CHAR_LIMIT
+ term_limit = SearchService::SEARCH_TERM_LIMIT
+ {
+ chars_under_limit: ('a' * (char_limit - 1)),
+ chars_over_limit: ('a' * (char_limit + 1)),
+ terms_under_limit: ('abc ' * (term_limit - 1)),
+ terms_over_limit: ('abc ' * (term_limit + 1))
+ }
+ end
+
+ where(:string_name, :expectation) do
+ :chars_under_limit | :not_to_set_flash
+ :chars_over_limit | :set_chars_flash
+ :terms_under_limit | :not_to_set_flash
+ :terms_over_limit | :set_terms_flash
+ end
+
+ with_them do
+ it do
+ get :show, params: { scope: 'projects', search: search_queries[string_name] }
+
+ case expectation
+ when :not_to_set_flash
+ expect(controller).not_to set_flash[:alert]
+ when :set_chars_flash
+ expect(controller).to set_flash[:alert].to(/characters/)
+ when :set_terms_flash
+ expect(controller).to set_flash[:alert].to(/terms/)
+ end
+ end
+ end
+ end
end
it 'finds issue comments' do
diff --git a/spec/controllers/snippets_controller_spec.rb b/spec/controllers/snippets_controller_spec.rb
index 510db4374c0..c8f9e4256c9 100644
--- a/spec/controllers/snippets_controller_spec.rb
+++ b/spec/controllers/snippets_controller_spec.rb
@@ -664,4 +664,56 @@ describe SnippetsController do
expect(json_response.keys).to match_array(%w(body references))
end
end
+
+ describe 'DELETE #destroy' do
+ let!(:snippet) { create :personal_snippet, author: user }
+
+ context 'when current user has ability to destroy the snippet' do
+ before do
+ sign_in(user)
+ end
+
+ it 'removes the snippet' do
+ delete :destroy, params: { id: snippet.to_param }
+
+ expect { snippet.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+
+      context 'when snippet is successfully destroyed' do
+        it 'redirects to the dashboard snippets page' do
+ delete :destroy, params: { id: snippet.to_param }
+
+ expect(response).to redirect_to(dashboard_snippets_path)
+ end
+ end
+
+ context 'when snippet is not destroyed' do
+ before do
+ allow(snippet).to receive(:destroy).and_return(false)
+ controller.instance_variable_set(:@snippet, snippet)
+ end
+
+ it 'renders the snippet page with errors' do
+ delete :destroy, params: { id: snippet.to_param }
+
+ expect(flash[:alert]).to eq('Failed to remove snippet.')
+ expect(response).to redirect_to(snippet_path(snippet))
+ end
+ end
+ end
+
+ context 'when current_user does not have ability to destroy the snippet' do
+ let(:another_user) { create(:user) }
+
+ before do
+ sign_in(another_user)
+ end
+
+ it 'responds with status 404' do
+ delete :destroy, params: { id: snippet.to_param }
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
end
diff --git a/spec/controllers/users_controller_spec.rb b/spec/controllers/users_controller_spec.rb
index bbbb9691f53..597d2a185b5 100644
--- a/spec/controllers/users_controller_spec.rb
+++ b/spec/controllers/users_controller_spec.rb
@@ -81,6 +81,7 @@ describe UsersController do
context 'json with events' do
let(:project) { create(:project) }
+
before do
project.add_developer(user)
Gitlab::DataBuilder::Push.build_sample(project, user)
diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb
index a42916a83a6..482e0fbe7ce 100644
--- a/spec/db/schema_spec.rb
+++ b/spec/db/schema_spec.rb
@@ -34,7 +34,7 @@ describe 'Database schema' do
draft_notes: %w[discussion_id commit_id],
emails: %w[user_id],
events: %w[target_id],
- epics: %w[updated_by_id last_edited_by_id start_date_sourcing_milestone_id due_date_sourcing_milestone_id state_id],
+ epics: %w[updated_by_id last_edited_by_id state_id],
forked_project_links: %w[forked_from_project_id],
geo_event_log: %w[hashed_storage_attachments_event_id],
geo_job_artifact_deleted_events: %w[job_artifact_id],
@@ -133,6 +133,7 @@ describe 'Database schema' do
'Ci::BuildTraceChunk' => %w[data_store],
'Ci::JobArtifact' => %w[file_type],
'Ci::Pipeline' => %w[source config_source failure_reason],
+ 'Ci::Processable' => %w[failure_reason],
'Ci::Runner' => %w[access_level],
'Ci::Stage' => %w[status],
'Clusters::Applications::Ingress' => %w[ingress_type],
diff --git a/spec/factories/analytics/cycle_analytics/project_stages.rb b/spec/factories/analytics/cycle_analytics/project_stages.rb
index 6f8c140ed8a..3a481bd20fd 100644
--- a/spec/factories/analytics/cycle_analytics/project_stages.rb
+++ b/spec/factories/analytics/cycle_analytics/project_stages.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :cycle_analytics_project_stage, class: Analytics::CycleAnalytics::ProjectStage do
+ factory :cycle_analytics_project_stage, class: 'Analytics::CycleAnalytics::ProjectStage' do
project
sequence(:name) { |n| "Stage ##{n}" }
hidden { false }
diff --git a/spec/factories/aws/roles.rb b/spec/factories/aws/roles.rb
index c078033dfad..7195b571366 100644
--- a/spec/factories/aws/roles.rb
+++ b/spec/factories/aws/roles.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :aws_role, class: Aws::Role do
+ factory :aws_role, class: 'Aws::Role' do
user
role_arn { 'arn:aws:iam::123456789012:role/role-name' }
diff --git a/spec/factories/badge.rb b/spec/factories/badge.rb
index 1d4e29014cc..7623797a7fa 100644
--- a/spec/factories/badge.rb
+++ b/spec/factories/badge.rb
@@ -6,11 +6,11 @@ FactoryBot.define do
image_url { generate(:url) }
end
- factory :project_badge, traits: [:base_badge], class: ProjectBadge do
+ factory :project_badge, traits: [:base_badge], class: 'ProjectBadge' do
project
end
- factory :group_badge, aliases: [:badge], traits: [:base_badge], class: GroupBadge do
+ factory :group_badge, aliases: [:badge], traits: [:base_badge], class: 'GroupBadge' do
group
end
end
diff --git a/spec/factories/chat_names.rb b/spec/factories/chat_names.rb
index ace5d5e83c9..73c885806f2 100644
--- a/spec/factories/chat_names.rb
+++ b/spec/factories/chat_names.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :chat_name, class: ChatName do
+ factory :chat_name, class: 'ChatName' do
user factory: :user
service factory: :service
diff --git a/spec/factories/chat_teams.rb b/spec/factories/chat_teams.rb
index 52628e6d53d..f413555d980 100644
--- a/spec/factories/chat_teams.rb
+++ b/spec/factories/chat_teams.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :chat_team, class: ChatTeam do
+ factory :chat_team, class: 'ChatTeam' do
sequence(:team_id) { |n| "abcdefghijklm#{n}" }
namespace factory: :group
end
diff --git a/spec/factories/ci/bridge.rb b/spec/factories/ci/bridge.rb
index 60219b07cf0..b2e8051eb5e 100644
--- a/spec/factories/ci/bridge.rb
+++ b/spec/factories/ci/bridge.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :ci_bridge, class: Ci::Bridge do
+ factory :ci_bridge, class: 'Ci::Bridge' do
name { 'bridge' }
stage { 'test' }
stage_idx { 0 }
diff --git a/spec/factories/ci/build_need.rb b/spec/factories/ci/build_need.rb
index 568aff45a91..fa72e696343 100644
--- a/spec/factories/ci/build_need.rb
+++ b/spec/factories/ci/build_need.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :ci_build_need, class: Ci::BuildNeed do
+ factory :ci_build_need, class: 'Ci::BuildNeed' do
build factory: :ci_build
sequence(:name) { |n| "build_#{n}" }
end
diff --git a/spec/factories/ci/build_trace_chunks.rb b/spec/factories/ci/build_trace_chunks.rb
index 22f091f8e76..7c348f4b7e4 100644
--- a/spec/factories/ci/build_trace_chunks.rb
+++ b/spec/factories/ci/build_trace_chunks.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :ci_build_trace_chunk, class: Ci::BuildTraceChunk do
+ factory :ci_build_trace_chunk, class: 'Ci::BuildTraceChunk' do
build factory: :ci_build
chunk_index { 0 }
data_store { :redis }
diff --git a/spec/factories/ci/build_trace_section_names.rb b/spec/factories/ci/build_trace_section_names.rb
index e52694ef3dc..b9b66b49317 100644
--- a/spec/factories/ci/build_trace_section_names.rb
+++ b/spec/factories/ci/build_trace_section_names.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :ci_build_trace_section_name, class: Ci::BuildTraceSectionName do
+ factory :ci_build_trace_section_name, class: 'Ci::BuildTraceSectionName' do
sequence(:name) { |n| "section_#{n}" }
project factory: :project
end
diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb
index ecb1f1996d9..3d65f9065bf 100644
--- a/spec/factories/ci/builds.rb
+++ b/spec/factories/ci/builds.rb
@@ -3,7 +3,7 @@
include ActionDispatch::TestProcess
FactoryBot.define do
- factory :ci_build, class: Ci::Build do
+ factory :ci_build, class: 'Ci::Build' do
name { 'test' }
stage { 'test' }
stage_idx { 0 }
@@ -77,6 +77,10 @@ FactoryBot.define do
status { 'created' }
end
+ trait :waiting_for_resource do
+ status { 'waiting_for_resource' }
+ end
+
trait :preparing do
status { 'preparing' }
end
@@ -207,6 +211,14 @@ FactoryBot.define do
trigger_request factory: :ci_trigger_request
end
+ trait :resource_group do
+ waiting_for_resource_at { 5.minutes.ago }
+
+ after(:build) do |build, evaluator|
+ build.resource_group = create(:ci_resource_group, project: build.project)
+ end
+ end
+
after(:build) do |build, evaluator|
build.project ||= build.pipeline.project
end
diff --git a/spec/factories/ci/group_variables.rb b/spec/factories/ci/group_variables.rb
index 217f05a088e..d3b891eb1e3 100644
--- a/spec/factories/ci/group_variables.rb
+++ b/spec/factories/ci/group_variables.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :ci_group_variable, class: Ci::GroupVariable do
+ factory :ci_group_variable, class: 'Ci::GroupVariable' do
sequence(:key) { |n| "VARIABLE_#{n}" }
value { 'VARIABLE_VALUE' }
masked { false }
diff --git a/spec/factories/ci/job_artifacts.rb b/spec/factories/ci/job_artifacts.rb
index bdc6cc2f169..7347c2b87ca 100644
--- a/spec/factories/ci/job_artifacts.rb
+++ b/spec/factories/ci/job_artifacts.rb
@@ -3,7 +3,7 @@
include ActionDispatch::TestProcess
FactoryBot.define do
- factory :ci_job_artifact, class: Ci::JobArtifact do
+ factory :ci_job_artifact, class: 'Ci::JobArtifact' do
job factory: :ci_build
file_type { :archive }
file_format { :zip }
diff --git a/spec/factories/ci/job_variables.rb b/spec/factories/ci/job_variables.rb
index bfc631b8126..472a89d3bef 100644
--- a/spec/factories/ci/job_variables.rb
+++ b/spec/factories/ci/job_variables.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :ci_job_variable, class: Ci::JobVariable do
+ factory :ci_job_variable, class: 'Ci::JobVariable' do
sequence(:key) { |n| "VARIABLE_#{n}" }
value { 'VARIABLE_VALUE' }
diff --git a/spec/factories/ci/pipeline_schedule.rb b/spec/factories/ci/pipeline_schedule.rb
index c752dc1c9dd..fc9044fb8e3 100644
--- a/spec/factories/ci/pipeline_schedule.rb
+++ b/spec/factories/ci/pipeline_schedule.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :ci_pipeline_schedule, class: Ci::PipelineSchedule do
+ factory :ci_pipeline_schedule, class: 'Ci::PipelineSchedule' do
cron { '0 1 * * *' }
cron_timezone { Gitlab::Ci::CronParser::VALID_SYNTAX_SAMPLE_TIME_ZONE }
ref { 'master' }
diff --git a/spec/factories/ci/pipeline_schedule_variables.rb b/spec/factories/ci/pipeline_schedule_variables.rb
index 24913c614f4..d598ba1b1b9 100644
--- a/spec/factories/ci/pipeline_schedule_variables.rb
+++ b/spec/factories/ci/pipeline_schedule_variables.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :ci_pipeline_schedule_variable, class: Ci::PipelineScheduleVariable do
+ factory :ci_pipeline_schedule_variable, class: 'Ci::PipelineScheduleVariable' do
sequence(:key) { |n| "VARIABLE_#{n}" }
value { 'VARIABLE_VALUE' }
variable_type { 'env_var' }
diff --git a/spec/factories/ci/pipeline_variables.rb b/spec/factories/ci/pipeline_variables.rb
index 48f6e35fe70..17aa9962e0b 100644
--- a/spec/factories/ci/pipeline_variables.rb
+++ b/spec/factories/ci/pipeline_variables.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :ci_pipeline_variable, class: Ci::PipelineVariable do
+ factory :ci_pipeline_variable, class: 'Ci::PipelineVariable' do
sequence(:key) { |n| "VARIABLE_#{n}" }
value { 'VARIABLE_VALUE' }
diff --git a/spec/factories/ci/pipelines.rb b/spec/factories/ci/pipelines.rb
index e2ec9d496bc..afc203562ba 100644
--- a/spec/factories/ci/pipelines.rb
+++ b/spec/factories/ci/pipelines.rb
@@ -2,7 +2,7 @@
FactoryBot.define do
# TODO: we can remove this factory in favour of :ci_pipeline
- factory :ci_empty_pipeline, class: Ci::Pipeline do
+ factory :ci_empty_pipeline, class: 'Ci::Pipeline' do
source { :push }
ref { 'master' }
sha { '97de212e80737a608d939f648d959671fb0a0142' }
diff --git a/spec/factories/ci/resource.rb b/spec/factories/ci/resource.rb
new file mode 100644
index 00000000000..515329506e5
--- /dev/null
+++ b/spec/factories/ci/resource.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ci_resource, class: 'Ci::Resource' do
+ resource_group factory: :ci_resource_group
+
+ trait(:retained) do
+ build factory: :ci_build
+ end
+ end
+end
diff --git a/spec/factories/ci/resource_group.rb b/spec/factories/ci/resource_group.rb
new file mode 100644
index 00000000000..7ca89037160
--- /dev/null
+++ b/spec/factories/ci/resource_group.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ci_resource_group, class: 'Ci::ResourceGroup' do
+ project
+ sequence(:key) { |n| "IOS_#{n}" }
+ end
+end
diff --git a/spec/factories/ci/runner_projects.rb b/spec/factories/ci/runner_projects.rb
index bc28544a839..ead9fe10f6e 100644
--- a/spec/factories/ci/runner_projects.rb
+++ b/spec/factories/ci/runner_projects.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :ci_runner_project, class: Ci::RunnerProject do
+ factory :ci_runner_project, class: 'Ci::RunnerProject' do
runner factory: [:ci_runner, :project]
project
end
diff --git a/spec/factories/ci/runners.rb b/spec/factories/ci/runners.rb
index 3697970721b..30f78531324 100644
--- a/spec/factories/ci/runners.rb
+++ b/spec/factories/ci/runners.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :ci_runner, class: Ci::Runner do
+ factory :ci_runner, class: 'Ci::Runner' do
sequence(:description) { |n| "My runner#{n}" }
platform { "darwin" }
diff --git a/spec/factories/ci/sources/pipelines.rb b/spec/factories/ci/sources/pipelines.rb
index 57495502944..93d35097eac 100644
--- a/spec/factories/ci/sources/pipelines.rb
+++ b/spec/factories/ci/sources/pipelines.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :ci_sources_pipeline, class: Ci::Sources::Pipeline do
+ factory :ci_sources_pipeline, class: 'Ci::Sources::Pipeline' do
after(:build) do |source|
source.project ||= source.pipeline.project
source.source_pipeline ||= source.source_job.pipeline
diff --git a/spec/factories/ci/stages.rb b/spec/factories/ci/stages.rb
index 67f4db41d96..4751c04584e 100644
--- a/spec/factories/ci/stages.rb
+++ b/spec/factories/ci/stages.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :ci_stage, class: Ci::LegacyStage do
+ factory :ci_stage, class: 'Ci::LegacyStage' do
skip_create
transient do
@@ -18,7 +18,7 @@ FactoryBot.define do
end
end
- factory :ci_stage_entity, class: Ci::Stage do
+ factory :ci_stage_entity, class: 'Ci::Stage' do
project factory: :project
pipeline factory: :ci_empty_pipeline
diff --git a/spec/factories/ci/trigger_requests.rb b/spec/factories/ci/trigger_requests.rb
index d63bf9868c9..cfffcf222f3 100644
--- a/spec/factories/ci/trigger_requests.rb
+++ b/spec/factories/ci/trigger_requests.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :ci_trigger_request, class: Ci::TriggerRequest do
+ factory :ci_trigger_request, class: 'Ci::TriggerRequest' do
trigger factory: :ci_trigger
end
end
diff --git a/spec/factories/ci/triggers.rb b/spec/factories/ci/triggers.rb
index 6f628ed5435..5089d43f6ff 100644
--- a/spec/factories/ci/triggers.rb
+++ b/spec/factories/ci/triggers.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :ci_trigger_without_token, class: Ci::Trigger do
+ factory :ci_trigger_without_token, class: 'Ci::Trigger' do
owner
factory :ci_trigger do
diff --git a/spec/factories/ci/variables.rb b/spec/factories/ci/variables.rb
index 9d2501c4e18..a4cbf873b0b 100644
--- a/spec/factories/ci/variables.rb
+++ b/spec/factories/ci/variables.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :ci_variable, class: Ci::Variable do
+ factory :ci_variable, class: 'Ci::Variable' do
sequence(:key) { |n| "VARIABLE_#{n}" }
value { 'VARIABLE_VALUE' }
masked { false }
diff --git a/spec/factories/clusters/applications/helm.rb b/spec/factories/clusters/applications/helm.rb
index 0e59f8cb9ec..ff9fc882dcc 100644
--- a/spec/factories/clusters/applications/helm.rb
+++ b/spec/factories/clusters/applications/helm.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :clusters_applications_helm, class: Clusters::Applications::Helm do
+ factory :clusters_applications_helm, class: 'Clusters::Applications::Helm' do
cluster factory: %i(cluster provided_by_gcp)
before(:create) do
@@ -70,39 +70,40 @@ FactoryBot.define do
updated_at { ClusterWaitForAppInstallationWorker::TIMEOUT.ago }
end
- factory :clusters_applications_ingress, class: Clusters::Applications::Ingress do
+ factory :clusters_applications_ingress, class: 'Clusters::Applications::Ingress' do
+ modsecurity_enabled { false }
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
end
- factory :clusters_applications_cert_manager, class: Clusters::Applications::CertManager do
+ factory :clusters_applications_cert_manager, class: 'Clusters::Applications::CertManager' do
email { 'admin@example.com' }
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
end
- factory :clusters_applications_elastic_stack, class: Clusters::Applications::ElasticStack do
+ factory :clusters_applications_elastic_stack, class: 'Clusters::Applications::ElasticStack' do
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
end
- factory :clusters_applications_crossplane, class: Clusters::Applications::Crossplane do
+ factory :clusters_applications_crossplane, class: 'Clusters::Applications::Crossplane' do
stack { 'gcp' }
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
end
- factory :clusters_applications_prometheus, class: Clusters::Applications::Prometheus do
+ factory :clusters_applications_prometheus, class: 'Clusters::Applications::Prometheus' do
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
end
- factory :clusters_applications_runner, class: Clusters::Applications::Runner do
+ factory :clusters_applications_runner, class: 'Clusters::Applications::Runner' do
runner factory: %i(ci_runner)
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
end
- factory :clusters_applications_knative, class: Clusters::Applications::Knative do
+ factory :clusters_applications_knative, class: 'Clusters::Applications::Knative' do
hostname { 'example.com' }
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
end
- factory :clusters_applications_jupyter, class: Clusters::Applications::Jupyter do
+ factory :clusters_applications_jupyter, class: 'Clusters::Applications::Jupyter' do
oauth_application factory: :oauth_application
cluster factory: %i(cluster with_installed_helm provided_by_gcp project)
end
diff --git a/spec/factories/clusters/clusters.rb b/spec/factories/clusters/clusters.rb
index 7121850e5ff..843f87ef7d6 100644
--- a/spec/factories/clusters/clusters.rb
+++ b/spec/factories/clusters/clusters.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :cluster, class: Clusters::Cluster do
+ factory :cluster, class: 'Clusters::Cluster' do
user
name { 'test-cluster' }
cluster_type { :project_type }
diff --git a/spec/factories/clusters/kubernetes_namespaces.rb b/spec/factories/clusters/kubernetes_namespaces.rb
index 75895e1c020..c820bf4da60 100644
--- a/spec/factories/clusters/kubernetes_namespaces.rb
+++ b/spec/factories/clusters/kubernetes_namespaces.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :cluster_kubernetes_namespace, class: Clusters::KubernetesNamespace do
+ factory :cluster_kubernetes_namespace, class: 'Clusters::KubernetesNamespace' do
association :cluster, :project, :provided_by_gcp
after(:build) do |kubernetes_namespace|
diff --git a/spec/factories/clusters/platforms/kubernetes.rb b/spec/factories/clusters/platforms/kubernetes.rb
index dbcb838e9da..822457adaef 100644
--- a/spec/factories/clusters/platforms/kubernetes.rb
+++ b/spec/factories/clusters/platforms/kubernetes.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :cluster_platform_kubernetes, class: Clusters::Platforms::Kubernetes do
+ factory :cluster_platform_kubernetes, class: 'Clusters::Platforms::Kubernetes' do
association :cluster, platform_type: :kubernetes, provider_type: :user
namespace { nil }
api_url { 'https://kubernetes.example.com' }
diff --git a/spec/factories/clusters/projects.rb b/spec/factories/clusters/projects.rb
index 6cda77c6f85..e980279cad9 100644
--- a/spec/factories/clusters/projects.rb
+++ b/spec/factories/clusters/projects.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :cluster_project, class: Clusters::Project do
+ factory :cluster_project, class: 'Clusters::Project' do
cluster
project
end
diff --git a/spec/factories/clusters/providers/aws.rb b/spec/factories/clusters/providers/aws.rb
index e4b10aa5f33..2c54300e606 100644
--- a/spec/factories/clusters/providers/aws.rb
+++ b/spec/factories/clusters/providers/aws.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :cluster_provider_aws, class: Clusters::Providers::Aws do
+ factory :cluster_provider_aws, class: 'Clusters::Providers::Aws' do
association :cluster, platform_type: :kubernetes, provider_type: :aws
role_arn { 'arn:aws:iam::123456789012:role/role-name' }
diff --git a/spec/factories/clusters/providers/gcp.rb b/spec/factories/clusters/providers/gcp.rb
index 216c4d4fa31..c99f4407b42 100644
--- a/spec/factories/clusters/providers/gcp.rb
+++ b/spec/factories/clusters/providers/gcp.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :cluster_provider_gcp, class: Clusters::Providers::Gcp do
+ factory :cluster_provider_gcp, class: 'Clusters::Providers::Gcp' do
association :cluster, platform_type: :kubernetes, provider_type: :gcp
gcp_project_id { 'test-gcp-project' }
diff --git a/spec/factories/commit_statuses.rb b/spec/factories/commit_statuses.rb
index 5d635d93ff2..fa10b37cdbf 100644
--- a/spec/factories/commit_statuses.rb
+++ b/spec/factories/commit_statuses.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :commit_status, class: CommitStatus do
+ factory :commit_status, class: 'CommitStatus' do
name { 'default' }
stage { 'test' }
stage_idx { 0 }
@@ -35,6 +35,10 @@ FactoryBot.define do
status { 'pending' }
end
+ trait :waiting_for_resource do
+ status { 'waiting_for_resource' }
+ end
+
trait :preparing do
status { 'preparing' }
end
@@ -55,7 +59,7 @@ FactoryBot.define do
build.project = build.pipeline.project
end
- factory :generic_commit_status, class: GenericCommitStatus do
+ factory :generic_commit_status, class: 'GenericCommitStatus' do
name { 'generic' }
description { 'external commit status' }
end
diff --git a/spec/factories/container_expiration_policies.rb b/spec/factories/container_expiration_policies.rb
new file mode 100644
index 00000000000..951127a4aa7
--- /dev/null
+++ b/spec/factories/container_expiration_policies.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :container_expiration_policy, class: 'ContainerExpirationPolicy' do
+ association :project, factory: [:project, :without_container_expiration_policy]
+ cadence { '1d' }
+ enabled { true }
+
+ trait :runnable do
+ after(:create) do |policy|
+ # next_run_at will be set before_save to Time.now + cadence, so this ensures the policy is active
+ policy.update_column(:next_run_at, Time.zone.now - 1.day)
+ end
+ end
+
+ trait :disabled do
+ enabled { false }
+ end
+ end
+end
diff --git a/spec/factories/deployments.rb b/spec/factories/deployments.rb
index f8738d28d83..f92e213a385 100644
--- a/spec/factories/deployments.rb
+++ b/spec/factories/deployments.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :deployment, class: Deployment do
+ factory :deployment, class: 'Deployment' do
sha { 'b83d6e391c22777fca1ed3012fce84f633d7fed0' }
ref { 'master' }
tag { false }
diff --git a/spec/factories/dev_ops_score_metrics.rb b/spec/factories/dev_ops_score_metrics.rb
index 0d9d7059e7f..1d1f1a2c39e 100644
--- a/spec/factories/dev_ops_score_metrics.rb
+++ b/spec/factories/dev_ops_score_metrics.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :dev_ops_score_metric, class: DevOpsScore::Metric do
+ factory :dev_ops_score_metric, class: 'DevOpsScore::Metric' do
leader_issues { 9.256 }
instance_issues { 1.234 }
percentage_issues { 13.331 }
diff --git a/spec/factories/environments.rb b/spec/factories/environments.rb
index 44aa4527e12..323ea2d478b 100644
--- a/spec/factories/environments.rb
+++ b/spec/factories/environments.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :environment, class: Environment do
+ factory :environment, class: 'Environment' do
sequence(:name) { |n| "environment#{n}" }
association :project, :repository
diff --git a/spec/factories/error_tracking/detailed_error.rb b/spec/factories/error_tracking/detailed_error.rb
index f12c327d403..07b6c53e3cd 100644
--- a/spec/factories/error_tracking/detailed_error.rb
+++ b/spec/factories/error_tracking/detailed_error.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :detailed_error_tracking_error, class: Gitlab::ErrorTracking::DetailedError do
+ factory :detailed_error_tracking_error, class: 'Gitlab::ErrorTracking::DetailedError' do
id { '1' }
title { 'title' }
type { 'error' }
@@ -18,6 +18,12 @@ FactoryBot.define do
project_slug { 'project_name' }
short_id { 'ID' }
status { 'unresolved' }
+ tags do
+ {
+ level: 'error',
+ logger: 'rails'
+ }
+ end
frequency do
[
[Time.now.to_i, 10]
@@ -28,6 +34,7 @@ FactoryBot.define do
last_release_last_commit { '9ad419c86' }
first_release_short_version { 'abc123' }
last_release_short_version { 'abc123' }
+ first_release_version { '12345678' }
skip_create
end
diff --git a/spec/factories/error_tracking/error.rb b/spec/factories/error_tracking/error.rb
index 541bc410462..5be1f074555 100644
--- a/spec/factories/error_tracking/error.rb
+++ b/spec/factories/error_tracking/error.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :error_tracking_error, class: Gitlab::ErrorTracking::Error do
+ factory :error_tracking_error, class: 'Gitlab::ErrorTracking::Error' do
id { 'id' }
title { 'title' }
type { 'error' }
diff --git a/spec/factories/error_tracking/error_event.rb b/spec/factories/error_tracking/error_event.rb
index 1590095f1bd..880fdf17fae 100644
--- a/spec/factories/error_tracking/error_event.rb
+++ b/spec/factories/error_tracking/error_event.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :error_tracking_error_event, class: Gitlab::ErrorTracking::ErrorEvent do
+ factory :error_tracking_error_event, class: 'Gitlab::ErrorTracking::ErrorEvent' do
issue_id { 'id' }
date_received { Time.now.iso8601 }
stack_trace_entries do
diff --git a/spec/factories/error_tracking/project.rb b/spec/factories/error_tracking/project.rb
index 885d398d433..4cbec312622 100644
--- a/spec/factories/error_tracking/project.rb
+++ b/spec/factories/error_tracking/project.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :error_tracking_project, class: Gitlab::ErrorTracking::Project do
+ factory :error_tracking_project, class: 'Gitlab::ErrorTracking::Project' do
id { '1' }
name { 'Sentry Example' }
slug { 'sentry-example' }
diff --git a/spec/factories/events.rb b/spec/factories/events.rb
index 4eedcd02c9a..81d57a25058 100644
--- a/spec/factories/events.rb
+++ b/spec/factories/events.rb
@@ -24,7 +24,7 @@ FactoryBot.define do
end
end
- factory :push_event, class: PushEvent do
+ factory :push_event, class: 'PushEvent' do
project factory: :project_empty_repo
author(factory: :user) { project.creator }
action { Event::PUSHED }
diff --git a/spec/factories/gitaly/commit.rb b/spec/factories/gitaly/commit.rb
index ef5301db770..2ed201e9aac 100644
--- a/spec/factories/gitaly/commit.rb
+++ b/spec/factories/gitaly/commit.rb
@@ -3,7 +3,7 @@
FactoryBot.define do
sequence(:gitaly_commit_id) { Digest::SHA1.hexdigest(Time.now.to_f.to_s) }
- factory :gitaly_commit, class: Gitaly::GitCommit do
+ factory :gitaly_commit, class: 'Gitaly::GitCommit' do
skip_create
id { generate(:gitaly_commit_id) }
diff --git a/spec/factories/gitaly/commit_author.rb b/spec/factories/gitaly/commit_author.rb
index 51dcd8a623b..31097118d1f 100644
--- a/spec/factories/gitaly/commit_author.rb
+++ b/spec/factories/gitaly/commit_author.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :gitaly_commit_author, class: Gitaly::CommitAuthor do
+ factory :gitaly_commit_author, class: 'Gitaly::CommitAuthor' do
skip_create
name { generate(:name) }
diff --git a/spec/factories/gitaly/tag.rb b/spec/factories/gitaly/tag.rb
index a7a84753090..9dd1b8301c1 100644
--- a/spec/factories/gitaly/tag.rb
+++ b/spec/factories/gitaly/tag.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :gitaly_tag, class: Gitaly::Tag do
+ factory :gitaly_tag, class: 'Gitaly::Tag' do
skip_create
name { 'v3.1.4' }
diff --git a/spec/factories/grafana_integrations.rb b/spec/factories/grafana_integrations.rb
index ae819ca828c..a647ef8d2ec 100644
--- a/spec/factories/grafana_integrations.rb
+++ b/spec/factories/grafana_integrations.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :grafana_integration, class: GrafanaIntegration do
+ factory :grafana_integration, class: 'GrafanaIntegration' do
project
grafana_url { 'https://grafana.example.com' }
token { SecureRandom.hex(10) }
diff --git a/spec/factories/groups.rb b/spec/factories/groups.rb
index 93c01f8034d..4b6c1756d1e 100644
--- a/spec/factories/groups.rb
+++ b/spec/factories/groups.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :group, class: Group, parent: :namespace do
+ factory :group, class: 'Group', parent: :namespace do
sequence(:name) { |n| "group#{n}" }
path { name.downcase.gsub(/\s/, '_') }
type { 'Group' }
diff --git a/spec/factories/import_states.rb b/spec/factories/import_states.rb
index 576f68ab57f..4dca78b1059 100644
--- a/spec/factories/import_states.rb
+++ b/spec/factories/import_states.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :import_state, class: ProjectImportState do
+ factory :import_state, class: 'ProjectImportState' do
status { :none }
association :project, factory: :project
diff --git a/spec/factories/labels.rb b/spec/factories/labels.rb
index 89fcd8b1a9d..81d3e4be6fd 100644
--- a/spec/factories/labels.rb
+++ b/spec/factories/labels.rb
@@ -6,7 +6,7 @@ FactoryBot.define do
color { "#990000" }
end
- factory :label, traits: [:base_label], class: ProjectLabel do
+ factory :label, traits: [:base_label], class: 'ProjectLabel' do
project
transient do
diff --git a/spec/factories/namespace/aggregation_schedules.rb b/spec/factories/namespace/aggregation_schedules.rb
index c172c3360e2..5962c46dee6 100644
--- a/spec/factories/namespace/aggregation_schedules.rb
+++ b/spec/factories/namespace/aggregation_schedules.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :namespace_aggregation_schedules, class: Namespace::AggregationSchedule do
+ factory :namespace_aggregation_schedules, class: 'Namespace::AggregationSchedule' do
namespace
end
end
diff --git a/spec/factories/namespace/root_storage_statistics.rb b/spec/factories/namespace/root_storage_statistics.rb
index 54c5921eb44..3b11d7a6ec7 100644
--- a/spec/factories/namespace/root_storage_statistics.rb
+++ b/spec/factories/namespace/root_storage_statistics.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :namespace_root_storage_statistics, class: Namespace::RootStorageStatistics do
+ factory :namespace_root_storage_statistics, class: 'Namespace::RootStorageStatistics' do
namespace
end
end
diff --git a/spec/factories/notes.rb b/spec/factories/notes.rb
index 330f5276422..11fc5060cf0 100644
--- a/spec/factories/notes.rb
+++ b/spec/factories/notes.rb
@@ -18,9 +18,9 @@ FactoryBot.define do
factory :note_on_personal_snippet, traits: [:on_personal_snippet]
factory :system_note, traits: [:system]
- factory :discussion_note, class: DiscussionNote
+ factory :discussion_note, class: 'DiscussionNote'
- factory :discussion_note_on_merge_request, traits: [:on_merge_request], class: DiscussionNote do
+ factory :discussion_note_on_merge_request, traits: [:on_merge_request], class: 'DiscussionNote' do
association :project, :repository
trait :resolved do
@@ -29,22 +29,22 @@ FactoryBot.define do
end
end
- factory :discussion_note_on_issue, traits: [:on_issue], class: DiscussionNote
+ factory :discussion_note_on_issue, traits: [:on_issue], class: 'DiscussionNote'
- factory :discussion_note_on_commit, traits: [:on_commit], class: DiscussionNote
+ factory :discussion_note_on_commit, traits: [:on_commit], class: 'DiscussionNote'
- factory :discussion_note_on_personal_snippet, traits: [:on_personal_snippet], class: DiscussionNote
+ factory :discussion_note_on_personal_snippet, traits: [:on_personal_snippet], class: 'DiscussionNote'
- factory :discussion_note_on_snippet, traits: [:on_snippet], class: DiscussionNote
+ factory :discussion_note_on_snippet, traits: [:on_snippet], class: 'DiscussionNote'
- factory :legacy_diff_note_on_commit, traits: [:on_commit, :legacy_diff_note], class: LegacyDiffNote
+ factory :legacy_diff_note_on_commit, traits: [:on_commit, :legacy_diff_note], class: 'LegacyDiffNote'
- factory :legacy_diff_note_on_merge_request, traits: [:on_merge_request, :legacy_diff_note], class: LegacyDiffNote do
+ factory :legacy_diff_note_on_merge_request, traits: [:on_merge_request, :legacy_diff_note], class: 'LegacyDiffNote' do
association :project, :repository
position { '' }
end
- factory :diff_note_on_merge_request, traits: [:on_merge_request], class: DiffNote do
+ factory :diff_note_on_merge_request, traits: [:on_merge_request], class: 'DiffNote' do
association :project, :repository
transient do
@@ -95,7 +95,7 @@ FactoryBot.define do
end
end
- factory :diff_note_on_commit, traits: [:on_commit], class: DiffNote do
+ factory :diff_note_on_commit, traits: [:on_commit], class: 'DiffNote' do
association :project, :repository
transient do
diff --git a/spec/factories/project_error_tracking_settings.rb b/spec/factories/project_error_tracking_settings.rb
index f90a2d17846..7af881f4214 100644
--- a/spec/factories/project_error_tracking_settings.rb
+++ b/spec/factories/project_error_tracking_settings.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :project_error_tracking_setting, class: ErrorTracking::ProjectErrorTrackingSetting do
+ factory :project_error_tracking_setting, class: 'ErrorTracking::ProjectErrorTrackingSetting' do
project
api_url { 'https://gitlab.com/api/0/projects/sentry-org/sentry-project' }
enabled { true }
diff --git a/spec/factories/project_metrics_settings.rb b/spec/factories/project_metrics_settings.rb
index 51b2ce0e0e9..b5c0fd88a6c 100644
--- a/spec/factories/project_metrics_settings.rb
+++ b/spec/factories/project_metrics_settings.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :project_metrics_setting, class: ProjectMetricsSetting do
+ factory :project_metrics_setting, class: 'ProjectMetricsSetting' do
project
external_dashboard_url { 'https://grafana.com' }
end
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index 2608f717f1c..490ae9e84e7 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -25,6 +25,7 @@ FactoryBot.define do
builds_access_level { ProjectFeature::ENABLED }
snippets_access_level { ProjectFeature::ENABLED }
issues_access_level { ProjectFeature::ENABLED }
+ forking_access_level { ProjectFeature::ENABLED }
merge_requests_access_level { ProjectFeature::ENABLED }
repository_access_level { ProjectFeature::ENABLED }
pages_access_level do
@@ -48,6 +49,7 @@ FactoryBot.define do
builds_access_level: builds_access_level,
snippets_access_level: evaluator.snippets_access_level,
issues_access_level: evaluator.issues_access_level,
+ forking_access_level: evaluator.forking_access_level,
merge_requests_access_level: merge_requests_access_level,
repository_access_level: evaluator.repository_access_level
}
@@ -137,6 +139,12 @@ FactoryBot.define do
end
end
+ trait :without_container_expiration_policy do
+ after(:build) do |project|
+ project.class.skip_callback(:create, :after, :create_container_expiration_policy, raise: false)
+ end
+ end
+
# Build a custom repository by specifying a hash of `filename => content` in
# the transient `files` attribute. Each file will be created in its own
# commit, operating against the master branch. So, the following call:
@@ -258,6 +266,9 @@ FactoryBot.define do
trait(:issues_disabled) { issues_access_level { ProjectFeature::DISABLED } }
trait(:issues_enabled) { issues_access_level { ProjectFeature::ENABLED } }
trait(:issues_private) { issues_access_level { ProjectFeature::PRIVATE } }
+ trait(:forking_disabled) { forking_access_level { ProjectFeature::DISABLED } }
+ trait(:forking_enabled) { forking_access_level { ProjectFeature::ENABLED } }
+ trait(:forking_private) { forking_access_level { ProjectFeature::PRIVATE } }
trait(:merge_requests_enabled) { merge_requests_access_level { ProjectFeature::ENABLED } }
trait(:merge_requests_disabled) { merge_requests_access_level { ProjectFeature::DISABLED } }
trait(:merge_requests_private) { merge_requests_access_level { ProjectFeature::PRIVATE } }
diff --git a/spec/factories/prometheus_metrics.rb b/spec/factories/prometheus_metrics.rb
index f6b58cf84c3..83e3845f1c3 100644
--- a/spec/factories/prometheus_metrics.rb
+++ b/spec/factories/prometheus_metrics.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :prometheus_metric, class: PrometheusMetric do
+ factory :prometheus_metric, class: 'PrometheusMetric' do
title { 'title' }
query { 'avg(metric)' }
y_label { 'y_label' }
diff --git a/spec/factories/releases.rb b/spec/factories/releases.rb
index 182ee2378d4..0e79f2e6d3a 100644
--- a/spec/factories/releases.rb
+++ b/spec/factories/releases.rb
@@ -20,5 +20,14 @@ FactoryBot.define do
create(:evidence, release: release)
end
end
+
+ trait :with_milestones do
+ transient do
+ milestones_count { 2 }
+ end
+ after(:create) do |release, evaluator|
+ create_list(:milestone, evaluator.milestones_count, project: evaluator.project, releases: [release])
+ end
+ end
end
end
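
The :with_milestones trait above uses a transient milestones_count attribute, so specs can control how many milestones are attached. A minimal usage sketch, not part of the patch, assuming the release/milestone association this factory exercises:

release = create(:release, :with_milestones, milestones_count: 3)
release.milestones.count # => 3, each created in the release's project
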
diff --git a/spec/factories/releases/link.rb b/spec/factories/releases/link.rb
index d23db6d4bad..82446dbdb69 100644
--- a/spec/factories/releases/link.rb
+++ b/spec/factories/releases/link.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :release_link, class: ::Releases::Link do
+ factory :release_link, class: '::Releases::Link' do
release
sequence(:name) { |n| "release-18.#{n}.dmg" }
sequence(:url) { |n| "https://example.com/scrambled-url/app-#{n}.zip" }
diff --git a/spec/factories/resource_weight_events.rb b/spec/factories/resource_weight_events.rb
new file mode 100644
index 00000000000..cb9a34df332
--- /dev/null
+++ b/spec/factories/resource_weight_events.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :resource_weight_event do
+ issue { create(:issue) }
+ user { issue&.author || create(:user) }
+ end
+end
diff --git a/spec/factories/sentry_issue.rb b/spec/factories/sentry_issue.rb
index c9886f1673a..e729095432c 100644
--- a/spec/factories/sentry_issue.rb
+++ b/spec/factories/sentry_issue.rb
@@ -1,8 +1,8 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :sentry_issue, class: SentryIssue do
+ factory :sentry_issue, class: 'SentryIssue' do
issue
- sentry_issue_identifier { 1234567891 }
+ sequence(:sentry_issue_identifier) { |n| 10000000 + n }
end
end
diff --git a/spec/factories/serverless/domain_cluster.rb b/spec/factories/serverless/domain_cluster.rb
index 290d3fc152e..5adfcacbd7f 100644
--- a/spec/factories/serverless/domain_cluster.rb
+++ b/spec/factories/serverless/domain_cluster.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :serverless_domain_cluster, class: Serverless::DomainCluster do
+ factory :serverless_domain_cluster, class: 'Serverless::DomainCluster' do
pages_domain { create(:pages_domain) }
knative { create(:clusters_applications_knative) }
creator { create(:user) }
diff --git a/spec/factories/services.rb b/spec/factories/services.rb
index b6bb30d1f93..5d62b3cb9c9 100644
--- a/spec/factories/services.rb
+++ b/spec/factories/services.rb
@@ -6,7 +6,7 @@ FactoryBot.define do
type { 'Service' }
end
- factory :custom_issue_tracker_service, class: CustomIssueTrackerService do
+ factory :custom_issue_tracker_service, class: 'CustomIssueTrackerService' do
project
active { true }
issue_tracker
diff --git a/spec/factories/terms.rb b/spec/factories/terms.rb
index b98a2453f7e..915a6099c2a 100644
--- a/spec/factories/terms.rb
+++ b/spec/factories/terms.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :term, class: ApplicationSetting::Term do
+ factory :term, class: 'ApplicationSetting::Term' do
terms { "Lorem ipsum dolor sit amet, consectetur adipiscing elit." }
end
end
diff --git a/spec/factories/todos.rb b/spec/factories/todos.rb
index bb91fc9ac8e..0b5d00cff67 100644
--- a/spec/factories/todos.rb
+++ b/spec/factories/todos.rb
@@ -46,7 +46,7 @@ FactoryBot.define do
end
end
- factory :on_commit_todo, class: Todo do
+ factory :on_commit_todo, class: 'Todo' do
project
author
user
diff --git a/spec/features/admin/admin_broadcast_messages_spec.rb b/spec/features/admin/admin_broadcast_messages_spec.rb
index c5a302ce78b..bf7f8563e68 100644
--- a/spec/features/admin/admin_broadcast_messages_spec.rb
+++ b/spec/features/admin/admin_broadcast_messages_spec.rb
@@ -13,7 +13,7 @@ describe 'Admin Broadcast Messages' do
expect(page).to have_content 'Migration to new server'
end
- it 'Create a customized broadcast message' do
+ it 'creates a customized broadcast banner message' do
fill_in 'broadcast_message_message', with: 'Application update from **4:00 CST to 5:00 CST**'
fill_in 'broadcast_message_color', with: '#f2dede'
fill_in 'broadcast_message_target_path', with: '*/user_onboarded'
@@ -28,6 +28,20 @@ describe 'Admin Broadcast Messages' do
expect(page).to have_selector %(div[style="background-color: #f2dede; color: #b94a48"])
end
+ it 'creates a customized broadcast notification message' do
+ fill_in 'broadcast_message_message', with: 'Application update from **4:00 CST to 5:00 CST**'
+ fill_in 'broadcast_message_target_path', with: '*/user_onboarded'
+ select 'Notification', from: 'broadcast_message_broadcast_type'
+ select Date.today.next_year.year, from: 'broadcast_message_ends_at_1i'
+ click_button 'Add broadcast message'
+
+ expect(current_path).to eq admin_broadcast_messages_path
+ expect(page).to have_content 'Application update from 4:00 CST to 5:00 CST'
+ expect(page).to have_content '*/user_onboarded'
+ expect(page).to have_content 'Notification'
+ expect(page).to have_selector 'strong', text: '4:00 CST to 5:00 CST'
+ end
+
it 'edits an existing broadcast message' do
click_link 'Edit'
fill_in 'broadcast_message_message', with: 'Application update RIGHT NOW'
@@ -44,10 +58,20 @@ describe 'Admin Broadcast Messages' do
expect(page).not_to have_content 'Migration to new server'
end
- it 'Live preview a customized broadcast message', :js do
+ it 'updates a preview of a customized broadcast banner message', :js do
+ fill_in 'broadcast_message_message', with: "Live **Markdown** previews. :tada:"
+
+ page.within('.js-broadcast-banner-message-preview') do
+ expect(page).to have_selector('strong', text: 'Markdown')
+ expect(page).to have_emoji('tada')
+ end
+ end
+
+ it 'updates a preview of a customized broadcast notification message', :js do
fill_in 'broadcast_message_message', with: "Live **Markdown** previews. :tada:"
+ select 'Notification', from: 'broadcast_message_broadcast_type'
- page.within('.broadcast-message-preview') do
+ page.within('.js-broadcast-notification-message-preview') do
expect(page).to have_selector('strong', text: 'Markdown')
expect(page).to have_emoji('tada')
end
diff --git a/spec/features/admin/admin_groups_spec.rb b/spec/features/admin/admin_groups_spec.rb
index 257e5cb8bf0..9a4889a0335 100644
--- a/spec/features/admin/admin_groups_spec.rb
+++ b/spec/features/admin/admin_groups_spec.rb
@@ -167,14 +167,14 @@ describe 'Admin Groups' do
it 'adds an admin to a group as developer', :js do
visit group_group_members_path(group)
- page.within '.users-group-form' do
+ page.within '.invite-users-form' do
select2(current_user.id, from: '#user_ids', multiple: true)
select 'Developer', from: 'access_level'
end
- click_button 'Add to group'
+ click_button 'Invite'
- page.within '.content-list' do
+ page.within '[data-qa-selector="members_list"]' do
expect(page).to have_content(current_user.name)
expect(page).to have_content('Developer')
end
@@ -187,7 +187,7 @@ describe 'Admin Groups' do
visit group_group_members_path(group)
- page.within '.content-list' do
+ page.within '[data-qa-selector="members_list"]' do
expect(page).to have_content(current_user.name)
expect(page).to have_content('Developer')
end
@@ -196,7 +196,7 @@ describe 'Admin Groups' do
visit group_group_members_path(group)
- page.within '.content-list' do
+ page.within '[data-qa-selector="members_list"]' do
expect(page).not_to have_content(current_user.name)
expect(page).not_to have_content('Developer')
end
diff --git a/spec/features/admin/admin_hooks_spec.rb b/spec/features/admin/admin_hooks_spec.rb
index b4bcbe9d812..64326f3be32 100644
--- a/spec/features/admin/admin_hooks_spec.rb
+++ b/spec/features/admin/admin_hooks_spec.rb
@@ -28,11 +28,11 @@ describe 'Admin::Hooks' do
end
it 'renders plugins list as well' do
- allow(Gitlab::Plugin).to receive(:files).and_return(['foo.rb', 'bar.clj'])
+ allow(Gitlab::FileHook).to receive(:files).and_return(['foo.rb', 'bar.clj'])
visit admin_hooks_path
- expect(page).to have_content('Plugins')
+ expect(page).to have_content('File Hooks')
expect(page).to have_content('foo.rb')
expect(page).to have_content('bar.clj')
end
diff --git a/spec/features/admin/admin_projects_spec.rb b/spec/features/admin/admin_projects_spec.rb
index 7c40ac5bde3..d1889d3a89a 100644
--- a/spec/features/admin/admin_projects_spec.rb
+++ b/spec/features/admin/admin_projects_spec.rb
@@ -98,12 +98,12 @@ describe "Admin::Projects" do
it 'adds an admin to a project as developer', :js do
visit project_project_members_path(project)
- page.within '.users-project-form' do
+ page.within '.invite-users-form' do
select2(current_user.id, from: '#user_ids', multiple: true)
select 'Developer', from: 'access_level'
end
- click_button 'Add to project'
+ click_button 'Invite'
page.within '.content-list' do
expect(page).to have_content(current_user.name)
diff --git a/spec/features/admin/admin_runners_spec.rb b/spec/features/admin/admin_runners_spec.rb
index 0d5f5df71b6..6bcadda6523 100644
--- a/spec/features/admin/admin_runners_spec.rb
+++ b/spec/features/admin/admin_runners_spec.rb
@@ -57,7 +57,7 @@ describe "Admin Runners" do
expect(page).to have_content 'runner-active'
expect(page).to have_content 'runner-paused'
- input_filtered_search_keys('status:active')
+ input_filtered_search_keys('status=active')
expect(page).to have_content 'runner-active'
expect(page).not_to have_content 'runner-paused'
end
@@ -68,7 +68,7 @@ describe "Admin Runners" do
visit admin_runners_path
- input_filtered_search_keys('status:offline')
+ input_filtered_search_keys('status=offline')
expect(page).not_to have_content 'runner-active'
expect(page).not_to have_content 'runner-paused'
@@ -83,12 +83,12 @@ describe "Admin Runners" do
visit admin_runners_path
- input_filtered_search_keys('status:active')
+ input_filtered_search_keys('status=active')
expect(page).to have_content 'runner-a-1'
expect(page).to have_content 'runner-b-1'
expect(page).not_to have_content 'runner-a-2'
- input_filtered_search_keys('status:active runner-a')
+ input_filtered_search_keys('status=active runner-a')
expect(page).to have_content 'runner-a-1'
expect(page).not_to have_content 'runner-b-1'
expect(page).not_to have_content 'runner-a-2'
@@ -105,7 +105,7 @@ describe "Admin Runners" do
expect(page).to have_content 'runner-project'
expect(page).to have_content 'runner-group'
- input_filtered_search_keys('type:project_type')
+ input_filtered_search_keys('type=project_type')
expect(page).to have_content 'runner-project'
expect(page).not_to have_content 'runner-group'
end
@@ -116,7 +116,7 @@ describe "Admin Runners" do
visit admin_runners_path
- input_filtered_search_keys('type:instance_type')
+ input_filtered_search_keys('type=instance_type')
expect(page).not_to have_content 'runner-project'
expect(page).not_to have_content 'runner-group'
@@ -131,12 +131,12 @@ describe "Admin Runners" do
visit admin_runners_path
- input_filtered_search_keys('type:project_type')
+ input_filtered_search_keys('type=project_type')
expect(page).to have_content 'runner-a-1'
expect(page).to have_content 'runner-b-1'
expect(page).not_to have_content 'runner-a-2'
- input_filtered_search_keys('type:project_type runner-a')
+ input_filtered_search_keys('type=project_type runner-a')
expect(page).to have_content 'runner-a-1'
expect(page).not_to have_content 'runner-b-1'
expect(page).not_to have_content 'runner-a-2'
@@ -153,7 +153,7 @@ describe "Admin Runners" do
expect(page).to have_content 'runner-blue'
expect(page).to have_content 'runner-red'
- input_filtered_search_keys('tag:blue')
+ input_filtered_search_keys('tag=blue')
expect(page).to have_content 'runner-blue'
expect(page).not_to have_content 'runner-red'
@@ -165,7 +165,7 @@ describe "Admin Runners" do
visit admin_runners_path
- input_filtered_search_keys('tag:red')
+ input_filtered_search_keys('tag=red')
expect(page).not_to have_content 'runner-blue'
expect(page).not_to have_content 'runner-blue'
@@ -179,13 +179,13 @@ describe "Admin Runners" do
visit admin_runners_path
- input_filtered_search_keys('tag:blue')
+ input_filtered_search_keys('tag=blue')
expect(page).to have_content 'runner-a-1'
expect(page).to have_content 'runner-b-1'
expect(page).not_to have_content 'runner-a-2'
- input_filtered_search_keys('tag:blue runner-a')
+ input_filtered_search_keys('tag=blue runner-a')
expect(page).to have_content 'runner-a-1'
expect(page).not_to have_content 'runner-b-1'
diff --git a/spec/features/boards/boards_spec.rb b/spec/features/boards/boards_spec.rb
index c740e4e26d9..8aad598b843 100644
--- a/spec/features/boards/boards_spec.rb
+++ b/spec/features/boards/boards_spec.rb
@@ -163,9 +163,7 @@ describe 'Issue Boards', :js do
end
it 'infinite scrolls list' do
- 50.times do
- create(:labeled_issue, project: project, labels: [planning])
- end
+ create_list(:labeled_issue, 50, project: project, labels: [planning])
visit project_board_path(project, board)
wait_for_requests
@@ -475,9 +473,7 @@ describe 'Issue Boards', :js do
end
it 'infinite scrolls list with label filter' do
- 50.times do
- create(:labeled_issue, project: project, labels: [planning, testing])
- end
+ create_list(:labeled_issue, 50, project: project, labels: [planning, testing])
set_filter("label", testing.title)
click_filter_link(testing.title)
@@ -628,7 +624,7 @@ describe 'Issue Boards', :js do
end
def set_filter(type, text)
- find('.filtered-search').native.send_keys("#{type}:#{text}")
+ find('.filtered-search').native.send_keys("#{type}=#{text}")
end
def submit_filter
diff --git a/spec/features/boards/modal_filter_spec.rb b/spec/features/boards/modal_filter_spec.rb
index 70bc067f79d..d14041ecf3f 100644
--- a/spec/features/boards/modal_filter_spec.rb
+++ b/spec/features/boards/modal_filter_spec.rb
@@ -211,7 +211,7 @@ describe 'Issue Boards add issue modal filtering', :js do
end
def set_filter(type, text = '')
- find('.add-issues-modal .filtered-search').native.send_keys("#{type}:#{text}")
+ find('.add-issues-modal .filtered-search').native.send_keys("#{type}=#{text}")
end
def submit_filter
diff --git a/spec/features/boards/sidebar_spec.rb b/spec/features/boards/sidebar_spec.rb
index 9143db16b87..c7edb574f19 100644
--- a/spec/features/boards/sidebar_spec.rb
+++ b/spec/features/boards/sidebar_spec.rb
@@ -318,7 +318,9 @@ describe 'Issue Boards', :js do
wait_for_requests
click_link bug.title
- within('.dropdown-menu-labels') { expect(page).to have_selector('.is-active', count: 3) }
+
+ wait_for_requests
+
click_link regression.title
wait_for_requests
diff --git a/spec/features/clusters/installing_applications_shared_examples.rb b/spec/features/clusters/installing_applications_shared_examples.rb
index 988cd228c1c..20648ed3d46 100644
--- a/spec/features/clusters/installing_applications_shared_examples.rb
+++ b/spec/features/clusters/installing_applications_shared_examples.rb
@@ -181,11 +181,8 @@ shared_examples "installing applications on a cluster" do
context 'when user installs Elastic Stack' do
before do
allow(ClusterInstallAppWorker).to receive(:perform_async)
- allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_in)
- allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_async)
create(:clusters_applications_helm, :installed, cluster: cluster)
- create(:clusters_applications_ingress, :installed, external_ip: '127.0.0.1', cluster: cluster)
page.within('.js-cluster-application-row-elastic_stack') do
click_button 'Install'
diff --git a/spec/features/cycle_analytics_spec.rb b/spec/features/cycle_analytics_spec.rb
index e9751aa2e72..0cafdb4e982 100644
--- a/spec/features/cycle_analytics_spec.rb
+++ b/spec/features/cycle_analytics_spec.rb
@@ -76,7 +76,7 @@ describe 'Cycle Analytics', :js do
click_stage('Staging')
expect_build_to_be_present
- click_stage('Production')
+ click_stage('Total')
expect_issue_to_be_present
end
diff --git a/spec/features/dashboard/instance_statistics_spec.rb b/spec/features/dashboard/instance_statistics_spec.rb
index 21ee2796bd8..feb568d8ef4 100644
--- a/spec/features/dashboard/instance_statistics_spec.rb
+++ b/spec/features/dashboard/instance_statistics_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe 'Showing instance statistics' do
+describe 'Showing analytics' do
before do
sign_in user if user
end
@@ -13,10 +13,10 @@ describe 'Showing instance statistics' do
context 'for unauthenticated users' do
let(:user) { nil }
- it 'does not show the instance statistics link' do
+ it 'does not show the Analytics link' do
subject
- expect(page).not_to have_link('Instance Statistics')
+ expect(page).not_to have_link('Analytics')
end
end
@@ -28,10 +28,10 @@ describe 'Showing instance statistics' do
stub_application_setting(instance_statistics_visibility_private: false)
end
- it 'shows the instance statistics link' do
+ it 'shows the analytics link' do
subject
- expect(page).to have_link('Instance Statistics')
+ expect(page).to have_link('Analytics')
end
end
@@ -40,10 +40,14 @@ describe 'Showing instance statistics' do
stub_application_setting(instance_statistics_visibility_private: true)
end
- it 'shows the instance statistics link' do
+ it 'does not show the analytics link' do
subject
- expect(page).not_to have_link('Instance Statistics')
+ # Skipping this test on EE as there is an EE specific spec for this functionality
+ # ee/spec/features/dashboards/analytics_spec.rb
+ skip if Gitlab.ee?
+
+ expect(page).not_to have_link('Analytics')
end
end
end
@@ -51,10 +55,10 @@ describe 'Showing instance statistics' do
context 'for admins' do
let(:user) { create(:admin) }
- it 'shows the instance statistics link' do
+ it 'shows the analytics link' do
subject
- expect(page).to have_link('Instance Statistics')
+ expect(page).to have_link('Analytics')
end
end
end
diff --git a/spec/features/dashboard/issues_filter_spec.rb b/spec/features/dashboard/issues_filter_spec.rb
index 1352e1bd8fc..8e7fd1f500f 100644
--- a/spec/features/dashboard/issues_filter_spec.rb
+++ b/spec/features/dashboard/issues_filter_spec.rb
@@ -28,14 +28,14 @@ describe 'Dashboard Issues filtering', :js do
context 'filtering by milestone' do
it 'shows all issues with no milestone' do
- input_filtered_search("milestone:none")
+ input_filtered_search("milestone=none")
expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
expect(page).to have_selector('.issue', count: 1)
end
it 'shows all issues with the selected milestone' do
- input_filtered_search("milestone:%\"#{milestone.title}\"")
+ input_filtered_search("milestone=%\"#{milestone.title}\"")
expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
expect(page).to have_selector('.issue', count: 1)
@@ -63,7 +63,7 @@ describe 'Dashboard Issues filtering', :js do
let!(:label_link) { create(:label_link, label: label, target: issue) }
it 'shows all issues with the selected label' do
- input_filtered_search("label:~#{label.title}")
+ input_filtered_search("label=~#{label.title}")
page.within 'ul.content-list' do
expect(page).to have_content issue.title
diff --git a/spec/features/dashboard/issues_spec.rb b/spec/features/dashboard/issues_spec.rb
index cb055ff8416..a2ead1b5d33 100644
--- a/spec/features/dashboard/issues_spec.rb
+++ b/spec/features/dashboard/issues_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe 'Dashboard Issues' do
it 'shows issues when current user is author', :js do
reset_filters
- input_filtered_search("author:#{current_user.to_reference}")
+ input_filtered_search("author=#{current_user.to_reference}")
expect(page).to have_content(authored_issue.title)
expect(page).to have_content(authored_issue_on_public_project.title)
diff --git a/spec/features/dashboard/merge_requests_spec.rb b/spec/features/dashboard/merge_requests_spec.rb
index 0c1e1d5910b..bb515cfae82 100644
--- a/spec/features/dashboard/merge_requests_spec.rb
+++ b/spec/features/dashboard/merge_requests_spec.rb
@@ -107,7 +107,7 @@ describe 'Dashboard Merge Requests' do
it 'shows authored merge requests', :js do
reset_filters
- input_filtered_search("author:#{current_user.to_reference}")
+ input_filtered_search("author=#{current_user.to_reference}")
expect(page).to have_content(authored_merge_request.title)
expect(page).to have_content(authored_merge_request_from_fork.title)
@@ -120,7 +120,7 @@ describe 'Dashboard Merge Requests' do
it 'shows labeled merge requests', :js do
reset_filters
- input_filtered_search("label:#{label.name}")
+ input_filtered_search("label=#{label.name}")
expect(page).to have_content(labeled_merge_request.title)
diff --git a/spec/features/dashboard/projects_spec.rb b/spec/features/dashboard/projects_spec.rb
index f10cdf6da1e..73f759f8a54 100644
--- a/spec/features/dashboard/projects_spec.rb
+++ b/spec/features/dashboard/projects_spec.rb
@@ -173,6 +173,19 @@ describe 'Dashboard Projects' do
end
end
+ shared_examples 'hidden pipeline status' do
+ it 'does not show the pipeline status' do
+ visit dashboard_projects_path
+
+ page.within('.controls') do
+ expect(page).not_to have_xpath("//a[@href='#{pipelines_project_commit_path(project, project.commit, ref: pipeline.ref)}']")
+ expect(page).not_to have_css('.ci-status-link')
+ expect(page).not_to have_css('.ci-status-icon-success')
+ expect(page).not_to have_link('Pipeline: passed')
+ end
+ end
+ end
+
context 'guest user of project and project has private pipelines' do
let(:guest_user) { create(:user) }
@@ -182,16 +195,15 @@ describe 'Dashboard Projects' do
sign_in(guest_user)
end
- it 'shows that the last pipeline passed' do
- visit dashboard_projects_path
+ it_behaves_like 'hidden pipeline status'
+ end
- page.within('.controls') do
- expect(page).not_to have_xpath("//a[@href='#{pipelines_project_commit_path(project, project.commit, ref: pipeline.ref)}']")
- expect(page).not_to have_css('.ci-status-link')
- expect(page).not_to have_css('.ci-status-icon-success')
- expect(page).not_to have_link('Pipeline: passed')
- end
+ context 'when dashboard_pipeline_status is disabled' do
+ before do
+ stub_feature_flags(dashboard_pipeline_status: false)
end
+
+ it_behaves_like 'hidden pipeline status'
end
end
diff --git a/spec/features/dashboard/snippets_spec.rb b/spec/features/dashboard/snippets_spec.rb
index ff3eb58931d..94dc8601abb 100644
--- a/spec/features/dashboard/snippets_spec.rb
+++ b/spec/features/dashboard/snippets_spec.rb
@@ -91,6 +91,7 @@ describe 'Dashboard snippets' do
context 'as an external user' do
let(:user) { create(:user, :external) }
+
before do
sign_in(user)
visit dashboard_snippets_path
diff --git a/spec/features/groups/issues_spec.rb b/spec/features/groups/issues_spec.rb
index b9b233026fd..a3fa87e3242 100644
--- a/spec/features/groups/issues_spec.rb
+++ b/spec/features/groups/issues_spec.rb
@@ -48,7 +48,7 @@ describe 'Group issues page' do
let(:user2) { user_outside_group }
it 'filters by only group users' do
- filtered_search.set('assignee:')
+ filtered_search.set('assignee=')
expect(find('#js-dropdown-assignee .filter-dropdown')).to have_content(user.name)
expect(find('#js-dropdown-assignee .filter-dropdown')).not_to have_content(user2.name)
diff --git a/spec/features/groups/members/manage_groups_spec.rb b/spec/features/groups/members/manage_groups_spec.rb
new file mode 100644
index 00000000000..55f9418521f
--- /dev/null
+++ b/spec/features/groups/members/manage_groups_spec.rb
@@ -0,0 +1,89 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Groups > Members > Manage groups', :js do
+ include Select2Helper
+ include Spec::Support::Helpers::Features::ListRowsHelpers
+
+ let(:user) { create(:user) }
+ let(:shared_with_group) { create(:group) }
+ let(:shared_group) { create(:group) }
+
+ before do
+ shared_group.add_owner(user)
+ sign_in(user)
+ end
+
+ context 'with share groups with groups feature flag' do
+ before do
+ stub_feature_flags(shared_with_group: true)
+ end
+
+ it 'adds a group to the group' do
+ visit group_group_members_path(shared_group)
+
+ add_group(shared_with_group.id, 'Reporter')
+
+ page.within(first_row) do
+ expect(page).to have_content(shared_with_group.name)
+ expect(page).to have_content('Reporter')
+ end
+ end
+
+ it 'removes a shared group from the group' do
+ create(:group_group_link, shared_group: shared_group,
+ shared_with_group: shared_with_group, group_access: ::Gitlab::Access::DEVELOPER)
+
+ visit group_group_members_path(shared_group)
+
+ expect(page).to have_content(shared_with_group.name)
+
+ accept_confirm do
+ find(:css, '#existing_shares li', text: shared_with_group.name).find(:css, 'a.btn-remove').click
+ end
+
+ wait_for_requests
+
+ expect(page).not_to have_content(shared_with_group.name)
+ end
+
+ it 'updates a shared group to maintainer level' do
+ create(:group_group_link, shared_group: shared_group,
+ shared_with_group: shared_with_group, group_access: ::Gitlab::Access::DEVELOPER)
+
+ visit group_group_members_path(shared_group)
+
+ page.within(first_row) do
+ click_button('Developer')
+ click_link('Maintainer')
+
+ wait_for_requests
+
+ expect(page).to have_button('Maintainer')
+ end
+ end
+
+ def add_group(id, role)
+ page.click_link 'Invite group'
+ page.within ".invite-group-form" do
+ select2(id, from: "#shared_with_group_id")
+ select(role, from: "shared_group_access")
+ click_button "Invite"
+ end
+ end
+ end
+
+ context 'without share groups with groups feature flag' do
+ before do
+ stub_feature_flags(share_group_with_group: false)
+ end
+
+ it 'does not render invitation form and tabs' do
+ visit group_group_members_path(shared_group)
+
+ expect(page).not_to have_link('Invite member')
+ expect(page).not_to have_link('Invite group')
+ end
+ end
+end
diff --git a/spec/features/groups/members/manage_members_spec.rb b/spec/features/groups/members/manage_members_spec.rb
index cdd16ae9441..e4ba3022d8b 100644
--- a/spec/features/groups/members/manage_members_spec.rb
+++ b/spec/features/groups/members/manage_members_spec.rb
@@ -113,7 +113,8 @@ describe 'Groups > Members > Manage members' do
visit group_group_members_path(group)
- expect(page).not_to have_button 'Add to group'
+ expect(page).not_to have_selector '.invite-users-form'
+ expect(page).not_to have_selector '.invite-group-form'
page.within(second_row) do
# Can not modify user2 role
@@ -125,11 +126,10 @@ describe 'Groups > Members > Manage members' do
end
def add_user(id, role)
- page.within ".users-group-form" do
+ page.within ".invite-users-form" do
select2(id, from: "#user_ids", multiple: true)
select(role, from: "access_level")
+ click_button "Invite"
end
-
- click_button "Add to group"
end
end
diff --git a/spec/features/groups/members/search_members_spec.rb b/spec/features/groups/members/search_members_spec.rb
index 9c17aac09e8..fda129ce422 100644
--- a/spec/features/groups/members/search_members_spec.rb
+++ b/spec/features/groups/members/search_members_spec.rb
@@ -24,7 +24,7 @@ describe 'Search group member' do
find('.user-search-btn').click
end
- group_members_list = find(".card .content-list")
+ group_members_list = find('[data-qa-selector="members_list"]')
expect(group_members_list).to have_content(member.name)
expect(group_members_list).not_to have_content(user.name)
end
diff --git a/spec/features/groups/merge_requests_spec.rb b/spec/features/groups/merge_requests_spec.rb
index 59230d6891a..0038a8e4892 100644
--- a/spec/features/groups/merge_requests_spec.rb
+++ b/spec/features/groups/merge_requests_spec.rb
@@ -52,7 +52,7 @@ describe 'Group merge requests page' do
let(:user2) { user_outside_group }
it 'filters by assignee only group users' do
- filtered_search.set('assignee:')
+ filtered_search.set('assignee=')
expect(find('#js-dropdown-assignee .filter-dropdown')).to have_content(user.name)
expect(find('#js-dropdown-assignee .filter-dropdown')).not_to have_content(user2.name)
diff --git a/spec/features/import/manifest_import_spec.rb b/spec/features/import/manifest_import_spec.rb
index 89bf69dea7d..36478128dd1 100644
--- a/spec/features/import/manifest_import_spec.rb
+++ b/spec/features/import/manifest_import_spec.rb
@@ -24,17 +24,17 @@ describe 'Import multiple repositories by uploading a manifest file', :js do
expect(page).to have_content('https://android-review.googlesource.com/platform/build/blueprint')
end
- it 'imports successfully imports a project', :sidekiq_might_not_need_inline do
+ it 'successfully imports a project', :sidekiq_inline do
visit new_import_manifest_path
attach_file('manifest', Rails.root.join('spec/fixtures/aosp_manifest.xml'))
click_on 'List available repositories'
- page.within(first_row) do
+ page.within(second_row) do
click_on 'Import'
expect(page).to have_content 'Done'
- expect(page).to have_content("#{group.full_path}/build/make")
+ expect(page).to have_content("#{group.full_path}/build/blueprint")
end
end
@@ -47,7 +47,7 @@ describe 'Import multiple repositories by uploading a manifest file', :js do
expect(page).to have_content 'The uploaded file is not a valid XML file.'
end
- def first_row
- page.all('table.import-jobs tbody tr')[0]
+ def second_row
+ page.all('table.import-jobs tbody tr')[1]
end
end
diff --git a/spec/features/instance_statistics/cohorts_spec.rb b/spec/features/instance_statistics/cohorts_spec.rb
index 3940e8fa389..0bb2e4b997d 100644
--- a/spec/features/instance_statistics/cohorts_spec.rb
+++ b/spec/features/instance_statistics/cohorts_spec.rb
@@ -10,7 +10,7 @@ describe 'Cohorts page' do
end
it 'sees the user count per month' do
- 2.times { create(:user) }
+ create_list(:user, 2)
visit instance_statistics_cohorts_path
diff --git a/spec/features/issuables/issuable_list_spec.rb b/spec/features/issuables/issuable_list_spec.rb
index 30c516459c5..bcc05d313ad 100644
--- a/spec/features/issuables/issuable_list_spec.rb
+++ b/spec/features/issuables/issuable_list_spec.rb
@@ -83,9 +83,7 @@ describe 'issuable list' do
create(:merge_request, title: FFaker::Lorem.sentence, source_project: project, source_branch: source_branch, head_pipeline: pipeline)
end
- 2.times do
- create(:note_on_issue, noteable: issuable, project: project)
- end
+ create_list(:note_on_issue, 2, noteable: issuable, project: project)
create(:award_emoji, :downvote, awardable: issuable)
create(:award_emoji, :upvote, awardable: issuable)
diff --git a/spec/features/issues/filtered_search/dropdown_assignee_spec.rb b/spec/features/issues/filtered_search/dropdown_assignee_spec.rb
index e1177bedd2d..8aa29cddd5f 100644
--- a/spec/features/issues/filtered_search/dropdown_assignee_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_assignee_spec.rb
@@ -7,24 +7,11 @@ describe 'Dropdown assignee', :js do
let!(:project) { create(:project) }
let!(:user) { create(:user, name: 'administrator', username: 'root') }
- let!(:user_john) { create(:user, name: 'John', username: 'th0mas') }
- let!(:user_jacob) { create(:user, name: 'Jacob', username: 'otter32') }
- let(:filtered_search) { find('.filtered-search') }
let(:js_dropdown_assignee) { '#js-dropdown-assignee' }
let(:filter_dropdown) { find("#{js_dropdown_assignee} .filter-dropdown") }
- def dropdown_assignee_size
- filter_dropdown.all('.filter-dropdown-item').size
- end
-
- def click_assignee(text)
- find('#js-dropdown-assignee .filter-dropdown .filter-dropdown-item', text: text).click
- end
-
before do
project.add_maintainer(user)
- project.add_maintainer(user_john)
- project.add_maintainer(user_jacob)
sign_in(user)
create(:issue, project: project)
@@ -32,153 +19,23 @@ describe 'Dropdown assignee', :js do
end
describe 'behavior' do
- it 'opens when the search bar has assignee:' do
- input_filtered_search('assignee:', submit: false, extra_space: false)
-
- expect(page).to have_css(js_dropdown_assignee, visible: true)
- end
-
- it 'closes when the search bar is unfocused' do
- find('body').click
-
- expect(page).to have_css(js_dropdown_assignee, visible: false)
- end
-
- it 'shows loading indicator when opened' do
- slow_requests do
- # We aren't using `input_filtered_search` because we want to see the loading indicator
- filtered_search.set('assignee:')
-
- expect(page).to have_css('#js-dropdown-assignee .filter-dropdown-loading', visible: true)
- end
- end
-
- it 'hides loading indicator when loaded' do
- input_filtered_search('assignee:', submit: false, extra_space: false)
-
- expect(find(js_dropdown_assignee)).not_to have_css('.filter-dropdown-loading')
- end
-
it 'loads all the assignees when opened' do
- input_filtered_search('assignee:', submit: false, extra_space: false)
+ input_filtered_search('assignee=', submit: false, extra_space: false)
- expect(dropdown_assignee_size).to eq(4)
+ expect_filtered_search_dropdown_results(filter_dropdown, 2)
end
it 'shows current user at top of dropdown' do
- input_filtered_search('assignee:', submit: false, extra_space: false)
+ input_filtered_search('assignee=', submit: false, extra_space: false)
expect(filter_dropdown.first('.filter-dropdown-item')).to have_content(user.name)
end
end
- describe 'filtering' do
- before do
- input_filtered_search('assignee:', submit: false, extra_space: false)
-
- expect(find("#{js_dropdown_assignee} .filter-dropdown")).to have_content(user_john.name)
- expect(find("#{js_dropdown_assignee} .filter-dropdown")).to have_content(user_jacob.name)
- expect(find("#{js_dropdown_assignee} .filter-dropdown")).to have_content(user.name)
- end
-
- it 'filters by name' do
- input_filtered_search('jac', submit: false, extra_space: false)
-
- expect(find("#{js_dropdown_assignee} .filter-dropdown")).to have_content(user_jacob.name)
- expect(find("#{js_dropdown_assignee} .filter-dropdown")).to have_no_content(user.name)
- end
-
- it 'filters by case insensitive name' do
- input_filtered_search('JAC', submit: false, extra_space: false)
-
- expect(find("#{js_dropdown_assignee} .filter-dropdown")).to have_content(user_jacob.name)
- expect(find("#{js_dropdown_assignee} .filter-dropdown")).to have_no_content(user.name)
- end
-
- it 'filters by username with symbol' do
- input_filtered_search('@ott', submit: false, extra_space: false)
-
- expect(find("#{js_dropdown_assignee} .filter-dropdown")).to have_content(user_jacob.name)
- expect(find("#{js_dropdown_assignee} .filter-dropdown")).to have_content(user.name)
- expect(find("#{js_dropdown_assignee} .filter-dropdown")).to have_no_content(user_john.name)
- end
-
- it 'filters by case insensitive username with symbol' do
- input_filtered_search('@OTT', submit: false, extra_space: false)
-
- expect(find("#{js_dropdown_assignee} .filter-dropdown")).to have_content(user_jacob.name)
- expect(find("#{js_dropdown_assignee} .filter-dropdown")).to have_content(user.name)
- expect(find("#{js_dropdown_assignee} .filter-dropdown")).to have_no_content(user_john.name)
- end
-
- it 'filters by username without symbol' do
- input_filtered_search('ott', submit: false, extra_space: false)
-
- wait_for_requests
-
- expect(find("#{js_dropdown_assignee} .filter-dropdown")).to have_content(user_jacob.name)
- expect(find("#{js_dropdown_assignee} .filter-dropdown")).to have_content(user.name)
- expect(find("#{js_dropdown_assignee} .filter-dropdown")).to have_no_content(user_john.name)
- end
-
- it 'filters by case insensitive username without symbol' do
- input_filtered_search('OTT', submit: false, extra_space: false)
-
- wait_for_requests
-
- expect(find("#{js_dropdown_assignee} .filter-dropdown")).to have_content(user_jacob.name)
- expect(find("#{js_dropdown_assignee} .filter-dropdown")).to have_content(user.name)
- expect(find("#{js_dropdown_assignee} .filter-dropdown")).to have_no_content(user_john.name)
- end
- end
-
- describe 'selecting from dropdown' do
- before do
- input_filtered_search('assignee:', submit: false, extra_space: false)
- end
-
- it 'fills in the assignee username when the assignee has not been filtered' do
- click_assignee(user_jacob.name)
-
- wait_for_requests
-
- expect(page).to have_css(js_dropdown_assignee, visible: false)
- expect_tokens([assignee_token(user_jacob.name)])
- expect_filtered_search_input_empty
- end
-
- it 'fills in the assignee username when the assignee has been filtered' do
- input_filtered_search('roo', submit: false, extra_space: false)
- click_assignee(user.name)
-
- wait_for_requests
-
- expect(page).to have_css(js_dropdown_assignee, visible: false)
- expect_tokens([assignee_token(user.name)])
- expect_filtered_search_input_empty
- end
-
- it 'selects `None`' do
- find('#js-dropdown-assignee .filter-dropdown-item', text: 'None').click
-
- expect(page).to have_css(js_dropdown_assignee, visible: false)
- expect_tokens([assignee_token('None')])
- expect_filtered_search_input_empty
- end
-
- it 'selects `Any`' do
- find('#js-dropdown-assignee .filter-dropdown-item', text: 'Any').click
-
- expect(page).to have_css(js_dropdown_assignee, visible: false)
- expect_tokens([assignee_token('Any')])
- expect_filtered_search_input_empty
- end
- end
-
describe 'selecting from dropdown without Ajax call' do
before do
Gitlab::Testing::RequestBlockerMiddleware.block_requests!
- input_filtered_search('assignee:', submit: false, extra_space: false)
+ input_filtered_search('assignee=', submit: false, extra_space: false)
end
after do
@@ -186,59 +43,11 @@ describe 'Dropdown assignee', :js do
end
it 'selects current user' do
- find('#js-dropdown-assignee .filter-dropdown-item', text: user.username).click
+ find("#{js_dropdown_assignee} .filter-dropdown-item", text: user.username).click
expect(page).to have_css(js_dropdown_assignee, visible: false)
expect_tokens([assignee_token(user.username)])
expect_filtered_search_input_empty
end
end
-
- describe 'input has existing content' do
- it 'opens assignee dropdown with existing search term' do
- input_filtered_search('searchTerm assignee:', submit: false, extra_space: false)
-
- expect(page).to have_css(js_dropdown_assignee, visible: true)
- end
-
- it 'opens assignee dropdown with existing author' do
- input_filtered_search('author:@user assignee:', submit: false, extra_space: false)
-
- expect(page).to have_css(js_dropdown_assignee, visible: true)
- end
-
- it 'opens assignee dropdown with existing label' do
- input_filtered_search('label:~bug assignee:', submit: false, extra_space: false)
-
- expect(page).to have_css(js_dropdown_assignee, visible: true)
- end
-
- it 'opens assignee dropdown with existing milestone' do
- input_filtered_search('milestone:%v1.0 assignee:', submit: false, extra_space: false)
-
- expect(page).to have_css(js_dropdown_assignee, visible: true)
- end
-
- it 'opens assignee dropdown with existing my-reaction' do
- input_filtered_search('my-reaction:star assignee:', submit: false, extra_space: false)
-
- expect(page).to have_css(js_dropdown_assignee, visible: true)
- end
- end
-
- describe 'caching requests' do
- it 'caches requests after the first load' do
- input_filtered_search('assignee:', submit: false, extra_space: false)
- initial_size = dropdown_assignee_size
-
- expect(initial_size).to be > 0
-
- new_user = create(:user)
- project.add_maintainer(new_user)
- find('.filtered-search-box .clear-search').click
- input_filtered_search('assignee:', submit: false, extra_space: false)
-
- expect(dropdown_assignee_size).to eq(initial_size)
- end
- end
end
diff --git a/spec/features/issues/filtered_search/dropdown_author_spec.rb b/spec/features/issues/filtered_search/dropdown_author_spec.rb
index bd22eb1056b..c95bd7071b3 100644
--- a/spec/features/issues/filtered_search/dropdown_author_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_author_spec.rb
@@ -7,32 +7,11 @@ describe 'Dropdown author', :js do
let!(:project) { create(:project) }
let!(:user) { create(:user, name: 'administrator', username: 'root') }
- let!(:user_john) { create(:user, name: 'John', username: 'th0mas') }
- let!(:user_jacob) { create(:user, name: 'Jacob', username: 'ooter32') }
- let(:filtered_search) { find('.filtered-search') }
let(:js_dropdown_author) { '#js-dropdown-author' }
-
- def send_keys_to_filtered_search(input)
- input.split("").each do |i|
- filtered_search.send_keys(i)
- end
-
- sleep 0.5
- wait_for_requests
- end
-
- def dropdown_author_size
- page.all('#js-dropdown-author .filter-dropdown .filter-dropdown-item').size
- end
-
- def click_author(text)
- find('#js-dropdown-author .filter-dropdown .filter-dropdown-item', text: text).click
- end
+ let(:filter_dropdown) { find("#{js_dropdown_author} .filter-dropdown") }
before do
project.add_maintainer(user)
- project.add_maintainer(user_john)
- project.add_maintainer(user_jacob)
sign_in(user)
create(:issue, project: project)
@@ -40,113 +19,23 @@ describe 'Dropdown author', :js do
end
describe 'behavior' do
- it 'opens when the search bar has author:' do
- filtered_search.set('author:')
-
- expect(page).to have_css(js_dropdown_author, visible: true)
- end
-
- it 'closes when the search bar is unfocused' do
- find('body').click
-
- expect(page).to have_css(js_dropdown_author, visible: false)
- end
-
- it 'shows loading indicator when opened' do
- slow_requests do
- filtered_search.set('author:')
-
- expect(page).to have_css('#js-dropdown-author .filter-dropdown-loading', visible: true)
- end
- end
-
- it 'hides loading indicator when loaded' do
- send_keys_to_filtered_search('author:')
-
- expect(page).not_to have_css('#js-dropdown-author .filter-dropdown-loading')
- end
-
it 'loads all the authors when opened' do
- send_keys_to_filtered_search('author:')
+ input_filtered_search('author=', submit: false, extra_space: false)
- expect(dropdown_author_size).to eq(4)
+ expect_filtered_search_dropdown_results(filter_dropdown, 2)
end
it 'shows current user at top of dropdown' do
- send_keys_to_filtered_search('author:')
+ input_filtered_search('author=', submit: false, extra_space: false)
- expect(first('#js-dropdown-author li')).to have_content(user.name)
- end
- end
-
- describe 'filtering' do
- before do
- filtered_search.set('author')
- send_keys_to_filtered_search(':')
- end
-
- it 'filters by name' do
- send_keys_to_filtered_search('jac')
-
- expect(dropdown_author_size).to eq(1)
- end
-
- it 'filters by case insensitive name' do
- send_keys_to_filtered_search('Jac')
-
- expect(dropdown_author_size).to eq(1)
- end
-
- it 'filters by username with symbol' do
- send_keys_to_filtered_search('@oot')
-
- expect(dropdown_author_size).to eq(2)
- end
-
- it 'filters by username without symbol' do
- send_keys_to_filtered_search('oot')
-
- expect(dropdown_author_size).to eq(2)
- end
-
- it 'filters by case insensitive username without symbol' do
- send_keys_to_filtered_search('OOT')
-
- expect(dropdown_author_size).to eq(2)
- end
- end
-
- describe 'selecting from dropdown' do
- before do
- filtered_search.set('author')
- send_keys_to_filtered_search(':')
- end
-
- it 'fills in the author username when the author has not been filtered' do
- click_author(user_jacob.name)
-
- wait_for_requests
-
- expect(page).to have_css(js_dropdown_author, visible: false)
- expect_tokens([author_token(user_jacob.name)])
- expect_filtered_search_input_empty
- end
-
- it 'fills in the author username when the author has been filtered' do
- click_author(user.name)
-
- wait_for_requests
-
- expect(page).to have_css(js_dropdown_author, visible: false)
- expect_tokens([author_token(user.name)])
- expect_filtered_search_input_empty
+ expect(filter_dropdown.first('.filter-dropdown-item')).to have_content(user.name)
end
end
describe 'selecting from dropdown without Ajax call' do
before do
Gitlab::Testing::RequestBlockerMiddleware.block_requests!
- filtered_search.set('author:')
+ input_filtered_search('author=', submit: false, extra_space: false)
end
after do
@@ -154,55 +43,11 @@ describe 'Dropdown author', :js do
end
it 'selects current user' do
- find('#js-dropdown-author .filter-dropdown-item', text: user.username).click
+ find("#{js_dropdown_author} .filter-dropdown-item", text: user.username).click
expect(page).to have_css(js_dropdown_author, visible: false)
expect_tokens([author_token(user.username)])
expect_filtered_search_input_empty
end
end
-
- describe 'input has existing content' do
- it 'opens author dropdown with existing search term' do
- filtered_search.set('searchTerm author:')
-
- expect(page).to have_css(js_dropdown_author, visible: true)
- end
-
- it 'opens author dropdown with existing assignee' do
- filtered_search.set('assignee:@user author:')
-
- expect(page).to have_css(js_dropdown_author, visible: true)
- end
-
- it 'opens author dropdown with existing label' do
- filtered_search.set('label:~bug author:')
-
- expect(page).to have_css(js_dropdown_author, visible: true)
- end
-
- it 'opens author dropdown with existing milestone' do
- filtered_search.set('milestone:%v1.0 author:')
-
- expect(page).to have_css(js_dropdown_author, visible: true)
- end
- end
-
- describe 'caching requests' do
- it 'caches requests after the first load' do
- filtered_search.set('author')
- send_keys_to_filtered_search(':')
- initial_size = dropdown_author_size
-
- expect(initial_size).to be > 0
-
- new_user = create(:user)
- project.add_maintainer(new_user)
- find('.filtered-search-box .clear-search').click
- filtered_search.set('author')
- send_keys_to_filtered_search(':')
-
- expect(dropdown_author_size).to eq(initial_size)
- end
- end
end
diff --git a/spec/features/issues/filtered_search/dropdown_base_spec.rb b/spec/features/issues/filtered_search/dropdown_base_spec.rb
new file mode 100644
index 00000000000..2a800f054a0
--- /dev/null
+++ b/spec/features/issues/filtered_search/dropdown_base_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Dropdown base', :js do
+ include FilteredSearchHelpers
+
+ let!(:project) { create(:project) }
+ let!(:user) { create(:user, name: 'administrator', username: 'root') }
+ let(:filtered_search) { find('.filtered-search') }
+ let(:js_dropdown_assignee) { '#js-dropdown-assignee' }
+ let(:filter_dropdown) { find("#{js_dropdown_assignee} .filter-dropdown") }
+
+ def dropdown_assignee_size
+ filter_dropdown.all('.filter-dropdown-item').size
+ end
+
+ before do
+ project.add_maintainer(user)
+ sign_in(user)
+ create(:issue, project: project)
+
+ visit project_issues_path(project)
+ end
+
+ describe 'behavior' do
+ it 'shows loading indicator when opened' do
+ slow_requests do
+ # We aren't using `input_filtered_search` because we want to see the loading indicator
+ filtered_search.set('assignee=')
+
+ expect(page).to have_css("#{js_dropdown_assignee} .filter-dropdown-loading", visible: true)
+ end
+ end
+
+ it 'hides loading indicator when loaded' do
+ input_filtered_search('assignee=', submit: false, extra_space: false)
+
+ expect(find(js_dropdown_assignee)).not_to have_css('.filter-dropdown-loading')
+ end
+ end
+
+ describe 'caching requests' do
+ it 'caches requests after the first load' do
+ input_filtered_search('assignee=', submit: false, extra_space: false)
+ initial_size = dropdown_assignee_size
+
+ expect(initial_size).to be > 0
+
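+ # Adding another assignable user should not change the dropdown size,
+ # because the first request is expected to be cached.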
+ new_user = create(:user)
+ project.add_maintainer(new_user)
+ find('.filtered-search-box .clear-search').click
+ input_filtered_search('assignee=', submit: false, extra_space: false)
+
+ expect(dropdown_assignee_size).to eq(initial_size)
+ end
+ end
+end
diff --git a/spec/features/issues/filtered_search/dropdown_emoji_spec.rb b/spec/features/issues/filtered_search/dropdown_emoji_spec.rb
index 7ec3d215fb1..4c11f83318b 100644
--- a/spec/features/issues/filtered_search/dropdown_emoji_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_emoji_spec.rb
@@ -11,30 +11,13 @@ describe 'Dropdown emoji', :js do
let!(:award_emoji_star) { create(:award_emoji, name: 'star', user: user, awardable: issue) }
let(:filtered_search) { find('.filtered-search') }
let(:js_dropdown_emoji) { '#js-dropdown-my-reaction' }
-
- def send_keys_to_filtered_search(input)
- input.split("").each do |i|
- filtered_search.send_keys(i)
- end
-
- sleep 0.5
- wait_for_requests
- end
-
- def dropdown_emoji_size
- all('gl-emoji[data-name]').size
- end
-
- def click_emoji(text)
- find('#js-dropdown-my-reaction .filter-dropdown .filter-dropdown-item', text: text).click
- end
+ let(:filter_dropdown) { find("#{js_dropdown_emoji} .filter-dropdown") }
before do
project.add_maintainer(user)
create_list(:award_emoji, 2, user: user, name: 'thumbsup')
create_list(:award_emoji, 1, user: user, name: 'thumbsdown')
create_list(:award_emoji, 3, user: user, name: 'star')
- create_list(:award_emoji, 1, user: user, name: 'tea')
end
context 'when user not logged in' do
@@ -43,8 +26,8 @@ describe 'Dropdown emoji', :js do
end
describe 'behavior' do
- it 'does not open when the search bar has my-reaction:' do
- filtered_search.set('my-reaction:')
+ it 'does not open when the search bar has my-reaction=' do
+ filtered_search.set('my-reaction=')
expect(page).not_to have_css(js_dropdown_emoji)
end
@@ -59,143 +42,22 @@ describe 'Dropdown emoji', :js do
end
describe 'behavior' do
- it 'opens when the search bar has my-reaction:' do
- filtered_search.set('my-reaction:')
+ it 'opens when the search bar has my-reaction=' do
+ filtered_search.set('my-reaction=')
expect(page).to have_css(js_dropdown_emoji, visible: true)
end
- it 'closes when the search bar is unfocused' do
- find('body').click
-
- expect(page).to have_css(js_dropdown_emoji, visible: false)
- end
-
- it 'shows loading indicator when opened' do
- slow_requests do
- filtered_search.set('my-reaction:')
-
- expect(page).to have_css('#js-dropdown-my-reaction .filter-dropdown-loading', visible: true)
- end
- end
-
- it 'hides loading indicator when loaded' do
- send_keys_to_filtered_search('my-reaction:')
-
- expect(page).not_to have_css('#js-dropdown-my-reaction .filter-dropdown-loading')
- end
-
it 'loads all the emojis when opened' do
- send_keys_to_filtered_search('my-reaction:')
+ input_filtered_search('my-reaction=', submit: false, extra_space: false)
- expect(dropdown_emoji_size).to eq(4)
+ expect_filtered_search_dropdown_results(filter_dropdown, 3)
end
it 'shows the most populated emoji at top of dropdown' do
- send_keys_to_filtered_search('my-reaction:')
-
- expect(first('#js-dropdown-my-reaction .filter-dropdown li')).to have_content(award_emoji_star.name)
- end
- end
-
- describe 'filtering' do
- before do
- filtered_search.set('my-reaction')
- send_keys_to_filtered_search(':')
- end
-
- it 'filters by name' do
- send_keys_to_filtered_search('up')
-
- expect(dropdown_emoji_size).to eq(1)
- end
-
- it 'filters by case insensitive name' do
- send_keys_to_filtered_search('Up')
-
- expect(dropdown_emoji_size).to eq(1)
- end
- end
-
- describe 'selecting from dropdown' do
- before do
- filtered_search.set('my-reaction')
- send_keys_to_filtered_search(':')
- end
-
- it 'selects `None`' do
- find('#js-dropdown-my-reaction .filter-dropdown-item', text: 'None').click
-
- expect(page).to have_css(js_dropdown_emoji, visible: false)
- expect_tokens([reaction_token('None', false)])
- expect_filtered_search_input_empty
- end
-
- it 'selects `Any`' do
- find('#js-dropdown-my-reaction .filter-dropdown-item', text: 'Any').click
-
- expect(page).to have_css(js_dropdown_emoji, visible: false)
- expect_tokens([reaction_token('Any', false)])
- expect_filtered_search_input_empty
- end
-
- it 'fills in the my-reaction name' do
- click_emoji('thumbsup')
-
- wait_for_requests
-
- expect(page).to have_css(js_dropdown_emoji, visible: false)
- expect_tokens([reaction_token('thumbsup')])
- expect_filtered_search_input_empty
- end
- end
-
- describe 'input has existing content' do
- it 'opens my-reaction dropdown with existing search term' do
- filtered_search.set('searchTerm my-reaction:')
-
- expect(page).to have_css(js_dropdown_emoji, visible: true)
- end
-
- it 'opens my-reaction dropdown with existing assignee' do
- filtered_search.set('assignee:@user my-reaction:')
-
- expect(page).to have_css(js_dropdown_emoji, visible: true)
- end
-
- it 'opens my-reaction dropdown with existing label' do
- filtered_search.set('label:~bug my-reaction:')
-
- expect(page).to have_css(js_dropdown_emoji, visible: true)
- end
-
- it 'opens my-reaction dropdown with existing milestone' do
- filtered_search.set('milestone:%v1.0 my-reaction:')
-
- expect(page).to have_css(js_dropdown_emoji, visible: true)
- end
-
- it 'opens my-reaction dropdown with existing my-reaction' do
- filtered_search.set('my-reaction:star my-reaction:')
-
- expect(page).to have_css(js_dropdown_emoji, visible: true)
- end
- end
-
- describe 'caching requests' do
- it 'caches requests after the first load' do
- filtered_search.set('my-reaction')
- send_keys_to_filtered_search(':')
- initial_size = dropdown_emoji_size
-
- expect(initial_size).to be > 0
-
- create_list(:award_emoji, 1, user: user, name: 'smile')
- find('.filtered-search-box .clear-search').click
- filtered_search.set('my-reaction')
- send_keys_to_filtered_search(':')
+ input_filtered_search('my-reaction=', submit: false, extra_space: false)
- expect(dropdown_emoji_size).to eq(initial_size)
+ expect(first("#{js_dropdown_emoji} .filter-dropdown li")).to have_content(award_emoji_star.name)
end
end
end
diff --git a/spec/features/issues/filtered_search/dropdown_hint_spec.rb b/spec/features/issues/filtered_search/dropdown_hint_spec.rb
index bb57d69148b..10b092c6957 100644
--- a/spec/features/issues/filtered_search/dropdown_hint_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_hint_spec.rb
@@ -9,11 +9,16 @@ describe 'Dropdown hint', :js do
let!(:user) { create(:user) }
let(:filtered_search) { find('.filtered-search') }
let(:js_dropdown_hint) { '#js-dropdown-hint' }
+ let(:js_dropdown_operator) { '#js-dropdown-operator' }
def click_hint(text)
find('#js-dropdown-hint .filter-dropdown .filter-dropdown-item', text: text).click
end
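+ # Clicks the operator item (e.g. '=') in the operator dropdown, matched by its data-value.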
+ def click_operator(op)
+ find("#js-dropdown-operator .filter-dropdown .filter-dropdown-item[data-value='#{op}']").click
+ end
+
before do
project.add_maintainer(user)
create(:issue, project: project)
@@ -27,7 +32,7 @@ describe 'Dropdown hint', :js do
it 'does not show the my-reaction dropdown item' do
expect(page).to have_css(js_dropdown_hint, visible: false)
- expect(page).not_to have_content('my-reaction')
+ expect(page).not_to have_content('My-reaction')
end
end
@@ -46,9 +51,7 @@ describe 'Dropdown hint', :js do
it 'opens when the search bar is first focused' do
expect(page).to have_css(js_dropdown_hint, visible: true)
- end
- it 'closes when the search bar is unfocused' do
find('body').click
expect(page).to have_css(js_dropdown_hint, visible: false)
@@ -56,15 +59,6 @@ describe 'Dropdown hint', :js do
end
describe 'filtering' do
- it 'does not filter `Press Enter or click to search`' do
- filtered_search.set('randomtext')
-
- hint_dropdown = find(js_dropdown_hint)
-
- expect(hint_dropdown).to have_content('Press Enter or click to search')
- expect(hint_dropdown).to have_selector('.filter-dropdown .filter-dropdown-item', count: 0)
- end
-
it 'filters with text' do
filtered_search.set('a')
@@ -77,189 +71,32 @@ describe 'Dropdown hint', :js do
filtered_search.click
end
- it 'opens the author dropdown when you click on author' do
- click_hint('author')
-
- expect(page).to have_css(js_dropdown_hint, visible: false)
- expect(page).to have_css('#js-dropdown-author', visible: true)
- expect_tokens([{ name: 'Author' }])
- expect_filtered_search_input_empty
- end
-
- it 'opens the assignee dropdown when you click on assignee' do
- click_hint('assignee')
-
- expect(page).to have_css(js_dropdown_hint, visible: false)
- expect(page).to have_css('#js-dropdown-assignee', visible: true)
- expect_tokens([{ name: 'Assignee' }])
- expect_filtered_search_input_empty
- end
-
- it 'opens the milestone dropdown when you click on milestone' do
- click_hint('milestone')
-
- expect(page).to have_css(js_dropdown_hint, visible: false)
- expect(page).to have_css('#js-dropdown-milestone', visible: true)
- expect_tokens([{ name: 'Milestone' }])
- expect_filtered_search_input_empty
- end
-
- it 'opens the release dropdown when you click on release' do
- click_hint('release')
+ it 'opens the token dropdown when you click on it' do
+ click_hint('Author')
expect(page).to have_css(js_dropdown_hint, visible: false)
- expect(page).to have_css('#js-dropdown-release', visible: true)
- expect_tokens([{ name: 'Release' }])
- expect_filtered_search_input_empty
- end
-
- it 'opens the label dropdown when you click on label' do
- click_hint('label')
-
- expect(page).to have_css(js_dropdown_hint, visible: false)
- expect(page).to have_css('#js-dropdown-label', visible: true)
- expect_tokens([{ name: 'Label' }])
- expect_filtered_search_input_empty
- end
-
- it 'opens the emoji dropdown when you click on my-reaction' do
- click_hint('my-reaction')
-
- expect(page).to have_css(js_dropdown_hint, visible: false)
- expect(page).to have_css('#js-dropdown-my-reaction', visible: true)
- expect_tokens([{ name: 'My-reaction' }])
- expect_filtered_search_input_empty
- end
+ expect(page).to have_css(js_dropdown_operator, visible: true)
- it 'opens the yes-no dropdown when you click on confidential' do
- click_hint('confidential')
-
- expect(page).to have_css(js_dropdown_hint, visible: false)
- expect(page).to have_css('#js-dropdown-confidential', visible: true)
- expect_tokens([{ name: 'Confidential' }])
- expect_filtered_search_input_empty
- end
- end
-
- describe 'selecting from dropdown with some input' do
- it 'opens the author dropdown when you click on author' do
- filtered_search.set('auth')
- click_hint('author')
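+ # With operator support, clicking a token in the hint dropdown first opens the operator
+ # dropdown; choosing '=' then opens the value dropdown for that token.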
+ click_operator('=')
expect(page).to have_css(js_dropdown_hint, visible: false)
+ expect(page).to have_css(js_dropdown_operator, visible: false)
expect(page).to have_css('#js-dropdown-author', visible: true)
- expect_tokens([{ name: 'Author' }])
- expect_filtered_search_input_empty
- end
-
- it 'opens the assignee dropdown when you click on assignee' do
- filtered_search.set('assign')
- click_hint('assignee')
-
- expect(page).to have_css(js_dropdown_hint, visible: false)
- expect(page).to have_css('#js-dropdown-assignee', visible: true)
- expect_tokens([{ name: 'Assignee' }])
- expect_filtered_search_input_empty
- end
-
- it 'opens the milestone dropdown when you click on milestone' do
- filtered_search.set('mile')
- click_hint('milestone')
-
- expect(page).to have_css(js_dropdown_hint, visible: false)
- expect(page).to have_css('#js-dropdown-milestone', visible: true)
- expect_tokens([{ name: 'Milestone' }])
- expect_filtered_search_input_empty
- end
-
- it 'opens the label dropdown when you click on label' do
- filtered_search.set('lab')
- click_hint('label')
-
- expect(page).to have_css(js_dropdown_hint, visible: false)
- expect(page).to have_css('#js-dropdown-label', visible: true)
- expect_tokens([{ name: 'Label' }])
- expect_filtered_search_input_empty
- end
-
- it 'opens the emoji dropdown when you click on my-reaction' do
- filtered_search.set('my')
- click_hint('my-reaction')
-
- expect(page).to have_css(js_dropdown_hint, visible: false)
- expect(page).to have_css('#js-dropdown-my-reaction', visible: true)
- expect_tokens([{ name: 'My-reaction' }])
+ expect_tokens([{ name: 'Author', operator: '=' }])
expect_filtered_search_input_empty
end
end
describe 'reselecting from dropdown' do
- it 'reuses existing author text' do
- filtered_search.send_keys('author:')
+ it 'reuses existing token text' do
+ filtered_search.send_keys('author')
filtered_search.send_keys(:backspace)
filtered_search.send_keys(:backspace)
- click_hint('author')
+ click_hint('Author')
expect_tokens([{ name: 'Author' }])
expect_filtered_search_input_empty
end
-
- it 'reuses existing assignee text' do
- filtered_search.send_keys('assignee:')
- filtered_search.send_keys(:backspace)
- filtered_search.send_keys(:backspace)
- click_hint('assignee')
-
- expect_tokens([{ name: 'Assignee' }])
- expect_filtered_search_input_empty
- end
-
- it 'reuses existing milestone text' do
- filtered_search.send_keys('milestone:')
- filtered_search.send_keys(:backspace)
- filtered_search.send_keys(:backspace)
- click_hint('milestone')
-
- expect_tokens([{ name: 'Milestone' }])
- expect_filtered_search_input_empty
- end
-
- it 'reuses existing label text' do
- filtered_search.send_keys('label:')
- filtered_search.send_keys(:backspace)
- filtered_search.send_keys(:backspace)
- click_hint('label')
-
- expect_tokens([{ name: 'Label' }])
- expect_filtered_search_input_empty
- end
-
- it 'reuses existing emoji text' do
- filtered_search.send_keys('my-reaction:')
- filtered_search.send_keys(:backspace)
- filtered_search.send_keys(:backspace)
- click_hint('my-reaction')
-
- expect_tokens([{ name: 'My-reaction' }])
- expect_filtered_search_input_empty
- end
- end
- end
-
- context 'merge request page' do
- before do
- sign_in(user)
- visit project_merge_requests_path(project)
- filtered_search.click
- end
-
- it 'shows the WIP menu item and opens the WIP options dropdown' do
- click_hint('wip')
-
- expect(page).to have_css(js_dropdown_hint, visible: false)
- expect(page).to have_css('#js-dropdown-wip', visible: true)
- expect_tokens([{ name: 'WIP' }])
- expect_filtered_search_input_empty
end
end
end
diff --git a/spec/features/issues/filtered_search/dropdown_label_spec.rb b/spec/features/issues/filtered_search/dropdown_label_spec.rb
index f7f9f0de4db..1e90efc8d56 100644
--- a/spec/features/issues/filtered_search/dropdown_label_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_label_spec.rb
@@ -8,31 +8,7 @@ describe 'Dropdown label', :js do
let(:project) { create(:project) }
let(:user) { create(:user) }
let(:filtered_search) { find('.filtered-search') }
- let(:js_dropdown_label) { '#js-dropdown-label' }
- let(:filter_dropdown) { find("#{js_dropdown_label} .filter-dropdown") }
-
- shared_context 'with labels' do
- let!(:bug_label) { create(:label, project: project, title: 'bug-label') }
- let!(:uppercase_label) { create(:label, project: project, title: 'BUG-LABEL') }
- let!(:two_words_label) { create(:label, project: project, title: 'High Priority') }
- let!(:wont_fix_label) { create(:label, project: project, title: 'Won"t Fix') }
- let!(:wont_fix_single_label) { create(:label, project: project, title: 'Won\'t Fix') }
- let!(:special_label) { create(:label, project: project, title: '!@#$%^+&*()') }
- let!(:long_label) { create(:label, project: project, title: 'this is a very long title this is a very long title this is a very long title this is a very long title this is a very long title') }
- end
-
- def search_for_label(label)
- init_label_search
- filtered_search.send_keys(label)
- end
-
- def click_label(text)
- filter_dropdown.find('.filter-dropdown-item', text: text).click
- end
-
- def clear_search_field
- find('.filtered-search-box .clear-search').click
- end
+ let(:filter_dropdown) { find('#js-dropdown-label .filter-dropdown') }
before do
project.add_maintainer(user)
@@ -42,267 +18,12 @@ describe 'Dropdown label', :js do
visit project_issues_path(project)
end
- describe 'keyboard navigation' do
- it 'selects label' do
- bug_label = create(:label, project: project, title: 'bug-label')
- init_label_search
-
- # navigate to the bug_label option and selects it
- filtered_search.native.send_keys(:down, :down, :down, :enter)
-
- expect_tokens([label_token(bug_label.title)])
- expect_filtered_search_input_empty
- end
- end
-
describe 'behavior' do
- it 'opens when the search bar has label:' do
- filtered_search.set('label:')
-
- expect(page).to have_css(js_dropdown_label)
- end
-
- it 'closes when the search bar is unfocused' do
- find('body').click
-
- expect(page).not_to have_css(js_dropdown_label)
- end
-
- it 'shows loading indicator when opened and hides it when loaded' do
- slow_requests do
- filtered_search.set('label:')
-
- expect(page).to have_css("#{js_dropdown_label} .filter-dropdown-loading", visible: true)
- end
- expect(find(js_dropdown_label)).not_to have_css('.filter-dropdown-loading')
- end
-
it 'loads all the labels when opened' do
- bug_label = create(:label, project: project, title: 'bug-label')
- filtered_search.set('label:')
-
- expect(filter_dropdown).to have_content(bug_label.title)
- expect(filter_dropdown).to have_selector('.filter-dropdown-item', count: 1)
- end
- end
-
- describe 'filtering' do
- include_context 'with labels'
-
- before do
- init_label_search
- end
-
- it 'filters by case-insensitive name with or without symbol' do
- filtered_search.send_keys('b')
-
- expect(filter_dropdown.find('.filter-dropdown-item', text: bug_label.title)).to be_visible
- expect(filter_dropdown.find('.filter-dropdown-item', text: uppercase_label.title)).to be_visible
-
- expect(filter_dropdown).to have_selector('.filter-dropdown-item', count: 2)
-
- clear_search_field
- init_label_search
-
- filtered_search.send_keys('~bu')
-
- expect(filter_dropdown.find('.filter-dropdown-item', text: bug_label.title)).to be_visible
- expect(filter_dropdown.find('.filter-dropdown-item', text: uppercase_label.title)).to be_visible
- expect(filter_dropdown).to have_selector('.filter-dropdown-item', count: 2)
- end
-
- it 'filters by multiple words with or without symbol' do
- filtered_search.send_keys('Hig')
-
- expect(filter_dropdown.find('.filter-dropdown-item', text: two_words_label.title)).to be_visible
- expect(filter_dropdown).to have_selector('.filter-dropdown-item', count: 1)
-
- clear_search_field
- init_label_search
-
- filtered_search.send_keys('~Hig')
-
- expect(filter_dropdown.find('.filter-dropdown-item', text: two_words_label.title)).to be_visible
- expect(filter_dropdown).to have_selector('.filter-dropdown-item', count: 1)
- end
-
- it 'filters by multiple words containing single quotes with or without symbol' do
- filtered_search.send_keys('won\'t')
-
- expect(filter_dropdown.find('.filter-dropdown-item', text: wont_fix_single_label.title)).to be_visible
- expect(filter_dropdown).to have_selector('.filter-dropdown-item', count: 1)
-
- clear_search_field
- init_label_search
-
- filtered_search.send_keys('~won\'t')
-
- expect(filter_dropdown.find('.filter-dropdown-item', text: wont_fix_single_label.title)).to be_visible
- expect(filter_dropdown).to have_selector('.filter-dropdown-item', count: 1)
- end
-
- it 'filters by multiple words containing double quotes with or without symbol' do
- filtered_search.send_keys('won"t')
-
- expect(filter_dropdown.find('.filter-dropdown-item', text: wont_fix_label.title)).to be_visible
- expect(filter_dropdown).to have_selector('.filter-dropdown-item', count: 1)
-
- clear_search_field
- init_label_search
-
- filtered_search.send_keys('~won"t')
-
- expect(filter_dropdown.find('.filter-dropdown-item', text: wont_fix_label.title)).to be_visible
- expect(filter_dropdown).to have_selector('.filter-dropdown-item', count: 1)
- end
-
- it 'filters by special characters with or without symbol' do
- filtered_search.send_keys('^+')
-
- expect(filter_dropdown.find('.filter-dropdown-item', text: special_label.title)).to be_visible
- expect(filter_dropdown).to have_selector('.filter-dropdown-item', count: 1)
-
- clear_search_field
- init_label_search
-
- filtered_search.send_keys('~^+')
-
- expect(filter_dropdown.find('.filter-dropdown-item', text: special_label.title)).to be_visible
- expect(filter_dropdown).to have_selector('.filter-dropdown-item', count: 1)
- end
- end
-
- describe 'selecting from dropdown' do
- include_context 'with labels'
-
- before do
- init_label_search
- end
-
- it 'fills in the label name when the label has not been filled' do
- click_label(bug_label.title)
-
- expect(page).not_to have_css(js_dropdown_label)
- expect_tokens([label_token(bug_label.title)])
- expect_filtered_search_input_empty
- end
-
- it 'fills in the label name when the label is partially filled' do
- filtered_search.send_keys('bu')
- click_label(bug_label.title)
-
- expect(page).not_to have_css(js_dropdown_label)
- expect_tokens([label_token(bug_label.title)])
- expect_filtered_search_input_empty
- end
-
- it 'fills in the label name that contains multiple words' do
- click_label(two_words_label.title)
-
- expect(page).not_to have_css(js_dropdown_label)
- expect_tokens([label_token("\"#{two_words_label.title}\"")])
- expect_filtered_search_input_empty
- end
-
- it 'fills in the label name that contains multiple words and is very long' do
- click_label(long_label.title)
-
- expect(page).not_to have_css(js_dropdown_label)
- expect_tokens([label_token("\"#{long_label.title}\"")])
- expect_filtered_search_input_empty
- end
-
- it 'fills in the label name that contains double quotes' do
- click_label(wont_fix_label.title)
-
- expect(page).not_to have_css(js_dropdown_label)
- expect_tokens([label_token("'#{wont_fix_label.title}'")])
- expect_filtered_search_input_empty
- end
-
- it 'fills in the label name with the correct capitalization' do
- click_label(uppercase_label.title)
-
- expect(page).not_to have_css(js_dropdown_label)
- expect_tokens([label_token(uppercase_label.title)])
- expect_filtered_search_input_empty
- end
-
- it 'fills in the label name with special characters' do
- click_label(special_label.title)
-
- expect(page).not_to have_css(js_dropdown_label)
- expect_tokens([label_token(special_label.title)])
- expect_filtered_search_input_empty
- end
-
- it 'selects `no label`' do
- find("#{js_dropdown_label} .filter-dropdown-item", text: 'None').click
-
- expect(page).not_to have_css(js_dropdown_label)
- expect_tokens([label_token('None', false)])
- expect_filtered_search_input_empty
- end
-
- it 'selects `any label`' do
- find("#{js_dropdown_label} .filter-dropdown-item", text: 'Any').click
-
- expect(page).not_to have_css(js_dropdown_label)
- expect_tokens([label_token('Any', false)])
- expect_filtered_search_input_empty
- end
- end
-
- describe 'input has existing content' do
- it 'opens label dropdown with existing search term' do
- filtered_search.set('searchTerm label:')
-
- expect(page).to have_css(js_dropdown_label)
- end
-
- it 'opens label dropdown with existing author' do
- filtered_search.set('author:@person label:')
-
- expect(page).to have_css(js_dropdown_label)
- end
-
- it 'opens label dropdown with existing assignee' do
- filtered_search.set('assignee:@person label:')
-
- expect(page).to have_css(js_dropdown_label)
- end
-
- it 'opens label dropdown with existing label' do
- filtered_search.set('label:~urgent label:')
-
- expect(page).to have_css(js_dropdown_label)
- end
-
- it 'opens label dropdown with existing milestone' do
- filtered_search.set('milestone:%v2.0 label:')
-
- expect(page).to have_css(js_dropdown_label)
- end
-
- it 'opens label dropdown with existing my-reaction' do
- filtered_search.set('my-reaction:star label:')
-
- expect(page).to have_css(js_dropdown_label)
- end
- end
-
- describe 'caching requests' do
- it 'caches requests after the first load' do
create(:label, project: project, title: 'bug-label')
- init_label_search
-
- expect(filter_dropdown).to have_selector('.filter-dropdown-item', count: 1)
-
- create(:label, project: project)
- clear_search_field
- init_label_search
+ filtered_search.set('label=')
- expect(filter_dropdown).to have_selector('.filter-dropdown-item', count: 1)
+ expect_filtered_search_dropdown_results(filter_dropdown, 1)
end
end
end
diff --git a/spec/features/issues/filtered_search/dropdown_milestone_spec.rb b/spec/features/issues/filtered_search/dropdown_milestone_spec.rb
index 5272a970a60..1f62a8e0c8d 100644
--- a/spec/features/issues/filtered_search/dropdown_milestone_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_milestone_spec.rb
@@ -9,26 +9,9 @@ describe 'Dropdown milestone', :js do
let!(:user) { create(:user) }
let!(:milestone) { create(:milestone, title: 'v1.0', project: project) }
let!(:uppercase_milestone) { create(:milestone, title: 'CAP_MILESTONE', project: project) }
- let!(:two_words_milestone) { create(:milestone, title: 'Future Plan', project: project) }
- let!(:wont_fix_milestone) { create(:milestone, title: 'Won"t Fix', project: project) }
- let!(:special_milestone) { create(:milestone, title: '!@#$%^&*(+)', project: project) }
- let!(:long_milestone) { create(:milestone, title: 'this is a very long title this is a very long title this is a very long title this is a very long title this is a very long title', project: project) }
let(:filtered_search) { find('.filtered-search') }
- let(:js_dropdown_milestone) { '#js-dropdown-milestone' }
- let(:filter_dropdown) { find("#{js_dropdown_milestone} .filter-dropdown") }
-
- def dropdown_milestone_size
- filter_dropdown.all('.filter-dropdown-item').size
- end
-
- def click_milestone(text)
- find('#js-dropdown-milestone .filter-dropdown .filter-dropdown-item', text: text).click
- end
-
- def click_static_milestone(text)
- find('#js-dropdown-milestone .filter-dropdown-item', text: text).click
- end
+ let(:filter_dropdown) { find('#js-dropdown-milestone .filter-dropdown') }
before do
project.add_maintainer(user)
@@ -39,240 +22,12 @@ describe 'Dropdown milestone', :js do
end
describe 'behavior' do
- context 'filters by "milestone:"' do
- before do
- filtered_search.set('milestone:')
- end
-
- it 'opens when the search bar has milestone:' do
- expect(page).to have_css(js_dropdown_milestone, visible: true)
- end
-
- it 'closes when the search bar is unfocused' do
- find('body').click
-
- expect(page).to have_css(js_dropdown_milestone, visible: false)
- end
-
- it 'hides loading indicator when loaded' do
- expect(find(js_dropdown_milestone)).not_to have_css('.filter-dropdown-loading')
- end
-
- it 'loads all the milestones when opened' do
- expect(filter_dropdown).to have_selector('.filter-dropdown .filter-dropdown-item', count: 6)
- end
- end
-
- it 'shows loading indicator when opened' do
- slow_requests do
- filtered_search.set('milestone:')
-
- expect(page).to have_css('#js-dropdown-milestone .filter-dropdown-loading', visible: true)
- end
- end
- end
-
- describe 'filtering' do
before do
- filtered_search.set('milestone:')
-
- expect(find("#{js_dropdown_milestone} .filter-dropdown")).to have_content(milestone.title)
- expect(find("#{js_dropdown_milestone} .filter-dropdown")).to have_content(uppercase_milestone.title)
- expect(find("#{js_dropdown_milestone} .filter-dropdown")).to have_content(two_words_milestone.title)
- expect(find("#{js_dropdown_milestone} .filter-dropdown")).to have_content(wont_fix_milestone.title)
- expect(find("#{js_dropdown_milestone} .filter-dropdown")).to have_content(special_milestone.title)
- expect(find("#{js_dropdown_milestone} .filter-dropdown")).to have_content(long_milestone.title)
- end
-
- it 'filters by name' do
- filtered_search.send_keys('v1')
-
- expect(filter_dropdown).to have_selector('.filter-dropdown .filter-dropdown-item', count: 1)
- end
-
- it 'filters by case insensitive name' do
- filtered_search.send_keys('V1')
-
- expect(filter_dropdown).to have_selector('.filter-dropdown .filter-dropdown-item', count: 1)
+ filtered_search.set('milestone=')
end
- it 'filters by name with symbol' do
- filtered_search.send_keys('%v1')
-
- expect(filter_dropdown).to have_selector('.filter-dropdown .filter-dropdown-item', count: 1)
- end
-
- it 'filters by case insensitive name with symbol' do
- filtered_search.send_keys('%V1')
-
- expect(filter_dropdown).to have_selector('.filter-dropdown .filter-dropdown-item', count: 1)
- end
-
- it 'filters by special characters' do
- filtered_search.send_keys('(+')
-
- expect(filter_dropdown).to have_selector('.filter-dropdown .filter-dropdown-item', count: 1)
- end
-
- it 'filters by special characters with symbol' do
- filtered_search.send_keys('%(+')
-
- expect(filter_dropdown).to have_selector('.filter-dropdown .filter-dropdown-item', count: 1)
- end
- end
-
- describe 'selecting from dropdown' do
- before do
- filtered_search.set('milestone:')
-
- expect(find("#{js_dropdown_milestone} .filter-dropdown")).to have_content(milestone.title)
- expect(find("#{js_dropdown_milestone} .filter-dropdown")).to have_content(uppercase_milestone.title)
- expect(find("#{js_dropdown_milestone} .filter-dropdown")).to have_content(two_words_milestone.title)
- expect(find("#{js_dropdown_milestone} .filter-dropdown")).to have_content(wont_fix_milestone.title)
- expect(find("#{js_dropdown_milestone} .filter-dropdown")).to have_content(special_milestone.title)
- expect(find("#{js_dropdown_milestone} .filter-dropdown")).to have_content(long_milestone.title)
- end
-
- it 'fills in the milestone name when the milestone has not been filled' do
- click_milestone(milestone.title)
-
- expect(page).to have_css(js_dropdown_milestone, visible: false)
- expect_tokens([milestone_token(milestone.title)])
- expect_filtered_search_input_empty
- end
-
- it 'fills in the milestone name when the milestone is partially filled', :quarantine do
- filtered_search.send_keys('v')
- click_milestone(milestone.title)
-
- expect(page).to have_css(js_dropdown_milestone, visible: false)
- expect_tokens([milestone_token(milestone.title)])
- expect_filtered_search_input_empty
- end
-
- it 'fills in the milestone name that contains multiple words' do
- click_milestone(two_words_milestone.title)
-
- expect(page).to have_css(js_dropdown_milestone, visible: false)
- expect_tokens([milestone_token("\"#{two_words_milestone.title}\"")])
- expect_filtered_search_input_empty
- end
-
- it 'fills in the milestone name that contains multiple words and is very long' do
- click_milestone(long_milestone.title)
-
- expect(page).to have_css(js_dropdown_milestone, visible: false)
- expect_tokens([milestone_token("\"#{long_milestone.title}\"")])
- expect_filtered_search_input_empty
- end
-
- it 'fills in the milestone name that contains double quotes' do
- click_milestone(wont_fix_milestone.title)
-
- expect(page).to have_css(js_dropdown_milestone, visible: false)
- expect_tokens([milestone_token("'#{wont_fix_milestone.title}'")])
- expect_filtered_search_input_empty
- end
-
- it 'fills in the milestone name with the correct capitalization' do
- click_milestone(uppercase_milestone.title)
-
- expect(page).to have_css(js_dropdown_milestone, visible: false)
- expect_tokens([milestone_token(uppercase_milestone.title)])
- expect_filtered_search_input_empty
- end
-
- it 'fills in the milestone name with special characters' do
- click_milestone(special_milestone.title)
-
- expect(page).to have_css(js_dropdown_milestone, visible: false)
- expect_tokens([milestone_token(special_milestone.title)])
- expect_filtered_search_input_empty
- end
-
- it 'selects `no milestone`' do
- click_static_milestone('None')
-
- expect(page).to have_css(js_dropdown_milestone, visible: false)
- expect_tokens([milestone_token('None', false)])
- expect_filtered_search_input_empty
- end
-
- it 'selects `any milestone`' do
- click_static_milestone('Any')
-
- expect(page).to have_css(js_dropdown_milestone, visible: false)
- expect_tokens([milestone_token('Any', false)])
- expect_filtered_search_input_empty
- end
-
- it 'selects `upcoming milestone`' do
- click_static_milestone('Upcoming')
-
- expect(page).to have_css(js_dropdown_milestone, visible: false)
- expect_tokens([milestone_token('Upcoming', false)])
- expect_filtered_search_input_empty
- end
-
- it 'selects `started milestones`' do
- click_static_milestone('Started')
-
- expect(page).to have_css(js_dropdown_milestone, visible: false)
- expect_tokens([milestone_token('Started', false)])
- expect_filtered_search_input_empty
- end
- end
-
- describe 'input has existing content' do
- it 'opens milestone dropdown with existing search term' do
- filtered_search.set('searchTerm milestone:')
-
- expect(page).to have_css(js_dropdown_milestone, visible: true)
- end
-
- it 'opens milestone dropdown with existing author' do
- filtered_search.set('author:@john milestone:')
-
- expect(page).to have_css(js_dropdown_milestone, visible: true)
- end
-
- it 'opens milestone dropdown with existing assignee' do
- filtered_search.set('assignee:@john milestone:')
-
- expect(page).to have_css(js_dropdown_milestone, visible: true)
- end
-
- it 'opens milestone dropdown with existing label' do
- filtered_search.set('label:~important milestone:')
-
- expect(page).to have_css(js_dropdown_milestone, visible: true)
- end
-
- it 'opens milestone dropdown with existing milestone' do
- filtered_search.set('milestone:%100 milestone:')
-
- expect(page).to have_css(js_dropdown_milestone, visible: true)
- end
-
- it 'opens milestone dropdown with existing my-reaction' do
- filtered_search.set('my-reaction:star milestone:')
-
- expect(page).to have_css(js_dropdown_milestone, visible: true)
- end
- end
-
- describe 'caching requests' do
- it 'caches requests after the first load' do
- filtered_search.set('milestone:')
- initial_size = dropdown_milestone_size
-
- expect(initial_size).to be > 0
-
- create(:milestone, project: project)
- find('.filtered-search-box .clear-search').click
- filtered_search.set('milestone:')
-
- expect(dropdown_milestone_size).to eq(initial_size)
+ it 'loads all the milestones when opened' do
+ expect_filtered_search_dropdown_results(filter_dropdown, 2)
end
end
end
diff --git a/spec/features/issues/filtered_search/dropdown_release_spec.rb b/spec/features/issues/filtered_search/dropdown_release_spec.rb
index eea7f2d7848..fd0a98f9ddc 100644
--- a/spec/features/issues/filtered_search/dropdown_release_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_release_spec.rb
@@ -10,13 +10,8 @@ describe 'Dropdown release', :js do
let!(:release) { create(:release, tag: 'v1.0', project: project) }
let!(:crazy_release) { create(:release, tag: '☺!/"#%&\'{}+,-.<>;=@]_`{|}🚀', project: project) }
- def filtered_search
- find('.filtered-search')
- end
-
- def filter_dropdown
- find('#js-dropdown-release .filter-dropdown')
- end
+ let(:filtered_search) { find('.filtered-search') }
+ let(:filter_dropdown) { find('#js-dropdown-release .filter-dropdown') }
before do
project.add_maintainer(user)
@@ -28,28 +23,11 @@ describe 'Dropdown release', :js do
describe 'behavior' do
before do
- filtered_search.set('release:')
- end
-
- def expect_results(count)
- expect(filter_dropdown).to have_selector('.filter-dropdown .filter-dropdown-item', count: count)
+ filtered_search.set('release=')
end
it 'loads all the releases when opened' do
- expect_results(2)
- end
-
- it 'filters by tag name' do
- filtered_search.send_keys("☺")
- expect_results(1)
- end
-
- it 'fills in the release name when the autocomplete hint is clicked' do
- find('#js-dropdown-release .filter-dropdown-item', text: crazy_release.tag).click
-
- expect(page).to have_css('#js-dropdown-release', visible: false)
- expect_tokens([release_token(crazy_release.tag)])
- expect_filtered_search_input_empty
+ expect_filtered_search_dropdown_results(filter_dropdown, 2)
end
end
end
diff --git a/spec/features/issues/filtered_search/filter_issues_spec.rb b/spec/features/issues/filtered_search/filter_issues_spec.rb
index 8b5e7934ec1..c99c205d5da 100644
--- a/spec/features/issues/filtered_search/filter_issues_spec.rb
+++ b/spec/features/issues/filtered_search/filter_issues_spec.rb
@@ -67,7 +67,7 @@ describe 'Filter issues', :js do
it 'filters by all available tokens' do
search_term = 'issue'
- input_filtered_search("assignee:@#{user.username} author:@#{user.username} label:~#{caps_sensitive_label.title} milestone:%#{milestone.title} #{search_term}")
+ input_filtered_search("assignee=@#{user.username} author=@#{user.username} label=~#{caps_sensitive_label.title} milestone=%#{milestone.title} #{search_term}")
wait_for_requests
@@ -84,7 +84,7 @@ describe 'Filter issues', :js do
describe 'filter issues by author' do
context 'only author' do
it 'filters issues by searched author' do
- input_filtered_search("author:@#{user.username}")
+ input_filtered_search("author=@#{user.username}")
wait_for_requests
@@ -98,7 +98,7 @@ describe 'Filter issues', :js do
describe 'filter issues by assignee' do
context 'only assignee' do
it 'filters issues by searched assignee' do
- input_filtered_search("assignee:@#{user.username}")
+ input_filtered_search("assignee=@#{user.username}")
wait_for_requests
@@ -108,7 +108,7 @@ describe 'Filter issues', :js do
end
it 'filters issues by no assignee' do
- input_filtered_search('assignee:none')
+ input_filtered_search('assignee=none')
expect_tokens([assignee_token('None')])
expect_issues_list_count(3)
@@ -122,7 +122,7 @@ describe 'Filter issues', :js do
it 'filters issues by multiple assignees' do
create(:issue, project: project, author: user, assignees: [user2, user])
- input_filtered_search("assignee:@#{user.username} assignee:@#{user2.username}")
+ input_filtered_search("assignee=@#{user.username} assignee=@#{user2.username}")
expect_tokens([
assignee_token(user.name),
@@ -138,15 +138,31 @@ describe 'Filter issues', :js do
describe 'filter issues by label' do
context 'only label' do
it 'filters issues by searched label' do
- input_filtered_search("label:~#{bug_label.title}")
+ input_filtered_search("label=~#{bug_label.title}")
expect_tokens([label_token(bug_label.title)])
expect_issues_list_count(2)
expect_filtered_search_input_empty
end
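+ # The '!=' operator excludes issues that match the given value.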
+ it 'filters issues not containing the searched label' do
+ input_filtered_search("label!=~#{bug_label.title}")
+
+ expect_tokens([label_token(bug_label.title)])
+ expect_issues_list_count(6)
+ expect_filtered_search_input_empty
+ end
+
it 'filters issues by no label' do
- input_filtered_search('label:none')
+ input_filtered_search('label=none')
+
+ expect_tokens([label_token('None', false)])
+ expect_issues_list_count(4)
+ expect_filtered_search_input_empty
+ end
+
+ it 'filters issues by negation of no label' do
+ input_filtered_search('label!=none')
expect_tokens([label_token('None', false)])
expect_issues_list_count(4)
@@ -154,7 +170,18 @@ describe 'Filter issues', :js do
end
it 'filters issues by multiple labels' do
- input_filtered_search("label:~#{bug_label.title} label:~#{caps_sensitive_label.title}")
+ input_filtered_search("label=~#{bug_label.title} label=~#{caps_sensitive_label.title}")
+
+ expect_tokens([
+ label_token(bug_label.title),
+ label_token(caps_sensitive_label.title)
+ ])
+ expect_issues_list_count(1)
+ expect_filtered_search_input_empty
+ end
+
+ it 'filters issues by multiple labels with not operator' do
+ input_filtered_search("label!=~#{bug_label.title} label=~#{caps_sensitive_label.title}")
expect_tokens([
label_token(bug_label.title),
@@ -169,22 +196,42 @@ describe 'Filter issues', :js do
special_issue = create(:issue, title: "Issue with special character label", project: project)
special_issue.labels << special_label
- input_filtered_search("label:~#{special_label.title}")
+ input_filtered_search("label=~#{special_label.title}")
expect_tokens([label_token(special_label.title)])
expect_issues_list_count(1)
expect_filtered_search_input_empty
end
+ it 'filters out issues with a label containing special characters' do
+ special_label = create(:label, project: project, title: '!@#{$%^&*()-+[]<>?/:{}|\}')
+ special_issue = create(:issue, title: "Issue with special character label", project: project)
+ special_issue.labels << special_label
+
+ input_filtered_search("label!=~#{special_label.title}")
+
+ expect_tokens([label_token(special_label.title)])
+ expect_issues_list_count(8)
+ expect_filtered_search_input_empty
+ end
+
it 'does not show issues for unused labels' do
new_label = create(:label, project: project, title: 'new_label')
- input_filtered_search("label:~#{new_label.title}")
+ input_filtered_search("label=~#{new_label.title}")
expect_tokens([label_token(new_label.title)])
expect_no_issues_list
expect_filtered_search_input_empty
end
+
+ it 'shows issues without the bug label' do
+ input_filtered_search("label!=~#{bug_label.title}")
+
+ expect_tokens([label_token(bug_label.title)])
+ expect_issues_list_count(6)
+ expect_filtered_search_input_empty
+ end
end
context 'label with multiple words' do
@@ -193,7 +240,7 @@ describe 'Filter issues', :js do
special_multiple_issue = create(:issue, title: "Issue with special character multiple words label", project: project)
special_multiple_issue.labels << special_multiple_label
- input_filtered_search("label:~'#{special_multiple_label.title}'")
+ input_filtered_search("label=~'#{special_multiple_label.title}'")
# Check for search results (which makes sure that the page has changed)
expect_issues_list_count(1)
@@ -205,7 +252,7 @@ describe 'Filter issues', :js do
end
it 'single quotes' do
- input_filtered_search("label:~'#{multiple_words_label.title}'")
+ input_filtered_search("label=~'#{multiple_words_label.title}'")
expect_issues_list_count(1)
expect_tokens([label_token("\"#{multiple_words_label.title}\"")])
@@ -213,7 +260,7 @@ describe 'Filter issues', :js do
end
it 'double quotes' do
- input_filtered_search("label:~\"#{multiple_words_label.title}\"")
+ input_filtered_search("label=~\"#{multiple_words_label.title}\"")
expect_tokens([label_token("\"#{multiple_words_label.title}\"")])
expect_issues_list_count(1)
@@ -225,7 +272,7 @@ describe 'Filter issues', :js do
double_quotes_label_issue = create(:issue, title: "Issue with double quotes label", project: project)
double_quotes_label_issue.labels << double_quotes_label
- input_filtered_search("label:~'#{double_quotes_label.title}'")
+ input_filtered_search("label=~'#{double_quotes_label.title}'")
expect_tokens([label_token("'#{double_quotes_label.title}'")])
expect_issues_list_count(1)
@@ -237,7 +284,7 @@ describe 'Filter issues', :js do
single_quotes_label_issue = create(:issue, title: "Issue with single quotes label", project: project)
single_quotes_label_issue.labels << single_quotes_label
- input_filtered_search("label:~\"#{single_quotes_label.title}\"")
+ input_filtered_search("label=~\"#{single_quotes_label.title}\"")
expect_tokens([label_token("\"#{single_quotes_label.title}\"")])
expect_issues_list_count(1)
@@ -249,7 +296,7 @@ describe 'Filter issues', :js do
it 'filters issues by searched label, label2, author, assignee, milestone and text' do
search_term = 'bug'
- input_filtered_search("label:~#{bug_label.title} label:~#{caps_sensitive_label.title} author:@#{user.username} assignee:@#{user.username} milestone:%#{milestone.title} #{search_term}")
+ input_filtered_search("label=~#{bug_label.title} label=~#{caps_sensitive_label.title} author=@#{user.username} assignee=@#{user.username} milestone=%#{milestone.title} #{search_term}")
wait_for_requests
@@ -263,6 +310,24 @@ describe 'Filter issues', :js do
expect_issues_list_count(1)
expect_filtered_search_input(search_term)
end
+
+ it 'filters issues by searched label, label2, author, assignee, negated milestone and text' do
+ search_term = 'bug'
+
+ input_filtered_search("label=~#{bug_label.title} label=~#{caps_sensitive_label.title} author=@#{user.username} assignee=@#{user.username} milestone!=%#{milestone.title} #{search_term}")
+
+ wait_for_requests
+
+ expect_tokens([
+ label_token(bug_label.title),
+ label_token(caps_sensitive_label.title),
+ author_token(user.name),
+ assignee_token(user.name),
+ milestone_token(milestone.title, false, '!=')
+ ])
+ expect_issues_list_count(0)
+ expect_filtered_search_input(search_term)
+ end
end
context 'issue label clicked' do
@@ -279,7 +344,7 @@ describe 'Filter issues', :js do
describe 'filter issues by milestone' do
context 'only milestone' do
it 'filters issues by searched milestone' do
- input_filtered_search("milestone:%#{milestone.title}")
+ input_filtered_search("milestone=%#{milestone.title}")
expect_tokens([milestone_token(milestone.title)])
expect_issues_list_count(5)
@@ -287,53 +352,102 @@ describe 'Filter issues', :js do
end
it 'filters issues by no milestone' do
- input_filtered_search("milestone:none")
+ input_filtered_search("milestone=none")
expect_tokens([milestone_token('None', false)])
expect_issues_list_count(3)
expect_filtered_search_input_empty
end
+ it 'filters issues by negation of no milestone' do
+ input_filtered_search("milestone!=none ")
+
+ expect_tokens([milestone_token('None', false, '!=')])
+ expect_issues_list_count(5)
+ expect_filtered_search_input_empty
+ end
+
it 'filters issues by upcoming milestones' do
create(:milestone, project: project, due_date: 1.month.from_now) do |future_milestone|
create(:issue, project: project, milestone: future_milestone, author: user)
end
- input_filtered_search("milestone:upcoming")
+ input_filtered_search("milestone=upcoming")
expect_tokens([milestone_token('Upcoming', false)])
expect_issues_list_count(1)
expect_filtered_search_input_empty
end
+ it 'filters issues by negation of upcoming milestones' do
+ create(:milestone, project: project, due_date: 1.month.from_now) do |future_milestone|
+ create(:issue, project: project, milestone: future_milestone, author: user)
+ end
+
+ input_filtered_search("milestone!=upcoming")
+
+ expect_tokens([milestone_token('Upcoming', false, '!=')])
+ expect_issues_list_count(8)
+ expect_filtered_search_input_empty
+ end
+
it 'filters issues by started milestones' do
- input_filtered_search("milestone:started")
+ input_filtered_search("milestone=started")
expect_tokens([milestone_token('Started', false)])
expect_issues_list_count(5)
expect_filtered_search_input_empty
end
+ it 'filters issues by negation of started milestones' do
+ input_filtered_search("milestone!=started")
+
+ expect_tokens([milestone_token('Started', false, '!=')])
+ expect_issues_list_count(3)
+ expect_filtered_search_input_empty
+ end
+
it 'filters issues by milestone containing special characters' do
special_milestone = create(:milestone, title: '!@\#{$%^&*()}', project: project)
create(:issue, project: project, milestone: special_milestone)
- input_filtered_search("milestone:%#{special_milestone.title}")
+ input_filtered_search("milestone=%#{special_milestone.title}")
expect_tokens([milestone_token(special_milestone.title)])
expect_issues_list_count(1)
expect_filtered_search_input_empty
end
+ it 'filters out issues with a milestone containing special characters' do
+ special_milestone = create(:milestone, title: '!@\#{$%^&*()}', project: project)
+ create(:issue, project: project, milestone: special_milestone)
+
+ input_filtered_search("milestone!=%#{special_milestone.title}")
+
+ expect_tokens([milestone_token(special_milestone.title, false, '!=')])
+ expect_issues_list_count(8)
+ expect_filtered_search_input_empty
+ end
+
it 'does not show issues for unused milestones' do
new_milestone = create(:milestone, title: 'new', project: project)
- input_filtered_search("milestone:%#{new_milestone.title}")
+ input_filtered_search("milestone=%#{new_milestone.title}")
expect_tokens([milestone_token(new_milestone.title)])
expect_no_issues_list
expect_filtered_search_input_empty
end
+
+ it 'shows issues not assigned to an unused milestone' do
+ new_milestone = create(:milestone, title: 'new', project: project)
+
+ input_filtered_search("milestone!=%#{new_milestone.title}")
+
+ expect_tokens([milestone_token(new_milestone.title, false, '!=')])
+ expect_issues_list_count(8)
+ expect_filtered_search_input_empty
+ end
end
end
@@ -407,7 +521,7 @@ describe 'Filter issues', :js do
context 'searched text with other filters' do
it 'filters issues by searched text, author, text, assignee, text, label1, text, label2, text, milestone and text' do
- input_filtered_search("bug author:@#{user.username} report label:~#{bug_label.title} label:~#{caps_sensitive_label.title} milestone:%#{milestone.title} foo")
+ input_filtered_search("bug author=@#{user.username} report label=~#{bug_label.title} label=~#{caps_sensitive_label.title} milestone=%#{milestone.title} foo")
expect_issues_list_count(1)
expect_filtered_search_input('bug report foo')
@@ -475,65 +589,13 @@ describe 'Filter issues', :js do
end
end
- describe 'RSS feeds' do
- let(:group) { create(:group) }
- let(:project) { create(:project, group: group) }
-
- before do
- group.add_developer(user)
- end
-
- shared_examples 'updates atom feed link' do |type|
- it "for #{type}" do
- visit path
-
- link = find_link('Subscribe to RSS feed')
- params = CGI.parse(URI.parse(link[:href]).query)
- auto_discovery_link = find('link[type="application/atom+xml"]', visible: false)
- auto_discovery_params = CGI.parse(URI.parse(auto_discovery_link[:href]).query)
-
- expected = {
- 'feed_token' => [user.feed_token],
- 'milestone_title' => [milestone.title],
- 'assignee_id' => [user.id.to_s]
- }
-
- expect(params).to include(expected)
- expect(auto_discovery_params).to include(expected)
- end
- end
-
- it_behaves_like 'updates atom feed link', :project do
- let(:path) { project_issues_path(project, milestone_title: milestone.title, assignee_id: user.id) }
- end
-
- it_behaves_like 'updates atom feed link', :group do
- let(:path) { issues_group_path(group, milestone_title: milestone.title, assignee_id: user.id) }
- end
-
- it 'updates atom feed link for group issues' do
- visit issues_group_path(group, milestone_title: milestone.title, assignee_id: user.id)
- link = find('.nav-controls a[title="Subscribe to RSS feed"]', visible: false)
- params = CGI.parse(URI.parse(link[:href]).query)
- auto_discovery_link = find('link[type="application/atom+xml"]', visible: false)
- auto_discovery_params = CGI.parse(URI.parse(auto_discovery_link[:href]).query)
-
- expect(params).to include('feed_token' => [user.feed_token])
- expect(params).to include('milestone_title' => [milestone.title])
- expect(params).to include('assignee_id' => [user.id.to_s])
- expect(auto_discovery_params).to include('feed_token' => [user.feed_token])
- expect(auto_discovery_params).to include('milestone_title' => [milestone.title])
- expect(auto_discovery_params).to include('assignee_id' => [user.id.to_s])
- end
- end
-
context 'URL has a trailing slash' do
before do
visit "#{project_issues_path(project)}/"
end
it 'milestone dropdown loads milestones' do
- input_filtered_search("milestone:", submit: false)
+ input_filtered_search("milestone=", submit: false)
within('#js-dropdown-milestone') do
expect(page).to have_selector('.filter-dropdown .filter-dropdown-item', count: 1)
@@ -541,7 +603,7 @@ describe 'Filter issues', :js do
end
it 'label dropdown loads labels' do
- input_filtered_search("label:", submit: false)
+ input_filtered_search("label=", submit: false)
within('#js-dropdown-label') do
expect(page).to have_selector('.filter-dropdown .filter-dropdown-item', count: 3)
diff --git a/spec/features/issues/filtered_search/recent_searches_spec.rb b/spec/features/issues/filtered_search/recent_searches_spec.rb
index c038281d825..e05c7aa3af5 100644
--- a/spec/features/issues/filtered_search/recent_searches_spec.rb
+++ b/spec/features/issues/filtered_search/recent_searches_spec.rb
@@ -41,8 +41,8 @@ describe 'Recent searches', :js do
items = all('.filtered-search-history-dropdown-item', visible: false, count: 2)
- expect(items[0].text).to eq('label: ~qux garply')
- expect(items[1].text).to eq('label: ~foo bar')
+ expect(items[0].text).to eq('label: = ~qux garply')
+ expect(items[1].text).to eq('label: = ~foo bar')
end
it 'saved recent searches are restored last on the list' do
diff --git a/spec/features/issues/filtered_search/search_bar_spec.rb b/spec/features/issues/filtered_search/search_bar_spec.rb
index e97314e02e6..ad994270218 100644
--- a/spec/features/issues/filtered_search/search_bar_spec.rb
+++ b/spec/features/issues/filtered_search/search_bar_spec.rb
@@ -34,7 +34,7 @@ describe 'Search bar', :js do
it 'selects item' do
filtered_search.native.send_keys(:down, :down, :enter)
- expect_tokens([author_token])
+ expect_tokens([{ name: 'Assignee' }])
expect_filtered_search_input_empty
end
end
@@ -78,7 +78,7 @@ describe 'Search bar', :js do
filtered_search.click
original_size = page.all('#js-dropdown-hint .filter-dropdown .filter-dropdown-item').size
- filtered_search.set('author')
+ filtered_search.set('autho')
expect(find('#js-dropdown-hint')).to have_selector('.filter-dropdown .filter-dropdown-item', count: 1)
diff --git a/spec/features/issues/filtered_search/visual_tokens_spec.rb b/spec/features/issues/filtered_search/visual_tokens_spec.rb
index f8035ef4b85..2af2e096bcc 100644
--- a/spec/features/issues/filtered_search/visual_tokens_spec.rb
+++ b/spec/features/issues/filtered_search/visual_tokens_spec.rb
@@ -34,17 +34,15 @@ describe 'Visual tokens', :js do
visit project_issues_path(project)
end
- describe 'editing author token' do
+ describe 'editing a single token' do
before do
- input_filtered_search('author:@root assignee:none', submit: false)
+ input_filtered_search('author=@root assignee=none', submit: false)
first('.tokens-container .filtered-search-token').click
+ wait_for_requests
end
it 'opens author dropdown' do
expect(page).to have_css('#js-dropdown-author', visible: true)
- end
-
- it 'makes value editable' do
expect_filtered_search_input('@root')
end
@@ -77,143 +75,10 @@ describe 'Visual tokens', :js do
end
end
- describe 'editing assignee token' do
- before do
- input_filtered_search('assignee:@root author:none', submit: false)
- first('.tokens-container .filtered-search-token').double_click
- end
-
- it 'opens assignee dropdown' do
- expect(page).to have_css('#js-dropdown-assignee', visible: true)
- end
-
- it 'makes value editable' do
- expect_filtered_search_input('@root')
- end
-
- it 'filters value' do
- filtered_search.send_keys(:backspace)
-
- expect(page).to have_css('#js-dropdown-assignee .filter-dropdown .filter-dropdown-item', count: 1)
- end
-
- it 'ends editing mode when document is clicked' do
- find('#content-body').click
-
- expect_filtered_search_input_empty
- expect(page).to have_css('#js-dropdown-assignee', visible: false)
- end
-
- describe 'selecting static option from dropdown' do
- before do
- find("#js-dropdown-assignee").find('.filter-dropdown-item', text: 'None').click
- end
-
- it 'changes value in visual token' do
- expect(first('.tokens-container .filtered-search-token .value').text).to eq('None')
- end
-
- it 'moves input to the right' do
- expect(is_input_focused).to eq(true)
- end
- end
- end
-
- describe 'editing milestone token' do
- before do
- input_filtered_search('milestone:%10.0 author:none', submit: false)
- first('.tokens-container .filtered-search-token').click
- first('#js-dropdown-milestone .filter-dropdown .filter-dropdown-item')
- end
-
- it 'opens milestone dropdown' do
- expect(filter_milestone_dropdown.find('.filter-dropdown-item', text: milestone_ten.title)).to be_visible
- expect(filter_milestone_dropdown.find('.filter-dropdown-item', text: milestone_nine.title)).to be_visible
- expect(page).to have_css('#js-dropdown-milestone', visible: true)
- end
-
- it 'selects static option from dropdown' do
- find("#js-dropdown-milestone").find('.filter-dropdown-item', text: 'Upcoming').click
-
- expect(first('.tokens-container .filtered-search-token .value').text).to eq('Upcoming')
- expect(is_input_focused).to eq(true)
- end
-
- it 'makes value editable' do
- expect_filtered_search_input('%10.0')
- end
-
- it 'filters value' do
- filtered_search.send_keys(:backspace)
-
- expect(page).to have_css('#js-dropdown-milestone .filter-dropdown .filter-dropdown-item', count: 1)
- end
-
- it 'ends editing mode when document is clicked' do
- find('#content-body').click
-
- expect_filtered_search_input_empty
- expect(page).to have_css('#js-dropdown-milestone', visible: false)
- end
- end
-
- describe 'editing label token' do
- before do
- input_filtered_search("label:~#{label.title} author:none", submit: false)
- first('.tokens-container .filtered-search-token').double_click
- first('#js-dropdown-label .filter-dropdown .filter-dropdown-item')
- end
-
- it 'opens label dropdown' do
- expect(filter_label_dropdown.find('.filter-dropdown-item', text: label.title)).to be_visible
- expect(filter_label_dropdown.find('.filter-dropdown-item', text: cc_label.title)).to be_visible
- expect(page).to have_css('#js-dropdown-label', visible: true)
- end
-
- it 'selects option from dropdown' do
- expect(filter_label_dropdown.find('.filter-dropdown-item', text: label.title)).to be_visible
- expect(filter_label_dropdown.find('.filter-dropdown-item', text: cc_label.title)).to be_visible
-
- find("#js-dropdown-label").find('.filter-dropdown-item', text: cc_label.title).click
-
- expect(first('.tokens-container .filtered-search-token .value').text).to eq("~\"#{cc_label.title}\"")
- expect(is_input_focused).to eq(true)
- end
-
- it 'makes value editable' do
- expect_filtered_search_input("~#{label.title}")
- end
-
- it 'filters value' do
- expect(filter_label_dropdown.find('.filter-dropdown-item', text: label.title)).to be_visible
- expect(filter_label_dropdown.find('.filter-dropdown-item', text: cc_label.title)).to be_visible
-
- filtered_search.send_keys(:backspace)
-
- filter_label_dropdown.find('.filter-dropdown-item')
-
- expect(page.all('#js-dropdown-label .filter-dropdown .filter-dropdown-item').size).to eq(1)
- end
-
- it 'ends editing mode when document is clicked' do
- find('#content-body').click
-
- expect_filtered_search_input_empty
- expect(page).to have_css('#js-dropdown-label', visible: false)
- end
-
- it 'ends editing mode when scroll container is clicked' do
- find('.scroll-container').click
-
- expect_filtered_search_input_empty
- expect(page).to have_css('#js-dropdown-label', visible: false)
- end
- end
-
describe 'editing multiple tokens' do
before do
- input_filtered_search('author:@root assignee:none', submit: false)
- first('.tokens-container .filtered-search-token').double_click
+ input_filtered_search('author=@root assignee=none', submit: false)
+ first('.tokens-container .filtered-search-token').click
end
it 'opens author dropdown' do
@@ -221,31 +86,33 @@ describe 'Visual tokens', :js do
end
it 'opens assignee dropdown' do
- find('.tokens-container .filtered-search-token', text: 'Assignee').double_click
+ find('.tokens-container .filtered-search-token', text: 'Assignee').click
expect(page).to have_css('#js-dropdown-assignee', visible: true)
end
end
describe 'editing a search term while editing another filter token' do
before do
- input_filtered_search('author assignee:', submit: false)
- first('.tokens-container .filtered-search-term').double_click
- end
-
- it 'opens hint dropdown' do
- expect(page).to have_css('#js-dropdown-hint', visible: true)
+ input_filtered_search('foo assignee=', submit: false)
+ first('.tokens-container .filtered-search-term').click
end
it 'opens author dropdown' do
- find('#js-dropdown-hint .filter-dropdown .filter-dropdown-item', text: 'author').click
+ find('#js-dropdown-hint .filter-dropdown .filter-dropdown-item', text: 'Author').click
+
+ expect(page).to have_css('#js-dropdown-operator', visible: true)
+ expect(page).to have_css('#js-dropdown-author', visible: false)
+ find('#js-dropdown-operator .filter-dropdown .filter-dropdown-item[data-value="="]').click
+
+ expect(page).to have_css('#js-dropdown-operator', visible: false)
expect(page).to have_css('#js-dropdown-author', visible: true)
end
end
describe 'add new token after editing existing token' do
before do
- input_filtered_search('author:@root assignee:none', submit: false)
+ input_filtered_search('author=@root assignee=none', submit: false)
first('.tokens-container .filtered-search-token').double_click
filtered_search.send_keys(' ')
end
@@ -255,63 +122,25 @@ describe 'Visual tokens', :js do
expect(page).to have_css('#js-dropdown-hint', visible: true)
end
- it 'opens author dropdown' do
- filtered_search.send_keys('author:')
- expect(page).to have_css('#js-dropdown-author', visible: true)
- end
-
- it 'opens assignee dropdown' do
- filtered_search.send_keys('assignee:')
- expect(page).to have_css('#js-dropdown-assignee', visible: true)
- end
-
- it 'opens milestone dropdown' do
- filtered_search.send_keys('milestone:')
- expect(page).to have_css('#js-dropdown-milestone', visible: true)
- end
+ it 'opens token dropdown' do
+ filtered_search.send_keys('author=')
- it 'opens label dropdown' do
- filtered_search.send_keys('label:')
- expect(page).to have_css('#js-dropdown-label', visible: true)
+ expect(page).to have_css('#js-dropdown-author', visible: true)
end
end
- describe 'creates visual tokens' do
- it 'creates author token' do
- filtered_search.send_keys('author:@thomas ')
+ describe 'visual tokens' do
+ it 'creates visual token' do
+ filtered_search.send_keys('author=@thomas ')
token = page.all('.tokens-container .filtered-search-token')[1]
expect(token.find('.name').text).to eq('Author')
expect(token.find('.value').text).to eq('@thomas')
end
-
- it 'creates assignee token' do
- filtered_search.send_keys('assignee:@thomas ')
- token = page.all('.tokens-container .filtered-search-token')[1]
-
- expect(token.find('.name').text).to eq('Assignee')
- expect(token.find('.value').text).to eq('@thomas')
- end
-
- it 'creates milestone token' do
- filtered_search.send_keys('milestone:none ')
- token = page.all('.tokens-container .filtered-search-token')[1]
-
- expect(token.find('.name').text).to eq('Milestone')
- expect(token.find('.value').text).to eq('none')
- end
-
- it 'creates label token' do
- filtered_search.send_keys('label:~Backend ')
- token = page.all('.tokens-container .filtered-search-token')[1]
-
- expect(token.find('.name').text).to eq('Label')
- expect(token.find('.value').text).to eq('~Backend')
- end
end
it 'does not tokenize incomplete token' do
- filtered_search.send_keys('author:')
+ filtered_search.send_keys('author=')
find('body').click
token = page.all('.tokens-container .js-visual-token')[1]
@@ -323,7 +152,7 @@ describe 'Visual tokens', :js do
describe 'search using incomplete visual tokens' do
before do
- input_filtered_search('author:@root assignee:none', extra_space: false)
+ input_filtered_search('author=@root assignee=none', extra_space: false)
end
it 'tokenizes the search term to complete visual token' do
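
Note: the "editing a search term while editing another filter token" example above captures the new two-step flow: picking a field from the hint dropdown first opens the operator dropdown, and only after an operator is chosen does the value dropdown appear. An illustrative condensation using the same selectors as the spec:

# Condensed version of the hint -> operator -> value sequence exercised above.
filtered_search.click
find('#js-dropdown-hint .filter-dropdown .filter-dropdown-item', text: 'Author').click

expect(page).to have_css('#js-dropdown-operator', visible: true)   # operator comes first
find('#js-dropdown-operator .filter-dropdown .filter-dropdown-item[data-value="="]').click

expect(page).to have_css('#js-dropdown-author', visible: true)     # then the value dropdown
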
diff --git a/spec/features/issues/rss_spec.rb b/spec/features/issues/rss_spec.rb
index d6a406f4f44..7577df3bc7d 100644
--- a/spec/features/issues/rss_spec.rb
+++ b/spec/features/issues/rss_spec.rb
@@ -3,11 +3,14 @@
require 'spec_helper'
describe 'Project Issues RSS' do
- let(:project) { create(:project, visibility_level: Gitlab::VisibilityLevel::PUBLIC) }
+ let!(:user) { create(:user) }
+ let(:group) { create(:group) }
+ let(:project) { create(:project, group: group, visibility_level: Gitlab::VisibilityLevel::PUBLIC) }
let(:path) { project_issues_path(project) }
before do
- create(:issue, project: project)
+ create(:issue, project: project, assignees: [user])
+ group.add_developer(user)
end
context 'when signed in' do
@@ -31,4 +34,34 @@ describe 'Project Issues RSS' do
it_behaves_like "it has an RSS button without a feed token"
it_behaves_like "an autodiscoverable RSS feed without a feed token"
end
+
+ describe 'feeds' do
+ shared_examples 'updates atom feed link' do |type|
+ it "for #{type}" do
+ sign_in(user)
+ visit path
+
+ link = find_link('Subscribe to RSS feed')
+ params = CGI.parse(URI.parse(link[:href]).query)
+ auto_discovery_link = find('link[type="application/atom+xml"]', visible: false)
+ auto_discovery_params = CGI.parse(URI.parse(auto_discovery_link[:href]).query)
+
+ expected = {
+ 'feed_token' => [user.feed_token],
+ 'assignee_id' => [user.id.to_s]
+ }
+
+ expect(params).to include(expected)
+ expect(auto_discovery_params).to include(expected)
+ end
+ end
+
+ it_behaves_like 'updates atom feed link', :project do
+ let(:path) { project_issues_path(project, assignee_id: user.id) }
+ end
+
+ it_behaves_like 'updates atom feed link', :group do
+ let(:path) { issues_group_path(group, assignee_id: user.id) }
+ end
+ end
end
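
Note: the new 'feeds' block relies on a parameterized shared example whose path is supplied by each it_behaves_like caller. A minimal sketch of the pattern in isolation (the names here are illustrative, not part of the diff):

shared_examples 'adds assignee_id to the feed link' do |scope|
  it "for #{scope}" do
    sign_in(user)
    visit path

    link   = find_link('Subscribe to RSS feed')
    params = CGI.parse(URI.parse(link[:href]).query)

    expect(params).to include('assignee_id' => [user.id.to_s])
  end
end

it_behaves_like 'adds assignee_id to the feed link', :project do
  let(:path) { project_issues_path(project, assignee_id: user.id) }
end
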
diff --git a/spec/features/issues/user_comments_on_issue_spec.rb b/spec/features/issues/user_comments_on_issue_spec.rb
index 829f945c47f..363906b017a 100644
--- a/spec/features/issues/user_comments_on_issue_spec.rb
+++ b/spec/features/issues/user_comments_on_issue_spec.rb
@@ -43,17 +43,17 @@ describe "User comments on issue", :js do
expect(page.find('pre code').text).to eq code_block_content
end
- it "renders escaped HTML content in Mermaid" do
+ it "renders HTML content as text in Mermaid" do
html_content = "<img onerror=location=`javascript\\u003aalert\\u0028document.domain\\u0029` src=x>"
mermaid_content = "graph LR\n B-->D(#{html_content});"
- escaped_content = CGI.escapeHTML(html_content).gsub('=', "&equals;")
comment = "```mermaid\n#{mermaid_content}\n```"
add_note(comment)
wait_for_requests
- expect(page.find('svg.mermaid')).to have_content escaped_content
+ expect(page.find('svg.mermaid')).to have_content html_content
+ within('svg.mermaid') { expect(page).not_to have_selector('img') }
end
it 'opens autocomplete menu for quick actions and have `/label` first choice' do
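
Note: the Mermaid example above now asserts that embedded HTML shows up as literal text inside the rendered SVG, rather than being entity-escaped or turned into markup. A hedged sketch of that assertion style with an illustrative payload (not the one from the spec):

html_content = '<b>not rendered</b>' # illustrative payload

within('svg.mermaid') do
  expect(page).to have_content(html_content)  # the markup is visible as plain text
  expect(page).not_to have_selector('b')      # and was never parsed into a DOM node
end
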
diff --git a/spec/features/issues/user_creates_issue_by_email_spec.rb b/spec/features/issues/user_creates_issue_by_email_spec.rb
new file mode 100644
index 00000000000..c73a65849cc
--- /dev/null
+++ b/spec/features/issues/user_creates_issue_by_email_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Issues > User creates issue by email' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public) }
+
+ before do
+ sign_in(user)
+
+ project.add_developer(user)
+ end
+
+ describe 'new issue by email' do
+ shared_examples 'show the email in the modal' do
+ let(:issue) { create(:issue, project: project) }
+
+ before do
+ project.issues << issue
+ stub_incoming_email_setting(enabled: true, address: "p+%{key}@gl.ab")
+
+ visit project_issues_path(project)
+ click_button('Email a new issue')
+ end
+
+ it 'clicks the button to show the modal for the new email' do
+ page.within '#issuable-email-modal' do
+ email = project.new_issuable_address(user, 'issue')
+
+ expect(page).to have_selector("input[value='#{email}']")
+ end
+ end
+ end
+
+ context 'with existing issues' do
+ let!(:issue) { create(:issue, project: project, author: user) }
+
+ it_behaves_like 'show the email in the modal'
+ end
+
+ context 'without existing issues' do
+ it_behaves_like 'show the email in the modal'
+ end
+ end
+end
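
Note: the new spec stubs incoming email with a %{key} placeholder and expects the modal to show the per-user reply address that Project#new_issuable_address derives from it. A small sketch of that relationship, reusing only calls that appear in the spec:

stub_incoming_email_setting(enabled: true, address: "p+%{key}@gl.ab")

email = project.new_issuable_address(user, 'issue')

# The %{key} placeholder is replaced with a project/user specific token,
# so the generated address keeps the stubbed domain.
expect(email).to end_with('@gl.ab')
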
diff --git a/spec/features/issues/user_creates_issue_spec.rb b/spec/features/issues/user_creates_issue_spec.rb
index 39ce3415727..b0a2a734877 100644
--- a/spec/features/issues/user_creates_issue_spec.rb
+++ b/spec/features/issues/user_creates_issue_spec.rb
@@ -3,8 +3,32 @@
require "spec_helper"
describe "User creates issue" do
- let(:project) { create(:project_empty_repo, :public) }
- let(:user) { create(:user) }
+ include DropzoneHelper
+
+ let_it_be(:project) { create(:project_empty_repo, :public) }
+ let_it_be(:user) { create(:user) }
+
+ context "when unauthenticated" do
+ before do
+ sign_out(:user)
+ end
+
+ it "redirects to signin then back to new issue after signin" do
+ create(:issue, project: project)
+
+ visit project_issues_path(project)
+
+ page.within ".nav-controls" do
+ click_link "New issue"
+ end
+
+ expect(current_path).to eq new_user_session_path
+
+ gitlab_sign_in(create(:user))
+
+ expect(current_path).to eq new_project_issue_path(project)
+ end
+ end
context "when signed in as guest" do
before do
@@ -92,6 +116,104 @@ describe "User creates issue" do
.and have_content(label_titles.first)
end
end
+
+ context 'with due date', :js do
+ it 'saves with due date' do
+ date = Date.today.at_beginning_of_month
+
+ fill_in 'issue_title', with: 'bug 345'
+ fill_in 'issue_description', with: 'bug description'
+ find('#issuable-due-date').click
+
+ page.within '.pika-single' do
+ click_button date.day
+ end
+
+ expect(find('#issuable-due-date').value).to eq date.to_s
+
+ click_button 'Submit issue'
+
+ page.within '.issuable-sidebar' do
+ expect(page).to have_content date.to_s(:medium)
+ end
+ end
+ end
+
+ context 'dropzone upload file', :js do
+ before do
+ visit new_project_issue_path(project)
+ end
+
+ it 'uploads file when dragging into textarea' do
+ dropzone_file Rails.root.join('spec', 'fixtures', 'banana_sample.gif')
+
+ expect(page.find_field("issue_description").value).to have_content 'banana_sample'
+ end
+
+ it "doesn't add double newline to end of a single attachment markdown" do
+ dropzone_file Rails.root.join('spec', 'fixtures', 'banana_sample.gif')
+
+ expect(page.find_field("issue_description").value).not_to match /\n\n$/
+ end
+
+ it "cancels a file upload correctly" do
+ slow_requests do
+ dropzone_file([Rails.root.join('spec', 'fixtures', 'dk.png')], 0, false)
+
+ click_button 'Cancel'
+ end
+
+ expect(page).to have_button('Attach a file')
+ expect(page).not_to have_button('Cancel')
+ expect(page).not_to have_selector('.uploading-progress-container', visible: true)
+ end
+ end
+
+ context 'form filled by URL parameters' do
+ let(:project) { create(:project, :public, :repository) }
+
+ before do
+ project.repository.create_file(
+ user,
+ '.gitlab/issue_templates/bug.md',
+ 'this is a test "bug" template',
+ message: 'added issue template',
+ branch_name: 'master')
+
+ visit new_project_issue_path(project, issuable_template: 'bug')
+ end
+
+ it 'fills in template' do
+ expect(find('.js-issuable-selector .dropdown-toggle-text')).to have_content('bug')
+ end
+ end
+
+ context 'suggestions', :js do
+ it 'displays list of related issues' do
+ issue = create(:issue, project: project)
+ create(:issue, project: project, title: 'test issue')
+
+ visit new_project_issue_path(project)
+
+ fill_in 'issue_title', with: issue.title
+
+ expect(page).to have_selector('.suggestion-item', count: 1)
+ end
+ end
+
+ it 'clears local storage after creating a new issue', :js do
+ 2.times do
+ visit new_project_issue_path(project)
+ wait_for_requests
+
+ expect(page).to have_field('Title', with: '')
+
+ fill_in 'issue_title', with: 'bug 345'
+ fill_in 'issue_description', with: 'bug description'
+
+ click_button 'Submit issue'
+ end
+ end
end
context "when signed in as user with special characters in their name" do
diff --git a/spec/features/issues/user_edits_issue_spec.rb b/spec/features/issues/user_edits_issue_spec.rb
index 0afc19d9519..ad984cf07e2 100644
--- a/spec/features/issues/user_edits_issue_spec.rb
+++ b/spec/features/issues/user_edits_issue_spec.rb
@@ -2,26 +2,283 @@
require "spec_helper"
-describe "User edits issue", :js do
- set(:project) { create(:project_empty_repo, :public) }
- set(:user) { create(:user) }
- set(:issue) { create(:issue, project: project, author: user) }
+describe "Issues > User edits issue", :js do
+ let_it_be(:project) { create(:project_empty_repo, :public) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:issue) { create(:issue, project: project, author: user, assignees: [user]) }
+ let_it_be(:label) { create(:label, project: project) }
+ let_it_be(:milestone) { create(:milestone, project: project) }
before do
project.add_developer(user)
sign_in(user)
+ end
+
+ context "from edit page" do
+ before do
+ visit edit_project_issue_path(project, issue)
+ end
+
+ it "previews content" do
+ form = first(".gfm-form")
+
+ page.within(form) do
+ fill_in("Description", with: "Bug fixed :smile:")
+ click_button("Preview")
+ end
+
+ expect(form).to have_button("Write")
+ end
+
+ it 'allows user to select unassigned' do
+ visit edit_project_issue_path(project, issue)
+
+ expect(page).to have_content "Assignee #{user.name}"
+
+ first('.js-user-search').click
+ click_link 'Unassigned'
+
+ click_button 'Save changes'
+
+ page.within('.assignee') do
+ expect(page).to have_content 'None - assign yourself'
+ end
+ end
+
+ context 'with due date' do
+ before do
+ visit edit_project_issue_path(project, issue)
+ end
+
+ it 'saves with due date' do
+ date = Date.today.at_beginning_of_month.tomorrow
+
+ fill_in 'issue_title', with: 'bug 345'
+ fill_in 'issue_description', with: 'bug description'
+ find('#issuable-due-date').click
+
+ page.within '.pika-single' do
+ click_button date.day
+ end
+
+ expect(find('#issuable-due-date').value).to eq date.to_s
+
+ click_button 'Save changes'
- visit(edit_project_issue_path(project, issue))
+ page.within '.issuable-sidebar' do
+ expect(page).to have_content date.to_s(:medium)
+ end
+ end
+
+ it 'warns about version conflict' do
+ issue.update(title: "New title")
+
+ fill_in 'issue_title', with: 'bug 345'
+ fill_in 'issue_description', with: 'bug description'
+
+ click_button 'Save changes'
+
+ expect(page).to have_content 'Someone edited the issue the same time you did'
+ end
+ end
end
- it "previews content" do
- form = first(".gfm-form")
+ context "from issue#show" do
+ before do
+ visit project_issue_path(project, issue)
+ end
+
+ describe 'update labels' do
+ it 'will not send ajax request when no data is changed' do
+ page.within '.labels' do
+ click_link 'Edit'
- page.within(form) do
- fill_in("Description", with: "Bug fixed :smile:")
- click_button("Preview")
+ find('.dropdown-menu-close', match: :first).click
+
+ expect(page).not_to have_selector('.block-loading')
+ end
+ end
end
- expect(form).to have_button("Write")
+ describe 'update assignee' do
+ context 'by authorized user' do
+ def close_dropdown_menu_if_visible
+ find('.dropdown-menu-toggle', visible: :all).tap do |toggle|
+ toggle.click if toggle.visible?
+ end
+ end
+
+ it 'allows user to select unassigned' do
+ visit project_issue_path(project, issue)
+
+ page.within('.assignee') do
+ expect(page).to have_content "#{user.name}"
+
+ click_link 'Edit'
+ click_link 'Unassigned'
+ first('.title').click
+ expect(page).to have_content 'None - assign yourself'
+ end
+ end
+
+ it 'allows user to select an assignee' do
+ issue2 = create(:issue, project: project, author: user)
+ visit project_issue_path(project, issue2)
+
+ page.within('.assignee') do
+ expect(page).to have_content "None"
+ end
+
+ page.within '.assignee' do
+ click_link 'Edit'
+ end
+
+ page.within '.dropdown-menu-user' do
+ click_link user.name
+ end
+
+ page.within('.assignee') do
+ expect(page).to have_content user.name
+ end
+ end
+
+ it 'allows user to unselect themselves' do
+ issue2 = create(:issue, project: project, author: user)
+
+ visit project_issue_path(project, issue2)
+
+ page.within '.assignee' do
+ click_link 'Edit'
+ click_link user.name
+
+ close_dropdown_menu_if_visible
+
+ page.within '.value .author' do
+ expect(page).to have_content user.name
+ end
+
+ click_link 'Edit'
+ click_link user.name
+
+ close_dropdown_menu_if_visible
+
+ page.within '.value .assign-yourself' do
+ expect(page).to have_content "None"
+ end
+ end
+ end
+ end
+
+ context 'by unauthorized user' do
+ let(:guest) { create(:user) }
+
+ before do
+ project.add_guest(guest)
+ end
+
+ it 'shows assignee text' do
+ sign_out(:user)
+ sign_in(guest)
+
+ visit project_issue_path(project, issue)
+ expect(page).to have_content issue.assignees.first.name
+ end
+ end
+ end
+
+ describe 'update milestone' do
+ context 'by authorized user' do
+ it 'allows user to select unassigned' do
+ visit project_issue_path(project, issue)
+
+ page.within('.milestone') do
+ expect(page).to have_content "None"
+ end
+
+ find('.block.milestone .edit-link').click
+ sleep 2 # wait for ajax stuff to complete
+ first('.dropdown-content li').click
+ sleep 2
+ page.within('.milestone') do
+ expect(page).to have_content 'None'
+ end
+ end
+
+ it 'allows user to de-select milestone' do
+ visit project_issue_path(project, issue)
+
+ page.within('.milestone') do
+ click_link 'Edit'
+ click_link milestone.title
+
+ page.within '.value' do
+ expect(page).to have_content milestone.title
+ end
+
+ click_link 'Edit'
+ click_link milestone.title
+
+ page.within '.value' do
+ expect(page).to have_content 'None'
+ end
+ end
+ end
+ end
+
+ context 'by unauthorized user' do
+ let(:guest) { create(:user) }
+
+ before do
+ project.add_guest(guest)
+ issue.milestone = milestone
+ issue.save
+ end
+
+ it 'shows milestone text' do
+ sign_out(:user)
+ sign_in(guest)
+
+ visit project_issue_path(project, issue)
+ expect(page).to have_content milestone.title
+ end
+ end
+ end
+
+ context 'update due date' do
+ it 'adds due date to issue' do
+ date = Date.today.at_beginning_of_month + 2.days
+
+ page.within '.due_date' do
+ click_link 'Edit'
+
+ page.within '.pika-single' do
+ click_button date.day
+ end
+
+ wait_for_requests
+
+ expect(find('.value').text).to have_content date.strftime('%b %-d, %Y')
+ end
+ end
+
+ it 'removes due date from issue' do
+ date = Date.today.at_beginning_of_month + 2.days
+
+ page.within '.due_date' do
+ click_link 'Edit'
+
+ page.within '.pika-single' do
+ click_button date.day
+ end
+
+ wait_for_requests
+
+ expect(page).to have_no_content 'None'
+
+ click_link 'remove due date'
+ expect(page).to have_content 'None'
+ end
+ end
+ end
end
end
diff --git a/spec/features/issues/user_filters_issues_spec.rb b/spec/features/issues/user_filters_issues_spec.rb
new file mode 100644
index 00000000000..714bc972025
--- /dev/null
+++ b/spec/features/issues/user_filters_issues_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'User filters issues' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project_empty_repo, :public) }
+
+ before do
+ %w[foobar barbaz].each do |title|
+ create(:issue,
+ author: user,
+ assignees: [user],
+ project: project,
+ title: title)
+ end
+
+ @issue = Issue.find_by(title: 'foobar')
+ @issue.milestone = create(:milestone, project: project)
+ @issue.assignees = []
+ @issue.save
+ end
+
+ let(:issue) { @issue }
+
+ it 'allows filtering by issues with no specified assignee' do
+ visit project_issues_path(project, assignee_id: IssuableFinder::FILTER_NONE)
+
+ expect(page).to have_content 'foobar'
+ expect(page).not_to have_content 'barbaz'
+ end
+
+ it 'allows filtering by a specified assignee' do
+ visit project_issues_path(project, assignee_id: user.id)
+
+ expect(page).not_to have_content 'foobar'
+ expect(page).to have_content 'barbaz'
+ end
+end
diff --git a/spec/features/issues/user_resets_their_incoming_email_token_spec.rb b/spec/features/issues/user_resets_their_incoming_email_token_spec.rb
new file mode 100644
index 00000000000..108b6f550db
--- /dev/null
+++ b/spec/features/issues/user_resets_their_incoming_email_token_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Issues > User resets their incoming email token' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public, namespace: user.namespace) }
+ let_it_be(:issue) { create(:issue, project: project) }
+
+ before do
+ stub_incoming_email_setting(enabled: true, address: "p+%{key}@gl.ab")
+ project.add_maintainer(user)
+ sign_in(user)
+
+ visit namespace_project_issues_path(user.namespace, project)
+ end
+
+ it 'changes incoming email address token', :js do
+ find('.issuable-email-modal-btn').click
+ previous_token = find('input#issuable_email').value
+ find('.incoming-email-token-reset').click
+
+ wait_for_requests
+
+ expect(page).to have_no_field('issuable_email', with: previous_token)
+ new_token = project.new_issuable_address(user.reload, 'issue')
+ expect(page).to have_field(
+ 'issuable_email',
+ with: new_token
+ )
+ end
+end
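
Note: in the token-reset spec above, have_no_field is a waiting Capybara matcher; it polls until the old token value disappears, so the follow-up have_field check reads the refreshed address without an explicit sleep. A condensed sketch of that ordering:

previous_token = find('input#issuable_email').value
find('.incoming-email-token-reset').click

# Waits until the old value is gone...
expect(page).to have_no_field('issuable_email', with: previous_token)
# ...then asserts the regenerated address is shown.
expect(page).to have_field('issuable_email', with: project.new_issuable_address(user.reload, 'issue'))
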
diff --git a/spec/features/issues/user_sees_breadcrumb_links_spec.rb b/spec/features/issues/user_sees_breadcrumb_links_spec.rb
index f31d730c337..8a120a0a0b2 100644
--- a/spec/features/issues/user_sees_breadcrumb_links_spec.rb
+++ b/spec/features/issues/user_sees_breadcrumb_links_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe 'New issue breadcrumb' do
- let(:project) { create(:project) }
+ let_it_be(:project, reload: true) { create(:project) }
let(:user) { project.creator }
before do
@@ -17,4 +17,22 @@ describe 'New issue breadcrumb' do
expect(find_link('New')[:href]).to end_with(new_project_issue_path(project))
end
end
+
+ it 'links to current issue in breadcrumbs' do
+ issue = create(:issue, project: project)
+
+ visit project_issue_path(project, issue)
+
+ expect(find('.breadcrumbs-sub-title a')[:href]).to end_with(issue_path(issue))
+ end
+
+ it 'excludes award_emoji from comment count' do
+ issue = create(:issue, author: user, assignees: [user], project: project, title: 'foobar')
+ create(:award_emoji, awardable: issue)
+
+ visit project_issues_path(project, assignee_id: user.id)
+
+ expect(page).to have_content 'foobar'
+ expect(page.all('.no-comments').first.text).to eq "0"
+ end
end
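
Note: the breadcrumb spec above moves from a plain let to TestProf's let_it_be(:project, reload: true). The record is created once for the whole file, each example's database changes roll back in its surrounding transaction, and reload: true re-reads the record before every example so stale in-memory attributes do not leak between examples. A minimal sketch of the declaration pattern:

# Created once before all examples; reloaded from the database before each one.
let_it_be(:project, reload: true) { create(:project) }

# Plain let/let! still work alongside it for per-example objects.
let(:user) { project.creator }
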
diff --git a/spec/features/issues/user_sees_empty_state_spec.rb b/spec/features/issues/user_sees_empty_state_spec.rb
new file mode 100644
index 00000000000..114d119aca8
--- /dev/null
+++ b/spec/features/issues/user_sees_empty_state_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Issues > User sees empty state' do
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:user) { project.creator }
+
+ shared_examples_for 'empty state with filters' do
+ it 'user sees empty state with filters' do
+ create(:issue, author: user, project: project)
+
+ visit project_issues_path(project, milestone_title: "1.0")
+
+ expect(page).to have_content('Sorry, your filter produced no results')
+ expect(page).to have_content('To widen your search, change or remove filters above')
+ end
+ end
+
+ describe 'while user is signed out' do
+ describe 'empty state' do
+ it 'user sees empty state' do
+ visit project_issues_path(project)
+
+ expect(page).to have_content('Register / Sign In')
+ expect(page).to have_content('The Issue Tracker is the place to add things that need to be improved or solved in a project.')
+ expect(page).to have_content('You can register or sign in to create issues for this project.')
+ end
+
+ it_behaves_like 'empty state with filters'
+ end
+ end
+
+ describe 'while user is signed in' do
+ before do
+ sign_in(user)
+ end
+
+ describe 'empty state' do
+ it 'user sees empty state' do
+ visit project_issues_path(project)
+
+ expect(page).to have_content('The Issue Tracker is the place to add things that need to be improved or solved in a project')
+ expect(page).to have_content('Issues can be bugs, tasks or ideas to be discussed. Also, issues are searchable and filterable.')
+ expect(page).to have_content('New issue')
+ end
+
+ it_behaves_like 'empty state with filters'
+ end
+ end
+end
diff --git a/spec/features/issues/user_sees_live_update_spec.rb b/spec/features/issues/user_sees_live_update_spec.rb
new file mode 100644
index 00000000000..98c7d289fb0
--- /dev/null
+++ b/spec/features/issues/user_sees_live_update_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Issues > User sees live update', :js do
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:user) { project.creator }
+
+ before do
+ sign_in(user)
+ end
+
+ describe 'title issue#show' do
+ it 'updates the title' do
+ issue = create(:issue, author: user, assignees: [user], project: project, title: 'new title')
+
+ visit project_issue_path(project, issue)
+
+ expect(page).to have_text("new title")
+
+ issue.update(title: "updated title")
+
+ wait_for_requests
+ expect(page).to have_text("updated title")
+ end
+ end
+
+ describe 'confidential issue#show' do
+ it 'shows confidential sidebar information as confidential and can be turned off' do
+ issue = create(:issue, :confidential, project: project)
+
+ visit project_issue_path(project, issue)
+
+ expect(page).to have_css('.issuable-note-warning')
+ expect(find('.issuable-sidebar-item.confidentiality')).to have_css('.is-active')
+ expect(find('.issuable-sidebar-item.confidentiality')).not_to have_css('.not-active')
+
+ find('.confidential-edit').click
+ expect(page).to have_css('.sidebar-item-warning-message')
+
+ within('.sidebar-item-warning-message') do
+ find('.btn-close').click
+ end
+
+ wait_for_requests
+
+ visit project_issue_path(project, issue)
+
+ expect(page).not_to have_css('.is-active')
+ end
+ end
+end
diff --git a/spec/features/issues/user_sorts_issues_spec.rb b/spec/features/issues/user_sorts_issues_spec.rb
index 79938785633..66110f55435 100644
--- a/spec/features/issues/user_sorts_issues_spec.rb
+++ b/spec/features/issues/user_sorts_issues_spec.rb
@@ -3,12 +3,17 @@
require "spec_helper"
describe "User sorts issues" do
- set(:user) { create(:user) }
- set(:group) { create(:group) }
- set(:project) { create(:project_empty_repo, :public, group: group) }
- set(:issue1) { create(:issue, project: project) }
- set(:issue2) { create(:issue, project: project) }
- set(:issue3) { create(:issue, project: project) }
+ include SortingHelper
+ include IssueHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project_empty_repo, :public, group: group) }
+ let_it_be(:issue1, reload: true) { create(:issue, title: 'foo', created_at: Time.now, project: project) }
+ let_it_be(:issue2, reload: true) { create(:issue, title: 'bar', created_at: Time.now - 60, project: project) }
+ let_it_be(:issue3, reload: true) { create(:issue, title: 'baz', created_at: Time.now - 120, project: project) }
+ let_it_be(:newer_due_milestone) { create(:milestone, project: project, due_date: '2013-12-11') }
+ let_it_be(:later_due_milestone) { create(:milestone, project: project, due_date: '2013-12-12') }
before do
create_list(:award_emoji, 2, :upvote, awardable: issue1)
@@ -62,4 +67,174 @@ describe "User sorts issues" do
end
end
end
+
+ it 'sorts by newest' do
+ visit project_issues_path(project, sort: sort_value_created_date)
+
+ expect(first_issue).to include('foo')
+ expect(last_issue).to include('baz')
+ end
+
+ it 'sorts by most recently updated' do
+ issue3.updated_at = Time.now + 100
+ issue3.save
+ visit project_issues_path(project, sort: sort_value_recently_updated)
+
+ expect(first_issue).to include('baz')
+ end
+
+ describe 'sorting by due date' do
+ before do
+ issue1.update(due_date: 1.day.from_now)
+ issue2.update(due_date: 6.days.from_now)
+ end
+
+ it 'sorts by due date' do
+ visit project_issues_path(project, sort: sort_value_due_date)
+
+ expect(first_issue).to include('foo')
+ end
+
+ it 'sorts by due date by excluding nil due dates' do
+ issue2.update(due_date: nil)
+
+ visit project_issues_path(project, sort: sort_value_due_date)
+
+ expect(first_issue).to include('foo')
+ end
+
+ context 'with a filter on labels' do
+ let(:label) { create(:label, project: project) }
+
+ before do
+ create(:label_link, label: label, target: issue1)
+ end
+
+ it 'sorts by least recently due date by excluding nil due dates' do
+ issue2.update(due_date: nil)
+
+ visit project_issues_path(project, label_names: [label.name], sort: sort_value_due_date_later)
+
+ expect(first_issue).to include('foo')
+ end
+ end
+ end
+
+ describe 'filtering by due date' do
+ before do
+ issue1.update(due_date: 1.day.from_now)
+ issue2.update(due_date: 6.days.from_now)
+ end
+
+ it 'filters by none' do
+ visit project_issues_path(project, due_date: Issue::NoDueDate.name)
+
+ page.within '.issues-holder' do
+ expect(page).not_to have_content('foo')
+ expect(page).not_to have_content('bar')
+ expect(page).to have_content('baz')
+ end
+ end
+
+ it 'filters by any' do
+ visit project_issues_path(project, due_date: Issue::AnyDueDate.name)
+
+ page.within '.issues-holder' do
+ expect(page).to have_content('foo')
+ expect(page).to have_content('bar')
+ expect(page).to have_content('baz')
+ end
+ end
+
+ it 'filters by due this week' do
+ issue1.update(due_date: Date.today.beginning_of_week + 2.days)
+ issue2.update(due_date: Date.today.end_of_week)
+ issue3.update(due_date: Date.today - 8.days)
+
+ visit project_issues_path(project, due_date: Issue::DueThisWeek.name)
+
+ page.within '.issues-holder' do
+ expect(page).to have_content('foo')
+ expect(page).to have_content('bar')
+ expect(page).not_to have_content('baz')
+ end
+ end
+
+ it 'filters by due this month' do
+ issue1.update(due_date: Date.today.beginning_of_month + 2.days)
+ issue2.update(due_date: Date.today.end_of_month)
+ issue3.update(due_date: Date.today - 50.days)
+
+ visit project_issues_path(project, due_date: Issue::DueThisMonth.name)
+
+ page.within '.issues-holder' do
+ expect(page).to have_content('foo')
+ expect(page).to have_content('bar')
+ expect(page).not_to have_content('baz')
+ end
+ end
+
+ it 'filters by overdue' do
+ issue1.update(due_date: Date.today + 2.days)
+ issue2.update(due_date: Date.today + 20.days)
+ issue3.update(due_date: Date.yesterday)
+
+ visit project_issues_path(project, due_date: Issue::Overdue.name)
+
+ page.within '.issues-holder' do
+ expect(page).not_to have_content('foo')
+ expect(page).not_to have_content('bar')
+ expect(page).to have_content('baz')
+ end
+ end
+
+ it 'filters by due next month and previous two weeks' do
+ issue1.update(due_date: Date.today - 4.weeks)
+ issue2.update(due_date: (Date.today + 2.months).beginning_of_month)
+ issue3.update(due_date: Date.yesterday)
+
+ visit project_issues_path(project, due_date: Issue::DueNextMonthAndPreviousTwoWeeks.name)
+
+ page.within '.issues-holder' do
+ expect(page).not_to have_content('foo')
+ expect(page).not_to have_content('bar')
+ expect(page).to have_content('baz')
+ end
+ end
+ end
+
+ describe 'sorting by milestone' do
+ before do
+ issue1.milestone = newer_due_milestone
+ issue1.save
+ issue2.milestone = later_due_milestone
+ issue2.save
+ end
+
+ it 'sorts by milestone' do
+ visit project_issues_path(project, sort: sort_value_milestone)
+
+ expect(first_issue).to include('foo')
+ expect(last_issue).to include('baz')
+ end
+ end
+
+ describe 'combine filter and sort' do
+ let(:user2) { create(:user) }
+
+ before do
+ issue1.assignees << user2
+ issue1.save
+ issue2.assignees << user2
+ issue2.save
+ end
+
+ it 'sorts with a filter applied' do
+ visit project_issues_path(project, sort: sort_value_created_date, assignee_id: user2.id)
+
+ expect(first_issue).to include('foo')
+ expect(last_issue).to include('bar')
+ expect(page).not_to have_content('baz')
+ end
+ end
end
diff --git a/spec/features/issues_spec.rb b/spec/features/issues_spec.rb
deleted file mode 100644
index ef9daf70b0c..00000000000
--- a/spec/features/issues_spec.rb
+++ /dev/null
@@ -1,828 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe 'Issues' do
- include DropzoneHelper
- include IssueHelpers
- include SortingHelper
-
- let(:user) { create(:user) }
- let(:project) { create(:project, :public) }
-
- shared_examples_for 'empty state with filters' do
- it 'user sees empty state with filters' do
- create(:issue, author: user, project: project)
-
- visit project_issues_path(project, milestone_title: "1.0")
-
- expect(page).to have_content('Sorry, your filter produced no results')
- expect(page).to have_content('To widen your search, change or remove filters above')
- end
- end
-
- describe 'while user is signed out' do
- describe 'empty state' do
- it 'user sees empty state' do
- visit project_issues_path(project)
-
- expect(page).to have_content('Register / Sign In')
- expect(page).to have_content('The Issue Tracker is the place to add things that need to be improved or solved in a project.')
- expect(page).to have_content('You can register or sign in to create issues for this project.')
- end
-
- it_behaves_like 'empty state with filters'
- end
- end
-
- describe 'while user is signed in' do
- before do
- sign_in(user)
- user2 = create(:user)
-
- project.add_developer(user)
- project.add_developer(user2)
- end
-
- describe 'empty state' do
- it 'user sees empty state' do
- visit project_issues_path(project)
-
- expect(page).to have_content('The Issue Tracker is the place to add things that need to be improved or solved in a project')
- expect(page).to have_content('Issues can be bugs, tasks or ideas to be discussed. Also, issues are searchable and filterable.')
- expect(page).to have_content('New issue')
- end
-
- it_behaves_like 'empty state with filters'
- end
-
- describe 'Edit issue' do
- let!(:issue) do
- create(:issue,
- author: user,
- assignees: [user],
- project: project)
- end
-
- before do
- visit edit_project_issue_path(project, issue)
- find('.js-zen-enter').click
- end
-
- it 'opens new issue popup' do
- expect(page).to have_content("Issue ##{issue.iid}")
- end
- end
-
- describe 'Editing issue assignee' do
- let!(:issue) do
- create(:issue,
- author: user,
- assignees: [user],
- project: project)
- end
-
- it 'allows user to select unassigned', :js do
- visit edit_project_issue_path(project, issue)
-
- expect(page).to have_content "Assignee #{user.name}"
-
- first('.js-user-search').click
- click_link 'Unassigned'
-
- click_button 'Save changes'
-
- page.within('.assignee') do
- expect(page).to have_content 'None - assign yourself'
- end
-
- expect(issue.reload.assignees).to be_empty
- end
- end
-
- describe 'due date', :js do
- context 'on new form' do
- before do
- visit new_project_issue_path(project)
- end
-
- it 'saves with due date' do
- date = Date.today.at_beginning_of_month
-
- fill_in 'issue_title', with: 'bug 345'
- fill_in 'issue_description', with: 'bug description'
- find('#issuable-due-date').click
-
- page.within '.pika-single' do
- click_button date.day
- end
-
- expect(find('#issuable-due-date').value).to eq date.to_s
-
- click_button 'Submit issue'
-
- page.within '.issuable-sidebar' do
- expect(page).to have_content date.to_s(:medium)
- end
- end
- end
-
- context 'on edit form' do
- let(:issue) { create(:issue, author: user, project: project, due_date: Date.today.at_beginning_of_month.to_s) }
-
- before do
- visit edit_project_issue_path(project, issue)
- end
-
- it 'saves with due date' do
- date = Date.today.at_beginning_of_month
-
- expect(find('#issuable-due-date').value).to eq date.to_s
-
- date = date.tomorrow
-
- fill_in 'issue_title', with: 'bug 345'
- fill_in 'issue_description', with: 'bug description'
- find('#issuable-due-date').click
-
- page.within '.pika-single' do
- click_button date.day
- end
-
- expect(find('#issuable-due-date').value).to eq date.to_s
-
- click_button 'Save changes'
-
- page.within '.issuable-sidebar' do
- expect(page).to have_content date.to_s(:medium)
- end
- end
-
- it 'warns about version conflict' do
- issue.update(title: "New title")
-
- fill_in 'issue_title', with: 'bug 345'
- fill_in 'issue_description', with: 'bug description'
-
- click_button 'Save changes'
-
- expect(page).to have_content 'Someone edited the issue the same time you did'
- end
- end
- end
-
- describe 'Issue info' do
- it 'links to current issue in breadcrubs' do
- issue = create(:issue, project: project)
-
- visit project_issue_path(project, issue)
-
- expect(find('.breadcrumbs-sub-title a')[:href]).to end_with(issue_path(issue))
- end
-
- it 'excludes award_emoji from comment count' do
- issue = create(:issue, author: user, assignees: [user], project: project, title: 'foobar')
- create(:award_emoji, awardable: issue)
-
- visit project_issues_path(project, assignee_id: user.id)
-
- expect(page).to have_content 'foobar'
- expect(page.all('.no-comments').first.text).to eq "0"
- end
- end
-
- describe 'Filter issue' do
- before do
- %w(foobar barbaz gitlab).each do |title|
- create(:issue,
- author: user,
- assignees: [user],
- project: project,
- title: title)
- end
-
- @issue = Issue.find_by(title: 'foobar')
- @issue.milestone = create(:milestone, project: project)
- @issue.assignees = []
- @issue.save
- end
-
- let(:issue) { @issue }
-
- it 'allows filtering by issues with no specified assignee' do
- visit project_issues_path(project, assignee_id: IssuableFinder::FILTER_NONE)
-
- expect(page).to have_content 'foobar'
- expect(page).not_to have_content 'barbaz'
- expect(page).not_to have_content 'gitlab'
- end
-
- it 'allows filtering by a specified assignee' do
- visit project_issues_path(project, assignee_id: user.id)
-
- expect(page).not_to have_content 'foobar'
- expect(page).to have_content 'barbaz'
- expect(page).to have_content 'gitlab'
- end
- end
-
- describe 'filter issue' do
- titles = %w[foo bar baz]
- titles.each_with_index do |title, index|
- let!(title.to_sym) do
- create(:issue, title: title,
- project: project,
- created_at: Time.now - (index * 60))
- end
- end
- let(:newer_due_milestone) { create(:milestone, project: project, due_date: '2013-12-11') }
- let(:later_due_milestone) { create(:milestone, project: project, due_date: '2013-12-12') }
-
- it 'sorts by newest' do
- visit project_issues_path(project, sort: sort_value_created_date)
-
- expect(first_issue).to include('foo')
- expect(last_issue).to include('baz')
- end
-
- it 'sorts by most recently updated' do
- baz.updated_at = Time.now + 100
- baz.save
- visit project_issues_path(project, sort: sort_value_recently_updated)
-
- expect(first_issue).to include('baz')
- end
-
- describe 'sorting by due date' do
- before do
- foo.update(due_date: 1.day.from_now)
- bar.update(due_date: 6.days.from_now)
- end
-
- it 'sorts by due date' do
- visit project_issues_path(project, sort: sort_value_due_date)
-
- expect(first_issue).to include('foo')
- end
-
- it 'sorts by due date by excluding nil due dates' do
- bar.update(due_date: nil)
-
- visit project_issues_path(project, sort: sort_value_due_date)
-
- expect(first_issue).to include('foo')
- end
-
- context 'with a filter on labels' do
- let(:label) { create(:label, project: project) }
-
- before do
- create(:label_link, label: label, target: foo)
- end
-
- it 'sorts by least recently due date by excluding nil due dates' do
- bar.update(due_date: nil)
-
- visit project_issues_path(project, label_names: [label.name], sort: sort_value_due_date_later)
-
- expect(first_issue).to include('foo')
- end
- end
- end
-
- describe 'filtering by due date' do
- before do
- foo.update(due_date: 1.day.from_now)
- bar.update(due_date: 6.days.from_now)
- end
-
- it 'filters by none' do
- visit project_issues_path(project, due_date: Issue::NoDueDate.name)
-
- page.within '.issues-holder' do
- expect(page).not_to have_content('foo')
- expect(page).not_to have_content('bar')
- expect(page).to have_content('baz')
- end
- end
-
- it 'filters by any' do
- visit project_issues_path(project, due_date: Issue::AnyDueDate.name)
-
- page.within '.issues-holder' do
- expect(page).to have_content('foo')
- expect(page).to have_content('bar')
- expect(page).to have_content('baz')
- end
- end
-
- it 'filters by due this week' do
- foo.update(due_date: Date.today.beginning_of_week + 2.days)
- bar.update(due_date: Date.today.end_of_week)
- baz.update(due_date: Date.today - 8.days)
-
- visit project_issues_path(project, due_date: Issue::DueThisWeek.name)
-
- page.within '.issues-holder' do
- expect(page).to have_content('foo')
- expect(page).to have_content('bar')
- expect(page).not_to have_content('baz')
- end
- end
-
- it 'filters by due this month' do
- foo.update(due_date: Date.today.beginning_of_month + 2.days)
- bar.update(due_date: Date.today.end_of_month)
- baz.update(due_date: Date.today - 50.days)
-
- visit project_issues_path(project, due_date: Issue::DueThisMonth.name)
-
- page.within '.issues-holder' do
- expect(page).to have_content('foo')
- expect(page).to have_content('bar')
- expect(page).not_to have_content('baz')
- end
- end
-
- it 'filters by overdue' do
- foo.update(due_date: Date.today + 2.days)
- bar.update(due_date: Date.today + 20.days)
- baz.update(due_date: Date.yesterday)
-
- visit project_issues_path(project, due_date: Issue::Overdue.name)
-
- page.within '.issues-holder' do
- expect(page).not_to have_content('foo')
- expect(page).not_to have_content('bar')
- expect(page).to have_content('baz')
- end
- end
-
- it 'filters by due next month and previous two weeks' do
- foo.update(due_date: Date.today - 4.weeks)
- bar.update(due_date: (Date.today + 2.months).beginning_of_month)
- baz.update(due_date: Date.yesterday)
-
- visit project_issues_path(project, due_date: Issue::DueNextMonthAndPreviousTwoWeeks.name)
-
- page.within '.issues-holder' do
- expect(page).not_to have_content('foo')
- expect(page).not_to have_content('bar')
- expect(page).to have_content('baz')
- end
- end
- end
-
- describe 'sorting by milestone' do
- before do
- foo.milestone = newer_due_milestone
- foo.save
- bar.milestone = later_due_milestone
- bar.save
- end
-
- it 'sorts by milestone' do
- visit project_issues_path(project, sort: sort_value_milestone)
-
- expect(first_issue).to include('foo')
- expect(last_issue).to include('baz')
- end
- end
-
- describe 'combine filter and sort' do
- let(:user2) { create(:user) }
-
- before do
- foo.assignees << user2
- foo.save
- bar.assignees << user2
- bar.save
- end
-
- it 'sorts with a filter applied' do
- visit project_issues_path(project, sort: sort_value_created_date, assignee_id: user2.id)
-
- expect(first_issue).to include('foo')
- expect(last_issue).to include('bar')
- expect(page).not_to have_content('baz')
- end
- end
- end
-
- describe 'when I want to reset my incoming email token' do
- let(:project1) { create(:project, namespace: user.namespace) }
- let!(:issue) { create(:issue, project: project1) }
-
- before do
- stub_incoming_email_setting(enabled: true, address: "p+%{key}@gl.ab")
- project1.add_maintainer(user)
- visit namespace_project_issues_path(user.namespace, project1)
- end
-
- it 'changes incoming email address token', :js do
- find('.issuable-email-modal-btn').click
- previous_token = find('input#issuable_email').value
- find('.incoming-email-token-reset').click
-
- wait_for_requests
-
- expect(page).to have_no_field('issuable_email', with: previous_token)
- new_token = project1.new_issuable_address(user.reload, 'issue')
- expect(page).to have_field(
- 'issuable_email',
- with: new_token
- )
- end
- end
-
- describe 'update labels from issue#show', :js do
- let(:issue) { create(:issue, project: project, author: user, assignees: [user]) }
- let!(:label) { create(:label, project: project) }
-
- before do
- visit project_issue_path(project, issue)
- end
-
- it 'will not send ajax request when no data is changed' do
- page.within '.labels' do
- click_link 'Edit'
-
- find('.dropdown-menu-close', match: :first).click
-
- expect(page).not_to have_selector('.block-loading')
- end
- end
- end
-
- describe 'update assignee from issue#show' do
- let(:issue) { create(:issue, project: project, author: user, assignees: [user]) }
-
- context 'by authorized user' do
- it 'allows user to select unassigned', :js do
- visit project_issue_path(project, issue)
-
- page.within('.assignee') do
- expect(page).to have_content "#{user.name}"
-
- click_link 'Edit'
- click_link 'Unassigned'
- first('.title').click
- expect(page).to have_content 'None'
- end
-
- wait_for_requests
-
- expect(issue.reload.assignees).to be_empty
- end
-
- it 'allows user to select an assignee', :js do
- issue2 = create(:issue, project: project, author: user)
- visit project_issue_path(project, issue2)
-
- page.within('.assignee') do
- expect(page).to have_content "None"
- end
-
- page.within '.assignee' do
- click_link 'Edit'
- end
-
- page.within '.dropdown-menu-user' do
- click_link user.name
- end
-
- page.within('.assignee') do
- expect(page).to have_content user.name
- end
- end
-
- it 'allows user to unselect themselves', :js do
- issue2 = create(:issue, project: project, author: user)
-
- visit project_issue_path(project, issue2)
-
- def close_dropdown_menu_if_visible
- find('.dropdown-menu-toggle', visible: :all).tap do |toggle|
- toggle.click if toggle.visible?
- end
- end
-
- page.within '.assignee' do
- click_link 'Edit'
- click_link user.name
-
- close_dropdown_menu_if_visible
-
- page.within '.value .author' do
- expect(page).to have_content user.name
- end
-
- click_link 'Edit'
- click_link user.name
-
- close_dropdown_menu_if_visible
-
- page.within '.value .assign-yourself' do
- expect(page).to have_content "None"
- end
- end
- end
- end
-
- context 'by unauthorized user' do
- let(:guest) { create(:user) }
-
- before do
- project.add_guest(guest)
- end
-
- it 'shows assignee text', :js do
- sign_out(:user)
- sign_in(guest)
-
- visit project_issue_path(project, issue)
- expect(page).to have_content issue.assignees.first.name
- end
- end
- end
-
- describe 'update milestone from issue#show' do
- let!(:issue) { create(:issue, project: project, author: user) }
- let!(:milestone) { create(:milestone, project: project) }
-
- context 'by authorized user' do
- it 'allows user to select unassigned', :js do
- visit project_issue_path(project, issue)
-
- page.within('.milestone') do
- expect(page).to have_content "None"
- end
-
- find('.block.milestone .edit-link').click
- sleep 2 # wait for ajax stuff to complete
- first('.dropdown-content li').click
- sleep 2
- page.within('.milestone') do
- expect(page).to have_content 'None'
- end
-
- expect(issue.reload.milestone).to be_nil
- end
-
- it 'allows user to de-select milestone', :js do
- visit project_issue_path(project, issue)
-
- page.within('.milestone') do
- click_link 'Edit'
- click_link milestone.title
-
- page.within '.value' do
- expect(page).to have_content milestone.title
- end
-
- click_link 'Edit'
- click_link milestone.title
-
- page.within '.value' do
- expect(page).to have_content 'None'
- end
- end
- end
- end
-
- context 'by unauthorized user' do
- let(:guest) { create(:user) }
-
- before do
- project.add_guest(guest)
- issue.milestone = milestone
- issue.save
- end
-
- it 'shows milestone text', :js do
- sign_out(:user)
- sign_in(guest)
-
- visit project_issue_path(project, issue)
- expect(page).to have_content milestone.title
- end
- end
- end
-
- describe 'new issue' do
- let!(:issue) { create(:issue, project: project) }
-
- context 'by unauthenticated user' do
- before do
- sign_out(:user)
- end
-
- it 'redirects to signin then back to new issue after signin' do
- visit project_issues_path(project)
-
- page.within '.nav-controls' do
- click_link 'New issue'
- end
-
- expect(current_path).to eq new_user_session_path
-
- gitlab_sign_in(create(:user))
-
- expect(current_path).to eq new_project_issue_path(project)
- end
- end
-
- it 'clears local storage after creating a new issue', :js do
- 2.times do
- visit new_project_issue_path(project)
- wait_for_requests
-
- expect(page).to have_field('Title', with: '')
-
- fill_in 'issue_title', with: 'bug 345'
- fill_in 'issue_description', with: 'bug description'
-
- click_button 'Submit issue'
- end
- end
-
- context 'dropzone upload file', :js do
- before do
- visit new_project_issue_path(project)
- end
-
- it 'uploads file when dragging into textarea' do
- dropzone_file Rails.root.join('spec', 'fixtures', 'banana_sample.gif')
-
- expect(page.find_field("issue_description").value).to have_content 'banana_sample'
- end
-
- it "doesn't add double newline to end of a single attachment markdown" do
- dropzone_file Rails.root.join('spec', 'fixtures', 'banana_sample.gif')
-
- expect(page.find_field("issue_description").value).not_to match /\n\n$/
- end
-
- it "cancels a file upload correctly" do
- slow_requests do
- dropzone_file([Rails.root.join('spec', 'fixtures', 'dk.png')], 0, false)
-
- click_button 'Cancel'
- end
-
- expect(page).to have_button('Attach a file')
- expect(page).not_to have_button('Cancel')
- expect(page).not_to have_selector('.uploading-progress-container', visible: true)
- end
- end
-
- context 'form filled by URL parameters' do
- let(:project) { create(:project, :public, :repository) }
-
- before do
- project.repository.create_file(
- user,
- '.gitlab/issue_templates/bug.md',
- 'this is a test "bug" template',
- message: 'added issue template',
- branch_name: 'master')
-
- visit new_project_issue_path(project, issuable_template: 'bug')
- end
-
- it 'fills in template' do
- expect(find('.js-issuable-selector .dropdown-toggle-text')).to have_content('bug')
- end
- end
-
- context 'suggestions', :js do
- it 'displays list of related issues' do
- create(:issue, project: project, title: 'test issue')
-
- visit new_project_issue_path(project)
-
- fill_in 'issue_title', with: issue.title
-
- expect(page).to have_selector('.suggestion-item', count: 1)
- end
- end
- end
-
- describe 'new issue by email' do
- shared_examples 'show the email in the modal' do
- let(:issue) { create(:issue, project: project) }
-
- before do
- project.issues << issue
- stub_incoming_email_setting(enabled: true, address: "p+%{key}@gl.ab")
-
- visit project_issues_path(project)
- click_button('Email a new issue')
- end
-
- it 'click the button to show modal for the new email' do
- page.within '#issuable-email-modal' do
- email = project.new_issuable_address(user, 'issue')
-
- expect(page).to have_selector("input[value='#{email}']")
- end
- end
- end
-
- context 'with existing issues' do
- let!(:issue) { create(:issue, project: project, author: user) }
-
- it_behaves_like 'show the email in the modal'
- end
-
- context 'without existing issues' do
- it_behaves_like 'show the email in the modal'
- end
- end
-
- describe 'due date' do
- context 'update due on issue#show', :js do
- let(:issue) { create(:issue, project: project, author: user, assignees: [user]) }
-
- before do
- visit project_issue_path(project, issue)
- end
-
- it 'adds due date to issue' do
- date = Date.today.at_beginning_of_month + 2.days
-
- page.within '.due_date' do
- click_link 'Edit'
-
- page.within '.pika-single' do
- click_button date.day
- end
-
- wait_for_requests
-
- expect(find('.value').text).to have_content date.strftime('%b %-d, %Y')
- end
- end
-
- it 'removes due date from issue' do
- date = Date.today.at_beginning_of_month + 2.days
-
- page.within '.due_date' do
- click_link 'Edit'
-
- page.within '.pika-single' do
- click_button date.day
- end
-
- wait_for_requests
-
- expect(page).to have_no_content 'None'
-
- click_link 'remove due date'
- expect(page).to have_content 'None'
- end
- end
- end
- end
-
- describe 'title issue#show', :js do
- it 'updates the title', :js do
- issue = create(:issue, author: user, assignees: [user], project: project, title: 'new title')
-
- visit project_issue_path(project, issue)
-
- expect(page).to have_text("new title")
-
- issue.update(title: "updated title")
-
- wait_for_requests
- expect(page).to have_text("updated title")
- end
- end
-
- describe 'confidential issue#show', :js do
- it 'shows confidential sidebar information as confidential and can be turned off' do
- issue = create(:issue, :confidential, project: project)
-
- visit project_issue_path(project, issue)
-
- expect(page).to have_css('.issuable-note-warning')
- expect(find('.issuable-sidebar-item.confidentiality')).to have_css('.is-active')
- expect(find('.issuable-sidebar-item.confidentiality')).not_to have_css('.not-active')
-
- find('.confidential-edit').click
- expect(page).to have_css('.sidebar-item-warning-message')
-
- within('.sidebar-item-warning-message') do
- find('.btn-close').click
- end
-
- wait_for_requests
-
- visit project_issue_path(project, issue)
-
- expect(page).not_to have_css('.is-active')
- end
- end
- end
-end
diff --git a/spec/features/labels_hierarchy_spec.rb b/spec/features/labels_hierarchy_spec.rb
index b7a45905845..c1a2e22a0c2 100644
--- a/spec/features/labels_hierarchy_spec.rb
+++ b/spec/features/labels_hierarchy_spec.rb
@@ -70,7 +70,7 @@ describe 'Labels Hierarchy', :js do
end
it 'does not filter by descendant group labels' do
- filtered_search.set("label:")
+ filtered_search.set("label=")
wait_for_requests
@@ -134,7 +134,7 @@ describe 'Labels Hierarchy', :js do
end
it 'does not filter by descendant group project labels' do
- filtered_search.set("label:")
+ filtered_search.set("label=")
wait_for_requests
@@ -227,7 +227,7 @@ describe 'Labels Hierarchy', :js do
it_behaves_like 'filtering by ancestor labels for projects'
it 'does not filter by descendant group labels' do
- filtered_search.set("label:")
+ filtered_search.set("label=")
wait_for_requests
diff --git a/spec/features/markdown/markdown_spec.rb b/spec/features/markdown/markdown_spec.rb
index a45fa67ce9e..9ebd85acb81 100644
--- a/spec/features/markdown/markdown_spec.rb
+++ b/spec/features/markdown/markdown_spec.rb
@@ -208,6 +208,8 @@ describe 'GitLab Markdown', :aggregate_failures do
@group = @feat.group
end
+ let(:project) { @feat.project } # Shadow this so matchers can use it
+
context 'default pipeline' do
before do
@html = markdown(@feat.raw_markdown)
@@ -216,8 +218,12 @@ describe 'GitLab Markdown', :aggregate_failures do
it_behaves_like 'all pipelines'
it 'includes custom filters' do
- aggregate_failures 'RelativeLinkFilter' do
- expect(doc).to parse_relative_links
+ aggregate_failures 'UploadLinkFilter' do
+ expect(doc).to parse_upload_links
+ end
+
+ aggregate_failures 'RepositoryLinkFilter' do
+ expect(doc).to parse_repository_links
end
aggregate_failures 'EmojiFilter' do
@@ -277,8 +283,12 @@ describe 'GitLab Markdown', :aggregate_failures do
it_behaves_like 'all pipelines'
it 'includes custom filters' do
- aggregate_failures 'RelativeLinkFilter' do
- expect(doc).not_to parse_relative_links
+ aggregate_failures 'UploadLinkFilter' do
+ expect(doc).to parse_upload_links
+ end
+
+ aggregate_failures 'RepositoryLinkFilter' do
+ expect(doc).not_to parse_repository_links
end
aggregate_failures 'EmojiFilter' do
diff --git a/spec/features/merge_request/maintainer_edits_fork_spec.rb b/spec/features/merge_request/maintainer_edits_fork_spec.rb
index 4e161d530d3..4f2c5fc73d8 100644
--- a/spec/features/merge_request/maintainer_edits_fork_spec.rb
+++ b/spec/features/merge_request/maintainer_edits_fork_spec.rb
@@ -32,8 +32,6 @@ describe 'a maintainer edits files on a source-branch of an MR from a fork', :js
wait_for_requests
end
- it_behaves_like 'rendering a single diff version'
-
it 'mentions commits will go to the source branch' do
expect(page).to have_content('Your changes can be committed to fix because a merge request is open.')
end
diff --git a/spec/features/merge_request/user_comments_on_diff_spec.rb b/spec/features/merge_request/user_comments_on_diff_spec.rb
index 6a23b6cdf60..19b8a7f74b7 100644
--- a/spec/features/merge_request/user_comments_on_diff_spec.rb
+++ b/spec/features/merge_request/user_comments_on_diff_spec.rb
@@ -13,15 +13,12 @@ describe 'User comments on a diff', :js do
let(:user) { create(:user) }
before do
- stub_feature_flags(single_mr_diff_view: false)
project.add_maintainer(user)
sign_in(user)
visit(diffs_project_merge_request_path(project, merge_request))
end
- it_behaves_like 'rendering a single diff version'
-
context 'when viewing comments' do
context 'when toggling inline comments' do
context 'in a single file' do
diff --git a/spec/features/merge_request/user_creates_image_diff_notes_spec.rb b/spec/features/merge_request/user_creates_image_diff_notes_spec.rb
index e6634a8ff39..e0724a04ea3 100644
--- a/spec/features/merge_request/user_creates_image_diff_notes_spec.rb
+++ b/spec/features/merge_request/user_creates_image_diff_notes_spec.rb
@@ -9,7 +9,6 @@ describe 'Merge request > User creates image diff notes', :js do
let(:user) { project.creator }
before do
- stub_feature_flags(single_mr_diff_view: false)
sign_in(user)
# Stub helper to return any blob file as image from public app folder.
@@ -18,8 +17,6 @@ describe 'Merge request > User creates image diff notes', :js do
allow_any_instance_of(DiffHelper).to receive(:diff_file_old_blob_raw_url).and_return('/favicon.png')
end
- it_behaves_like 'rendering a single diff version'
-
context 'create commit diff notes' do
commit_id = '2f63565e7aac07bcdadb654e253078b727143ec4'
diff --git a/spec/features/merge_request/user_expands_diff_spec.rb b/spec/features/merge_request/user_expands_diff_spec.rb
index 9b040271468..9bce5264817 100644
--- a/spec/features/merge_request/user_expands_diff_spec.rb
+++ b/spec/features/merge_request/user_expands_diff_spec.rb
@@ -7,7 +7,6 @@ describe 'User expands diff', :js do
let(:merge_request) { create(:merge_request, source_branch: 'expand-collapse-files', source_project: project, target_project: project) }
before do
- stub_feature_flags(single_mr_diff_view: false)
stub_feature_flags(diffs_batch_load: false)
allow(Gitlab::Git::Diff).to receive(:size_limit).and_return(100.kilobytes)
@@ -18,8 +17,6 @@ describe 'User expands diff', :js do
wait_for_requests
end
- it_behaves_like 'rendering a single diff version'
-
it 'allows user to expand diff' do
page.within find('[id="6eb14e00385d2fb284765eb1cd8d420d33d63fc9"]') do
click_link 'Click to expand it.'
diff --git a/spec/features/merge_request/user_posts_diff_notes_spec.rb b/spec/features/merge_request/user_posts_diff_notes_spec.rb
index 6328c0a5133..8b16760606c 100644
--- a/spec/features/merge_request/user_posts_diff_notes_spec.rb
+++ b/spec/features/merge_request/user_posts_diff_notes_spec.rb
@@ -14,15 +14,12 @@ describe 'Merge request > User posts diff notes', :js do
let(:test_note_comment) { 'this is a test note!' }
before do
- stub_feature_flags(single_mr_diff_view: false)
set_cookie('sidebar_collapsed', 'true')
project.add_developer(user)
sign_in(user)
end
- it_behaves_like 'rendering a single diff version'
-
context 'when hovering over a parallel view diff file' do
before do
visit diffs_project_merge_request_path(project, merge_request, view: 'parallel')
diff --git a/spec/features/merge_request/user_posts_notes_spec.rb b/spec/features/merge_request/user_posts_notes_spec.rb
index c0655581b18..f24e7090605 100644
--- a/spec/features/merge_request/user_posts_notes_spec.rb
+++ b/spec/features/merge_request/user_posts_notes_spec.rb
@@ -165,9 +165,9 @@ describe 'Merge request > User posts notes', :js do
find('.js-note-edit').click
page.within('.current-note-edit-form') do
- expect(find('#note_note').value).to eq('This is the new content')
+ expect(find('#note_note').value).to include('This is the new content')
first('.js-md').click
- expect(find('#note_note').value).to eq('This is the new content****')
+ expect(find('#note_note').value).to include('This is the new content****')
end
end
diff --git a/spec/features/merge_request/user_resolves_conflicts_spec.rb b/spec/features/merge_request/user_resolves_conflicts_spec.rb
index f0949fefa3b..ce85e81868d 100644
--- a/spec/features/merge_request/user_resolves_conflicts_spec.rb
+++ b/spec/features/merge_request/user_resolves_conflicts_spec.rb
@@ -9,7 +9,6 @@ describe 'Merge request > User resolves conflicts', :js do
before do
# In order to have the diffs collapsed, we need to disable the increase feature
stub_feature_flags(gitlab_git_diff_size_limit_increase: false)
- stub_feature_flags(single_mr_diff_view: false)
end
def create_merge_request(source_branch)
@@ -18,8 +17,6 @@ describe 'Merge request > User resolves conflicts', :js do
end
end
- it_behaves_like 'rendering a single diff version'
-
shared_examples 'conflicts are resolved in Interactive mode' do
it 'conflicts are resolved in Interactive mode' do
within find('.files-wrapper .diff-file', text: 'files/ruby/popen.rb') do
diff --git a/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb b/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
index 9cbea8a8466..eb86b1e33af 100644
--- a/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
+++ b/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
@@ -20,12 +20,9 @@ describe 'Merge request > User resolves diff notes and threads', :js do
end
before do
- stub_feature_flags(single_mr_diff_view: false)
stub_feature_flags(diffs_batch_load: false)
end
- it_behaves_like 'rendering a single diff version'
-
context 'no threads' do
before do
project.add_maintainer(user)
diff --git a/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb b/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
index 70afe056c64..3e77b9e75d6 100644
--- a/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
+++ b/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
@@ -21,7 +21,6 @@ describe 'Merge request > User sees avatars on diff notes', :js do
let!(:note) { create(:diff_note_on_merge_request, project: project, noteable: merge_request, position: position) }
before do
- stub_feature_flags(single_mr_diff_view: false)
stub_feature_flags(diffs_batch_load: false)
project.add_maintainer(user)
sign_in user
@@ -29,8 +28,6 @@ describe 'Merge request > User sees avatars on diff notes', :js do
set_cookie('sidebar_collapsed', 'true')
end
- it_behaves_like 'rendering a single diff version'
-
context 'discussion tab' do
before do
visit project_merge_request_path(project, merge_request)
diff --git a/spec/features/merge_request/user_sees_deployment_widget_spec.rb b/spec/features/merge_request/user_sees_deployment_widget_spec.rb
index 3743ef0f25d..99c9e9dc501 100644
--- a/spec/features/merge_request/user_sees_deployment_widget_spec.rb
+++ b/spec/features/merge_request/user_sees_deployment_widget_spec.rb
@@ -33,10 +33,10 @@ describe 'Merge request > User sees deployment widget', :js do
end
context 'when a user created a new merge request with the same SHA' do
- let(:pipeline2) { create(:ci_pipeline, sha: sha, project: project, ref: 'new-patch-1') }
+ let(:pipeline2) { create(:ci_pipeline, sha: sha, project: project, ref: 'video') }
let(:build2) { create(:ci_build, :success, pipeline: pipeline2) }
let(:environment2) { create(:environment, project: project) }
- let!(:deployment2) { create(:deployment, environment: environment2, sha: sha, ref: 'new-patch-1', deployable: build2) }
+ let!(:deployment2) { create(:deployment, environment: environment2, sha: sha, ref: 'video', deployable: build2) }
it 'displays one environment which is related to the pipeline' do
visit project_merge_request_path(project, merge_request)
diff --git a/spec/features/merge_request/user_sees_diff_spec.rb b/spec/features/merge_request/user_sees_diff_spec.rb
index de142344c26..2d91d09a486 100644
--- a/spec/features/merge_request/user_sees_diff_spec.rb
+++ b/spec/features/merge_request/user_sees_diff_spec.rb
@@ -10,12 +10,9 @@ describe 'Merge request > User sees diff', :js do
let(:merge_request) { create(:merge_request, source_project: project) }
before do
- stub_feature_flags(single_mr_diff_view: false)
stub_feature_flags(diffs_batch_load: false)
end
- it_behaves_like 'rendering a single diff version'
-
context 'when linking to note' do
describe 'with unresolved note' do
let(:note) { create :diff_note_on_merge_request, project: project, noteable: merge_request }
diff --git a/spec/features/merge_request/user_sees_mr_with_deleted_source_branch_spec.rb b/spec/features/merge_request/user_sees_mr_with_deleted_source_branch_spec.rb
index e28d2ca5536..59e5f5c847d 100644
--- a/spec/features/merge_request/user_sees_mr_with_deleted_source_branch_spec.rb
+++ b/spec/features/merge_request/user_sees_mr_with_deleted_source_branch_spec.rb
@@ -11,14 +11,11 @@ describe 'Merge request > User sees MR with deleted source branch', :js do
let(:user) { project.creator }
before do
- stub_feature_flags(single_mr_diff_view: false)
merge_request.update!(source_branch: 'this-branch-does-not-exist')
sign_in(user)
visit project_merge_request_path(project, merge_request)
end
- it_behaves_like 'rendering a single diff version'
-
it 'shows a message about missing source branch' do
expect(page).to have_content('Source branch does not exist.')
end
diff --git a/spec/features/merge_request/user_sees_versions_spec.rb b/spec/features/merge_request/user_sees_versions_spec.rb
index b3aef601c7b..cd62bab412a 100644
--- a/spec/features/merge_request/user_sees_versions_spec.rb
+++ b/spec/features/merge_request/user_sees_versions_spec.rb
@@ -16,7 +16,6 @@ describe 'Merge request > User sees versions', :js do
let!(:params) { {} }
before do
- stub_feature_flags(single_mr_diff_view: false)
stub_feature_flags(diffs_batch_load: false)
project.add_maintainer(user)
@@ -24,8 +23,6 @@ describe 'Merge request > User sees versions', :js do
visit diffs_project_merge_request_path(project, merge_request, params)
end
- it_behaves_like 'rendering a single diff version'
-
shared_examples 'allows commenting' do |file_id:, line_code:, comment:|
it do
diff_file_selector = ".diff-file[id='#{file_id}']"
@@ -53,7 +50,7 @@ describe 'Merge request > User sees versions', :js do
expect(page).to have_content 'latest version'
end
- expect(page).to have_content '8 Files'
+ expect(page).to have_content '8 files'
end
it_behaves_like 'allows commenting',
@@ -87,7 +84,7 @@ describe 'Merge request > User sees versions', :js do
end
it 'shows comments that were last relevant at that version' do
- expect(page).to have_content '5 Files'
+ expect(page).to have_content '5 files'
position = Gitlab::Diff::Position.new(
old_path: ".gitmodules",
@@ -131,12 +128,10 @@ describe 'Merge request > User sees versions', :js do
diff_id: merge_request_diff3.id,
start_sha: '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9'
)
- expect(page).to have_content '4 Files'
+ expect(page).to have_content '4 files'
- additions_content = page.find('.diff-stats.is-compare-versions-header .diff-stats-group svg.ic-file-addition')
- .ancestor('.diff-stats-group').text
- deletions_content = page.find('.diff-stats.is-compare-versions-header .diff-stats-group svg.ic-file-deletion')
- .ancestor('.diff-stats-group').text
+ additions_content = page.find('.diff-stats.is-compare-versions-header .diff-stats-group .js-file-addition-line').text
+ deletions_content = page.find('.diff-stats.is-compare-versions-header .diff-stats-group .js-file-deletion-line').text
expect(additions_content).to eq '15'
expect(deletions_content).to eq '6'
@@ -159,12 +154,10 @@ describe 'Merge request > User sees versions', :js do
end
it 'shows diff between new and old version' do
- additions_content = page.find('.diff-stats.is-compare-versions-header .diff-stats-group svg.ic-file-addition')
- .ancestor('.diff-stats-group').text
- deletions_content = page.find('.diff-stats.is-compare-versions-header .diff-stats-group svg.ic-file-deletion')
- .ancestor('.diff-stats-group').text
+ additions_content = page.find('.diff-stats.is-compare-versions-header .diff-stats-group .js-file-addition-line').text
+ deletions_content = page.find('.diff-stats.is-compare-versions-header .diff-stats-group .js-file-deletion-line').text
- expect(page).to have_content '4 Files'
+ expect(page).to have_content '4 files'
expect(additions_content).to eq '15'
expect(deletions_content).to eq '6'
end
@@ -174,7 +167,7 @@ describe 'Merge request > User sees versions', :js do
page.within '.mr-version-dropdown' do
expect(page).to have_content 'latest version'
end
- expect(page).to have_content '8 Files'
+ expect(page).to have_content '8 files'
end
it_behaves_like 'allows commenting',
@@ -200,7 +193,7 @@ describe 'Merge request > User sees versions', :js do
find('.btn-default').click
click_link 'version 1'
end
- expect(page).to have_content '0 Files'
+ expect(page).to have_content '0 files'
end
end
@@ -226,7 +219,7 @@ describe 'Merge request > User sees versions', :js do
expect(page).to have_content 'version 1'
end
- expect(page).to have_content '0 Files'
+ expect(page).to have_content '0 files'
end
end
diff --git a/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb b/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
index 7fe72e1bc8a..95cb0a2dee3 100644
--- a/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
+++ b/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
@@ -25,15 +25,12 @@ describe 'User comments on a diff', :js do
let(:user) { create(:user) }
before do
- stub_feature_flags(single_mr_diff_view: false)
project.add_maintainer(user)
sign_in(user)
visit(diffs_project_merge_request_path(project, merge_request))
end
- it_behaves_like 'rendering a single diff version'
-
context 'single suggestion note' do
it 'hides suggestion popover' do
click_diff_line(find("[id='#{sample_compare.changes[1][:line_code]}']"))
@@ -97,8 +94,7 @@ describe 'User comments on a diff', :js do
end
context 'multiple suggestions in expanded lines' do
- # Report issue: https://gitlab.com/gitlab-org/gitlab/issues/38277
- # Fix issue: https://gitlab.com/gitlab-org/gitlab/issues/39095
+ # https://gitlab.com/gitlab-org/gitlab/issues/38277
it 'suggestions are appliable', :quarantine do
diff_file = merge_request.diffs(paths: ['files/ruby/popen.rb']).diff_files.first
hash = Digest::SHA1.hexdigest(diff_file.file_path)
diff --git a/spec/features/merge_request/user_toggles_whitespace_changes_spec.rb b/spec/features/merge_request/user_toggles_whitespace_changes_spec.rb
index 5e59bc87e68..4db067a4e41 100644
--- a/spec/features/merge_request/user_toggles_whitespace_changes_spec.rb
+++ b/spec/features/merge_request/user_toggles_whitespace_changes_spec.rb
@@ -8,7 +8,6 @@ describe 'Merge request > User toggles whitespace changes', :js do
let(:user) { project.creator }
before do
- stub_feature_flags(single_mr_diff_view: false)
project.add_maintainer(user)
sign_in(user)
visit diffs_project_merge_request_path(project, merge_request)
@@ -16,8 +15,6 @@ describe 'Merge request > User toggles whitespace changes', :js do
find('.js-show-diff-settings').click
end
- it_behaves_like 'rendering a single diff version'
-
it 'has a button to toggle whitespace changes' do
expect(page).to have_content 'Show whitespace changes'
end
diff --git a/spec/features/merge_request/user_views_diffs_spec.rb b/spec/features/merge_request/user_views_diffs_spec.rb
index 313f438e23b..e0e4058dd47 100644
--- a/spec/features/merge_request/user_views_diffs_spec.rb
+++ b/spec/features/merge_request/user_views_diffs_spec.rb
@@ -9,7 +9,6 @@ describe 'User views diffs', :js do
let(:project) { create(:project, :public, :repository) }
before do
- stub_feature_flags(single_mr_diff_view: false)
stub_feature_flags(diffs_batch_load: false)
visit(diffs_project_merge_request_path(project, merge_request))
@@ -18,8 +17,6 @@ describe 'User views diffs', :js do
find('.js-toggle-tree-list').click
end
- it_behaves_like 'rendering a single diff version'
-
shared_examples 'unfold diffs' do
it 'unfolds diffs upwards' do
first('.js-unfold').click
diff --git a/spec/features/merge_requests/filters_generic_behavior_spec.rb b/spec/features/merge_requests/filters_generic_behavior_spec.rb
index 58aad1b7e91..c3400acae4f 100644
--- a/spec/features/merge_requests/filters_generic_behavior_spec.rb
+++ b/spec/features/merge_requests/filters_generic_behavior_spec.rb
@@ -23,7 +23,7 @@ describe 'Merge Requests > Filters generic behavior', :js do
context 'when filtered by a label' do
before do
- input_filtered_search('label:~bug')
+ input_filtered_search('label=~bug')
end
describe 'state tabs' do
diff --git a/spec/features/merge_requests/user_filters_by_assignees_spec.rb b/spec/features/merge_requests/user_filters_by_assignees_spec.rb
index 00bd8455ae1..3abee3b656a 100644
--- a/spec/features/merge_requests/user_filters_by_assignees_spec.rb
+++ b/spec/features/merge_requests/user_filters_by_assignees_spec.rb
@@ -18,7 +18,7 @@ describe 'Merge Requests > User filters by assignees', :js do
context 'filtering by assignee:none' do
it 'applies the filter' do
- input_filtered_search('assignee:none')
+ input_filtered_search('assignee=none')
expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
expect(page).not_to have_content 'Bugfix1'
@@ -26,9 +26,9 @@ describe 'Merge Requests > User filters by assignees', :js do
end
end
- context 'filtering by assignee:@username' do
+ context 'filtering by assignee=@username' do
it 'applies the filter' do
- input_filtered_search("assignee:@#{user.username}")
+ input_filtered_search("assignee=@#{user.username}")
expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
expect(page).to have_content 'Bugfix1'
diff --git a/spec/features/merge_requests/user_filters_by_labels_spec.rb b/spec/features/merge_requests/user_filters_by_labels_spec.rb
index fd2b4b23f96..7a80ebe9be3 100644
--- a/spec/features/merge_requests/user_filters_by_labels_spec.rb
+++ b/spec/features/merge_requests/user_filters_by_labels_spec.rb
@@ -22,7 +22,7 @@ describe 'Merge Requests > User filters by labels', :js do
context 'filtering by label:none' do
it 'applies the filter' do
- input_filtered_search('label:none')
+ input_filtered_search('label=none')
expect(page).to have_issuable_counts(open: 0, closed: 0, all: 0)
expect(page).not_to have_content 'Bugfix1'
@@ -32,7 +32,7 @@ describe 'Merge Requests > User filters by labels', :js do
context 'filtering by label:~enhancement' do
it 'applies the filter' do
- input_filtered_search('label:~enhancement')
+ input_filtered_search('label=~enhancement')
expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
expect(page).to have_content 'Bugfix2'
@@ -42,7 +42,7 @@ describe 'Merge Requests > User filters by labels', :js do
context 'filtering by label:~enhancement and label:~bug' do
it 'applies the filters' do
- input_filtered_search('label:~bug label:~enhancement')
+ input_filtered_search('label=~bug label=~enhancement')
expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
expect(page).to have_content 'Bugfix2'
diff --git a/spec/features/merge_requests/user_filters_by_milestones_spec.rb b/spec/features/merge_requests/user_filters_by_milestones_spec.rb
index e0ee69d7a5b..8cb686e191e 100644
--- a/spec/features/merge_requests/user_filters_by_milestones_spec.rb
+++ b/spec/features/merge_requests/user_filters_by_milestones_spec.rb
@@ -18,14 +18,14 @@ describe 'Merge Requests > User filters by milestones', :js do
end
it 'filters by no milestone' do
- input_filtered_search('milestone:none')
+ input_filtered_search('milestone=none')
expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
expect(page).to have_css('.merge-request', count: 1)
end
it 'filters by a specific milestone' do
- input_filtered_search("milestone:%'#{milestone.title}'")
+ input_filtered_search("milestone=%'#{milestone.title}'")
expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
expect(page).to have_css('.merge-request', count: 1)
@@ -33,7 +33,7 @@ describe 'Merge Requests > User filters by milestones', :js do
describe 'filters by upcoming milestone' do
it 'does not show merge requests with no expiry' do
- input_filtered_search('milestone:upcoming')
+ input_filtered_search('milestone=upcoming')
expect(page).to have_issuable_counts(open: 0, closed: 0, all: 0)
expect(page).to have_css('.merge-request', count: 0)
@@ -43,7 +43,7 @@ describe 'Merge Requests > User filters by milestones', :js do
let(:milestone) { create(:milestone, project: project, due_date: Date.tomorrow) }
it 'shows merge requests' do
- input_filtered_search('milestone:upcoming')
+ input_filtered_search('milestone=upcoming')
expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
expect(page).to have_css('.merge-request', count: 1)
@@ -54,7 +54,7 @@ describe 'Merge Requests > User filters by milestones', :js do
let(:milestone) { create(:milestone, project: project, due_date: Date.yesterday) }
it 'does not show any merge requests' do
- input_filtered_search('milestone:upcoming')
+ input_filtered_search('milestone=upcoming')
expect(page).to have_issuable_counts(open: 0, closed: 0, all: 0)
expect(page).to have_css('.merge-request', count: 0)
diff --git a/spec/features/merge_requests/user_filters_by_multiple_criteria_spec.rb b/spec/features/merge_requests/user_filters_by_multiple_criteria_spec.rb
index bc6e2ac5132..5c9d53778d2 100644
--- a/spec/features/merge_requests/user_filters_by_multiple_criteria_spec.rb
+++ b/spec/features/merge_requests/user_filters_by_multiple_criteria_spec.rb
@@ -20,7 +20,7 @@ describe 'Merge requests > User filters by multiple criteria', :js do
describe 'filtering by label:~"Won\'t fix" and assignee:~bug' do
it 'applies the filters' do
- input_filtered_search("label:~\"Won't fix\" assignee:@#{user.username}")
+ input_filtered_search("label=~\"Won't fix\" assignee=@#{user.username}")
expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
expect(page).to have_content 'Bugfix2'
@@ -30,7 +30,7 @@ describe 'Merge requests > User filters by multiple criteria', :js do
describe 'filtering by text, author, assignee, milestone, and label' do
it 'filters by text, author, assignee, milestone, and label' do
- input_filtered_search_keys("author:@#{user.username} assignee:@#{user.username} milestone:%\"v1.1\" label:~\"Won't fix\" Bug")
+ input_filtered_search_keys("author=@#{user.username} assignee=@#{user.username} milestone=%\"v1.1\" label=~\"Won't fix\" Bug")
expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
expect(page).to have_content 'Bugfix2'
diff --git a/spec/features/merge_requests/user_filters_by_target_branch_spec.rb b/spec/features/merge_requests/user_filters_by_target_branch_spec.rb
index 0d03c5eae31..faff7de729d 100644
--- a/spec/features/merge_requests/user_filters_by_target_branch_spec.rb
+++ b/spec/features/merge_requests/user_filters_by_target_branch_spec.rb
@@ -17,7 +17,7 @@ describe 'Merge Requests > User filters by target branch', :js do
context 'filtering by target-branch:master' do
it 'applies the filter' do
- input_filtered_search('target-branch:master')
+ input_filtered_search('target-branch=master')
expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
expect(page).to have_content mr1.title
@@ -27,7 +27,7 @@ describe 'Merge Requests > User filters by target branch', :js do
context 'filtering by target-branch:merged-target' do
it 'applies the filter' do
- input_filtered_search('target-branch:merged-target')
+ input_filtered_search('target-branch=merged-target')
expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
expect(page).not_to have_content mr1.title
@@ -37,7 +37,7 @@ describe 'Merge Requests > User filters by target branch', :js do
context 'filtering by target-branch:feature' do
it 'applies the filter' do
- input_filtered_search('target-branch:feature')
+ input_filtered_search('target-branch=feature')
expect(page).to have_issuable_counts(open: 0, closed: 0, all: 0)
expect(page).not_to have_content mr1.title
diff --git a/spec/features/profiles/active_sessions_spec.rb b/spec/features/profiles/active_sessions_spec.rb
index a5c2d15f598..bab6251a5d4 100644
--- a/spec/features/profiles/active_sessions_spec.rb
+++ b/spec/features/profiles/active_sessions_spec.rb
@@ -84,4 +84,31 @@ describe 'Profile > Active Sessions', :clean_gitlab_redis_shared_state do
expect(page).not_to have_content('Chrome on Windows')
end
end
+
+ it 'User can revoke a session', :js, :redis_session_store do
+ Capybara::Session.new(:session1)
+ Capybara::Session.new(:session2)
+
+ # set an additional session in another browser
+ using_session :session2 do
+ gitlab_sign_in(user)
+ end
+
+ using_session :session1 do
+ gitlab_sign_in(user)
+ visit profile_active_sessions_path
+
+ expect(page).to have_link('Revoke', count: 1)
+
+ accept_confirm { click_on 'Revoke' }
+
+ expect(page).not_to have_link('Revoke')
+ end
+
+ using_session :session2 do
+ visit profile_active_sessions_path
+
+ expect(page).to have_content('You need to sign in or sign up before continuing.')
+ end
+ end
end
diff --git a/spec/features/profiles/user_visits_profile_preferences_page_spec.rb b/spec/features/profiles/user_visits_profile_preferences_page_spec.rb
index 4dbdea02e27..b18f763a968 100644
--- a/spec/features/profiles/user_visits_profile_preferences_page_spec.rb
+++ b/spec/features/profiles/user_visits_profile_preferences_page_spec.rb
@@ -86,6 +86,23 @@ describe 'User visits the profile preferences page' do
end
end
+ describe 'User changes whitespace in code' do
+ it 'updates their preference' do
+ expect(user.render_whitespace_in_code).to be(false)
+ expect(render_whitespace_field).not_to be_checked
+ render_whitespace_field.click
+
+ click_button 'Save changes'
+
+ expect(user.reload.render_whitespace_in_code).to be(true)
+ expect(render_whitespace_field).to be_checked
+ end
+ end
+
+ def render_whitespace_field
+ find_field('user[render_whitespace_in_code]')
+ end
+
def expect_preferences_saved_message
page.within('.flash-container') do
expect(page).to have_content('Preferences saved.')
diff --git a/spec/features/projects/badges/coverage_spec.rb b/spec/features/projects/badges/coverage_spec.rb
index 46aa104fdd7..dd51eac9be1 100644
--- a/spec/features/projects/badges/coverage_spec.rb
+++ b/spec/features/projects/badges/coverage_spec.rb
@@ -63,7 +63,7 @@ describe 'test coverage badge' do
create(:ci_pipeline, opts).tap do |pipeline|
yield pipeline
- pipeline.update_status
+ pipeline.update_legacy_status
end
end
diff --git a/spec/features/projects/blobs/edit_spec.rb b/spec/features/projects/blobs/edit_spec.rb
index 0a5bc64b429..a1d6a8896c7 100644
--- a/spec/features/projects/blobs/edit_spec.rb
+++ b/spec/features/projects/blobs/edit_spec.rb
@@ -12,11 +12,9 @@ describe 'Editing file blob', :js do
let(:readme_file_path) { 'README.md' }
before do
- stub_feature_flags(web_ide_default: false, single_mr_diff_view: false)
+ stub_feature_flags(web_ide_default: false)
end
- it_behaves_like 'rendering a single diff version'
-
context 'as a developer' do
let(:user) { create(:user) }
let(:role) { :developer }
diff --git a/spec/features/projects/environments/environment_metrics_spec.rb b/spec/features/projects/environments/environment_metrics_spec.rb
index c027b776d67..d34db5e15cc 100644
--- a/spec/features/projects/environments/environment_metrics_spec.rb
+++ b/spec/features/projects/environments/environment_metrics_spec.rb
@@ -6,7 +6,7 @@ describe 'Environment > Metrics' do
include PrometheusHelpers
let(:user) { create(:user) }
- let(:project) { create(:prometheus_project) }
+ let(:project) { create(:prometheus_project, :repository) }
let(:pipeline) { create(:ci_pipeline, project: project) }
let(:build) { create(:ci_build, pipeline: pipeline) }
let(:environment) { create(:environment, project: project) }
diff --git a/spec/features/projects/environments/environment_spec.rb b/spec/features/projects/environments/environment_spec.rb
index 3eab13cb820..bbd33225bb9 100644
--- a/spec/features/projects/environments/environment_spec.rb
+++ b/spec/features/projects/environments/environment_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe 'Environment' do
- let(:project) { create(:project) }
+ let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
let(:role) { :developer }
@@ -12,11 +12,16 @@ describe 'Environment' do
project.add_role(user, role)
end
+ def auto_stop_button_selector
+ %q{button[title="Prevent environment from auto-stopping"]}
+ end
+
describe 'environment details page' do
let!(:environment) { create(:environment, project: project) }
let!(:permissions) { }
let!(:deployment) { }
let!(:action) { }
+ let!(:cluster) { }
before do
visit_environment(environment)
@@ -26,6 +31,40 @@ describe 'Environment' do
expect(page).to have_content(environment.name)
end
+ context 'without auto-stop' do
+ it 'does not show auto-stop text' do
+ expect(page).not_to have_content('Auto stops')
+ end
+
+ it 'does not show auto-stop button' do
+ expect(page).not_to have_selector(auto_stop_button_selector)
+ end
+ end
+
+ context 'with auto-stop' do
+ let!(:environment) { create(:environment, :will_auto_stop, name: 'staging', project: project) }
+
+ before do
+ visit_environment(environment)
+ end
+
+ it 'shows auto stop info' do
+ expect(page).to have_content('Auto stops')
+ end
+
+ it 'shows auto stop button' do
+ expect(page).to have_selector(auto_stop_button_selector)
+ expect(page.find(auto_stop_button_selector).find(:xpath, '..')['action']).to have_content(cancel_auto_stop_project_environment_path(environment.project, environment))
+ end
+
+ it 'allows user to cancel auto stop', :js do
+ page.find(auto_stop_button_selector).click
+ wait_for_all_requests
+ expect(page).to have_content('Auto stop successfully canceled.')
+ expect(page).not_to have_selector(auto_stop_button_selector)
+ end
+ end
+
context 'without deployments' do
it 'does not show deployments' do
expect(page).to have_content('You don\'t have any deployments right now.')
@@ -94,19 +133,10 @@ describe 'Environment' do
it 'does show build name' do
expect(page).to have_link("#{build.name} (##{build.id})")
- expect(page).not_to have_link('Re-deploy')
- expect(page).not_to have_terminal_button
end
- context 'when user has ability to re-deploy' do
- let(:permissions) do
- create(:protected_branch, :developers_can_merge,
- name: build.ref, project: project)
- end
-
- it 'does show re-deploy' do
- expect(page).to have_link('Re-deploy')
- end
+ it 'shows the re-deploy button' do
+ expect(page).to have_button('Re-deploy to environment')
end
context 'with manual action' do
@@ -141,6 +171,11 @@ describe 'Environment' do
end
context 'when user has no ability to trigger a deployment' do
+ let(:permissions) do
+ create(:protected_branch, :no_one_can_merge,
+ name: action.ref, project: project)
+ end
+
it 'does not show a play button' do
expect(page).not_to have_link(action.name)
end
@@ -158,8 +193,9 @@ describe 'Environment' do
context 'with terminal' do
context 'when user configured kubernetes from CI/CD > Clusters' do
- let!(:cluster) { create(:cluster, :project, :provided_by_gcp) }
- let(:project) { cluster.project }
+ let!(:cluster) do
+ create(:cluster, :project, :provided_by_gcp, projects: [project])
+ end
context 'for project maintainer' do
let(:role) { :maintainer }
@@ -228,6 +264,11 @@ describe 'Environment' do
end
context 'when user has no ability to stop environment' do
+ let(:permissions) do
+ create(:protected_branch, :no_one_can_merge,
+ name: action.ref, project: project)
+ end
+
it 'does not allow to stop environment' do
expect(page).not_to have_button('Stop')
end
diff --git a/spec/features/projects/features_visibility_spec.rb b/spec/features/projects/features_visibility_spec.rb
index a825911b01a..9854335a7ad 100644
--- a/spec/features/projects/features_visibility_spec.rb
+++ b/spec/features/projects/features_visibility_spec.rb
@@ -186,7 +186,7 @@ describe 'Edit Project Settings' do
click_button "Save changes"
end
- expect(find(".sharing-permissions")).to have_selector(".project-feature-toggle.is-disabled", count: 2)
+ expect(find(".sharing-permissions")).to have_selector(".project-feature-toggle.is-disabled", count: 3)
end
it "shows empty features project homepage" do
diff --git a/spec/features/projects/files/user_browses_files_spec.rb b/spec/features/projects/files/user_browses_files_spec.rb
index 10672bbec68..b8efabb0cab 100644
--- a/spec/features/projects/files/user_browses_files_spec.rb
+++ b/spec/features/projects/files/user_browses_files_spec.rb
@@ -41,6 +41,11 @@ describe "User browses files" do
it "shows the `Browse Directory` link" do
click_link("files")
+
+ page.within('.repo-breadcrumb') do
+ expect(page).to have_link('files')
+ end
+
click_link("History")
expect(page).to have_link("Browse Directory").and have_no_link("Browse Code")
@@ -229,6 +234,16 @@ describe "User browses files" do
expect(page).to have_content("*.rb")
.and have_content("Dmitriy Zaporozhets")
.and have_content("Initial commit")
+ .and have_content("Ignore DS files")
+
+ previous_commit_anchor = "//a[@title='Ignore DS files']/parent::span/following-sibling::span/a"
+ find(:xpath, previous_commit_anchor).click
+
+ expect(page).to have_content("*.rb")
+ .and have_content("Dmitriy Zaporozhets")
+ .and have_content("Initial commit")
+
+ expect(page).not_to have_content("Ignore DS files")
end
end
diff --git a/spec/features/projects/files/user_browses_lfs_files_spec.rb b/spec/features/projects/files/user_browses_lfs_files_spec.rb
index 618290416bd..dbeec973865 100644
--- a/spec/features/projects/files/user_browses_lfs_files_spec.rb
+++ b/spec/features/projects/files/user_browses_lfs_files_spec.rb
@@ -19,7 +19,17 @@ describe 'Projects > Files > User browses LFS files' do
it 'is possible to see raw content of LFS pointer' do
click_link 'files'
+
+ page.within('.repo-breadcrumb') do
+ expect(page).to have_link('files')
+ end
+
click_link 'lfs'
+
+ page.within('.repo-breadcrumb') do
+ expect(page).to have_link('lfs')
+ end
+
click_link 'lfs_object.iso'
expect(page).to have_content 'version https://git-lfs.github.com/spec/v1'
@@ -38,6 +48,11 @@ describe 'Projects > Files > User browses LFS files' do
it 'shows an LFS object' do
click_link('files')
+
+ page.within('.repo-breadcrumb') do
+ expect(page).to have_link('files')
+ end
+
click_link('lfs')
click_link('lfs_object.iso')
diff --git a/spec/features/projects/fork_spec.rb b/spec/features/projects/fork_spec.rb
index 0f97032eefa..bfab4387688 100644
--- a/spec/features/projects/fork_spec.rb
+++ b/spec/features/projects/fork_spec.rb
@@ -27,6 +27,89 @@ describe 'Project fork' do
expect(page).to have_css('a.disabled', text: 'Fork')
end
+ context 'forking enabled / disabled in project settings' do
+ before do
+ project.project_feature.update_attribute(
+ :forking_access_level, forking_access_level)
+ end
+
+ context 'forking is enabled' do
+ let(:forking_access_level) { ProjectFeature::ENABLED }
+
+ it 'enables fork button' do
+ visit project_path(project)
+
+ expect(page).to have_css('a', text: 'Fork')
+ expect(page).not_to have_css('a.disabled', text: 'Fork')
+ end
+
+ it 'renders new project fork page' do
+ visit new_project_fork_path(project)
+
+ expect(page.status_code).to eq(200)
+ expect(page).to have_text(' Select a namespace to fork the project ')
+ end
+ end
+
+ context 'forking is disabled' do
+ let(:forking_access_level) { ProjectFeature::DISABLED }
+
+ it 'does not render fork button' do
+ visit project_path(project)
+
+ expect(page).not_to have_css('a', text: 'Fork')
+ end
+
+ it 'does not render new project fork page' do
+ visit new_project_fork_path(project)
+
+ expect(page.status_code).to eq(404)
+ end
+ end
+
+ context 'forking is private' do
+ let(:forking_access_level) { ProjectFeature::PRIVATE }
+
+ before do
+ project.update(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+ end
+
+ context 'user is not a team member' do
+ it 'does not render fork button' do
+ visit project_path(project)
+
+ expect(page).not_to have_css('a', text: 'Fork')
+ end
+
+ it 'does not render new project fork page' do
+ visit new_project_fork_path(project)
+
+ expect(page.status_code).to eq(404)
+ end
+ end
+
+ context 'user is a team member' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'enables fork button' do
+ visit project_path(project)
+
+ expect(page).to have_css('a', text: 'Fork')
+ expect(page).not_to have_css('a.disabled', text: 'Fork')
+ end
+
+ it 'renders new project fork page' do
+ visit new_project_fork_path(project)
+
+ expect(page.status_code).to eq(200)
+ expect(page).to have_text(' Select a namespace to fork the project ')
+ end
+ end
+ end
+ end
+
it 'forks the project', :sidekiq_might_not_need_inline do
visit project_path(project)
diff --git a/spec/features/projects/jobs_spec.rb b/spec/features/projects/jobs_spec.rb
index 26ba7ae7a29..f9ff076a416 100644
--- a/spec/features/projects/jobs_spec.rb
+++ b/spec/features/projects/jobs_spec.rb
@@ -306,6 +306,21 @@ describe 'Jobs', :clean_gitlab_redis_shared_state do
end
end
+ context 'when job is waiting for resource', :js do
+ let(:job) { create(:ci_build, :waiting_for_resource, pipeline: pipeline, resource_group: resource_group) }
+ let(:resource_group) { create(:ci_resource_group, project: project) }
+
+ before do
+ visit project_job_path(project, job)
+ wait_for_requests
+ end
+
+ it 'shows correct UI components' do
+ expect(page).to have_content("This job is waiting for resource: #{resource_group.key}")
+ expect(page).to have_link("Cancel this job")
+ end
+ end
+
context "Job from other project" do
before do
visit project_job_path(project, job2)
diff --git a/spec/features/projects/members/list_spec.rb b/spec/features/projects/members/list_spec.rb
index 6d92c777033..84000ef73ce 100644
--- a/spec/features/projects/members/list_spec.rb
+++ b/spec/features/projects/members/list_spec.rb
@@ -87,12 +87,12 @@ describe 'Project members list' do
end
def add_user(id, role)
- page.within ".users-project-form" do
+ page.within ".invite-users-form" do
select2(id, from: "#user_ids", multiple: true)
select(role, from: "access_level")
end
- click_button "Add to project"
+ click_button "Invite"
end
def visit_members_page
diff --git a/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb b/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb
index 501dd05300a..cbcd03b33ce 100644
--- a/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb
+++ b/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb
@@ -20,10 +20,10 @@ describe 'Projects > Members > Maintainer adds member with expiration date', :js
date = 4.days.from_now
visit project_project_members_path(project)
- page.within '.users-project-form' do
+ page.within '.invite-users-form' do
select2(new_member.id, from: '#user_ids', multiple: true)
fill_in 'expires_at', with: date.to_s(:medium) + "\n"
- click_on 'Add to project'
+ click_on 'Invite'
end
page.within "#project_member_#{new_member.project_members.first.id}" do
diff --git a/spec/features/projects/pages_spec.rb b/spec/features/projects/pages_spec.rb
index 3c4b5b2c4ca..c8da87041f9 100644
--- a/spec/features/projects/pages_spec.rb
+++ b/spec/features/projects/pages_spec.rb
@@ -322,7 +322,7 @@ shared_examples 'pages settings editing' do
before do
allow(Projects::UpdateService).to receive(:new).and_return(service)
- allow(service).to receive(:execute).and_return(status: :error)
+ allow(service).to receive(:execute).and_return(status: :error, message: 'Some error has occurred')
end
it 'tries to change the setting' do
@@ -332,7 +332,7 @@ shared_examples 'pages settings editing' do
click_button 'Save'
- expect(page).to have_text('Something went wrong on our end')
+ expect(page).to have_text('Some error has occurred')
end
end
@@ -347,7 +347,7 @@ shared_examples 'pages settings editing' do
visit project_pages_path(project)
expect(page).to have_field(:project_pages_https_only, disabled: true)
- expect(page).not_to have_button('Save')
+ expect(page).to have_button('Save')
end
end
diff --git a/spec/features/projects/pipelines/pipeline_spec.rb b/spec/features/projects/pipelines/pipeline_spec.rb
index 94fac9a2eb5..198af65c361 100644
--- a/spec/features/projects/pipelines/pipeline_spec.rb
+++ b/spec/features/projects/pipelines/pipeline_spec.rb
@@ -59,7 +59,8 @@ describe 'Pipeline', :js do
describe 'GET /:project/pipelines/:id' do
include_context 'pipeline builds'
- let(:project) { create(:project, :repository) }
+ let(:group) { create(:group) }
+ let(:project) { create(:project, :repository, group: group) }
let(:pipeline) { create(:ci_pipeline, project: project, ref: 'master', sha: project.commit.id, user: user) }
subject(:visit_pipeline) { visit project_pipeline_path(project, pipeline) }
@@ -329,6 +330,32 @@ describe 'Pipeline', :js do
end
end
+ context 'deleting pipeline' do
+ context 'when user can not delete' do
+ before do
+ visit_pipeline
+ end
+
+ it { expect(page).not_to have_button('Delete') }
+ end
+
+ context 'when deleting' do
+ before do
+ group.add_owner(user)
+
+ visit_pipeline
+
+ click_button 'Delete'
+ click_button 'Delete pipeline'
+ end
+
+ it 'redirects to pipeline overview page', :sidekiq_might_not_need_inline do
+ expect(page).to have_content('The pipeline has been deleted')
+ expect(current_path).to eq(project_pipelines_path(project))
+ end
+ end
+ end
+
context 'when pipeline ref does not exist in repository anymore' do
let(:pipeline) do
create(:ci_empty_pipeline, project: project,
@@ -606,6 +633,117 @@ describe 'Pipeline', :js do
end
end
+ context 'when build requires resource', :sidekiq_inline do
+ let_it_be(:project) { create(:project, :repository) }
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+ let(:resource_group) { create(:ci_resource_group, project: project) }
+
+ let!(:test_job) do
+ create(:ci_build, :pending, stage: 'test', name: 'test',
+ stage_idx: 1, pipeline: pipeline, project: project)
+ end
+
+ let!(:deploy_job) do
+ create(:ci_build, :created, stage: 'deploy', name: 'deploy',
+ stage_idx: 2, pipeline: pipeline, project: project, resource_group: resource_group)
+ end
+
+ describe 'GET /:project/pipelines/:id' do
+ subject { visit project_pipeline_path(project, pipeline) }
+
+ it 'shows deploy job as created' do
+ subject
+
+ within('.pipeline-header-container') do
+ expect(page).to have_content('pending')
+ end
+
+ within('.pipeline-graph') do
+ within '.stage-column:nth-child(1)' do
+ expect(page).to have_content('test')
+ expect(page).to have_css('.ci-status-icon-pending')
+ end
+
+ within '.stage-column:nth-child(2)' do
+ expect(page).to have_content('deploy')
+ expect(page).to have_css('.ci-status-icon-created')
+ end
+ end
+ end
+
+ context 'when test job succeeded' do
+ before do
+ test_job.success!
+ end
+
+ it 'shows deploy job as pending' do
+ subject
+
+ within('.pipeline-header-container') do
+ expect(page).to have_content('running')
+ end
+
+ within('.pipeline-graph') do
+ within '.stage-column:nth-child(1)' do
+ expect(page).to have_content('test')
+ expect(page).to have_css('.ci-status-icon-success')
+ end
+
+ within '.stage-column:nth-child(2)' do
+ expect(page).to have_content('deploy')
+ expect(page).to have_css('.ci-status-icon-pending')
+ end
+ end
+ end
+ end
+
+ context 'when test job succeeded but there are no available resources' do
+ let(:another_job) { create(:ci_build, :running, project: project, resource_group: resource_group) }
+
+ before do
+ resource_group.assign_resource_to(another_job)
+ test_job.success!
+ end
+
+ it 'shows deploy job as waiting for resource' do
+ subject
+
+ within('.pipeline-header-container') do
+ expect(page).to have_content('waiting')
+ end
+
+ within('.pipeline-graph') do
+ within '.stage-column:nth-child(2)' do
+ expect(page).to have_content('deploy')
+ expect(page).to have_css('.ci-status-icon-waiting-for-resource')
+ end
+ end
+ end
+
+ context 'when resource is released from another job' do
+ before do
+ another_job.success!
+ end
+
+ it 'shows deploy job as pending' do
+ subject
+
+ within('.pipeline-header-container') do
+ expect(page).to have_content('running')
+ end
+
+ within('.pipeline-graph') do
+ within '.stage-column:nth-child(2)' do
+ expect(page).to have_content('deploy')
+ expect(page).to have_css('.ci-status-icon-pending')
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+
describe 'GET /:project/pipelines/:id/builds' do
include_context 'pipeline builds'
diff --git a/spec/features/projects/raw/user_interacts_with_raw_endpoint_spec.rb b/spec/features/projects/raw/user_interacts_with_raw_endpoint_spec.rb
index 6d587053b4f..673766073a2 100644
--- a/spec/features/projects/raw/user_interacts_with_raw_endpoint_spec.rb
+++ b/spec/features/projects/raw/user_interacts_with_raw_endpoint_spec.rb
@@ -31,8 +31,6 @@ describe 'Projects > Raw > User interacts with raw endpoint' do
visit project_raw_url(project, file_path)
end
- expect(source).to have_content('You are being redirected')
- click_link('redirected')
expect(page).to have_content('You cannot access the raw file. Please wait a minute.')
end
end
diff --git a/spec/features/projects/serverless/functions_spec.rb b/spec/features/projects/serverless/functions_spec.rb
index e82e5b81021..c661ceb8eda 100644
--- a/spec/features/projects/serverless/functions_spec.rb
+++ b/spec/features/projects/serverless/functions_spec.rb
@@ -6,7 +6,7 @@ describe 'Functions', :js do
include KubernetesHelpers
include ReactiveCachingHelpers
- let(:project) { create(:project) }
+ let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
before do
@@ -36,9 +36,8 @@ describe 'Functions', :js do
end
context 'when the user has a cluster and knative installed and visits the serverless page' do
- let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+ let(:cluster) { create(:cluster, :project, :provided_by_gcp, projects: [project]) }
let(:service) { cluster.platform_kubernetes }
- let(:project) { cluster.project }
let(:environment) { create(:environment, project: project) }
let!(:deployment) { create(:deployment, :success, cluster: cluster, environment: environment) }
let(:knative_services_finder) { environment.knative_services_finder }
diff --git a/spec/features/projects/settings/project_settings_spec.rb b/spec/features/projects/settings/project_settings_spec.rb
index 7afddc0e712..b601866c96b 100644
--- a/spec/features/projects/settings/project_settings_spec.rb
+++ b/spec/features/projects/settings/project_settings_spec.rb
@@ -34,6 +34,26 @@ describe 'Projects settings' do
expect_toggle_state(:expanded)
end
+ context 'forking enabled', :js do
+ it 'toggles forking enabled / disabled' do
+ visit edit_project_path(project)
+
+ forking_enabled_input = find('input[name="project[project_feature_attributes][forking_access_level]"]', visible: :hidden)
+ forking_enabled_button = find('input[name="project[project_feature_attributes][forking_access_level]"] + label > button')
+
+ expect(forking_enabled_input.value).to eq('20')
+
+ # disable by clicking toggle
+ forking_enabled_button.click
+ page.within('.sharing-permissions') do
+ find('input[value="Save changes"]').click
+ end
+ wait_for_requests
+
+ expect(forking_enabled_input.value).to eq('0')
+ end
+ end
+
def expect_toggle_state(state)
is_collapsed = state == :collapsed
diff --git a/spec/features/projects/settings/registry_settings_spec.rb b/spec/features/projects/settings/registry_settings_spec.rb
new file mode 100644
index 00000000000..86da866a927
--- /dev/null
+++ b/spec/features/projects/settings/registry_settings_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Project > Settings > CI/CD > Container registry tag expiration policy', :js do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, namespace: user.namespace) }
+
+ context 'as owner' do
+ before do
+ sign_in(user)
+ visit project_settings_ci_cd_path(project)
+ end
+
+ it 'section is available' do
+ settings_block = find('#js-registry-policies')
+ expect(settings_block).to have_text 'Container Registry tag expiration policy'
+ end
+
+ it 'Save expiration policy submit the form', :js do
+ within '#js-registry-policies' do
+ within '.card-body' do
+ click_button(class: 'gl-toggle')
+ select('7 days until tags are automatically removed', from: 'expiration-policy-interval')
+ select('Every day', from: 'expiration-policy-schedule')
+ select('50 tags per image name', from: 'expiration-policy-latest')
+ fill_in('expiration-policy-name-matching', with: '*-production')
+ end
+ submit_button = find('.card-footer .btn.btn-success')
+ expect(submit_button).not_to be_disabled
+ submit_button.click
+ end
+ flash_text = find('.flash-text')
+ expect(flash_text).to have_content('Expiration policy successfully saved.')
+ end
+ end
+end
diff --git a/spec/features/projects/settings/user_manages_project_members_spec.rb b/spec/features/projects/settings/user_manages_project_members_spec.rb
index 6d94388a6e2..705c60f15ee 100644
--- a/spec/features/projects/settings/user_manages_project_members_spec.rb
+++ b/spec/features/projects/settings/user_manages_project_members_spec.rb
@@ -37,7 +37,7 @@ describe 'Projects > Settings > User manages project members' do
visit(project_project_members_path(project))
- page.within('.users-project-form') do
+ page.within('.invite-users-form') do
click_link('Import')
end
diff --git a/spec/features/projects/settings/user_renames_a_project_spec.rb b/spec/features/projects/settings/user_renames_a_project_spec.rb
index d2daf8b922d..789c5e31748 100644
--- a/spec/features/projects/settings/user_renames_a_project_spec.rb
+++ b/spec/features/projects/settings/user_renames_a_project_spec.rb
@@ -59,8 +59,8 @@ describe 'Projects > Settings > User renames a project' do
context 'with emojis' do
it 'shows error for invalid project name' do
- change_name(project, '🚀 foo bar ☁️')
- expect(page).to have_field 'Project name', with: '🚀 foo bar ☁️'
+ change_name(project, '🧮 foo bar ☁️')
+ expect(page).to have_field 'Project name', with: '🧮 foo bar ☁️'
expect(page).not_to have_content "Name can contain only letters, digits, emojis '_', '.', dash and space. It must start with letter, digit, emoji or '_'."
end
end
diff --git a/spec/features/projects/snippets/create_snippet_spec.rb b/spec/features/projects/snippets/create_snippet_spec.rb
index ad65e04473c..94af023e804 100644
--- a/spec/features/projects/snippets/create_snippet_spec.rb
+++ b/spec/features/projects/snippets/create_snippet_spec.rb
@@ -50,7 +50,7 @@ describe 'Projects > Snippets > Create Snippet', :js do
wait_for_requests
link = find('a.no-attachment-icon img[alt="banana_sample"]')['src']
- expect(link).to match(%r{/#{Regexp.escape(project.full_path) }/uploads/\h{32}/banana_sample\.gif\z})
+ expect(link).to match(%r{/#{Regexp.escape(project.full_path)}/uploads/\h{32}/banana_sample\.gif\z})
end
it 'creates a snippet when all required fields are filled in after validation failing' do
@@ -72,7 +72,7 @@ describe 'Projects > Snippets > Create Snippet', :js do
expect(page).to have_selector('strong')
end
link = find('a.no-attachment-icon img[alt="banana_sample"]')['src']
- expect(link).to match(%r{/#{Regexp.escape(project.full_path) }/uploads/\h{32}/banana_sample\.gif\z})
+ expect(link).to match(%r{/#{Regexp.escape(project.full_path)}/uploads/\h{32}/banana_sample\.gif\z})
end
end
diff --git a/spec/features/projects/sourcegraph_csp_spec.rb b/spec/features/projects/sourcegraph_csp_spec.rb
new file mode 100644
index 00000000000..57d1e8e3034
--- /dev/null
+++ b/spec/features/projects/sourcegraph_csp_spec.rb
@@ -0,0 +1,98 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Sourcegraph Content Security Policy' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository, namespace: user.namespace) }
+ let_it_be(:default_csp_values) { "'self' https://some-cdn.test" }
+ let_it_be(:sourcegraph_url) { 'https://sourcegraph.test' }
+ let(:sourcegraph_enabled) { true }
+
+ subject do
+ visit project_blob_path(project, File.join('master', 'README.md'))
+
+ response_headers['Content-Security-Policy']
+ end
+
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:sourcegraph_url).and_return(sourcegraph_url)
+ allow(Gitlab::CurrentSettings).to receive(:sourcegraph_enabled).and_return(sourcegraph_enabled)
+
+ sign_in(user)
+ end
+
+ shared_context 'csp config' do |csp_rule|
+ before do
+ csp = ActionDispatch::ContentSecurityPolicy.new do |p|
+ p.send(csp_rule, default_csp_values) if csp_rule
+ end
+
+ expect_next_instance_of(Projects::BlobController) do |controller|
+ expect(controller).to receive(:current_content_security_policy).and_return(csp)
+ end
+ end
+ end
+
+ context 'when no CSP config' do
+ include_context 'csp config', nil
+
+ it 'does not add CSP directives' do
+ is_expected.to be_blank
+ end
+ end
+
+ describe 'when a CSP config exists for connect-src' do
+ include_context 'csp config', :connect_src
+
+ context 'when sourcegraph enabled' do
+ it 'appends to connect-src' do
+ is_expected.to eql("connect-src #{default_csp_values} #{sourcegraph_url}")
+ end
+ end
+
+ context 'when sourcegraph disabled' do
+ let(:sourcegraph_enabled) { false }
+
+ it 'keeps original connect-src' do
+ is_expected.to eql("connect-src #{default_csp_values}")
+ end
+ end
+ end
+
+ describe 'when a CSP config exists for default-src but not connect-src' do
+ include_context 'csp config', :default_src
+
+ context 'when sourcegraph enabled' do
+ it 'uses default-src values in connect-src' do
+ is_expected.to eql("default-src #{default_csp_values}; connect-src #{default_csp_values} #{sourcegraph_url}")
+ end
+ end
+
+ context 'when sourcegraph disabled' do
+ let(:sourcegraph_enabled) { false }
+
+ it 'does not add connect-src' do
+ is_expected.to eql("default-src #{default_csp_values}")
+ end
+ end
+ end
+
+ describe 'when a CSP config exists for font-src but not connect-src' do
+ include_context 'csp config', :font_src
+
+ context 'when sourcegraph enabled' do
+ it 'adds connect-src with the sourcegraph url' do
+ is_expected.to eql("font-src #{default_csp_values}; connect-src #{sourcegraph_url}")
+ end
+ end
+
+ context 'when sourcegraph disabled' do
+ let(:sourcegraph_enabled) { false }
+
+ it 'does not add connect-src' do
+ is_expected.to eql("font-src #{default_csp_values}")
+ end
+ end
+ end
+end
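The three CSP contexts above reduce to one rule: when Sourcegraph is enabled, its URL is appended to connect-src, seeding the directive from default-src when no connect-src exists, and leaving a blank policy untouched. A minimal standalone sketch of that rule (not GitLab's actual helper; the method name and fallback order are assumptions, and it expects a Rails environment where ActionDispatch::ContentSecurityPolicy is loaded):

    # Sketch only: append a Sourcegraph URL to an existing CSP, mirroring what the spec asserts.
    def append_sourcegraph_to_csp(policy, sourcegraph_url)
      directives = policy.directives
      return policy if directives.empty? # no CSP configured, keep the header blank

      base_sources = directives['connect-src'] || directives['default-src'] || []
      policy.connect_src(*base_sources, sourcegraph_url)
      policy
    end

    policy = ActionDispatch::ContentSecurityPolicy.new { |p| p.connect_src "'self'", 'https://some-cdn.test' }
    append_sourcegraph_to_csp(policy, 'https://sourcegraph.test')
    policy.build # => "connect-src 'self' https://some-cdn.test https://sourcegraph.test"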
diff --git a/spec/features/projects/tree/create_directory_spec.rb b/spec/features/projects/tree/create_directory_spec.rb
index 99285011405..7e0ee861b18 100644
--- a/spec/features/projects/tree/create_directory_spec.rb
+++ b/spec/features/projects/tree/create_directory_spec.rb
@@ -46,8 +46,6 @@ describe 'Multi-file editor new directory', :js do
find('.js-ide-commit-mode').click
- click_button 'Stage'
-
fill_in('commit-message', with: 'commit message ide')
find(:css, ".js-ide-commit-new-mr input").set(false)
diff --git a/spec/features/projects/tree/create_file_spec.rb b/spec/features/projects/tree/create_file_spec.rb
index 780575a5975..eba33168006 100644
--- a/spec/features/projects/tree/create_file_spec.rb
+++ b/spec/features/projects/tree/create_file_spec.rb
@@ -36,8 +36,6 @@ describe 'Multi-file editor new file', :js do
find('.js-ide-commit-mode').click
- click_button 'Stage'
-
fill_in('commit-message', with: 'commit message ide')
find(:css, ".js-ide-commit-new-mr input").set(false)
diff --git a/spec/features/projects/view_on_env_spec.rb b/spec/features/projects/view_on_env_spec.rb
index c2d4cefad12..8b25565c08a 100644
--- a/spec/features/projects/view_on_env_spec.rb
+++ b/spec/features/projects/view_on_env_spec.rb
@@ -9,14 +9,11 @@ describe 'View on environment', :js do
let(:user) { project.creator }
before do
- stub_feature_flags(single_mr_diff_view: false)
stub_feature_flags(diffs_batch_load: false)
project.add_maintainer(user)
end
- it_behaves_like 'rendering a single diff version'
-
context 'when the branch has a route map' do
let(:route_map) do
<<-MAP.strip_heredoc
diff --git a/spec/features/projects/wiki/user_creates_wiki_page_spec.rb b/spec/features/projects/wiki/user_creates_wiki_page_spec.rb
index 499c459621a..7503c8aa52e 100644
--- a/spec/features/projects/wiki/user_creates_wiki_page_spec.rb
+++ b/spec/features/projects/wiki/user_creates_wiki_page_spec.rb
@@ -145,6 +145,24 @@ describe "User creates wiki page" do
end
end
+ it 'creates a wiki page with Org markup', :aggregate_failures do
+ org_content = <<~ORG
+ * Heading
+ ** Subheading
+ [[home][Link to Home]]
+ ORG
+
+ page.within('.wiki-form') do
+ find('#wiki_format option[value=org]').select_option
+ fill_in(:wiki_content, with: org_content)
+ click_button('Create page')
+ end
+
+ expect(page).to have_selector('h1', text: 'Heading')
+ expect(page).to have_selector('h2', text: 'Subheading')
+ expect(page).to have_link('Link to Home', href: "/#{project.full_path}/-/wikis/home")
+ end
+
it_behaves_like 'wiki file attachments', :quarantine
end
diff --git a/spec/features/projects/wiki/users_views_asciidoc_page_with_includes_spec.rb b/spec/features/projects/wiki/users_views_asciidoc_page_with_includes_spec.rb
new file mode 100644
index 00000000000..08eea14c438
--- /dev/null
+++ b/spec/features/projects/wiki/users_views_asciidoc_page_with_includes_spec.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'User views AsciiDoc page with includes', :js do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:wiki_content_selector) { '[data-qa-selector=wiki_page_content]' }
+ let(:project) { create(:project, :public, :wiki_repo) }
+ let!(:included_wiki_page) { create_wiki_page('included_page', content: 'Content from the included page') }
+ let!(:wiki_page) { create_wiki_page('home', content: "Content from the main page.\ninclude::included_page.asciidoc[]") }
+
+ def create_wiki_page(title, content:)
+ attrs = {
+ title: title,
+ content: content,
+ format: :asciidoc
+ }
+
+ create(:wiki_page, wiki: project.wiki, attrs: attrs)
+ end
+
+ before do
+ sign_in(user)
+ end
+
+ context 'when the file being included exists' do
+ it 'includes the file contents' do
+ visit(project_wiki_path(project, wiki_page))
+
+ page.within(:css, wiki_content_selector) do
+ expect(page).to have_content('Content from the main page. Content from the included page')
+ end
+ end
+
+ context 'when there are multiple versions of the wiki pages' do
+ before do
+ included_wiki_page.update(message: 'updated included file', content: 'Updated content from the included page')
+ wiki_page.update(message: 'updated wiki page', content: "Updated content from the main page.\ninclude::included_page.asciidoc[]")
+ end
+
+ let(:latest_version_id) { wiki_page.versions.first.id }
+ let(:oldest_version_id) { wiki_page.versions.last.id }
+
+ context 'viewing the latest version' do
+ it 'includes the latest content' do
+ visit(project_wiki_path(project, wiki_page, version_id: latest_version_id))
+
+ page.within(:css, wiki_content_selector) do
+ expect(page).to have_content('Updated content from the main page. Updated content from the included page')
+ end
+ end
+ end
+
+ context 'viewing the original version' do
+ it 'includes the content from the original version' do
+ visit(project_wiki_path(project, wiki_page, version_id: oldest_version_id))
+
+ page.within(:css, wiki_content_selector) do
+ expect(page).to have_content('Content from the main page. Content from the included page')
+ end
+ end
+ end
+ end
+ end
+
+ context 'when the file being included does not exist' do
+ before do
+ included_wiki_page.delete
+ end
+
+ it 'outputs an error' do
+ visit(project_wiki_path(project, wiki_page))
+
+ page.within(:css, wiki_content_selector) do
+ expect(page).to have_content('Content from the main page. [ERROR: include::included_page.asciidoc[] - unresolved directive]')
+ end
+ end
+ end
+end
diff --git a/spec/features/task_lists_spec.rb b/spec/features/task_lists_spec.rb
index 11429f16f42..bcd894a0d20 100644
--- a/spec/features/task_lists_spec.rb
+++ b/spec/features/task_lists_spec.rb
@@ -51,6 +51,27 @@ describe 'Task Lists' do
EOT
end
+ let(:commented_tasks_markdown) do
+ <<-EOT.strip_heredoc
+ <!--
+ - [ ] a
+ -->
+
+ - [ ] b
+ EOT
+ end
+
+ let(:summary_no_blank_line_markdown) do
+ <<-EOT.strip_heredoc
+ <details>
+ <summary>No blank line after summary element breaks task list</summary>
+ 1. [ ] People Ops: do such and such
+ </details>
+
+ * [ ] Task 1
+ EOT
+ end
+
before do
Warden.test_mode!
@@ -291,4 +312,52 @@ describe 'Task Lists' do
end
end
end
+
+ describe 'markdown task edge cases' do
+ describe 'commented tasks', :js do
+ let!(:issue) { create(:issue, description: commented_tasks_markdown, author: user, project: project) }
+
+ it 'renders' do
+ visit_issue(project, issue)
+ wait_for_requests
+
+ expect(page).to have_selector('ul.task-list', count: 1)
+ expect(page).to have_selector('li.task-list-item', count: 1)
+ expect(page).to have_selector('ul input[checked]', count: 0)
+
+ find('.task-list-item-checkbox').click
+ wait_for_requests
+
+ visit_issue(project, issue)
+ wait_for_requests
+
+ expect(page).to have_selector('ul.task-list', count: 1)
+ expect(page).to have_selector('li.task-list-item', count: 1)
+ expect(page).to have_selector('ul input[checked]', count: 1)
+ end
+ end
+
+ describe 'summary with no blank line', :js do
+ let!(:issue) { create(:issue, description: summary_no_blank_line_markdown, author: user, project: project) }
+
+ it 'renders' do
+ visit_issue(project, issue)
+ wait_for_requests
+
+ expect(page).to have_selector('ul.task-list', count: 1)
+ expect(page).to have_selector('li.task-list-item', count: 1)
+ expect(page).to have_selector('ul input[checked]', count: 0)
+
+ find('.task-list-item-checkbox').click
+ wait_for_requests
+
+ visit_issue(project, issue)
+ wait_for_requests
+
+ expect(page).to have_selector('ul.task-list', count: 1)
+ expect(page).to have_selector('li.task-list-item', count: 1)
+ expect(page).to have_selector('ul input[checked]', count: 1)
+ end
+ end
+ end
end
diff --git a/spec/features/triggers_spec.rb b/spec/features/triggers_spec.rb
index 19cd21e4161..af406961bbc 100644
--- a/spec/features/triggers_spec.rb
+++ b/spec/features/triggers_spec.rb
@@ -65,22 +65,6 @@ describe 'Triggers', :js do
expect(page.find('.triggers-list')).to have_content new_trigger_title
expect(page.find('.triggers-list .trigger-owner')).to have_content user.name
end
-
- it 'edit "legacy" trigger and save' do
- # Create new trigger without owner association, i.e. Legacy trigger
- create(:ci_trigger, owner: user, project: @project).update_attribute(:owner, nil)
- visit project_settings_ci_cd_path(@project)
-
- # See if the trigger can be edited and description is blank
- find('a[title="Edit"]').send_keys(:return)
- expect(page.find('#trigger_description').value).to have_content ''
-
- # See if trigger can be updated with description and saved successfully
- fill_in 'trigger_description', with: new_trigger_title
- click_button 'Save trigger'
- expect(page.find('.flash-notice')).to have_content 'Trigger was successfully updated.'
- expect(page.find('.triggers-list')).to have_content new_trigger_title
- end
end
describe 'trigger "Revoke" workflow' do
@@ -106,43 +90,18 @@ describe 'Triggers', :js do
end
describe 'show triggers workflow' do
- before do
- stub_feature_flags(use_legacy_pipeline_triggers: false)
- end
-
it 'contains trigger description placeholder' do
expect(page.find('#trigger_description')['placeholder']).to eq 'Trigger description'
end
- it 'show "invalid" badge for legacy trigger' do
- create(:ci_trigger, owner: user, project: @project).update_attribute(:owner, nil)
- visit project_settings_ci_cd_path(@project)
-
- expect(page.find('.triggers-list')).to have_content 'invalid'
- end
-
it 'show "invalid" badge for trigger with owner having insufficient permissions' do
create(:ci_trigger, owner: guest_user, project: @project, description: trigger_title)
visit project_settings_ci_cd_path(@project)
- # See if trigger without owner (i.e. legacy) shows "legacy" badge and is non-editable
expect(page.find('.triggers-list')).to have_content 'invalid'
expect(page.find('.triggers-list')).not_to have_selector('a[title="Edit"]')
end
- it 'do not show "Edit" or full token for legacy trigger' do
- create(:ci_trigger, owner: user, project: @project, description: trigger_title)
- .update_attribute(:owner, nil)
- visit project_settings_ci_cd_path(@project)
-
- # See if trigger not owned shows only first few token chars and doesn't have copy-to-clipboard button
- expect(page.find('.triggers-list')).to have_content(@project.triggers.first.token[0..3])
- expect(page.find('.triggers-list')).not_to have_selector('button.btn-clipboard')
-
- # See if trigger is non-editable
- expect(page.find('.triggers-list')).not_to have_selector('a[title="Edit"]')
- end
-
it 'do not show "Edit" or full token for not owned trigger' do
# Create trigger with user different from current_user
create(:ci_trigger, owner: user2, project: @project, description: trigger_title)
@@ -169,56 +128,5 @@ describe 'Triggers', :js do
expect(page.find('.triggers-list .trigger-owner')).to have_content user.name
expect(page.find('.triggers-list')).to have_selector('a[title="Edit"]')
end
-
- context 'when :use_legacy_pipeline_triggers feature flag is enabled' do
- before do
- stub_feature_flags(use_legacy_pipeline_triggers: true)
- end
-
- it 'show "legacy" badge for legacy trigger' do
- create(:ci_trigger, owner: nil, project: @project)
- visit project_settings_ci_cd_path(@project)
-
- # See if trigger without owner (i.e. legacy) shows "legacy" badge and is editable
- expect(page.find('.triggers-list')).to have_content 'legacy'
- expect(page.find('.triggers-list')).to have_selector('a[title="Edit"]')
- end
-
- it 'show "invalid" badge for trigger with owner having insufficient permissions' do
- create(:ci_trigger, owner: guest_user, project: @project, description: trigger_title)
- visit project_settings_ci_cd_path(@project)
-
- # See if trigger without owner (i.e. legacy) shows "legacy" badge and is non-editable
- expect(page.find('.triggers-list')).to have_content 'invalid'
- expect(page.find('.triggers-list')).not_to have_selector('a[title="Edit"]')
- end
-
- it 'do not show "Edit" or full token for not owned trigger' do
- # Create trigger with user different from current_user
- create(:ci_trigger, owner: user2, project: @project, description: trigger_title)
- visit project_settings_ci_cd_path(@project)
-
- # See if trigger not owned by current_user shows only first few token chars and doesn't have copy-to-clipboard button
- expect(page.find('.triggers-list')).to have_content(@project.triggers.first.token[0..3])
- expect(page.find('.triggers-list')).not_to have_selector('button.btn-clipboard')
-
- # See if trigger owner name doesn't match with current_user and trigger is non-editable
- expect(page.find('.triggers-list .trigger-owner')).not_to have_content user.name
- expect(page.find('.triggers-list')).not_to have_selector('a[title="Edit"]')
- end
-
- it 'show "Edit" and full token for owned trigger' do
- create(:ci_trigger, owner: user, project: @project, description: trigger_title)
- visit project_settings_ci_cd_path(@project)
-
- # See if trigger shows full token and has copy-to-clipboard button
- expect(page.find('.triggers-list')).to have_content @project.triggers.first.token
- expect(page.find('.triggers-list')).to have_selector('button.btn-clipboard')
-
- # See if trigger owner name matches with current_user and is editable
- expect(page.find('.triggers-list .trigger-owner')).to have_content user.name
- expect(page.find('.triggers-list')).to have_selector('a[title="Edit"]')
- end
- end
end
end
diff --git a/spec/features/users/signup_spec.rb b/spec/features/users/signup_spec.rb
index 3b19bd423a4..30f298b1fc3 100644
--- a/spec/features/users/signup_spec.rb
+++ b/spec/features/users/signup_spec.rb
@@ -123,50 +123,6 @@ shared_examples 'Signup' do
end
end
- describe 'user\'s full name validation', :js do
- before do
- if Gitlab::Experimentation.enabled?(:signup_flow)
- user = create(:user, role: nil)
- sign_in(user)
- visit users_sign_up_welcome_path
- @user_name_field = 'user_name'
- else
- visit new_user_registration_path
- @user_name_field = 'new_user_name'
- end
- end
-
- it 'does not show an error border if the user\'s fullname length is not longer than 128 characters' do
- fill_in @user_name_field, with: 'u' * 128
-
- expect(find('.name')).not_to have_css '.gl-field-error-outline'
- end
-
- it 'shows an error border if the user\'s fullname contains an emoji' do
- simulate_input("##{@user_name_field}", 'Ehsan 🦋')
-
- expect(find('.name')).to have_css '.gl-field-error-outline'
- end
-
- it 'shows an error border if the user\'s fullname is longer than 128 characters' do
- fill_in @user_name_field, with: 'n' * 129
-
- expect(find('.name')).to have_css '.gl-field-error-outline'
- end
-
- it 'shows an error message if the user\'s fullname is longer than 128 characters' do
- fill_in @user_name_field, with: 'n' * 129
-
- expect(page).to have_content("Name is too long (maximum is 128 characters).")
- end
-
- it 'shows an error message if the username contains emojis' do
- simulate_input("##{@user_name_field}", 'Ehsan 🦋')
-
- expect(page).to have_content("Invalid input, please avoid emojis")
- end
- end
-
context 'with no errors' do
context 'when sending confirmation email' do
before do
@@ -184,7 +140,10 @@ shared_examples 'Signup' do
fill_in 'new_user_username', with: new_user.username
fill_in 'new_user_email', with: new_user.email
- unless Gitlab::Experimentation.enabled?(:signup_flow)
+ if Gitlab::Experimentation.enabled?(:signup_flow)
+ fill_in 'new_user_first_name', with: new_user.first_name
+ fill_in 'new_user_last_name', with: new_user.last_name
+ else
fill_in 'new_user_name', with: new_user.name
fill_in 'new_user_email_confirmation', with: new_user.email
end
@@ -209,7 +168,10 @@ shared_examples 'Signup' do
fill_in 'new_user_username', with: new_user.username
fill_in 'new_user_email', with: new_user.email
- unless Gitlab::Experimentation.enabled?(:signup_flow)
+ if Gitlab::Experimentation.enabled?(:signup_flow)
+ fill_in 'new_user_first_name', with: new_user.first_name
+ fill_in 'new_user_last_name', with: new_user.last_name
+ else
fill_in 'new_user_name', with: new_user.name
fill_in 'new_user_email_confirmation', with: new_user.email
end
@@ -235,7 +197,10 @@ shared_examples 'Signup' do
fill_in 'new_user_username', with: new_user.username
fill_in 'new_user_email', with: new_user.email
- unless Gitlab::Experimentation.enabled?(:signup_flow)
+ if Gitlab::Experimentation.enabled?(:signup_flow)
+ fill_in 'new_user_first_name', with: new_user.first_name
+ fill_in 'new_user_last_name', with: new_user.last_name
+ else
fill_in 'new_user_name', with: new_user.name
fill_in 'new_user_email_confirmation', with: new_user.email.capitalize
end
@@ -263,7 +228,10 @@ shared_examples 'Signup' do
fill_in 'new_user_username', with: new_user.username
fill_in 'new_user_email', with: new_user.email
- unless Gitlab::Experimentation.enabled?(:signup_flow)
+ if Gitlab::Experimentation.enabled?(:signup_flow)
+ fill_in 'new_user_first_name', with: new_user.first_name
+ fill_in 'new_user_last_name', with: new_user.last_name
+ else
fill_in 'new_user_name', with: new_user.name
fill_in 'new_user_email_confirmation', with: new_user.email
end
@@ -287,7 +255,10 @@ shared_examples 'Signup' do
visit new_user_registration_path
- unless Gitlab::Experimentation.enabled?(:signup_flow)
+ if Gitlab::Experimentation.enabled?(:signup_flow)
+ fill_in 'new_user_first_name', with: new_user.first_name
+ fill_in 'new_user_last_name', with: new_user.last_name
+ else
fill_in 'new_user_name', with: new_user.name
end
@@ -313,7 +284,10 @@ shared_examples 'Signup' do
visit new_user_registration_path
- unless Gitlab::Experimentation.enabled?(:signup_flow)
+ if Gitlab::Experimentation.enabled?(:signup_flow)
+ fill_in 'new_user_first_name', with: new_user.first_name
+ fill_in 'new_user_last_name', with: new_user.last_name
+ else
fill_in 'new_user_name', with: new_user.name
end
@@ -338,7 +312,10 @@ shared_examples 'Signup' do
fill_in 'new_user_username', with: new_user.username
fill_in 'new_user_email', with: new_user.email
- unless Gitlab::Experimentation.enabled?(:signup_flow)
+ if Gitlab::Experimentation.enabled?(:signup_flow)
+ fill_in 'new_user_first_name', with: new_user.first_name
+ fill_in 'new_user_last_name', with: new_user.last_name
+ else
fill_in 'new_user_name', with: new_user.name
fill_in 'new_user_email_confirmation', with: new_user.email
end
@@ -357,7 +334,10 @@ shared_examples 'Signup' do
fill_in 'new_user_username', with: new_user.username
fill_in 'new_user_email', with: new_user.email
- unless Gitlab::Experimentation.enabled?(:signup_flow)
+ if Gitlab::Experimentation.enabled?(:signup_flow)
+ fill_in 'new_user_first_name', with: new_user.first_name
+ fill_in 'new_user_last_name', with: new_user.last_name
+ else
fill_in 'new_user_name', with: new_user.name
fill_in 'new_user_email_confirmation', with: new_user.email
end
@@ -394,7 +374,10 @@ shared_examples 'Signup' do
fill_in 'new_user_username', with: new_user.username
fill_in 'new_user_email', with: new_user.email
- unless Gitlab::Experimentation.enabled?(:signup_flow)
+ if Gitlab::Experimentation.enabled?(:signup_flow)
+ fill_in 'new_user_first_name', with: new_user.first_name
+ fill_in 'new_user_last_name', with: new_user.last_name
+ else
fill_in 'new_user_name', with: new_user.name
fill_in 'new_user_email_confirmation', with: new_user.email
end
@@ -412,6 +395,44 @@ shared_examples 'Signup' do
end
end
+shared_examples 'Signup name validation' do |field, max_length|
+ before do
+ visit new_user_registration_path
+ end
+
+ describe "#{field} validation", :js do
+ it "does not show an error border if the user's fullname length is not longer than #{max_length} characters" do
+ fill_in field, with: 'u' * max_length
+
+ expect(find('.name')).not_to have_css '.gl-field-error-outline'
+ end
+
+ it 'shows an error border if the user\'s fullname contains an emoji' do
+ simulate_input("##{field}", 'Ehsan 🦋')
+
+ expect(find('.name')).to have_css '.gl-field-error-outline'
+ end
+
+ it "shows an error border if the user\'s fullname is longer than #{max_length} characters" do
+ fill_in field, with: 'n' * (max_length + 1)
+
+ expect(find('.name')).to have_css '.gl-field-error-outline'
+ end
+
+ it "shows an error message if the user\'s fullname is longer than #{max_length} characters" do
+ fill_in field, with: 'n' * (max_length + 1)
+
+ expect(page).to have_content("Name is too long (maximum is #{max_length} characters).")
+ end
+
+ it 'shows an error message if the name contains emojis' do
+ simulate_input("##{field}", 'Ehsan 🦋')
+
+ expect(page).to have_content("Invalid input, please avoid emojis")
+ end
+ end
+end
+
describe 'With original flow' do
before do
stub_experiment(signup_flow: false)
@@ -419,6 +440,7 @@ describe 'With original flow' do
end
it_behaves_like 'Signup'
+ it_behaves_like 'Signup name validation', 'new_user_name', 255
end
describe 'With experimental flow' do
@@ -428,11 +450,15 @@ describe 'With experimental flow' do
end
it_behaves_like 'Signup'
+ it_behaves_like 'Signup name validation', 'new_user_first_name', 127
+ it_behaves_like 'Signup name validation', 'new_user_last_name', 127
describe 'when role is required' do
it 'after registering, it redirects to step 2 of the signup process, sets the name and role and then redirects to the original requested url' do
new_user = build_stubbed(:user)
visit new_user_registration_path
+ fill_in 'new_user_first_name', with: new_user.first_name
+ fill_in 'new_user_last_name', with: new_user.last_name
fill_in 'new_user_username', with: new_user.username
fill_in 'new_user_email', with: new_user.email
fill_in 'new_user_password', with: new_user.password
@@ -441,13 +467,11 @@ describe 'With experimental flow' do
expect(page).to have_current_path(users_sign_up_welcome_path)
- fill_in 'user_name', with: 'New name'
select 'Software Developer', from: 'user_role'
choose 'user_setup_for_company_true'
click_button 'Get started!'
new_user = User.find_by_username(new_user.username)
- expect(new_user.name).to eq 'New name'
expect(new_user.software_developer_role?).to be_truthy
expect(new_user.setup_for_company).to be_truthy
expect(page).to have_current_path(new_project_path)
diff --git a/spec/finders/branches_finder_spec.rb b/spec/finders/branches_finder_spec.rb
index 70b5da0cc3c..5f75ff8c6ff 100644
--- a/spec/finders/branches_finder_spec.rb
+++ b/spec/finders/branches_finder_spec.rb
@@ -66,7 +66,7 @@ describe BranchesFinder do
end
it 'filters branches by provided names' do
- branches_finder = described_class.new(repository, { names: ['fix', 'csv', 'lfs', 'does-not-exist'] })
+ branches_finder = described_class.new(repository, { names: %w[fix csv lfs does-not-exist] })
result = branches_finder.execute
diff --git a/spec/finders/clusters/knative_services_finder_spec.rb b/spec/finders/clusters/knative_services_finder_spec.rb
index 7ad64cc3bca..57dbead7921 100644
--- a/spec/finders/clusters/knative_services_finder_spec.rb
+++ b/spec/finders/clusters/knative_services_finder_spec.rb
@@ -6,9 +6,9 @@ describe Clusters::KnativeServicesFinder do
include KubernetesHelpers
include ReactiveCachingHelpers
- let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+ let(:project) { create(:project, :repository) }
+ let(:cluster) { create(:cluster, :project, :provided_by_gcp, projects: [project]) }
let(:service) { environment.deployment_platform }
- let(:project) { cluster.cluster_project.project }
let(:environment) { create(:environment, project: project) }
let!(:deployment) { create(:deployment, :success, environment: environment, cluster: cluster) }
let(:namespace) do
diff --git a/spec/finders/deployments_finder_spec.rb b/spec/finders/deployments_finder_spec.rb
index be35a705b0d..b20c7e5a8a5 100644
--- a/spec/finders/deployments_finder_spec.rb
+++ b/spec/finders/deployments_finder_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe DeploymentsFinder do
subject { described_class.new(project, params).execute }
- let(:project) { create(:project, :public, :repository) }
+ let(:project) { create(:project, :public, :test_repo) }
let(:params) { {} }
describe "#execute" do
@@ -25,6 +25,42 @@ describe DeploymentsFinder do
is_expected.to match_array([deployment_1])
end
end
+
+ context 'when the environment name is specified' do
+ let!(:environment1) { create(:environment, project: project) }
+ let!(:environment2) { create(:environment, project: project) }
+ let!(:deployment1) do
+ create(:deployment, project: project, environment: environment1)
+ end
+
+ let!(:deployment2) do
+ create(:deployment, project: project, environment: environment2)
+ end
+
+ let(:params) { { environment: environment1.name } }
+
+ it 'returns deployments for the given environment' do
+ is_expected.to match_array([deployment1])
+ end
+ end
+
+ context 'when the deployment status is specified' do
+ let!(:deployment1) { create(:deployment, :success, project: project) }
+ let!(:deployment2) { create(:deployment, :failed, project: project) }
+ let(:params) { { status: 'success' } }
+
+ it 'returns deployments with the given status' do
+ is_expected.to match_array([deployment1])
+ end
+ end
+
+ context 'when using an invalid deployment status' do
+ let(:params) { { status: 'kittens' } }
+
+ it 'raises ArgumentError' do
+ expect { subject }.to raise_error(ArgumentError)
+ end
+ end
end
describe 'ordering' do
@@ -34,7 +70,7 @@ describe DeploymentsFinder do
let!(:deployment_1) { create(:deployment, :success, project: project, iid: 11, ref: 'master', created_at: 2.days.ago, updated_at: Time.now) }
let!(:deployment_2) { create(:deployment, :success, project: project, iid: 12, ref: 'feature', created_at: 1.day.ago, updated_at: 2.hours.ago) }
- let!(:deployment_3) { create(:deployment, :success, project: project, iid: 8, ref: 'patch', created_at: Time.now, updated_at: 1.hour.ago) }
+ let!(:deployment_3) { create(:deployment, :success, project: project, iid: 8, ref: 'video', created_at: Time.now, updated_at: 1.hour.ago) }
where(:order_by, :sort, :ordered_deployments) do
'created_at' | 'asc' | [:deployment_1, :deployment_2, :deployment_3]
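The new DeploymentsFinder contexts expect filtering by environment name and by status, plus an ArgumentError for an unknown status. A rough sketch of that behaviour (hypothetical, not the real finder; it assumes Deployment exposes an ActiveRecord `statuses` mapping and an `environment` association):

    # Hypothetical sketch of the filtering exercised by the contexts above.
    class SimpleDeploymentsFinder
      def initialize(project, params = {})
        @project = project
        @params = params
      end

      def execute
        deployments = @project.deployments
        deployments = by_environment(deployments)
        by_status(deployments)
      end

      private

      def by_environment(deployments)
        return deployments unless @params[:environment]

        deployments.joins(:environment).where(environments: { name: @params[:environment] })
      end

      def by_status(deployments)
        status = @params[:status]
        return deployments unless status
        # Unknown statuses raise, matching the `{ status: 'kittens' }` context.
        raise ArgumentError, "The deployment status #{status} is invalid" unless Deployment.statuses.key?(status)

        deployments.where(status: status)
      end
    end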
diff --git a/spec/finders/environments_finder_spec.rb b/spec/finders/environments_finder_spec.rb
index 69687eaa99f..7100376478a 100644
--- a/spec/finders/environments_finder_spec.rb
+++ b/spec/finders/environments_finder_spec.rb
@@ -13,17 +13,22 @@ describe EnvironmentsFinder do
end
context 'tagged deployment' do
+ let(:environment_two) { create(:environment, project: project) }
+ # Environments need to include commits, so rewind two commits to fit
+ let(:commit) { project.commit('HEAD~2') }
+
before do
- create(:deployment, :success, environment: environment, ref: 'v1.1.0', tag: true, sha: project.commit.id)
+ create(:deployment, :success, environment: environment, ref: 'v1.0.0', tag: true, sha: project.commit.id)
+ create(:deployment, :success, environment: environment_two, ref: 'v1.1.0', tag: true, sha: project.commit('HEAD~1').id)
end
it 'returns environment when with_tags is set' do
- expect(described_class.new(project, user, ref: 'master', commit: project.commit, with_tags: true).execute)
- .to contain_exactly(environment)
+ expect(described_class.new(project, user, ref: 'master', commit: commit, with_tags: true).execute)
+ .to contain_exactly(environment, environment_two)
end
it 'does not return environment when no with_tags is set' do
- expect(described_class.new(project, user, ref: 'master', commit: project.commit).execute)
+ expect(described_class.new(project, user, ref: 'master', commit: commit).execute)
.to be_empty
end
@@ -31,6 +36,21 @@ describe EnvironmentsFinder do
expect(described_class.new(project, user, ref: 'master', commit: project.commit('feature')).execute)
.to be_empty
end
+
+ it 'returns environment when with_tags is set' do
+ expect(described_class.new(project, user, ref: 'master', commit: commit, with_tags: true).execute)
+ .to contain_exactly(environment, environment_two)
+ end
+
+ # We expect two Gitaly calls: FindCommit, CommitIsAncestor
+ # This tests to ensure we don't call one CommitIsAncestor per environment
+ it 'only calls Gitaly twice when multiple environments are present', :request_store do
+ expect do
+ result = described_class.new(project, user, ref: 'master', commit: commit, with_tags: true, find_latest: true).execute
+
+ expect(result).to contain_exactly(environment_two)
+ end.to change { Gitlab::GitalyClient.get_request_count }.by(2)
+ end
end
context 'branch deployment' do
diff --git a/spec/finders/events_finder_spec.rb b/spec/finders/events_finder_spec.rb
index 848030262cd..5c28b31e8c8 100644
--- a/spec/finders/events_finder_spec.rb
+++ b/spec/finders/events_finder_spec.rb
@@ -5,8 +5,10 @@ require 'spec_helper'
describe EventsFinder do
let(:user) { create(:user) }
let(:other_user) { create(:user) }
+
let(:project1) { create(:project, :private, creator_id: user.id, namespace: user.namespace) }
let(:project2) { create(:project, :private, creator_id: user.id, namespace: user.namespace) }
+
let(:closed_issue) { create(:closed_issue, project: project1, author: user) }
let(:opened_merge_request) { create(:merge_request, source_project: project2, author: user) }
let!(:closed_issue_event) { create(:event, project: project1, author: user, target: closed_issue, action: Event::CLOSED, created_at: Date.new(2016, 12, 30)) }
@@ -15,6 +17,8 @@ describe EventsFinder do
let(:opened_merge_request2) { create(:merge_request, source_project: project2, author: user) }
let!(:closed_issue_event2) { create(:event, project: project1, author: user, target: closed_issue, action: Event::CLOSED, created_at: Date.new(2016, 2, 2)) }
let!(:opened_merge_request_event2) { create(:event, project: project2, author: user, target: opened_merge_request, action: Event::CREATED, created_at: Date.new(2017, 2, 2)) }
+ let(:opened_merge_request3) { create(:merge_request, source_project: project1, author: other_user) }
+ let!(:other_developer_event) { create(:event, project: project1, author: other_user, target: opened_merge_request3, action: Event::CREATED) }
let(:public_project) { create(:project, :public, creator_id: user.id, namespace: user.namespace) }
let(:confidential_issue) { create(:closed_issue, confidential: true, project: public_project, author: user) }
@@ -55,6 +59,28 @@ describe EventsFinder do
end
end
+ context 'dashboard events' do
+ before do
+ project1.add_developer(other_user)
+ end
+
+ context 'scope is `all`' do
+ it 'includes activity of other users' do
+ events = described_class.new(source: user, current_user: user, scope: 'all').execute
+
+ expect(events).to include(other_developer_event)
+ end
+ end
+
+ context 'scope is not `all`' do
+ it 'does not include activity of other users' do
+ events = described_class.new(source: user, current_user: user, scope: '').execute
+
+ expect(events).not_to include(other_developer_event)
+ end
+ end
+ end
+
context 'when targeting a project' do
it 'returns project events between specified dates filtered on action and type' do
events = described_class.new(source: project1, current_user: user, action: 'closed', target_type: 'issue', after: Date.new(2016, 12, 1), before: Date.new(2017, 1, 1)).execute
diff --git a/spec/finders/group_members_finder_spec.rb b/spec/finders/group_members_finder_spec.rb
index f161a1df9c3..34649097f70 100644
--- a/spec/finders/group_members_finder_spec.rb
+++ b/spec/finders/group_members_finder_spec.rb
@@ -10,6 +10,7 @@ describe GroupMembersFinder, '#execute' do
let(:user2) { create(:user) }
let(:user3) { create(:user) }
let(:user4) { create(:user) }
+ let(:user5) { create(:user, :two_factor_via_otp) }
it 'returns members for top-level group' do
member1 = group.add_maintainer(user1)
@@ -56,6 +57,14 @@ describe GroupMembersFinder, '#execute' do
expect(result.to_a).to match_array([member1])
end
+ it 'does not return nil if `inherited only` relation is requested on root group' do
+ group.add_developer(user2)
+
+ result = described_class.new(group).execute(include_relations: [:inherited])
+
+ expect(result).not_to be_nil
+ end
+
it 'returns members for descendant groups if requested' do
member1 = group.add_maintainer(user2)
member2 = group.add_maintainer(user1)
@@ -67,4 +76,56 @@ describe GroupMembersFinder, '#execute' do
expect(result.to_a).to match_array([member1, member2, member3, member4])
end
+
+ it 'returns searched members if requested' do
+ group.add_maintainer(user2)
+ group.add_developer(user3)
+ member = group.add_maintainer(user1)
+
+ result = described_class.new(group).execute(params: { search: user1.name })
+
+ expect(result.to_a).to match_array([member])
+ end
+
+ it 'returns nothing when searching only within the inherited relation' do
+ group.add_maintainer(user2)
+ group.add_developer(user3)
+ group.add_maintainer(user1)
+
+ result = described_class.new(group).execute(include_relations: [:inherited], params: { search: user1.name })
+
+ expect(result.to_a).to match_array([])
+ end
+
+ it 'returns only the member inherited from the parent group when searching a nested group with the inherited relation' do
+ group.add_maintainer(user2)
+ group.add_developer(user3)
+ nested_group.add_maintainer(create(:user, name: user1.name))
+ member = group.add_maintainer(user1)
+
+ result = described_class.new(nested_group).execute(include_relations: [:inherited], params: { search: member.user.name })
+
+ expect(result.to_a).to contain_exactly(member)
+ end
+
+ it 'returns members with two-factor auth if requested by owner' do
+ group.add_owner(user2)
+ group.add_maintainer(user1)
+ member = group.add_maintainer(user5)
+
+ result = described_class.new(group, user2).execute(params: { two_factor: 'enabled' })
+
+ expect(result.to_a).to contain_exactly(member)
+ end
+
+ it 'returns members without two-factor auth if requested by owner' do
+ member1 = group.add_owner(user2)
+ member2 = group.add_maintainer(user1)
+ member_with_2fa = group.add_maintainer(user5)
+
+ result = described_class.new(group, user2).execute(params: { two_factor: 'disabled' })
+
+ expect(result.to_a).not_to include(member_with_2fa)
+ expect(result.to_a).to match_array([member1, member2])
+ end
end
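The two-factor examples above only assert who comes back; a minimal sketch of the filter itself (an assumption about the implementation, keyed on the users.otp_required_for_login flag that the :two_factor_via_otp factory sets, and ignoring hardware-token 2FA):

    # Sketch only: narrow a members relation by two-factor status.
    def filter_by_two_factor(members, two_factor)
      case two_factor
      when 'enabled'
        members.joins(:user).where(users: { otp_required_for_login: true })
      when 'disabled'
        members.joins(:user).where(users: { otp_required_for_login: false })
      else
        members
      end
    end

    # filter_by_two_factor(group.members, 'enabled') keeps only members whose
    # user has OTP-based two-factor authentication switched on.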
diff --git a/spec/finders/issues_finder_spec.rb b/spec/finders/issues_finder_spec.rb
index c52ee89006b..056795a50d0 100644
--- a/spec/finders/issues_finder_spec.rb
+++ b/spec/finders/issues_finder_spec.rb
@@ -435,9 +435,7 @@ describe IssuesFinder do
let(:params) { { label_name: described_class::FILTER_ANY } }
it 'returns issues that have one or more label' do
- 2.times do
- create(:label_link, label: create(:label, project: project2), target: issue3)
- end
+ create_list(:label_link, 2, label: create(:label, project: project2), target: issue3)
expect(issues).to contain_exactly(issue2, issue3)
end
diff --git a/spec/finders/keys_finder_spec.rb b/spec/finders/keys_finder_spec.rb
index f80abdcdb38..7605d066ddf 100644
--- a/spec/finders/keys_finder_spec.rb
+++ b/spec/finders/keys_finder_spec.rb
@@ -73,7 +73,15 @@ describe KeysFinder do
end
context 'with valid fingerprints' do
- context 'with valid MD5 params' do
+ let!(:deploy_key) do
+ create(:deploy_key,
+ user: user,
+ key: 'ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAIEAiPWx6WM4lhHNedGfBpPJNPpZ7yKu+dnn1SJejgt1017k6YjzGGphH2TUxwKzxcKDKKezwkpfnxPkSMkuEspGRt/aZZ9wa++Oi7Qkr8prgHc4soW6NUlfDzpvZK2H5E7eQaSeP3SAwGmQKUFHCddNaP0L+hM7zhFNzjFvpaMgJw0=',
+ fingerprint: '8a:4a:12:92:0b:50:47:02:d4:5a:8e:a9:44:4e:08:b4',
+ fingerprint_sha256: '4DPHOVNh53i9dHb5PpY2vjfyf5qniTx1/pBFPoZLDdk')
+ end
+
+ context 'personal key with valid MD5 params' do
context 'with an existent fingerprint' do
before do
params[:fingerprint] = 'ba:81:59:68:d7:6c:cd:02:02:bf:6a:9b:55:4e:af:d1'
@@ -85,6 +93,17 @@ describe KeysFinder do
end
end
+ context 'deploy key with an existent fingerprint' do
+ before do
+ params[:fingerprint] = '8a:4a:12:92:0b:50:47:02:d4:5a:8e:a9:44:4e:08:b4'
+ end
+
+ it 'returns the key' do
+ expect(subject).to eq(deploy_key)
+ expect(subject.user).to eq(user)
+ end
+ end
+
context 'with a non-existent fingerprint' do
before do
params[:fingerprint] = 'bb:81:59:68:d7:6c:cd:02:02:bf:6a:9b:55:4e:af:d2'
@@ -96,7 +115,7 @@ describe KeysFinder do
end
end
- context 'with valid SHA256 params' do
+ context 'personal key with valid SHA256 params' do
context 'with an existent fingerprint' do
before do
params[:fingerprint] = 'SHA256:nUhzNyftwADy8AH3wFY31tAKs7HufskYTte2aXo/lCg'
@@ -108,6 +127,17 @@ describe KeysFinder do
end
end
+ context 'deploy key with an existent fingerprint' do
+ before do
+ params[:fingerprint] = 'SHA256:4DPHOVNh53i9dHb5PpY2vjfyf5qniTx1/pBFPoZLDdk'
+ end
+
+ it 'returns the key' do
+ expect(subject).to eq(deploy_key)
+ expect(subject.user).to eq(user)
+ end
+ end
+
context 'with a non-existent fingerprint' do
before do
params[:fingerprint] = 'SHA256:xTjuFqftwADy8AH3wFY31tAKs7HufskYTte2aXi/mNp'
diff --git a/spec/finders/merge_requests_finder_spec.rb b/spec/finders/merge_requests_finder_spec.rb
index bc85a622119..849387b72bd 100644
--- a/spec/finders/merge_requests_finder_spec.rb
+++ b/spec/finders/merge_requests_finder_spec.rb
@@ -166,6 +166,38 @@ describe MergeRequestsFinder do
expect(scalar_params).to include(:wip, :assignee_id)
end
+
+ context 'filter by deployment' do
+ let_it_be(:project_with_repo) { create(:project, :repository) }
+
+ it 'returns the relevant merge requests' do
+ deployment1 = create(
+ :deployment,
+ project: project_with_repo,
+ sha: project_with_repo.commit.id,
+ merge_requests: [merge_request1, merge_request2]
+ )
+ create(
+ :deployment,
+ project: project_with_repo,
+ sha: project_with_repo.commit.id,
+ merge_requests: [merge_request3]
+ )
+ params = { deployment_id: deployment1.id }
+ merge_requests = described_class.new(user, params).execute
+
+ expect(merge_requests).to contain_exactly(merge_request1, merge_request2)
+ end
+
+ context 'when a deployment does not contain any merge requests' do
+ it 'returns an empty result' do
+ params = { deployment_id: create(:deployment, project: project_with_repo, sha: project_with_repo.commit.sha).id }
+ merge_requests = described_class.new(user, params).execute
+
+ expect(merge_requests).to be_empty
+ end
+ end
+ end
end
context 'assignee filtering' do
diff --git a/spec/finders/pipelines_finder_spec.rb b/spec/finders/pipelines_finder_spec.rb
index c8a4ea799c3..1dbf9491118 100644
--- a/spec/finders/pipelines_finder_spec.rb
+++ b/spec/finders/pipelines_finder_spec.rb
@@ -64,6 +64,19 @@ describe PipelinesFinder do
end
end
+ context 'when project has child pipelines' do
+ let!(:parent_pipeline) { create(:ci_pipeline, project: project) }
+ let!(:child_pipeline) { create(:ci_pipeline, project: project, source: :parent_pipeline) }
+
+ let!(:pipeline_source) do
+ create(:ci_sources_pipeline, pipeline: child_pipeline, source_pipeline: parent_pipeline)
+ end
+
+ it 'filters out child pipelines and shows only the parents' do
+ is_expected.to eq([parent_pipeline])
+ end
+ end
+
HasStatus::AVAILABLE_STATUSES.each do |target|
context "when status is #{target}" do
let(:params) { { status: target } }
diff --git a/spec/finders/projects/serverless/functions_finder_spec.rb b/spec/finders/projects/serverless/functions_finder_spec.rb
index 589e4000d46..d5644daebab 100644
--- a/spec/finders/projects/serverless/functions_finder_spec.rb
+++ b/spec/finders/projects/serverless/functions_finder_spec.rb
@@ -8,9 +8,9 @@ describe Projects::Serverless::FunctionsFinder do
include ReactiveCachingHelpers
let(:user) { create(:user) }
- let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+ let(:project) { create(:project, :repository) }
+ let(:cluster) { create(:cluster, :project, :provided_by_gcp, projects: [project]) }
let(:service) { cluster.platform_kubernetes }
- let(:project) { cluster.project }
let(:environment) { create(:environment, project: project) }
let!(:deployment) { create(:deployment, :success, environment: environment, cluster: cluster) }
let(:knative_services_finder) { environment.knative_services_finder }
@@ -108,7 +108,7 @@ describe Projects::Serverless::FunctionsFinder do
let(:finder) { described_class.new(project) }
before do
- allow(Prometheus::AdapterService).to receive(:new).and_return(double(prometheus_adapter: prometheus_adapter))
+ allow(Gitlab::Prometheus::Adapter).to receive(:new).and_return(double(prometheus_adapter: prometheus_adapter))
allow(prometheus_adapter).to receive(:query).and_return(prometheus_empty_body('matrix'))
end
diff --git a/spec/finders/sentry_issue_finder_spec.rb b/spec/finders/sentry_issue_finder_spec.rb
new file mode 100644
index 00000000000..5535eb8c214
--- /dev/null
+++ b/spec/finders/sentry_issue_finder_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe SentryIssueFinder do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :repository) }
+ let(:issue) { create(:issue, project: project) }
+ let(:sentry_issue) { create(:sentry_issue, issue: issue) }
+
+ let(:finder) { described_class.new(project, current_user: user) }
+
+ describe '#execute' do
+ let(:identifier) { sentry_issue.sentry_issue_identifier }
+
+ subject { finder.execute(identifier) }
+
+ context 'when the user is not part of the project' do
+ it { is_expected.to be_nil }
+ end
+
+ context 'when the user is a project developer' do
+ before do
+ project.add_developer(user)
+ end
+
+ it { is_expected.to eq(sentry_issue) }
+
+ context 'when identifier is incorrect' do
+ let(:identifier) { 1234 }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when accessing another projects identifier' do
+ let(:second_project) { create(:project) }
+ let(:second_issue) { create(:issue, project: second_project) }
+ let(:second_sentry_issue) { create(:sentry_issue, issue: second_issue) }
+
+ let(:identifier) { second_sentry_issue.sentry_issue_identifier }
+
+ it { is_expected.to be_nil }
+ end
+ end
+ end
+end
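A compact sketch of the lookup the spec above drives out (hypothetical, not the real SentryIssueFinder; the :read_sentry_issue permission name is an assumption): find a SentryIssue by its Sentry identifier, scoped to the project, and only for users allowed to read the project's error tracking data.

    # Hypothetical sketch of the lookup covered by the spec above.
    class SimpleSentryIssueFinder
      def initialize(project, current_user:)
        @project = project
        @current_user = current_user
      end

      def execute(identifier)
        return unless authorized?

        SentryIssue
          .joins(:issue)
          .where(issues: { project_id: @project.id })
          .find_by(sentry_issue_identifier: identifier)
      end

      private

      def authorized?
        Ability.allowed?(@current_user, :read_sentry_issue, @project)
      end
    end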
diff --git a/spec/finders/todos_finder_spec.rb b/spec/finders/todos_finder_spec.rb
index a837e7af251..a35c3a954e7 100644
--- a/spec/finders/todos_finder_spec.rb
+++ b/spec/finders/todos_finder_spec.rb
@@ -219,7 +219,7 @@ describe TodosFinder do
end
it "sorts by priority" do
- project_2 = create(:project)
+ project_2 = create(:project)
label_1 = create(:label, title: 'label_1', project: project, priority: 1)
label_2 = create(:label, title: 'label_2', project: project, priority: 2)
diff --git a/spec/fixtures/api/schemas/cluster_status.json b/spec/fixtures/api/schemas/cluster_status.json
index f978baa2026..29c56b5c820 100644
--- a/spec/fixtures/api/schemas/cluster_status.json
+++ b/spec/fixtures/api/schemas/cluster_status.json
@@ -35,9 +35,9 @@
"external_ip": { "type": ["string", "null"] },
"external_hostname": { "type": ["string", "null"] },
"hostname": { "type": ["string", "null"] },
- "kibana_hostname": { "type": ["string", "null"] },
"email": { "type": ["string", "null"] },
"stack": { "type": ["string", "null"] },
+ "modsecurity_enabled": { "type": ["boolean", "null"] },
"update_available": { "type": ["boolean", "null"] },
"can_uninstall": { "type": "boolean" }
},
diff --git a/spec/fixtures/api/schemas/entities/issue_board.json b/spec/fixtures/api/schemas/entities/issue_board.json
index 7cb65e1f2f5..09f66813c95 100644
--- a/spec/fixtures/api/schemas/entities/issue_board.json
+++ b/spec/fixtures/api/schemas/entities/issue_board.json
@@ -36,7 +36,8 @@
"real_path": { "type": "string" },
"issue_sidebar_endpoint": { "type": "string" },
"toggle_subscription_endpoint": { "type": "string" },
- "assignable_labels_endpoint": { "type": "string" }
+ "assignable_labels_endpoint": { "type": "string" },
+ "blocked": { "type": "boolean" }
},
"additionalProperties": false
}
diff --git a/spec/fixtures/api/schemas/error_tracking/error_detailed.json b/spec/fixtures/api/schemas/error_tracking/error_detailed.json
index 2a1cd2c03e0..2b6580e39f7 100644
--- a/spec/fixtures/api/schemas/error_tracking/error_detailed.json
+++ b/spec/fixtures/api/schemas/error_tracking/error_detailed.json
@@ -1,10 +1,11 @@
{
"type": "object",
- "required" : [
+ "required": [
"external_url",
"external_base_url",
"last_seen",
"message",
+ "tags",
"type",
"title",
"project_id",
@@ -17,31 +18,46 @@
"first_release_last_commit",
"last_release_last_commit",
"first_release_short_version",
- "last_release_short_version"
+ "last_release_short_version",
+ "gitlab_commit"
],
- "properties" : {
- "id": { "type": "string"},
+ "properties": {
+ "id": { "type": "string" },
"first_seen": { "type": "string", "format": "date-time" },
"last_seen": { "type": "string", "format": "date-time" },
"type": { "type": "string" },
"message": { "type": "string" },
"culprit": { "type": "string" },
- "count": { "type": "integer"},
+ "count": { "type": "integer" },
"external_url": { "type": "string" },
"external_base_url": { "type": "string" },
- "user_count": { "type": "integer"},
- "title": { "type": "string"},
- "project_id": { "type": "string"},
- "project_name": { "type": "string"},
- "project_slug": { "type": "string"},
- "short_id": { "type": "string"},
- "status": { "type": "string"},
- "frequency": { "type": "array"},
+ "user_count": { "type": "integer" },
+ "tags": {
+ "type": "object",
+ "required": ["level", "logger"],
+ "properties": {
+ "level": {
+ "type": "string"
+ },
+ "logger": {
+ "type": "string"
+ }
+ }
+ },
+ "title": { "type": "string" },
+ "project_id": { "type": "string" },
+ "project_name": { "type": "string" },
+ "project_slug": { "type": "string" },
+ "short_id": { "type": "string" },
+ "status": { "type": "string" },
+ "frequency": { "type": "array" },
"gitlab_issue": { "type": ["string", "null"] },
"first_release_last_commit": { "type": ["string", "null"] },
"last_release_last_commit": { "type": ["string", "null"] },
"first_release_short_version": { "type": ["string", "null"] },
- "last_release_short_version": { "type": ["string", "null"] }
+ "last_release_short_version": { "type": ["string", "null"] },
+ "gitlab_commit": { "type": ["string", "null"] },
+ "gitlab_commit_path": { "type": ["string", "null"] }
},
"additionalProperties": false
}
diff --git a/spec/fixtures/api/schemas/error_tracking/update_issue.json b/spec/fixtures/api/schemas/error_tracking/update_issue.json
new file mode 100644
index 00000000000..72514ce647d
--- /dev/null
+++ b/spec/fixtures/api/schemas/error_tracking/update_issue.json
@@ -0,0 +1,16 @@
+{
+ "type": "object",
+ "required" : [
+ "result"
+ ],
+ "properties" : {
+ "result": {
+ "type": "object",
+ "properties": {
+ "status": { "type": "string" },
+ "updated": { "type": "boolean" }
+ }
+ }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/issue.json b/spec/fixtures/api/schemas/public_api/v4/issue.json
index 147f53239e0..bf1b4a06f0b 100644
--- a/spec/fixtures/api/schemas/public_api/v4/issue.json
+++ b/spec/fixtures/api/schemas/public_api/v4/issue.json
@@ -84,6 +84,11 @@
"total_time_spent": { "type": "integer" },
"human_time_estimate": { "type": ["string", "null"] },
"human_total_time_spent": { "type": ["string", "null"] }
+ },
+ "references": {
+ "short": {"type": "string"},
+ "relative": {"type": "string"},
+ "full": {"type": "string"}
}
},
"required": [
diff --git a/spec/fixtures/api/schemas/public_api/v4/label_basic.json b/spec/fixtures/api/schemas/public_api/v4/label_basic.json
index 37bbdcb14fe..a501bc2ec56 100644
--- a/spec/fixtures/api/schemas/public_api/v4/label_basic.json
+++ b/spec/fixtures/api/schemas/public_api/v4/label_basic.json
@@ -5,6 +5,7 @@
"name",
"color",
"description",
+ "description_html",
"text_color"
],
"properties": {
@@ -15,6 +16,7 @@
"pattern": "^#[0-9A-Fa-f]{3}{1,2}$"
},
"description": { "type": ["string", "null"] },
+ "description_html": { "type": ["string", "null"] },
"text_color": {
"type": "string",
"pattern": "^#[0-9A-Fa-f]{3}{1,2}$"
diff --git a/spec/fixtures/api/schemas/public_api/v4/merge_request.json b/spec/fixtures/api/schemas/public_api/v4/merge_request.json
index a423bf70b69..3bf1299a1d8 100644
--- a/spec/fixtures/api/schemas/public_api/v4/merge_request.json
+++ b/spec/fixtures/api/schemas/public_api/v4/merge_request.json
@@ -113,7 +113,12 @@
"human_total_time_spent": { "type": ["string", "null"] }
},
"allow_collaboration": { "type": ["boolean", "null"] },
- "allow_maintainer_to_push": { "type": ["boolean", "null"] }
+ "allow_maintainer_to_push": { "type": ["boolean", "null"] },
+ "references": {
+ "short": {"type": "string"},
+ "relative": {"type": "string"},
+ "full": {"type": "string"}
+ }
},
"required": [
"id", "iid", "project_id", "title", "description",
diff --git a/spec/fixtures/api/schemas/public_api/v4/service.json b/spec/fixtures/api/schemas/public_api/v4/service.json
new file mode 100644
index 00000000000..b6f13d1cfe7
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/service.json
@@ -0,0 +1,24 @@
+{
+ "type": "object",
+ "properties": {
+ "id": { "type": "integer" },
+ "title": { "type": "string" },
+ "slug": { "type": "string" },
+ "created_at": { "type": "date-time" },
+ "updated_at": { "type": "date-time" },
+ "active": { "type": "boolean" },
+ "commit_events": { "type": "boolean" },
+ "push_events": { "type": "boolean" },
+ "issues_events": { "type": "boolean" },
+ "confidential_issues_events": { "type": "boolean" },
+ "merge_requests_events": { "type": "boolean" },
+ "tag_push_events": { "type": "boolean" },
+ "note_events": { "type": "boolean" },
+ "confidential_note_events": { "type": "boolean" },
+ "pipeline_events": { "type": "boolean" },
+ "wiki_page_events": { "type": "boolean" },
+ "job_events": { "type": "boolean" },
+ "comment_on_event_enabled": { "type": "boolean" }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/services.json b/spec/fixtures/api/schemas/public_api/v4/services.json
new file mode 100644
index 00000000000..78c59ecfa10
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/services.json
@@ -0,0 +1,4 @@
+{
+ "type": "array",
+ "items": { "$ref": "service.json" }
+}
diff --git a/spec/fixtures/emails/envelope_to_header.eml b/spec/fixtures/emails/envelope_to_header.eml
new file mode 100644
index 00000000000..4b6418d4c06
--- /dev/null
+++ b/spec/fixtures/emails/envelope_to_header.eml
@@ -0,0 +1,32 @@
+Return-Path: <jake@example.com>
+Received: from myserver.example.com ([unix socket]) by myserver (Cyrus v2.2.13-Debian-2.2.13-19+squeeze3) with LMTPA; Thu, 13 Jun 2013 17:03:50 -0400
+Received: from mail.example.com (mail.example.com [IPv6:2607:f8b0:4001:c03::234]) by myserver.example.com (8.14.3/8.14.3/Debian-9.4) with ESMTP id r5DL3nFJ016967 (version=TLSv1/SSLv3 cipher=RC4-SHA bits=128 verify=NOT) for <incoming+gitlabhq/gitlabhq@example.com>; Thu, 13 Jun 2013 17:03:50 -0400
+Received: by myserver.example.com with SMTP id f4so21977375iea.25 for <incoming+gitlabhq/gitlabhq@appmail.example.com>; Thu, 13 Jun 2013 14:03:48 -0700
+Received: by 10.0.0.1 with HTTP; Thu, 13 Jun 2013 14:03:48 -0700
+From: "jake@example.com" <jake@example.com>
+To: "support@example.com" <support@example.com>
+Subject: Insert hilarious subject line here
+Date: Tue, 26 Nov 2019 14:22:41 +0000
+Message-ID: <7e2296f83dbf4de388cbf5f56f52c11f@EXDAG29-1.EXCHANGE.INT>
+Accept-Language: de-DE, en-US
+Content-Language: de-DE
+X-MS-Has-Attach:
+X-MS-TNEF-Correlator:
+x-ms-exchange-transport-fromentityheader: Hosted
+x-originating-ip: [62.96.54.178]
+Content-Type: multipart/alternative;
+ boundary="_000_7e2296f83dbf4de388cbf5f56f52c11fEXDAG291EXCHANGEINT_"
+MIME-Version: 1.0
+Envelope-To: incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com
+
+--_000_7e2296f83dbf4de388cbf5f56f52c11fEXDAG291EXCHANGEINT_
+Content-Type: text/plain; charset="iso-8859-1"
+Content-Transfer-Encoding: quoted-printable
+
+
+
+--_000_7e2296f83dbf4de388cbf5f56f52c11fEXDAG291EXCHANGEINT_
+Content-Type: text/html; charset="iso-8859-1"
+Content-Transfer-Encoding: quoted-printable
+
+Look, a message with some alternate headers! We should really support them.
diff --git a/spec/fixtures/emails/forwarded_new_issue.eml b/spec/fixtures/emails/forwarded_new_issue.eml
index 258106bb897..e3688697651 100644
--- a/spec/fixtures/emails/forwarded_new_issue.eml
+++ b/spec/fixtures/emails/forwarded_new_issue.eml
@@ -1,13 +1,13 @@
-Delivered-To: incoming+gitlabhq/gitlabhq+auth_token@appmail.adventuretime.ooo
-Return-Path: <jake@adventuretime.ooo>
-Received: from iceking.adventuretime.ooo ([unix socket]) by iceking (Cyrus v2.2.13-Debian-2.2.13-19+squeeze3) with LMTPA; Thu, 13 Jun 2013 17:03:50 -0400
-Received: from mail-ie0-x234.google.com (mail-ie0-x234.google.com [IPv6:2607:f8b0:4001:c03::234]) by iceking.adventuretime.ooo (8.14.3/8.14.3/Debian-9.4) with ESMTP id r5DL3nFJ016967 (version=TLSv1/SSLv3 cipher=RC4-SHA bits=128 verify=NOT) for <incoming+gitlabhq/gitlabhq@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 17:03:50 -0400
-Received: by mail-ie0-f180.google.com with SMTP id f4so21977375iea.25 for <incoming+gitlabhq/gitlabhq@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 14:03:48 -0700
+Delivered-To: incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com
+Return-Path: <jake@example.com>
+Received: from iceking.example.com ([unix socket]) by iceking (Cyrus v2.2.13-Debian-2.2.13-19+squeeze3) with LMTPA; Thu, 13 Jun 2013 17:03:50 -0400
+Received: from mail-ie0-x234.google.com (mail-ie0-x234.google.com [IPv6:2607:f8b0:4001:c03::234]) by iceking.example.com (8.14.3/8.14.3/Debian-9.4) with ESMTP id r5DL3nFJ016967 (version=TLSv1/SSLv3 cipher=RC4-SHA bits=128 verify=NOT) for <incoming+gitlabhq/gitlabhq@appmail.example.com>; Thu, 13 Jun 2013 17:03:50 -0400
+Received: by mail-ie0-f180.google.com with SMTP id f4so21977375iea.25 for <incoming+gitlabhq/gitlabhq@appmail.example.com>; Thu, 13 Jun 2013 14:03:48 -0700
Received: by 10.0.0.1 with HTTP; Thu, 13 Jun 2013 14:03:48 -0700
Date: Thu, 13 Jun 2013 17:03:48 -0400
-From: Jake the Dog <jake@adventuretime.ooo>
-Delivered-To: support@adventuretime.ooo
-To: support@adventuretime.ooo
+From: Jake the Dog <jake@example.com>
+Delivered-To: support@example.com
+To: support@example.com
Message-ID: <CADkmRc+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com>
Subject: New Issue by email
Mime-Version: 1.0
diff --git a/spec/fixtures/gitlab/sample_metrics/sample_metric_query_result.yml b/spec/fixtures/gitlab/sample_metrics/sample_metric_query_result.yml
index ba074912b3b..ce49f551082 100644
--- a/spec/fixtures/gitlab/sample_metrics/sample_metric_query_result.yml
+++ b/spec/fixtures/gitlab/sample_metrics/sample_metric_query_result.yml
@@ -1,151 +1,332 @@
---
+30:
- metric: {}
values:
- - - 1573560714.209
- - '0.02361297607421875'
- - - 1573560774.209
- - '0.02361297607421875'
- - - 1573560834.209
- - '0.02362823486328125'
- - - 1573560894.209
- - '0.02361297607421875'
- - - 1573560954.209
- - '0.02385711669921875'
- - - 1573561014.209
- - '0.02361297607421875'
- - - 1573561074.209
- - '0.02361297607421875'
- - - 1573561134.209
- - '0.02362060546875'
- - - 1573561194.209
- - '0.02362060546875'
- - - 1573561254.209
- - '0.02362060546875'
- - - 1573561314.209
- - '0.02362060546875'
- - - 1573561374.209
- - '0.023624420166015625'
- - - 1573561434.209
- - '0.023651123046875'
- - - 1573561494.209
- - '0.02362060546875'
- - - 1573561554.209
- - '0.0236358642578125'
- - - 1573561614.209
- - '0.02362060546875'
- - - 1573561674.209
- - '0.02362060546875'
- - - 1573561734.209
- - '0.02362060546875'
- - - 1573561794.209
- - '0.02362060546875'
- - - 1573561854.209
- - '0.02362060546875'
- - - 1573561914.209
- - '0.023651123046875'
- - - 1573561974.209
- - '0.02362060546875'
- - - 1573562034.209
- - '0.02362060546875'
- - - 1573562094.209
- - '0.02362060546875'
- - - 1573562154.209
- - '0.02362060546875'
- - - 1573562214.209
- - '0.023624420166015625'
- - - 1573562274.209
- - '0.02362060546875'
- - - 1573562334.209
- - '0.023868560791015625'
- - - 1573562394.209
- - '0.02374267578125'
- - - 1573562454.209
- - '0.02362060546875'
- - - 1573562514.209
- - '0.02362060546875'
- - - 1573562574.209
- - '0.02362060546875'
- - - 1573562634.209
- - '0.02362060546875'
- - - 1573562694.209
- - '0.023639678955078125'
- - - 1573562754.209
- - '0.0236358642578125'
- - - 1573562814.209
- - '0.02362060546875'
- - - 1573562874.209
- - '0.0236358642578125'
- - - 1573562934.209
- - '0.023651123046875'
- - - 1573562994.209
- - '0.02362060546875'
- - - 1573563054.209
- - '0.023624420166015625'
- - - 1573563114.209
- - '0.02362060546875'
- - - 1573563174.209
- - '0.02362060546875'
- - - 1573563234.209
- - '0.02362060546875'
- - - 1573563294.209
- - '0.02362060546875'
- - - 1573563354.209
- - '0.02362060546875'
- - - 1573563414.209
- - '0.023651123046875'
- - - 1573563474.209
- - '0.023651123046875'
- - - 1573563534.209
- - '0.023651123046875'
- - - 1573563594.209
- - '0.023773193359375'
- - - 1573563654.209
- - '0.023681640625'
- - - 1573563714.209
- - '0.023895263671875'
- - - 1573563774.209
- - '0.023651123046875'
- - - 1573563834.209
- - '0.023651123046875'
- - - 1573563894.209
- - '0.023651123046875'
- - - 1573563954.209
- - '0.0236663818359375'
- - - 1573564014.209
- - '0.023651123046875'
- - - 1573564074.209
- - '0.023681640625'
- - - 1573564134.209
- - '0.0236663818359375'
- - - 1573564194.209
- - '0.0236663818359375'
- - - 1573564254.209
- - '0.023651123046875'
- - - 1573564314.209
- - '0.023651123046875'
- - - 1573564374.209
- - '0.023651123046875'
- - - 1573564434.209
- - '0.023773193359375'
- - - 1573564494.209
- - '0.023651123046875'
- - - 1573564554.209
- - '0.023681640625'
- - - 1573564614.209
- - '0.023773193359375'
- - - 1573564674.209
- - '0.023651123046875'
- - - 1573564734.209
- - '0.023651123046875'
- - - 1573564794.209
- - '0.023651123046875'
- - - 1573564854.209
- - '0.023651123046875'
- - - 1573564914.209
- - '0.023651123046875'
- - - 1573564974.209
- - '0.023651123046875'
- - - 1573565034.209
- - '0.023651123046875'
- - - 1573565094.209
- - '0.023895263671875'
\ No newline at end of file
+ - - 1576719533.248
+ - '0.0006172414678571515'
+ - - 1576719593.248
+ - '0.0006189408976190352'
+ - - 1576719653.248
+ - '0.0006182154988094691'
+ - - 1576719713.248
+ - '0.0006194998404763076'
+ - - 1576719773.248
+ - '0.0006194687678569856'
+ - - 1576719833.248
+ - '0.0006171203535713976'
+ - - 1576719893.248
+ - '0.0006244061773808577'
+ - - 1576719953.248
+ - '0.0006170288511561634'
+ - - 1576720013.248
+ - '0.0006243750281248557'
+ - - 1576720073.248
+ - '0.0006152456571427256'
+ - - 1576720133.248
+ - '0.0006215679095237733'
+ - - 1576720193.248
+ - '0.0006218523571429083'
+ - - 1576720253.248
+ - '0.0006200312440475792'
+ - - 1576720313.248
+ - '0.0006214166202382676'
+ - - 1576720373.248
+ - '0.0006152486976191084'
+ - - 1576720433.248
+ - '0.0006136406750000235'
+ - - 1576720493.248
+ - '0.0006135999154761997'
+ - - 1576720553.248
+ - '0.0006126559190475756'
+ - - 1576720613.248
+ - '0.0006153160392857769'
+ - - 1576720673.248
+ - '0.0006146447178572262'
+ - - 1576720733.248
+ - '0.0006146970476189988'
+ - - 1576720793.248
+ - '0.0006219259035715042'
+ - - 1576720853.248
+ - '0.0006111198750001481'
+ - - 1576720913.248
+ - '0.0006169941035715337'
+ - - 1576720973.248
+ - '0.0006102626761905379'
+ - - 1576721033.248
+ - '0.0006163839964285346'
+180:
+- metric: {}
+ values:
+ - - 1576719533.248
+ - '0.0006172414678571515'
+ - - 1576719593.248
+ - '0.0006189408976190352'
+ - - 1576719653.248
+ - '0.0006182154988094691'
+ - - 1576719713.248
+ - '0.0006194998404763076'
+ - - 1576719773.248
+ - '0.0006194687678569856'
+ - - 1576719833.248
+ - '0.0006171203535713976'
+ - - 1576719893.248
+ - '0.0006244061773808577'
+ - - 1576719953.248
+ - '0.0006170288511561634'
+ - - 1576720013.248
+ - '0.0006243750281248557'
+ - - 1576720073.248
+ - '0.0006152456571427256'
+ - - 1576720133.248
+ - '0.0006215679095237733'
+ - - 1576720193.248
+ - '0.0006218523571429083'
+ - - 1576720253.248
+ - '0.0006200312440475792'
+ - - 1576720313.248
+ - '0.0006214166202382676'
+ - - 1576720373.248
+ - '0.0006152486976191084'
+ - - 1576720433.248
+ - '0.0006136406750000235'
+ - - 1576720493.248
+ - '0.0006135999154761997'
+ - - 1576720553.248
+ - '0.0006126559190475756'
+ - - 1576720613.248
+ - '0.0006153160392857769'
+ - - 1576720673.248
+ - '0.0006146447178572262'
+ - - 1576720733.248
+ - '0.0006146970476189988'
+ - - 1576720793.248
+ - '0.0006219259035715042'
+ - - 1576720853.248
+ - '0.0006111198750001481'
+ - - 1576720913.248
+ - '0.0006169941035715337'
+ - - 1576720973.248
+ - '0.0006102626761905379'
+ - - 1576721033.248
+ - '0.0006163839964285346'
+480:
+- metric: {}
+ values:
+ - - 1576719533.248
+ - '0.0006172414678571515'
+ - - 1576719593.248
+ - '0.0006189408976190352'
+ - - 1576719653.248
+ - '0.0006182154988094691'
+ - - 1576719713.248
+ - '0.0006194998404763076'
+ - - 1576719773.248
+ - '0.0006194687678569856'
+ - - 1576719833.248
+ - '0.0006171203535713976'
+ - - 1576719893.248
+ - '0.0006244061773808577'
+ - - 1576719953.248
+ - '0.0006170288511561634'
+ - - 1576720013.248
+ - '0.0006243750281248557'
+ - - 1576720073.248
+ - '0.0006152456571427256'
+ - - 1576720133.248
+ - '0.0006215679095237733'
+ - - 1576720193.248
+ - '0.0006218523571429083'
+ - - 1576720253.248
+ - '0.0006200312440475792'
+ - - 1576720313.248
+ - '0.0006214166202382676'
+ - - 1576720373.248
+ - '0.0006152486976191084'
+ - - 1576720433.248
+ - '0.0006136406750000235'
+ - - 1576720493.248
+ - '0.0006135999154761997'
+ - - 1576720553.248
+ - '0.0006126559190475756'
+ - - 1576720613.248
+ - '0.0006153160392857769'
+ - - 1576720673.248
+ - '0.0006146447178572262'
+ - - 1576720733.248
+ - '0.0006146970476189988'
+ - - 1576720793.248
+ - '0.0006219259035715042'
+ - - 1576720853.248
+ - '0.0006111198750001481'
+ - - 1576720913.248
+ - '0.0006169941035715337'
+ - - 1576720973.248
+ - '0.0006102626761905379'
+ - - 1576721033.248
+ - '0.0006163839964285346'
+1440:
+- metric: {}
+ values:
+ - - 1576719533.248
+ - '0.0006172414678571515'
+ - - 1576719593.248
+ - '0.0006189408976190352'
+ - - 1576719653.248
+ - '0.0006182154988094691'
+ - - 1576719713.248
+ - '0.0006194998404763076'
+ - - 1576719773.248
+ - '0.0006194687678569856'
+ - - 1576719833.248
+ - '0.0006171203535713976'
+ - - 1576719893.248
+ - '0.0006244061773808577'
+ - - 1576719953.248
+ - '0.0006170288511561634'
+ - - 1576720013.248
+ - '0.0006243750281248557'
+ - - 1576720073.248
+ - '0.0006152456571427256'
+ - - 1576720133.248
+ - '0.0006215679095237733'
+ - - 1576720193.248
+ - '0.0006218523571429083'
+ - - 1576720253.248
+ - '0.0006200312440475792'
+ - - 1576720313.248
+ - '0.0006214166202382676'
+ - - 1576720373.248
+ - '0.0006152486976191084'
+ - - 1576720433.248
+ - '0.0006136406750000235'
+ - - 1576720493.248
+ - '0.0006135999154761997'
+ - - 1576720553.248
+ - '0.0006126559190475756'
+ - - 1576720613.248
+ - '0.0006153160392857769'
+ - - 1576720673.248
+ - '0.0006146447178572262'
+ - - 1576720733.248
+ - '0.0006146970476189988'
+ - - 1576720793.248
+ - '0.0006219259035715042'
+ - - 1576720853.248
+ - '0.0006111198750001481'
+ - - 1576720913.248
+ - '0.0006169941035715337'
+ - - 1576720973.248
+ - '0.0006102626761905379'
+ - - 1576721033.248
+ - '0.0006163839964285346'
+4320:
+- metric: {}
+ values:
+ - - 1576719533.248
+ - '0.0006172414678571515'
+ - - 1576719593.248
+ - '0.0006189408976190352'
+ - - 1576719653.248
+ - '0.0006182154988094691'
+ - - 1576719713.248
+ - '0.0006194998404763076'
+ - - 1576719773.248
+ - '0.0006194687678569856'
+ - - 1576719833.248
+ - '0.0006171203535713976'
+ - - 1576719893.248
+ - '0.0006244061773808577'
+ - - 1576719953.248
+ - '0.0006170288511561634'
+ - - 1576720013.248
+ - '0.0006243750281248557'
+ - - 1576720073.248
+ - '0.0006152456571427256'
+ - - 1576720133.248
+ - '0.0006215679095237733'
+ - - 1576720193.248
+ - '0.0006218523571429083'
+ - - 1576720253.248
+ - '0.0006200312440475792'
+ - - 1576720313.248
+ - '0.0006214166202382676'
+ - - 1576720373.248
+ - '0.0006152486976191084'
+ - - 1576720433.248
+ - '0.0006136406750000235'
+ - - 1576720493.248
+ - '0.0006135999154761997'
+ - - 1576720553.248
+ - '0.0006126559190475756'
+ - - 1576720613.248
+ - '0.0006153160392857769'
+ - - 1576720673.248
+ - '0.0006146447178572262'
+ - - 1576720733.248
+ - '0.0006146970476189988'
+ - - 1576720793.248
+ - '0.0006219259035715042'
+ - - 1576720853.248
+ - '0.0006111198750001481'
+ - - 1576720913.248
+ - '0.0006169941035715337'
+ - - 1576720973.248
+ - '0.0006102626761905379'
+ - - 1576721033.248
+ - '0.0006163839964285346'
+10080:
+- metric: {}
+ values:
+ - - 1576719533.248
+ - '0.0006172414678571515'
+ - - 1576719593.248
+ - '0.0006189408976190352'
+ - - 1576719653.248
+ - '0.0006182154988094691'
+ - - 1576719713.248
+ - '0.0006194998404763076'
+ - - 1576719773.248
+ - '0.0006194687678569856'
+ - - 1576719833.248
+ - '0.0006171203535713976'
+ - - 1576719893.248
+ - '0.0006244061773808577'
+ - - 1576719953.248
+ - '0.0006170288511561634'
+ - - 1576720013.248
+ - '0.0006243750281248557'
+ - - 1576720073.248
+ - '0.0006152456571427256'
+ - - 1576720133.248
+ - '0.0006215679095237733'
+ - - 1576720193.248
+ - '0.0006218523571429083'
+ - - 1576720253.248
+ - '0.0006200312440475792'
+ - - 1576720313.248
+ - '0.0006214166202382676'
+ - - 1576720373.248
+ - '0.0006152486976191084'
+ - - 1576720433.248
+ - '0.0006136406750000235'
+ - - 1576720493.248
+ - '0.0006135999154761997'
+ - - 1576720553.248
+ - '0.0006126559190475756'
+ - - 1576720613.248
+ - '0.0006153160392857769'
+ - - 1576720673.248
+ - '0.0006146447178572262'
+ - - 1576720733.248
+ - '0.0006146970476189988'
+ - - 1576720793.248
+ - '0.0006219259035715042'
+ - - 1576720853.248
+ - '0.0006111198750001481'
+ - - 1576720913.248
+ - '0.0006169941035715337'
+ - - 1576720973.248
+ - '0.0006102626761905379'
+ - - 1576721033.248
+ - '0.0006163839964285346'
+ 
\ No newline at end of file
diff --git a/spec/fixtures/lib/gitlab/import_export/complex/project.json b/spec/fixtures/lib/gitlab/import_export/complex/project.json
index 583d6c7b78a..7d784fbd54f 100644
--- a/spec/fixtures/lib/gitlab/import_export/complex/project.json
+++ b/spec/fixtures/lib/gitlab/import_export/complex/project.json
@@ -80,6 +80,17 @@
"issue_id": 40
}
],
+ "award_emoji": [
+ {
+ "id": 1,
+ "name": "musical_keyboard",
+ "user_id": 1,
+ "awardable_type": "Issue",
+ "awardable_id": 40,
+ "created_at": "2020-01-07T11:55:22.234Z",
+ "updated_at": "2020-01-07T11:55:22.234Z"
+ }
+ ],
"zoom_meetings": [
{
"id": 1,
@@ -188,7 +199,18 @@
"author": {
"name": "User 4"
},
- "events": []
+ "events": [],
+ "award_emoji": [
+ {
+ "id": 1,
+ "name": "clapper",
+ "user_id": 1,
+ "awardable_type": "Note",
+ "awardable_id": 351,
+ "created_at": "2020-01-07T11:55:22.234Z",
+ "updated_at": "2020-01-07T11:55:22.234Z"
+ }
+ ]
},
{
"id": 352,
@@ -1980,7 +2002,7 @@
},
{
"id": 31,
- "title": "Libero nam magnam incidunt eaque placeat error et.",
+ "title": "issue_with_timelogs",
"author_id": 16,
"project_id": 5,
"created_at": "2016-06-14T15:02:07.280Z",
@@ -1994,6 +2016,16 @@
"confidential": false,
"due_date": null,
"moved_to_id": null,
+ "timelogs": [
+ {
+ "id": 1,
+ "time_spent": 72000,
+ "user_id": 1,
+ "created_at": "2019-12-27T09:15:22.302Z",
+ "updated_at": "2019-12-27T09:15:22.302Z",
+ "spent_at": "2019-12-27T00:00:00.000Z"
+ }
+ ],
"notes": [
{
"id": 423,
@@ -2297,10 +2329,58 @@
"updated_at": "2019-11-05T15:37:24.645Z"
}
],
- "notes": []
+ "notes": [
+ {
+ "id": 872,
+ "note": "This is a test note",
+ "noteable_type": "Snippet",
+ "author_id": 1,
+ "created_at": "2019-11-05T15:37:24.645Z",
+ "updated_at": "2019-11-05T15:37:24.645Z",
+ "noteable_id": 1,
+ "author": {
+ "name": "Random name"
+ },
+ "events": [],
+ "award_emoji": [
+ {
+ "id": 12,
+ "name": "thumbsup",
+ "user_id": 1,
+ "awardable_type": "Note",
+ "awardable_id": 872,
+ "created_at": "2019-11-05T15:37:21.287Z",
+ "updated_at": "2019-11-05T15:37:21.287Z"
+ }
+ ]
+ }
+ ]
+ }
+ ],
+ "releases": [
+ {
+ "id": 1,
+ "tag": "release-1.1",
+ "description": "Some release notes",
+ "project_id": 5,
+ "created_at": "2019-12-26T10:17:14.621Z",
+ "updated_at": "2019-12-26T10:17:14.621Z",
+ "author_id": 1,
+ "name": "release-1.1",
+ "sha": "901de3a8bd5573f4a049b1457d28bc1592ba6bf9",
+ "released_at": "2019-12-26T10:17:14.615Z",
+ "links": [
+ {
+ "id": 1,
+ "release_id" : 1,
+ "url": "http://localhost/namespace6/project6/-/jobs/140463678/artifacts/download",
+ "name": "release-1.1.dmg",
+ "created_at": "2019-12-26T10:17:14.621Z",
+ "updated_at": "2019-12-26T10:17:14.621Z"
+ }
+ ]
}
],
- "releases": [],
"project_members": [
{
"id": 36,
@@ -2434,7 +2514,18 @@
"author": {
"name": "User 4"
},
- "events": []
+ "events": [],
+ "award_emoji": [
+ {
+ "id": 1,
+ "name": "tada",
+ "user_id": 1,
+ "awardable_type": "Note",
+ "awardable_id": 1,
+ "created_at": "2019-11-05T15:37:21.287Z",
+ "updated_at": "2019-11-05T15:37:21.287Z"
+ }
+ ]
},
{
"id": 672,
@@ -2840,7 +2931,27 @@
"author_id": 1
}
],
- "approvals_before_merge": 1
+ "approvals_before_merge": 1,
+ "award_emoji": [
+ {
+ "id": 1,
+ "name": "thumbsup",
+ "user_id": 1,
+ "awardable_type": "MergeRequest",
+ "awardable_id": 27,
+ "created_at": "2020-01-07T11:21:21.235Z",
+ "updated_at": "2020-01-07T11:21:21.235Z"
+ },
+ {
+ "id": 2,
+ "name": "drum",
+ "user_id": 1,
+ "awardable_type": "MergeRequest",
+ "awardable_id": 27,
+ "created_at": "2020-01-07T11:21:21.235Z",
+ "updated_at": "2020-01-07T11:21:21.235Z"
+ }
+ ]
},
{
"id": 26,
@@ -6738,6 +6849,40 @@
"duration": null,
"stages": [
]
+ },
+ {
+ "id": 42,
+ "project_id": 5,
+ "ref": "master",
+ "sha": "ce84140e8b878ce6e7c4d298c7202ff38170e3ac",
+ "before_sha": null,
+ "push_data": null,
+ "created_at": "2016-03-22T15:20:35.763Z",
+ "updated_at": "2016-03-22T15:20:35.763Z",
+ "tag": false,
+ "yaml_errors": null,
+ "committed_at": null,
+ "status": "failed",
+ "started_at": null,
+ "finished_at": null,
+ "duration": null,
+ "stages": [
+ ],
+ "source": "external_pull_request_event",
+ "external_pull_request":
+ {
+ "id": 3,
+ "pull_request_iid": 4,
+ "source_branch": "feature",
+ "target_branch": "master",
+ "source_repository": "the-repository",
+ "target_repository": "the-repository",
+ "source_sha": "ce84140e8b878ce6e7c4d298c7202ff38170e3ac",
+ "target_sha": "a09386439ca39abe575675ffd4b89ae824fec22f",
+ "status": "open",
+ "created_at": "2016-03-22T15:20:35.763Z",
+ "updated_at": "2016-03-22T15:20:35.763Z"
+ }
}
],
"triggers": [
@@ -6757,6 +6902,21 @@
"updated_at": "2017-01-16T15:25:29.637Z"
}
],
+ "pipeline_schedules": [
+ {
+ "id": 1,
+ "description": "Schedule Description",
+ "ref": "master",
+ "cron": "0 4 * * 0",
+ "cron_timezone": "UTC",
+ "next_run_at": "2019-12-29T04:19:00.000Z",
+ "project_id": 5,
+ "owner_id": 1,
+ "active": true,
+ "created_at": "2019-12-26T10:14:57.778Z",
+ "updated_at": "2019-12-26T10:14:57.778Z"
+ }
+ ],
"container_expiration_policy": {
"created_at": "2019-12-13 13:45:04 UTC",
"updated_at": "2019-12-13 13:45:04 UTC",
@@ -7276,6 +7436,33 @@
"ci_cd_settings": {
"group_runners_enabled": false
},
+ "auto_devops": {
+ "id": 1,
+ "created_at": "2017-10-19T15:36:23.466Z",
+ "updated_at": "2017-10-19T15:36:23.466Z",
+ "enabled": null,
+ "deploy_strategy": "continuous"
+ },
+ "error_tracking_setting": {
+ "api_url": "https://gitlab.example.com/api/0/projects/sentry-org/sentry-project",
+ "project_name": "Sentry Project",
+ "organization_name": "Sentry Org"
+ },
+ "external_pull_requests": [
+ {
+ "id": 3,
+ "pull_request_iid": 4,
+ "source_branch": "feature",
+ "target_branch": "master",
+ "source_repository": "the-repository",
+ "target_repository": "the-repository",
+ "source_sha": "ce84140e8b878ce6e7c4d298c7202ff38170e3ac",
+ "target_sha": "a09386439ca39abe575675ffd4b89ae824fec22f",
+ "status": "open",
+ "created_at": "2019-12-24T14:04:50.053Z",
+ "updated_at": "2019-12-24T14:05:18.138Z"
+ }
+ ],
"boards": [
{
"id": 29,
diff --git a/spec/fixtures/lib/gitlab/import_export/group/project.json b/spec/fixtures/lib/gitlab/import_export/group/project.json
index 47faf271cca..ce4fa1981ff 100644
--- a/spec/fixtures/lib/gitlab/import_export/group/project.json
+++ b/spec/fixtures/lib/gitlab/import_export/group/project.json
@@ -175,6 +175,67 @@
}
}
]
+ },
+ {
+ "id": 3,
+ "title": "Issue with Epic",
+ "author_id": 1,
+ "project_id": 8,
+ "created_at": "2019-12-08T19:41:11.233Z",
+ "updated_at": "2019-12-08T19:41:53.194Z",
+ "position": 0,
+ "branch_name": null,
+ "description": "Donec at nulla vitae sem molestie rutrum ut at sem.",
+ "state": "opened",
+ "iid": 3,
+ "updated_by_id": null,
+ "confidential": false,
+ "due_date": null,
+ "moved_to_id": null,
+ "issue_assignees": [],
+ "notes": [],
+ "milestone": {
+ "id": 2,
+ "title": "A group milestone",
+ "description": "Group-level milestone",
+ "due_date": null,
+ "created_at": "2016-06-14T15:02:04.415Z",
+ "updated_at": "2016-06-14T15:02:04.415Z",
+ "state": "active",
+ "iid": 1,
+ "group_id": 100
+ },
+ "epic": {
+ "id": 1,
+ "group_id": 5,
+ "author_id": 1,
+ "assignee_id": null,
+ "iid": 1,
+ "updated_by_id": null,
+ "last_edited_by_id": null,
+ "lock_version": 0,
+ "start_date": null,
+ "end_date": null,
+ "last_edited_at": null,
+ "created_at": "2019-12-08T19:37:07.098Z",
+ "updated_at": "2019-12-08T19:43:11.568Z",
+ "title": "An epic",
+ "description": null,
+ "start_date_sourcing_milestone_id": null,
+ "due_date_sourcing_milestone_id": null,
+ "start_date_fixed": null,
+ "due_date_fixed": null,
+ "start_date_is_fixed": null,
+ "due_date_is_fixed": null,
+ "closed_by_id": null,
+ "closed_at": null,
+ "parent_id": null,
+ "relative_position": null,
+ "state_id": "opened",
+ "start_date_sourcing_epic_id": null,
+ "due_date_sourcing_epic_id": null,
+ "milestone_id": null
+ }
}
],
"snippets": [
diff --git a/spec/fixtures/markdown.md.erb b/spec/fixtures/markdown.md.erb
index b19b45928d9..59795c835a2 100644
--- a/spec/fixtures/markdown.md.erb
+++ b/spec/fixtures/markdown.md.erb
@@ -111,7 +111,13 @@ Markdown should be usable inside a link. Let's try!
- [**text**](#link-strong)
- [`text`](#link-code)
-### RelativeLinkFilter
+### UploadLinkFilter
+
+Linking to an upload in this project should work:
+[Relative Upload Link](/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg)
+![Relative Upload Image](/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg)
+
+### RepositoryLinkFilter
Linking to a file relative to this project's repository should work.
diff --git a/spec/fixtures/not_a_png.png b/spec/fixtures/not_a_png.png
new file mode 100644
index 00000000000..932f9efaed9
--- /dev/null
+++ b/spec/fixtures/not_a_png.png
Binary files differ
diff --git a/spec/fixtures/referees/metrics_referee.json.gz b/spec/fixtures/referees/metrics_referee.json.gz
new file mode 100644
index 00000000000..88b7de6fa61
--- /dev/null
+++ b/spec/fixtures/referees/metrics_referee.json.gz
Binary files differ
diff --git a/spec/fixtures/referees/network_referee.json.gz b/spec/fixtures/referees/network_referee.json.gz
new file mode 100644
index 00000000000..88b7de6fa61
--- /dev/null
+++ b/spec/fixtures/referees/network_referee.json.gz
Binary files differ
diff --git a/spec/fixtures/sentry/issue_latest_event_no_stack_sample_response.json b/spec/fixtures/sentry/issue_latest_event_no_stack_sample_response.json
new file mode 100644
index 00000000000..c0860ebbbd8
--- /dev/null
+++ b/spec/fixtures/sentry/issue_latest_event_no_stack_sample_response.json
@@ -0,0 +1,300 @@
+{
+ "eventID": "333b98e3b91341d8a6247edff171d8cf",
+ "dist": null,
+ "userReport": null,
+ "projectID": "1788822",
+ "previousEventID": "d32f1ce60de14911beec5109d9b5bdbd",
+ "message": null,
+ "id": "333b98e3b91341d8a6247edff171d8cf",
+ "size": 77202,
+ "errors": [
+ {
+ "data": {
+ "reason": "the cookie is missing a name/value pair",
+ "name": "request.cookies",
+ "value": "********"
+ },
+ "message": "Discarded invalid value",
+ "type": "invalid_data"
+ },
+ {
+ "data": {
+ "reason": "the cookie is missing a name/value pair",
+ "name": "request.cookies",
+ "value": "********"
+ },
+ "message": "Discarded invalid value",
+ "type": "invalid_data"
+ }
+ ],
+ "culprit": "/",
+ "title": "ActiveRecord::NoDatabaseError: FATAL: database \"test_development\" does not exist",
+ "sdkUpdates": [],
+ "platform": "ruby",
+ "location": "active_record/connection_adapters/postgresql_adapter.rb",
+ "nextEventID": null,
+ "type": "error",
+ "metadata": {
+ "function": "rescue in connect",
+ "type": "ActiveRecord::NoDatabaseError",
+ "value": "FATAL: database \"test_development\" does not exist\n",
+ "filename": "active_record/connection_adapters/postgresql_adapter.rb"
+ },
+ "groupingConfig": {
+ "enhancements": "eJybzDhxY3J-bm5-npWRgaGlroGxrpHxBABcTQcY",
+ "id": "newstyle:2019-05-08"
+ },
+ "crashFile": null,
+ "tags": [
+ {
+ "value": "Chrome 78.0.3904",
+ "key": "browser",
+ "_meta": null
+ },
+ {
+ "value": "Chrome",
+ "key": "browser.name",
+ "_meta": null
+ },
+ {
+ "value": "Mac OS X 10.15.1",
+ "key": "client_os",
+ "_meta": null
+ },
+ {
+ "value": "Mac OS X",
+ "key": "client_os.name",
+ "_meta": null
+ },
+ {
+ "value": "development",
+ "key": "environment",
+ "_meta": null
+ },
+ {
+ "value": "error",
+ "key": "level",
+ "_meta": null
+ },
+ {
+ "value": "ruby",
+ "key": "logger",
+ "_meta": null
+ },
+ {
+ "value": "b56ae26",
+ "key": "release",
+ "_meta": null
+ },
+ {
+ "value": "Seans-MBP.fritz.box",
+ "key": "server_name",
+ "_meta": null
+ },
+ {
+ "value": "/",
+ "key": "transaction",
+ "_meta": null
+ },
+ {
+ "value": "http://localhost:3001/",
+ "key": "url",
+ "_meta": null
+ },
+ {
+ "query": "user.ip:\"::1\"",
+ "value": "ip:::1",
+ "key": "user",
+ "_meta": null
+ }
+ ],
+ "dateCreated": "2019-12-08T21:48:07Z",
+ "dateReceived": "2019-12-08T21:48:08.579417Z",
+ "user": {
+ "username": null,
+ "name": null,
+ "ip_address": "::1",
+ "email": null,
+ "data": null,
+ "id": null
+ },
+ "entries": [],
+ "packages": {
+ "coffee-script": "2.4.1",
+ "uglifier": "4.1.20",
+ "ffi": "1.11.1",
+ "actioncable": "5.2.3",
+ "io-like": "0.3.0",
+ "rb-inotify": "0.10.0",
+ "spring": "2.1.0",
+ "loofah": "2.2.3",
+ "selenium-webdriver": "3.142.3",
+ "marcel": "0.3.3",
+ "sass-listen": "4.0.0",
+ "nokogiri": "1.10.4",
+ "activestorage": "5.2.3",
+ "activejob": "5.2.3",
+ "mimemagic": "0.3.3",
+ "faraday": "0.17.1",
+ "execjs": "2.7.0",
+ "activesupport": "5.2.3",
+ "rails-html-sanitizer": "1.2.0",
+ "byebug": "11.0.1",
+ "xpath": "3.2.0",
+ "msgpack": "1.3.1",
+ "childprocess": "1.0.1",
+ "rails-dom-testing": "2.0.3",
+ "public_suffix": "3.1.1",
+ "mini_mime": "1.0.2",
+ "arel": "9.0.0",
+ "coffee-rails": "4.2.2",
+ "bundler": "1.17.3",
+ "rails": "5.2.3",
+ "globalid": "0.4.2",
+ "sentry-raven": "2.12.3",
+ "concurrent-ruby": "1.1.5",
+ "capybara": "3.28.0",
+ "regexp_parser": "1.6.0",
+ "sprockets-rails": "3.2.1",
+ "tzinfo": "1.2.5",
+ "mail": "2.7.1",
+ "actionview": "5.2.3",
+ "rubyzip": "1.2.3",
+ "coffee-script-source": "1.12.2",
+ "listen": "3.1.5",
+ "i18n": "1.6.0",
+ "erubi": "1.8.0",
+ "rake": "12.3.3",
+ "nio4r": "2.4.0",
+ "activemodel": "5.2.3",
+ "web-console": "3.7.0",
+ "ruby_dep": "1.5.0",
+ "turbolinks": "5.2.0",
+ "archive-zip": "0.12.0",
+ "method_source": "0.9.2",
+ "minitest": "5.11.3",
+ "puma": "3.12.1",
+ "sass-rails": "5.1.0",
+ "chromedriver-helper": "2.1.1",
+ "sprockets": "3.7.2",
+ "bindex": "0.8.1",
+ "actionmailer": "5.2.3",
+ "rack-test": "1.1.0",
+ "bootsnap": "1.4.4",
+ "railties": "5.2.3",
+ "mini_portile2": "2.4.0",
+ "crass": "1.0.4",
+ "websocket-extensions": "0.1.4",
+ "multipart-post": "2.1.1",
+ "rb-fsevent": "0.10.3",
+ "jbuilder": "2.9.1",
+ "pg": "1.1.4",
+ "sass": "3.7.4",
+ "activerecord": "5.2.3",
+ "builder": "3.2.3",
+ "spring-watcher-listen": "2.0.1",
+ "websocket-driver": "0.7.1",
+ "thor": "0.20.3",
+ "thread_safe": "0.3.6",
+ "addressable": "2.6.0",
+ "prometheus-client-mmap": "0.9.8",
+ "tilt": "2.0.9",
+ "actionpack": "5.2.3",
+ "rack": "2.0.7",
+ "turbolinks-source": "5.2.0"
+ },
+ "sdk": {
+ "version": "2.12.3",
+ "name": "raven-ruby"
+ },
+ "_meta": {
+ "user": null,
+ "context": null,
+ "entries": {
+ "1": {
+ "data": {
+ "": null,
+ "cookies": {
+ "": {
+ "err": [
+ [
+ "invalid_data",
+ {
+ "reason": "the cookie is missing a name/value pair"
+ }
+ ]
+ ],
+ "val": "********"
+ }
+ },
+ "url": null,
+ "headers": null,
+ "env": null,
+ "query": null,
+ "data": null,
+ "method": null
+ }
+ }
+ },
+ "contexts": null,
+ "message": null,
+ "packages": null,
+ "tags": {},
+ "sdk": null
+ },
+ "contexts": {
+ "browser": {
+ "version": "78.0.3904",
+ "type": "browser",
+ "name": "Chrome"
+ },
+ "client_os": {
+ "version": "10.15.1",
+ "type": "os",
+ "name": "Mac OS X"
+ }
+ },
+ "fingerprints": [
+ "6aa133ea51857634f2d113de52b5cc61",
+ "e1613eeb169241eab95b76ab52a80c68"
+ ],
+ "context": {
+ "server": {
+ "runtime": {
+ "version": "ruby 2.6.5p114 (2019-10-01 revision 67812) [x86_64-darwin18]",
+ "name": "ruby"
+ },
+ "os": {
+ "kernel_version": "Darwin Seans-MBP.fritz.box 19.0.0 Darwin Kernel Version 19.0.0: Thu Oct 17 16:17:15 PDT 2019; root:xnu-6153.41.3~29/RELEASE_X86_64 x86_64",
+ "version": "Darwin Kernel Version 19.0.0: Thu Oct 17 16:17:15 PDT 2019; root:xnu-6153.41.3~29/RELEASE_X86_64",
+ "build": "19.0.0",
+ "name": "Darwin"
+ }
+ }
+ },
+ "release": {
+ "dateReleased": null,
+ "commitCount": 0,
+ "url": null,
+ "data": {},
+ "lastDeploy": null,
+ "deployCount": 0,
+ "dateCreated": "2019-12-08T21:47:47Z",
+ "lastEvent": "2019-12-09T21:52:05Z",
+ "version": "b56ae26",
+ "firstEvent": "2019-12-08T21:47:47Z",
+ "lastCommit": null,
+ "shortVersion": "b56ae26",
+ "authors": [],
+ "owner": null,
+ "newGroups": 26,
+ "ref": null,
+ "projects": [
+ {
+ "slug": "gitlab-03",
+ "name": "gitlab-03"
+ }
+ ]
+ },
+ "groupID": "1378364652"
+}
diff --git a/spec/fixtures/sentry/issue_latest_event_sample_response.json b/spec/fixtures/sentry/issue_latest_event_sample_response.json
new file mode 100644
index 00000000000..f047eb07e1f
--- /dev/null
+++ b/spec/fixtures/sentry/issue_latest_event_sample_response.json
@@ -0,0 +1,5299 @@
+{
+ "eventID": "333b98e3b91341d8a6247edff171d8cf",
+ "dist": null,
+ "userReport": null,
+ "projectID": "1788822",
+ "previousEventID": "d32f1ce60de14911beec5109d9b5bdbd",
+ "message": null,
+ "id": "333b98e3b91341d8a6247edff171d8cf",
+ "size": 77202,
+ "errors": [
+ {
+ "data": {
+ "reason": "the cookie is missing a name/value pair",
+ "name": "request.cookies",
+ "value": "********"
+ },
+ "message": "Discarded invalid value",
+ "type": "invalid_data"
+ },
+ {
+ "data": {
+ "reason": "the cookie is missing a name/value pair",
+ "name": "request.cookies",
+ "value": "********"
+ },
+ "message": "Discarded invalid value",
+ "type": "invalid_data"
+ }
+ ],
+ "culprit": "/",
+ "title": "ActiveRecord::NoDatabaseError: FATAL: database \"test_development\" does not exist",
+ "sdkUpdates": [],
+ "platform": "ruby",
+ "location": "active_record/connection_adapters/postgresql_adapter.rb",
+ "nextEventID": null,
+ "type": "error",
+ "metadata": {
+ "function": "rescue in connect",
+ "type": "ActiveRecord::NoDatabaseError",
+ "value": "FATAL: database \"test_development\" does not exist\n",
+ "filename": "active_record/connection_adapters/postgresql_adapter.rb"
+ },
+ "groupingConfig": {
+ "enhancements": "eJybzDhxY3J-bm5-npWRgaGlroGxrpHxBABcTQcY",
+ "id": "newstyle:2019-05-08"
+ },
+ "crashFile": null,
+ "tags": [
+ {
+ "value": "Chrome 78.0.3904",
+ "key": "browser",
+ "_meta": null
+ },
+ {
+ "value": "Chrome",
+ "key": "browser.name",
+ "_meta": null
+ },
+ {
+ "value": "Mac OS X 10.15.1",
+ "key": "client_os",
+ "_meta": null
+ },
+ {
+ "value": "Mac OS X",
+ "key": "client_os.name",
+ "_meta": null
+ },
+ {
+ "value": "development",
+ "key": "environment",
+ "_meta": null
+ },
+ {
+ "value": "error",
+ "key": "level",
+ "_meta": null
+ },
+ {
+ "value": "ruby",
+ "key": "logger",
+ "_meta": null
+ },
+ {
+ "value": "b56ae26",
+ "key": "release",
+ "_meta": null
+ },
+ {
+ "value": "Seans-MBP.fritz.box",
+ "key": "server_name",
+ "_meta": null
+ },
+ {
+ "value": "/",
+ "key": "transaction",
+ "_meta": null
+ },
+ {
+ "value": "http://localhost:3001/",
+ "key": "url",
+ "_meta": null
+ },
+ {
+ "query": "user.ip:\"::1\"",
+ "value": "ip:::1",
+ "key": "user",
+ "_meta": null
+ }
+ ],
+ "dateCreated": "2019-12-08T21:48:07Z",
+ "dateReceived": "2019-12-08T21:48:08.579417Z",
+ "user": {
+ "username": null,
+ "name": null,
+ "ip_address": "::1",
+ "email": null,
+ "data": null,
+ "id": null
+ },
+ "entries": [
+ {
+ "type": "exception",
+ "data": {
+ "values": [
+ {
+ "stacktrace": {
+ "frames": [
+ {
+ "function": "block in spawn_thread",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/puma-3.12.1/lib/puma/thread_pool.rb",
+ "inApp": false,
+ "lineNo": 135,
+ "module": null,
+ "filename": "puma/thread_pool.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 132,
+ " end\n"
+ ],
+ [
+ 133,
+ "\n"
+ ],
+ [
+ 134,
+ " begin\n"
+ ],
+ [
+ 135,
+ " block.call(work, *extra)\n"
+ ],
+ [
+ 136,
+ " rescue Exception => e\n"
+ ],
+ [
+ 137,
+ " STDERR.puts \"Error reached top of thread-pool: #{e.message} (#{e.class})\"\n"
+ ],
+ [
+ 138,
+ " end\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "block in run",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/puma-3.12.1/lib/puma/server.rb",
+ "inApp": false,
+ "lineNo": 334,
+ "module": null,
+ "filename": "puma/server.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 331,
+ " client.close\n"
+ ],
+ [
+ 332,
+ " else\n"
+ ],
+ [
+ 333,
+ " if process_now\n"
+ ],
+ [
+ 334,
+ " process_client client, buffer\n"
+ ],
+ [
+ 335,
+ " else\n"
+ ],
+ [
+ 336,
+ " client.set_timeout @first_data_timeout\n"
+ ],
+ [
+ 337,
+ " @reactor.add client\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "process_client",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/puma-3.12.1/lib/puma/server.rb",
+ "inApp": false,
+ "lineNo": 474,
+ "module": null,
+ "filename": "puma/server.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 471,
+ " close_socket = true\n"
+ ],
+ [
+ 472,
+ "\n"
+ ],
+ [
+ 473,
+ " while true\n"
+ ],
+ [
+ 474,
+ " case handle_request(client, buffer)\n"
+ ],
+ [
+ 475,
+ " when false\n"
+ ],
+ [
+ 476,
+ " return\n"
+ ],
+ [
+ 477,
+ " when :async\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "handle_request",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/puma-3.12.1/lib/puma/server.rb",
+ "inApp": false,
+ "lineNo": 660,
+ "module": null,
+ "filename": "puma/server.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 657,
+ "\n"
+ ],
+ [
+ 658,
+ " begin\n"
+ ],
+ [
+ 659,
+ " begin\n"
+ ],
+ [
+ 660,
+ " status, headers, res_body = @app.call(env)\n"
+ ],
+ [
+ 661,
+ "\n"
+ ],
+ [
+ 662,
+ " return :async if req.hijacked\n"
+ ],
+ [
+ 663,
+ "\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/puma-3.12.1/lib/puma/configuration.rb",
+ "inApp": false,
+ "lineNo": 227,
+ "module": null,
+ "filename": "puma/configuration.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 224,
+ "\n"
+ ],
+ [
+ 225,
+ " def call(env)\n"
+ ],
+ [
+ 226,
+ " env[Const::PUMA_CONFIG] = @config\n"
+ ],
+ [
+ 227,
+ " @app.call(env)\n"
+ ],
+ [
+ 228,
+ " end\n"
+ ],
+ [
+ 229,
+ " end\n"
+ ],
+ [
+ 230,
+ "\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/prometheus-client-mmap-0.9.8/lib/prometheus/client/rack/collector.rb",
+ "inApp": false,
+ "lineNo": 24,
+ "module": null,
+ "filename": "prometheus/client/rack/collector.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 21,
+ " end\n"
+ ],
+ [
+ 22,
+ "\n"
+ ],
+ [
+ 23,
+ " def call(env) # :nodoc:\n"
+ ],
+ [
+ 24,
+ " trace(env) { @app.call(env) }\n"
+ ],
+ [
+ 25,
+ " end\n"
+ ],
+ [
+ 26,
+ "\n"
+ ],
+ [
+ 27,
+ " protected\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "trace",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/prometheus-client-mmap-0.9.8/lib/prometheus/client/rack/collector.rb",
+ "inApp": false,
+ "lineNo": 61,
+ "module": null,
+ "filename": "prometheus/client/rack/collector.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 58,
+ "\n"
+ ],
+ [
+ 59,
+ " def trace(env)\n"
+ ],
+ [
+ 60,
+ " start = Time.now\n"
+ ],
+ [
+ 61,
+ " yield.tap do |response|\n"
+ ],
+ [
+ 62,
+ " duration = (Time.now - start).to_f\n"
+ ],
+ [
+ 63,
+ " record(labels(env, response), duration)\n"
+ ],
+ [
+ 64,
+ " end\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "block in call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/prometheus-client-mmap-0.9.8/lib/prometheus/client/rack/collector.rb",
+ "inApp": false,
+ "lineNo": 24,
+ "module": null,
+ "filename": "prometheus/client/rack/collector.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 21,
+ " end\n"
+ ],
+ [
+ 22,
+ "\n"
+ ],
+ [
+ 23,
+ " def call(env) # :nodoc:\n"
+ ],
+ [
+ 24,
+ " trace(env) { @app.call(env) }\n"
+ ],
+ [
+ 25,
+ " end\n"
+ ],
+ [
+ 26,
+ "\n"
+ ],
+ [
+ 27,
+ " protected\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/prometheus-client-mmap-0.9.8/lib/prometheus/client/rack/exporter.rb",
+ "inApp": false,
+ "lineNo": 29,
+ "module": null,
+ "filename": "prometheus/client/rack/exporter.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 26,
+ " format = negotiate(env['HTTP_ACCEPT'], @acceptable)\n"
+ ],
+ [
+ 27,
+ " format ? respond_with(format) : not_acceptable(FORMATS)\n"
+ ],
+ [
+ 28,
+ " else\n"
+ ],
+ [
+ 29,
+ " @app.call(env)\n"
+ ],
+ [
+ 30,
+ " end\n"
+ ],
+ [
+ 31,
+ " end\n"
+ ],
+ [
+ 32,
+ "\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/railties-5.2.3/lib/rails/engine.rb",
+ "inApp": false,
+ "lineNo": 524,
+ "module": null,
+ "filename": "rails/engine.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 521,
+ " # Define the Rack API for this engine.\n"
+ ],
+ [
+ 522,
+ " def call(env)\n"
+ ],
+ [
+ 523,
+ " req = build_request env\n"
+ ],
+ [
+ 524,
+ " app.call req.env\n"
+ ],
+ [
+ 525,
+ " end\n"
+ ],
+ [
+ 526,
+ "\n"
+ ],
+ [
+ 527,
+ " # Defines additional Rack env configuration that is added on each call.\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/sentry-raven-2.12.3/lib/raven/integrations/rack.rb",
+ "inApp": false,
+ "lineNo": 51,
+ "module": null,
+ "filename": "raven/integrations/rack.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 48,
+ " Raven.context.transaction.push(env[\"PATH_INFO\"]) if env[\"PATH_INFO\"]\n"
+ ],
+ [
+ 49,
+ "\n"
+ ],
+ [
+ 50,
+ " begin\n"
+ ],
+ [
+ 51,
+ " response = @app.call(env)\n"
+ ],
+ [
+ 52,
+ " rescue Error\n"
+ ],
+ [
+ 53,
+ " raise # Don't capture Raven errors\n"
+ ],
+ [
+ 54,
+ " rescue Exception => e\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/rack-2.0.7/lib/rack/sendfile.rb",
+ "inApp": false,
+ "lineNo": 111,
+ "module": null,
+ "filename": "rack/sendfile.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 108,
+ " end\n"
+ ],
+ [
+ 109,
+ "\n"
+ ],
+ [
+ 110,
+ " def call(env)\n"
+ ],
+ [
+ 111,
+ " status, headers, body = @app.call(env)\n"
+ ],
+ [
+ 112,
+ " if body.respond_to?(:to_path)\n"
+ ],
+ [
+ 113,
+ " case type = variation(env)\n"
+ ],
+ [
+ 114,
+ " when 'X-Accel-Redirect'\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/actionpack-5.2.3/lib/action_dispatch/middleware/static.rb",
+ "inApp": false,
+ "lineNo": 127,
+ "module": null,
+ "filename": "action_dispatch/middleware/static.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 124,
+ " end\n"
+ ],
+ [
+ 125,
+ " end\n"
+ ],
+ [
+ 126,
+ "\n"
+ ],
+ [
+ 127,
+ " @app.call(req.env)\n"
+ ],
+ [
+ 128,
+ " end\n"
+ ],
+ [
+ 129,
+ " end\n"
+ ],
+ [
+ 130,
+ "end\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/actionpack-5.2.3/lib/action_dispatch/middleware/executor.rb",
+ "inApp": false,
+ "lineNo": 14,
+ "module": null,
+ "filename": "action_dispatch/middleware/executor.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 11,
+ " def call(env)\n"
+ ],
+ [
+ 12,
+ " state = @executor.run!\n"
+ ],
+ [
+ 13,
+ " begin\n"
+ ],
+ [
+ 14,
+ " response = @app.call(env)\n"
+ ],
+ [
+ 15,
+ " returned = response << ::Rack::BodyProxy.new(response.pop) { state.complete! }\n"
+ ],
+ [
+ 16,
+ " ensure\n"
+ ],
+ [
+ 17,
+ " state.complete! unless returned\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activesupport-5.2.3/lib/active_support/cache/strategy/local_cache_middleware.rb",
+ "inApp": false,
+ "lineNo": 29,
+ "module": null,
+ "filename": "active_support/cache/strategy/local_cache_middleware.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 26,
+ "\n"
+ ],
+ [
+ 27,
+ " def call(env)\n"
+ ],
+ [
+ 28,
+ " LocalCacheRegistry.set_cache_for(local_cache_key, LocalStore.new)\n"
+ ],
+ [
+ 29,
+ " response = @app.call(env)\n"
+ ],
+ [
+ 30,
+ " response[2] = ::Rack::BodyProxy.new(response[2]) do\n"
+ ],
+ [
+ 31,
+ " LocalCacheRegistry.set_cache_for(local_cache_key, nil)\n"
+ ],
+ [
+ 32,
+ " end\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/rack-2.0.7/lib/rack/runtime.rb",
+ "inApp": false,
+ "lineNo": 22,
+ "module": null,
+ "filename": "rack/runtime.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 19,
+ "\n"
+ ],
+ [
+ 20,
+ " def call(env)\n"
+ ],
+ [
+ 21,
+ " start_time = Utils.clock_time\n"
+ ],
+ [
+ 22,
+ " status, headers, body = @app.call(env)\n"
+ ],
+ [
+ 23,
+ " request_time = Utils.clock_time - start_time\n"
+ ],
+ [
+ 24,
+ "\n"
+ ],
+ [
+ 25,
+ " unless headers.has_key?(@header_name)\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/rack-2.0.7/lib/rack/method_override.rb",
+ "inApp": false,
+ "lineNo": 22,
+ "module": null,
+ "filename": "rack/method_override.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 19,
+ " end\n"
+ ],
+ [
+ 20,
+ " end\n"
+ ],
+ [
+ 21,
+ "\n"
+ ],
+ [
+ 22,
+ " @app.call(env)\n"
+ ],
+ [
+ 23,
+ " end\n"
+ ],
+ [
+ 24,
+ "\n"
+ ],
+ [
+ 25,
+ " def method_override(env)\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/actionpack-5.2.3/lib/action_dispatch/middleware/request_id.rb",
+ "inApp": false,
+ "lineNo": 27,
+ "module": null,
+ "filename": "action_dispatch/middleware/request_id.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 24,
+ " def call(env)\n"
+ ],
+ [
+ 25,
+ " req = ActionDispatch::Request.new env\n"
+ ],
+ [
+ 26,
+ " req.request_id = make_request_id(req.x_request_id)\n"
+ ],
+ [
+ 27,
+ " @app.call(env).tap { |_status, headers, _body| headers[X_REQUEST_ID] = req.request_id }\n"
+ ],
+ [
+ 28,
+ " end\n"
+ ],
+ [
+ 29,
+ "\n"
+ ],
+ [
+ 30,
+ " private\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/actionpack-5.2.3/lib/action_dispatch/middleware/remote_ip.rb",
+ "inApp": false,
+ "lineNo": 81,
+ "module": null,
+ "filename": "action_dispatch/middleware/remote_ip.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 78,
+ " def call(env)\n"
+ ],
+ [
+ 79,
+ " req = ActionDispatch::Request.new env\n"
+ ],
+ [
+ 80,
+ " req.remote_ip = GetIp.new(req, check_ip, proxies)\n"
+ ],
+ [
+ 81,
+ " @app.call(req.env)\n"
+ ],
+ [
+ 82,
+ " end\n"
+ ],
+ [
+ 83,
+ "\n"
+ ],
+ [
+ 84,
+ " # The GetIp class exists as a way to defer processing of the request data\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/sprockets-rails-3.2.1/lib/sprockets/rails/quiet_assets.rb",
+ "inApp": false,
+ "lineNo": 13,
+ "module": null,
+ "filename": "sprockets/rails/quiet_assets.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 10,
+ " if env['PATH_INFO'] =~ @assets_regex\n"
+ ],
+ [
+ 11,
+ " ::Rails.logger.silence { @app.call(env) }\n"
+ ],
+ [
+ 12,
+ " else\n"
+ ],
+ [
+ 13,
+ " @app.call(env)\n"
+ ],
+ [
+ 14,
+ " end\n"
+ ],
+ [
+ 15,
+ " end\n"
+ ],
+ [
+ 16,
+ " end\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/railties-5.2.3/lib/rails/rack/logger.rb",
+ "inApp": false,
+ "lineNo": 26,
+ "module": null,
+ "filename": "rails/rack/logger.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 23,
+ " request = ActionDispatch::Request.new(env)\n"
+ ],
+ [
+ 24,
+ "\n"
+ ],
+ [
+ 25,
+ " if logger.respond_to?(:tagged)\n"
+ ],
+ [
+ 26,
+ " logger.tagged(compute_tags(request)) { call_app(request, env) }\n"
+ ],
+ [
+ 27,
+ " else\n"
+ ],
+ [
+ 28,
+ " call_app(request, env)\n"
+ ],
+ [
+ 29,
+ " end\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "tagged",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activesupport-5.2.3/lib/active_support/tagged_logging.rb",
+ "inApp": false,
+ "lineNo": 71,
+ "module": null,
+ "filename": "active_support/tagged_logging.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 68,
+ " delegate :push_tags, :pop_tags, :clear_tags!, to: :formatter\n"
+ ],
+ [
+ 69,
+ "\n"
+ ],
+ [
+ 70,
+ " def tagged(*tags)\n"
+ ],
+ [
+ 71,
+ " formatter.tagged(*tags) { yield self }\n"
+ ],
+ [
+ 72,
+ " end\n"
+ ],
+ [
+ 73,
+ "\n"
+ ],
+ [
+ 74,
+ " def flush\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "tagged",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activesupport-5.2.3/lib/active_support/tagged_logging.rb",
+ "inApp": false,
+ "lineNo": 28,
+ "module": null,
+ "filename": "active_support/tagged_logging.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 25,
+ "\n"
+ ],
+ [
+ 26,
+ " def tagged(*tags)\n"
+ ],
+ [
+ 27,
+ " new_tags = push_tags(*tags)\n"
+ ],
+ [
+ 28,
+ " yield self\n"
+ ],
+ [
+ 29,
+ " ensure\n"
+ ],
+ [
+ 30,
+ " pop_tags(new_tags.size)\n"
+ ],
+ [
+ 31,
+ " end\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "block in tagged",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activesupport-5.2.3/lib/active_support/tagged_logging.rb",
+ "inApp": false,
+ "lineNo": 71,
+ "module": null,
+ "filename": "active_support/tagged_logging.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 68,
+ " delegate :push_tags, :pop_tags, :clear_tags!, to: :formatter\n"
+ ],
+ [
+ 69,
+ "\n"
+ ],
+ [
+ 70,
+ " def tagged(*tags)\n"
+ ],
+ [
+ 71,
+ " formatter.tagged(*tags) { yield self }\n"
+ ],
+ [
+ 72,
+ " end\n"
+ ],
+ [
+ 73,
+ "\n"
+ ],
+ [
+ 74,
+ " def flush\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "block in call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/railties-5.2.3/lib/rails/rack/logger.rb",
+ "inApp": false,
+ "lineNo": 26,
+ "module": null,
+ "filename": "rails/rack/logger.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 23,
+ " request = ActionDispatch::Request.new(env)\n"
+ ],
+ [
+ 24,
+ "\n"
+ ],
+ [
+ 25,
+ " if logger.respond_to?(:tagged)\n"
+ ],
+ [
+ 26,
+ " logger.tagged(compute_tags(request)) { call_app(request, env) }\n"
+ ],
+ [
+ 27,
+ " else\n"
+ ],
+ [
+ 28,
+ " call_app(request, env)\n"
+ ],
+ [
+ 29,
+ " end\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call_app",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/railties-5.2.3/lib/rails/rack/logger.rb",
+ "inApp": false,
+ "lineNo": 38,
+ "module": null,
+ "filename": "rails/rack/logger.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 38,
+ " status, headers, body = @app.call(env)\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/actionpack-5.2.3/lib/action_dispatch/middleware/show_exceptions.rb",
+ "inApp": false,
+ "lineNo": 33,
+ "module": null,
+ "filename": "action_dispatch/middleware/show_exceptions.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 33,
+ " @app.call(env)\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/web-console-3.7.0/lib/web_console/middleware.rb",
+ "inApp": false,
+ "lineNo": 20,
+ "module": null,
+ "filename": "web_console/middleware.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 20,
+ " app_exception = catch :app_exception do\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "catch",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/web-console-3.7.0/lib/web_console/middleware.rb",
+ "inApp": false,
+ "lineNo": 20,
+ "module": null,
+ "filename": "web_console/middleware.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 17,
+ " end\n"
+ ],
+ [
+ 18,
+ "\n"
+ ],
+ [
+ 19,
+ " def call(env)\n"
+ ],
+ [
+ 20,
+ " app_exception = catch :app_exception do\n"
+ ],
+ [
+ 21,
+ " request = create_regular_or_whiny_request(env)\n"
+ ],
+ [
+ 22,
+ " return call_app(env) unless request.from_whitelisted_ip?\n"
+ ],
+ [
+ 23,
+ "\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "block in call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/web-console-3.7.0/lib/web_console/middleware.rb",
+ "inApp": false,
+ "lineNo": 30,
+ "module": null,
+ "filename": "web_console/middleware.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 27,
+ " return change_stack_trace(id, request)\n"
+ ],
+ [
+ 28,
+ " end\n"
+ ],
+ [
+ 29,
+ "\n"
+ ],
+ [
+ 30,
+ " status, headers, body = call_app(env)\n"
+ ],
+ [
+ 31,
+ "\n"
+ ],
+ [
+ 32,
+ " if (session = Session.from(Thread.current)) && acceptable_content_type?(headers)\n"
+ ],
+ [
+ 33,
+ " headers[\"X-Web-Console-Session-Id\"] = session.id\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call_app",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/web-console-3.7.0/lib/web_console/middleware.rb",
+ "inApp": false,
+ "lineNo": 135,
+ "module": null,
+ "filename": "web_console/middleware.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 132,
+ " end\n"
+ ],
+ [
+ 133,
+ "\n"
+ ],
+ [
+ 134,
+ " def call_app(env)\n"
+ ],
+ [
+ 135,
+ " @app.call(env)\n"
+ ],
+ [
+ 136,
+ " rescue => e\n"
+ ],
+ [
+ 137,
+ " throw :app_exception, e\n"
+ ],
+ [
+ 138,
+ " end\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/actionpack-5.2.3/lib/action_dispatch/middleware/debug_exceptions.rb",
+ "inApp": false,
+ "lineNo": 61,
+ "module": null,
+ "filename": "action_dispatch/middleware/debug_exceptions.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 58,
+ "\n"
+ ],
+ [
+ 59,
+ " def call(env)\n"
+ ],
+ [
+ 60,
+ " request = ActionDispatch::Request.new env\n"
+ ],
+ [
+ 61,
+ " _, headers, body = response = @app.call(env)\n"
+ ],
+ [
+ 62,
+ "\n"
+ ],
+ [
+ 63,
+ " if headers[\"X-Cascade\"] == \"pass\"\n"
+ ],
+ [
+ 64,
+ " body.close if body.respond_to?(:close)\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/actionpack-5.2.3/lib/action_dispatch/middleware/executor.rb",
+ "inApp": false,
+ "lineNo": 14,
+ "module": null,
+ "filename": "action_dispatch/middleware/executor.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 11,
+ " def call(env)\n"
+ ],
+ [
+ 12,
+ " state = @executor.run!\n"
+ ],
+ [
+ 13,
+ " begin\n"
+ ],
+ [
+ 14,
+ " response = @app.call(env)\n"
+ ],
+ [
+ 15,
+ " returned = response << ::Rack::BodyProxy.new(response.pop) { state.complete! }\n"
+ ],
+ [
+ 16,
+ " ensure\n"
+ ],
+ [
+ 17,
+ " state.complete! unless returned\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/actionpack-5.2.3/lib/action_dispatch/middleware/callbacks.rb",
+ "inApp": false,
+ "lineNo": 26,
+ "module": null,
+ "filename": "action_dispatch/middleware/callbacks.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 23,
+ "\n"
+ ],
+ [
+ 24,
+ " def call(env)\n"
+ ],
+ [
+ 25,
+ " error = nil\n"
+ ],
+ [
+ 26,
+ " result = run_callbacks :call do\n"
+ ],
+ [
+ 27,
+ " begin\n"
+ ],
+ [
+ 28,
+ " @app.call(env)\n"
+ ],
+ [
+ 29,
+ " rescue => error\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "run_callbacks",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activesupport-5.2.3/lib/active_support/callbacks.rb",
+ "inApp": false,
+ "lineNo": 98,
+ "module": null,
+ "filename": "active_support/callbacks.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 95,
+ " callbacks = __callbacks[kind.to_sym]\n"
+ ],
+ [
+ 96,
+ "\n"
+ ],
+ [
+ 97,
+ " if callbacks.empty?\n"
+ ],
+ [
+ 98,
+ " yield if block_given?\n"
+ ],
+ [
+ 99,
+ " else\n"
+ ],
+ [
+ 100,
+ " env = Filters::Environment.new(self, false, nil)\n"
+ ],
+ [
+ 101,
+ " next_sequence = callbacks.compile\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "block in call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/actionpack-5.2.3/lib/action_dispatch/middleware/callbacks.rb",
+ "inApp": false,
+ "lineNo": 28,
+ "module": null,
+ "filename": "action_dispatch/middleware/callbacks.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 25,
+ " error = nil\n"
+ ],
+ [
+ 26,
+ " result = run_callbacks :call do\n"
+ ],
+ [
+ 27,
+ " begin\n"
+ ],
+ [
+ 28,
+ " @app.call(env)\n"
+ ],
+ [
+ 29,
+ " rescue => error\n"
+ ],
+ [
+ 30,
+ " end\n"
+ ],
+ [
+ 31,
+ " end\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activerecord-5.2.3/lib/active_record/migration.rb",
+ "inApp": false,
+ "lineNo": 554,
+ "module": null,
+ "filename": "active_record/migration.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 551,
+ " end\n"
+ ],
+ [
+ 552,
+ "\n"
+ ],
+ [
+ 553,
+ " def call(env)\n"
+ ],
+ [
+ 554,
+ " mtime = ActiveRecord::Base.connection.migration_context.last_migration.mtime.to_i\n"
+ ],
+ [
+ 555,
+ " if @last_check < mtime\n"
+ ],
+ [
+ 556,
+ " ActiveRecord::Migration.check_pending!(connection)\n"
+ ],
+ [
+ 557,
+ " @last_check = mtime\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "connection",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activerecord-5.2.3/lib/active_record/connection_handling.rb",
+ "inApp": false,
+ "lineNo": 90,
+ "module": null,
+ "filename": "active_record/connection_handling.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 87,
+ " # also be used to \"borrow\" the connection to do database work unrelated\n"
+ ],
+ [
+ 88,
+ " # to any of the specific Active Records.\n"
+ ],
+ [
+ 89,
+ " def connection\n"
+ ],
+ [
+ 90,
+ " retrieve_connection\n"
+ ],
+ [
+ 91,
+ " end\n"
+ ],
+ [
+ 92,
+ "\n"
+ ],
+ [
+ 93,
+ " attr_writer :connection_specification_name\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "retrieve_connection",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activerecord-5.2.3/lib/active_record/connection_handling.rb",
+ "inApp": false,
+ "lineNo": 118,
+ "module": null,
+ "filename": "active_record/connection_handling.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 115,
+ " end\n"
+ ],
+ [
+ 116,
+ "\n"
+ ],
+ [
+ 117,
+ " def retrieve_connection\n"
+ ],
+ [
+ 118,
+ " connection_handler.retrieve_connection(connection_specification_name)\n"
+ ],
+ [
+ 119,
+ " end\n"
+ ],
+ [
+ 120,
+ "\n"
+ ],
+ [
+ 121,
+ " # Returns +true+ if Active Record is connected.\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "retrieve_connection",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activerecord-5.2.3/lib/active_record/connection_adapters/abstract/connection_pool.rb",
+ "inApp": false,
+ "lineNo": 1014,
+ "module": null,
+ "filename": "active_record/connection_adapters/abstract/connection_pool.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 1011,
+ " def retrieve_connection(spec_name) #:nodoc:\n"
+ ],
+ [
+ 1012,
+ " pool = retrieve_connection_pool(spec_name)\n"
+ ],
+ [
+ 1013,
+ " raise ConnectionNotEstablished, \"No connection pool with '#{spec_name}' found.\" unless pool\n"
+ ],
+ [
+ 1014,
+ " pool.connection\n"
+ ],
+ [
+ 1015,
+ " end\n"
+ ],
+ [
+ 1016,
+ "\n"
+ ],
+ [
+ 1017,
+ " # Returns true if a connection that's accessible to this class has\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "connection",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activerecord-5.2.3/lib/active_record/connection_adapters/abstract/connection_pool.rb",
+ "inApp": false,
+ "lineNo": 382,
+ "module": null,
+ "filename": "active_record/connection_adapters/abstract/connection_pool.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 379,
+ " # #connection can be called any number of times; the connection is\n"
+ ],
+ [
+ 380,
+ " # held in a cache keyed by a thread.\n"
+ ],
+ [
+ 381,
+ " def connection\n"
+ ],
+ [
+ 382,
+ " @thread_cached_conns[connection_cache_key(@lock_thread || Thread.current)] ||= checkout\n"
+ ],
+ [
+ 383,
+ " end\n"
+ ],
+ [
+ 384,
+ "\n"
+ ],
+ [
+ 385,
+ " # Returns true if there is an open connection being used for the current thread.\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "checkout",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activerecord-5.2.3/lib/active_record/connection_adapters/abstract/connection_pool.rb",
+ "inApp": false,
+ "lineNo": 523,
+ "module": null,
+ "filename": "active_record/connection_adapters/abstract/connection_pool.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 520,
+ " # Raises:\n"
+ ],
+ [
+ 521,
+ " # - ActiveRecord::ConnectionTimeoutError no connection can be obtained from the pool.\n"
+ ],
+ [
+ 522,
+ " def checkout(checkout_timeout = @checkout_timeout)\n"
+ ],
+ [
+ 523,
+ " checkout_and_verify(acquire_connection(checkout_timeout))\n"
+ ],
+ [
+ 524,
+ " end\n"
+ ],
+ [
+ 525,
+ "\n"
+ ],
+ [
+ 526,
+ " # Check-in a database connection back into the pool, indicating that you\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "acquire_connection",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activerecord-5.2.3/lib/active_record/connection_adapters/abstract/connection_pool.rb",
+ "inApp": false,
+ "lineNo": 795,
+ "module": null,
+ "filename": "active_record/connection_adapters/abstract/connection_pool.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 792,
+ " # <tt>synchronize { conn.lease }</tt> in this method, but by leaving it to <tt>@available.poll</tt>\n"
+ ],
+ [
+ 793,
+ " # and +try_to_checkout_new_connection+ we can piggyback on +synchronize+ sections\n"
+ ],
+ [
+ 794,
+ " # of the said methods and avoid an additional +synchronize+ overhead.\n"
+ ],
+ [
+ 795,
+ " if conn = @available.poll || try_to_checkout_new_connection\n"
+ ],
+ [
+ 796,
+ " conn\n"
+ ],
+ [
+ 797,
+ " else\n"
+ ],
+ [
+ 798,
+ " reap\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "try_to_checkout_new_connection",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activerecord-5.2.3/lib/active_record/connection_adapters/abstract/connection_pool.rb",
+ "inApp": false,
+ "lineNo": 834,
+ "module": null,
+ "filename": "active_record/connection_adapters/abstract/connection_pool.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 831,
+ " begin\n"
+ ],
+ [
+ 832,
+ " # if successfully incremented @now_connecting establish new connection\n"
+ ],
+ [
+ 833,
+ " # outside of synchronized section\n"
+ ],
+ [
+ 834,
+ " conn = checkout_new_connection\n"
+ ],
+ [
+ 835,
+ " ensure\n"
+ ],
+ [
+ 836,
+ " synchronize do\n"
+ ],
+ [
+ 837,
+ " if conn\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "checkout_new_connection",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activerecord-5.2.3/lib/active_record/connection_adapters/abstract/connection_pool.rb",
+ "inApp": false,
+ "lineNo": 855,
+ "module": null,
+ "filename": "active_record/connection_adapters/abstract/connection_pool.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 852,
+ "\n"
+ ],
+ [
+ 853,
+ " def checkout_new_connection\n"
+ ],
+ [
+ 854,
+ " raise ConnectionNotEstablished unless @automatic_reconnect\n"
+ ],
+ [
+ 855,
+ " new_connection\n"
+ ],
+ [
+ 856,
+ " end\n"
+ ],
+ [
+ 857,
+ "\n"
+ ],
+ [
+ 858,
+ " def checkout_and_verify(c)\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "new_connection",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activerecord-5.2.3/lib/active_record/connection_adapters/abstract/connection_pool.rb",
+ "inApp": false,
+ "lineNo": 811,
+ "module": null,
+ "filename": "active_record/connection_adapters/abstract/connection_pool.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 808,
+ " alias_method :release, :remove_connection_from_thread_cache\n"
+ ],
+ [
+ 809,
+ "\n"
+ ],
+ [
+ 810,
+ " def new_connection\n"
+ ],
+ [
+ 811,
+ " Base.send(spec.adapter_method, spec.config).tap do |conn|\n"
+ ],
+ [
+ 812,
+ " conn.schema_cache = schema_cache.dup if schema_cache\n"
+ ],
+ [
+ 813,
+ " end\n"
+ ],
+ [
+ 814,
+ " end\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "postgresql_connection",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activerecord-5.2.3/lib/active_record/connection_adapters/postgresql_adapter.rb",
+ "inApp": false,
+ "lineNo": 48,
+ "module": null,
+ "filename": "active_record/connection_adapters/postgresql_adapter.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 45,
+ "\n"
+ ],
+ [
+ 46,
+ " # The postgres drivers don't allow the creation of an unconnected PG::Connection object,\n"
+ ],
+ [
+ 47,
+ " # so just pass a nil connection object for the time being.\n"
+ ],
+ [
+ 48,
+ " ConnectionAdapters::PostgreSQLAdapter.new(nil, logger, conn_params, config)\n"
+ ],
+ [
+ 49,
+ " end\n"
+ ],
+ [
+ 50,
+ " end\n"
+ ],
+ [
+ 51,
+ "\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "new",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activerecord-5.2.3/lib/active_record/connection_adapters/postgresql_adapter.rb",
+ "inApp": false,
+ "lineNo": 48,
+ "module": null,
+ "filename": "active_record/connection_adapters/postgresql_adapter.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 45,
+ "\n"
+ ],
+ [
+ 46,
+ " # The postgres drivers don't allow the creation of an unconnected PG::Connection object,\n"
+ ],
+ [
+ 47,
+ " # so just pass a nil connection object for the time being.\n"
+ ],
+ [
+ 48,
+ " ConnectionAdapters::PostgreSQLAdapter.new(nil, logger, conn_params, config)\n"
+ ],
+ [
+ 49,
+ " end\n"
+ ],
+ [
+ 50,
+ " end\n"
+ ],
+ [
+ 51,
+ "\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "initialize",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activerecord-5.2.3/lib/active_record/connection_adapters/postgresql_adapter.rb",
+ "inApp": false,
+ "lineNo": 223,
+ "module": null,
+ "filename": "active_record/connection_adapters/postgresql_adapter.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 220,
+ " @local_tz = nil\n"
+ ],
+ [
+ 221,
+ " @max_identifier_length = nil\n"
+ ],
+ [
+ 222,
+ "\n"
+ ],
+ [
+ 223,
+ " connect\n"
+ ],
+ [
+ 224,
+ " add_pg_encoders\n"
+ ],
+ [
+ 225,
+ " @statements = StatementPool.new @connection,\n"
+ ],
+ [
+ 226,
+ " self.class.type_cast_config_to_integer(config[:statement_limit])\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "connect",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activerecord-5.2.3/lib/active_record/connection_adapters/postgresql_adapter.rb",
+ "inApp": false,
+ "lineNo": 692,
+ "module": null,
+ "filename": "active_record/connection_adapters/postgresql_adapter.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 689,
+ " # Connects to a PostgreSQL server and sets up the adapter depending on the\n"
+ ],
+ [
+ 690,
+ " # connected server's characteristics.\n"
+ ],
+ [
+ 691,
+ " def connect\n"
+ ],
+ [
+ 692,
+ " @connection = PG.connect(@connection_parameters)\n"
+ ],
+ [
+ 693,
+ " configure_connection\n"
+ ],
+ [
+ 694,
+ " rescue ::PG::Error => error\n"
+ ],
+ [
+ 695,
+ " if error.message.include?(\"does not exist\")\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "connect",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/pg-1.1.4/lib/pg.rb",
+ "inApp": false,
+ "lineNo": 56,
+ "module": null,
+ "filename": "pg.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 53,
+ "\n"
+ ],
+ [
+ 54,
+ "\t### Convenience alias for PG::Connection.new.\n"
+ ],
+ [
+ 55,
+ "\tdef self::connect( *args )\n"
+ ],
+ [
+ 56,
+ "\t\treturn PG::Connection.new( *args )\n"
+ ],
+ [
+ 57,
+ "\tend\n"
+ ],
+ [
+ 58,
+ "\n"
+ ],
+ [
+ 59,
+ "\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "new",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/pg-1.1.4/lib/pg.rb",
+ "inApp": false,
+ "lineNo": 56,
+ "module": null,
+ "filename": "pg.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 53,
+ "\n"
+ ],
+ [
+ 54,
+ "\t### Convenience alias for PG::Connection.new.\n"
+ ],
+ [
+ 55,
+ "\tdef self::connect( *args )\n"
+ ],
+ [
+ 56,
+ "\t\treturn PG::Connection.new( *args )\n"
+ ],
+ [
+ 57,
+ "\tend\n"
+ ],
+ [
+ 58,
+ "\n"
+ ],
+ [
+ 59,
+ "\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "initialize",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/pg-1.1.4/lib/pg.rb",
+ "inApp": false,
+ "lineNo": 56,
+ "module": null,
+ "filename": "pg.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 53,
+ "\n"
+ ],
+ [
+ 54,
+ "\t### Convenience alias for PG::Connection.new.\n"
+ ],
+ [
+ 55,
+ "\tdef self::connect( *args )\n"
+ ],
+ [
+ 56,
+ "\t\treturn PG::Connection.new( *args )\n"
+ ],
+ [
+ 57,
+ "\tend\n"
+ ],
+ [
+ 58,
+ "\n"
+ ],
+ [
+ 59,
+ "\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ }
+ ],
+ "framesOmitted": null,
+ "registers": null,
+ "hasSystemFrames": false
+ },
+ "module": "PG",
+ "rawStacktrace": null,
+ "mechanism": null,
+ "threadId": null,
+ "value": "FATAL: database \"test_development\" does not exist\n",
+ "type": "PG::ConnectionBad"
+ },
+ {
+ "stacktrace": {
+ "frames": [
+ {
+ "function": "block in spawn_thread",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/puma-3.12.1/lib/puma/thread_pool.rb",
+ "inApp": false,
+ "lineNo": 135,
+ "module": null,
+ "filename": "puma/thread_pool.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 132,
+ " end\n"
+ ],
+ [
+ 133,
+ "\n"
+ ],
+ [
+ 134,
+ " begin\n"
+ ],
+ [
+ 135,
+ " block.call(work, *extra)\n"
+ ],
+ [
+ 136,
+ " rescue Exception => e\n"
+ ],
+ [
+ 137,
+ " STDERR.puts \"Error reached top of thread-pool: #{e.message} (#{e.class})\"\n"
+ ],
+ [
+ 138,
+ " end\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "block in run",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/puma-3.12.1/lib/puma/server.rb",
+ "inApp": false,
+ "lineNo": 334,
+ "module": null,
+ "filename": "puma/server.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 331,
+ " client.close\n"
+ ],
+ [
+ 332,
+ " else\n"
+ ],
+ [
+ 333,
+ " if process_now\n"
+ ],
+ [
+ 334,
+ " process_client client, buffer\n"
+ ],
+ [
+ 335,
+ " else\n"
+ ],
+ [
+ 336,
+ " client.set_timeout @first_data_timeout\n"
+ ],
+ [
+ 337,
+ " @reactor.add client\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "process_client",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/puma-3.12.1/lib/puma/server.rb",
+ "inApp": false,
+ "lineNo": 474,
+ "module": null,
+ "filename": "puma/server.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 471,
+ " close_socket = true\n"
+ ],
+ [
+ 472,
+ "\n"
+ ],
+ [
+ 473,
+ " while true\n"
+ ],
+ [
+ 474,
+ " case handle_request(client, buffer)\n"
+ ],
+ [
+ 475,
+ " when false\n"
+ ],
+ [
+ 476,
+ " return\n"
+ ],
+ [
+ 477,
+ " when :async\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "handle_request",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/puma-3.12.1/lib/puma/server.rb",
+ "inApp": false,
+ "lineNo": 660,
+ "module": null,
+ "filename": "puma/server.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 657,
+ "\n"
+ ],
+ [
+ 658,
+ " begin\n"
+ ],
+ [
+ 659,
+ " begin\n"
+ ],
+ [
+ 660,
+ " status, headers, res_body = @app.call(env)\n"
+ ],
+ [
+ 661,
+ "\n"
+ ],
+ [
+ 662,
+ " return :async if req.hijacked\n"
+ ],
+ [
+ 663,
+ "\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/puma-3.12.1/lib/puma/configuration.rb",
+ "inApp": false,
+ "lineNo": 227,
+ "module": null,
+ "filename": "puma/configuration.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 224,
+ "\n"
+ ],
+ [
+ 225,
+ " def call(env)\n"
+ ],
+ [
+ 226,
+ " env[Const::PUMA_CONFIG] = @config\n"
+ ],
+ [
+ 227,
+ " @app.call(env)\n"
+ ],
+ [
+ 228,
+ " end\n"
+ ],
+ [
+ 229,
+ " end\n"
+ ],
+ [
+ 230,
+ "\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/prometheus-client-mmap-0.9.8/lib/prometheus/client/rack/collector.rb",
+ "inApp": false,
+ "lineNo": 24,
+ "module": null,
+ "filename": "prometheus/client/rack/collector.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 21,
+ " end\n"
+ ],
+ [
+ 22,
+ "\n"
+ ],
+ [
+ 23,
+ " def call(env) # :nodoc:\n"
+ ],
+ [
+ 24,
+ " trace(env) { @app.call(env) }\n"
+ ],
+ [
+ 25,
+ " end\n"
+ ],
+ [
+ 26,
+ "\n"
+ ],
+ [
+ 27,
+ " protected\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "trace",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/prometheus-client-mmap-0.9.8/lib/prometheus/client/rack/collector.rb",
+ "inApp": false,
+ "lineNo": 61,
+ "module": null,
+ "filename": "prometheus/client/rack/collector.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 58,
+ "\n"
+ ],
+ [
+ 59,
+ " def trace(env)\n"
+ ],
+ [
+ 60,
+ " start = Time.now\n"
+ ],
+ [
+ 61,
+ " yield.tap do |response|\n"
+ ],
+ [
+ 62,
+ " duration = (Time.now - start).to_f\n"
+ ],
+ [
+ 63,
+ " record(labels(env, response), duration)\n"
+ ],
+ [
+ 64,
+ " end\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "block in call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/prometheus-client-mmap-0.9.8/lib/prometheus/client/rack/collector.rb",
+ "inApp": false,
+ "lineNo": 24,
+ "module": null,
+ "filename": "prometheus/client/rack/collector.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 21,
+ " end\n"
+ ],
+ [
+ 22,
+ "\n"
+ ],
+ [
+ 23,
+ " def call(env) # :nodoc:\n"
+ ],
+ [
+ 24,
+ " trace(env) { @app.call(env) }\n"
+ ],
+ [
+ 25,
+ " end\n"
+ ],
+ [
+ 26,
+ "\n"
+ ],
+ [
+ 27,
+ " protected\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/prometheus-client-mmap-0.9.8/lib/prometheus/client/rack/exporter.rb",
+ "inApp": false,
+ "lineNo": 29,
+ "module": null,
+ "filename": "prometheus/client/rack/exporter.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 26,
+ " format = negotiate(env['HTTP_ACCEPT'], @acceptable)\n"
+ ],
+ [
+ 27,
+ " format ? respond_with(format) : not_acceptable(FORMATS)\n"
+ ],
+ [
+ 28,
+ " else\n"
+ ],
+ [
+ 29,
+ " @app.call(env)\n"
+ ],
+ [
+ 30,
+ " end\n"
+ ],
+ [
+ 31,
+ " end\n"
+ ],
+ [
+ 32,
+ "\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/railties-5.2.3/lib/rails/engine.rb",
+ "inApp": false,
+ "lineNo": 524,
+ "module": null,
+ "filename": "rails/engine.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 521,
+ " # Define the Rack API for this engine.\n"
+ ],
+ [
+ 522,
+ " def call(env)\n"
+ ],
+ [
+ 523,
+ " req = build_request env\n"
+ ],
+ [
+ 524,
+ " app.call req.env\n"
+ ],
+ [
+ 525,
+ " end\n"
+ ],
+ [
+ 526,
+ "\n"
+ ],
+ [
+ 527,
+ " # Defines additional Rack env configuration that is added on each call.\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/sentry-raven-2.12.3/lib/raven/integrations/rack.rb",
+ "inApp": false,
+ "lineNo": 51,
+ "module": null,
+ "filename": "raven/integrations/rack.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 48,
+ " Raven.context.transaction.push(env[\"PATH_INFO\"]) if env[\"PATH_INFO\"]\n"
+ ],
+ [
+ 49,
+ "\n"
+ ],
+ [
+ 50,
+ " begin\n"
+ ],
+ [
+ 51,
+ " response = @app.call(env)\n"
+ ],
+ [
+ 52,
+ " rescue Error\n"
+ ],
+ [
+ 53,
+ " raise # Don't capture Raven errors\n"
+ ],
+ [
+ 54,
+ " rescue Exception => e\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/rack-2.0.7/lib/rack/sendfile.rb",
+ "inApp": false,
+ "lineNo": 111,
+ "module": null,
+ "filename": "rack/sendfile.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 108,
+ " end\n"
+ ],
+ [
+ 109,
+ "\n"
+ ],
+ [
+ 110,
+ " def call(env)\n"
+ ],
+ [
+ 111,
+ " status, headers, body = @app.call(env)\n"
+ ],
+ [
+ 112,
+ " if body.respond_to?(:to_path)\n"
+ ],
+ [
+ 113,
+ " case type = variation(env)\n"
+ ],
+ [
+ 114,
+ " when 'X-Accel-Redirect'\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/actionpack-5.2.3/lib/action_dispatch/middleware/static.rb",
+ "inApp": false,
+ "lineNo": 127,
+ "module": null,
+ "filename": "action_dispatch/middleware/static.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 124,
+ " end\n"
+ ],
+ [
+ 125,
+ " end\n"
+ ],
+ [
+ 126,
+ "\n"
+ ],
+ [
+ 127,
+ " @app.call(req.env)\n"
+ ],
+ [
+ 128,
+ " end\n"
+ ],
+ [
+ 129,
+ " end\n"
+ ],
+ [
+ 130,
+ "end\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/actionpack-5.2.3/lib/action_dispatch/middleware/executor.rb",
+ "inApp": false,
+ "lineNo": 14,
+ "module": null,
+ "filename": "action_dispatch/middleware/executor.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 11,
+ " def call(env)\n"
+ ],
+ [
+ 12,
+ " state = @executor.run!\n"
+ ],
+ [
+ 13,
+ " begin\n"
+ ],
+ [
+ 14,
+ " response = @app.call(env)\n"
+ ],
+ [
+ 15,
+ " returned = response << ::Rack::BodyProxy.new(response.pop) { state.complete! }\n"
+ ],
+ [
+ 16,
+ " ensure\n"
+ ],
+ [
+ 17,
+ " state.complete! unless returned\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activesupport-5.2.3/lib/active_support/cache/strategy/local_cache_middleware.rb",
+ "inApp": false,
+ "lineNo": 29,
+ "module": null,
+ "filename": "active_support/cache/strategy/local_cache_middleware.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 26,
+ "\n"
+ ],
+ [
+ 27,
+ " def call(env)\n"
+ ],
+ [
+ 28,
+ " LocalCacheRegistry.set_cache_for(local_cache_key, LocalStore.new)\n"
+ ],
+ [
+ 29,
+ " response = @app.call(env)\n"
+ ],
+ [
+ 30,
+ " response[2] = ::Rack::BodyProxy.new(response[2]) do\n"
+ ],
+ [
+ 31,
+ " LocalCacheRegistry.set_cache_for(local_cache_key, nil)\n"
+ ],
+ [
+ 32,
+ " end\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/rack-2.0.7/lib/rack/runtime.rb",
+ "inApp": false,
+ "lineNo": 22,
+ "module": null,
+ "filename": "rack/runtime.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 19,
+ "\n"
+ ],
+ [
+ 20,
+ " def call(env)\n"
+ ],
+ [
+ 21,
+ " start_time = Utils.clock_time\n"
+ ],
+ [
+ 22,
+ " status, headers, body = @app.call(env)\n"
+ ],
+ [
+ 23,
+ " request_time = Utils.clock_time - start_time\n"
+ ],
+ [
+ 24,
+ "\n"
+ ],
+ [
+ 25,
+ " unless headers.has_key?(@header_name)\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/rack-2.0.7/lib/rack/method_override.rb",
+ "inApp": false,
+ "lineNo": 22,
+ "module": null,
+ "filename": "rack/method_override.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 19,
+ " end\n"
+ ],
+ [
+ 20,
+ " end\n"
+ ],
+ [
+ 21,
+ "\n"
+ ],
+ [
+ 22,
+ " @app.call(env)\n"
+ ],
+ [
+ 23,
+ " end\n"
+ ],
+ [
+ 24,
+ "\n"
+ ],
+ [
+ 25,
+ " def method_override(env)\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/actionpack-5.2.3/lib/action_dispatch/middleware/request_id.rb",
+ "inApp": false,
+ "lineNo": 27,
+ "module": null,
+ "filename": "action_dispatch/middleware/request_id.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 24,
+ " def call(env)\n"
+ ],
+ [
+ 25,
+ " req = ActionDispatch::Request.new env\n"
+ ],
+ [
+ 26,
+ " req.request_id = make_request_id(req.x_request_id)\n"
+ ],
+ [
+ 27,
+ " @app.call(env).tap { |_status, headers, _body| headers[X_REQUEST_ID] = req.request_id }\n"
+ ],
+ [
+ 28,
+ " end\n"
+ ],
+ [
+ 29,
+ "\n"
+ ],
+ [
+ 30,
+ " private\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/actionpack-5.2.3/lib/action_dispatch/middleware/remote_ip.rb",
+ "inApp": false,
+ "lineNo": 81,
+ "module": null,
+ "filename": "action_dispatch/middleware/remote_ip.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 78,
+ " def call(env)\n"
+ ],
+ [
+ 79,
+ " req = ActionDispatch::Request.new env\n"
+ ],
+ [
+ 80,
+ " req.remote_ip = GetIp.new(req, check_ip, proxies)\n"
+ ],
+ [
+ 81,
+ " @app.call(req.env)\n"
+ ],
+ [
+ 82,
+ " end\n"
+ ],
+ [
+ 83,
+ "\n"
+ ],
+ [
+ 84,
+ " # The GetIp class exists as a way to defer processing of the request data\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/sprockets-rails-3.2.1/lib/sprockets/rails/quiet_assets.rb",
+ "inApp": false,
+ "lineNo": 13,
+ "module": null,
+ "filename": "sprockets/rails/quiet_assets.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 10,
+ " if env['PATH_INFO'] =~ @assets_regex\n"
+ ],
+ [
+ 11,
+ " ::Rails.logger.silence { @app.call(env) }\n"
+ ],
+ [
+ 12,
+ " else\n"
+ ],
+ [
+ 13,
+ " @app.call(env)\n"
+ ],
+ [
+ 14,
+ " end\n"
+ ],
+ [
+ 15,
+ " end\n"
+ ],
+ [
+ 16,
+ " end\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/railties-5.2.3/lib/rails/rack/logger.rb",
+ "inApp": false,
+ "lineNo": 26,
+ "module": null,
+ "filename": "rails/rack/logger.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 23,
+ " request = ActionDispatch::Request.new(env)\n"
+ ],
+ [
+ 24,
+ "\n"
+ ],
+ [
+ 25,
+ " if logger.respond_to?(:tagged)\n"
+ ],
+ [
+ 26,
+ " logger.tagged(compute_tags(request)) { call_app(request, env) }\n"
+ ],
+ [
+ 27,
+ " else\n"
+ ],
+ [
+ 28,
+ " call_app(request, env)\n"
+ ],
+ [
+ 29,
+ " end\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "tagged",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activesupport-5.2.3/lib/active_support/tagged_logging.rb",
+ "inApp": false,
+ "lineNo": 71,
+ "module": null,
+ "filename": "active_support/tagged_logging.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 68,
+ " delegate :push_tags, :pop_tags, :clear_tags!, to: :formatter\n"
+ ],
+ [
+ 69,
+ "\n"
+ ],
+ [
+ 70,
+ " def tagged(*tags)\n"
+ ],
+ [
+ 71,
+ " formatter.tagged(*tags) { yield self }\n"
+ ],
+ [
+ 72,
+ " end\n"
+ ],
+ [
+ 73,
+ "\n"
+ ],
+ [
+ 74,
+ " def flush\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "tagged",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activesupport-5.2.3/lib/active_support/tagged_logging.rb",
+ "inApp": false,
+ "lineNo": 28,
+ "module": null,
+ "filename": "active_support/tagged_logging.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 25,
+ "\n"
+ ],
+ [
+ 26,
+ " def tagged(*tags)\n"
+ ],
+ [
+ 27,
+ " new_tags = push_tags(*tags)\n"
+ ],
+ [
+ 28,
+ " yield self\n"
+ ],
+ [
+ 29,
+ " ensure\n"
+ ],
+ [
+ 30,
+ " pop_tags(new_tags.size)\n"
+ ],
+ [
+ 31,
+ " end\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "block in tagged",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activesupport-5.2.3/lib/active_support/tagged_logging.rb",
+ "inApp": false,
+ "lineNo": 71,
+ "module": null,
+ "filename": "active_support/tagged_logging.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 68,
+ " delegate :push_tags, :pop_tags, :clear_tags!, to: :formatter\n"
+ ],
+ [
+ 69,
+ "\n"
+ ],
+ [
+ 70,
+ " def tagged(*tags)\n"
+ ],
+ [
+ 71,
+ " formatter.tagged(*tags) { yield self }\n"
+ ],
+ [
+ 72,
+ " end\n"
+ ],
+ [
+ 73,
+ "\n"
+ ],
+ [
+ 74,
+ " def flush\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "block in call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/railties-5.2.3/lib/rails/rack/logger.rb",
+ "inApp": false,
+ "lineNo": 26,
+ "module": null,
+ "filename": "rails/rack/logger.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 23,
+ " request = ActionDispatch::Request.new(env)\n"
+ ],
+ [
+ 24,
+ "\n"
+ ],
+ [
+ 25,
+ " if logger.respond_to?(:tagged)\n"
+ ],
+ [
+ 26,
+ " logger.tagged(compute_tags(request)) { call_app(request, env) }\n"
+ ],
+ [
+ 27,
+ " else\n"
+ ],
+ [
+ 28,
+ " call_app(request, env)\n"
+ ],
+ [
+ 29,
+ " end\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call_app",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/railties-5.2.3/lib/rails/rack/logger.rb",
+ "inApp": false,
+ "lineNo": 38,
+ "module": null,
+ "filename": "rails/rack/logger.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 38,
+ " status, headers, body = @app.call(env)\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/actionpack-5.2.3/lib/action_dispatch/middleware/show_exceptions.rb",
+ "inApp": false,
+ "lineNo": 33,
+ "module": null,
+ "filename": "action_dispatch/middleware/show_exceptions.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 30,
+ "\n"
+ ],
+ [
+ 31,
+ " def call(env)\n"
+ ],
+ [
+ 32,
+ " request = ActionDispatch::Request.new env\n"
+ ],
+ [
+ 33,
+ " @app.call(env)\n"
+ ],
+ [
+ 34,
+ " rescue Exception => exception\n"
+ ],
+ [
+ 35,
+ " if request.show_exceptions?\n"
+ ],
+ [
+ 36,
+ " render_exception(request, exception)\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/web-console-3.7.0/lib/web_console/middleware.rb",
+ "inApp": false,
+ "lineNo": 20,
+ "module": null,
+ "filename": "web_console/middleware.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 17,
+ " end\n"
+ ],
+ [
+ 18,
+ "\n"
+ ],
+ [
+ 19,
+ " def call(env)\n"
+ ],
+ [
+ 20,
+ " app_exception = catch :app_exception do\n"
+ ],
+ [
+ 21,
+ " request = create_regular_or_whiny_request(env)\n"
+ ],
+ [
+ 22,
+ " return call_app(env) unless request.from_whitelisted_ip?\n"
+ ],
+ [
+ 23,
+ "\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "catch",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/web-console-3.7.0/lib/web_console/middleware.rb",
+ "inApp": false,
+ "lineNo": 20,
+ "module": null,
+ "filename": "web_console/middleware.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 17,
+ " end\n"
+ ],
+ [
+ 18,
+ "\n"
+ ],
+ [
+ 19,
+ " def call(env)\n"
+ ],
+ [
+ 20,
+ " app_exception = catch :app_exception do\n"
+ ],
+ [
+ 21,
+ " request = create_regular_or_whiny_request(env)\n"
+ ],
+ [
+ 22,
+ " return call_app(env) unless request.from_whitelisted_ip?\n"
+ ],
+ [
+ 23,
+ "\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "block in call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/web-console-3.7.0/lib/web_console/middleware.rb",
+ "inApp": false,
+ "lineNo": 30,
+ "module": null,
+ "filename": "web_console/middleware.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 27,
+ " return change_stack_trace(id, request)\n"
+ ],
+ [
+ 28,
+ " end\n"
+ ],
+ [
+ 29,
+ "\n"
+ ],
+ [
+ 30,
+ " status, headers, body = call_app(env)\n"
+ ],
+ [
+ 31,
+ "\n"
+ ],
+ [
+ 32,
+ " if (session = Session.from(Thread.current)) && acceptable_content_type?(headers)\n"
+ ],
+ [
+ 33,
+ " headers[\"X-Web-Console-Session-Id\"] = session.id\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call_app",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/web-console-3.7.0/lib/web_console/middleware.rb",
+ "inApp": false,
+ "lineNo": 135,
+ "module": null,
+ "filename": "web_console/middleware.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 132,
+ " end\n"
+ ],
+ [
+ 133,
+ "\n"
+ ],
+ [
+ 134,
+ " def call_app(env)\n"
+ ],
+ [
+ 135,
+ " @app.call(env)\n"
+ ],
+ [
+ 136,
+ " rescue => e\n"
+ ],
+ [
+ 137,
+ " throw :app_exception, e\n"
+ ],
+ [
+ 138,
+ " end\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/actionpack-5.2.3/lib/action_dispatch/middleware/debug_exceptions.rb",
+ "inApp": false,
+ "lineNo": 61,
+ "module": null,
+ "filename": "action_dispatch/middleware/debug_exceptions.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 58,
+ "\n"
+ ],
+ [
+ 59,
+ " def call(env)\n"
+ ],
+ [
+ 60,
+ " request = ActionDispatch::Request.new env\n"
+ ],
+ [
+ 61,
+ " _, headers, body = response = @app.call(env)\n"
+ ],
+ [
+ 62,
+ "\n"
+ ],
+ [
+ 63,
+ " if headers[\"X-Cascade\"] == \"pass\"\n"
+ ],
+ [
+ 64,
+ " body.close if body.respond_to?(:close)\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/actionpack-5.2.3/lib/action_dispatch/middleware/executor.rb",
+ "inApp": false,
+ "lineNo": 14,
+ "module": null,
+ "filename": "action_dispatch/middleware/executor.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 11,
+ " def call(env)\n"
+ ],
+ [
+ 12,
+ " state = @executor.run!\n"
+ ],
+ [
+ 13,
+ " begin\n"
+ ],
+ [
+ 14,
+ " response = @app.call(env)\n"
+ ],
+ [
+ 15,
+ " returned = response << ::Rack::BodyProxy.new(response.pop) { state.complete! }\n"
+ ],
+ [
+ 16,
+ " ensure\n"
+ ],
+ [
+ 17,
+ " state.complete! unless returned\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/actionpack-5.2.3/lib/action_dispatch/middleware/callbacks.rb",
+ "inApp": false,
+ "lineNo": 26,
+ "module": null,
+ "filename": "action_dispatch/middleware/callbacks.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 23,
+ "\n"
+ ],
+ [
+ 24,
+ " def call(env)\n"
+ ],
+ [
+ 25,
+ " error = nil\n"
+ ],
+ [
+ 26,
+ " result = run_callbacks :call do\n"
+ ],
+ [
+ 27,
+ " begin\n"
+ ],
+ [
+ 28,
+ " @app.call(env)\n"
+ ],
+ [
+ 29,
+ " rescue => error\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "run_callbacks",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activesupport-5.2.3/lib/active_support/callbacks.rb",
+ "inApp": false,
+ "lineNo": 98,
+ "module": null,
+ "filename": "active_support/callbacks.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 95,
+ " callbacks = __callbacks[kind.to_sym]\n"
+ ],
+ [
+ 96,
+ "\n"
+ ],
+ [
+ 97,
+ " if callbacks.empty?\n"
+ ],
+ [
+ 98,
+ " yield if block_given?\n"
+ ],
+ [
+ 99,
+ " else\n"
+ ],
+ [
+ 100,
+ " env = Filters::Environment.new(self, false, nil)\n"
+ ],
+ [
+ 101,
+ " next_sequence = callbacks.compile\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "block in call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/actionpack-5.2.3/lib/action_dispatch/middleware/callbacks.rb",
+ "inApp": false,
+ "lineNo": 28,
+ "module": null,
+ "filename": "action_dispatch/middleware/callbacks.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 25,
+ " error = nil\n"
+ ],
+ [
+ 26,
+ " result = run_callbacks :call do\n"
+ ],
+ [
+ 27,
+ " begin\n"
+ ],
+ [
+ 28,
+ " @app.call(env)\n"
+ ],
+ [
+ 29,
+ " rescue => error\n"
+ ],
+ [
+ 30,
+ " end\n"
+ ],
+ [
+ 31,
+ " end\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "call",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activerecord-5.2.3/lib/active_record/migration.rb",
+ "inApp": false,
+ "lineNo": 554,
+ "module": null,
+ "filename": "active_record/migration.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 551,
+ " end\n"
+ ],
+ [
+ 552,
+ "\n"
+ ],
+ [
+ 553,
+ " def call(env)\n"
+ ],
+ [
+ 554,
+ " mtime = ActiveRecord::Base.connection.migration_context.last_migration.mtime.to_i\n"
+ ],
+ [
+ 555,
+ " if @last_check < mtime\n"
+ ],
+ [
+ 556,
+ " ActiveRecord::Migration.check_pending!(connection)\n"
+ ],
+ [
+ 557,
+ " @last_check = mtime\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "connection",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activerecord-5.2.3/lib/active_record/connection_handling.rb",
+ "inApp": false,
+ "lineNo": 90,
+ "module": null,
+ "filename": "active_record/connection_handling.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 87,
+ " # also be used to \"borrow\" the connection to do database work unrelated\n"
+ ],
+ [
+ 88,
+ " # to any of the specific Active Records.\n"
+ ],
+ [
+ 89,
+ " def connection\n"
+ ],
+ [
+ 90,
+ " retrieve_connection\n"
+ ],
+ [
+ 91,
+ " end\n"
+ ],
+ [
+ 92,
+ "\n"
+ ],
+ [
+ 93,
+ " attr_writer :connection_specification_name\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "retrieve_connection",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activerecord-5.2.3/lib/active_record/connection_handling.rb",
+ "inApp": false,
+ "lineNo": 118,
+ "module": null,
+ "filename": "active_record/connection_handling.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 115,
+ " end\n"
+ ],
+ [
+ 116,
+ "\n"
+ ],
+ [
+ 117,
+ " def retrieve_connection\n"
+ ],
+ [
+ 118,
+ " connection_handler.retrieve_connection(connection_specification_name)\n"
+ ],
+ [
+ 119,
+ " end\n"
+ ],
+ [
+ 120,
+ "\n"
+ ],
+ [
+ 121,
+ " # Returns +true+ if Active Record is connected.\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "retrieve_connection",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activerecord-5.2.3/lib/active_record/connection_adapters/abstract/connection_pool.rb",
+ "inApp": false,
+ "lineNo": 1014,
+ "module": null,
+ "filename": "active_record/connection_adapters/abstract/connection_pool.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 1011,
+ " def retrieve_connection(spec_name) #:nodoc:\n"
+ ],
+ [
+ 1012,
+ " pool = retrieve_connection_pool(spec_name)\n"
+ ],
+ [
+ 1013,
+ " raise ConnectionNotEstablished, \"No connection pool with '#{spec_name}' found.\" unless pool\n"
+ ],
+ [
+ 1014,
+ " pool.connection\n"
+ ],
+ [
+ 1015,
+ " end\n"
+ ],
+ [
+ 1016,
+ "\n"
+ ],
+ [
+ 1017,
+ " # Returns true if a connection that's accessible to this class has\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "connection",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activerecord-5.2.3/lib/active_record/connection_adapters/abstract/connection_pool.rb",
+ "inApp": false,
+ "lineNo": 382,
+ "module": null,
+ "filename": "active_record/connection_adapters/abstract/connection_pool.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 379,
+ " # #connection can be called any number of times; the connection is\n"
+ ],
+ [
+ 380,
+ " # held in a cache keyed by a thread.\n"
+ ],
+ [
+ 381,
+ " def connection\n"
+ ],
+ [
+ 382,
+ " @thread_cached_conns[connection_cache_key(@lock_thread || Thread.current)] ||= checkout\n"
+ ],
+ [
+ 383,
+ " end\n"
+ ],
+ [
+ 384,
+ "\n"
+ ],
+ [
+ 385,
+ " # Returns true if there is an open connection being used for the current thread.\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "checkout",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activerecord-5.2.3/lib/active_record/connection_adapters/abstract/connection_pool.rb",
+ "inApp": false,
+ "lineNo": 523,
+ "module": null,
+ "filename": "active_record/connection_adapters/abstract/connection_pool.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 520,
+ " # Raises:\n"
+ ],
+ [
+ 521,
+ " # - ActiveRecord::ConnectionTimeoutError no connection can be obtained from the pool.\n"
+ ],
+ [
+ 522,
+ " def checkout(checkout_timeout = @checkout_timeout)\n"
+ ],
+ [
+ 523,
+ " checkout_and_verify(acquire_connection(checkout_timeout))\n"
+ ],
+ [
+ 524,
+ " end\n"
+ ],
+ [
+ 525,
+ "\n"
+ ],
+ [
+ 526,
+ " # Check-in a database connection back into the pool, indicating that you\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "acquire_connection",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activerecord-5.2.3/lib/active_record/connection_adapters/abstract/connection_pool.rb",
+ "inApp": false,
+ "lineNo": 795,
+ "module": null,
+ "filename": "active_record/connection_adapters/abstract/connection_pool.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 792,
+ " # <tt>synchronize { conn.lease }</tt> in this method, but by leaving it to <tt>@available.poll</tt>\n"
+ ],
+ [
+ 793,
+ " # and +try_to_checkout_new_connection+ we can piggyback on +synchronize+ sections\n"
+ ],
+ [
+ 794,
+ " # of the said methods and avoid an additional +synchronize+ overhead.\n"
+ ],
+ [
+ 795,
+ " if conn = @available.poll || try_to_checkout_new_connection\n"
+ ],
+ [
+ 796,
+ " conn\n"
+ ],
+ [
+ 797,
+ " else\n"
+ ],
+ [
+ 798,
+ " reap\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "try_to_checkout_new_connection",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activerecord-5.2.3/lib/active_record/connection_adapters/abstract/connection_pool.rb",
+ "inApp": false,
+ "lineNo": 834,
+ "module": null,
+ "filename": "active_record/connection_adapters/abstract/connection_pool.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 831,
+ " begin\n"
+ ],
+ [
+ 832,
+ " # if successfully incremented @now_connecting establish new connection\n"
+ ],
+ [
+ 833,
+ " # outside of synchronized section\n"
+ ],
+ [
+ 834,
+ " conn = checkout_new_connection\n"
+ ],
+ [
+ 835,
+ " ensure\n"
+ ],
+ [
+ 836,
+ " synchronize do\n"
+ ],
+ [
+ 837,
+ " if conn\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "checkout_new_connection",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activerecord-5.2.3/lib/active_record/connection_adapters/abstract/connection_pool.rb",
+ "inApp": false,
+ "lineNo": 855,
+ "module": null,
+ "filename": "active_record/connection_adapters/abstract/connection_pool.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 852,
+ "\n"
+ ],
+ [
+ 853,
+ " def checkout_new_connection\n"
+ ],
+ [
+ 854,
+ " raise ConnectionNotEstablished unless @automatic_reconnect\n"
+ ],
+ [
+ 855,
+ " new_connection\n"
+ ],
+ [
+ 856,
+ " end\n"
+ ],
+ [
+ 857,
+ "\n"
+ ],
+ [
+ 858,
+ " def checkout_and_verify(c)\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "new_connection",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activerecord-5.2.3/lib/active_record/connection_adapters/abstract/connection_pool.rb",
+ "inApp": false,
+ "lineNo": 811,
+ "module": null,
+ "filename": "active_record/connection_adapters/abstract/connection_pool.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 808,
+ " alias_method :release, :remove_connection_from_thread_cache\n"
+ ],
+ [
+ 809,
+ "\n"
+ ],
+ [
+ 810,
+ " def new_connection\n"
+ ],
+ [
+ 811,
+ " Base.send(spec.adapter_method, spec.config).tap do |conn|\n"
+ ],
+ [
+ 812,
+ " conn.schema_cache = schema_cache.dup if schema_cache\n"
+ ],
+ [
+ 813,
+ " end\n"
+ ],
+ [
+ 814,
+ " end\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "postgresql_connection",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activerecord-5.2.3/lib/active_record/connection_adapters/postgresql_adapter.rb",
+ "inApp": false,
+ "lineNo": 48,
+ "module": null,
+ "filename": "active_record/connection_adapters/postgresql_adapter.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 45,
+ "\n"
+ ],
+ [
+ 46,
+ " # The postgres drivers don't allow the creation of an unconnected PG::Connection object,\n"
+ ],
+ [
+ 47,
+ " # so just pass a nil connection object for the time being.\n"
+ ],
+ [
+ 48,
+ " ConnectionAdapters::PostgreSQLAdapter.new(nil, logger, conn_params, config)\n"
+ ],
+ [
+ 49,
+ " end\n"
+ ],
+ [
+ 50,
+ " end\n"
+ ],
+ [
+ 51,
+ "\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "new",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activerecord-5.2.3/lib/active_record/connection_adapters/postgresql_adapter.rb",
+ "inApp": false,
+ "lineNo": 48,
+ "module": null,
+ "filename": "active_record/connection_adapters/postgresql_adapter.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 45,
+ "\n"
+ ],
+ [
+ 46,
+ " # The postgres drivers don't allow the creation of an unconnected PG::Connection object,\n"
+ ],
+ [
+ 47,
+ " # so just pass a nil connection object for the time being.\n"
+ ],
+ [
+ 48,
+ " ConnectionAdapters::PostgreSQLAdapter.new(nil, logger, conn_params, config)\n"
+ ],
+ [
+ 49,
+ " end\n"
+ ],
+ [
+ 50,
+ " end\n"
+ ],
+ [
+ 51,
+ "\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "initialize",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activerecord-5.2.3/lib/active_record/connection_adapters/postgresql_adapter.rb",
+ "inApp": false,
+ "lineNo": 223,
+ "module": null,
+ "filename": "active_record/connection_adapters/postgresql_adapter.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 220,
+ " @local_tz = nil\n"
+ ],
+ [
+ 221,
+ " @max_identifier_length = nil\n"
+ ],
+ [
+ 222,
+ "\n"
+ ],
+ [
+ 223,
+ " connect\n"
+ ],
+ [
+ 224,
+ " add_pg_encoders\n"
+ ],
+ [
+ 225,
+ " @statements = StatementPool.new @connection,\n"
+ ],
+ [
+ 226,
+ " self.class.type_cast_config_to_integer(config[:statement_limit])\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "connect",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activerecord-5.2.3/lib/active_record/connection_adapters/postgresql_adapter.rb",
+ "inApp": false,
+ "lineNo": 691,
+ "module": null,
+ "filename": "active_record/connection_adapters/postgresql_adapter.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 688,
+ "\n"
+ ],
+ [
+ 689,
+ " # Connects to a PostgreSQL server and sets up the adapter depending on the\n"
+ ],
+ [
+ 690,
+ " # connected server's characteristics.\n"
+ ],
+ [
+ 691,
+ " def connect\n"
+ ],
+ [
+ 692,
+ " @connection = PG.connect(@connection_parameters)\n"
+ ],
+ [
+ 693,
+ " configure_connection\n"
+ ],
+ [
+ 694,
+ " rescue ::PG::Error => error\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ },
+ {
+ "function": "rescue in connect",
+ "errors": null,
+ "colNo": null,
+ "vars": null,
+ "package": null,
+ "absPath": "/Users/gitlab/.rvm/gems/ruby-2.6.5/gems/activerecord-5.2.3/lib/active_record/connection_adapters/postgresql_adapter.rb",
+ "inApp": false,
+ "lineNo": 696,
+ "module": null,
+ "filename": "active_record/connection_adapters/postgresql_adapter.rb",
+ "platform": null,
+ "instructionAddr": null,
+ "context": [
+ [
+ 693,
+ " configure_connection\n"
+ ],
+ [
+ 694,
+ " rescue ::PG::Error => error\n"
+ ],
+ [
+ 695,
+ " if error.message.include?(\"does not exist\")\n"
+ ],
+ [
+ 696,
+ " raise ActiveRecord::NoDatabaseError\n"
+ ],
+ [
+ 697,
+ " else\n"
+ ],
+ [
+ 698,
+ " raise\n"
+ ],
+ [
+ 699,
+ " end\n"
+ ]
+ ],
+ "symbolAddr": null,
+ "trust": null,
+ "symbol": null,
+ "rawFunction": null
+ }
+ ],
+ "framesOmitted": null,
+ "registers": null,
+ "hasSystemFrames": false
+ },
+ "module": "ActiveRecord",
+ "rawStacktrace": null,
+ "mechanism": null,
+ "threadId": null,
+ "value": "FATAL: database \"test_development\" does not exist\n",
+ "type": "ActiveRecord::NoDatabaseError"
+ }
+ ],
+ "excOmitted": null,
+ "hasSystemFrames": false
+ }
+ },
+ {
+ "type": "request",
+ "data": {
+ "fragment": null,
+ "cookies": [],
+ "inferredContentType": null,
+ "env": {
+ "SERVER_PORT": "3001",
+ "SERVER_NAME": "localhost",
+ "REMOTE_ADDR": "::1"
+ },
+ "headers": [
+ [
+ "Accept",
+ "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3"
+ ],
+ [
+ "Accept-Encoding",
+ "gzip, deflate, br"
+ ],
+ [
+ "Accept-Language",
+ "en-GB,en-US;q=0.9,en;q=0.8"
+ ],
+ [
+ "Cache-Control",
+ "max-age=0"
+ ],
+ [
+ "Connection",
+ "keep-alive"
+ ],
+ [
+ "Host",
+ "localhost:3001"
+ ],
+ [
+ "Sec-Fetch-Mode",
+ "navigate"
+ ],
+ [
+ "Sec-Fetch-Site",
+ "none"
+ ],
+ [
+ "Sec-Fetch-User",
+ "?1"
+ ],
+ [
+ "Upgrade-Insecure-Requests",
+ "1"
+ ],
+ [
+ "User-Agent",
+ "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Safari/537.36"
+ ]
+ ],
+ "url": "http://localhost:3001/",
+ "query": [],
+ "data": null,
+ "method": "GET"
+ }
+ }
+ ],
+ "packages": {
+ "coffee-script": "2.4.1",
+ "uglifier": "4.1.20",
+ "ffi": "1.11.1",
+ "actioncable": "5.2.3",
+ "io-like": "0.3.0",
+ "rb-inotify": "0.10.0",
+ "spring": "2.1.0",
+ "loofah": "2.2.3",
+ "selenium-webdriver": "3.142.3",
+ "marcel": "0.3.3",
+ "sass-listen": "4.0.0",
+ "nokogiri": "1.10.4",
+ "activestorage": "5.2.3",
+ "activejob": "5.2.3",
+ "mimemagic": "0.3.3",
+ "faraday": "0.17.1",
+ "execjs": "2.7.0",
+ "activesupport": "5.2.3",
+ "rails-html-sanitizer": "1.2.0",
+ "byebug": "11.0.1",
+ "xpath": "3.2.0",
+ "msgpack": "1.3.1",
+ "childprocess": "1.0.1",
+ "rails-dom-testing": "2.0.3",
+ "public_suffix": "3.1.1",
+ "mini_mime": "1.0.2",
+ "arel": "9.0.0",
+ "coffee-rails": "4.2.2",
+ "bundler": "1.17.3",
+ "rails": "5.2.3",
+ "globalid": "0.4.2",
+ "sentry-raven": "2.12.3",
+ "concurrent-ruby": "1.1.5",
+ "capybara": "3.28.0",
+ "regexp_parser": "1.6.0",
+ "sprockets-rails": "3.2.1",
+ "tzinfo": "1.2.5",
+ "mail": "2.7.1",
+ "actionview": "5.2.3",
+ "rubyzip": "1.2.3",
+ "coffee-script-source": "1.12.2",
+ "listen": "3.1.5",
+ "i18n": "1.6.0",
+ "erubi": "1.8.0",
+ "rake": "12.3.3",
+ "nio4r": "2.4.0",
+ "activemodel": "5.2.3",
+ "web-console": "3.7.0",
+ "ruby_dep": "1.5.0",
+ "turbolinks": "5.2.0",
+ "archive-zip": "0.12.0",
+ "method_source": "0.9.2",
+ "minitest": "5.11.3",
+ "puma": "3.12.1",
+ "sass-rails": "5.1.0",
+ "chromedriver-helper": "2.1.1",
+ "sprockets": "3.7.2",
+ "bindex": "0.8.1",
+ "actionmailer": "5.2.3",
+ "rack-test": "1.1.0",
+ "bootsnap": "1.4.4",
+ "railties": "5.2.3",
+ "mini_portile2": "2.4.0",
+ "crass": "1.0.4",
+ "websocket-extensions": "0.1.4",
+ "multipart-post": "2.1.1",
+ "rb-fsevent": "0.10.3",
+ "jbuilder": "2.9.1",
+ "pg": "1.1.4",
+ "sass": "3.7.4",
+ "activerecord": "5.2.3",
+ "builder": "3.2.3",
+ "spring-watcher-listen": "2.0.1",
+ "websocket-driver": "0.7.1",
+ "thor": "0.20.3",
+ "thread_safe": "0.3.6",
+ "addressable": "2.6.0",
+ "prometheus-client-mmap": "0.9.8",
+ "tilt": "2.0.9",
+ "actionpack": "5.2.3",
+ "rack": "2.0.7",
+ "turbolinks-source": "5.2.0"
+ },
+ "sdk": {
+ "version": "2.12.3",
+ "name": "raven-ruby"
+ },
+ "_meta": {
+ "user": null,
+ "context": null,
+ "entries": {
+ "1": {
+ "data": {
+ "": null,
+ "cookies": {
+ "": {
+ "err": [
+ [
+ "invalid_data",
+ {
+ "reason": "the cookie is missing a name/value pair"
+ }
+ ]
+ ],
+ "val": "********"
+ }
+ },
+ "url": null,
+ "headers": null,
+ "env": null,
+ "query": null,
+ "data": null,
+ "method": null
+ }
+ }
+ },
+ "contexts": null,
+ "message": null,
+ "packages": null,
+ "tags": {},
+ "sdk": null
+ },
+ "contexts": {
+ "browser": {
+ "version": "78.0.3904",
+ "type": "browser",
+ "name": "Chrome"
+ },
+ "client_os": {
+ "version": "10.15.1",
+ "type": "os",
+ "name": "Mac OS X"
+ }
+ },
+ "fingerprints": [
+ "6aa133ea51857634f2d113de52b5cc61",
+ "e1613eeb169241eab95b76ab52a80c68"
+ ],
+ "context": {
+ "server": {
+ "runtime": {
+ "version": "ruby 2.6.5p114 (2019-10-01 revision 67812) [x86_64-darwin18]",
+ "name": "ruby"
+ },
+ "os": {
+ "kernel_version": "Darwin Seans-MBP.fritz.box 19.0.0 Darwin Kernel Version 19.0.0: Thu Oct 17 16:17:15 PDT 2019; root:xnu-6153.41.3~29/RELEASE_X86_64 x86_64",
+ "version": "Darwin Kernel Version 19.0.0: Thu Oct 17 16:17:15 PDT 2019; root:xnu-6153.41.3~29/RELEASE_X86_64",
+ "build": "19.0.0",
+ "name": "Darwin"
+ }
+ }
+ },
+ "release": {
+ "dateReleased": null,
+ "commitCount": 0,
+ "url": null,
+ "data": {},
+ "lastDeploy": null,
+ "deployCount": 0,
+ "dateCreated": "2019-12-08T21:47:47Z",
+ "lastEvent": "2019-12-09T21:52:05Z",
+ "version": "b56ae26",
+ "firstEvent": "2019-12-08T21:47:47Z",
+ "lastCommit": null,
+ "shortVersion": "b56ae26",
+ "authors": [],
+ "owner": null,
+ "newGroups": 26,
+ "ref": null,
+ "projects": [
+ {
+ "slug": "gitlab-03",
+ "name": "gitlab-03"
+ }
+ ]
+ },
+ "groupID": "1378364652"
+}
diff --git a/spec/fixtures/sentry/issue_link_sample_response.json b/spec/fixtures/sentry/issue_link_sample_response.json
new file mode 100644
index 00000000000..f7f3220e83d
--- /dev/null
+++ b/spec/fixtures/sentry/issue_link_sample_response.json
@@ -0,0 +1,7 @@
+{
+ "url": "https://gitlab.com/test/tanuki-inc/issues/3",
+ "integrationId": 44444,
+ "displayName": "test/tanuki-inc#3",
+ "id": 140319,
+ "key": "gitlab.com/test:test/tanuki-inc#3"
+}
diff --git a/spec/fixtures/sentry/issue_sample_response.json b/spec/fixtures/sentry/issue_sample_response.json
new file mode 100644
index 00000000000..a320a21de34
--- /dev/null
+++ b/spec/fixtures/sentry/issue_sample_response.json
@@ -0,0 +1,311 @@
+{
+ "activity": [
+ {
+ "data": {},
+ "dateCreated": "2018-11-06T21:19:55Z",
+ "id": "0",
+ "type": "first_seen",
+ "user": null
+ }
+ ],
+ "annotations": [],
+ "assignedTo": null,
+ "count": "1",
+ "culprit": "raven.scripts.runner in main",
+ "firstRelease": {
+ "authors": [],
+ "commitCount": 0,
+ "data": {},
+ "dateCreated": "2018-11-06T21:19:55.146Z",
+ "dateReleased": null,
+ "deployCount": 0,
+ "firstEvent": "2018-11-06T21:19:55.271Z",
+ "lastCommit": null,
+ "lastDeploy": null,
+ "lastEvent": "2018-11-06T21:19:55.271Z",
+ "newGroups": 0,
+ "owner": null,
+ "projects": [
+ {
+ "name": "Pump Station",
+ "slug": "pump-station"
+ }
+ ],
+ "ref": null,
+ "shortVersion": "1764232",
+ "url": null,
+ "version": "17642328ead24b51867165985996d04b29310337"
+ },
+ "firstSeen": "2018-11-06T21:19:55Z",
+ "hasSeen": false,
+ "id": "503504",
+ "isBookmarked": false,
+ "isPublic": false,
+ "isSubscribed": true,
+ "lastRelease": null,
+ "lastSeen": "2018-11-06T21:19:55Z",
+ "level": "error",
+ "logger": null,
+ "metadata": {
+ "title": "This is an example Python exception"
+ },
+ "numComments": 0,
+ "participants": [],
+ "permalink": "https://sentrytest.gitlab.com/sentry-org/sentry-project/issues/503504/",
+ "pluginActions": [],
+ "pluginContexts": [],
+ "pluginIssues": [
+ {
+ "id": "gitlab",
+ "issue": {
+ "url": "https://gitlab.com/gitlab-org/gitlab/issues/1"
+ }
+ }
+ ],
+ "project": {
+ "id": "2",
+ "name": "Pump Station",
+ "slug": "pump-station"
+ },
+ "seenBy": [],
+ "shareId": null,
+ "shortId": "PUMP-STATION-1",
+ "stats": {
+ "24h": [
+ [
+ 1541451600.0,
+ 557
+ ],
+ [
+ 1541455200.0,
+ 473
+ ],
+ [
+ 1541458800.0,
+ 914
+ ],
+ [
+ 1541462400.0,
+ 991
+ ],
+ [
+ 1541466000.0,
+ 925
+ ],
+ [
+ 1541469600.0,
+ 881
+ ],
+ [
+ 1541473200.0,
+ 182
+ ],
+ [
+ 1541476800.0,
+ 490
+ ],
+ [
+ 1541480400.0,
+ 820
+ ],
+ [
+ 1541484000.0,
+ 322
+ ],
+ [
+ 1541487600.0,
+ 836
+ ],
+ [
+ 1541491200.0,
+ 565
+ ],
+ [
+ 1541494800.0,
+ 758
+ ],
+ [
+ 1541498400.0,
+ 880
+ ],
+ [
+ 1541502000.0,
+ 677
+ ],
+ [
+ 1541505600.0,
+ 381
+ ],
+ [
+ 1541509200.0,
+ 814
+ ],
+ [
+ 1541512800.0,
+ 329
+ ],
+ [
+ 1541516400.0,
+ 446
+ ],
+ [
+ 1541520000.0,
+ 731
+ ],
+ [
+ 1541523600.0,
+ 111
+ ],
+ [
+ 1541527200.0,
+ 926
+ ],
+ [
+ 1541530800.0,
+ 772
+ ],
+ [
+ 1541534400.0,
+ 400
+ ],
+ [
+ 1541538000.0,
+ 943
+ ]
+ ],
+ "30d": [
+ [
+ 1538870400.0,
+ 565
+ ],
+ [
+ 1538956800.0,
+ 12862
+ ],
+ [
+ 1539043200.0,
+ 15617
+ ],
+ [
+ 1539129600.0,
+ 10809
+ ],
+ [
+ 1539216000.0,
+ 15065
+ ],
+ [
+ 1539302400.0,
+ 12927
+ ],
+ [
+ 1539388800.0,
+ 12994
+ ],
+ [
+ 1539475200.0,
+ 13139
+ ],
+ [
+ 1539561600.0,
+ 11838
+ ],
+ [
+ 1539648000.0,
+ 12088
+ ],
+ [
+ 1539734400.0,
+ 12338
+ ],
+ [
+ 1539820800.0,
+ 12768
+ ],
+ [
+ 1539907200.0,
+ 12816
+ ],
+ [
+ 1539993600.0,
+ 15356
+ ],
+ [
+ 1540080000.0,
+ 10910
+ ],
+ [
+ 1540166400.0,
+ 12306
+ ],
+ [
+ 1540252800.0,
+ 12912
+ ],
+ [
+ 1540339200.0,
+ 14700
+ ],
+ [
+ 1540425600.0,
+ 11890
+ ],
+ [
+ 1540512000.0,
+ 11684
+ ],
+ [
+ 1540598400.0,
+ 13510
+ ],
+ [
+ 1540684800.0,
+ 12625
+ ],
+ [
+ 1540771200.0,
+ 12811
+ ],
+ [
+ 1540857600.0,
+ 13180
+ ],
+ [
+ 1540944000.0,
+ 14651
+ ],
+ [
+ 1541030400.0,
+ 14161
+ ],
+ [
+ 1541116800.0,
+ 12612
+ ],
+ [
+ 1541203200.0,
+ 14316
+ ],
+ [
+ 1541289600.0,
+ 14742
+ ],
+ [
+ 1541376000.0,
+ 12505
+ ],
+ [
+ 1541462400.0,
+ 14180
+ ]
+ ]
+ },
+ "status": "unresolved",
+ "statusDetails": {},
+ "subscriptionDetails": null,
+ "tags": [],
+ "title": "This is an example Python exception",
+ "type": "default",
+ "userCount": 0,
+ "userReportCount": 0
+}
diff --git a/spec/fixtures/sentry/repos_sample_response.json b/spec/fixtures/sentry/repos_sample_response.json
new file mode 100644
index 00000000000..fe389035fe3
--- /dev/null
+++ b/spec/fixtures/sentry/repos_sample_response.json
@@ -0,0 +1,15 @@
+[
+ {
+ "status": "active",
+ "integrationId": "48066",
+ "externalSlug": 139,
+ "name": "test / tanuki-inc",
+ "provider": {
+ "id": "integrations:gitlab",
+ "name": "Gitlab"
+ },
+ "url": "https://gitlab.com/test/tanuki-inc",
+ "id": "52480",
+ "dateCreated": "2020-01-08T21:15:17.181520Z"
+ }
+]
diff --git a/spec/frontend/__mocks__/@gitlab/ui.js b/spec/frontend/__mocks__/@gitlab/ui.js
new file mode 100644
index 00000000000..ef97cb11424
--- /dev/null
+++ b/spec/frontend/__mocks__/@gitlab/ui.js
@@ -0,0 +1,19 @@
+export * from '@gitlab/ui';
+
+/**
+ * The @gitlab/ui tooltip directive requires awkward and distracting setup in tests
+ * for components that use it (e.g., `attachToDocument: true` and `sync: true` passed
+ * to the `mount` helper from `vue-test-utils`).
+ *
+ * This mock decouples those tests from the implementation, removing the need to set
+ * them up specially just for these tooltips.
+ */
+export const GlTooltipDirective = {
+ bind() {},
+};
+
+export const GlTooltip = {
+ render(h) {
+ return h('div', this.$attrs, this.$slots.default);
+ },
+};
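A minimal hypothetical spec (not part of this patch) sketching the effect of the mock above: because the mocked directive's bind() is a no-op, a component that applies v-gl-tooltip can be mounted with a plain shallowMount call and no attachToDocument/sync options. The FakeButton component and its title text are assumptions used only for illustration, and the example assumes Jest's automatic node-module mocking picks up this file for '@gitlab/ui'.

// Hypothetical example; FakeButton exists only for this sketch.
// Assuming Jest resolves '@gitlab/ui' to the mock above, GlTooltipDirective.bind() is a no-op.
import { shallowMount } from '@vue/test-utils';
import { GlTooltipDirective } from '@gitlab/ui';

const FakeButton = {
  directives: { GlTooltip: GlTooltipDirective },
  template: '<button v-gl-tooltip title="Delete this branch">Delete</button>',
};

describe('FakeButton', () => {
  it('mounts without tooltip-specific test setup', () => {
    // No attachToDocument or sync options are required with the mocked directive.
    const wrapper = shallowMount(FakeButton);

    expect(wrapper.attributes('title')).toBe('Delete this branch');
  });
});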
diff --git a/spec/frontend/admin/statistics_panel/components/app_spec.js b/spec/frontend/admin/statistics_panel/components/app_spec.js
index eba61949f8e..dda0c2b857c 100644
--- a/spec/frontend/admin/statistics_panel/components/app_spec.js
+++ b/spec/frontend/admin/statistics_panel/components/app_spec.js
@@ -21,7 +21,6 @@ describe('Admin statistics app', () => {
wrapper = shallowMount(StatisticsPanelApp, {
localVue,
store,
- sync: false,
});
};
diff --git a/spec/frontend/api_spec.js b/spec/frontend/api_spec.js
index cef50bf553c..c0126b2330d 100644
--- a/spec/frontend/api_spec.js
+++ b/spec/frontend/api_spec.js
@@ -151,6 +151,21 @@ describe('Api', () => {
});
});
+ describe('updateProject', () => {
+ it('updates a project with the given payload', done => {
+ const projectPath = 'foo';
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectPath}`;
+ mock.onPut(expectedUrl).reply(200, { foo: 'bar' });
+
+ Api.updateProject(projectPath, { foo: 'bar' })
+ .then(({ data }) => {
+ expect(data.foo).toBe('bar');
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
describe('projectUsers', () => {
it('fetches all users of a particular project', done => {
const query = 'dummy query';
diff --git a/spec/frontend/behaviors/bind_in_out_spec.js b/spec/frontend/behaviors/bind_in_out_spec.js
new file mode 100644
index 00000000000..923b6d372dd
--- /dev/null
+++ b/spec/frontend/behaviors/bind_in_out_spec.js
@@ -0,0 +1,204 @@
+import BindInOut from '~/behaviors/bind_in_out';
+import ClassSpecHelper from '../helpers/class_spec_helper';
+
+describe('BindInOut', () => {
+ let testContext;
+
+ beforeEach(() => {
+ testContext = {};
+ });
+
+ describe('constructor', () => {
+ beforeEach(() => {
+ testContext.in = {};
+ testContext.out = {};
+
+ testContext.bindInOut = new BindInOut(testContext.in, testContext.out);
+ });
+
+ it('should set .in', () => {
+ expect(testContext.bindInOut.in).toBe(testContext.in);
+ });
+
+ it('should set .out', () => {
+ expect(testContext.bindInOut.out).toBe(testContext.out);
+ });
+
+ it('should set .eventWrapper', () => {
+ expect(testContext.bindInOut.eventWrapper).toEqual({});
+ });
+
+ describe('if .in is an input', () => {
+ beforeEach(() => {
+ testContext.bindInOut = new BindInOut({ tagName: 'INPUT' });
+ });
+
+ it('should set .eventType to keyup', () => {
+ expect(testContext.bindInOut.eventType).toEqual('keyup');
+ });
+ });
+
+ describe('if .in is a textarea', () => {
+ beforeEach(() => {
+ testContext.bindInOut = new BindInOut({ tagName: 'TEXTAREA' });
+ });
+
+ it('should set .eventType to keyup', () => {
+ expect(testContext.bindInOut.eventType).toEqual('keyup');
+ });
+ });
+
+ describe('if .in is not an input or textarea', () => {
+ beforeEach(() => {
+ testContext.bindInOut = new BindInOut({ tagName: 'SELECT' });
+ });
+
+ it('should set .eventType to change', () => {
+ expect(testContext.bindInOut.eventType).toEqual('change');
+ });
+ });
+ });
+
+ describe('addEvents', () => {
+ beforeEach(() => {
+ testContext.in = {
+ addEventListener: jest.fn(),
+ };
+
+ testContext.bindInOut = new BindInOut(testContext.in);
+
+ testContext.addEvents = testContext.bindInOut.addEvents();
+ });
+
+ it('should set .eventWrapper.updateOut', () => {
+ expect(testContext.bindInOut.eventWrapper.updateOut).toEqual(expect.any(Function));
+ });
+
+ it('should call .addEventListener', () => {
+ expect(testContext.in.addEventListener).toHaveBeenCalledWith(
+ testContext.bindInOut.eventType,
+ testContext.bindInOut.eventWrapper.updateOut,
+ );
+ });
+
+ it('should return the instance', () => {
+ expect(testContext.addEvents).toBe(testContext.bindInOut);
+ });
+ });
+
+ describe('updateOut', () => {
+ beforeEach(() => {
+ testContext.in = { value: 'the-value' };
+ testContext.out = { textContent: 'not-the-value' };
+
+ testContext.bindInOut = new BindInOut(testContext.in, testContext.out);
+
+ testContext.updateOut = testContext.bindInOut.updateOut();
+ });
+
+ it('should set .out.textContent to .in.value', () => {
+ expect(testContext.out.textContent).toBe(testContext.in.value);
+ });
+
+ it('should return the instance', () => {
+ expect(testContext.updateOut).toBe(testContext.bindInOut);
+ });
+ });
+
+ describe('removeEvents', () => {
+ beforeEach(() => {
+ testContext.in = {
+ removeEventListener: jest.fn(),
+ };
+ testContext.updateOut = () => {};
+
+ testContext.bindInOut = new BindInOut(testContext.in);
+ testContext.bindInOut.eventWrapper.updateOut = testContext.updateOut;
+
+ testContext.removeEvents = testContext.bindInOut.removeEvents();
+ });
+
+ it('should call .removeEventListener', () => {
+ expect(testContext.in.removeEventListener).toHaveBeenCalledWith(
+ testContext.bindInOut.eventType,
+ testContext.updateOut,
+ );
+ });
+
+ it('should return the instance', () => {
+ expect(testContext.removeEvents).toBe(testContext.bindInOut);
+ });
+ });
+
+ describe('initAll', () => {
+ beforeEach(() => {
+ testContext.ins = [0, 1, 2];
+ testContext.instances = [];
+
+ jest.spyOn(document, 'querySelectorAll').mockReturnValue(testContext.ins);
+ jest.spyOn(Array.prototype, 'map');
+ jest.spyOn(BindInOut, 'init').mockImplementation(() => {});
+
+ testContext.initAll = BindInOut.initAll();
+ });
+
+ ClassSpecHelper.itShouldBeAStaticMethod(BindInOut, 'initAll');
+
+ it('should call .querySelectorAll', () => {
+ expect(document.querySelectorAll).toHaveBeenCalledWith('*[data-bind-in]');
+ });
+
+ it('should call .map', () => {
+ expect(Array.prototype.map).toHaveBeenCalledWith(expect.any(Function));
+ });
+
+ it('should call .init for each element', () => {
+ expect(BindInOut.init.mock.calls.length).toEqual(3);
+ });
+
+ it('should return an array of instances', () => {
+ expect(testContext.initAll).toEqual(expect.any(Array));
+ });
+ });
+
+ describe('init', () => {
+ beforeEach(() => {
+ // eslint-disable-next-line func-names
+ jest.spyOn(BindInOut.prototype, 'addEvents').mockImplementation(function() {
+ return this;
+ });
+ // eslint-disable-next-line func-names
+ jest.spyOn(BindInOut.prototype, 'updateOut').mockImplementation(function() {
+ return this;
+ });
+
+ testContext.init = BindInOut.init({}, {});
+ });
+
+ ClassSpecHelper.itShouldBeAStaticMethod(BindInOut, 'init');
+
+ it('should call .addEvents', () => {
+ expect(BindInOut.prototype.addEvents).toHaveBeenCalled();
+ });
+
+ it('should call .updateOut', () => {
+ expect(BindInOut.prototype.updateOut).toHaveBeenCalled();
+ });
+
+ describe('if no anOut is provided', () => {
+ beforeEach(() => {
+ testContext.anIn = { dataset: { bindIn: 'the-data-bind-in' } };
+
+ jest.spyOn(document, 'querySelector').mockImplementation(() => {});
+
+ BindInOut.init(testContext.anIn);
+ });
+
+ it('should call .querySelector', () => {
+ expect(document.querySelector).toHaveBeenCalledWith(
+ `*[data-bind-out="${testContext.anIn.dataset.bindIn}"]`,
+ );
+ });
+ });
+ });
+});
diff --git a/spec/frontend/behaviors/markdown/paste_markdown_table_spec.js b/spec/frontend/behaviors/markdown/paste_markdown_table_spec.js
new file mode 100644
index 00000000000..a98919e2113
--- /dev/null
+++ b/spec/frontend/behaviors/markdown/paste_markdown_table_spec.js
@@ -0,0 +1,113 @@
+import PasteMarkdownTable from '~/behaviors/markdown/paste_markdown_table';
+
+describe('PasteMarkdownTable', () => {
+ let data;
+
+ beforeEach(() => {
+ const event = new window.Event('paste');
+
+ Object.defineProperty(event, 'dataTransfer', {
+ value: {
+ getData: jest.fn().mockImplementation(type => {
+ if (type === 'text/html') {
+ return '<table><tr><td>First</td><td>Second</td></tr></table>';
+ }
+ return 'First\tSecond';
+ }),
+ },
+ });
+
+ data = event.dataTransfer;
+ });
+
+ describe('isTable', () => {
+ it('returns false when no HTML data is provided', () => {
+ data.types = ['text/plain'];
+
+ expect(new PasteMarkdownTable(data).isTable()).toBe(false);
+ });
+
+ it('returns false when no text data is provided', () => {
+ data.types = ['text/html'];
+
+ expect(new PasteMarkdownTable(data).isTable()).toBe(false);
+ });
+
+ it('returns true when a table is provided in both text and HTML', () => {
+ data.types = ['text/html', 'text/plain'];
+
+ expect(new PasteMarkdownTable(data).isTable()).toBe(true);
+ });
+
+ it('returns false when no HTML table is included', () => {
+ data.types = ['text/html', 'text/plain'];
+ data.getData = jest.fn().mockImplementation(() => 'nothing');
+
+ expect(new PasteMarkdownTable(data).isTable()).toBe(false);
+ });
+
+ it('returns false when the number of rows is not consistent', () => {
+ data.types = ['text/html', 'text/plain'];
+ data.getData = jest.fn().mockImplementation(mimeType => {
+ if (mimeType === 'text/html') {
+ return '<table><tr><td>def test<td></tr></table>';
+ }
+ return "def test\n 'hello'\n";
+ });
+
+ expect(new PasteMarkdownTable(data).isTable()).toBe(false);
+ });
+ });
+
+ describe('convertToTableMarkdown', () => {
+ it('returns a Markdown table', () => {
+ data.types = ['text/html', 'text/plain'];
+ data.getData = jest.fn().mockImplementation(type => {
+ if (type === 'text/html') {
+ return '<table><tr><td>First</td><td>Last</td><tr><td>John</td><td>Doe</td><tr><td>Jane</td><td>Doe</td></table>';
+ } else if (type === 'text/plain') {
+ return 'First\tLast\nJohn\tDoe\nJane\tDoe';
+ }
+
+ return '';
+ });
+
+ const expected = [
+ '| First | Last |',
+ '|-------|------|',
+ '| John | Doe |',
+ '| Jane | Doe |',
+ ].join('\n');
+
+ const converter = new PasteMarkdownTable(data);
+
+ expect(converter.isTable()).toBe(true);
+ expect(converter.convertToTableMarkdown()).toBe(expected);
+ });
+
+ it('returns a Markdown table with rows normalized', () => {
+ data.types = ['text/html', 'text/plain'];
+ data.getData = jest.fn().mockImplementation(type => {
+ if (type === 'text/html') {
+ return '<table><tr><td>First</td><td>Last</td><tr><td>John</td><td>Doe</td><tr><td>Jane</td><td>/td></table>';
+ } else if (type === 'text/plain') {
+ return 'First\tLast\nJohn\tDoe\nJane';
+ }
+
+ return '';
+ });
+
+ const expected = [
+ '| First | Last |',
+ '|-------|------|',
+ '| John | Doe |',
+ '| Jane | |',
+ ].join('\n');
+
+ const converter = new PasteMarkdownTable(data);
+
+ expect(converter.isTable()).toBe(true);
+ expect(converter.convertToTableMarkdown()).toBe(expected);
+ });
+ });
+});
diff --git a/spec/frontend/boards/components/issue_time_estimate_spec.js b/spec/frontend/boards/components/issue_time_estimate_spec.js
index 25e922931c3..162a6df828b 100644
--- a/spec/frontend/boards/components/issue_time_estimate_spec.js
+++ b/spec/frontend/boards/components/issue_time_estimate_spec.js
@@ -20,7 +20,6 @@ describe('Issue Time Estimate component', () => {
propsData: {
estimate: 374460,
},
- sync: false,
});
});
@@ -61,7 +60,6 @@ describe('Issue Time Estimate component', () => {
propsData: {
estimate: 374460,
},
- sync: false,
});
});
diff --git a/spec/frontend/boards/issue_card_spec.js b/spec/frontend/boards/issue_card_spec.js
index c7ab477c0af..df55a106945 100644
--- a/spec/frontend/boards/issue_card_spec.js
+++ b/spec/frontend/boards/issue_card_spec.js
@@ -50,8 +50,6 @@ describe('Issue card component', () => {
rootPath: '/',
},
store,
- sync: false,
- attachToDocument: true,
});
});
@@ -267,17 +265,13 @@ describe('Issue card component', () => {
});
it('renders label', () => {
- const nodes = wrapper
- .findAll('.badge')
- .wrappers.map(label => label.attributes('data-original-title'));
+ const nodes = wrapper.findAll('.badge').wrappers.map(label => label.attributes('title'));
expect(nodes.includes(label1.description)).toBe(true);
});
it('sets label description as title', () => {
- expect(wrapper.find('.badge').attributes('data-original-title')).toContain(
- label1.description,
- );
+ expect(wrapper.find('.badge').attributes('title')).toContain(label1.description);
});
it('sets background color of button', () => {
diff --git a/spec/javascripts/bootstrap_jquery_spec.js b/spec/frontend/bootstrap_jquery_spec.js
index 6957cf40301..d5d592e3839 100644
--- a/spec/javascripts/bootstrap_jquery_spec.js
+++ b/spec/frontend/bootstrap_jquery_spec.js
@@ -1,37 +1,40 @@
import $ from 'jquery';
import '~/commons/bootstrap';
-describe('Bootstrap jQuery extensions', function() {
- describe('disable', function() {
- beforeEach(function() {
- return setFixtures('<input type="text" />');
+describe('Bootstrap jQuery extensions', () => {
+ describe('disable', () => {
+ beforeEach(() => {
+ setFixtures('<input type="text" />');
});
- it('adds the disabled attribute', function() {
+ it('adds the disabled attribute', () => {
const $input = $('input').first();
$input.disable();
expect($input).toHaveAttr('disabled', 'disabled');
});
- return it('adds the disabled class', function() {
+
+ it('adds the disabled class', () => {
const $input = $('input').first();
$input.disable();
expect($input).toHaveClass('disabled');
});
});
- return describe('enable', function() {
- beforeEach(function() {
- return setFixtures('<input type="text" disabled="disabled" class="disabled" />');
+
+ describe('enable', () => {
+ beforeEach(() => {
+ setFixtures('<input type="text" disabled="disabled" class="disabled" />');
});
- it('removes the disabled attribute', function() {
+ it('removes the disabled attribute', () => {
const $input = $('input').first();
$input.enable();
expect($input).not.toHaveAttr('disabled');
});
- return it('removes the disabled class', function() {
+
+ it('removes the disabled class', () => {
const $input = $('input').first();
$input.enable();
diff --git a/spec/javascripts/branches/branches_delete_modal_spec.js b/spec/frontend/branches/branches_delete_modal_spec.js
index b223b8e2c0a..21608feafc8 100644
--- a/spec/javascripts/branches/branches_delete_modal_spec.js
+++ b/spec/frontend/branches/branches_delete_modal_spec.js
@@ -15,7 +15,7 @@ describe('branches delete modal', () => {
</div>
`);
$deleteButton = $('.js-delete-branch');
- submitSpy = jasmine.createSpy('submit').and.callFake(event => event.preventDefault());
+ submitSpy = jest.fn(event => event.preventDefault());
$('#modal-delete-branch form').on('submit', submitSpy);
// eslint-disable-next-line no-new
new DeleteModal();
diff --git a/spec/frontend/branches/components/__snapshots__/divergence_graph_spec.js.snap b/spec/frontend/branches/components/__snapshots__/divergence_graph_spec.js.snap
index 511c027dbc2..c9948db95f8 100644
--- a/spec/frontend/branches/components/__snapshots__/divergence_graph_spec.js.snap
+++ b/spec/frontend/branches/components/__snapshots__/divergence_graph_spec.js.snap
@@ -5,7 +5,7 @@ exports[`Branch divergence graph component renders ahead and behind count 1`] =
class="divergence-graph px-2 d-none d-md-block"
title="10 commits behind master, 10 commits ahead"
>
- <graphbar-stub
+ <graph-bar-stub
count="10"
maxcommits="100"
position="left"
@@ -15,7 +15,7 @@ exports[`Branch divergence graph component renders ahead and behind count 1`] =
class="graph-separator pull-left mt-1"
/>
- <graphbar-stub
+ <graph-bar-stub
count="10"
maxcommits="100"
position="right"
@@ -28,7 +28,7 @@ exports[`Branch divergence graph component renders distance count 1`] = `
class="divergence-graph px-2 d-none d-md-block"
title="More than 900 commits different with master"
>
- <graphbar-stub
+ <graph-bar-stub
count="900"
maxcommits="100"
position="full"
diff --git a/spec/frontend/clusters/components/applications_spec.js b/spec/frontend/clusters/components/applications_spec.js
index 2d7958a6b65..01e9b04dcd7 100644
--- a/spec/frontend/clusters/components/applications_spec.js
+++ b/spec/frontend/clusters/components/applications_spec.js
@@ -17,7 +17,6 @@ describe('Applications', () => {
gon.features = gon.features || {};
gon.features.enableClusterApplicationElasticStack = true;
- gon.features.enableClusterApplicationCrossplane = true;
});
afterEach(() => {
@@ -190,6 +189,7 @@ describe('Applications', () => {
title: 'Ingress',
status: 'installed',
externalHostname: 'localhost.localdomain',
+ modsecurity_enabled: false,
},
helm: { title: 'Helm Tiller' },
cert_manager: { title: 'Cert-Manager' },
@@ -198,7 +198,7 @@ describe('Applications', () => {
prometheus: { title: 'Prometheus' },
jupyter: { title: 'JupyterHub', hostname: '' },
knative: { title: 'Knative', hostname: '' },
- elastic_stack: { title: 'Elastic Stack', kibana_hostname: '' },
+ elastic_stack: { title: 'Elastic Stack' },
},
});
@@ -432,76 +432,35 @@ describe('Applications', () => {
});
describe('Elastic Stack application', () => {
- describe('with ingress installed with ip & elastic stack installable', () => {
+ describe('with elastic stack installable', () => {
it('renders hostname active input', () => {
vm = mountComponent(Applications, {
applications: {
...APPLICATIONS_MOCK_STATE,
- ingress: {
- title: 'Ingress',
- status: 'installed',
- externalIp: '1.1.1.1',
- },
},
});
expect(
vm.$el
- .querySelector('.js-cluster-application-row-elastic_stack .js-hostname')
- .getAttribute('readonly'),
- ).toEqual(null);
- });
- });
-
- describe('with ingress installed without external ip', () => {
- it('does not render hostname input', () => {
- vm = mountComponent(Applications, {
- applications: {
- ...APPLICATIONS_MOCK_STATE,
- ingress: { title: 'Ingress', status: 'installed' },
- },
- });
-
- expect(vm.$el.querySelector('.js-cluster-application-row-elastic_stack .js-hostname')).toBe(
- null,
- );
+ .querySelector(
+ '.js-cluster-application-row-elastic_stack .js-cluster-application-install-button',
+ )
+ .getAttribute('disabled'),
+ ).toEqual('disabled');
});
});
- describe('with ingress & elastic stack installed', () => {
- it('renders readonly input', () => {
+ describe('elastic stack installed', () => {
+ it('renders uninstall button', () => {
vm = mountComponent(Applications, {
applications: {
...APPLICATIONS_MOCK_STATE,
- ingress: { title: 'Ingress', status: 'installed', externalIp: '1.1.1.1' },
- elastic_stack: { title: 'Elastic Stack', status: 'installed', kibana_hostname: '' },
+ elastic_stack: { title: 'Elastic Stack', status: 'installed' },
},
});
expect(
vm.$el
- .querySelector('.js-cluster-application-row-elastic_stack .js-hostname')
- .getAttribute('readonly'),
- ).toEqual('readonly');
- });
- });
-
- describe('without ingress installed', () => {
- beforeEach(() => {
- vm = mountComponent(Applications, {
- applications: APPLICATIONS_MOCK_STATE,
- });
- });
-
- it('does not render input', () => {
- expect(vm.$el.querySelector('.js-cluster-application-row-elastic_stack .js-hostname')).toBe(
- null,
- );
- });
-
- it('renders disabled install button', () => {
- expect(
- vm.$el
.querySelector(
'.js-cluster-application-row-elastic_stack .js-cluster-application-install-button',
)
diff --git a/spec/frontend/clusters/components/knative_domain_editor_spec.js b/spec/frontend/clusters/components/knative_domain_editor_spec.js
index 242b5701f8b..6514d883c0d 100644
--- a/spec/frontend/clusters/components/knative_domain_editor_spec.js
+++ b/spec/frontend/clusters/components/knative_domain_editor_spec.js
@@ -25,6 +25,7 @@ describe('KnativeDomainEditor', () => {
afterEach(() => {
wrapper.destroy();
+ wrapper = null;
});
describe('knative has an assigned IP address', () => {
@@ -78,7 +79,9 @@ describe('KnativeDomainEditor', () => {
it('triggers save event and pass current knative hostname', () => {
wrapper.find(LoadingButton).vm.$emit('click');
- expect(wrapper.emitted('save')[0]).toEqual([knative.hostname]);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted('save')[0]).toEqual([knative.hostname]);
+ });
});
});
@@ -101,11 +104,15 @@ describe('KnativeDomainEditor', () => {
describe('when knative domain name input changes', () => {
it('emits "set" event with updated domain name', () => {
+ createComponent({ knative });
+
const newHostname = 'newhostname.com';
wrapper.setData({ knativeHostname: newHostname });
- expect(wrapper.emitted('set')[0]).toEqual([newHostname]);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted('set')[0]).toEqual([newHostname]);
+ });
});
});
@@ -117,7 +124,9 @@ describe('KnativeDomainEditor', () => {
it('displays an error banner indicating the operation failure', () => {
wrapper.setProps({ knative: { updateFailed: true, ...knative } });
- expect(wrapper.find('.js-cluster-knative-domain-name-failure-message').exists()).toBe(true);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find('.js-cluster-knative-domain-name-failure-message').exists()).toBe(true);
+ });
});
});
diff --git a/spec/frontend/clusters/components/remove_cluster_confirmation_spec.js b/spec/frontend/clusters/components/remove_cluster_confirmation_spec.js
index b5aead238ad..091d4e07987 100644
--- a/spec/frontend/clusters/components/remove_cluster_confirmation_spec.js
+++ b/spec/frontend/clusters/components/remove_cluster_confirmation_spec.js
@@ -13,7 +13,6 @@ describe('Remove cluster confirmation modal', () => {
clusterName: 'clusterName',
...props,
},
- sync: false,
});
};
diff --git a/spec/frontend/clusters/components/uninstall_application_confirmation_modal_spec.js b/spec/frontend/clusters/components/uninstall_application_confirmation_modal_spec.js
index f95bce775c6..c07f6851826 100644
--- a/spec/frontend/clusters/components/uninstall_application_confirmation_modal_spec.js
+++ b/spec/frontend/clusters/components/uninstall_application_confirmation_modal_spec.js
@@ -35,9 +35,10 @@ describe('UninstallApplicationConfirmationModal', () => {
wrapper.find(GlModal).vm.$emit('ok');
});
- it('emits confirm event', () => {
- expect(wrapper.emitted('confirm')).toBeTruthy();
- });
+ it('emits confirm event', () =>
+ wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted('confirm')).toBeTruthy();
+ }));
it('calls track uninstall button click mixin', () => {
expect(wrapper.vm.trackUninstallButtonClick).toHaveBeenCalledWith(INGRESS);
diff --git a/spec/frontend/clusters/services/crossplane_provider_stack_spec.js b/spec/frontend/clusters/services/crossplane_provider_stack_spec.js
index d43dc9333b4..3e5f8de8e7b 100644
--- a/spec/frontend/clusters/services/crossplane_provider_stack_spec.js
+++ b/spec/frontend/clusters/services/crossplane_provider_stack_spec.js
@@ -70,7 +70,9 @@ describe('CrossplaneProviderStack component', () => {
};
createComponent({ crossplane });
findFirstDropdownElement().vm.$emit('click');
- expect(wrapper.emitted().set[0][0].code).toEqual('gcp');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted().set[0][0].code).toEqual('gcp');
+ });
});
it('renders the correct dropdown text when no stack is selected', () => {
diff --git a/spec/frontend/clusters/services/mock_data.js b/spec/frontend/clusters/services/mock_data.js
index 016f5a259b5..f0bcf5d980f 100644
--- a/spec/frontend/clusters/services/mock_data.js
+++ b/spec/frontend/clusters/services/mock_data.js
@@ -150,14 +150,14 @@ const DEFAULT_APPLICATION_STATE = {
const APPLICATIONS_MOCK_STATE = {
helm: { title: 'Helm Tiller', status: 'installable' },
- ingress: { title: 'Ingress', status: 'installable' },
+ ingress: { title: 'Ingress', status: 'installable', modsecurity_enabled: false },
crossplane: { title: 'Crossplane', status: 'installable', stack: '' },
cert_manager: { title: 'Cert-Manager', status: 'installable' },
runner: { title: 'GitLab Runner' },
prometheus: { title: 'Prometheus' },
jupyter: { title: 'JupyterHub', status: 'installable', hostname: '' },
knative: { title: 'Knative ', status: 'installable', hostname: '' },
- elastic_stack: { title: 'Elastic Stack', status: 'installable', kibana_hostname: '' },
+ elastic_stack: { title: 'Elastic Stack', status: 'installable' },
};
export { CLUSTERS_MOCK_DATA, DEFAULT_APPLICATION_STATE, APPLICATIONS_MOCK_STATE };
diff --git a/spec/frontend/clusters/stores/clusters_store_spec.js b/spec/frontend/clusters/stores/clusters_store_spec.js
index 71d4daceb75..f2dbdd0638b 100644
--- a/spec/frontend/clusters/stores/clusters_store_spec.js
+++ b/spec/frontend/clusters/stores/clusters_store_spec.js
@@ -86,6 +86,7 @@ describe('Clusters Store', () => {
uninstallSuccessful: false,
uninstallFailed: false,
validationError: null,
+ modsecurity_enabled: false,
},
runner: {
title: 'GitLab Runner',
@@ -166,7 +167,6 @@ describe('Clusters Store', () => {
installFailed: true,
statusReason: mockResponseData.applications[7].status_reason,
requestReason: null,
- kibana_hostname: '',
installed: false,
uninstallable: false,
uninstallSuccessful: false,
@@ -215,16 +215,5 @@ describe('Clusters Store', () => {
`jupyter.${store.state.applications.ingress.externalIp}.nip.io`,
);
});
-
- it('sets default hostname for elastic stack when ingress has a ip address', () => {
- const mockResponseData =
- CLUSTERS_MOCK_DATA.GET['/gitlab-org/gitlab-shell/clusters/2/status.json'].data;
-
- store.updateStateFromServer(mockResponseData);
-
- expect(store.state.applications.elastic_stack.kibana_hostname).toEqual(
- `kibana.${store.state.applications.ingress.externalIp}.nip.io`,
- );
- });
});
});
diff --git a/spec/frontend/commit/commit_pipeline_status_component_spec.js b/spec/frontend/commit/commit_pipeline_status_component_spec.js
index a2a6d405eab..9281d1d02a3 100644
--- a/spec/frontend/commit/commit_pipeline_status_component_spec.js
+++ b/spec/frontend/commit/commit_pipeline_status_component_spec.js
@@ -33,7 +33,6 @@ describe('Commit pipeline status component', () => {
...defaultProps,
...props,
},
- sync: false,
});
};
diff --git a/spec/frontend/confidential_merge_request/components/__snapshots__/project_form_group_spec.js.snap b/spec/frontend/confidential_merge_request/components/__snapshots__/project_form_group_spec.js.snap
index d69a9f90d65..f7b68d96129 100644
--- a/spec/frontend/confidential_merge_request/components/__snapshots__/project_form_group_spec.js.snap
+++ b/spec/frontend/confidential_merge_request/components/__snapshots__/project_form_group_spec.js.snap
@@ -18,11 +18,11 @@ exports[`Confidential merge request project form group component renders empty s
No forks are available to you.
<br />
- <glsprintf-stub
+ <gl-sprintf-stub
message="To protect this issue's confidentiality, %{forkLink} and set the fork's visibility to private."
/>
- <gllink-stub
+ <gl-link-stub
class="w-auto p-0 d-inline-block text-primary bg-transparent"
href="/help"
target="_blank"
@@ -37,7 +37,7 @@ exports[`Confidential merge request project form group component renders empty s
aria-hidden="true"
class="fa fa-question-circle"
/>
- </gllink-stub>
+ </gl-link-stub>
</p>
</div>
</div>
@@ -61,11 +61,11 @@ exports[`Confidential merge request project form group component renders fork dr
No forks are available to you.
<br />
- <glsprintf-stub
+ <gl-sprintf-stub
message="To protect this issue's confidentiality, %{forkLink} and set the fork's visibility to private."
/>
- <gllink-stub
+ <gl-link-stub
class="w-auto p-0 d-inline-block text-primary bg-transparent"
href="/help"
target="_blank"
@@ -80,7 +80,7 @@ exports[`Confidential merge request project form group component renders fork dr
aria-hidden="true"
class="fa fa-question-circle"
/>
- </gllink-stub>
+ </gl-link-stub>
</p>
</div>
</div>
diff --git a/spec/frontend/confidential_merge_request/components/project_form_group_spec.js b/spec/frontend/confidential_merge_request/components/project_form_group_spec.js
index 3001363f7b9..975701ebd96 100644
--- a/spec/frontend/confidential_merge_request/components/project_form_group_spec.js
+++ b/spec/frontend/confidential_merge_request/components/project_form_group_spec.js
@@ -1,9 +1,8 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import ProjectFormGroup from '~/confidential_merge_request/components/project_form_group.vue';
-const localVue = createLocalVue();
const mockData = [
{
id: 1,
@@ -30,7 +29,6 @@ function factory(projects = mockData) {
mock.onGet(/api\/(.*)\/projects\/gitlab-org%2Fgitlab-ce\/forks/).reply(200, projects);
vm = shallowMount(ProjectFormGroup, {
- localVue,
propsData: {
namespacePath: 'gitlab-org',
projectPath: 'gitlab-org/gitlab-ce',
@@ -49,7 +47,7 @@ describe('Confidential merge request project form group component', () => {
it('renders fork dropdown', () => {
factory();
- return localVue.nextTick(() => {
+ return vm.vm.$nextTick(() => {
expect(vm.element).toMatchSnapshot();
});
});
@@ -57,7 +55,7 @@ describe('Confidential merge request project form group component', () => {
it('sets selected project as first fork', () => {
factory();
- return localVue.nextTick(() => {
+ return vm.vm.$nextTick(() => {
expect(vm.vm.selectedProject).toEqual({
id: 1,
name: 'root / gitlab-ce',
@@ -70,7 +68,7 @@ describe('Confidential merge request project form group component', () => {
it('renders empty state when response is empty', () => {
factory([]);
- return localVue.nextTick(() => {
+ return vm.vm.$nextTick(() => {
expect(vm.element).toMatchSnapshot();
});
});
diff --git a/spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap b/spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap
index b87afdd7eb4..184d0321dc1 100644
--- a/spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap
+++ b/spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap
@@ -17,7 +17,11 @@ exports[`Contributors charts should render charts when loading completed and the
<glareachart-stub
data="[object Object]"
height="264"
+ includelegendavgmax="true"
+ legendaveragetext="Avg"
+ legendmaxtext="Max"
option="[object Object]"
+ thresholds=""
/>
</div>
@@ -38,7 +42,11 @@ exports[`Contributors charts should render charts when loading completed and the
<glareachart-stub
data="[object Object]"
height="216"
+ includelegendavgmax="true"
+ legendaveragetext="Avg"
+ legendmaxtext="Max"
option="[object Object]"
+ thresholds=""
/>
</div>
</div>
diff --git a/spec/frontend/contributors/component/contributors_spec.js b/spec/frontend/contributors/component/contributors_spec.js
index 1d5605ef516..3e4924ed906 100644
--- a/spec/frontend/contributors/component/contributors_spec.js
+++ b/spec/frontend/contributors/component/contributors_spec.js
@@ -1,11 +1,10 @@
import Vue from 'vue';
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import { createStore } from '~/contributors/stores';
import axios from '~/lib/utils/axios_utils';
import ContributorsCharts from '~/contributors/components/contributors.vue';
-const localVue = createLocalVue();
let wrapper;
let mock;
let store;
@@ -52,7 +51,7 @@ describe('Contributors charts', () => {
it('should display loader while loading data', () => {
wrapper.vm.$store.state.loading = true;
- return localVue.nextTick(() => {
+ return wrapper.vm.$nextTick(() => {
expect(wrapper.find('.contributors-loader').exists()).toBe(true);
});
});
@@ -60,7 +59,7 @@ describe('Contributors charts', () => {
it('should render charts when loading completed and there is chart data', () => {
wrapper.vm.$store.state.loading = false;
wrapper.vm.$store.state.chartData = chartData;
- return localVue.nextTick(() => {
+ return wrapper.vm.$nextTick(() => {
expect(wrapper.find('.contributors-loader').exists()).toBe(false);
expect(wrapper.find('.contributors-charts').exists()).toBe(true);
expect(wrapper.element).toMatchSnapshot();
diff --git a/spec/frontend/create_cluster/eks_cluster/components/cluster_form_dropdown_spec.js b/spec/frontend/create_cluster/components/cluster_form_dropdown_spec.js
index c9cdd728509..292b8694fbc 100644
--- a/spec/frontend/create_cluster/eks_cluster/components/cluster_form_dropdown_spec.js
+++ b/spec/frontend/create_cluster/components/cluster_form_dropdown_spec.js
@@ -2,7 +2,7 @@ import { shallowMount } from '@vue/test-utils';
import $ from 'jquery';
import { GlIcon } from '@gitlab/ui';
-import ClusterFormDropdown from '~/create_cluster/eks_cluster/components/cluster_form_dropdown.vue';
+import ClusterFormDropdown from '~/create_cluster/components/cluster_form_dropdown.vue';
import DropdownButton from '~/vue_shared/components/dropdown/dropdown_button.vue';
import DropdownSearchInput from '~/vue_shared/components/dropdown/dropdown_search_input.vue';
@@ -20,7 +20,10 @@ describe('ClusterFormDropdown', () => {
describe('when initial value is provided', () => {
it('sets selectedItem to initial value', () => {
vm.setProps({ items, value: secondItem.value });
- expect(vm.find(DropdownButton).props('toggleText')).toEqual(secondItem.name);
+
+ return vm.vm.$nextTick().then(() => {
+ expect(vm.find(DropdownButton).props('toggleText')).toEqual(secondItem.name);
+ });
});
});
@@ -30,16 +33,22 @@ describe('ClusterFormDropdown', () => {
vm.setProps({ placeholder });
- expect(vm.find(DropdownButton).props('toggleText')).toEqual(placeholder);
+ return vm.vm.$nextTick().then(() => {
+ expect(vm.find(DropdownButton).props('toggleText')).toEqual(placeholder);
+ });
});
});
describe('when an item is selected', () => {
beforeEach(() => {
vm.setProps({ items });
- vm.findAll('.js-dropdown-item')
- .at(1)
- .trigger('click');
+
+ return vm.vm.$nextTick().then(() => {
+ vm.findAll('.js-dropdown-item')
+ .at(1)
+ .trigger('click');
+ return vm.vm.$nextTick();
+ });
});
it('emits input event with selected item', () => {
@@ -52,12 +61,20 @@ describe('ClusterFormDropdown', () => {
beforeEach(() => {
vm.setProps({ items, multiple: true, value });
- vm.findAll('.js-dropdown-item')
- .at(0)
- .trigger('click');
- vm.findAll('.js-dropdown-item')
- .at(1)
- .trigger('click');
+ return vm.vm
+ .$nextTick()
+ .then(() => {
+ vm.findAll('.js-dropdown-item')
+ .at(0)
+ .trigger('click');
+ return vm.vm.$nextTick();
+ })
+ .then(() => {
+ vm.findAll('.js-dropdown-item')
+ .at(1)
+ .trigger('click');
+ return vm.vm.$nextTick();
+ });
});
it('emits input event with an array of selected items', () => {
@@ -68,6 +85,7 @@ describe('ClusterFormDropdown', () => {
describe('when multiple items can be selected', () => {
beforeEach(() => {
vm.setProps({ items, multiple: true, value: firstItem.value });
+ return vm.vm.$nextTick();
});
it('displays a checked GlIcon next to the item', () => {
@@ -85,7 +103,9 @@ describe('ClusterFormDropdown', () => {
vm.setProps({ labelProperty, items: customLabelItems, value: currentValue });
- expect(vm.find(DropdownButton).props('toggleText')).toEqual(label);
+ return vm.vm.$nextTick().then(() => {
+ expect(vm.find(DropdownButton).props('toggleText')).toEqual(label);
+ });
});
});
@@ -93,7 +113,9 @@ describe('ClusterFormDropdown', () => {
it('dropdown button isLoading', () => {
vm.setProps({ loading: true });
- expect(vm.find(DropdownButton).props('isLoading')).toBe(true);
+ return vm.vm.$nextTick().then(() => {
+ expect(vm.find(DropdownButton).props('isLoading')).toBe(true);
+ });
});
});
@@ -103,7 +125,9 @@ describe('ClusterFormDropdown', () => {
vm.setProps({ loading: true, loadingText });
- expect(vm.find(DropdownButton).props('toggleText')).toEqual(loadingText);
+ return vm.vm.$nextTick().then(() => {
+ expect(vm.find(DropdownButton).props('toggleText')).toEqual(loadingText);
+ });
});
});
@@ -111,7 +135,9 @@ describe('ClusterFormDropdown', () => {
it('dropdown button isDisabled', () => {
vm.setProps({ disabled: true });
- expect(vm.find(DropdownButton).props('isDisabled')).toBe(true);
+ return vm.vm.$nextTick().then(() => {
+ expect(vm.find(DropdownButton).props('isDisabled')).toBe(true);
+ });
});
});
@@ -121,7 +147,9 @@ describe('ClusterFormDropdown', () => {
vm.setProps({ disabled: true, disabledText });
- expect(vm.find(DropdownButton).props('toggleText')).toBe(disabledText);
+ return vm.vm.$nextTick().then(() => {
+ expect(vm.find(DropdownButton).props('toggleText')).toBe(disabledText);
+ });
});
});
@@ -129,7 +157,9 @@ describe('ClusterFormDropdown', () => {
it('sets border-danger class selector to dropdown toggle', () => {
vm.setProps({ hasErrors: true });
- expect(vm.find(DropdownButton).classes('border-danger')).toBe(true);
+ return vm.vm.$nextTick().then(() => {
+ expect(vm.find(DropdownButton).classes('border-danger')).toBe(true);
+ });
});
});
@@ -139,7 +169,9 @@ describe('ClusterFormDropdown', () => {
vm.setProps({ hasErrors: true, errorMessage });
- expect(vm.find('.js-eks-dropdown-error-message').text()).toEqual(errorMessage);
+ return vm.vm.$nextTick().then(() => {
+ expect(vm.find('.js-eks-dropdown-error-message').text()).toEqual(errorMessage);
+ });
});
});
@@ -149,7 +181,9 @@ describe('ClusterFormDropdown', () => {
vm.setProps({ items: [], emptyText });
- expect(vm.find('.js-empty-text').text()).toEqual(emptyText);
+ return vm.vm.$nextTick().then(() => {
+ expect(vm.find('.js-empty-text').text()).toEqual(emptyText);
+ });
});
});
@@ -158,7 +192,9 @@ describe('ClusterFormDropdown', () => {
vm.setProps({ searchFieldPlaceholder });
- expect(vm.find(DropdownSearchInput).props('placeholderText')).toEqual(searchFieldPlaceholder);
+ return vm.vm.$nextTick().then(() => {
+ expect(vm.find(DropdownSearchInput).props('placeholderText')).toEqual(searchFieldPlaceholder);
+ });
});
it('filters results by search query', () => {
@@ -167,8 +203,10 @@ describe('ClusterFormDropdown', () => {
vm.setProps({ items });
vm.setData({ searchQuery });
- expect(vm.findAll('.js-dropdown-item').length).toEqual(1);
- expect(vm.find('.js-dropdown-item').text()).toEqual(secondItem.name);
+ return vm.vm.$nextTick().then(() => {
+ expect(vm.findAll('.js-dropdown-item').length).toEqual(1);
+ expect(vm.find('.js-dropdown-item').text()).toEqual(secondItem.name);
+ });
});
it('focuses dropdown search input when dropdown is displayed', () => {
@@ -178,6 +216,8 @@ describe('ClusterFormDropdown', () => {
$(dropdownEl).trigger('shown.bs.dropdown');
- expect(vm.find(DropdownSearchInput).props('focused')).toBe(true);
+ return vm.vm.$nextTick(() => {
+ expect(vm.find(DropdownSearchInput).props('focused')).toBe(true);
+ });
});
});
diff --git a/spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js b/spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js
index d3992c6751c..25034dcf5ad 100644
--- a/spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js
+++ b/spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js
@@ -5,7 +5,7 @@ import { GlFormCheckbox } from '@gitlab/ui';
import EksClusterConfigurationForm from '~/create_cluster/eks_cluster/components/eks_cluster_configuration_form.vue';
import eksClusterFormState from '~/create_cluster/eks_cluster/store/state';
-import clusterDropdownStoreState from '~/create_cluster/eks_cluster/store/cluster_dropdown/state';
+import clusterDropdownStoreState from '~/create_cluster/store/cluster_dropdown/state';
const localVue = createLocalVue();
localVue.use(Vuex);
@@ -197,7 +197,9 @@ describe('EksClusterConfigurationForm', () => {
it('sets RoleDropdown hasErrors to true when loading roles failed', () => {
rolesState.loadingItemsError = new Error();
- expect(findRoleDropdown().props('hasErrors')).toEqual(true);
+ return Vue.nextTick().then(() => {
+ expect(findRoleDropdown().props('hasErrors')).toEqual(true);
+ });
});
it('sets isLoadingRegions to RegionDropdown loading property', () => {
@@ -215,7 +217,9 @@ describe('EksClusterConfigurationForm', () => {
it('sets loadingRegionsError to RegionDropdown error property', () => {
regionsState.loadingItemsError = new Error();
- expect(findRegionDropdown().props('hasErrors')).toEqual(true);
+ return Vue.nextTick().then(() => {
+ expect(findRegionDropdown().props('hasErrors')).toEqual(true);
+ });
});
it('disables KeyPairDropdown when no region is selected', () => {
@@ -245,7 +249,9 @@ describe('EksClusterConfigurationForm', () => {
it('sets KeyPairDropdown hasErrors to true when loading key pairs fails', () => {
keyPairsState.loadingItemsError = new Error();
- expect(findKeyPairDropdown().props('hasErrors')).toEqual(true);
+ return Vue.nextTick().then(() => {
+ expect(findKeyPairDropdown().props('hasErrors')).toEqual(true);
+ });
});
it('disables VpcDropdown when no region is selected', () => {
@@ -275,7 +281,9 @@ describe('EksClusterConfigurationForm', () => {
it('sets VpcDropdown hasErrors to true when loading vpcs fails', () => {
vpcsState.loadingItemsError = new Error();
- expect(findVpcDropdown().props('hasErrors')).toEqual(true);
+ return Vue.nextTick().then(() => {
+ expect(findVpcDropdown().props('hasErrors')).toEqual(true);
+ });
});
it('disables SubnetDropdown when no vpc is selected', () => {
@@ -305,7 +313,9 @@ describe('EksClusterConfigurationForm', () => {
it('sets SubnetDropdown hasErrors to true when loading subnets fails', () => {
subnetsState.loadingItemsError = new Error();
- expect(findSubnetDropdown().props('hasErrors')).toEqual(true);
+ return Vue.nextTick().then(() => {
+ expect(findSubnetDropdown().props('hasErrors')).toEqual(true);
+ });
});
it('disables SecurityGroupDropdown when no vpc is selected', () => {
@@ -335,7 +345,9 @@ describe('EksClusterConfigurationForm', () => {
it('sets SecurityGroupDropdown hasErrors to true when loading security groups fails', () => {
securityGroupsState.loadingItemsError = new Error();
- expect(findSecurityGroupDropdown().props('hasErrors')).toEqual(true);
+ return Vue.nextTick().then(() => {
+ expect(findSecurityGroupDropdown().props('hasErrors')).toEqual(true);
+ });
});
describe('when region is selected', () => {
diff --git a/spec/frontend/create_cluster/eks_cluster/components/service_credentials_form_spec.js b/spec/frontend/create_cluster/eks_cluster/components/service_credentials_form_spec.js
index 0be723b48f0..c58638f5c80 100644
--- a/spec/frontend/create_cluster/eks_cluster/components/service_credentials_form_spec.js
+++ b/spec/frontend/create_cluster/eks_cluster/components/service_credentials_form_spec.js
@@ -47,7 +47,6 @@ describe('ServiceCredentialsForm', () => {
const findCopyExternalIdButton = () => vm.find('.js-copy-external-id-button');
const findInvalidCredentials = () => vm.find('.js-invalid-credentials');
const findSubmitButton = () => vm.find(LoadingButton);
- const findForm = () => vm.find('form[name="service-credentials-form"]');
it('displays provided account id', () => {
expect(findAccountIdInput().attributes('value')).toBe(accountId);
@@ -72,11 +71,15 @@ describe('ServiceCredentialsForm', () => {
it('enables submit button when role ARN is provided', () => {
vm.setData({ roleArn: '123' });
- expect(findSubmitButton().attributes('disabled')).toBeFalsy();
+ return vm.vm.$nextTick().then(() => {
+ expect(findSubmitButton().attributes('disabled')).toBeFalsy();
+ });
});
- it('dispatches createRole action when form is submitted', () => {
- findForm().trigger('submit');
+ it('dispatches createRole action when submit button is clicked', () => {
+ vm.setData({ roleArn: '123' }); // set role ARN to enable button
+
+ findSubmitButton().vm.$emit('click', new Event('click'));
expect(createRoleAction).toHaveBeenCalled();
});
@@ -86,6 +89,8 @@ describe('ServiceCredentialsForm', () => {
vm.setData({ roleArn: '123' }); // set role ARN to enable button
state.isCreatingRole = true;
+
+ return vm.vm.$nextTick();
});
it('disables submit button', () => {
diff --git a/spec/frontend/create_cluster/gke_cluster/components/gke_machine_type_dropdown_spec.js b/spec/frontend/create_cluster/gke_cluster/components/gke_machine_type_dropdown_spec.js
new file mode 100644
index 00000000000..57ef74f0119
--- /dev/null
+++ b/spec/frontend/create_cluster/gke_cluster/components/gke_machine_type_dropdown_spec.js
@@ -0,0 +1,135 @@
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import Vuex from 'vuex';
+import { selectedMachineTypeMock, gapiMachineTypesResponseMock } from '../mock_data';
+import createState from '~/create_cluster/gke_cluster/store/state';
+import DropdownButton from '~/vue_shared/components/dropdown/dropdown_button.vue';
+import DropdownHiddenInput from '~/vue_shared/components/dropdown/dropdown_hidden_input.vue';
+import GkeMachineTypeDropdown from '~/create_cluster/gke_cluster/components/gke_machine_type_dropdown.vue';
+
+const componentConfig = {
+ fieldId: 'cluster_provider_gcp_attributes_gcp_machine_type',
+ fieldName: 'cluster[provider_gcp_attributes][gcp_machine_type]',
+};
+const setMachineType = jest.fn();
+
+const LABELS = {
+ LOADING: 'Fetching machine types',
+ DISABLED_NO_PROJECT: 'Select project and zone to choose machine type',
+ DISABLED_NO_ZONE: 'Select zone to choose machine type',
+ DEFAULT: 'Select machine type',
+};
+
+const localVue = createLocalVue();
+
+localVue.use(Vuex);
+
+const createComponent = (store, propsData = componentConfig) =>
+ shallowMount(GkeMachineTypeDropdown, {
+ propsData,
+ store,
+ localVue,
+ });
+
+const createStore = (initialState = {}, getters = {}) =>
+ new Vuex.Store({
+ state: {
+ ...createState(),
+ ...initialState,
+ },
+ getters: {
+ hasZone: () => false,
+ ...getters,
+ },
+ actions: {
+ setMachineType,
+ },
+ });
+
+describe('GkeMachineTypeDropdown', () => {
+ let wrapper;
+ let store;
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const dropdownButtonLabel = () => wrapper.find(DropdownButton).props('toggleText');
+ const dropdownHiddenInputValue = () => wrapper.find(DropdownHiddenInput).props('value');
+
+ describe('shows various toggle text depending on state', () => {
+ it('returns disabled state toggle text when no project and zone are selected', () => {
+ store = createStore({
+ projectHasBillingEnabled: false,
+ });
+ wrapper = createComponent(store);
+
+ expect(dropdownButtonLabel()).toBe(LABELS.DISABLED_NO_PROJECT);
+ });
+
+ it('returns disabled state toggle text when no zone is selected', () => {
+ store = createStore({
+ projectHasBillingEnabled: true,
+ });
+ wrapper = createComponent(store);
+
+ expect(dropdownButtonLabel()).toBe(LABELS.DISABLED_NO_ZONE);
+ });
+
+ it('returns loading toggle text', () => {
+ store = createStore();
+ wrapper = createComponent(store);
+
+ wrapper.setData({ isLoading: true });
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(dropdownButtonLabel()).toBe(LABELS.LOADING);
+ });
+ });
+
+ it('returns default toggle text', () => {
+ store = createStore(
+ {
+ projectHasBillingEnabled: true,
+ },
+ { hasZone: () => true },
+ );
+ wrapper = createComponent(store);
+
+ expect(dropdownButtonLabel()).toBe(LABELS.DEFAULT);
+ });
+
+ it('returns machine type name if machine type selected', () => {
+ store = createStore(
+ {
+ projectHasBillingEnabled: true,
+ selectedMachineType: selectedMachineTypeMock,
+ },
+ { hasZone: () => true },
+ );
+ wrapper = createComponent(store);
+
+ expect(dropdownButtonLabel()).toBe(selectedMachineTypeMock);
+ });
+ });
+
+ describe('form input', () => {
+ it('reflects new value when dropdown item is clicked', () => {
+ store = createStore({
+ machineTypes: gapiMachineTypesResponseMock.items,
+ });
+ wrapper = createComponent(store);
+
+ expect(dropdownHiddenInputValue()).toBe('');
+
+ wrapper.find('.dropdown-content button').trigger('click');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(setMachineType).toHaveBeenCalledWith(
+ expect.anything(),
+ selectedMachineTypeMock,
+ undefined,
+ );
+ });
+ });
+ });
+});
diff --git a/spec/frontend/create_cluster/gke_cluster/components/gke_network_dropdown_spec.js b/spec/frontend/create_cluster/gke_cluster/components/gke_network_dropdown_spec.js
new file mode 100644
index 00000000000..1df583af711
--- /dev/null
+++ b/spec/frontend/create_cluster/gke_cluster/components/gke_network_dropdown_spec.js
@@ -0,0 +1,143 @@
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import GkeNetworkDropdown from '~/create_cluster/gke_cluster/components/gke_network_dropdown.vue';
+import ClusterFormDropdown from '~/create_cluster/components/cluster_form_dropdown.vue';
+import createClusterDropdownState from '~/create_cluster/store/cluster_dropdown/state';
+
+const localVue = createLocalVue();
+
+localVue.use(Vuex);
+
+describe('GkeNetworkDropdown', () => {
+ let wrapper;
+ let store;
+ const defaultProps = { fieldName: 'field-name' };
+ const selectedNetwork = { selfLink: '123456' };
+ const projectId = '6789';
+ const region = 'east-1';
+ const setNetwork = jest.fn();
+ const setSubnetwork = jest.fn();
+ const fetchSubnetworks = jest.fn();
+
+ const buildStore = ({ clusterDropdownState } = {}) =>
+ new Vuex.Store({
+ state: {
+ selectedNetwork,
+ },
+ actions: {
+ setNetwork,
+ setSubnetwork,
+ },
+ getters: {
+ hasZone: () => false,
+ region: () => region,
+ projectId: () => projectId,
+ },
+ modules: {
+ networks: {
+ namespaced: true,
+ state: {
+ ...createClusterDropdownState(),
+ ...(clusterDropdownState || {}),
+ },
+ },
+ subnetworks: {
+ namespaced: true,
+ actions: {
+ fetchItems: fetchSubnetworks,
+ },
+ },
+ },
+ });
+
+ const buildWrapper = (propsData = defaultProps) =>
+ shallowMount(GkeNetworkDropdown, {
+ propsData,
+ store,
+ localVue,
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('sets correct field-name', () => {
+ const fieldName = 'field-name';
+
+ store = buildStore();
+ wrapper = buildWrapper({ fieldName });
+
+ expect(wrapper.find(ClusterFormDropdown).props('fieldName')).toBe(fieldName);
+ });
+
+ it('sets selected network as the dropdown value', () => {
+ store = buildStore();
+ wrapper = buildWrapper();
+
+ expect(wrapper.find(ClusterFormDropdown).props('value')).toBe(selectedNetwork);
+ });
+
+ it('maps networks store items to the dropdown items property', () => {
+ const items = [{ name: 'network' }];
+
+ store = buildStore({ clusterDropdownState: { items } });
+ wrapper = buildWrapper();
+
+ expect(wrapper.find(ClusterFormDropdown).props('items')).toBe(items);
+ });
+
+ describe('when network dropdown store is loading items', () => {
+ it('sets network dropdown as loading', () => {
+ store = buildStore({ clusterDropdownState: { isLoadingItems: true } });
+ wrapper = buildWrapper();
+
+ expect(wrapper.find(ClusterFormDropdown).props('loading')).toBe(true);
+ });
+ });
+
+ describe('when there is no selected zone', () => {
+ it('disables the network dropdown', () => {
+ store = buildStore();
+ wrapper = buildWrapper();
+
+ expect(wrapper.find(ClusterFormDropdown).props('disabled')).toBe(true);
+ });
+ });
+
+ describe('when an error occurs while loading networks', () => {
+ it('sets the network dropdown as having errors', () => {
+ store = buildStore({ clusterDropdownState: { loadingItemsError: new Error() } });
+ wrapper = buildWrapper();
+
+ expect(wrapper.find(ClusterFormDropdown).props('hasErrors')).toBe(true);
+ });
+ });
+
+ describe('when dropdown emits input event', () => {
+ beforeEach(() => {
+ store = buildStore();
+ wrapper = buildWrapper();
+ wrapper.find(ClusterFormDropdown).vm.$emit('input', selectedNetwork);
+ });
+
+ it('cleans selected subnetwork', () => {
+ expect(setSubnetwork).toHaveBeenCalledWith(expect.anything(), '', undefined);
+ });
+
+ it('dispatches the setNetwork action', () => {
+ expect(setNetwork).toHaveBeenCalledWith(expect.anything(), selectedNetwork, undefined);
+ });
+
+ it('fetches subnetworks for the selected project, region, and network', () => {
+ expect(fetchSubnetworks).toHaveBeenCalledWith(
+ expect.anything(),
+ {
+ project: projectId,
+ region,
+ network: selectedNetwork.selfLink,
+ },
+ undefined,
+ );
+ });
+ });
+});
diff --git a/spec/frontend/create_cluster/gke_cluster/components/gke_project_id_dropdown_spec.js b/spec/frontend/create_cluster/gke_cluster/components/gke_project_id_dropdown_spec.js
new file mode 100644
index 00000000000..0d429778a44
--- /dev/null
+++ b/spec/frontend/create_cluster/gke_cluster/components/gke_project_id_dropdown_spec.js
@@ -0,0 +1,138 @@
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import Vuex from 'vuex';
+import createState from '~/create_cluster/gke_cluster/store/state';
+import { selectedProjectMock, gapiProjectsResponseMock } from '../mock_data';
+import GkeProjectIdDropdown from '~/create_cluster/gke_cluster/components/gke_project_id_dropdown.vue';
+import DropdownButton from '~/vue_shared/components/dropdown/dropdown_button.vue';
+import DropdownHiddenInput from '~/vue_shared/components/dropdown/dropdown_hidden_input.vue';
+
+const componentConfig = {
+ docsUrl: 'https://console.cloud.google.com/home/dashboard',
+ fieldId: 'cluster_provider_gcp_attributes_gcp_project_id',
+ fieldName: 'cluster[provider_gcp_attributes][gcp_project_id]',
+};
+
+const LABELS = {
+ LOADING: 'Fetching projects',
+ VALIDATING_PROJECT_BILLING: 'Validating project billing status',
+ DEFAULT: 'Select project',
+ EMPTY: 'No projects found',
+};
+
+const localVue = createLocalVue();
+
+localVue.use(Vuex);
+
+describe('GkeProjectIdDropdown', () => {
+ let wrapper;
+ let vuexStore;
+ let setProject;
+
+ beforeEach(() => {
+ setProject = jest.fn();
+ });
+
+ const createStore = (initialState = {}, getters = {}) =>
+ new Vuex.Store({
+ state: {
+ ...createState(),
+ ...initialState,
+ },
+ actions: {
+ fetchProjects: jest.fn().mockResolvedValueOnce([]),
+ setProject,
+ },
+ getters: {
+ hasProject: () => false,
+ ...getters,
+ },
+ });
+
+ const createComponent = (store, propsData = componentConfig) =>
+ shallowMount(GkeProjectIdDropdown, {
+ propsData,
+ store,
+ localVue,
+ });
+
+ const bootstrap = (initialState, getters) => {
+ vuexStore = createStore(initialState, getters);
+ wrapper = createComponent(vuexStore);
+ };
+
+ const dropdownButtonLabel = () => wrapper.find(DropdownButton).props('toggleText');
+ const dropdownHiddenInputValue = () => wrapper.find(DropdownHiddenInput).props('value');
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('toggleText', () => {
+ it('returns loading toggle text', () => {
+ bootstrap();
+
+ expect(dropdownButtonLabel()).toBe(LABELS.LOADING);
+ });
+
+ it('returns project billing validation text', () => {
+ bootstrap({ isValidatingProjectBilling: true });
+
+ expect(dropdownButtonLabel()).toBe(LABELS.VALIDATING_PROJECT_BILLING);
+ });
+
+ it('returns default toggle text', () => {
+ bootstrap();
+
+ wrapper.setData({ isLoading: false });
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(dropdownButtonLabel()).toBe(LABELS.DEFAULT);
+ });
+ });
+
+ it('returns project name if project selected', () => {
+ bootstrap(
+ {
+ selectedProject: selectedProjectMock,
+ },
+ {
+ hasProject: () => true,
+ },
+ );
+ wrapper.setData({ isLoading: false });
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(dropdownButtonLabel()).toBe(selectedProjectMock.name);
+ });
+ });
+
+ it('returns empty toggle text', () => {
+ bootstrap({
+ projects: null,
+ });
+ wrapper.setData({ isLoading: false });
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(dropdownButtonLabel()).toBe(LABELS.EMPTY);
+ });
+ });
+ });
+
+ describe('selectItem', () => {
+ it('reflects new value when dropdown item is clicked', () => {
+ bootstrap({ projects: gapiProjectsResponseMock.projects });
+
+ expect(dropdownHiddenInputValue()).toBe('');
+
+ wrapper.find('.dropdown-content button').trigger('click');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(setProject).toHaveBeenCalledWith(
+ expect.anything(),
+ gapiProjectsResponseMock.projects[0],
+ undefined,
+ );
+ });
+ });
+ });
+});
diff --git a/spec/frontend/create_cluster/gke_cluster/components/gke_subnetwork_dropdown_spec.js b/spec/frontend/create_cluster/gke_cluster/components/gke_subnetwork_dropdown_spec.js
new file mode 100644
index 00000000000..a1dc3960fe9
--- /dev/null
+++ b/spec/frontend/create_cluster/gke_cluster/components/gke_subnetwork_dropdown_spec.js
@@ -0,0 +1,113 @@
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import GkeSubnetworkDropdown from '~/create_cluster/gke_cluster/components/gke_subnetwork_dropdown.vue';
+import ClusterFormDropdown from '~/create_cluster/components/cluster_form_dropdown.vue';
+import createClusterDropdownState from '~/create_cluster/store/cluster_dropdown/state';
+
+const localVue = createLocalVue();
+
+localVue.use(Vuex);
+
+describe('GkeSubnetworkDropdown', () => {
+ let wrapper;
+ let store;
+ const defaultProps = { fieldName: 'field-name' };
+ const selectedSubnetwork = '123456';
+ const setSubnetwork = jest.fn();
+
+ const buildStore = ({ clusterDropdownState } = {}) =>
+ new Vuex.Store({
+ state: {
+ selectedSubnetwork,
+ },
+ actions: {
+ setSubnetwork,
+ },
+ getters: {
+ hasNetwork: () => false,
+ },
+ modules: {
+ subnetworks: {
+ namespaced: true,
+ state: {
+ ...createClusterDropdownState(),
+ ...(clusterDropdownState || {}),
+ },
+ },
+ },
+ });
+
+ const buildWrapper = (propsData = defaultProps) =>
+ shallowMount(GkeSubnetworkDropdown, {
+ propsData,
+ store,
+ localVue,
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('sets correct field-name', () => {
+ const fieldName = 'field-name';
+
+ store = buildStore();
+ wrapper = buildWrapper({ fieldName });
+
+ expect(wrapper.find(ClusterFormDropdown).props('fieldName')).toBe(fieldName);
+ });
+
+ it('sets selected subnetwork as the dropdown value', () => {
+ store = buildStore();
+ wrapper = buildWrapper();
+
+ expect(wrapper.find(ClusterFormDropdown).props('value')).toBe(selectedSubnetwork);
+ });
+
+ it('maps subnetworks store items to the dropdown items property', () => {
+ const items = [{ name: 'subnetwork' }];
+
+ store = buildStore({ clusterDropdownState: { items } });
+ wrapper = buildWrapper();
+
+ expect(wrapper.find(ClusterFormDropdown).props('items')).toBe(items);
+ });
+
+ describe('when subnetwork dropdown store is loading items', () => {
+ it('sets subnetwork dropdown as loading', () => {
+ store = buildStore({ clusterDropdownState: { isLoadingItems: true } });
+ wrapper = buildWrapper();
+
+ expect(wrapper.find(ClusterFormDropdown).props('loading')).toBe(true);
+ });
+ });
+
+ describe('when there is no selected network', () => {
+ it('disables the subnetwork dropdown', () => {
+ store = buildStore();
+ wrapper = buildWrapper();
+
+ expect(wrapper.find(ClusterFormDropdown).props('disabled')).toBe(true);
+ });
+ });
+
+ describe('when an error occurs while loading subnetworks', () => {
+ it('sets the subnetwork dropdown as having errors', () => {
+ store = buildStore({ clusterDropdownState: { loadingItemsError: new Error() } });
+ wrapper = buildWrapper();
+
+ expect(wrapper.find(ClusterFormDropdown).props('hasErrors')).toBe(true);
+ });
+ });
+
+ describe('when dropdown emits input event', () => {
+ it('dispatches the setSubnetwork action', () => {
+ store = buildStore();
+ wrapper = buildWrapper();
+
+ wrapper.find(ClusterFormDropdown).vm.$emit('input', selectedSubnetwork);
+
+ expect(setSubnetwork).toHaveBeenCalledWith(expect.anything(), selectedSubnetwork, undefined);
+ });
+ });
+});
diff --git a/spec/frontend/create_cluster/gke_cluster/mock_data.js b/spec/frontend/create_cluster/gke_cluster/mock_data.js
new file mode 100644
index 00000000000..d9f5dbc636f
--- /dev/null
+++ b/spec/frontend/create_cluster/gke_cluster/mock_data.js
@@ -0,0 +1,75 @@
+export const emptyProjectMock = {
+ projectId: '',
+ name: '',
+};
+
+export const selectedProjectMock = {
+ projectId: 'gcp-project-123',
+ name: 'gcp-project',
+};
+
+export const selectedZoneMock = 'us-central1-a';
+
+export const selectedMachineTypeMock = 'n1-standard-2';
+
+export const gapiProjectsResponseMock = {
+ projects: [
+ {
+ projectNumber: '1234',
+ projectId: 'gcp-project-123',
+ lifecycleState: 'ACTIVE',
+ name: 'gcp-project',
+ createTime: '2017-12-16T01:48:29.129Z',
+ parent: {
+ type: 'organization',
+ id: '12345',
+ },
+ },
+ ],
+};
+
+export const gapiZonesResponseMock = {
+ kind: 'compute#zoneList',
+ id: 'projects/gitlab-internal-153318/zones',
+ items: [
+ {
+ kind: 'compute#zone',
+ id: '2000',
+ creationTimestamp: '1969-12-31T16:00:00.000-08:00',
+ name: 'us-central1-a',
+ description: 'us-central1-a',
+ status: 'UP',
+ region:
+ 'https://www.googleapis.com/compute/v1/projects/gitlab-internal-153318/regions/us-central1',
+ selfLink:
+ 'https://www.googleapis.com/compute/v1/projects/gitlab-internal-153318/zones/us-central1-a',
+ availableCpuPlatforms: ['Intel Skylake', 'Intel Broadwell', 'Intel Sandy Bridge'],
+ },
+ ],
+ selfLink: 'https://www.googleapis.com/compute/v1/projects/gitlab-internal-153318/zones',
+};
+
+export const gapiMachineTypesResponseMock = {
+ kind: 'compute#machineTypeList',
+ id: 'projects/gitlab-internal-153318/zones/us-central1-a/machineTypes',
+ items: [
+ {
+ kind: 'compute#machineType',
+ id: '3002',
+ creationTimestamp: '1969-12-31T16:00:00.000-08:00',
+ name: 'n1-standard-2',
+ description: '2 vCPUs, 7.5 GB RAM',
+ guestCpus: 2,
+ memoryMb: 7680,
+ imageSpaceGb: 10,
+ maximumPersistentDisks: 64,
+ maximumPersistentDisksSizeGb: '65536',
+ zone: 'us-central1-a',
+ selfLink:
+ 'https://www.googleapis.com/compute/v1/projects/gitlab-internal-153318/zones/us-central1-a/machineTypes/n1-standard-2',
+ isSharedCpu: false,
+ },
+ ],
+ selfLink:
+ 'https://www.googleapis.com/compute/v1/projects/gitlab-internal-153318/zones/us-central1-a/machineTypes',
+};
diff --git a/spec/frontend/create_cluster/eks_cluster/store/cluster_dropdown/actions_spec.js b/spec/frontend/create_cluster/store/cluster_dropdown/actions_spec.js
index 58f8855a64c..014b527161f 100644
--- a/spec/frontend/create_cluster/eks_cluster/store/cluster_dropdown/actions_spec.js
+++ b/spec/frontend/create_cluster/store/cluster_dropdown/actions_spec.js
@@ -1,8 +1,8 @@
import testAction from 'helpers/vuex_action_helper';
-import createState from '~/create_cluster/eks_cluster/store/cluster_dropdown/state';
-import * as types from '~/create_cluster/eks_cluster/store/cluster_dropdown/mutation_types';
-import actionsFactory from '~/create_cluster/eks_cluster/store/cluster_dropdown/actions';
+import createState from '~/create_cluster/store/cluster_dropdown/state';
+import * as types from '~/create_cluster/store/cluster_dropdown/mutation_types';
+import actionsFactory from '~/create_cluster/store/cluster_dropdown/actions';
describe('Cluster dropdown Store Actions', () => {
const items = [{ name: 'item 1' }];
diff --git a/spec/frontend/create_cluster/eks_cluster/store/cluster_dropdown/mutations_spec.js b/spec/frontend/create_cluster/store/cluster_dropdown/mutations_spec.js
index 0665047edea..5edd237133d 100644
--- a/spec/frontend/create_cluster/eks_cluster/store/cluster_dropdown/mutations_spec.js
+++ b/spec/frontend/create_cluster/store/cluster_dropdown/mutations_spec.js
@@ -2,9 +2,9 @@ import {
REQUEST_ITEMS,
RECEIVE_ITEMS_SUCCESS,
RECEIVE_ITEMS_ERROR,
-} from '~/create_cluster/eks_cluster/store/cluster_dropdown/mutation_types';
-import createState from '~/create_cluster/eks_cluster/store/cluster_dropdown/state';
-import mutations from '~/create_cluster/eks_cluster/store/cluster_dropdown/mutations';
+} from '~/create_cluster/store/cluster_dropdown/mutation_types';
+import createState from '~/create_cluster/store/cluster_dropdown/state';
+import mutations from '~/create_cluster/store/cluster_dropdown/mutations';
describe('Cluster dropdown store mutations', () => {
let state;
diff --git a/spec/frontend/cycle_analytics/limit_warning_component_spec.js b/spec/frontend/cycle_analytics/limit_warning_component_spec.js
index 5041ebe1a8b..e712dea67cb 100644
--- a/spec/frontend/cycle_analytics/limit_warning_component_spec.js
+++ b/spec/frontend/cycle_analytics/limit_warning_component_spec.js
@@ -10,8 +10,6 @@ const createComponent = props =>
propsData: {
...props,
},
- sync: false,
- attachToDocument: true,
});
describe('Limit warning component', () => {
diff --git a/spec/frontend/cycle_analytics/stage_nav_item_spec.js b/spec/frontend/cycle_analytics/stage_nav_item_spec.js
index a7a1d563e1e..480bb756731 100644
--- a/spec/frontend/cycle_analytics/stage_nav_item_spec.js
+++ b/spec/frontend/cycle_analytics/stage_nav_item_spec.js
@@ -92,7 +92,9 @@ describe('StageNavItem', () => {
it('emits the `select` event when clicked', () => {
expect(wrapper.emitted().select).toBeUndefined();
wrapper.trigger('click');
- expect(wrapper.emitted().select.length).toBe(1);
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.emitted().select.length).toBe(1);
+ });
});
});
diff --git a/spec/frontend/diffs/components/compare_versions_spec.js b/spec/frontend/diffs/components/compare_versions_spec.js
index 9900fcdb6e1..ff92a12eaf6 100644
--- a/spec/frontend/diffs/components/compare_versions_spec.js
+++ b/spec/frontend/diffs/components/compare_versions_spec.js
@@ -22,13 +22,12 @@ describe('CompareVersions', () => {
store.state.diffs.diffFiles.push('test');
wrapper = mount(CompareVersionsComponent, {
- sync: false,
- attachToDocument: true,
localVue,
store,
propsData: {
mergeRequestDiffs: diffsMockData,
mergeRequestDiff: diffsMockData[0],
+ diffFilesLength: 0,
targetBranch,
...props,
},
@@ -49,9 +48,8 @@ describe('CompareVersions', () => {
const treeListBtn = wrapper.find('.js-toggle-tree-list');
expect(treeListBtn.exists()).toBe(true);
- expect(treeListBtn.attributes('data-original-title')).toBe('Hide file browser');
- expect(treeListBtn.findAll(Icon).length).not.toBe(0);
- expect(treeListBtn.find(Icon).props('name')).toBe('collapse-left');
+ expect(treeListBtn.attributes('title')).toBe('Hide file browser');
+ expect(treeListBtn.find(Icon).props('name')).toBe('file-tree');
});
it('should render comparison dropdowns with correct values', () => {
diff --git a/spec/frontend/diffs/components/diff_content_spec.js b/spec/frontend/diffs/components/diff_content_spec.js
index b0dd25f746b..979c67787f7 100644
--- a/spec/frontend/diffs/components/diff_content_spec.js
+++ b/spec/frontend/diffs/components/diff_content_spec.js
@@ -84,7 +84,6 @@ describe('DiffContent', () => {
},
localVue,
store: fakeStore,
- sync: false,
});
};
diff --git a/spec/frontend/diffs/components/diff_discussion_reply_spec.js b/spec/frontend/diffs/components/diff_discussion_reply_spec.js
index 28689ab07de..9443a441ec2 100644
--- a/spec/frontend/diffs/components/diff_discussion_reply_spec.js
+++ b/spec/frontend/diffs/components/diff_discussion_reply_spec.js
@@ -16,7 +16,6 @@ describe('DiffDiscussionReply', () => {
wrapper = shallowMount(DiffDiscussionReply, {
store,
localVue,
- sync: false,
propsData: {
...props,
},
diff --git a/spec/frontend/diffs/components/diff_file_header_spec.js b/spec/frontend/diffs/components/diff_file_header_spec.js
index 48fd6dd6f58..e0b7e0bc0f3 100644
--- a/spec/frontend/diffs/components/diff_file_header_spec.js
+++ b/spec/frontend/diffs/components/diff_file_header_spec.js
@@ -91,8 +91,6 @@ describe('DiffFileHeader component', () => {
},
localVue,
store,
- sync: false,
- attachToDocument: true,
});
};
@@ -117,19 +115,27 @@ describe('DiffFileHeader component', () => {
it('when header is clicked emits toggleFile', () => {
createComponent();
findHeader().trigger('click');
- expect(wrapper.emitted().toggleFile).toBeDefined();
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted().toggleFile).toBeDefined();
+ });
});
it('when collapseIcon is clicked emits toggleFile', () => {
createComponent({ collapsible: true });
findCollapseIcon().vm.$emit('click', new Event('click'));
- expect(wrapper.emitted().toggleFile).toBeDefined();
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted().toggleFile).toBeDefined();
+ });
});
it('when other element in header is clicked does not emits toggleFile', () => {
createComponent({ collapsible: true });
findTitleLink().trigger('click');
- expect(wrapper.emitted().toggleFile).not.toBeDefined();
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted().toggleFile).not.toBeDefined();
+ });
});
it('displays a copy to clipboard button', () => {
@@ -194,7 +200,9 @@ describe('DiffFileHeader component', () => {
addMergeRequestButtons: true,
});
wrapper.find(EditButton).vm.$emit('showForkMessage');
- expect(wrapper.emitted().showForkMessage).toBeDefined();
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted().showForkMessage).toBeDefined();
+ });
});
it('for mode_changed file mode displays mode changes', () => {
@@ -329,7 +337,7 @@ describe('DiffFileHeader component', () => {
addMergeRequestButtons: true,
});
expect(findViewFileButton().attributes('href')).toBe(viewPath);
- expect(findViewFileButton().attributes('data-original-title')).toEqual(
+ expect(findViewFileButton().attributes('title')).toEqual(
`View file @ ${diffFile.content_sha.substr(0, 8)}`,
);
});
diff --git a/spec/frontend/diffs/components/diff_gutter_avatars_spec.js b/spec/frontend/diffs/components/diff_gutter_avatars_spec.js
index b2debe36b89..4d8345d494d 100644
--- a/spec/frontend/diffs/components/diff_gutter_avatars_spec.js
+++ b/spec/frontend/diffs/components/diff_gutter_avatars_spec.js
@@ -1,8 +1,7 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import DiffGutterAvatars from '~/diffs/components/diff_gutter_avatars.vue';
import discussionsMockData from '../mock_data/diff_discussions';
-const localVue = createLocalVue();
const getDiscussionsMockData = () => [Object.assign({}, discussionsMockData)];
describe('DiffGutterAvatars', () => {
@@ -14,12 +13,9 @@ describe('DiffGutterAvatars', () => {
const createComponent = (props = {}) => {
wrapper = shallowMount(DiffGutterAvatars, {
- localVue,
propsData: {
...props,
},
- sync: false,
- attachToDocument: true,
});
};
@@ -42,7 +38,9 @@ describe('DiffGutterAvatars', () => {
it('should emit toggleDiscussions event on button click', () => {
findCollapseButton().trigger('click');
- expect(wrapper.emitted().toggleLineDiscussions).toBeTruthy();
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted().toggleLineDiscussions).toBeTruthy();
+ });
});
});
@@ -72,13 +70,17 @@ describe('DiffGutterAvatars', () => {
.at(0)
.trigger('click');
- expect(wrapper.emitted().toggleLineDiscussions).toBeTruthy();
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted().toggleLineDiscussions).toBeTruthy();
+ });
});
it('should emit toggleDiscussions event on more count text click', () => {
findMoreCount().trigger('click');
- expect(wrapper.emitted().toggleLineDiscussions).toBeTruthy();
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted().toggleLineDiscussions).toBeTruthy();
+ });
});
});
diff --git a/spec/frontend/diffs/components/diff_stats_spec.js b/spec/frontend/diffs/components/diff_stats_spec.js
index 984b3026209..aa5c7f6278a 100644
--- a/spec/frontend/diffs/components/diff_stats_spec.js
+++ b/spec/frontend/diffs/components/diff_stats_spec.js
@@ -22,12 +22,12 @@ describe('diff_stats', () => {
diffFilesLength: 300,
},
});
- const additions = wrapper.find('icon-stub[name="file-addition"]').element.parentNode;
- const deletions = wrapper.find('icon-stub[name="file-deletion"]').element.parentNode;
- const filesChanged = wrapper.find('icon-stub[name="doc-code"]').element.parentNode;
- expect(additions.textContent).toContain('100');
- expect(deletions.textContent).toContain('200');
- expect(filesChanged.textContent).toContain('300');
+ const findFileLine = name => wrapper.find(name);
+ const additions = findFileLine('.js-file-addition-line');
+ const deletions = findFileLine('.js-file-deletion-line');
+
+ expect(additions.text()).toBe('100');
+ expect(deletions.text()).toBe('200');
});
});
diff --git a/spec/frontend/diffs/components/edit_button_spec.js b/spec/frontend/diffs/components/edit_button_spec.js
index 4e2cfc75212..f9a1d4a84a8 100644
--- a/spec/frontend/diffs/components/edit_button_spec.js
+++ b/spec/frontend/diffs/components/edit_button_spec.js
@@ -1,7 +1,6 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import EditButton from '~/diffs/components/edit_button.vue';
-const localVue = createLocalVue();
const editPath = 'test-path';
describe('EditButton', () => {
@@ -9,10 +8,7 @@ describe('EditButton', () => {
const createComponent = (props = {}) => {
wrapper = shallowMount(EditButton, {
- localVue,
propsData: { ...props },
- sync: false,
- attachToDocument: true,
});
};
@@ -36,7 +32,9 @@ describe('EditButton', () => {
});
wrapper.trigger('click');
- expect(wrapper.emitted('showForkMessage')).toBeTruthy();
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted('showForkMessage')).toBeTruthy();
+ });
});
it('doesnt emit a show fork message event if current user cannot fork', () => {
@@ -46,7 +44,9 @@ describe('EditButton', () => {
});
wrapper.trigger('click');
- expect(wrapper.emitted('showForkMessage')).toBeFalsy();
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted('showForkMessage')).toBeFalsy();
+ });
});
it('doesnt emit a show fork message event if current user can modify blob', () => {
@@ -57,6 +57,8 @@ describe('EditButton', () => {
});
wrapper.trigger('click');
- expect(wrapper.emitted('showForkMessage')).toBeFalsy();
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted('showForkMessage')).toBeFalsy();
+ });
});
});
diff --git a/spec/frontend/diffs/components/hidden_files_warning_spec.js b/spec/frontend/diffs/components/hidden_files_warning_spec.js
index 5bf5ddd27bd..6fb4e4645f8 100644
--- a/spec/frontend/diffs/components/hidden_files_warning_spec.js
+++ b/spec/frontend/diffs/components/hidden_files_warning_spec.js
@@ -1,7 +1,6 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import HiddenFilesWarning from '~/diffs/components/hidden_files_warning.vue';
-const localVue = createLocalVue();
const propsData = {
total: '10',
visible: 5,
@@ -14,8 +13,6 @@ describe('HiddenFilesWarning', () => {
const createComponent = () => {
wrapper = shallowMount(HiddenFilesWarning, {
- localVue,
- sync: false,
propsData,
});
};
diff --git a/spec/frontend/diffs/components/no_changes_spec.js b/spec/frontend/diffs/components/no_changes_spec.js
index e45d34bf9d5..245651af61c 100644
--- a/spec/frontend/diffs/components/no_changes_spec.js
+++ b/spec/frontend/diffs/components/no_changes_spec.js
@@ -13,7 +13,7 @@ describe('Diff no changes empty state', () => {
const store = createStore();
extendStore(store);
- vm = shallowMount(localVue.extend(NoChanges), {
+ vm = shallowMount(NoChanges, {
localVue,
store,
propsData: {
diff --git a/spec/javascripts/diffs/components/settings_dropdown_spec.js b/spec/frontend/diffs/components/settings_dropdown_spec.js
index 6c08474ffd2..2e95d79ea49 100644
--- a/spec/javascripts/diffs/components/settings_dropdown_spec.js
+++ b/spec/frontend/diffs/components/settings_dropdown_spec.js
@@ -25,19 +25,18 @@ describe('Diff settiings dropdown component', () => {
extendStore(store);
- vm = mount(localVue.extend(SettingsDropdown), {
+ vm = mount(SettingsDropdown, {
localVue,
store,
- sync: false,
});
}
beforeEach(() => {
actions = {
- setInlineDiffViewType: jasmine.createSpy('setInlineDiffViewType'),
- setParallelDiffViewType: jasmine.createSpy('setParallelDiffViewType'),
- setRenderTreeList: jasmine.createSpy('setRenderTreeList'),
- setShowWhitespace: jasmine.createSpy('setShowWhitespace'),
+ setInlineDiffViewType: jest.fn(),
+ setParallelDiffViewType: jest.fn(),
+ setRenderTreeList: jest.fn(),
+ setShowWhitespace: jest.fn(),
};
});
@@ -51,7 +50,7 @@ describe('Diff settiings dropdown component', () => {
vm.find('.js-list-view').trigger('click');
- expect(actions.setRenderTreeList).toHaveBeenCalledWith(jasmine.anything(), false, undefined);
+ expect(actions.setRenderTreeList).toHaveBeenCalledWith(expect.anything(), false, undefined);
});
it('tree view button dispatches setRenderTreeList with true', () => {
@@ -59,7 +58,7 @@ describe('Diff settiings dropdown component', () => {
vm.find('.js-tree-view').trigger('click');
- expect(actions.setRenderTreeList).toHaveBeenCalledWith(jasmine.anything(), true, undefined);
+ expect(actions.setRenderTreeList).toHaveBeenCalledWith(expect.anything(), true, undefined);
});
it('sets list button as active when renderTreeList is false', () => {
@@ -155,7 +154,7 @@ describe('Diff settiings dropdown component', () => {
checkbox.trigger('change');
expect(actions.setShowWhitespace).toHaveBeenCalledWith(
- jasmine.anything(),
+ expect.anything(),
{
showWhitespace: true,
pushState: true,
diff --git a/spec/frontend/droplab/constants_spec.js b/spec/frontend/droplab/constants_spec.js
new file mode 100644
index 00000000000..fd48228d6a2
--- /dev/null
+++ b/spec/frontend/droplab/constants_spec.js
@@ -0,0 +1,39 @@
+import * as constants from '~/droplab/constants';
+
+describe('constants', () => {
+ describe('DATA_TRIGGER', () => {
+ it('should be `data-dropdown-trigger`', () => {
+ expect(constants.DATA_TRIGGER).toBe('data-dropdown-trigger');
+ });
+ });
+
+ describe('DATA_DROPDOWN', () => {
+ it('should be `data-dropdown`', () => {
+ expect(constants.DATA_DROPDOWN).toBe('data-dropdown');
+ });
+ });
+
+ describe('SELECTED_CLASS', () => {
+ it('should be `droplab-item-selected`', () => {
+ expect(constants.SELECTED_CLASS).toBe('droplab-item-selected');
+ });
+ });
+
+ describe('ACTIVE_CLASS', () => {
+ it('should be `droplab-item-active`', () => {
+ expect(constants.ACTIVE_CLASS).toBe('droplab-item-active');
+ });
+ });
+
+ describe('TEMPLATE_REGEX', () => {
+ it('should be a handlebars templating syntax regex', () => {
+ expect(constants.TEMPLATE_REGEX).toEqual(/\{\{(.+?)\}\}/g);
+ });
+ });
+
+ describe('IGNORE_CLASS', () => {
+ it('should be `droplab-item-ignore`', () => {
+ expect(constants.IGNORE_CLASS).toBe('droplab-item-ignore');
+ });
+ });
+});
diff --git a/spec/javascripts/droplab/plugins/ajax_filter_spec.js b/spec/frontend/droplab/plugins/ajax_filter_spec.js
index 5dbe50af07f..5ec0400cbc5 100644
--- a/spec/javascripts/droplab/plugins/ajax_filter_spec.js
+++ b/spec/frontend/droplab/plugins/ajax_filter_spec.js
@@ -28,10 +28,10 @@ describe('AjaxFilter', () => {
let ajaxSpy;
beforeEach(() => {
- spyOn(AjaxCache, 'retrieve').and.callFake(url => ajaxSpy(url));
- spyOn(AjaxFilter, '_loadData');
+ jest.spyOn(AjaxCache, 'retrieve').mockImplementation(url => ajaxSpy(url));
+ jest.spyOn(AjaxFilter, '_loadData').mockImplementation(() => {});
- dummyConfig.onLoadingFinished = jasmine.createSpy('spy');
+ dummyConfig.onLoadingFinished = jest.fn();
const dynamicList = document.createElement('div');
dynamicList.dataset.dynamic = true;
@@ -46,7 +46,7 @@ describe('AjaxFilter', () => {
AjaxFilter.trigger()
.then(() => {
- expect(dummyConfig.onLoadingFinished.calls.count()).toBe(1);
+ expect(dummyConfig.onLoadingFinished.mock.calls.length).toBe(1);
})
.then(done)
.catch(done.fail);
@@ -63,7 +63,7 @@ describe('AjaxFilter', () => {
.then(done.fail)
.catch(error => {
expect(error).toBe(dummyError);
- expect(dummyConfig.onLoadingFinished.calls.count()).toBe(0);
+ expect(dummyConfig.onLoadingFinished.mock.calls.length).toBe(0);
})
.then(done)
.catch(done.fail);
diff --git a/spec/javascripts/droplab/plugins/ajax_spec.js b/spec/frontend/droplab/plugins/ajax_spec.js
index 2f492d00c0a..1d7576ce420 100644
--- a/spec/javascripts/droplab/plugins/ajax_spec.js
+++ b/spec/frontend/droplab/plugins/ajax_spec.js
@@ -18,23 +18,23 @@ describe('Ajax', () => {
beforeEach(() => {
config.preprocessing = () => processedArray;
- spyOn(config, 'preprocessing').and.callFake(() => processedArray);
+ jest.spyOn(config, 'preprocessing').mockImplementation(() => processedArray);
});
it('calls preprocessing', () => {
Ajax.preprocessing(config, []);
- expect(config.preprocessing.calls.count()).toBe(1);
+ expect(config.preprocessing.mock.calls.length).toBe(1);
});
it('overrides AjaxCache', () => {
- spyOn(AjaxCache, 'override').and.callFake((endpoint, results) => {
+ jest.spyOn(AjaxCache, 'override').mockImplementation((endpoint, results) => {
expect(results).toEqual(processedArray);
});
Ajax.preprocessing(config, []);
- expect(AjaxCache.override.calls.count()).toBe(1);
+ expect(AjaxCache.override.mock.calls.length).toBe(1);
});
});
});
diff --git a/spec/frontend/environments/environment_item_spec.js b/spec/frontend/environments/environment_item_spec.js
index 52625c64a1c..004687fcf44 100644
--- a/spec/frontend/environments/environment_item_spec.js
+++ b/spec/frontend/environments/environment_item_spec.js
@@ -1,6 +1,8 @@
import { mount } from '@vue/test-utils';
import { format } from 'timeago.js';
import EnvironmentItem from '~/environments/components/environment_item.vue';
+import PinComponent from '~/environments/components/environment_pin.vue';
+
import { environment, folder, tableData } from './mock_data';
describe('Environment item', () => {
@@ -26,6 +28,8 @@ describe('Environment item', () => {
});
});
+ const findAutoStop = () => wrapper.find('.js-auto-stop');
+
afterEach(() => {
wrapper.destroy();
});
@@ -77,6 +81,79 @@ describe('Environment item', () => {
expect(wrapper.find('.js-commit-component')).toBeDefined();
});
});
+
+ describe('Without auto-stop date', () => {
+ beforeEach(() => {
+ factory({
+ propsData: {
+ model: environment,
+ canReadEnvironment: true,
+ tableData,
+ shouldShowAutoStopDate: true,
+ },
+ });
+ });
+
+ it('should not render a date', () => {
+ expect(findAutoStop().exists()).toBe(false);
+ });
+
+ it('should not render the auto-stop button', () => {
+ expect(wrapper.find(PinComponent).exists()).toBe(false);
+ });
+ });
+
+ describe('With auto-stop date', () => {
+ describe('in the future', () => {
+ const futureDate = new Date(Date.now() + 100000);
+ beforeEach(() => {
+ factory({
+ propsData: {
+ model: {
+ ...environment,
+ auto_stop_at: futureDate,
+ },
+ canReadEnvironment: true,
+ tableData,
+ shouldShowAutoStopDate: true,
+ },
+ });
+ });
+
+ it('renders the date', () => {
+ expect(findAutoStop().text()).toContain(format(futureDate));
+ });
+
+ it('should render the auto-stop button', () => {
+ expect(wrapper.find(PinComponent).exists()).toBe(true);
+ });
+ });
+
+ describe('in the past', () => {
+ const pastDate = new Date(Date.now() - 100000);
+ beforeEach(() => {
+ factory({
+ propsData: {
+ model: {
+ ...environment,
+ auto_stop_at: pastDate,
+ },
+ canReadEnvironment: true,
+ tableData,
+ shouldShowAutoStopDate: true,
+ },
+ });
+ });
+
+ it('should not render a date', () => {
+ expect(findAutoStop().exists()).toBe(false);
+ });
+
+ it('should not render the auto-stop button', () => {
+ expect(wrapper.find(PinComponent).exists()).toBe(false);
+ });
+ });
+ });
});
describe('With manual actions', () => {
diff --git a/spec/frontend/environments/environment_monitoring_spec.js b/spec/frontend/environments/environment_monitoring_spec.js
index 8e67f799dc0..d2129bd7b30 100644
--- a/spec/frontend/environments/environment_monitoring_spec.js
+++ b/spec/frontend/environments/environment_monitoring_spec.js
@@ -9,8 +9,6 @@ describe('Monitoring Component', () => {
const createWrapper = () => {
wrapper = shallowMount(MonitoringComponent, {
- sync: false,
- attachToDocument: true,
propsData: {
monitoringUrl,
},
@@ -33,7 +31,7 @@ describe('Monitoring Component', () => {
it('should render a link to environment monitoring page', () => {
expect(wrapper.attributes('href')).toEqual(monitoringUrl);
expect(findIconsByName('chart').length).toBe(1);
- expect(wrapper.attributes('data-original-title')).toBe('Monitoring');
+ expect(wrapper.attributes('title')).toBe('Monitoring');
expect(wrapper.attributes('aria-label')).toBe('Monitoring');
});
});
diff --git a/spec/frontend/environments/environment_pin_spec.js b/spec/frontend/environments/environment_pin_spec.js
new file mode 100644
index 00000000000..d1d6735fa38
--- /dev/null
+++ b/spec/frontend/environments/environment_pin_spec.js
@@ -0,0 +1,46 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlButton } from '@gitlab/ui';
+import Icon from '~/vue_shared/components/icon.vue';
+import eventHub from '~/environments/event_hub';
+import PinComponent from '~/environments/components/environment_pin.vue';
+
+describe('Pin Component', () => {
+ let wrapper;
+
+ const factory = (options = {}) => {
+ // This destroys any wrappers created before a nested call to factory reassigns it
+ if (wrapper && wrapper.destroy) {
+ wrapper.destroy();
+ }
+ wrapper = shallowMount(PinComponent, {
+ ...options,
+ });
+ };
+
+ const autoStopUrl = '/root/auto-stop-env-test/-/environments/38/cancel_auto_stop';
+
+ beforeEach(() => {
+ factory({
+ propsData: {
+ autoStopUrl,
+ },
+ });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('should render the component with thumbtack icon', () => {
+ expect(wrapper.find(Icon).props('name')).toBe('thumbtack');
+ });
+
+ it('should emit onPinClick when clicked', () => {
+ const eventHubSpy = jest.spyOn(eventHub, '$emit');
+ const button = wrapper.find(GlButton);
+
+ button.vm.$emit('click');
+
+ expect(eventHubSpy).toHaveBeenCalledWith('cancelAutoStop', autoStopUrl);
+ });
+});
diff --git a/spec/frontend/environments/environment_rollback_spec.js b/spec/frontend/environments/environment_rollback_spec.js
index 33e04f5eb29..fb62a096c3d 100644
--- a/spec/frontend/environments/environment_rollback_spec.js
+++ b/spec/frontend/environments/environment_rollback_spec.js
@@ -13,8 +13,6 @@ describe('Rollback Component', () => {
isLastDeployment: true,
environment: {},
},
- attachToDocument: true,
- sync: false,
});
expect(wrapper.element).toHaveSpriteIcon('repeat');
@@ -27,8 +25,6 @@ describe('Rollback Component', () => {
isLastDeployment: false,
environment: {},
},
- attachToDocument: true,
- sync: false,
});
expect(wrapper.element).toHaveSpriteIcon('redo');
diff --git a/spec/frontend/environments/environment_stop_spec.js b/spec/frontend/environments/environment_stop_spec.js
index ab714728311..f971cf56b65 100644
--- a/spec/frontend/environments/environment_stop_spec.js
+++ b/spec/frontend/environments/environment_stop_spec.js
@@ -11,8 +11,6 @@ describe('Stop Component', () => {
const createWrapper = () => {
wrapper = shallowMount(StopComponent, {
- sync: false,
- attachToDocument: true,
propsData: {
environment: {},
},
@@ -29,7 +27,7 @@ describe('Stop Component', () => {
it('should render a button to stop the environment', () => {
expect(findButton().exists()).toBe(true);
- expect(wrapper.attributes('data-original-title')).toEqual('Stop environment');
+ expect(wrapper.attributes('title')).toEqual('Stop environment');
});
it('emits requestStopEnvironment in the event hub when button is clicked', () => {
diff --git a/spec/frontend/environments/environment_terminal_button_spec.js b/spec/frontend/environments/environment_terminal_button_spec.js
index 9aa2b82736c..007fda2f2cc 100644
--- a/spec/frontend/environments/environment_terminal_button_spec.js
+++ b/spec/frontend/environments/environment_terminal_button_spec.js
@@ -7,8 +7,6 @@ describe('Stop Component', () => {
const mountWithProps = props => {
wrapper = shallowMount(TerminalComponent, {
- sync: false,
- attachToDocument: true,
propsData: props,
});
};
@@ -25,7 +23,7 @@ describe('Stop Component', () => {
it('should render a link to open a web terminal with the provided path', () => {
expect(wrapper.is('a')).toBe(true);
- expect(wrapper.attributes('data-original-title')).toBe('Terminal');
+ expect(wrapper.attributes('title')).toBe('Terminal');
expect(wrapper.attributes('aria-label')).toBe('Terminal');
expect(wrapper.attributes('href')).toBe(terminalPath);
});
diff --git a/spec/frontend/environments/mock_data.js b/spec/frontend/environments/mock_data.js
index a014108b898..a2b581578d2 100644
--- a/spec/frontend/environments/mock_data.js
+++ b/spec/frontend/environments/mock_data.js
@@ -63,6 +63,7 @@ const environment = {
log_path: 'root/ci-folders/environments/31/logs',
created_at: '2016-11-07T11:11:16.525Z',
updated_at: '2016-11-10T15:55:58.778Z',
+ auto_stop_at: null,
};
const folder = {
@@ -98,6 +99,10 @@ const tableData = {
title: 'Updated',
spacing: 'section-10',
},
+ autoStop: {
+ title: 'Auto stop in',
+ spacing: 'section-5',
+ },
actions: {
spacing: 'section-25',
},
diff --git a/spec/frontend/error_tracking/components/error_details_spec.js b/spec/frontend/error_tracking/components/error_details_spec.js
index 6dc4980aaec..35014b00dd8 100644
--- a/spec/frontend/error_tracking/components/error_details_spec.js
+++ b/spec/frontend/error_tracking/components/error_details_spec.js
@@ -1,6 +1,6 @@
import { createLocalVue, shallowMount } from '@vue/test-utils';
import Vuex from 'vuex';
-import { GlLoadingIcon, GlLink } from '@gitlab/ui';
+import { GlLoadingIcon, GlLink, GlBadge, GlFormInput } from '@gitlab/ui';
import LoadingButton from '~/vue_shared/components/loading_button.vue';
import Stacktrace from '~/error_tracking/components/stacktrace.vue';
import ErrorDetails from '~/error_tracking/components/error_details.vue';
@@ -13,19 +13,42 @@ describe('ErrorDetails', () => {
let wrapper;
let actions;
let getters;
+ let mocks;
+
+ const findInput = name => {
+ const inputs = wrapper.findAll(GlFormInput).filter(c => c.attributes('name') === name);
+ return inputs.length ? inputs.at(0) : inputs;
+ };
function mountComponent() {
wrapper = shallowMount(ErrorDetails, {
stubs: { LoadingButton },
localVue,
store,
+ mocks,
propsData: {
+ issueId: '123',
+ projectPath: '/root/gitlab-test',
+ listPath: '/error_tracking',
+ issueUpdatePath: '/123',
issueDetailsPath: '/123/details',
issueStackTracePath: '/stacktrace',
projectIssuesPath: '/test-project/issues/',
csrfToken: 'fakeToken',
},
});
+ wrapper.setData({
+ GQLerror: {
+ id: 'gid://gitlab/Gitlab::ErrorTracking::DetailedError/129381',
+ sentryId: 129381,
+ title: 'Issue title',
+ externalUrl: 'http://sentry.gitlab.net/gitlab',
+ firstSeen: '2017-05-26T13:32:48Z',
+ lastSeen: '2018-05-26T13:32:48Z',
+ count: 12,
+ userCount: 2,
+ },
+ });
}
beforeEach(() => {
@@ -56,6 +79,19 @@ describe('ErrorDetails', () => {
},
},
});
+
+ const query = jest.fn();
+ mocks = {
+ $apollo: {
+ query,
+ queries: {
+ GQLerror: {
+ loading: true,
+ stopPolling: jest.fn(),
+ },
+ },
+ },
+ };
});
afterEach(() => {
@@ -77,27 +113,50 @@ describe('ErrorDetails', () => {
});
describe('Error details', () => {
- it('should show Sentry error details without stacktrace', () => {
+ beforeEach(() => {
store.state.details.loading = false;
store.state.details.error.id = 1;
+ mocks.$apollo.queries.GQLerror.loading = false;
mountComponent();
+ });
+
+ it('should show Sentry error details without stacktrace', () => {
expect(wrapper.find(GlLink).exists()).toBe(true);
expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
expect(wrapper.find(Stacktrace).exists()).toBe(false);
+ expect(wrapper.find(GlBadge).exists()).toBe(false);
+ expect(wrapper.findAll('button').length).toBe(3);
+ });
+
+ describe('Badges', () => {
+ it('should show language and error level badges', () => {
+ store.state.details.error.tags = { level: 'error', logger: 'ruby' };
+ mountComponent();
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.findAll(GlBadge).length).toBe(2);
+ });
+ });
+
+ it('should NOT show the badge if the tag is not present', () => {
+ store.state.details.error.tags = { level: 'error' };
+ mountComponent();
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.findAll(GlBadge).length).toBe(1);
+ });
+ });
});
describe('Stacktrace', () => {
it('should show stacktrace', () => {
- store.state.details.loading = false;
- store.state.details.error.id = 1;
store.state.details.loadingStacktrace = false;
mountComponent();
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
- expect(wrapper.find(Stacktrace).exists()).toBe(true);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.find(Stacktrace).exists()).toBe(true);
+ });
});
it('should NOT show stacktrace if no entries', () => {
- store.state.details.loading = false;
store.state.details.loadingStacktrace = false;
store.getters = { 'details/sentryUrl': () => 'sentry.io', 'details/stacktrace': () => [] };
mountComponent();
@@ -108,29 +167,19 @@ describe('ErrorDetails', () => {
describe('When a user clicks the create issue button', () => {
beforeEach(() => {
- store.state.details.loading = false;
- store.state.details.error = {
- id: 129381,
- title: 'Issue title',
- external_url: 'http://sentry.gitlab.net/gitlab',
- first_seen: '2017-05-26T13:32:48Z',
- last_seen: '2018-05-26T13:32:48Z',
- count: 12,
- user_count: 2,
- };
mountComponent();
});
it('should send sentry_issue_identifier', () => {
- const sentryErrorIdInput = wrapper.find(
- 'glforminput-stub[name="issue[sentry_issue_attributes][sentry_issue_identifier]"',
+ const sentryErrorIdInput = findInput(
+ 'issue[sentry_issue_attributes][sentry_issue_identifier]',
);
expect(sentryErrorIdInput.attributes('value')).toBe('129381');
});
it('should set the form values with title and description', () => {
- const csrfTokenInput = wrapper.find('glforminput-stub[name="authenticity_token"]');
- const issueTitleInput = wrapper.find('glforminput-stub[name="issue[title]"]');
+ const csrfTokenInput = findInput('authenticity_token');
+ const issueTitleInput = findInput('issue[title]');
const issueDescriptionInput = wrapper.find('input[name="issue[description]"]');
expect(csrfTokenInput.attributes('value')).toBe('fakeToken');
expect(issueTitleInput.attributes('value')).toContain(wrapper.vm.issueTitle);
@@ -140,7 +189,7 @@ describe('ErrorDetails', () => {
it('should submit the form', () => {
window.HTMLFormElement.prototype.submit = () => {};
const submitSpy = jest.spyOn(wrapper.vm.$refs.sentryIssueForm, 'submit');
- wrapper.find('button').trigger('click');
+ wrapper.find('[data-qa-selector="create_issue_button"]').trigger('click');
expect(submitSpy).toHaveBeenCalled();
submitSpy.mockRestore();
});
@@ -150,6 +199,7 @@ describe('ErrorDetails', () => {
const gitlabIssue = 'https://gitlab.example.com/issues/1';
const findGitLabLink = () => wrapper.find(`[href="${gitlabIssue}"]`);
const findCreateIssueButton = () => wrapper.find('[data-qa-selector="create_issue_button"]');
+ const findViewIssueButton = () => wrapper.find('[data-qa-selector="view_issue_button"]');
describe('is present', () => {
beforeEach(() => {
@@ -161,6 +211,10 @@ describe('ErrorDetails', () => {
mountComponent();
});
+ it('should display the View issue button', () => {
+ expect(findViewIssueButton().exists()).toBe(true);
+ });
+
it('should display the issue link', () => {
expect(findGitLabLink().exists()).toBe(true);
});
@@ -180,13 +234,50 @@ describe('ErrorDetails', () => {
mountComponent();
});
+ it('should not display the View issue button', () => {
+ expect(findViewIssueButton().exists()).toBe(false);
+ });
+
it('should not display an issue link', () => {
expect(findGitLabLink().exists()).toBe(false);
});
+
it('should display the create issue button', () => {
expect(findCreateIssueButton().exists()).toBe(true);
});
});
});
+
+ describe('GitLab commit link', () => {
+ const gitlabCommit = '7975be0116940bf2ad4321f79d02a55c5f7779aa';
+ const gitlabCommitPath =
+ '/gitlab-org/gitlab-test/commit/7975be0116940bf2ad4321f79d02a55c5f7779aa';
+ const findGitLabCommitLink = () => wrapper.find(`[href$="${gitlabCommitPath}"]`);
+
+ it('should display a link', () => {
+ mocks.$apollo.queries.GQLerror.loading = false;
+ wrapper.setData({
+ GQLerror: {
+ gitlabCommit,
+ gitlabCommitPath,
+ },
+ });
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findGitLabCommitLink().exists()).toBe(true);
+ });
+ });
+
+ it('should not display a link', () => {
+ mocks.$apollo.queries.GQLerror.loading = false;
+ wrapper.setData({
+ GQLerror: {
+ gitlabCommit: null,
+ },
+ });
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findGitLabCommitLink().exists()).toBe(false);
+ });
+ });
+ });
});
});
diff --git a/spec/frontend/error_tracking/components/error_tracking_list_spec.js b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
index 581581405b6..310cd676ca1 100644
--- a/spec/frontend/error_tracking/components/error_tracking_list_spec.js
+++ b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
@@ -1,15 +1,7 @@
-import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { createLocalVue, mount } from '@vue/test-utils';
import Vuex from 'vuex';
-import {
- GlEmptyState,
- GlLoadingIcon,
- GlTable,
- GlLink,
- GlFormInput,
- GlDropdown,
- GlDropdownItem,
- GlPagination,
-} from '@gitlab/ui';
+import { GlEmptyState, GlLoadingIcon, GlFormInput, GlPagination } from '@gitlab/ui';
+import stubChildren from 'helpers/stub_children';
import ErrorTrackingList from '~/error_tracking/components/error_tracking_list.vue';
import errorsList from './list_mock.json';
@@ -32,27 +24,24 @@ describe('ErrorTrackingList', () => {
function mountComponent({
errorTrackingEnabled = true,
userCanEnableErrorTracking = true,
- sync = true,
- stubs = {
- 'gl-link': GlLink,
- 'gl-table': GlTable,
- 'gl-pagination': GlPagination,
- 'gl-dropdown': GlDropdown,
- 'gl-dropdown-item': GlDropdownItem,
- },
+ stubs = {},
} = {}) {
- wrapper = shallowMount(ErrorTrackingList, {
+ wrapper = mount(ErrorTrackingList, {
localVue,
store,
- sync,
propsData: {
indexPath: '/path',
+ listPath: '/error_tracking',
+ projectPath: 'project/test',
enableErrorTrackingLink: '/link',
userCanEnableErrorTracking,
errorTrackingEnabled,
illustrationPath: 'illustration/path',
},
- stubs,
+ stubs: {
+ ...stubChildren(ErrorTrackingList),
+ ...stubs,
+ },
data() {
return { errorSearchQuery: 'search' };
},
@@ -71,6 +60,8 @@ describe('ErrorTrackingList', () => {
setEndpoint: jest.fn(),
searchByQuery: jest.fn(),
sortByField: jest.fn(),
+ fetchPaginatedResults: jest.fn(),
+ updateStatus: jest.fn(),
};
const state = {
@@ -121,7 +112,14 @@ describe('ErrorTrackingList', () => {
beforeEach(() => {
store.state.list.loading = false;
store.state.list.errors = errorsList;
- mountComponent();
+ mountComponent({
+ stubs: {
+ GlTable: false,
+ GlDropdown: false,
+ GlDropdownItem: false,
+ GlLink: false,
+ },
+ });
});
it('shows table', () => {
@@ -144,6 +142,18 @@ describe('ErrorTrackingList', () => {
});
});
+ it('each error in the list should have an ignore button', () => {
+ findErrorListRows().wrappers.forEach(row => {
+ expect(row.contains('glicon-stub[name="eye-slash"]')).toBe(true);
+ });
+ });
+
+ it('each error in the list should have a resolve button', () => {
+ findErrorListRows().wrappers.forEach(row => {
+ expect(row.contains('glicon-stub[name="check-circle"]')).toBe(true);
+ });
+ });
+
describe('filtering', () => {
const findSearchBox = () => wrapper.find(GlFormInput);
@@ -172,7 +182,13 @@ describe('ErrorTrackingList', () => {
store.state.list.loading = false;
store.state.list.errors = [];
- mountComponent();
+ mountComponent({
+ stubs: {
+ GlTable: false,
+ GlDropdown: false,
+ GlDropdownItem: false,
+ },
+ });
});
it('shows empty table', () => {
@@ -186,7 +202,7 @@ describe('ErrorTrackingList', () => {
});
it('restarts polling', () => {
- findRefreshLink().trigger('click');
+ findRefreshLink().vm.$emit('click');
expect(actions.restartPolling).toHaveBeenCalled();
});
});
@@ -204,14 +220,70 @@ describe('ErrorTrackingList', () => {
});
});
+ describe('When the ignore button on an error is clicked', () => {
+ beforeEach(() => {
+ store.state.list.loading = false;
+ store.state.list.errors = errorsList;
+
+ mountComponent({
+ stubs: {
+ GlTable: false,
+ GlLink: false,
+ GlButton: false,
+ },
+ });
+ });
+
+ it('sends the "ignored" status and error ID', () => {
+ wrapper.find({ ref: 'ignoreError' }).trigger('click');
+ expect(actions.updateStatus).toHaveBeenCalledWith(
+ expect.anything(),
+ {
+ endpoint: '/project/test/-/error_tracking/3.json',
+ redirectUrl: '/error_tracking',
+ status: 'ignored',
+ },
+ undefined,
+ );
+ });
+ });
+
+ describe('When the resolve button on an error is clicked', () => {
+ beforeEach(() => {
+ store.state.list.loading = false;
+ store.state.list.errors = errorsList;
+
+ mountComponent({
+ stubs: {
+ GlTable: false,
+ GlLink: false,
+ GlButton: false,
+ },
+ });
+ });
+
+ it('sends "resolved" status and error ID', () => {
+ wrapper.find({ ref: 'resolveError' }).trigger('click');
+ expect(actions.updateStatus).toHaveBeenCalledWith(
+ expect.anything(),
+ {
+ endpoint: '/project/test/-/error_tracking/3.json',
+ redirectUrl: '/error_tracking',
+ status: 'resolved',
+ },
+ undefined,
+ );
+ });
+ });
+
describe('When error tracking is disabled and user is not allowed to enable it', () => {
beforeEach(() => {
mountComponent({
errorTrackingEnabled: false,
userCanEnableErrorTracking: false,
stubs: {
- 'gl-link': GlLink,
- 'gl-empty-state': GlEmptyState,
+ GlLink: false,
+ GlEmptyState: false,
},
});
});
@@ -225,7 +297,12 @@ describe('ErrorTrackingList', () => {
describe('recent searches', () => {
beforeEach(() => {
- mountComponent();
+ mountComponent({
+ stubs: {
+ GlDropdown: false,
+ GlDropdownItem: false,
+ },
+ });
});
it('shows empty message', () => {
@@ -237,11 +314,12 @@ describe('ErrorTrackingList', () => {
it('shows items', () => {
store.state.list.recentSearches = ['great', 'search'];
- const dropdownItems = wrapper.findAll('.filtered-search-box li');
-
- expect(dropdownItems.length).toBe(3);
- expect(dropdownItems.at(0).text()).toBe('great');
- expect(dropdownItems.at(1).text()).toBe('search');
+ return wrapper.vm.$nextTick().then(() => {
+ const dropdownItems = wrapper.findAll('.filtered-search-box li');
+ expect(dropdownItems.length).toBe(3);
+ expect(dropdownItems.at(0).text()).toBe('great');
+ expect(dropdownItems.at(1).text()).toBe('search');
+ });
});
describe('clear', () => {
@@ -256,22 +334,27 @@ describe('ErrorTrackingList', () => {
it('is visible when list has items', () => {
store.state.list.recentSearches = ['some', 'searches'];
- expect(clearRecentButton().exists()).toBe(true);
- expect(clearRecentButton().text()).toBe('Clear recent searches');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(clearRecentButton().exists()).toBe(true);
+ expect(clearRecentButton().text()).toBe('Clear recent searches');
+ });
});
it('clears items on click', () => {
store.state.list.recentSearches = ['some', 'searches'];
- clearRecentButton().vm.$emit('click');
+ return wrapper.vm.$nextTick().then(() => {
+ clearRecentButton().vm.$emit('click');
- expect(actions.clearRecentSearches).toHaveBeenCalledTimes(1);
+ expect(actions.clearRecentSearches).toHaveBeenCalledTimes(1);
+ });
});
});
});
describe('When pagination is not required', () => {
beforeEach(() => {
+ store.state.list.loading = false;
store.state.list.pagination = {};
mountComponent();
});
@@ -284,7 +367,12 @@ describe('ErrorTrackingList', () => {
describe('When pagination is required', () => {
describe('and the user is on the first page', () => {
beforeEach(() => {
- mountComponent({ sync: false });
+ store.state.list.loading = false;
+ mountComponent({
+ stubs: {
+ GlPagination: false,
+ },
+ });
});
it('shows a disabled Prev button', () => {
@@ -295,17 +383,24 @@ describe('ErrorTrackingList', () => {
describe('and the user is not on the first page', () => {
describe('and the previous button is clicked', () => {
beforeEach(() => {
- mountComponent({ sync: false });
+ store.state.list.loading = false;
+ mountComponent({
+ stubs: {
+ GlTable: false,
+ GlPagination: false,
+ },
+ });
wrapper.setData({ pageValue: 2 });
+ return wrapper.vm.$nextTick();
});
it('fetches the previous page of results', () => {
expect(wrapper.find('.prev-page-item').attributes('aria-disabled')).toBe(undefined);
wrapper.vm.goToPrevPage();
- expect(actions.startPolling).toHaveBeenCalledTimes(2);
- expect(actions.startPolling).toHaveBeenLastCalledWith(
+ expect(actions.fetchPaginatedResults).toHaveBeenCalled();
+ expect(actions.fetchPaginatedResults).toHaveBeenLastCalledWith(
expect.anything(),
- '/path?cursor=previousCursor',
+ 'previousCursor',
undefined,
);
});
@@ -313,17 +408,18 @@ describe('ErrorTrackingList', () => {
describe('and the next page button is clicked', () => {
beforeEach(() => {
- mountComponent({ sync: false });
+ store.state.list.loading = false;
+ mountComponent();
});
it('fetches the next page of results', () => {
window.scrollTo = jest.fn();
findPagination().vm.$emit('input', 2);
expect(window.scrollTo).toHaveBeenCalledWith(0, 0);
- expect(actions.startPolling).toHaveBeenCalledTimes(2);
- expect(actions.startPolling).toHaveBeenLastCalledWith(
+ expect(actions.fetchPaginatedResults).toHaveBeenCalled();
+ expect(actions.fetchPaginatedResults).toHaveBeenLastCalledWith(
expect.anything(),
- '/path?cursor=nextCursor',
+ 'nextCursor',
undefined,
);
});
diff --git a/spec/frontend/error_tracking/components/stacktrace_entry_spec.js b/spec/frontend/error_tracking/components/stacktrace_entry_spec.js
index 942585d5370..2a4e826b4ab 100644
--- a/spec/frontend/error_tracking/components/stacktrace_entry_spec.js
+++ b/spec/frontend/error_tracking/components/stacktrace_entry_spec.js
@@ -46,8 +46,8 @@ describe('Stacktrace Entry', () => {
expect(wrapper.findAll('.line_content.old').length).toBe(1);
});
- describe('no code block', () => {
- const findFileHeaderContent = () => wrapper.find('.file-header-content').html();
+ describe('entry caption', () => {
+ const findFileHeaderContent = () => wrapper.find('.file-header-content').text();
it('should hide collapse icon and render error fn name and error line when there is no code block', () => {
const extraInfo = { errorLine: 34, errorFn: 'errorFn', errorColumn: 77 };
diff --git a/spec/frontend/error_tracking/store/actions_spec.js b/spec/frontend/error_tracking/store/actions_spec.js
new file mode 100644
index 00000000000..8bc53d94345
--- /dev/null
+++ b/spec/frontend/error_tracking/store/actions_spec.js
@@ -0,0 +1,78 @@
+import MockAdapter from 'axios-mock-adapter';
+import testAction from 'helpers/vuex_action_helper';
+import axios from '~/lib/utils/axios_utils';
+import createFlash from '~/flash';
+import * as actions from '~/error_tracking/store/actions';
+import * as types from '~/error_tracking/store/mutation_types';
+import { visitUrl } from '~/lib/utils/url_utility';
+
+jest.mock('~/flash.js');
+jest.mock('~/lib/utils/url_utility');
+
+let mock;
+
+describe('Sentry common store actions', () => {
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ createFlash.mockClear();
+ });
+
+ describe('updateStatus', () => {
+ const endpoint = '123/stacktrace';
+ const redirectUrl = '/list';
+ const status = 'resolved';
+
+ it('should handle successful status update', done => {
+ mock.onPut().reply(200, {});
+ testAction(
+ actions.updateStatus,
+ { endpoint, redirectUrl, status },
+ {},
+ [
+ {
+ payload: true,
+ type: types.SET_UPDATING_RESOLVE_STATUS,
+ },
+ {
+ payload: false,
+ type: 'SET_UPDATING_RESOLVE_STATUS',
+ },
+ ],
+ [],
+ () => {
+ done();
+ expect(visitUrl).toHaveBeenCalledWith(redirectUrl);
+ },
+ );
+ });
+
+ it('should handle unsuccessful status update', done => {
+ mock.onPut().reply(400, {});
+ testAction(
+ actions.updateStatus,
+ { endpoint, redirectUrl, status },
+ {},
+ [
+ {
+ payload: true,
+ type: types.SET_UPDATING_RESOLVE_STATUS,
+ },
+ {
+ payload: false,
+ type: types.SET_UPDATING_RESOLVE_STATUS,
+ },
+ ],
+ [],
+ () => {
+ expect(visitUrl).not.toHaveBeenCalled();
+ expect(createFlash).toHaveBeenCalledTimes(1);
+ done();
+ },
+ );
+ });
+ });
+});
diff --git a/spec/frontend/error_tracking/store/details/actions_spec.js b/spec/frontend/error_tracking/store/details/actions_spec.js
index 0866f76aeef..129760bb705 100644
--- a/spec/frontend/error_tracking/store/details/actions_spec.js
+++ b/spec/frontend/error_tracking/store/details/actions_spec.js
@@ -6,6 +6,8 @@ import * as actions from '~/error_tracking/store/details/actions';
import * as types from '~/error_tracking/store/details/mutation_types';
jest.mock('~/flash.js');
+jest.mock('~/lib/utils/url_utility');
+
let mock;
describe('Sentry error details store actions', () => {
diff --git a/spec/frontend/error_tracking/store/list/actions_spec.js b/spec/frontend/error_tracking/store/list/actions_spec.js
index 7906738f5b0..54fdde88818 100644
--- a/spec/frontend/error_tracking/store/list/actions_spec.js
+++ b/spec/frontend/error_tracking/store/list/actions_spec.js
@@ -79,6 +79,7 @@ describe('error tracking actions', () => {
query,
{},
[
+ { type: types.SET_CURSOR, payload: null },
{ type: types.SET_SEARCH_QUERY, payload: query },
{ type: types.ADD_RECENT_SEARCH, payload: query },
],
@@ -93,15 +94,15 @@ describe('error tracking actions', () => {
testAction(
actions.sortByField,
- { field },
+ field,
{},
- [{ type: types.SET_SORT_FIELD, payload: { field } }],
+ [{ type: types.SET_CURSOR, payload: null }, { type: types.SET_SORT_FIELD, payload: field }],
[{ type: 'stopPolling' }, { type: 'startPolling' }],
);
});
});
- describe('setEnpoint', () => {
+ describe('setEndpoint', () => {
it('should set search endpoint', () => {
const endpoint = 'https://sentry.io';
@@ -114,4 +115,17 @@ describe('error tracking actions', () => {
);
});
});
+
+ describe('fetchPaginatedResults', () => {
+ it('should start polling the selected page cursor', () => {
+ const cursor = '1576637570000:1:1';
+ testAction(
+ actions.fetchPaginatedResults,
+ cursor,
+ {},
+ [{ type: types.SET_CURSOR, payload: cursor }],
+ [{ type: 'stopPolling' }, { type: 'startPolling' }],
+ );
+ });
+ });
});
diff --git a/spec/frontend/error_tracking_settings/components/app_spec.js b/spec/frontend/error_tracking_settings/components/app_spec.js
index 0b86aad5b3e..5c3efa24551 100644
--- a/spec/frontend/error_tracking_settings/components/app_spec.js
+++ b/spec/frontend/error_tracking_settings/components/app_spec.js
@@ -57,7 +57,9 @@ describe('error tracking settings app', () => {
it('disables the button when saving', () => {
store.state.settingsLoading = true;
- expect(wrapper.find('.js-error-tracking-button').attributes('disabled')).toBeTruthy();
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find('.js-error-tracking-button').attributes('disabled')).toBeTruthy();
+ });
});
});
});
diff --git a/spec/frontend/error_tracking_settings/components/project_dropdown_spec.js b/spec/frontend/error_tracking_settings/components/project_dropdown_spec.js
index 8e5dbe28452..3ce105f27e4 100644
--- a/spec/frontend/error_tracking_settings/components/project_dropdown_spec.js
+++ b/spec/frontend/error_tracking_settings/components/project_dropdown_spec.js
@@ -66,6 +66,8 @@ describe('error tracking settings project dropdown', () => {
describe('populated project list', () => {
beforeEach(() => {
wrapper.setProps({ projects: _.clone(projectList), hasProjects: true });
+
+ return wrapper.vm.$nextTick();
});
it('renders the dropdown', () => {
@@ -84,6 +86,7 @@ describe('error tracking settings project dropdown', () => {
beforeEach(() => {
wrapper.setProps({ projects: _.clone(projectList), selectedProject, hasProjects: true });
+ return wrapper.vm.$nextTick();
});
it('does not show helper text', () => {
@@ -99,6 +102,7 @@ describe('error tracking settings project dropdown', () => {
selectedProject: staleProject,
isProjectInvalid: true,
});
+ return wrapper.vm.$nextTick();
});
it('displays an error', () => {
diff --git a/spec/frontend/error_tracking_settings/store/actions_spec.js b/spec/frontend/error_tracking_settings/store/actions_spec.js
index e12c4e20f58..b076e6ecd31 100644
--- a/spec/frontend/error_tracking_settings/store/actions_spec.js
+++ b/spec/frontend/error_tracking_settings/store/actions_spec.js
@@ -28,7 +28,7 @@ describe('error tracking settings actions', () => {
});
it('should request and transform the project list', done => {
- mock.onPost(TEST_HOST).reply(() => [200, { projects: projectList }]);
+ mock.onGet(TEST_HOST).reply(() => [200, { projects: projectList }]);
testAction(
actions.fetchProjects,
null,
@@ -42,14 +42,14 @@ describe('error tracking settings actions', () => {
},
],
() => {
- expect(mock.history.post.length).toBe(1);
+ expect(mock.history.get.length).toBe(1);
done();
},
);
});
it('should handle a server error', done => {
- mock.onPost(`${TEST_HOST}.json`).reply(() => [400]);
+ mock.onGet(`${TEST_HOST}.json`).reply(() => [400]);
testAction(
actions.fetchProjects,
null,
@@ -62,7 +62,7 @@ describe('error tracking settings actions', () => {
},
],
() => {
- expect(mock.history.post.length).toBe(1);
+ expect(mock.history.get.length).toBe(1);
done();
},
);
diff --git a/spec/javascripts/feature_highlight/feature_highlight_options_spec.js b/spec/frontend/feature_highlight/feature_highlight_options_spec.js
index 7f9425d8abe..8b75c46fd4c 100644
--- a/spec/javascripts/feature_highlight/feature_highlight_options_spec.js
+++ b/spec/frontend/feature_highlight/feature_highlight_options_spec.js
@@ -1,28 +1,34 @@
+import { GlBreakpointInstance as bp } from '@gitlab/ui/dist/utils';
import domContentLoaded from '~/feature_highlight/feature_highlight_options';
-import bp from '~/breakpoints';
describe('feature highlight options', () => {
describe('domContentLoaded', () => {
it('should not call highlightFeatures when breakpoint is xs', () => {
- spyOn(bp, 'getBreakpointSize').and.returnValue('xs');
+ jest.spyOn(bp, 'getBreakpointSize').mockReturnValue('xs');
expect(domContentLoaded()).toBe(false);
});
it('should not call highlightFeatures when breakpoint is sm', () => {
- spyOn(bp, 'getBreakpointSize').and.returnValue('sm');
+ jest.spyOn(bp, 'getBreakpointSize').mockReturnValue('sm');
expect(domContentLoaded()).toBe(false);
});
it('should not call highlightFeatures when breakpoint is md', () => {
- spyOn(bp, 'getBreakpointSize').and.returnValue('md');
+ jest.spyOn(bp, 'getBreakpointSize').mockReturnValue('md');
expect(domContentLoaded()).toBe(false);
});
- it('should call highlightFeatures when breakpoint is lg', () => {
- spyOn(bp, 'getBreakpointSize').and.returnValue('lg');
+ it('should not call highlightFeatures when breakpoint is not xl', () => {
+ jest.spyOn(bp, 'getBreakpointSize').mockReturnValue('lg');
+
+ expect(domContentLoaded()).toBe(false);
+ });
+
+ it('should call highlightFeatures when breakpoint is xl', () => {
+ jest.spyOn(bp, 'getBreakpointSize').mockReturnValue('xl');
expect(domContentLoaded()).toBe(true);
});
diff --git a/spec/javascripts/filtered_search/components/recent_searches_dropdown_content_spec.js b/spec/frontend/filtered_search/components/recent_searches_dropdown_content_spec.js
index d1742dcedfa..2543fb8768b 100644
--- a/spec/javascripts/filtered_search/components/recent_searches_dropdown_content_spec.js
+++ b/spec/frontend/filtered_search/components/recent_searches_dropdown_content_spec.js
@@ -158,7 +158,7 @@ describe('RecentSearchesDropdownContent', () => {
let onRecentSearchesItemSelectedSpy;
beforeEach(() => {
- onRecentSearchesItemSelectedSpy = jasmine.createSpy('spy');
+ onRecentSearchesItemSelectedSpy = jest.fn();
eventHub.$on('recentSearchesItemSelected', onRecentSearchesItemSelectedSpy);
vm = createComponent(propsDataWithItems);
@@ -180,7 +180,7 @@ describe('RecentSearchesDropdownContent', () => {
let onRequestClearRecentSearchesSpy;
beforeEach(() => {
- onRequestClearRecentSearchesSpy = jasmine.createSpy('spy');
+ onRequestClearRecentSearchesSpy = jest.fn();
eventHub.$on('requestClearRecentSearches', onRequestClearRecentSearchesSpy);
vm = createComponent(propsDataWithItems);
diff --git a/spec/javascripts/filtered_search/dropdown_user_spec.js b/spec/frontend/filtered_search/dropdown_user_spec.js
index f764800fff0..8eef10290bf 100644
--- a/spec/javascripts/filtered_search/dropdown_user_spec.js
+++ b/spec/frontend/filtered_search/dropdown_user_spec.js
@@ -8,10 +8,10 @@ describe('Dropdown User', () => {
let dropdownUser;
beforeEach(() => {
- spyOn(DropdownUser.prototype, 'bindEvents').and.callFake(() => {});
- spyOn(DropdownUser.prototype, 'getProjectId').and.callFake(() => {});
- spyOn(DropdownUser.prototype, 'getGroupId').and.callFake(() => {});
- spyOn(DropdownUtils, 'getSearchInput').and.callFake(() => {});
+ jest.spyOn(DropdownUser.prototype, 'bindEvents').mockImplementation(() => {});
+ jest.spyOn(DropdownUser.prototype, 'getProjectId').mockImplementation(() => {});
+ jest.spyOn(DropdownUser.prototype, 'getGroupId').mockImplementation(() => {});
+ jest.spyOn(DropdownUtils, 'getSearchInput').mockImplementation(() => {});
dropdownUser = new DropdownUser({
tokenKeys: IssuableFilteredTokenKeys,
@@ -19,7 +19,7 @@ describe('Dropdown User', () => {
});
it('should not return the double quote found in value', () => {
- spyOn(FilteredSearchTokenizer, 'processTokens').and.returnValue({
+ jest.spyOn(FilteredSearchTokenizer, 'processTokens').mockReturnValue({
lastToken: '"johnny appleseed',
});
@@ -27,7 +27,7 @@ describe('Dropdown User', () => {
});
it('should not return the single quote found in value', () => {
- spyOn(FilteredSearchTokenizer, 'processTokens').and.returnValue({
+ jest.spyOn(FilteredSearchTokenizer, 'processTokens').mockReturnValue({
lastToken: "'larry boy",
});
@@ -37,9 +37,9 @@ describe('Dropdown User', () => {
describe("config AjaxFilter's endpoint", () => {
beforeEach(() => {
- spyOn(DropdownUser.prototype, 'bindEvents').and.callFake(() => {});
- spyOn(DropdownUser.prototype, 'getProjectId').and.callFake(() => {});
- spyOn(DropdownUser.prototype, 'getGroupId').and.callFake(() => {});
+ jest.spyOn(DropdownUser.prototype, 'bindEvents').mockImplementation(() => {});
+ jest.spyOn(DropdownUser.prototype, 'getProjectId').mockImplementation(() => {});
+ jest.spyOn(DropdownUser.prototype, 'getGroupId').mockImplementation(() => {});
});
it('should return endpoint', () => {
diff --git a/spec/frontend/filtered_search/filtered_search_token_keys_spec.js b/spec/frontend/filtered_search/filtered_search_token_keys_spec.js
index d1fea18dea8..f24d2b118c2 100644
--- a/spec/frontend/filtered_search/filtered_search_token_keys_spec.js
+++ b/spec/frontend/filtered_search/filtered_search_token_keys_spec.js
@@ -124,6 +124,7 @@ describe('Filtered Search Token Keys', () => {
const condition = new FilteredSearchTokenKeys([], [], conditions).searchByConditionKeyValue(
null,
null,
+ null,
);
expect(condition).toBeNull();
@@ -132,6 +133,7 @@ describe('Filtered Search Token Keys', () => {
it('should return condition when found by tokenKey and value', () => {
const result = new FilteredSearchTokenKeys([], [], conditions).searchByConditionKeyValue(
conditions[0].tokenKey,
+ conditions[0].operator,
conditions[0].value,
);
diff --git a/spec/frontend/fixtures/issues.rb b/spec/frontend/fixtures/issues.rb
index 7e524990863..9a194e5ca84 100644
--- a/spec/frontend/fixtures/issues.rb
+++ b/spec/frontend/fixtures/issues.rb
@@ -23,6 +23,15 @@ describe Projects::IssuesController, '(JavaScript fixtures)', type: :controller
remove_repository(project)
end
+ it 'issues/new-issue.html' do
+ get :new, params: {
+ namespace_id: project.namespace.to_param,
+ project_id: project
+ }
+
+ expect(response).to be_successful
+ end
+
it 'issues/open-issue.html' do
render_issue(create(:issue, project: project))
end
diff --git a/spec/frontend/fixtures/static/mock-video.mp4 b/spec/frontend/fixtures/static/mock-video.mp4
new file mode 100644
index 00000000000..1fc478842f5
--- /dev/null
+++ b/spec/frontend/fixtures/static/mock-video.mp4
Binary files differ
diff --git a/spec/javascripts/frequent_items/components/frequent_items_search_input_spec.js b/spec/frontend/frequent_items/components/frequent_items_search_input_spec.js
index be11af8428f..204bbfb9c2f 100644
--- a/spec/javascripts/frequent_items/components/frequent_items_search_input_spec.js
+++ b/spec/frontend/frequent_items/components/frequent_items_search_input_spec.js
@@ -1,14 +1,10 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import searchComponent from '~/frequent_items/components/frequent_items_search_input.vue';
import eventHub from '~/frequent_items/event_hub';
-const localVue = createLocalVue();
-
const createComponent = (namespace = 'projects') =>
- shallowMount(localVue.extend(searchComponent), {
+ shallowMount(searchComponent, {
propsData: { namespace },
- localVue,
- sync: false,
});
describe('FrequentItemsSearchInputComponent', () => {
@@ -28,7 +24,7 @@ describe('FrequentItemsSearchInputComponent', () => {
describe('methods', () => {
describe('setFocus', () => {
it('should set focus to search input', () => {
- spyOn(vm.$refs.search, 'focus');
+ jest.spyOn(vm.$refs.search, 'focus').mockImplementation(() => {});
vm.setFocus();
@@ -39,13 +35,13 @@ describe('FrequentItemsSearchInputComponent', () => {
describe('mounted', () => {
it('should listen `dropdownOpen` event', done => {
- spyOn(eventHub, '$on');
+ jest.spyOn(eventHub, '$on').mockImplementation(() => {});
const vmX = createComponent().vm;
- localVue.nextTick(() => {
+ vmX.$nextTick(() => {
expect(eventHub.$on).toHaveBeenCalledWith(
`${vmX.namespace}-dropdownOpen`,
- jasmine.any(Function),
+ expect.any(Function),
);
done();
});
@@ -55,15 +51,15 @@ describe('FrequentItemsSearchInputComponent', () => {
describe('beforeDestroy', () => {
it('should unbind event listeners on eventHub', done => {
const vmX = createComponent().vm;
- spyOn(eventHub, '$off');
+ jest.spyOn(eventHub, '$off').mockImplementation(() => {});
vmX.$mount();
vmX.$destroy();
- localVue.nextTick(() => {
+ vmX.$nextTick(() => {
expect(eventHub.$off).toHaveBeenCalledWith(
`${vmX.namespace}-dropdownOpen`,
- jasmine.any(Function),
+ expect.any(Function),
);
done();
});
diff --git a/spec/javascripts/gl_field_errors_spec.js b/spec/frontend/gl_field_errors_spec.js
index 294f219d6fe..4653f519f65 100644
--- a/spec/javascripts/gl_field_errors_spec.js
+++ b/spec/frontend/gl_field_errors_spec.js
@@ -3,83 +3,89 @@
import $ from 'jquery';
import GlFieldErrors from '~/gl_field_errors';
-describe('GL Style Field Errors', function() {
+describe('GL Style Field Errors', () => {
+ let testContext;
+
+ beforeEach(() => {
+ testContext = {};
+ });
+
preloadFixtures('static/gl_field_errors.html');
- beforeEach(function() {
+ beforeEach(() => {
loadFixtures('static/gl_field_errors.html');
const $form = $('form.gl-show-field-errors');
- this.$form = $form;
- this.fieldErrors = new GlFieldErrors($form);
+ testContext.$form = $form;
+ testContext.fieldErrors = new GlFieldErrors($form);
});
- it('should select the correct input elements', function() {
- expect(this.$form).toBeDefined();
- expect(this.$form.length).toBe(1);
- expect(this.fieldErrors).toBeDefined();
- const { inputs } = this.fieldErrors.state;
+ it('should select the correct input elements', () => {
+ expect(testContext.$form).toBeDefined();
+ expect(testContext.$form.length).toBe(1);
+ expect(testContext.fieldErrors).toBeDefined();
+ const { inputs } = testContext.fieldErrors.state;
expect(inputs.length).toBe(4);
});
- it('should ignore elements with custom error handling', function() {
+ it('should ignore elements with custom error handling', () => {
const customErrorFlag = 'gl-field-error-ignore';
const customErrorElem = $(`.${customErrorFlag}`);
expect(customErrorElem.length).toBe(1);
- const customErrors = this.fieldErrors.state.inputs.filter(input => {
+ const customErrors = testContext.fieldErrors.state.inputs.filter(input => {
return input.inputElement.hasClass(customErrorFlag);
});
expect(customErrors.length).toBe(0);
});
- it('should not show any errors before submit attempt', function() {
- this.$form
+ it('should not show any errors before submit attempt', () => {
+ testContext.$form
.find('.email')
.val('not-a-valid-email')
.keyup();
- this.$form
+ testContext.$form
.find('.text-required')
.val('')
.keyup();
- this.$form
+ testContext.$form
.find('.alphanumberic')
.val('?---*')
.keyup();
- const errorsShown = this.$form.find('.gl-field-error-outline');
+ const errorsShown = testContext.$form.find('.gl-field-error-outline');
expect(errorsShown.length).toBe(0);
});
- it('should show errors when input valid is submitted', function() {
- this.$form
+ it('should show errors when input valid is submitted', () => {
+ testContext.$form
.find('.email')
.val('not-a-valid-email')
.keyup();
- this.$form
+ testContext.$form
.find('.text-required')
.val('')
.keyup();
- this.$form
+ testContext.$form
.find('.alphanumberic')
.val('?---*')
.keyup();
- this.$form.submit();
+ testContext.$form.submit();
- const errorsShown = this.$form.find('.gl-field-error-outline');
+ const errorsShown = testContext.$form.find('.gl-field-error-outline');
expect(errorsShown.length).toBe(4);
});
- it('should properly track validity state on input after invalid submission attempt', function() {
- this.$form.submit();
+ it('should properly track validity state on input after invalid submission attempt', () => {
+ testContext.$form.submit();
- const emailInputModel = this.fieldErrors.state.inputs[1];
+ const emailInputModel = testContext.fieldErrors.state.inputs[1];
const fieldState = emailInputModel.state;
const emailInputElement = emailInputModel.inputElement;
@@ -124,9 +130,9 @@ describe('GL Style Field Errors', function() {
expect(fieldState.valid).toBe(true);
});
- it('should properly infer error messages', function() {
- this.$form.submit();
- const trackedInputs = this.fieldErrors.state.inputs;
+ it('should properly infer error messages', () => {
+ testContext.$form.submit();
+ const trackedInputs = testContext.fieldErrors.state.inputs;
const inputHasTitle = trackedInputs[1];
const hasTitleErrorElem = inputHasTitle.inputElement.siblings('.gl-field-error');
const inputNoTitle = trackedInputs[2];
diff --git a/spec/javascripts/gpg_badges_spec.js b/spec/frontend/gpg_badges_spec.js
index 4731484e02d..809cc5c88e2 100644
--- a/spec/javascripts/gpg_badges_spec.js
+++ b/spec/frontend/gpg_badges_spec.js
@@ -38,7 +38,7 @@ describe('GpgBadges', () => {
it('does not make a request if there is no container element', done => {
setFixtures('');
- spyOn(axios, 'get');
+ jest.spyOn(axios, 'get').mockImplementation(() => {});
GpgBadges.fetch()
.then(() => {
@@ -50,7 +50,7 @@ describe('GpgBadges', () => {
it('throws an error if the endpoint is missing', done => {
setFixtures('<div class="js-signature-container"></div>');
- spyOn(axios, 'get');
+ jest.spyOn(axios, 'get').mockImplementation(() => {});
GpgBadges.fetch()
.then(() => done.fail('Expected error to be thrown'))
diff --git a/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap b/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap
index 69ad71a1efb..5c784c8000f 100644
--- a/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap
+++ b/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap
@@ -16,11 +16,11 @@ exports[`grafana integration component default state to match the default snapsh
</h4>
- <glbutton-stub
+ <gl-button-stub
class="js-settings-toggle"
>
Expand
- </glbutton-stub>
+ </gl-button-stub>
<p
class="js-section-sub-header"
@@ -35,32 +35,32 @@ exports[`grafana integration component default state to match the default snapsh
class="settings-content"
>
<form>
- <glformcheckbox-stub
+ <gl-form-checkbox-stub
class="mb-4"
id="grafana-integration-enabled"
>
Active
- </glformcheckbox-stub>
+ </gl-form-checkbox-stub>
- <glformgroup-stub
+ <gl-form-group-stub
description="Enter the base URL of the Grafana instance."
label="Grafana URL"
label-for="grafana-url"
>
- <glforminput-stub
+ <gl-form-input-stub
id="grafana-url"
placeholder="https://my-url.grafana.net/"
value="http://test.host"
/>
- </glformgroup-stub>
+ </gl-form-group-stub>
- <glformgroup-stub
+ <gl-form-group-stub
label="API Token"
label-for="grafana-token"
>
- <glforminput-stub
+ <gl-form-input-stub
id="grafana-token"
value="someToken"
/>
@@ -86,15 +86,15 @@ exports[`grafana integration component default state to match the default snapsh
/>
</a>
</p>
- </glformgroup-stub>
+ </gl-form-group-stub>
- <glbutton-stub
+ <gl-button-stub
variant="success"
>
Save Changes
- </glbutton-stub>
+ </gl-button-stub>
</form>
</div>
</section>
diff --git a/spec/javascripts/header_spec.js b/spec/frontend/header_spec.js
index c36d3be1b22..00b5b306d66 100644
--- a/spec/javascripts/header_spec.js
+++ b/spec/frontend/header_spec.js
@@ -1,7 +1,7 @@
import $ from 'jquery';
import initTodoToggle from '~/header';
-describe('Header', function() {
+describe('Header', () => {
const todosPendingCount = '.todos-count';
const fixtureTemplate = 'issues/open-issue.html';
diff --git a/spec/javascripts/helpers/class_spec_helper_spec.js b/spec/frontend/helpers/class_spec_helper_spec.js
index f6268b0fb6d..533d5687bde 100644
--- a/spec/javascripts/helpers/class_spec_helper_spec.js
+++ b/spec/frontend/helpers/class_spec_helper_spec.js
@@ -2,7 +2,13 @@
import './class_spec_helper';
-describe('ClassSpecHelper', function() {
+describe('ClassSpecHelper', () => {
+ let testContext;
+
+ beforeEach(() => {
+ testContext = {};
+ });
+
describe('itShouldBeAStaticMethod', () => {
beforeEach(() => {
class TestClass {
@@ -12,7 +18,7 @@ describe('ClassSpecHelper', function() {
static staticMethod() {}
}
- this.TestClass = TestClass;
+ testContext.TestClass = TestClass;
});
ClassSpecHelper.itShouldBeAStaticMethod(ClassSpecHelper, 'itShouldBeAStaticMethod');
diff --git a/spec/frontend/helpers/diffs_helper_spec.js b/spec/frontend/helpers/diffs_helper_spec.js
new file mode 100644
index 00000000000..b223d48bf5c
--- /dev/null
+++ b/spec/frontend/helpers/diffs_helper_spec.js
@@ -0,0 +1,113 @@
+import * as diffsHelper from '~/helpers/diffs_helper';
+
+describe('diffs helper', () => {
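+ // Fixture builder: a diff file with inline lines, parallel lines, and a readable blob; overrides let each test drop or empty any of them.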
+ function getDiffFile(withOverrides = {}) {
+ return {
+ parallel_diff_lines: ['line'],
+ highlighted_diff_lines: ['line'],
+ blob: {
+ readable_text: 'text',
+ },
+ ...withOverrides,
+ };
+ }
+
+ describe('hasInlineLines', () => {
+ it('is false when the file does not exist', () => {
+ expect(diffsHelper.hasInlineLines()).toBeFalsy();
+ });
+
+ it('is false when the file does not have the highlighted_diff_lines property', () => {
+ const missingInline = getDiffFile({ highlighted_diff_lines: undefined });
+
+ expect(diffsHelper.hasInlineLines(missingInline)).toBeFalsy();
+ });
+
+ it('is false when the file has zero highlighted_diff_lines', () => {
+ const emptyInline = getDiffFile({ highlighted_diff_lines: [] });
+
+ expect(diffsHelper.hasInlineLines(emptyInline)).toBeFalsy();
+ });
+
+ it('is true when the file has at least 1 highlighted_diff_lines', () => {
+ expect(diffsHelper.hasInlineLines(getDiffFile())).toBeTruthy();
+ });
+ });
+
+ describe('hasParallelLines', () => {
+ it('is false when the file does not exist', () => {
+ expect(diffsHelper.hasParallelLines()).toBeFalsy();
+ });
+
+ it('is false when the file does not have the parallel_diff_lines property', () => {
+ const missingInline = getDiffFile({ parallel_diff_lines: undefined });
+
+ expect(diffsHelper.hasParallelLines(missingInline)).toBeFalsy();
+ });
+
+ it('is false when the file has zero parallel_diff_lines', () => {
+ const emptyInline = getDiffFile({ parallel_diff_lines: [] });
+
+ expect(diffsHelper.hasParallelLines(emptyInline)).toBeFalsy();
+ });
+
+ it('is true when the file has at least 1 parallel_diff_lines', () => {
+ expect(diffsHelper.hasParallelLines(getDiffFile())).toBeTruthy();
+ });
+ });
+
+ describe('isSingleViewStyle', () => {
+ it('is true when the file has at least 1 inline line but no parallel lines for any reason', () => {
+ const noParallelLines = getDiffFile({ parallel_diff_lines: undefined });
+ const emptyParallelLines = getDiffFile({ parallel_diff_lines: [] });
+
+ expect(diffsHelper.isSingleViewStyle(noParallelLines)).toBeTruthy();
+ expect(diffsHelper.isSingleViewStyle(emptyParallelLines)).toBeTruthy();
+ });
+
+ it('is true when the file has at least 1 parallel line but no inline lines for any reason', () => {
+ const noInlineLines = getDiffFile({ highlighted_diff_lines: undefined });
+ const emptyInlineLines = getDiffFile({ highlighted_diff_lines: [] });
+
+ expect(diffsHelper.isSingleViewStyle(noInlineLines)).toBeTruthy();
+ expect(diffsHelper.isSingleViewStyle(emptyInlineLines)).toBeTruthy();
+ });
+
+ it('is true when the file does not have any inline lines or parallel lines for any reason', () => {
+ const noLines = getDiffFile({
+ highlighted_diff_lines: undefined,
+ parallel_diff_lines: undefined,
+ });
+ const emptyLines = getDiffFile({
+ highlighted_diff_lines: [],
+ parallel_diff_lines: [],
+ });
+
+ expect(diffsHelper.isSingleViewStyle(noLines)).toBeTruthy();
+ expect(diffsHelper.isSingleViewStyle(emptyLines)).toBeTruthy();
+ expect(diffsHelper.isSingleViewStyle()).toBeTruthy();
+ });
+
+ it('is false when the file has both inline and parallel lines', () => {
+ expect(diffsHelper.isSingleViewStyle(getDiffFile())).toBeFalsy();
+ });
+ });
+
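+ // hasDiff is expected to be true whenever diff lines exist or there is no readable blob, and false when only a readable blob is present.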
+ describe.each`
+ context | inline | parallel | blob | expected
+ ${'only has inline lines'} | ${['line']} | ${undefined} | ${undefined} | ${true}
+ ${'only has parallel lines'} | ${undefined} | ${['line']} | ${undefined} | ${true}
+ ${"doesn't have inline, parallel, or blob"} | ${undefined} | ${undefined} | ${undefined} | ${true}
+ ${'has blob readable text'} | ${undefined} | ${undefined} | ${{ readable_text: 'text' }} | ${false}
+ `('when hasDiff', ({ context, inline, parallel, blob, expected }) => {
+ it(`${context}`, () => {
+ const diffFile = getDiffFile({
+ highlighted_diff_lines: inline,
+ parallel_diff_lines: parallel,
+ blob,
+ });
+
+ expect(diffsHelper.hasDiff(diffFile)).toEqual(expected);
+ });
+ });
+});
diff --git a/spec/frontend/helpers/stub_children.js b/spec/frontend/helpers/stub_children.js
new file mode 100644
index 00000000000..91171eb3d8c
--- /dev/null
+++ b/spec/frontend/helpers/stub_children.js
@@ -0,0 +1,3 @@
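+// Builds a stubs map ({ ChildName: true, ... }) covering every locally registered child component, e.g. for a vue-test-utils `stubs` option.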
+export default function stubChildren(Component) {
+ return Object.fromEntries(Object.keys(Component.components).map(c => [c, true]));
+}
diff --git a/spec/frontend/ide/components/branches/search_list_spec.js b/spec/frontend/ide/components/branches/search_list_spec.js
index d26dfc48ff8..fe142d70698 100644
--- a/spec/frontend/ide/components/branches/search_list_spec.js
+++ b/spec/frontend/ide/components/branches/search_list_spec.js
@@ -33,7 +33,6 @@ describe('IDE branches search list', () => {
wrapper = shallowMount(List, {
localVue,
store: fakeStore,
- sync: false,
});
};
diff --git a/spec/frontend/ide/components/commit_sidebar/editor_header_spec.js b/spec/frontend/ide/components/commit_sidebar/editor_header_spec.js
new file mode 100644
index 00000000000..054e7492429
--- /dev/null
+++ b/spec/frontend/ide/components/commit_sidebar/editor_header_spec.js
@@ -0,0 +1,82 @@
+import Vuex from 'vuex';
+import { mount, createLocalVue } from '@vue/test-utils';
+import { createStore } from '~/ide/stores';
+import EditorHeader from '~/ide/components/commit_sidebar/editor_header.vue';
+import { file } from '../../helpers';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('IDE commit editor header', () => {
+ let wrapper;
+ let f;
+ let store;
+
+ const findDiscardModal = () => wrapper.find({ ref: 'discardModal' });
+ const findDiscardButton = () => wrapper.find({ ref: 'discardButton' });
+ const findActionButton = () => wrapper.find({ ref: 'actionButton' });
+
+ beforeEach(() => {
+ f = file('file');
+ store = createStore();
+
+ wrapper = mount(EditorHeader, {
+ store,
+ localVue,
+ propsData: {
+ activeFile: f,
+ },
+ });
+
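+ // Stub the mapped Vuex actions so the click handlers can be asserted on without touching the store.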
+ jest.spyOn(wrapper.vm, 'stageChange').mockImplementation();
+ jest.spyOn(wrapper.vm, 'unstageChange').mockImplementation();
+ jest.spyOn(wrapper.vm, 'discardFileChanges').mockImplementation();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('renders button to discard & stage', () => {
+ expect(wrapper.vm.$el.querySelectorAll('.btn').length).toBe(2);
+ });
+
+ describe('discard button', () => {
+ let modal;
+
+ beforeEach(() => {
+ modal = findDiscardModal();
+
+ jest.spyOn(modal.vm, 'show');
+
+ findDiscardButton().trigger('click');
+ });
+
+ it('opens a dialog confirming discard', () => {
+ expect(modal.vm.show).toHaveBeenCalled();
+ });
+
+ it('calls discardFileChanges if dialog result is confirmed', () => {
+ modal.vm.$emit('ok');
+
+ expect(wrapper.vm.discardFileChanges).toHaveBeenCalledWith(f.path);
+ });
+ });
+
+ describe('stage/unstage button', () => {
+ it('unstages the file if it was already staged', () => {
+ f.staged = true;
+
+ findActionButton().trigger('click');
+
+ expect(wrapper.vm.unstageChange).toHaveBeenCalledWith(f.path);
+ });
+
+ it('stages the file if it was not staged', () => {
+ findActionButton().trigger('click');
+
+ expect(wrapper.vm.stageChange).toHaveBeenCalledWith(f.path);
+ });
+ });
+});
diff --git a/spec/frontend/ide/components/error_message_spec.js b/spec/frontend/ide/components/error_message_spec.js
index e995c64645e..1de496ba3f8 100644
--- a/spec/frontend/ide/components/error_message_spec.js
+++ b/spec/frontend/ide/components/error_message_spec.js
@@ -26,7 +26,6 @@ describe('IDE error message component', () => {
},
store: fakeStore,
localVue,
- sync: false,
});
};
@@ -90,8 +89,13 @@ describe('IDE error message component', () => {
it('does not dispatch action when already loading', () => {
wrapper.find('button').trigger('click');
actionMock.mockReset();
- wrapper.find('button').trigger('click');
- expect(actionMock).not.toHaveBeenCalled();
+ return wrapper.vm.$nextTick(() => {
+ wrapper.find('button').trigger('click');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(actionMock).not.toHaveBeenCalled();
+ });
+ });
});
it('shows loading icon when loading', () => {
diff --git a/spec/frontend/ide/components/file_templates/dropdown_spec.js b/spec/frontend/ide/components/file_templates/dropdown_spec.js
index 83d797469ad..3cffbc3362f 100644
--- a/spec/frontend/ide/components/file_templates/dropdown_spec.js
+++ b/spec/frontend/ide/components/file_templates/dropdown_spec.js
@@ -45,7 +45,6 @@ describe('IDE file templates dropdown component', () => {
},
store: fakeStore,
localVue,
- sync: false,
});
({ element } = wrapper);
@@ -62,7 +61,9 @@ describe('IDE file templates dropdown component', () => {
const item = findItemButtons().at(0);
item.trigger('click');
- expect(wrapper.emitted().click[0][0]).toBe(itemData);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted().click[0][0]).toBe(itemData);
+ });
});
it('renders dropdown title', () => {
diff --git a/spec/frontend/ide/components/ide_status_list_spec.js b/spec/frontend/ide/components/ide_status_list_spec.js
index 4e0e8a9f0e3..2762adfb57d 100644
--- a/spec/frontend/ide/components/ide_status_list_spec.js
+++ b/spec/frontend/ide/components/ide_status_list_spec.js
@@ -25,9 +25,8 @@ describe('ide/components/ide_status_list', () => {
},
});
- wrapper = shallowMount(localVue.extend(IdeStatusList), {
+ wrapper = shallowMount(IdeStatusList, {
localVue,
- sync: false,
store,
...options,
});
diff --git a/spec/frontend/ide/components/jobs/__snapshots__/stage_spec.js.snap b/spec/frontend/ide/components/jobs/__snapshots__/stage_spec.js.snap
index 5d6c31f01d9..43e606eac6e 100644
--- a/spec/frontend/ide/components/jobs/__snapshots__/stage_spec.js.snap
+++ b/spec/frontend/ide/components/jobs/__snapshots__/stage_spec.js.snap
@@ -7,7 +7,7 @@ exports[`IDE pipeline stage renders stage details & icon 1`] = `
<div
class="card-header"
>
- <ciicon-stub
+ <ci-icon-stub
cssclasses=""
size="24"
status="[object Object]"
diff --git a/spec/javascripts/ide/components/jobs/detail/scroll_button_spec.js b/spec/frontend/ide/components/jobs/detail/scroll_button_spec.js
index fff382a107f..096851a5401 100644
--- a/spec/javascripts/ide/components/jobs/detail/scroll_button_spec.js
+++ b/spec/frontend/ide/components/jobs/detail/scroll_button_spec.js
@@ -40,7 +40,7 @@ describe('IDE job log scroll button', () => {
});
it('emits click event on click', () => {
- spyOn(vm, '$emit');
+ jest.spyOn(vm, '$emit').mockImplementation(() => {});
vm.$el.querySelector('.btn-scroll').click();
diff --git a/spec/frontend/ide/components/jobs/list_spec.js b/spec/frontend/ide/components/jobs/list_spec.js
index ec2e5b05048..d8880fa7cb7 100644
--- a/spec/frontend/ide/components/jobs/list_spec.js
+++ b/spec/frontend/ide/components/jobs/list_spec.js
@@ -44,7 +44,6 @@ describe('IDE stages list', () => {
},
localVue,
store,
- sync: false,
});
};
@@ -93,7 +92,6 @@ describe('IDE stages list', () => {
wrapper = mount(StageList, {
propsData: { ...defaultProps, stages },
store,
- sync: false,
localVue,
});
});
diff --git a/spec/frontend/ide/components/jobs/stage_spec.js b/spec/frontend/ide/components/jobs/stage_spec.js
index 2e42ab26d27..3a47571ee13 100644
--- a/spec/frontend/ide/components/jobs/stage_spec.js
+++ b/spec/frontend/ide/components/jobs/stage_spec.js
@@ -26,7 +26,6 @@ describe('IDE pipeline stage', () => {
...defaultProps,
...props,
},
- sync: false,
});
};
@@ -52,7 +51,10 @@ describe('IDE pipeline stage', () => {
const id = 5;
createComponent({ stage: { ...defaultProps.stage, id } });
findHeader().trigger('click');
- expect(wrapper.emitted().toggleCollapsed[0][0]).toBe(id);
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted().toggleCollapsed[0][0]).toBe(id);
+ });
});
it('emits clickViewLog entity with job', () => {
@@ -62,7 +64,9 @@ describe('IDE pipeline stage', () => {
.findAll(Item)
.at(0)
.vm.$emit('clickViewLog', job);
- expect(wrapper.emitted().clickViewLog[0][0]).toBe(job);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted().clickViewLog[0][0]).toBe(job);
+ });
});
it('renders stage details & icon', () => {
diff --git a/spec/frontend/ide/components/merge_requests/list_spec.js b/spec/frontend/ide/components/merge_requests/list_spec.js
index 76806dcba69..ae94ee4efa7 100644
--- a/spec/frontend/ide/components/merge_requests/list_spec.js
+++ b/spec/frontend/ide/components/merge_requests/list_spec.js
@@ -42,7 +42,6 @@ describe('IDE merge requests list', () => {
wrapper = shallowMount(List, {
store: fakeStore,
localVue,
- sync: false,
});
};
diff --git a/spec/frontend/ide/components/panes/collapsible_sidebar_spec.js b/spec/frontend/ide/components/panes/collapsible_sidebar_spec.js
new file mode 100644
index 00000000000..3bc89996978
--- /dev/null
+++ b/spec/frontend/ide/components/panes/collapsible_sidebar_spec.js
@@ -0,0 +1,167 @@
+import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { createStore } from '~/ide/stores';
+import paneModule from '~/ide/stores/modules/pane';
+import CollapsibleSidebar from '~/ide/components/panes/collapsible_sidebar.vue';
+import Vuex from 'vuex';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('ide/components/panes/collapsible_sidebar.vue', () => {
+ let wrapper;
+ let store;
+
+ const width = 350;
+ const fakeComponentName = 'fake-component';
+
+ const createComponent = props => {
+ wrapper = shallowMount(CollapsibleSidebar, {
+ localVue,
+ store,
+ propsData: {
+ extensionTabs: [],
+ side: 'right',
+ width,
+ ...props,
+ },
+ slots: {
+ 'header-icon': '<div class=".header-icon-slot">SLOT ICON</div>',
+ header: '<div class=".header-slot"/>',
+ footer: '<div class=".footer-slot"/>',
+ },
+ });
+ };
+
+ const findTabButton = () => wrapper.find(`[data-qa-selector="${fakeComponentName}_tab_button"]`);
+
+ beforeEach(() => {
+ store = createStore();
+ store.registerModule('leftPane', paneModule());
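+ // The left pane module is registered manually so the side="left" cases have pane state to work with.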
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('with a tab', () => {
+ let fakeView;
+ let extensionTabs;
+
+ beforeEach(() => {
+ const FakeComponent = localVue.component(fakeComponentName, {
+ render: () => {},
+ });
+
+ fakeView = {
+ name: fakeComponentName,
+ keepAlive: true,
+ component: FakeComponent,
+ };
+
+ extensionTabs = [
+ {
+ show: true,
+ title: fakeComponentName,
+ views: [fakeView],
+ icon: 'text-description',
+ buttonClasses: ['button-class-1', 'button-class-2'],
+ },
+ ];
+ });
+
+ describe.each`
+ side
+ ${'left'}
+ ${'right'}
+ `('when side=$side', ({ side }) => {
+ it('correctly renders side specific attributes', () => {
+ createComponent({ extensionTabs, side });
+ const button = findTabButton();
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.classes()).toContain('multi-file-commit-panel');
+ expect(wrapper.classes()).toContain(`ide-${side}-sidebar`);
+ expect(wrapper.find('.multi-file-commit-panel-inner')).not.toBe(null);
+ expect(wrapper.find(`.ide-${side}-sidebar-${fakeComponentName}`)).not.toBe(null);
+ expect(button.attributes('data-placement')).toEqual(side === 'left' ? 'right' : 'left');
+ if (side === 'right') {
+ // this class is only needed on the right side; there is no 'is-left'
+ expect(button.classes()).toContain('is-right');
+ } else {
+ expect(button.classes()).not.toContain('is-right');
+ }
+ });
+ });
+ });
+
+ describe('when default side', () => {
+ let button;
+
+ beforeEach(() => {
+ createComponent({ extensionTabs });
+
+ button = findTabButton();
+ });
+
+ it('correctly renders tab-specific classes', () => {
+ store.state.rightPane.currentView = fakeComponentName;
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(button.classes()).toContain('button-class-1');
+ expect(button.classes()).toContain('button-class-2');
+ });
+ });
+
+ it('can show an open pane tab with an active view', () => {
+ store.state.rightPane.isOpen = true;
+ store.state.rightPane.currentView = fakeComponentName;
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(button.classes()).toEqual(expect.arrayContaining(['ide-sidebar-link', 'active']));
+ expect(button.attributes('data-original-title')).toEqual(fakeComponentName);
+ expect(wrapper.find('.js-tab-view').exists()).toBe(true);
+ });
+ });
+
+ it('does not show a pane which is not open', () => {
+ store.state.rightPane.isOpen = false;
+ store.state.rightPane.currentView = fakeComponentName;
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(button.classes()).not.toEqual(
+ expect.arrayContaining(['ide-sidebar-link', 'active']),
+ );
+ expect(wrapper.find('.js-tab-view').exists()).toBe(false);
+ });
+ });
+
+ describe('when button is clicked', () => {
+ it('opens view', () => {
+ button.trigger('click');
+ expect(store.state.rightPane.isOpen).toBeTruthy();
+ });
+
+ it('toggles open view if tab is currently active', () => {
+ button.trigger('click');
+ expect(store.state.rightPane.isOpen).toBeTruthy();
+
+ button.trigger('click');
+ expect(store.state.rightPane.isOpen).toBeFalsy();
+ });
+ });
+
+ it('shows header-icon', () => {
+ expect(wrapper.find('.header-icon-slot')).not.toBeNull();
+ });
+
+ it('shows header', () => {
+ expect(wrapper.find('.header-slot')).not.toBeNull();
+ });
+
+ it('shows footer', () => {
+ expect(wrapper.find('.footer-slot')).not.toBeNull();
+ });
+ });
+ });
+});
diff --git a/spec/frontend/ide/components/panes/right_spec.js b/spec/frontend/ide/components/panes/right_spec.js
index 6908790aaa8..7e408be96fc 100644
--- a/spec/frontend/ide/components/panes/right_spec.js
+++ b/spec/frontend/ide/components/panes/right_spec.js
@@ -1,89 +1,124 @@
import Vue from 'vue';
-import '~/behaviors/markdown/render_gfm';
-import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
+import Vuex from 'vuex';
+import { createLocalVue, shallowMount } from '@vue/test-utils';
import { createStore } from '~/ide/stores';
import RightPane from '~/ide/components/panes/right.vue';
+import CollapsibleSidebar from '~/ide/components/panes/collapsible_sidebar.vue';
import { rightSidebarViews } from '~/ide/constants';
-describe('IDE right pane', () => {
- let Component;
- let vm;
+const localVue = createLocalVue();
+localVue.use(Vuex);
- beforeAll(() => {
- Component = Vue.extend(RightPane);
- });
+describe('ide/components/panes/right.vue', () => {
+ let wrapper;
+ let store;
- beforeEach(() => {
- const store = createStore();
+ const createComponent = props => {
+ wrapper = shallowMount(RightPane, {
+ localVue,
+ store,
+ propsData: {
+ ...props,
+ },
+ });
+ };
- vm = createComponentWithStore(Component, store).$mount();
+ beforeEach(() => {
+ store = createStore();
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
+ wrapper = null;
});
- describe('active', () => {
- it('renders merge request button as active', done => {
- vm.$store.state.rightPane.isOpen = true;
- vm.$store.state.rightPane.currentView = rightSidebarViews.mergeRequestInfo.name;
- vm.$store.state.currentMergeRequestId = '123';
- vm.$store.state.currentProjectId = 'gitlab-ce';
- vm.$store.state.currentMergeRequestId = 1;
- vm.$store.state.projects['gitlab-ce'] = {
- mergeRequests: {
- 1: {
- iid: 1,
- title: 'Testing',
- title_html: '<span class="title-html">Testing</span>',
- description: 'Description',
- description_html: '<p class="description-html">Description HTML</p>',
- },
+ it('allows tabs to be added via extensionTabs prop', () => {
+ createComponent({
+ extensionTabs: [
+ {
+ show: true,
+ title: 'FakeTab',
},
- };
-
- vm.$nextTick()
- .then(() => {
- expect(vm.$el.querySelector('.ide-sidebar-link.active')).not.toBe(null);
- expect(
- vm.$el.querySelector('.ide-sidebar-link.active').getAttribute('data-original-title'),
- ).toBe('Merge Request');
- })
- .then(done)
- .catch(done.fail);
+ ],
});
+
+ expect(wrapper.find(CollapsibleSidebar).props('extensionTabs')).toEqual(
+ expect.arrayContaining([
+ expect.objectContaining({
+ show: true,
+ title: 'FakeTab',
+ }),
+ ]),
+ );
});
- describe('click', () => {
- beforeEach(() => {
- jest.spyOn(vm, 'open').mockReturnValue();
- });
+ describe('pipelines tab', () => {
+ it('is always shown', () => {
+ createComponent();
- it('sets view to merge request', done => {
- vm.$store.state.currentMergeRequestId = '123';
+ expect(wrapper.find(CollapsibleSidebar).props('extensionTabs')).toEqual(
+ expect.arrayContaining([
+ expect.objectContaining({
+ show: true,
+ title: 'Pipelines',
+ views: expect.arrayContaining([
+ expect.objectContaining({
+ name: rightSidebarViews.pipelines.name,
+ }),
+ expect.objectContaining({
+ name: rightSidebarViews.jobsDetail.name,
+ }),
+ ]),
+ }),
+ ]),
+ );
+ });
+ });
- vm.$nextTick(() => {
- vm.$el.querySelector('.ide-sidebar-link').click();
+ describe('merge request tab', () => {
+ it('is shown if there is a currentMergeRequestId', () => {
+ store.state.currentMergeRequestId = 1;
- expect(vm.open).toHaveBeenCalledWith(rightSidebarViews.mergeRequestInfo);
+ createComponent();
- done();
- });
+ expect(wrapper.find(CollapsibleSidebar).props('extensionTabs')).toEqual(
+ expect.arrayContaining([
+ expect.objectContaining({
+ show: true,
+ title: 'Merge Request',
+ views: expect.arrayContaining([
+ expect.objectContaining({
+ name: rightSidebarViews.mergeRequestInfo.name,
+ }),
+ ]),
+ }),
+ ]),
+ );
});
});
- describe('live preview', () => {
- it('renders live preview button', done => {
- Vue.set(vm.$store.state.entries, 'package.json', {
+ describe('clientside live preview tab', () => {
+ it('is shown if there is a packageJson and clientsidePreviewEnabled', () => {
+ Vue.set(store.state.entries, 'package.json', {
name: 'package.json',
});
- vm.$store.state.clientsidePreviewEnabled = true;
+ store.state.clientsidePreviewEnabled = true;
- vm.$nextTick(() => {
- expect(vm.$el.querySelector('button[aria-label="Live preview"]')).not.toBeNull();
+ createComponent();
- done();
- });
+ expect(wrapper.find(CollapsibleSidebar).props('extensionTabs')).toEqual(
+ expect.arrayContaining([
+ expect.objectContaining({
+ show: true,
+ title: 'Live preview',
+ views: expect.arrayContaining([
+ expect.objectContaining({
+ name: rightSidebarViews.clientSidePreview.name,
+ }),
+ ]),
+ }),
+ ]),
+ );
});
});
});
diff --git a/spec/frontend/ide/components/pipelines/__snapshots__/list_spec.js.snap b/spec/frontend/ide/components/pipelines/__snapshots__/list_spec.js.snap
index 5fbe6af750d..177cd4559ca 100644
--- a/spec/frontend/ide/components/pipelines/__snapshots__/list_spec.js.snap
+++ b/spec/frontend/ide/components/pipelines/__snapshots__/list_spec.js.snap
@@ -6,7 +6,7 @@ exports[`IDE pipelines list when loaded renders empty state when no latestPipeli
>
<!---->
- <emptystate-stub
+ <empty-state-stub
cansetci="true"
emptystatesvgpath="http://test.host"
helppagepath="http://test.host"
diff --git a/spec/frontend/ide/components/pipelines/list_spec.js b/spec/frontend/ide/components/pipelines/list_spec.js
index 91152dffafa..11e672b6685 100644
--- a/spec/frontend/ide/components/pipelines/list_spec.js
+++ b/spec/frontend/ide/components/pipelines/list_spec.js
@@ -63,7 +63,6 @@ describe('IDE pipelines list', () => {
wrapper = shallowMount(List, {
localVue,
store: fakeStore,
- sync: false,
});
};
diff --git a/spec/frontend/ide/components/preview/clientside_spec.js b/spec/frontend/ide/components/preview/clientside_spec.js
index 5cb9e598fc4..c7d5ea9c513 100644
--- a/spec/frontend/ide/components/preview/clientside_spec.js
+++ b/spec/frontend/ide/components/preview/clientside_spec.js
@@ -54,7 +54,6 @@ describe('IDE clientside preview', () => {
});
wrapper = shallowMount(Clientside, {
- sync: false,
store,
localVue,
});
diff --git a/spec/javascripts/ide/stores/actions/file_spec.js b/spec/frontend/ide/stores/actions/file_spec.js
index 03d1125c23a..a8e48f0b85e 100644
--- a/spec/javascripts/ide/stores/actions/file_spec.js
+++ b/spec/frontend/ide/stores/actions/file_spec.js
@@ -1,20 +1,21 @@
import Vue from 'vue';
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
-import store from '~/ide/stores';
+import { createStore } from '~/ide/stores';
import * as actions from '~/ide/stores/actions/file';
import * as types from '~/ide/stores/mutation_types';
import service from '~/ide/services';
import router from '~/ide/ide_router';
import eventHub from '~/ide/eventhub';
-import { file, resetStore } from '../../helpers';
-import testAction from '../../../helpers/vuex_action_helper';
+import { file } from '../../helpers';
+const ORIGINAL_CONTENT = 'original content';
const RELATIVE_URL_ROOT = '/gitlab';
describe('IDE store file actions', () => {
let mock;
let originalGon;
+ let store;
beforeEach(() => {
mock = new MockAdapter(axios);
@@ -24,12 +25,15 @@ describe('IDE store file actions', () => {
relative_url_root: RELATIVE_URL_ROOT,
};
- spyOn(router, 'push');
+ store = createStore();
+
+ jest.spyOn(store, 'commit');
+ jest.spyOn(store, 'dispatch');
+ jest.spyOn(router, 'push').mockImplementation(() => {});
});
afterEach(() => {
mock.restore();
- resetStore(store);
window.gon = originalGon;
});
@@ -117,7 +121,7 @@ describe('IDE store file actions', () => {
let oldScrollToTab;
beforeEach(() => {
- scrollToTabSpy = jasmine.createSpy('scrollToTab');
+ scrollToTabSpy = jest.fn();
oldScrollToTab = store._actions.scrollToTab; // eslint-disable-line
store._actions.scrollToTab = [scrollToTabSpy]; // eslint-disable-line
@@ -131,7 +135,7 @@ describe('IDE store file actions', () => {
});
it('calls scrollToTab', () => {
- const dispatch = jasmine.createSpy();
+ const dispatch = jest.fn();
actions.setFileActive(
{ commit() {}, state: store.state, getters: store.getters, dispatch },
@@ -142,7 +146,7 @@ describe('IDE store file actions', () => {
});
it('commits SET_FILE_ACTIVE', () => {
- const commit = jasmine.createSpy();
+ const commit = jest.fn();
actions.setFileActive(
{ commit, state: store.state, getters: store.getters, dispatch() {} },
@@ -161,7 +165,7 @@ describe('IDE store file actions', () => {
localFile.active = true;
store.state.openFiles.push(localFile);
- const commit = jasmine.createSpy();
+ const commit = jest.fn();
actions.setFileActive(
{ commit, state: store.state, getters: store.getters, dispatch() {} },
@@ -179,7 +183,7 @@ describe('IDE store file actions', () => {
let localFile;
beforeEach(() => {
- spyOn(service, 'getFileData').and.callThrough();
+ jest.spyOn(service, 'getFileData');
localFile = file(`newCreate-${Math.random()}`);
store.state.entries[localFile.path] = localFile;
@@ -198,6 +202,53 @@ describe('IDE store file actions', () => {
};
});
+ describe('call to service', () => {
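+ // Dispatches getFileData and asserts whether service.getFileData was hit, depending on the file's raw/tempFile/prevPath state.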
+ const callExpectation = serviceCalled => {
+ store.dispatch('getFileData', { path: localFile.path });
+
+ if (serviceCalled) {
+ expect(service.getFileData).toHaveBeenCalled();
+ } else {
+ expect(service.getFileData).not.toHaveBeenCalled();
+ }
+ };
+
+ beforeEach(() => {
+ service.getFileData.mockImplementation(() => new Promise(() => {}));
+ });
+
+ it("isn't called if file.raw exists", () => {
+ localFile.raw = 'raw data';
+
+ callExpectation(false);
+ });
+
+ it("isn't called if file is a tempFile", () => {
+ localFile.raw = '';
+ localFile.tempFile = true;
+
+ callExpectation(false);
+ });
+
+ it('is called if file is a tempFile but also renamed', () => {
+ localFile.raw = '';
+ localFile.tempFile = true;
+ localFile.prevPath = 'old_path';
+
+ callExpectation(true);
+ });
+
+ it('is called if tempFile but file was deleted and readded', () => {
+ localFile.raw = '';
+ localFile.tempFile = true;
+ localFile.prevPath = 'old_path';
+
+ store.state.stagedFiles = [{ ...localFile, deleted: true }];
+
+ callExpectation(true);
+ });
+ });
+
describe('success', () => {
beforeEach(() => {
mock.onGet(`${RELATIVE_URL_ROOT}/test/test/7297abc/${localFile.path}`).replyOnce(
@@ -328,10 +379,10 @@ describe('IDE store file actions', () => {
mock.onGet(`${RELATIVE_URL_ROOT}/test/test/7297abc/${localFile.path}`).networkError();
});
- it('dispatches error action', done => {
- const dispatch = jasmine.createSpy('dispatch');
+ it('dispatches error action', () => {
+ const dispatch = jest.fn();
- actions
+ return actions
.getFileData(
{ state: store.state, commit() {}, dispatch, getters: store.getters },
{ path: localFile.path },
@@ -339,17 +390,14 @@ describe('IDE store file actions', () => {
.then(() => {
expect(dispatch).toHaveBeenCalledWith('setErrorMessage', {
text: 'An error occurred whilst loading the file.',
- action: jasmine.any(Function),
+ action: expect.any(Function),
actionText: 'Please try again',
actionPayload: {
path: localFile.path,
makeFileActive: true,
},
});
-
- done();
- })
- .catch(done.fail);
+ });
});
});
});
@@ -358,7 +406,7 @@ describe('IDE store file actions', () => {
let tmpFile;
beforeEach(() => {
- spyOn(service, 'getRawFileData').and.callThrough();
+ jest.spyOn(service, 'getRawFileData');
tmpFile = file('tmpFile');
store.state.entries[tmpFile.path] = tmpFile;
@@ -392,7 +440,7 @@ describe('IDE store file actions', () => {
});
it('calls also getBaseRawFileData service method', done => {
- spyOn(service, 'getBaseRawFileData').and.returnValue(Promise.resolve('baseraw'));
+ jest.spyOn(service, 'getBaseRawFileData').mockReturnValue(Promise.resolve('baseraw'));
store.state.currentProjectId = 'gitlab-org/gitlab-ce';
store.state.currentMergeRequestId = '1';
@@ -442,23 +490,23 @@ describe('IDE store file actions', () => {
mock.onGet(/(.*)/).networkError();
});
- it('dispatches error action', done => {
- const dispatch = jasmine.createSpy('dispatch');
+ it('dispatches error action', () => {
+ const dispatch = jest.fn();
- actions
- .getRawFileData({ state: store.state, commit() {}, dispatch }, { path: tmpFile.path })
- .then(done.fail)
+ return actions
+ .getRawFileData(
+ { state: store.state, commit() {}, dispatch, getters: store.getters },
+ { path: tmpFile.path },
+ )
.catch(() => {
expect(dispatch).toHaveBeenCalledWith('setErrorMessage', {
text: 'An error occurred whilst loading the file content.',
- action: jasmine.any(Function),
+ action: expect.any(Function),
actionText: 'Please try again',
actionPayload: {
path: tmpFile.path,
},
});
-
- done();
});
});
});
@@ -466,6 +514,8 @@ describe('IDE store file actions', () => {
describe('changeFileContent', () => {
let tmpFile;
+ const callAction = (content = 'content\n') =>
+ store.dispatch('changeFileContent', { path: tmpFile.path, content });
beforeEach(() => {
tmpFile = file('tmpFile');
@@ -475,11 +525,7 @@ describe('IDE store file actions', () => {
});
it('updates file content', done => {
- store
- .dispatch('changeFileContent', {
- path: tmpFile.path,
- content: 'content\n',
- })
+ callAction()
.then(() => {
expect(tmpFile.content).toBe('content\n');
@@ -489,11 +535,7 @@ describe('IDE store file actions', () => {
});
it('adds a newline to the end of the file if it doesnt already exist', done => {
- store
- .dispatch('changeFileContent', {
- path: tmpFile.path,
- content: 'content',
- })
+ callAction('content')
.then(() => {
expect(tmpFile.content).toBe('content\n');
@@ -503,11 +545,7 @@ describe('IDE store file actions', () => {
});
it('adds file into changedFiles array', done => {
- store
- .dispatch('changeFileContent', {
- path: tmpFile.path,
- content: 'content',
- })
+ callAction()
.then(() => {
expect(store.state.changedFiles.length).toBe(1);
@@ -516,7 +554,7 @@ describe('IDE store file actions', () => {
.catch(done.fail);
});
- it('adds file once into changedFiles array', done => {
+ it('adds file not more than once into changedFiles array', done => {
store
.dispatch('changeFileContent', {
path: tmpFile.path,
@@ -556,6 +594,52 @@ describe('IDE store file actions', () => {
.catch(done.fail);
});
+ describe('when `gon.feature.stageAllByDefault` is true', () => {
+ const originalGonFeatures = Object.assign({}, gon.features);
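+ // Snapshot gon.features so the feature flag can be restored once this block finishes.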
+
+ beforeAll(() => {
+ gon.features = { stageAllByDefault: true };
+ });
+
+ afterAll(() => {
+ gon.features = originalGonFeatures;
+ });
+
+ it('adds file into stagedFiles array', done => {
+ store
+ .dispatch('changeFileContent', {
+ path: tmpFile.path,
+ content: 'content',
+ })
+ .then(() => {
+ expect(store.state.stagedFiles.length).toBe(1);
+
+ done();
+ })
+ .catch(done.fail);
+ });
+
+ it('adds file not more than once into stagedFiles array', done => {
+ store
+ .dispatch('changeFileContent', {
+ path: tmpFile.path,
+ content: 'content',
+ })
+ .then(() =>
+ store.dispatch('changeFileContent', {
+ path: tmpFile.path,
+ content: 'content 123',
+ }),
+ )
+ .then(() => {
+ expect(store.state.stagedFiles.length).toBe(1);
+
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
it('bursts unused seal', done => {
store
.dispatch('changeFileContent', {
@@ -571,122 +655,144 @@ describe('IDE store file actions', () => {
});
});
- describe('discardFileChanges', () => {
+ describe('with changed file', () => {
let tmpFile;
beforeEach(() => {
- spyOn(eventHub, '$on');
- spyOn(eventHub, '$emit');
-
- tmpFile = file();
+ tmpFile = file('tempFile');
tmpFile.content = 'testing';
+ tmpFile.raw = ORIGINAL_CONTENT;
store.state.changedFiles.push(tmpFile);
store.state.entries[tmpFile.path] = tmpFile;
});
- it('resets file content', done => {
- store
- .dispatch('discardFileChanges', tmpFile.path)
- .then(() => {
- expect(tmpFile.content).not.toBe('testing');
+ describe('restoreOriginalFile', () => {
+ it('resets file content', () =>
+ store.dispatch('restoreOriginalFile', tmpFile.path).then(() => {
+ expect(tmpFile.content).toBe(ORIGINAL_CONTENT);
+ }));
- done();
- })
- .catch(done.fail);
- });
+ it('closes temp file and deletes it', () => {
+ tmpFile.tempFile = true;
+ tmpFile.opened = true;
+ tmpFile.parentPath = 'parentFile';
+ store.state.entries.parentFile = file('parentFile');
- it('removes file from changedFiles array', done => {
- store
- .dispatch('discardFileChanges', tmpFile.path)
- .then(() => {
- expect(store.state.changedFiles.length).toBe(0);
+ actions.restoreOriginalFile(store, tmpFile.path);
- done();
- })
- .catch(done.fail);
- });
+ expect(store.dispatch).toHaveBeenCalledWith('closeFile', tmpFile);
+ expect(store.dispatch).toHaveBeenCalledWith('deleteEntry', tmpFile.path);
+ });
- it('closes temp file', done => {
- tmpFile.tempFile = true;
- tmpFile.opened = true;
+ describe('with renamed file', () => {
+ beforeEach(() => {
+ Object.assign(tmpFile, {
+ prevPath: 'parentPath/old_name',
+ prevName: 'old_name',
+ prevParentPath: 'parentPath',
+ });
- store
- .dispatch('discardFileChanges', tmpFile.path)
- .then(() => {
- expect(tmpFile.opened).toBeFalsy();
+ store.state.entries.parentPath = file('parentPath');
- done();
- })
- .catch(done.fail);
- });
+ actions.restoreOriginalFile(store, tmpFile.path);
+ });
- it('does not re-open a closed temp file', done => {
- tmpFile.tempFile = true;
+ it('renames the file to its original name and closes it if it was open', () => {
+ expect(store.dispatch).toHaveBeenCalledWith('closeFile', tmpFile);
+ expect(store.dispatch).toHaveBeenCalledWith('renameEntry', {
+ path: 'tempFile',
+ name: 'old_name',
+ parentPath: 'parentPath',
+ });
+ });
- expect(tmpFile.opened).toBeFalsy();
+ it('resets file content', () => {
+ expect(tmpFile.content).toBe(ORIGINAL_CONTENT);
+ });
+ });
+ });
- store
- .dispatch('discardFileChanges', tmpFile.path)
- .then(() => {
- expect(tmpFile.opened).toBeFalsy();
+ describe('discardFileChanges', () => {
+ beforeEach(() => {
+ jest.spyOn(eventHub, '$on').mockImplementation(() => {});
+ jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
+ });
- done();
- })
- .catch(done.fail);
- });
+ describe('with regular file', () => {
+ beforeEach(() => {
+ actions.discardFileChanges(store, tmpFile.path);
+ });
- it('pushes route for active file', done => {
- tmpFile.active = true;
- store.state.openFiles.push(tmpFile);
+ it('restores original file', () => {
+ expect(store.dispatch).toHaveBeenCalledWith('restoreOriginalFile', tmpFile.path);
+ });
- store
- .dispatch('discardFileChanges', tmpFile.path)
- .then(() => {
- expect(router.push).toHaveBeenCalledWith(`/project${tmpFile.url}`);
+ it('removes file from changedFiles array', () => {
+ expect(store.state.changedFiles.length).toBe(0);
+ });
+
+ it('does not push a new route', () => {
+ expect(router.push).not.toHaveBeenCalled();
+ });
+
+ it('emits eventHub event to dispose cached model', () => {
+ actions.discardFileChanges(store, tmpFile.path);
+
+ expect(eventHub.$emit).toHaveBeenCalledWith(
+ `editor.update.model.new.content.${tmpFile.key}`,
+ ORIGINAL_CONTENT,
+ );
+ expect(eventHub.$emit).toHaveBeenCalledWith(
+ `editor.update.model.dispose.unstaged-${tmpFile.key}`,
+ ORIGINAL_CONTENT,
+ );
+ });
+ });
- done();
- })
- .catch(done.fail);
- });
+ describe('with active file', () => {
+ beforeEach(() => {
+ tmpFile.active = true;
+ store.state.openFiles.push(tmpFile);
- it('emits eventHub event to dispose cached model', done => {
- store
- .dispatch('discardFileChanges', tmpFile.path)
- .then(() => {
- expect(eventHub.$emit).toHaveBeenCalled();
+ actions.discardFileChanges(store, tmpFile.path);
+ });
- done();
- })
- .catch(done.fail);
+ it('pushes route for active file', () => {
+ expect(router.push).toHaveBeenCalledWith(`/project${tmpFile.url}`);
+ });
+ });
});
});
describe('stageChange', () => {
- it('calls STAGE_CHANGE with file path', done => {
- testAction(
- actions.stageChange,
- 'path',
- store.state,
- [
- { type: types.STAGE_CHANGE, payload: 'path' },
- { type: types.SET_LAST_COMMIT_MSG, payload: '' },
- ],
- [],
- done,
+ it('calls STAGE_CHANGE with file path', () => {
+ const f = { ...file('path'), content: 'old' };
+
+ store.state.entries[f.path] = f;
+
+ actions.stageChange(store, 'path');
+
+ expect(store.commit).toHaveBeenCalledWith(
+ types.STAGE_CHANGE,
+ expect.objectContaining({ path: 'path' }),
);
+ expect(store.commit).toHaveBeenCalledWith(types.SET_LAST_COMMIT_MSG, '');
});
});
describe('unstageChange', () => {
- it('calls UNSTAGE_CHANGE with file path', done => {
- testAction(
- actions.unstageChange,
- 'path',
- store.state,
- [{ type: types.UNSTAGE_CHANGE, payload: 'path' }],
- [],
- done,
+ it('calls UNSTAGE_CHANGE with file path', () => {
+ const f = { ...file('path'), content: 'old' };
+
+ store.state.entries[f.path] = f;
+      store.state.stagedFiles.push({ ...f, content: 'new' });
+
+ actions.unstageChange(store, 'path');
+
+ expect(store.commit).toHaveBeenCalledWith(
+ types.UNSTAGE_CHANGE,
+ expect.objectContaining({ path: 'path' }),
);
});
});
@@ -756,7 +862,7 @@ describe('IDE store file actions', () => {
let f;
beforeEach(() => {
- spyOn(eventHub, '$emit');
+ jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
f = {
...file('pendingFile'),
@@ -789,7 +895,7 @@ describe('IDE store file actions', () => {
describe('triggerFilesChange', () => {
beforeEach(() => {
- spyOn(eventHub, '$emit');
+ jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
});
it('emits event that files have changed', done => {
diff --git a/spec/frontend/ide/stores/modules/pane/actions_spec.js b/spec/frontend/ide/stores/modules/pane/actions_spec.js
index 8c0aeaff5b3..8c56714e0ed 100644
--- a/spec/frontend/ide/stores/modules/pane/actions_spec.js
+++ b/spec/frontend/ide/stores/modules/pane/actions_spec.js
@@ -8,14 +8,7 @@ describe('IDE pane module actions', () => {
describe('toggleOpen', () => {
it('dispatches open if closed', done => {
- testAction(
- actions.toggleOpen,
- TEST_VIEW,
- { isOpen: false },
- [],
- [{ type: 'open', payload: TEST_VIEW }],
- done,
- );
+ testAction(actions.toggleOpen, TEST_VIEW, { isOpen: false }, [], [{ type: 'open' }], done);
});
it('dispatches close if opened', done => {
@@ -24,37 +17,48 @@ describe('IDE pane module actions', () => {
});
describe('open', () => {
- it('commits SET_OPEN', done => {
- testAction(actions.open, null, {}, [{ type: types.SET_OPEN, payload: true }], [], done);
- });
+ describe('with a view specified', () => {
+ it('commits SET_OPEN and SET_CURRENT_VIEW', done => {
+ testAction(
+ actions.open,
+ TEST_VIEW,
+ {},
+ [
+ { type: types.SET_OPEN, payload: true },
+ { type: types.SET_CURRENT_VIEW, payload: TEST_VIEW.name },
+ ],
+ [],
+ done,
+ );
+ });
- it('commits SET_CURRENT_VIEW if view is given', done => {
- testAction(
- actions.open,
- TEST_VIEW,
- {},
- [
- { type: types.SET_OPEN, payload: true },
- { type: types.SET_CURRENT_VIEW, payload: TEST_VIEW.name },
- ],
- [],
- done,
- );
+ it('commits KEEP_ALIVE_VIEW if keepAlive is true', done => {
+ testAction(
+ actions.open,
+ TEST_VIEW_KEEP_ALIVE,
+ {},
+ [
+ { type: types.SET_OPEN, payload: true },
+ { type: types.SET_CURRENT_VIEW, payload: TEST_VIEW_KEEP_ALIVE.name },
+ { type: types.KEEP_ALIVE_VIEW, payload: TEST_VIEW_KEEP_ALIVE.name },
+ ],
+ [],
+ done,
+ );
+ });
});
- it('commits KEEP_ALIVE_VIEW if keepAlive is true', done => {
- testAction(
- actions.open,
- TEST_VIEW_KEEP_ALIVE,
- {},
- [
- { type: types.SET_OPEN, payload: true },
- { type: types.SET_CURRENT_VIEW, payload: TEST_VIEW_KEEP_ALIVE.name },
- { type: types.KEEP_ALIVE_VIEW, payload: TEST_VIEW_KEEP_ALIVE.name },
- ],
- [],
- done,
- );
+ describe('without a view specified', () => {
+ it('commits SET_OPEN', done => {
+ testAction(
+ actions.open,
+ undefined,
+ {},
+ [{ type: types.SET_OPEN, payload: true }],
+ [],
+ done,
+ );
+ });
});
});
diff --git a/spec/frontend/ide/stores/mutations/file_spec.js b/spec/frontend/ide/stores/mutations/file_spec.js
index 91506c1b46c..cd308ee9991 100644
--- a/spec/frontend/ide/stores/mutations/file_spec.js
+++ b/spec/frontend/ide/stores/mutations/file_spec.js
@@ -1,15 +1,17 @@
import mutations from '~/ide/stores/mutations/file';
-import state from '~/ide/stores/state';
+import { createStore } from '~/ide/stores';
import { FILE_VIEW_MODE_PREVIEW } from '~/ide/constants';
import { file } from '../../helpers';
describe('IDE store file mutations', () => {
let localState;
+ let localStore;
let localFile;
beforeEach(() => {
- localState = state();
- localFile = { ...file(), type: 'blob' };
+ localStore = createStore();
+ localState = localStore.state;
+ localFile = { ...file('file'), type: 'blob', content: 'original' };
localState.entries[localFile.path] = localFile;
});
@@ -137,35 +139,68 @@ describe('IDE store file mutations', () => {
});
describe('SET_FILE_RAW_DATA', () => {
- it('sets raw data', () => {
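+    // Helper: applies the SET_FILE_RAW_DATA mutation to the given file, passing the
+    // fileDeletedAndReadded flag that the mutation now receives from the store getters.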
+ const callMutationForFile = f => {
mutations.SET_FILE_RAW_DATA(localState, {
- file: localFile,
+ file: f,
raw: 'testing',
+ fileDeletedAndReadded: localStore.getters.isFileDeletedAndReadded(localFile.path),
});
+ };
+
+ it('sets raw data', () => {
+ callMutationForFile(localFile);
expect(localFile.raw).toBe('testing');
});
+ it('sets raw data to stagedFile if file was deleted and readded', () => {
+ localState.stagedFiles = [{ ...localFile, deleted: true }];
+ localFile.tempFile = true;
+
+ callMutationForFile(localFile);
+
+ expect(localFile.raw).toBeFalsy();
+ expect(localState.stagedFiles[0].raw).toBe('testing');
+ });
+
+ it("sets raw data to a file's content if tempFile is empty", () => {
+ localFile.tempFile = true;
+ localFile.content = '';
+
+ callMutationForFile(localFile);
+
+ expect(localFile.raw).toBeFalsy();
+ expect(localFile.content).toBe('testing');
+ });
+
it('adds raw data to open pending file', () => {
localState.openFiles.push({ ...localFile, pending: true });
- mutations.SET_FILE_RAW_DATA(localState, {
- file: localFile,
- raw: 'testing',
- });
+ callMutationForFile(localFile);
expect(localState.openFiles[0].raw).toBe('testing');
});
- it('does not add raw data to open pending tempFile file', () => {
- localState.openFiles.push({ ...localFile, pending: true, tempFile: true });
+ it('sets raw to content of a renamed tempFile', () => {
+ localFile.tempFile = true;
+ localFile.prevPath = 'old_path';
+ localState.openFiles.push({ ...localFile, pending: true });
- mutations.SET_FILE_RAW_DATA(localState, {
- file: localFile,
- raw: 'testing',
- });
+ callMutationForFile(localFile);
expect(localState.openFiles[0].raw).not.toBe('testing');
+ expect(localState.openFiles[0].content).toBe('testing');
+ });
+
+ it('adds raw data to a staged deleted file if unstaged change has a tempFile of the same name', () => {
+ localFile.tempFile = true;
+ localState.openFiles.push({ ...localFile, pending: true });
+ localState.stagedFiles = [{ ...localFile, deleted: true }];
+
+ callMutationForFile(localFile);
+
+ expect(localFile.raw).toBeFalsy();
+ expect(localState.stagedFiles[0].raw).toBe('testing');
});
});
@@ -333,44 +368,154 @@ describe('IDE store file mutations', () => {
});
});
- describe('STAGE_CHANGE', () => {
- beforeEach(() => {
- mutations.STAGE_CHANGE(localState, localFile.path);
- });
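+  // STAGE_CHANGE and UNSTAGE_CHANGE mirror each other, so both are exercised through one
+  // parameterized table describing which array a file moves to and the expected counts.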
+ describe.each`
+ mutationName | mutation | addedTo | removedFrom | staged | changedFilesCount | stagedFilesCount
+ ${'STAGE_CHANGE'} | ${mutations.STAGE_CHANGE} | ${'stagedFiles'} | ${'changedFiles'} | ${true} | ${0} | ${1}
+ ${'UNSTAGE_CHANGE'} | ${mutations.UNSTAGE_CHANGE} | ${'changedFiles'} | ${'stagedFiles'} | ${false} | ${1} | ${0}
+ `(
+ '$mutationName',
+ ({ mutation, changedFilesCount, removedFrom, addedTo, staged, stagedFilesCount }) => {
+ let unstagedFile;
+ let stagedFile;
+
+ beforeEach(() => {
+ unstagedFile = {
+ ...file('file'),
+ type: 'blob',
+ raw: 'original content',
+ content: 'changed content',
+ };
+
+ stagedFile = {
+ ...unstagedFile,
+ content: 'staged content',
+ staged: true,
+ };
+
+ localState.changedFiles.push(unstagedFile);
+ localState.stagedFiles.push(stagedFile);
+ localState.entries[unstagedFile.path] = unstagedFile;
+ });
- it('adds file into stagedFiles array', () => {
- expect(localState.stagedFiles.length).toBe(1);
- expect(localState.stagedFiles[0]).toEqual(localFile);
- });
+ it('removes all changes of a file if staged and unstaged change contents are equal', () => {
+ unstagedFile.content = 'original content';
+
+ mutation(localState, {
+ path: unstagedFile.path,
+ diffInfo: localStore.getters.getDiffInfo(unstagedFile.path),
+ });
+
+ expect(localState.entries.file).toEqual(
+ expect.objectContaining({
+ content: 'original content',
+ staged: false,
+ changed: false,
+ }),
+ );
- it('updates stagedFile if it is already staged', () => {
- localFile.raw = 'testing 123';
+ expect(localState.stagedFiles.length).toBe(0);
+ expect(localState.changedFiles.length).toBe(0);
+ });
- mutations.STAGE_CHANGE(localState, localFile.path);
+      it('removes all changes of a file if a file is deleted and a new file with the same content is added', () => {
+ stagedFile.deleted = true;
+ unstagedFile.tempFile = true;
+ unstagedFile.content = 'original content';
- expect(localState.stagedFiles.length).toBe(1);
- expect(localState.stagedFiles[0].raw).toEqual('testing 123');
- });
- });
+ mutation(localState, {
+ path: unstagedFile.path,
+ diffInfo: localStore.getters.getDiffInfo(unstagedFile.path),
+ });
- describe('UNSTAGE_CHANGE', () => {
- let f;
+ expect(localState.stagedFiles.length).toBe(0);
+ expect(localState.changedFiles.length).toBe(0);
- beforeEach(() => {
- f = { ...file(), type: 'blob', staged: true };
+ expect(localState.entries.file).toEqual(
+ expect.objectContaining({
+ content: 'original content',
+ deleted: false,
+ tempFile: false,
+ }),
+ );
+ });
- localState.stagedFiles.push(f);
- localState.changedFiles.push(f);
- localState.entries[f.path] = f;
- });
+ it('merges deleted and added file into a changed file if the contents differ', () => {
+ stagedFile.deleted = true;
+ unstagedFile.tempFile = true;
+ unstagedFile.content = 'hello';
- it('removes from stagedFiles array', () => {
- mutations.UNSTAGE_CHANGE(localState, f.path);
+ mutation(localState, {
+ path: unstagedFile.path,
+ diffInfo: localStore.getters.getDiffInfo(unstagedFile.path),
+ });
- expect(localState.stagedFiles.length).toBe(0);
- expect(localState.changedFiles.length).toBe(1);
- });
- });
+ expect(localState.stagedFiles.length).toBe(stagedFilesCount);
+ expect(localState.changedFiles.length).toBe(changedFilesCount);
+
+ expect(unstagedFile).toEqual(
+ expect.objectContaining({
+ content: 'hello',
+ staged,
+ deleted: false,
+ tempFile: false,
+ changed: true,
+ }),
+ );
+ });
+
+ it('does not remove file from stagedFiles and changedFiles if the file was renamed, even if the contents are equal', () => {
+ unstagedFile.content = 'original content';
+ unstagedFile.prevPath = 'old_file';
+
+ mutation(localState, {
+ path: unstagedFile.path,
+ diffInfo: localStore.getters.getDiffInfo(unstagedFile.path),
+ });
+
+ expect(localState.entries.file).toEqual(
+ expect.objectContaining({
+ content: 'original content',
+ staged,
+ changed: false,
+ prevPath: 'old_file',
+ }),
+ );
+
+ expect(localState.stagedFiles.length).toBe(stagedFilesCount);
+ expect(localState.changedFiles.length).toBe(changedFilesCount);
+ });
+
+ it(`removes file from ${removedFrom} array and adds it into ${addedTo} array`, () => {
+ localState.stagedFiles.length = 0;
+
+ mutation(localState, {
+ path: unstagedFile.path,
+ diffInfo: localStore.getters.getDiffInfo(unstagedFile.path),
+ });
+
+ expect(localState.stagedFiles.length).toBe(stagedFilesCount);
+ expect(localState.changedFiles.length).toBe(changedFilesCount);
+
+ const f = localState.stagedFiles[0] || localState.changedFiles[0];
+ expect(f).toEqual(unstagedFile);
+ });
+
+      it(`updates file in ${addedTo} array if it was already present in it`, () => {
+ unstagedFile.raw = 'testing 123';
+
+ mutation(localState, {
+ path: unstagedFile.path,
+ diffInfo: localStore.getters.getDiffInfo(unstagedFile.path),
+ });
+
+ expect(localState.stagedFiles.length).toBe(stagedFilesCount);
+ expect(localState.changedFiles.length).toBe(changedFilesCount);
+
+ const f = localState.stagedFiles[0] || localState.changedFiles[0];
+ expect(f.raw).toEqual('testing 123');
+ });
+ },
+ );
describe('TOGGLE_FILE_CHANGED', () => {
it('updates file changed status', () => {
diff --git a/spec/javascripts/image_diff/helpers/init_image_diff_spec.js b/spec/frontend/image_diff/helpers/init_image_diff_spec.js
index ba501d58965..dc872ace265 100644
--- a/spec/javascripts/image_diff/helpers/init_image_diff_spec.js
+++ b/spec/frontend/image_diff/helpers/init_image_diff_spec.js
@@ -14,8 +14,8 @@ describe('initImageDiff', () => {
<div class="diff-file"></div>
`;
- spyOn(ReplacedImageDiff.prototype, 'init').and.callFake(() => {});
- spyOn(ImageDiff.prototype, 'init').and.callFake(() => {});
+ jest.spyOn(ReplacedImageDiff.prototype, 'init').mockImplementation(() => {});
+ jest.spyOn(ImageDiff.prototype, 'init').mockImplementation(() => {});
});
afterEach(() => {
diff --git a/spec/javascripts/image_diff/init_discussion_tab_spec.js b/spec/frontend/image_diff/init_discussion_tab_spec.js
index 5eb87e1df25..f459fdf5a08 100644
--- a/spec/javascripts/image_diff/init_discussion_tab_spec.js
+++ b/spec/frontend/image_diff/init_discussion_tab_spec.js
@@ -12,29 +12,31 @@ describe('initDiscussionTab', () => {
});
it('should pass canCreateNote as false to initImageDiff', done => {
- spyOn(initImageDiffHelper, 'initImageDiff').and.callFake((diffFileEl, canCreateNote) => {
- expect(canCreateNote).toEqual(false);
- done();
- });
+ jest
+ .spyOn(initImageDiffHelper, 'initImageDiff')
+ .mockImplementation((diffFileEl, canCreateNote) => {
+ expect(canCreateNote).toEqual(false);
+ done();
+ });
initDiscussionTab();
});
it('should pass renderCommentBadge as true to initImageDiff', done => {
- spyOn(initImageDiffHelper, 'initImageDiff').and.callFake(
- (diffFileEl, canCreateNote, renderCommentBadge) => {
+ jest
+ .spyOn(initImageDiffHelper, 'initImageDiff')
+ .mockImplementation((diffFileEl, canCreateNote, renderCommentBadge) => {
expect(renderCommentBadge).toEqual(true);
done();
- },
- );
+ });
initDiscussionTab();
});
it('should call initImageDiff for each diffFileEls', () => {
- spyOn(initImageDiffHelper, 'initImageDiff').and.callFake(() => {});
+ jest.spyOn(initImageDiffHelper, 'initImageDiff').mockImplementation(() => {});
initDiscussionTab();
- expect(initImageDiffHelper.initImageDiff.calls.count()).toEqual(2);
+ expect(initImageDiffHelper.initImageDiff.mock.calls.length).toEqual(2);
});
});
diff --git a/spec/frontend/import_projects/components/import_projects_table_spec.js b/spec/frontend/import_projects/components/import_projects_table_spec.js
index 708f2758083..deffe22ea77 100644
--- a/spec/frontend/import_projects/components/import_projects_table_spec.js
+++ b/spec/frontend/import_projects/components/import_projects_table_spec.js
@@ -45,7 +45,6 @@ describe('ImportProjectsTable', () => {
propsData: {
providerTitle,
},
- sync: false,
});
return component.vm;
diff --git a/spec/frontend/import_projects/components/imported_project_table_row_spec.js b/spec/frontend/import_projects/components/imported_project_table_row_spec.js
index 34961eae0f3..700dd1e025a 100644
--- a/spec/frontend/import_projects/components/imported_project_table_row_spec.js
+++ b/spec/frontend/import_projects/components/imported_project_table_row_spec.js
@@ -26,7 +26,6 @@ describe('ImportedProjectTableRow', () => {
...project,
},
},
- sync: false,
});
return component.vm;
diff --git a/spec/frontend/import_projects/components/provider_repo_table_row_spec.js b/spec/frontend/import_projects/components/provider_repo_table_row_spec.js
index 02c786d8d0b..8efd526e360 100644
--- a/spec/frontend/import_projects/components/provider_repo_table_row_spec.js
+++ b/spec/frontend/import_projects/components/provider_repo_table_row_spec.js
@@ -45,7 +45,6 @@ describe('ProviderRepoTableRow', () => {
propsData: {
repo,
},
- sync: false,
});
return component.vm;
diff --git a/spec/frontend/issuable_suggestions/components/app_spec.js b/spec/frontend/issuable_suggestions/components/app_spec.js
index 41860202750..20930be8667 100644
--- a/spec/frontend/issuable_suggestions/components/app_spec.js
+++ b/spec/frontend/issuable_suggestions/components/app_spec.js
@@ -11,8 +11,6 @@ describe('Issuable suggestions app component', () => {
search,
projectPath: 'project',
},
- sync: false,
- attachToDocument: true,
});
}
@@ -27,7 +25,9 @@ describe('Issuable suggestions app component', () => {
it('does not render with empty search', () => {
wrapper.setProps({ search: '' });
- expect(wrapper.isVisible()).toBe(false);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.isVisible()).toBe(false);
+ });
});
describe('with data', () => {
@@ -40,14 +40,18 @@ describe('Issuable suggestions app component', () => {
it('renders component', () => {
wrapper.setData(data);
- expect(wrapper.isEmpty()).toBe(false);
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.isEmpty()).toBe(false);
+ });
});
it('does not render with empty search', () => {
wrapper.setProps({ search: '' });
wrapper.setData(data);
- expect(wrapper.isVisible()).toBe(false);
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.isVisible()).toBe(false);
+ });
});
it('does not render when loading', () => {
@@ -56,13 +60,17 @@ describe('Issuable suggestions app component', () => {
loading: 1,
});
- expect(wrapper.isVisible()).toBe(false);
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.isVisible()).toBe(false);
+ });
});
it('does not render with empty issues data', () => {
wrapper.setData({ issues: [] });
- expect(wrapper.isVisible()).toBe(false);
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.isVisible()).toBe(false);
+ });
});
it('renders list of issues', () => {
diff --git a/spec/frontend/issuable_suggestions/components/item_spec.js b/spec/frontend/issuable_suggestions/components/item_spec.js
index 10fba238506..6c3c30fcbb0 100644
--- a/spec/frontend/issuable_suggestions/components/item_spec.js
+++ b/spec/frontend/issuable_suggestions/components/item_spec.js
@@ -16,8 +16,6 @@ describe('Issuable suggestions suggestion component', () => {
...suggestion,
},
},
- sync: false,
- attachToDocument: true,
});
}
@@ -135,7 +133,7 @@ describe('Issuable suggestions suggestion component', () => {
const icon = vm.find(Icon);
expect(icon.props('name')).toBe('eye-slash');
- expect(icon.attributes('data-original-title')).toBe('Confidential');
+ expect(icon.attributes('title')).toBe('Confidential');
});
});
});
diff --git a/spec/frontend/issuables_list/components/__snapshots__/issuables_list_app_spec.js.snap b/spec/frontend/issuables_list/components/__snapshots__/issuables_list_app_spec.js.snap
index f57391a6b0d..3e445319746 100644
--- a/spec/frontend/issuables_list/components/__snapshots__/issuables_list_app_spec.js.snap
+++ b/spec/frontend/issuables_list/components/__snapshots__/issuables_list_app_spec.js.snap
@@ -1,7 +1,7 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`Issuables list component with empty issues response with all state should display a catch-all if there are no issues to show 1`] = `
-<glemptystate-stub
+<gl-empty-state-stub
description="The Issue Tracker is the place to add things that need to be improved or solved in a project. You can register or sign in to create issues for this project."
svgpath="/emptySvg"
title="There are no issues to show"
diff --git a/spec/frontend/issuables_list/components/issuable_spec.js b/spec/frontend/issuables_list/components/issuable_spec.js
index b6851a0e24c..81f6b60ae25 100644
--- a/spec/frontend/issuables_list/components/issuable_spec.js
+++ b/spec/frontend/issuables_list/components/issuable_spec.js
@@ -44,8 +44,6 @@ describe('Issuable component', () => {
baseUrl: TEST_BASE_URL,
...props,
},
- sync: false,
- attachToDocument: true,
});
};
@@ -70,7 +68,7 @@ describe('Issuable component', () => {
const findTaskStatus = () => wrapper.find('.task-status');
const findOpenedAgoContainer = () => wrapper.find({ ref: 'openedAgoByContainer' });
const findMilestone = () => wrapper.find('.js-milestone');
- const findMilestoneTooltip = () => findMilestone().attributes('data-original-title');
+ const findMilestoneTooltip = () => findMilestone().attributes('title');
const findDueDate = () => wrapper.find('.js-due-date');
const findLabelContainer = () => wrapper.find('.js-labels');
const findLabelLinks = () => findLabelContainer().findAll(GlLink);
@@ -240,7 +238,7 @@ describe('Issuable component', () => {
const labels = findLabelLinks().wrappers.map(label => ({
href: label.attributes('href'),
text: label.text(),
- tooltip: label.find('span').attributes('data-original-title'),
+ tooltip: label.find('span').attributes('title'),
}));
const expected = testLabels.map(label => ({
@@ -339,7 +337,9 @@ describe('Issuable component', () => {
findBulkCheckbox().trigger('click');
- expect(wrapper.emitted().select).toEqual([[{ issuable, selected: !selected }]]);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted().select).toEqual([[{ issuable, selected: !selected }]]);
+ });
});
});
});
diff --git a/spec/frontend/issuables_list/components/issuables_list_app_spec.js b/spec/frontend/issuables_list/components/issuables_list_app_spec.js
index 621e8b8aa54..eafc4d83d87 100644
--- a/spec/frontend/issuables_list/components/issuables_list_app_spec.js
+++ b/spec/frontend/issuables_list/components/issuables_list_app_spec.js
@@ -1,6 +1,6 @@
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import { GlEmptyState, GlPagination, GlSkeletonLoading } from '@gitlab/ui';
import waitForPromises from 'helpers/wait_for_promises';
import { TEST_HOST } from 'helpers/test_constants';
@@ -18,8 +18,6 @@ const TEST_ENDPOINT = '/issues';
const TEST_CREATE_ISSUES_PATH = '/createIssue';
const TEST_EMPTY_SVG_PATH = '/emptySvg';
-const localVue = createLocalVue();
-
const MOCK_ISSUES = Array(PAGE_SIZE_MANUAL)
.fill(0)
.map((_, i) => ({
@@ -40,16 +38,13 @@ describe('Issuables list component', () => {
};
const factory = (props = { sortKey: 'priority' }) => {
- wrapper = shallowMount(localVue.extend(IssuablesListApp), {
+ wrapper = shallowMount(IssuablesListApp, {
propsData: {
endpoint: TEST_ENDPOINT,
createIssuePath: TEST_CREATE_ISSUES_PATH,
emptySvgPath: TEST_EMPTY_SVG_PATH,
...props,
},
- localVue,
- sync: false,
- attachToDocument: true,
});
};
diff --git a/spec/javascripts/issue_show/components/edit_actions_spec.js b/spec/frontend/issue_show/components/edit_actions_spec.js
index 2ab74ae4e10..b0c1894058e 100644
--- a/spec/javascripts/issue_show/components/edit_actions_spec.js
+++ b/spec/frontend/issue_show/components/edit_actions_spec.js
@@ -15,7 +15,7 @@ describe('Edit Actions components', () => {
});
store.formState.title = 'test';
- spyOn(eventHub, '$emit');
+ jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
vm = new Component({
propsData: {
@@ -101,14 +101,14 @@ describe('Edit Actions components', () => {
describe('deleteIssuable', () => {
it('sends delete.issuable event when clicking save button', () => {
- spyOn(window, 'confirm').and.returnValue(true);
+ jest.spyOn(window, 'confirm').mockReturnValue(true);
vm.$el.querySelector('.btn-danger').click();
expect(eventHub.$emit).toHaveBeenCalledWith('delete.issuable', { destroy_confirm: true });
});
it('shows loading icon after clicking delete button', done => {
- spyOn(window, 'confirm').and.returnValue(true);
+ jest.spyOn(window, 'confirm').mockReturnValue(true);
vm.$el.querySelector('.btn-danger').click();
Vue.nextTick(() => {
@@ -119,7 +119,7 @@ describe('Edit Actions components', () => {
});
it('does no actions when confirm is false', done => {
- spyOn(window, 'confirm').and.returnValue(false);
+ jest.spyOn(window, 'confirm').mockReturnValue(false);
vm.$el.querySelector('.btn-danger').click();
Vue.nextTick(() => {
diff --git a/spec/javascripts/issue_show/components/fields/description_spec.js b/spec/frontend/issue_show/components/fields/description_spec.js
index f5f87a6bfbf..8ea326ad1ee 100644
--- a/spec/javascripts/issue_show/components/fields/description_spec.js
+++ b/spec/frontend/issue_show/components/fields/description_spec.js
@@ -20,7 +20,7 @@ describe('Description field component', () => {
document.body.appendChild(el);
- spyOn(eventHub, '$emit');
+ jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
vm = new Component({
el,
diff --git a/spec/javascripts/issue_show/components/fields/title_spec.js b/spec/frontend/issue_show/components/fields/title_spec.js
index 62dff983250..99e8658b89f 100644
--- a/spec/javascripts/issue_show/components/fields/title_spec.js
+++ b/spec/frontend/issue_show/components/fields/title_spec.js
@@ -17,7 +17,7 @@ describe('Title field component', () => {
});
store.formState.title = 'test';
- spyOn(eventHub, '$emit');
+ jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
vm = new Component({
propsData: {
diff --git a/spec/frontend/issue_show/components/pinned_links_spec.js b/spec/frontend/issue_show/components/pinned_links_spec.js
index 77da3390918..59c919c85d5 100644
--- a/spec/frontend/issue_show/components/pinned_links_spec.js
+++ b/spec/frontend/issue_show/components/pinned_links_spec.js
@@ -1,9 +1,7 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import { GlLink } from '@gitlab/ui';
import PinnedLinks from '~/issue_show/components/pinned_links.vue';
-const localVue = createLocalVue();
-
const plainZoomUrl = 'https://zoom.us/j/123456789';
describe('PinnedLinks', () => {
@@ -19,9 +17,7 @@ describe('PinnedLinks', () => {
};
const createComponent = props => {
- wrapper = shallowMount(localVue.extend(PinnedLinks), {
- localVue,
- sync: false,
+ wrapper = shallowMount(PinnedLinks, {
propsData: {
zoomMeetingUrl: null,
...props,
diff --git a/spec/javascripts/issue_show/index_spec.js b/spec/frontend/issue_show/index_spec.js
index fa0b426c06c..e80d1b83c11 100644
--- a/spec/javascripts/issue_show/index_spec.js
+++ b/spec/frontend/issue_show/index_spec.js
@@ -10,7 +10,7 @@ describe('Issue show index', () => {
});
document.body.appendChild(d);
- const alertSpy = spyOn(window, 'alert');
+ const alertSpy = jest.spyOn(window, 'alert');
initIssueableApp();
expect(alertSpy).not.toHaveBeenCalled();
diff --git a/spec/javascripts/issue_spec.js b/spec/frontend/issue_spec.js
index 966aee72abb..586bd7f8529 100644
--- a/spec/javascripts/issue_spec.js
+++ b/spec/frontend/issue_spec.js
@@ -6,7 +6,13 @@ import axios from '~/lib/utils/axios_utils';
import Issue from '~/issue';
import '~/lib/utils/text_utility';
-describe('Issue', function() {
+describe('Issue', () => {
+ let testContext;
+
+ beforeEach(() => {
+ testContext = {};
+ });
+
let $boxClosed, $boxOpen, $btn;
preloadFixtures('issues/closed-issue.html');
@@ -80,10 +86,18 @@ describe('Issue', function() {
}
[true, false].forEach(isIssueInitiallyOpen => {
- describe(`with ${isIssueInitiallyOpen ? 'open' : 'closed'} issue`, function() {
+ describe(`with ${isIssueInitiallyOpen ? 'open' : 'closed'} issue`, () => {
const action = isIssueInitiallyOpen ? 'close' : 'reopen';
let mock;
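+      // Instantiates Issue against the loaded fixture and seeds the project
+      // issue counter; each test calls this after arranging its mocks.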
+ function setup() {
+ testContext.issue = new Issue();
+ expectIssueState(isIssueInitiallyOpen);
+
+ testContext.$projectIssuesCounter = $('.issue_counter').first();
+ testContext.$projectIssuesCounter.text('1,001');
+ }
+
function mockCloseButtonResponseSuccess(url, response) {
mock.onPut(url).reply(() => {
expectNewBranchButtonState(true, false);
@@ -103,7 +117,7 @@ describe('Issue', function() {
});
}
- beforeEach(function() {
+ beforeEach(() => {
if (isIssueInitiallyOpen) {
loadFixtures('issues/open-issue.html');
} else {
@@ -111,19 +125,11 @@ describe('Issue', function() {
}
mock = new MockAdapter(axios);
-
mock.onGet(/(.*)\/related_branches$/).reply(200, {});
+ jest.spyOn(axios, 'get');
findElements(isIssueInitiallyOpen);
- this.issue = new Issue();
- expectIssueState(isIssueInitiallyOpen);
-
- this.$triggeredButton = $btn;
-
- this.$projectIssuesCounter = $('.issue_counter').first();
- this.$projectIssuesCounter.text('1,001');
-
- spyOn(axios, 'get').and.callThrough();
+ testContext.$triggeredButton = $btn;
});
afterEach(() => {
@@ -131,82 +137,90 @@ describe('Issue', function() {
$('div.flash-alert').remove();
});
- it(`${action}s the issue`, function(done) {
- mockCloseButtonResponseSuccess(this.$triggeredButton.attr('href'), {
+ it(`${action}s the issue`, done => {
+ mockCloseButtonResponseSuccess(testContext.$triggeredButton.attr('href'), {
id: 34,
});
mockCanCreateBranch(!isIssueInitiallyOpen);
- this.$triggeredButton.trigger('click');
+ setup();
+ testContext.$triggeredButton.trigger('click');
- setTimeout(() => {
+ setImmediate(() => {
expectIssueState(!isIssueInitiallyOpen);
- expect(this.$triggeredButton.get(0).getAttribute('disabled')).toBeNull();
- expect(this.$projectIssuesCounter.text()).toBe(isIssueInitiallyOpen ? '1,000' : '1,002');
+ expect(testContext.$triggeredButton.get(0).getAttribute('disabled')).toBeNull();
+ expect(testContext.$projectIssuesCounter.text()).toBe(
+ isIssueInitiallyOpen ? '1,000' : '1,002',
+ );
expectNewBranchButtonState(false, !isIssueInitiallyOpen);
done();
});
});
- it(`fails to ${action} the issue if saved:false`, function(done) {
- mockCloseButtonResponseSuccess(this.$triggeredButton.attr('href'), {
+ it(`fails to ${action} the issue if saved:false`, done => {
+ mockCloseButtonResponseSuccess(testContext.$triggeredButton.attr('href'), {
saved: false,
});
mockCanCreateBranch(isIssueInitiallyOpen);
- this.$triggeredButton.trigger('click');
+ setup();
+ testContext.$triggeredButton.trigger('click');
- setTimeout(() => {
+ setImmediate(() => {
expectIssueState(isIssueInitiallyOpen);
- expect(this.$triggeredButton.get(0).getAttribute('disabled')).toBeNull();
+ expect(testContext.$triggeredButton.get(0).getAttribute('disabled')).toBeNull();
expectErrorMessage();
- expect(this.$projectIssuesCounter.text()).toBe('1,001');
+ expect(testContext.$projectIssuesCounter.text()).toBe('1,001');
expectNewBranchButtonState(false, isIssueInitiallyOpen);
done();
});
});
- it(`fails to ${action} the issue if HTTP error occurs`, function(done) {
- mockCloseButtonResponseError(this.$triggeredButton.attr('href'));
+ it(`fails to ${action} the issue if HTTP error occurs`, done => {
+ mockCloseButtonResponseError(testContext.$triggeredButton.attr('href'));
mockCanCreateBranch(isIssueInitiallyOpen);
- this.$triggeredButton.trigger('click');
+ setup();
+ testContext.$triggeredButton.trigger('click');
- setTimeout(() => {
+ setImmediate(() => {
expectIssueState(isIssueInitiallyOpen);
- expect(this.$triggeredButton.get(0).getAttribute('disabled')).toBeNull();
+ expect(testContext.$triggeredButton.get(0).getAttribute('disabled')).toBeNull();
expectErrorMessage();
- expect(this.$projectIssuesCounter.text()).toBe('1,001');
+ expect(testContext.$projectIssuesCounter.text()).toBe('1,001');
expectNewBranchButtonState(false, isIssueInitiallyOpen);
done();
});
});
- it('disables the new branch button if Ajax call fails', function() {
- mockCloseButtonResponseError(this.$triggeredButton.attr('href'));
+ it('disables the new branch button if Ajax call fails', () => {
+ mockCloseButtonResponseError(testContext.$triggeredButton.attr('href'));
mock.onGet(/(.*)\/can_create_branch$/).networkError();
- this.$triggeredButton.trigger('click');
+ setup();
+ testContext.$triggeredButton.trigger('click');
expectNewBranchButtonState(false, false);
});
- it('does not trigger Ajax call if new branch button is missing', function(done) {
- mockCloseButtonResponseError(this.$triggeredButton.attr('href'));
- Issue.$btnNewBranch = $();
- this.canCreateBranchDeferred = null;
+ it('does not trigger Ajax call if new branch button is missing', done => {
+ mockCloseButtonResponseError(testContext.$triggeredButton.attr('href'));
+
+ document.querySelector('#related-branches').remove();
+ document.querySelector('.create-mr-dropdown-wrap').remove();
- this.$triggeredButton.trigger('click');
+ setup();
+ testContext.$triggeredButton.trigger('click');
- setTimeout(() => {
+ setImmediate(() => {
expect(axios.get).not.toHaveBeenCalled();
done();
diff --git a/spec/frontend/jobs/components/erased_block_spec.js b/spec/frontend/jobs/components/erased_block_spec.js
index c7a53197fad..d66ee71df6a 100644
--- a/spec/frontend/jobs/components/erased_block_spec.js
+++ b/spec/frontend/jobs/components/erased_block_spec.js
@@ -13,8 +13,6 @@ describe('Erased block', () => {
const createComponent = props => {
wrapper = mount(ErasedBlock, {
propsData: props,
- sync: false,
- attachToDocument: true,
});
};
diff --git a/spec/javascripts/jobs/components/job_log_controllers_spec.js b/spec/frontend/jobs/components/job_log_controllers_spec.js
index d527c6708fc..04f20811601 100644
--- a/spec/javascripts/jobs/components/job_log_controllers_spec.js
+++ b/spec/frontend/jobs/components/job_log_controllers_spec.js
@@ -100,7 +100,7 @@ describe('Job log controllers', () => {
});
it('emits scrollJobLogTop event on click', () => {
- spyOn(vm, '$emit');
+ jest.spyOn(vm, '$emit').mockImplementation(() => {});
vm.$el.querySelector('.js-scroll-top').click();
expect(vm.$emit).toHaveBeenCalledWith('scrollJobLogTop');
@@ -127,7 +127,7 @@ describe('Job log controllers', () => {
});
it('does not emit scrollJobLogTop event on click', () => {
- spyOn(vm, '$emit');
+ jest.spyOn(vm, '$emit').mockImplementation(() => {});
vm.$el.querySelector('.js-scroll-top').click();
expect(vm.$emit).not.toHaveBeenCalledWith('scrollJobLogTop');
@@ -146,7 +146,7 @@ describe('Job log controllers', () => {
});
it('emits scrollJobLogBottom event on click', () => {
- spyOn(vm, '$emit');
+ jest.spyOn(vm, '$emit').mockImplementation(() => {});
vm.$el.querySelector('.js-scroll-bottom').click();
expect(vm.$emit).toHaveBeenCalledWith('scrollJobLogBottom');
@@ -173,7 +173,7 @@ describe('Job log controllers', () => {
});
it('does not emit scrollJobLogBottom event on click', () => {
- spyOn(vm, '$emit');
+ jest.spyOn(vm, '$emit').mockImplementation(() => {});
vm.$el.querySelector('.js-scroll-bottom').click();
expect(vm.$emit).not.toHaveBeenCalledWith('scrollJobLogBottom');
diff --git a/spec/frontend/jobs/components/log/collapsible_section_spec.js b/spec/frontend/jobs/components/log/collapsible_section_spec.js
index 01184a51193..3a16521a986 100644
--- a/spec/frontend/jobs/components/log/collapsible_section_spec.js
+++ b/spec/frontend/jobs/components/log/collapsible_section_spec.js
@@ -12,7 +12,6 @@ describe('Job Log Collapsible Section', () => {
const createComponent = (props = {}) => {
wrapper = mount(CollpasibleSection, {
- sync: true,
propsData: {
...props,
},
@@ -68,6 +67,9 @@ describe('Job Log Collapsible Section', () => {
});
findCollapsibleLine().trigger('click');
- expect(wrapper.emitted('onClickCollapsibleLine').length).toBe(1);
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted('onClickCollapsibleLine').length).toBe(1);
+ });
});
});
diff --git a/spec/frontend/jobs/components/log/duration_badge_spec.js b/spec/frontend/jobs/components/log/duration_badge_spec.js
index 2ac34e78909..84dae386bdb 100644
--- a/spec/frontend/jobs/components/log/duration_badge_spec.js
+++ b/spec/frontend/jobs/components/log/duration_badge_spec.js
@@ -10,7 +10,6 @@ describe('Job Log Duration Badge', () => {
const createComponent = (props = {}) => {
wrapper = shallowMount(DurationBadge, {
- sync: false,
propsData: {
...props,
},
diff --git a/spec/frontend/jobs/components/log/line_header_spec.js b/spec/frontend/jobs/components/log/line_header_spec.js
index 2d2f92fad9d..f2e202674ee 100644
--- a/spec/frontend/jobs/components/log/line_header_spec.js
+++ b/spec/frontend/jobs/components/log/line_header_spec.js
@@ -22,7 +22,6 @@ describe('Job Log Header Line', () => {
const createComponent = (props = {}) => {
wrapper = mount(LineHeader, {
- sync: false,
propsData: {
...props,
},
@@ -79,7 +78,9 @@ describe('Job Log Header Line', () => {
it('emits toggleLine event', () => {
wrapper.trigger('click');
- expect(wrapper.emitted().toggleLine.length).toBe(1);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted().toggleLine.length).toBe(1);
+ });
});
});
diff --git a/spec/frontend/jobs/components/log/line_number_spec.js b/spec/frontend/jobs/components/log/line_number_spec.js
index fcf2edf9159..96aa31baab9 100644
--- a/spec/frontend/jobs/components/log/line_number_spec.js
+++ b/spec/frontend/jobs/components/log/line_number_spec.js
@@ -11,7 +11,6 @@ describe('Job Log Line Number', () => {
const createComponent = (props = {}) => {
wrapper = shallowMount(LineNumber, {
- sync: false,
propsData: {
...props,
},
diff --git a/spec/frontend/jobs/components/log/line_spec.js b/spec/frontend/jobs/components/log/line_spec.js
index ea593e3c39a..ec3a3968f14 100644
--- a/spec/frontend/jobs/components/log/line_spec.js
+++ b/spec/frontend/jobs/components/log/line_spec.js
@@ -20,7 +20,6 @@ describe('Job Log Line', () => {
const createComponent = (props = {}) => {
wrapper = shallowMount(Line, {
- sync: false,
propsData: {
...props,
},
diff --git a/spec/frontend/jobs/components/log/log_spec.js b/spec/frontend/jobs/components/log/log_spec.js
index 7c834542a9a..02cdb31d27e 100644
--- a/spec/frontend/jobs/components/log/log_spec.js
+++ b/spec/frontend/jobs/components/log/log_spec.js
@@ -15,7 +15,6 @@ describe('Job Log', () => {
const createComponent = () => {
wrapper = mount(Log, {
- sync: false,
localVue,
store,
});
diff --git a/spec/frontend/jobs/components/log/mock_data.js b/spec/frontend/jobs/components/log/mock_data.js
index 01f69e6328c..587818045eb 100644
--- a/spec/frontend/jobs/components/log/mock_data.js
+++ b/spec/frontend/jobs/components/log/mock_data.js
@@ -34,7 +34,7 @@ export const utilsMockData = [
content: [
{
text:
- 'Using Docker executor with image dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.6.3-golang-1.12-git-2.24-lfs-2.9-chrome-73.0-node-12.x-yarn-1.16-postgresql-9.6-graphicsmagick-1.3.33',
+ 'Using Docker executor with image dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.6.5-golang-1.12-git-2.24-lfs-2.9-chrome-73.0-node-12.x-yarn-1.16-postgresql-9.6-graphicsmagick-1.3.33',
},
],
section: 'prepare-executor',
diff --git a/spec/frontend/lib/utils/datetime_utility_spec.js b/spec/frontend/lib/utils/datetime_utility_spec.js
index 872779299d2..e584150ba70 100644
--- a/spec/frontend/lib/utils/datetime_utility_spec.js
+++ b/spec/frontend/lib/utils/datetime_utility_spec.js
@@ -341,6 +341,16 @@ describe('prettyTime methods', () => {
assertTimeUnits(twoDays, 3, 48, 0, 0);
});
+
+  it('should correctly parse values when limitToDays is true', () => {
+ const sevenDays = datetimeUtility.parseSeconds(648750, {
+ hoursPerDay: 24,
+ daysPerWeek: 7,
+ limitToDays: true,
+ });
+
+ assertTimeUnits(sevenDays, 12, 12, 7, 0);
+ });
});
describe('stringifyTime', () => {
@@ -445,6 +455,23 @@ describe('getDateInPast', () => {
});
});
+describe('getDateInFuture', () => {
+ const date = new Date('2019-07-16T00:00:00.000Z');
+ const daysInFuture = 90;
+
+ it('returns the correct date in the future', () => {
+ const dateInFuture = datetimeUtility.getDateInFuture(date, daysInFuture);
+ const expectedDateInFuture = new Date('2019-10-14T00:00:00.000Z');
+
+ expect(dateInFuture).toStrictEqual(expectedDateInFuture);
+ });
+
+  it('does not modify the original date', () => {
+ datetimeUtility.getDateInFuture(date, daysInFuture);
+ expect(date).toStrictEqual(new Date('2019-07-16T00:00:00.000Z'));
+ });
+});
+
describe('getDatesInRange', () => {
it('returns an empty array if 1st or 2nd argument is not a Date object', () => {
const d1 = new Date('2019-01-01');
@@ -507,3 +534,32 @@ describe('secondsToDays', () => {
expect(datetimeUtility.secondsToDays(270000)).toBe(3);
});
});
+
+describe('approximateDuration', () => {
+ it.each`
+ seconds
+ ${null}
+ ${{}}
+ ${[]}
+ ${-1}
+ `('returns a blank string for seconds=$seconds', ({ seconds }) => {
+ expect(datetimeUtility.approximateDuration(seconds)).toBe('');
+ });
+
+ it.each`
+ seconds | approximation
+ ${0} | ${'less than a minute'}
+ ${25} | ${'less than a minute'}
+ ${45} | ${'1 minute'}
+ ${90} | ${'1 minute'}
+ ${100} | ${'1 minute'}
+ ${150} | ${'2 minutes'}
+ ${220} | ${'3 minutes'}
+ ${3000} | ${'about 1 hour'}
+ ${30000} | ${'about 8 hours'}
+ ${100000} | ${'1 day'}
+ ${180000} | ${'2 days'}
+ `('converts $seconds seconds to $approximation', ({ seconds, approximation }) => {
+ expect(datetimeUtility.approximateDuration(seconds)).toBe(approximation);
+ });
+});
diff --git a/spec/frontend/lib/utils/poll_until_complete_spec.js b/spec/frontend/lib/utils/poll_until_complete_spec.js
new file mode 100644
index 00000000000..15602b87b9c
--- /dev/null
+++ b/spec/frontend/lib/utils/poll_until_complete_spec.js
@@ -0,0 +1,89 @@
+import AxiosMockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
+import pollUntilComplete from '~/lib/utils/poll_until_complete';
+import httpStatusCodes from '~/lib/utils/http_status';
+import { TEST_HOST } from 'helpers/test_constants';
+
+const endpoint = `${TEST_HOST}/foo`;
+const mockData = 'mockData';
+const pollInterval = 1234;
+const pollIntervalHeader = {
+ 'Poll-Interval': pollInterval,
+};
+
+describe('pollUntilComplete', () => {
+ let mock;
+
+ beforeEach(() => {
+ mock = new AxiosMockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('given an immediate success response', () => {
+ beforeEach(() => {
+ mock.onGet(endpoint).replyOnce(httpStatusCodes.OK, mockData);
+ });
+
+ it('resolves with the response', () =>
+ pollUntilComplete(endpoint).then(({ data }) => {
+ expect(data).toBe(mockData);
+ }));
+ });
+
+ describe(`given the endpoint returns NO_CONTENT with a Poll-Interval before succeeding`, () => {
+ beforeEach(() => {
+ mock
+ .onGet(endpoint)
+ .replyOnce(httpStatusCodes.NO_CONTENT, undefined, pollIntervalHeader)
+ .onGet(endpoint)
+ .replyOnce(httpStatusCodes.OK, mockData);
+ });
+
+ it('calls the endpoint until it succeeds, and resolves with the response', () =>
+ Promise.all([
+ pollUntilComplete(endpoint).then(({ data }) => {
+ expect(data).toBe(mockData);
+ expect(mock.history.get).toHaveLength(2);
+ }),
+
+ // To ensure the above pollUntilComplete() promise is actually
+      // fulfilled, we must explicitly run the timers forward by the time
+ // indicated in the headers *after* each previous request has been
+ // fulfilled.
+ axios
+ // wait for initial NO_CONTENT response to be fulfilled
+ .waitForAll()
+ .then(() => {
+ jest.advanceTimersByTime(pollInterval);
+ }),
+ ]));
+ });
+
+ describe('given the endpoint returns an error status', () => {
+ const errorMessage = 'error message';
+
+ beforeEach(() => {
+ mock.onGet(endpoint).replyOnce(httpStatusCodes.NOT_FOUND, errorMessage);
+ });
+
+ it('rejects with the error response', () =>
+ pollUntilComplete(endpoint).catch(error => {
+ expect(error.response.data).toBe(errorMessage);
+ }));
+ });
+
+ describe('given params', () => {
+ const params = { foo: 'bar' };
+ beforeEach(() => {
+ mock.onGet(endpoint, { params }).replyOnce(httpStatusCodes.OK, mockData);
+ });
+
+ it('requests the expected URL', () =>
+ pollUntilComplete(endpoint, { params }).then(({ data }) => {
+ expect(data).toBe(mockData);
+ }));
+ });
+});
diff --git a/spec/frontend/lib/utils/text_utility_spec.js b/spec/frontend/lib/utils/text_utility_spec.js
index deb6dab772e..803b3629524 100644
--- a/spec/frontend/lib/utils/text_utility_spec.js
+++ b/spec/frontend/lib/utils/text_utility_spec.js
@@ -27,6 +27,9 @@ describe('text_utility', () => {
it('should remove underscores and uppercase the first letter', () => {
expect(textUtils.humanize('foo_bar')).toEqual('Foo bar');
});
+ it('should remove underscores and dashes and uppercase the first letter', () => {
+ expect(textUtils.humanize('foo_bar-foo', '[_-]')).toEqual('Foo bar foo');
+ });
});
describe('dasherize', () => {
@@ -52,14 +55,20 @@ describe('text_utility', () => {
expect(textUtils.slugify(' a new project ')).toEqual('a-new-project');
});
it('should only remove non-allowed special characters', () => {
- expect(textUtils.slugify('test!_pro-ject~')).toEqual('test-_pro-ject-');
+ expect(textUtils.slugify('test!_pro-ject~')).toEqual('test-_pro-ject');
});
it('should squash multiple hypens', () => {
- expect(textUtils.slugify('test!!!!_pro-ject~')).toEqual('test-_pro-ject-');
+ expect(textUtils.slugify('test!!!!_pro-ject~')).toEqual('test-_pro-ject');
});
it('should return empty string if only non-allowed characters', () => {
expect(textUtils.slugify('здрасти')).toEqual('');
});
+ it('should squash multiple separators', () => {
+ expect(textUtils.slugify('Test:-)')).toEqual('test');
+ });
+ it('should trim any separators from the beginning and end of the slug', () => {
+ expect(textUtils.slugify('-Test:-)-')).toEqual('test');
+ });
});
describe('stripHtml', () => {
@@ -109,6 +118,12 @@ describe('text_utility', () => {
});
});
+ describe('convertToTitleCase', () => {
+ it('converts sentence case to Sentence Case', () => {
+ expect(textUtils.convertToTitleCase('hello world')).toBe('Hello World');
+ });
+ });
+
describe('truncateSha', () => {
it('shortens SHAs to 8 characters', () => {
expect(textUtils.truncateSha('verylongsha')).toBe('verylong');
diff --git a/spec/frontend/monitoring/components/__snapshots__/empty_state_spec.js.snap b/spec/frontend/monitoring/components/__snapshots__/empty_state_spec.js.snap
index 5f24bab600c..31b3ad1bd76 100644
--- a/spec/frontend/monitoring/components/__snapshots__/empty_state_spec.js.snap
+++ b/spec/frontend/monitoring/components/__snapshots__/empty_state_spec.js.snap
@@ -1,7 +1,7 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`EmptyState shows gettingStarted state 1`] = `
-<glemptystate-stub
+<gl-empty-state-stub
description="Stay updated about the performance and health of your environment by configuring Prometheus to monitor your deployments."
primarybuttonlink="/clustersPath"
primarybuttontext="Install on clusters"
@@ -13,7 +13,7 @@ exports[`EmptyState shows gettingStarted state 1`] = `
`;
exports[`EmptyState shows loading state 1`] = `
-<glemptystate-stub
+<gl-empty-state-stub
description="Creating graphs uses the data from the Prometheus server. If this takes a long time, ensure that data is available."
primarybuttonlink="/documentationPath"
primarybuttontext="View documentation"
@@ -25,7 +25,7 @@ exports[`EmptyState shows loading state 1`] = `
`;
exports[`EmptyState shows unableToConnect state 1`] = `
-<glemptystate-stub
+<gl-empty-state-stub
description="Ensure connectivity is available from the GitLab server to the Prometheus server"
primarybuttonlink="/documentationPath"
primarybuttontext="View documentation"
diff --git a/spec/frontend/monitoring/components/__snapshots__/group_empty_state_spec.js.snap b/spec/frontend/monitoring/components/__snapshots__/group_empty_state_spec.js.snap
index 7f37a83d291..c30fb572826 100644
--- a/spec/frontend/monitoring/components/__snapshots__/group_empty_state_spec.js.snap
+++ b/spec/frontend/monitoring/components/__snapshots__/group_empty_state_spec.js.snap
@@ -1,7 +1,7 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`GroupEmptyState Renders an empty state for BAD_QUERY 1`] = `
-<glemptystate-stub
+<gl-empty-state-stub
compact="true"
primarybuttonlink="/path/to/settings"
primarybuttontext="Verify configuration"
@@ -13,7 +13,7 @@ exports[`GroupEmptyState Renders an empty state for BAD_QUERY 1`] = `
exports[`GroupEmptyState Renders an empty state for BAD_QUERY 2`] = `"The Prometheus server responded with \\"bad request\\". Please check your queries are correct and are supported in your Prometheus version. <a href=\\"/path/to/docs\\">More information</a>"`;
exports[`GroupEmptyState Renders an empty state for CONNECTION_FAILED 1`] = `
-<glemptystate-stub
+<gl-empty-state-stub
compact="true"
description="We couldn't reach the Prometheus server. Either the server no longer exists or the configuration details need updating."
primarybuttonlink="/path/to/settings"
@@ -26,7 +26,7 @@ exports[`GroupEmptyState Renders an empty state for CONNECTION_FAILED 1`] = `
exports[`GroupEmptyState Renders an empty state for CONNECTION_FAILED 2`] = `undefined`;
exports[`GroupEmptyState Renders an empty state for FOO STATE 1`] = `
-<glemptystate-stub
+<gl-empty-state-stub
compact="true"
description="An error occurred while loading the data. Please try again."
svgpath="/path/to/empty-group-illustration.svg"
@@ -37,7 +37,7 @@ exports[`GroupEmptyState Renders an empty state for FOO STATE 1`] = `
exports[`GroupEmptyState Renders an empty state for FOO STATE 2`] = `undefined`;
exports[`GroupEmptyState Renders an empty state for LOADING 1`] = `
-<glemptystate-stub
+<gl-empty-state-stub
compact="true"
description="Creating graphs uses the data from the Prometheus server. If this takes a long time, ensure that data is available."
svgpath="/path/to/empty-group-illustration.svg"
@@ -48,7 +48,7 @@ exports[`GroupEmptyState Renders an empty state for LOADING 1`] = `
exports[`GroupEmptyState Renders an empty state for LOADING 2`] = `undefined`;
exports[`GroupEmptyState Renders an empty state for NO_DATA 1`] = `
-<glemptystate-stub
+<gl-empty-state-stub
compact="true"
svgpath="/path/to/empty-group-illustration.svg"
title="No data to display"
@@ -58,7 +58,7 @@ exports[`GroupEmptyState Renders an empty state for NO_DATA 1`] = `
exports[`GroupEmptyState Renders an empty state for NO_DATA 2`] = `"The data source is connected, but there is no data to display. <a href=\\"/path/to/docs\\">More information</a>"`;
exports[`GroupEmptyState Renders an empty state for TIMEOUT 1`] = `
-<glemptystate-stub
+<gl-empty-state-stub
compact="true"
svgpath="/path/to/empty-group-illustration.svg"
title="Connection timed out"
@@ -68,7 +68,7 @@ exports[`GroupEmptyState Renders an empty state for TIMEOUT 1`] = `
exports[`GroupEmptyState Renders an empty state for TIMEOUT 2`] = `"Charts can't be displayed as the request for data has timed out. <a href=\\"/path/to/docs\\">More information</a>"`;
exports[`GroupEmptyState Renders an empty state for UNKNOWN_ERROR 1`] = `
-<glemptystate-stub
+<gl-empty-state-stub
compact="true"
description="An error occurred while loading the data. Please try again."
svgpath="/path/to/empty-group-illustration.svg"
diff --git a/spec/frontend/monitoring/components/charts/anomaly_spec.js b/spec/frontend/monitoring/components/charts/anomaly_spec.js
index 7446461a574..cea22d075ec 100644
--- a/spec/frontend/monitoring/components/charts/anomaly_spec.js
+++ b/spec/frontend/monitoring/components/charts/anomaly_spec.js
@@ -38,7 +38,6 @@ describe('Anomaly chart component', () => {
slots: {
default: mockWidgets,
},
- sync: false,
});
};
const findTimeSeries = () => wrapper.find(MonitorTimeSeriesChart);
diff --git a/spec/frontend/monitoring/components/charts/column_spec.js b/spec/frontend/monitoring/components/charts/column_spec.js
index b4539801e0f..d6a96ffbd65 100644
--- a/spec/frontend/monitoring/components/charts/column_spec.js
+++ b/spec/frontend/monitoring/components/charts/column_spec.js
@@ -1,9 +1,7 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import { GlColumnChart } from '@gitlab/ui/dist/charts';
import ColumnChart from '~/monitoring/components/charts/column.vue';
-const localVue = createLocalVue();
-
jest.mock('~/lib/utils/icon_utils', () => ({
getSvgIconPathContent: jest.fn().mockResolvedValue('mockSvgPathContent'),
}));
@@ -12,7 +10,7 @@ describe('Column component', () => {
let columnChart;
beforeEach(() => {
- columnChart = shallowMount(localVue.extend(ColumnChart), {
+ columnChart = shallowMount(ColumnChart, {
propsData: {
graphData: {
metrics: [
@@ -34,8 +32,6 @@ describe('Column component', () => {
},
containerWidth: 100,
},
- sync: false,
- localVue,
});
});
diff --git a/spec/frontend/monitoring/components/charts/empty_chart_spec.js b/spec/frontend/monitoring/components/charts/empty_chart_spec.js
index 06822126b59..bbfca27dc5a 100644
--- a/spec/frontend/monitoring/components/charts/empty_chart_spec.js
+++ b/spec/frontend/monitoring/components/charts/empty_chart_spec.js
@@ -1,19 +1,15 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import EmptyChart from '~/monitoring/components/charts/empty_chart.vue';
-const localVue = createLocalVue();
-
describe('Empty Chart component', () => {
let emptyChart;
const graphTitle = 'Memory Usage';
beforeEach(() => {
- emptyChart = shallowMount(localVue.extend(EmptyChart), {
+ emptyChart = shallowMount(EmptyChart, {
propsData: {
graphTitle,
},
- sync: false,
- localVue,
});
});
diff --git a/spec/frontend/monitoring/components/charts/single_stat_spec.js b/spec/frontend/monitoring/components/charts/single_stat_spec.js
index 78bcc400787..2410dae112b 100644
--- a/spec/frontend/monitoring/components/charts/single_stat_spec.js
+++ b/spec/frontend/monitoring/components/charts/single_stat_spec.js
@@ -1,19 +1,15 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import SingleStatChart from '~/monitoring/components/charts/single_stat.vue';
import { graphDataPrometheusQuery } from '../../mock_data';
-const localVue = createLocalVue();
-
describe('Single Stat Chart component', () => {
let singleStatChart;
beforeEach(() => {
- singleStatChart = shallowMount(localVue.extend(SingleStatChart), {
+ singleStatChart = shallowMount(SingleStatChart, {
propsData: {
graphData: graphDataPrometheusQuery,
},
- sync: false,
- localVue,
});
});
diff --git a/spec/frontend/monitoring/components/charts/time_series_spec.js b/spec/frontend/monitoring/components/charts/time_series_spec.js
index 098b3408e67..d9960b3d18e 100644
--- a/spec/frontend/monitoring/components/charts/time_series_spec.js
+++ b/spec/frontend/monitoring/components/charts/time_series_spec.js
@@ -58,9 +58,7 @@ describe('Time series component', () => {
slots: {
default: mockWidgets,
},
- sync: false,
store,
- attachToDocument: true,
});
});
@@ -83,13 +81,17 @@ describe('Time series component', () => {
it('allows user to override max value label text using prop', () => {
timeSeriesChart.setProps({ legendMaxText: 'legendMaxText' });
- expect(timeSeriesChart.props().legendMaxText).toBe('legendMaxText');
+ return timeSeriesChart.vm.$nextTick().then(() => {
+ expect(timeSeriesChart.props().legendMaxText).toBe('legendMaxText');
+ });
});
it('allows user to override average value label text using prop', () => {
timeSeriesChart.setProps({ legendAverageText: 'averageText' });
- expect(timeSeriesChart.props().legendAverageText).toBe('averageText');
+ return timeSeriesChart.vm.$nextTick().then(() => {
+ expect(timeSeriesChart.props().legendAverageText).toBe('averageText');
+ });
});
describe('methods', () => {
@@ -267,7 +269,9 @@ describe('Time series component', () => {
option: mockOption,
});
- expect(timeSeriesChart.vm.chartOptions).toEqual(expect.objectContaining(mockOption));
+ return timeSeriesChart.vm.$nextTick().then(() => {
+ expect(timeSeriesChart.vm.chartOptions).toEqual(expect.objectContaining(mockOption));
+ });
});
it('additional series', () => {
@@ -281,10 +285,12 @@ describe('Time series component', () => {
},
});
- const optionSeries = timeSeriesChart.vm.chartOptions.series;
+ return timeSeriesChart.vm.$nextTick().then(() => {
+ const optionSeries = timeSeriesChart.vm.chartOptions.series;
- expect(optionSeries.length).toEqual(2);
- expect(optionSeries[0].name).toEqual(mockSeriesName);
+ expect(optionSeries.length).toEqual(2);
+ expect(optionSeries[0].name).toEqual(mockSeriesName);
+ });
});
});
@@ -340,11 +346,10 @@ describe('Time series component', () => {
glChartComponents.forEach(dynamicComponent => {
describe(`GitLab UI: ${dynamicComponent.chartType}`, () => {
let timeSeriesAreaChart;
- let glChart;
+ const findChart = () => timeSeriesAreaChart.find(dynamicComponent.component);
beforeEach(done => {
timeSeriesAreaChart = makeTimeSeriesChart(mockGraphData, dynamicComponent.chartType);
- glChart = timeSeriesAreaChart.find(dynamicComponent.component);
timeSeriesAreaChart.vm.$nextTick(done);
});
@@ -353,12 +358,12 @@ describe('Time series component', () => {
});
it('is a Vue instance', () => {
- expect(glChart.exists()).toBe(true);
- expect(glChart.isVueInstance()).toBe(true);
+ expect(findChart().exists()).toBe(true);
+ expect(findChart().isVueInstance()).toBe(true);
});
it('receives data properties needed for proper chart render', () => {
- const props = glChart.props();
+ const props = findChart().props();
expect(props.data).toBe(timeSeriesAreaChart.vm.chartData);
expect(props.option).toBe(timeSeriesAreaChart.vm.chartOptions);
@@ -371,7 +376,9 @@ describe('Time series component', () => {
timeSeriesAreaChart.vm.tooltip.title = mockTitle;
timeSeriesAreaChart.vm.$nextTick(() => {
- expect(shallowWrapperContainsSlotText(glChart, 'tooltipTitle', mockTitle)).toBe(true);
+ expect(shallowWrapperContainsSlotText(findChart(), 'tooltipTitle', mockTitle)).toBe(
+ true,
+ );
done();
});
});
@@ -386,7 +393,9 @@ describe('Time series component', () => {
});
it('uses deployment title', () => {
- expect(shallowWrapperContainsSlotText(glChart, 'tooltipTitle', 'Deployed')).toBe(true);
+ expect(shallowWrapperContainsSlotText(findChart(), 'tooltipTitle', 'Deployed')).toBe(
+ true,
+ );
});
it('renders clickable commit sha in tooltip content', done => {
diff --git a/spec/frontend/monitoring/components/dashboard_spec.js b/spec/frontend/monitoring/components/dashboard_spec.js
new file mode 100644
index 00000000000..85408d57dde
--- /dev/null
+++ b/spec/frontend/monitoring/components/dashboard_spec.js
@@ -0,0 +1,553 @@
+import { shallowMount, createLocalVue, mount } from '@vue/test-utils';
+import { GlDropdownItem, GlButton, GlToast } from '@gitlab/ui';
+import VueDraggable from 'vuedraggable';
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
+import statusCodes from '~/lib/utils/http_status';
+import { metricStates } from '~/monitoring/constants';
+import Dashboard from '~/monitoring/components/dashboard.vue';
+
+import DashboardsDropdown from '~/monitoring/components/dashboards_dropdown.vue';
+import DateTimePicker from '~/monitoring/components/date_time_picker/date_time_picker.vue';
+import GroupEmptyState from '~/monitoring/components/group_empty_state.vue';
+import { createStore } from '~/monitoring/stores';
+import * as types from '~/monitoring/stores/mutation_types';
+import { setupComponentStore, propsData } from '../init_utils';
+import {
+ metricsGroupsAPIResponse,
+ mockedQueryResultPayload,
+ mockApiEndpoint,
+ environmentData,
+ dashboardGitResponse,
+} from '../mock_data';
+
+const localVue = createLocalVue();
+const expectedPanelCount = 2;
+
+describe('Dashboard', () => {
+ let store;
+ let wrapper;
+ let mock;
+
+ const createShallowWrapper = (props = {}, options = {}) => {
+ wrapper = shallowMount(Dashboard, {
+ localVue,
+ propsData: { ...propsData, ...props },
+ store,
+ ...options,
+ });
+ };
+
+ const createMountedWrapper = (props = {}, options = {}) => {
+ wrapper = mount(Dashboard, {
+ localVue,
+ propsData: { ...propsData, ...props },
+ store,
+ ...options,
+ });
+ };
+
+ beforeEach(() => {
+ store = createStore();
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ }
+ mock.restore();
+ });
+
+ describe('no metrics are available yet', () => {
+ beforeEach(() => {
+ mock.onGet(mockApiEndpoint).reply(statusCodes.OK, metricsGroupsAPIResponse);
+
+ createShallowWrapper();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('shows the environment selector', () => {
+ expect(wrapper.vm.$el.querySelector('.js-environments-dropdown')).toBeTruthy();
+ });
+ });
+
+ describe('no data found', () => {
+ beforeEach(done => {
+ mock.onGet(mockApiEndpoint).reply(statusCodes.OK, metricsGroupsAPIResponse);
+
+ createShallowWrapper();
+
+ wrapper.vm.$nextTick(done);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('shows the environment selector dropdown', () => {
+ expect(wrapper.vm.$el.querySelector('.js-environments-dropdown')).toBeTruthy();
+ });
+ });
+
+  describe('requests information from the server', () => {
+ beforeEach(() => {
+ mock.onGet(mockApiEndpoint).reply(200, metricsGroupsAPIResponse);
+ });
+
+    it('shows a loading state', done => {
+ createShallowWrapper({ hasMetrics: true });
+
+ wrapper.vm
+ .$nextTick()
+ .then(() => {
+ expect(wrapper.vm.emptyState).toEqual('loading');
+
+ done();
+ })
+ .catch(done.fail);
+ });
+
+ it('hides the group panels when showPanels is false', done => {
+ createMountedWrapper(
+ { hasMetrics: true, showPanels: false },
+ { stubs: ['graph-group', 'panel-type'] },
+ );
+
+ setupComponentStore(wrapper);
+
+ wrapper.vm
+ .$nextTick()
+ .then(() => {
+ expect(wrapper.vm.showEmptyState).toEqual(false);
+ expect(wrapper.vm.$el.querySelector('.prometheus-panel')).toEqual(null);
+          // TODO: The last expectation doesn't belong here, it belongs in a `graph_group_spec.js` file
+ // Issue: https://gitlab.com/gitlab-org/gitlab/issues/118780
+ // expect(wrapper.vm.$el.querySelector('.prometheus-graph-group')).toBeTruthy();
+
+ done();
+ })
+ .catch(done.fail);
+ });
+
+ it('fetches the metrics data with proper time window', done => {
+ jest.spyOn(store, 'dispatch');
+
+ createMountedWrapper({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });
+
+ wrapper.vm.$store.commit(
+ `monitoringDashboard/${types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS}`,
+ environmentData,
+ );
+
+ wrapper.vm
+ .$nextTick()
+ .then(() => {
+ expect(store.dispatch).toHaveBeenCalled();
+
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
+  describe('when all requests have been committed by the store', () => {
+ beforeEach(() => {
+ mock.onGet(mockApiEndpoint).reply(statusCodes.OK, metricsGroupsAPIResponse);
+
+ createMountedWrapper({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });
+
+ setupComponentStore(wrapper);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders the environments dropdown with a number of environments', done => {
+ wrapper.vm
+ .$nextTick()
+ .then(() => {
+ const environmentDropdownItems = wrapper
+ .find('.js-environments-dropdown')
+ .findAll(GlDropdownItem);
+
+ expect(wrapper.vm.environments.length).toEqual(environmentData.length);
+ expect(environmentDropdownItems.length).toEqual(wrapper.vm.environments.length);
+
+ environmentDropdownItems.wrappers.forEach((itemWrapper, index) => {
+ const anchorEl = itemWrapper.find('a');
+ if (anchorEl.exists() && environmentData[index].metrics_path) {
+ const href = anchorEl.attributes('href');
+ expect(href).toBe(environmentData[index].metrics_path);
+ }
+ });
+
+ done();
+ })
+ .catch(done.fail);
+ });
+
+ it('renders the environments dropdown with a single active element', done => {
+ wrapper.vm
+ .$nextTick()
+ .then(() => {
+ const environmentDropdownItems = wrapper
+ .find('.js-environments-dropdown')
+ .findAll(GlDropdownItem);
+ const activeItem = environmentDropdownItems.wrappers.filter(itemWrapper =>
+ itemWrapper.find('.active').exists(),
+ );
+
+ expect(activeItem.length).toBe(1);
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
+  it('hides the environments dropdown list when there are no environments', done => {
+ createMountedWrapper({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });
+
+ wrapper.vm.$store.commit(
+ `monitoringDashboard/${types.RECEIVE_METRICS_DATA_SUCCESS}`,
+ metricsGroupsAPIResponse,
+ );
+ wrapper.vm.$store.commit(
+ `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
+ mockedQueryResultPayload,
+ );
+
+ wrapper.vm
+ .$nextTick()
+ .then(() => {
+ const environmentDropdownItems = wrapper
+ .find('.js-environments-dropdown')
+ .findAll(GlDropdownItem);
+
+ expect(environmentDropdownItems.length).toEqual(0);
+ done();
+ })
+ .catch(done.fail);
+ });
+
+ it('renders the datetimepicker dropdown', done => {
+ createMountedWrapper({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });
+
+ setupComponentStore(wrapper);
+
+ wrapper.vm
+ .$nextTick()
+ .then(() => {
+ expect(wrapper.find(DateTimePicker).exists()).toBe(true);
+ done();
+ })
+ .catch(done.fail);
+ });
+
+ describe('when one of the metrics is missing', () => {
+ beforeEach(done => {
+ mock.onGet(mockApiEndpoint).reply(200, metricsGroupsAPIResponse);
+
+ createShallowWrapper({ hasMetrics: true });
+ setupComponentStore(wrapper);
+
+ wrapper.vm.$nextTick(done);
+ });
+
+ it('shows a group empty area', () => {
+ const emptyGroup = wrapper.findAll({ ref: 'empty-group' });
+
+ expect(emptyGroup).toHaveLength(1);
+ expect(emptyGroup.is(GroupEmptyState)).toBe(true);
+ });
+
+ it('group empty area displays a NO_DATA state', () => {
+ expect(
+ wrapper
+ .findAll({ ref: 'empty-group' })
+ .at(0)
+ .props('selectedState'),
+ ).toEqual(metricStates.NO_DATA);
+ });
+ });
+
+ describe('drag and drop function', () => {
+ const findDraggables = () => wrapper.findAll(VueDraggable);
+ const findEnabledDraggables = () => findDraggables().filter(f => !f.attributes('disabled'));
+ const findDraggablePanels = () => wrapper.findAll('.js-draggable-panel');
+ const findRearrangeButton = () => wrapper.find('.js-rearrange-button');
+
+ beforeEach(() => {
+ mock.onGet(mockApiEndpoint).reply(statusCodes.OK, metricsGroupsAPIResponse);
+ });
+
+ beforeEach(done => {
+ createShallowWrapper({ hasMetrics: true });
+
+ setupComponentStore(wrapper);
+
+ wrapper.vm.$nextTick(done);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('wraps vuedraggable', () => {
+ expect(findDraggablePanels().exists()).toBe(true);
+ expect(findDraggablePanels().length).toEqual(expectedPanelCount);
+ });
+
+ it('is disabled by default', () => {
+ expect(findRearrangeButton().exists()).toBe(false);
+ expect(findEnabledDraggables().length).toBe(0);
+ });
+
+ describe('when rearrange is enabled', () => {
+ beforeEach(done => {
+ wrapper.setProps({ rearrangePanelsAvailable: true });
+ wrapper.vm.$nextTick(done);
+ });
+
+ it('displays rearrange button', () => {
+ expect(findRearrangeButton().exists()).toBe(true);
+ });
+
+ describe('when rearrange button is clicked', () => {
+ const findFirstDraggableRemoveButton = () =>
+ findDraggablePanels()
+ .at(0)
+ .find('.js-draggable-remove');
+
+ beforeEach(done => {
+ findRearrangeButton().vm.$emit('click');
+ wrapper.vm.$nextTick(done);
+ });
+
+        it('enables draggables', () => {
+ expect(findRearrangeButton().attributes('pressed')).toBeTruthy();
+ expect(findEnabledDraggables()).toEqual(findDraggables());
+ });
+
+ it('metrics can be swapped', done => {
+ const firstDraggable = findDraggables().at(0);
+ const mockMetrics = [...metricsGroupsAPIResponse.panel_groups[1].panels];
+
+ const firstTitle = mockMetrics[0].title;
+ const secondTitle = mockMetrics[1].title;
+
+ // swap two elements and `input` them
+ [mockMetrics[0], mockMetrics[1]] = [mockMetrics[1], mockMetrics[0]];
+ firstDraggable.vm.$emit('input', mockMetrics);
+
+ wrapper.vm.$nextTick(() => {
+ const { panels } = wrapper.vm.dashboard.panel_groups[1];
+
+ expect(panels[1].title).toEqual(firstTitle);
+ expect(panels[0].title).toEqual(secondTitle);
+ done();
+ });
+ });
+
+ it('shows a remove button, which removes a panel', done => {
+ expect(findFirstDraggableRemoveButton().isEmpty()).toBe(false);
+
+ expect(findDraggablePanels().length).toEqual(expectedPanelCount);
+ findFirstDraggableRemoveButton().trigger('click');
+
+ wrapper.vm.$nextTick(() => {
+ expect(findDraggablePanels().length).toEqual(expectedPanelCount - 1);
+ done();
+ });
+ });
+
+        it('disables draggables when clicked again', done => {
+ findRearrangeButton().vm.$emit('click');
+ wrapper.vm.$nextTick(() => {
+ expect(findRearrangeButton().attributes('pressed')).toBeFalsy();
+ expect(findEnabledDraggables().length).toBe(0);
+ done();
+ });
+ });
+ });
+ });
+ });
+
+ describe('cluster health', () => {
+ beforeEach(done => {
+ mock.onGet(propsData.metricsEndpoint).reply(statusCodes.OK, JSON.stringify({}));
+ createShallowWrapper({ hasMetrics: true });
+
+ // all_dashboards is not defined in health dashboards
+ wrapper.vm.$store.commit(`monitoringDashboard/${types.SET_ALL_DASHBOARDS}`, undefined);
+ wrapper.vm.$nextTick(done);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders correctly', () => {
+ expect(wrapper.isVueInstance()).toBe(true);
+ expect(wrapper.exists()).toBe(true);
+ });
+ });
+
+ describe('dashboard edit link', () => {
+ const findEditLink = () => wrapper.find('.js-edit-link');
+
+ beforeEach(done => {
+ mock.onGet(mockApiEndpoint).reply(statusCodes.OK, metricsGroupsAPIResponse);
+
+ createShallowWrapper({ hasMetrics: true });
+
+ wrapper.vm.$store.commit(
+ `monitoringDashboard/${types.SET_ALL_DASHBOARDS}`,
+ dashboardGitResponse,
+ );
+ wrapper.vm.$nextTick(done);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('is not present for the default dashboard', () => {
+ expect(findEditLink().exists()).toBe(false);
+ });
+
+ it('is present for a custom dashboard, and links to its edit_path', done => {
+ const dashboard = dashboardGitResponse[1]; // non-default dashboard
+ const currentDashboard = dashboard.path;
+
+ wrapper.setProps({ currentDashboard });
+ wrapper.vm
+ .$nextTick()
+ .then(() => {
+ expect(findEditLink().exists()).toBe(true);
+ expect(findEditLink().attributes('href')).toBe(dashboard.project_blob_path);
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
+ describe('Dashboard dropdown', () => {
+ beforeEach(() => {
+ mock.onGet(mockApiEndpoint).reply(200, metricsGroupsAPIResponse);
+
+ createMountedWrapper({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });
+
+ wrapper.vm.$store.commit(
+ `monitoringDashboard/${types.SET_ALL_DASHBOARDS}`,
+ dashboardGitResponse,
+ );
+ });
+
+ it('shows the dashboard dropdown', done => {
+ wrapper.vm
+ .$nextTick()
+ .then(() => {
+ const dashboardDropdown = wrapper.find(DashboardsDropdown);
+
+ expect(dashboardDropdown.exists()).toBe(true);
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
+ describe('external dashboard link', () => {
+ beforeEach(() => {
+ mock.onGet(mockApiEndpoint).reply(200, metricsGroupsAPIResponse);
+
+ createMountedWrapper(
+ {
+ hasMetrics: true,
+ showPanels: false,
+ showTimeWindowDropdown: false,
+ externalDashboardUrl: '/mockUrl',
+ },
+ { stubs: ['graph-group', 'panel-type'] },
+ );
+ });
+
+ it('shows the link', done => {
+ wrapper.vm
+ .$nextTick()
+ .then(() => {
+ const externalDashboardButton = wrapper.find('.js-external-dashboard-link');
+
+ expect(externalDashboardButton.exists()).toBe(true);
+ expect(externalDashboardButton.is(GlButton)).toBe(true);
+ expect(externalDashboardButton.text()).toContain('View full dashboard');
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
+ // https://gitlab.com/gitlab-org/gitlab-ce/issues/66922
+ // eslint-disable-next-line jest/no-disabled-tests
+ describe.skip('link to chart', () => {
+ const currentDashboard = 'TEST_DASHBOARD';
+ localVue.use(GlToast);
+ const link = () => wrapper.find('.js-chart-link');
+ const clipboardText = () => link().element.dataset.clipboardText;
+
+ beforeEach(done => {
+ mock.onGet(mockApiEndpoint).reply(200, metricsGroupsAPIResponse);
+
+ createShallowWrapper({ hasMetrics: true, currentDashboard });
+
+ setTimeout(done);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('adds a copy button to the dropdown', () => {
+ expect(link().text()).toContain('Generate link to chart');
+ });
+
+ it('contains a link to the dashboard', () => {
+ expect(clipboardText()).toContain(`dashboard=${currentDashboard}`);
+ expect(clipboardText()).toContain(`group=`);
+ expect(clipboardText()).toContain(`title=`);
+ expect(clipboardText()).toContain(`y_label=`);
+ });
+
+ it('undefined parameter is stripped', done => {
+ wrapper.setProps({ currentDashboard: undefined });
+
+ wrapper.vm.$nextTick(() => {
+ expect(clipboardText()).not.toContain(`dashboard=`);
+ expect(clipboardText()).toContain(`y_label=`);
+ done();
+ });
+ });
+
+ it('null parameter is stripped', done => {
+ wrapper.setProps({ currentDashboard: null });
+
+ wrapper.vm.$nextTick(() => {
+ expect(clipboardText()).not.toContain(`dashboard=`);
+ expect(clipboardText()).toContain(`y_label=`);
+ done();
+ });
+ });
+
+ it('creates a toast when clicked', () => {
+      jest.spyOn(wrapper.vm.$toast, 'show').mockImplementation(() => {});
+
+ link().vm.$emit('click');
+
+ expect(wrapper.vm.$toast.show).toHaveBeenCalled();
+ });
+ });
+});
diff --git a/spec/frontend/monitoring/components/dashboard_time_url_spec.js b/spec/frontend/monitoring/components/dashboard_time_url_spec.js
new file mode 100644
index 00000000000..2da377eb79f
--- /dev/null
+++ b/spec/frontend/monitoring/components/dashboard_time_url_spec.js
@@ -0,0 +1,51 @@
+import { mount } from '@vue/test-utils';
+import createFlash from '~/flash';
+import MockAdapter from 'axios-mock-adapter';
+import Dashboard from '~/monitoring/components/dashboard.vue';
+import { createStore } from '~/monitoring/stores';
+import { propsData } from '../init_utils';
+import axios from '~/lib/utils/axios_utils';
+
+jest.mock('~/flash');
+
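+// getParameterValues is stubbed to return a malicious-looking value so the
+// dashboard takes its invalid-parameter path and raises a flash error.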
+jest.mock('~/lib/utils/url_utility', () => ({
+ getParameterValues: jest.fn().mockReturnValue('<script>alert("XSS")</script>'),
+}));
+
+describe('dashboard invalid url parameters', () => {
+ let store;
+ let wrapper;
+ let mock;
+
+ const createMountedWrapper = (props = {}, options = {}) => {
+ wrapper = mount(Dashboard, {
+ propsData: { ...propsData, ...props },
+ store,
+ ...options,
+ });
+ };
+
+ beforeEach(() => {
+ store = createStore();
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ }
+ mock.restore();
+ });
+
+ it('shows an error message if invalid url parameters are passed', done => {
+ createMountedWrapper({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });
+
+ wrapper.vm
+ .$nextTick()
+ .then(() => {
+ expect(createFlash).toHaveBeenCalled();
+ done();
+ })
+ .catch(done.fail);
+ });
+});
diff --git a/spec/frontend/monitoring/components/dashboard_time_window_spec.js b/spec/frontend/monitoring/components/dashboard_time_window_spec.js
new file mode 100644
index 00000000000..4acc2d75b73
--- /dev/null
+++ b/spec/frontend/monitoring/components/dashboard_time_window_spec.js
@@ -0,0 +1,68 @@
+import { mount } from '@vue/test-utils';
+import { GlDropdownItem } from '@gitlab/ui';
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
+import statusCodes from '~/lib/utils/http_status';
+import Dashboard from '~/monitoring/components/dashboard.vue';
+import { createStore } from '~/monitoring/stores';
+import { propsData, setupComponentStore } from '../init_utils';
+import { metricsGroupsAPIResponse, mockApiEndpoint } from '../mock_data';
+
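+// Stub the URL helpers so the dashboard reads a fixed 30-minute start/end
+// range from the query string and can mark the matching time window as active.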
+jest.mock('~/lib/utils/url_utility', () => ({
+ getParameterValues: jest.fn().mockImplementation(param => {
+ if (param === 'start') return ['2019-10-01T18:27:47.000Z'];
+ if (param === 'end') return ['2019-10-01T18:57:47.000Z'];
+ return [];
+ }),
+ mergeUrlParams: jest.fn().mockReturnValue('#'),
+}));
+
+describe('dashboard time window', () => {
+ let store;
+ let wrapper;
+ let mock;
+
+ const createComponentWrapperMounted = (props = {}, options = {}) => {
+ wrapper = mount(Dashboard, {
+ propsData: { ...propsData, ...props },
+ store,
+ ...options,
+ });
+ };
+
+ beforeEach(() => {
+ store = createStore();
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ }
+ mock.restore();
+ });
+
+  it('sets the time window from the url parameters as the active dropdown item', done => {
+ mock.onGet(mockApiEndpoint).reply(statusCodes.OK, metricsGroupsAPIResponse);
+
+ createComponentWrapperMounted({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });
+
+ setupComponentStore(wrapper);
+
+ wrapper.vm
+ .$nextTick()
+ .then(() => {
+ const timeWindowDropdownItems = wrapper
+ .find('.js-time-window-dropdown')
+ .findAll(GlDropdownItem);
+ const activeItem = timeWindowDropdownItems.wrappers.filter(itemWrapper =>
+ itemWrapper.find('.active').exists(),
+ );
+
+ expect(activeItem.length).toBe(1);
+
+ done();
+ })
+ .catch(done.fail);
+ });
+});
diff --git a/spec/frontend/monitoring/components/dashboards_dropdown_spec.js b/spec/frontend/monitoring/components/dashboards_dropdown_spec.js
new file mode 100644
index 00000000000..6af5ab4ba75
--- /dev/null
+++ b/spec/frontend/monitoring/components/dashboards_dropdown_spec.js
@@ -0,0 +1,249 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlDropdownItem, GlModal, GlLoadingIcon, GlAlert } from '@gitlab/ui';
+import waitForPromises from 'helpers/wait_for_promises';
+
+import DashboardsDropdown from '~/monitoring/components/dashboards_dropdown.vue';
+import DuplicateDashboardForm from '~/monitoring/components/duplicate_dashboard_form.vue';
+
+import { dashboardGitResponse } from '../mock_data';
+
+const defaultBranch = 'master';
+
+function createComponent(props, opts = {}) {
+ const storeOpts = {
+ methods: {
+ duplicateSystemDashboard: jest.fn(),
+ },
+ computed: {
+ allDashboards: () => dashboardGitResponse,
+ },
+ };
+
+ return shallowMount(DashboardsDropdown, {
+ propsData: {
+ ...props,
+ defaultBranch,
+ },
+ sync: false,
+ ...storeOpts,
+ ...opts,
+ });
+}
+
+describe('DashboardsDropdown', () => {
+ let wrapper;
+
+ const findItems = () => wrapper.findAll(GlDropdownItem);
+ const findItemAt = i => wrapper.findAll(GlDropdownItem).at(i);
+
+ describe('when it receives dashboards data', () => {
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+ it('displays an item for each dashboard', () => {
+ expect(wrapper.findAll(GlDropdownItem).length).toEqual(dashboardGitResponse.length);
+ });
+
+ it('displays items with the dashboard display name', () => {
+ expect(findItemAt(0).text()).toBe(dashboardGitResponse[0].display_name);
+ expect(findItemAt(1).text()).toBe(dashboardGitResponse[1].display_name);
+ expect(findItemAt(2).text()).toBe(dashboardGitResponse[2].display_name);
+ });
+ });
+
+ describe('when a system dashboard is selected', () => {
+ let duplicateDashboardAction;
+ let modalDirective;
+
+ beforeEach(() => {
+ modalDirective = jest.fn();
+ duplicateDashboardAction = jest.fn().mockResolvedValue();
+
+ wrapper = createComponent(
+ {
+ selectedDashboard: dashboardGitResponse[0],
+ },
+ {
+ directives: {
+ GlModal: modalDirective,
+ },
+ methods: {
+ // Mock vuex actions
+ duplicateSystemDashboard: duplicateDashboardAction,
+ },
+ },
+ );
+
+ wrapper.vm.$refs.duplicateDashboardModal.hide = jest.fn();
+ });
+
+ it('displays an item for each dashboard plus a "duplicate dashboard" item', () => {
+ const item = wrapper.findAll({ ref: 'duplicateDashboardItem' });
+
+ expect(findItems().length).toEqual(dashboardGitResponse.length + 1);
+ expect(item.length).toBe(1);
+ });
+
+ describe('modal form', () => {
+ let okEvent;
+
+ const findModal = () => wrapper.find(GlModal);
+ const findAlert = () => wrapper.find(GlAlert);
+
+ beforeEach(() => {
+ okEvent = {
+ preventDefault: jest.fn(),
+ };
+ });
+
+ it('exists and contains a form to duplicate a dashboard', () => {
+ expect(findModal().exists()).toBe(true);
+ expect(findModal().contains(DuplicateDashboardForm)).toBe(true);
+ });
+
+ it('saves a new dashboard', done => {
+ findModal().vm.$emit('ok', okEvent);
+
+ waitForPromises()
+ .then(() => {
+ expect(okEvent.preventDefault).toHaveBeenCalled();
+
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.vm.$refs.duplicateDashboardModal.hide).toHaveBeenCalled();
+ expect(wrapper.emitted().selectDashboard).toBeTruthy();
+ expect(findAlert().exists()).toBe(false);
+ done();
+ })
+ .catch(done.fail);
+ });
+
+      describe('when a new dashboard is saved successfully', () => {
+ const newDashboard = {
+ can_edit: true,
+ default: false,
+ display_name: 'A new dashboard',
+ system_dashboard: false,
+ };
+
+ const submitForm = formVals => {
+ duplicateDashboardAction.mockResolvedValueOnce(newDashboard);
+ findModal()
+ .find(DuplicateDashboardForm)
+ .vm.$emit('change', {
+ dashboard: 'common_metrics.yml',
+ commitMessage: 'A commit message',
+ ...formVals,
+ });
+ findModal().vm.$emit('ok', okEvent);
+ };
+
+ it('to the default branch, redirects to the new dashboard', done => {
+ submitForm({
+ branch: defaultBranch,
+ });
+
+ waitForPromises()
+ .then(() => {
+ expect(wrapper.emitted().selectDashboard[0][0]).toEqual(newDashboard);
+ done();
+ })
+ .catch(done.fail);
+ });
+
+        it('to a new branch, refreshes the current dashboard', done => {
+ submitForm({
+ branch: 'another-branch',
+ });
+
+ waitForPromises()
+ .then(() => {
+ expect(wrapper.emitted().selectDashboard[0][0]).toEqual(dashboardGitResponse[0]);
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
+ it('handles error when a new dashboard is not saved', done => {
+ const errMsg = 'An error occurred';
+
+ duplicateDashboardAction.mockRejectedValueOnce(errMsg);
+ findModal().vm.$emit('ok', okEvent);
+
+ waitForPromises()
+ .then(() => {
+ expect(okEvent.preventDefault).toHaveBeenCalled();
+
+ expect(findAlert().exists()).toBe(true);
+ expect(findAlert().text()).toBe(errMsg);
+
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.vm.$refs.duplicateDashboardModal.hide).not.toHaveBeenCalled();
+
+ done();
+ })
+ .catch(done.fail);
+ });
+
+      it('binds the modal directive value to the modal id', () => {
+ expect(modalDirective).toHaveBeenCalledTimes(1);
+
+ // Binding's second argument contains the modal id
+ expect(modalDirective.mock.calls[0][1]).toEqual(
+ expect.objectContaining({
+ value: findModal().props('modalId'),
+ }),
+ );
+ });
+
+ it('updates the form on changes', () => {
+ const formVals = {
+ dashboard: 'common_metrics.yml',
+ commitMessage: 'A commit message',
+ };
+
+ findModal()
+ .find(DuplicateDashboardForm)
+ .vm.$emit('change', formVals);
+
+        // The form values emitted by the duplicate form are stored on the component
+ expect(wrapper.vm.form).toEqual(formVals);
+ });
+ });
+ });
+
+ describe('when a custom dashboard is selected', () => {
+ const findModal = () => wrapper.find(GlModal);
+
+ beforeEach(() => {
+ wrapper = createComponent({
+ selectedDashboard: dashboardGitResponse[1],
+ });
+ });
+
+ it('displays an item for each dashboard', () => {
+ const item = wrapper.findAll({ ref: 'duplicateDashboardItem' });
+
+ expect(findItems().length).toEqual(dashboardGitResponse.length);
+ expect(item.length).toBe(0);
+ });
+
+  it('does not render a modal form to duplicate a dashboard', () => {
+ expect(findModal().exists()).toBe(false);
+ });
+ });
+
+ describe('when a dashboard gets selected by the user', () => {
+ beforeEach(() => {
+ wrapper = createComponent();
+ findItemAt(1).vm.$emit('click');
+ });
+
+ it('emits a "selectDashboard" event', () => {
+ expect(wrapper.emitted().selectDashboard).toBeTruthy();
+ });
+ it('emits a "selectDashboard" event with dashboard information', () => {
+ expect(wrapper.emitted().selectDashboard[0]).toEqual([dashboardGitResponse[1]]);
+ });
+ });
+});
diff --git a/spec/frontend/monitoring/components/date_time_picker/date_time_picker_input_spec.js b/spec/frontend/monitoring/components/date_time_picker/date_time_picker_input_spec.js
index 1315e1226a4..9cac63ad725 100644
--- a/spec/frontend/monitoring/components/date_time_picker/date_time_picker_input_spec.js
+++ b/spec/frontend/monitoring/components/date_time_picker/date_time_picker_input_spec.js
@@ -15,7 +15,6 @@ describe('DateTimePickerInput', () => {
label: '',
...propsData,
},
- sync: false,
});
};
@@ -58,8 +57,9 @@ describe('DateTimePickerInput', () => {
it('input event is emitted when focus is lost', () => {
createComponent();
jest.spyOn(wrapper.vm, '$emit');
- wrapper.find('input').setValue(inputValue);
- wrapper.find('input').trigger('blur');
+ const input = wrapper.find('input');
+ input.setValue(inputValue);
+ input.trigger('blur');
expect(wrapper.vm.$emit).toHaveBeenCalledWith('input', inputValue);
});
diff --git a/spec/frontend/monitoring/components/date_time_picker/date_time_picker_spec.js b/spec/frontend/monitoring/components/date_time_picker/date_time_picker_spec.js
index ca05461c8cf..180e41861f4 100644
--- a/spec/frontend/monitoring/components/date_time_picker/date_time_picker_spec.js
+++ b/spec/frontend/monitoring/components/date_time_picker/date_time_picker_spec.js
@@ -3,10 +3,8 @@ import DateTimePicker from '~/monitoring/components/date_time_picker/date_time_p
import { timeWindows } from '~/monitoring/constants';
const timeWindowsCount = Object.keys(timeWindows).length;
-const selectedTimeWindow = {
- start: '2019-10-10T07:00:00.000Z',
- end: '2019-10-13T07:00:00.000Z',
-};
+const start = '2019-10-10T07:00:00.000Z';
+const end = '2019-10-13T07:00:00.000Z';
const selectedTimeWindowText = `3 days`;
describe('DateTimePicker', () => {
@@ -18,17 +16,20 @@ describe('DateTimePicker', () => {
const cancelButtonElement = () => dateTimePicker.find('button.btn-secondary').element;
const fillInputAndBlur = (input, val) => {
dateTimePicker.find(input).setValue(val);
- dateTimePicker.find(input).trigger('blur');
+ return dateTimePicker.vm.$nextTick().then(() => {
+ dateTimePicker.find(input).trigger('blur');
+ return dateTimePicker.vm.$nextTick();
+ });
};
const createComponent = props => {
dateTimePicker = mount(DateTimePicker, {
propsData: {
timeWindows,
- selectedTimeWindow,
+ start,
+ end,
...props,
},
- sync: false,
});
};
@@ -63,10 +64,8 @@ describe('DateTimePicker', () => {
it('renders inputs with h/m/s truncated if its all 0s', done => {
createComponent({
- selectedTimeWindow: {
- start: '2019-10-10T00:00:00.000Z',
- end: '2019-10-14T00:10:00.000Z',
- },
+ start: '2019-10-10T00:00:00.000Z',
+ end: '2019-10-14T00:10:00.000Z',
});
dateTimePicker.vm.$nextTick(() => {
expect(dateTimePicker.find('#custom-time-from').element.value).toBe('2019-10-10');
@@ -95,60 +94,64 @@ describe('DateTimePicker', () => {
});
});
- it('renders a disabled apply button on load', () => {
- createComponent();
+  it('renders a disabled apply button on invalid input', () => {
+ createComponent({
+ start: 'invalid-input-date',
+ });
expect(applyButtonElement().getAttribute('disabled')).toBe('disabled');
});
it('displays inline error message if custom time range inputs are invalid', done => {
createComponent();
- fillInputAndBlur('#custom-time-from', '2019-10-01abc');
- fillInputAndBlur('#custom-time-to', '2019-10-10abc');
-
- dateTimePicker.vm.$nextTick(() => {
- expect(dateTimePicker.findAll('.invalid-feedback').length).toBe(2);
- done();
- });
+ fillInputAndBlur('#custom-time-from', '2019-10-01abc')
+ .then(() => fillInputAndBlur('#custom-time-to', '2019-10-10abc'))
+ .then(() => {
+ expect(dateTimePicker.findAll('.invalid-feedback').length).toBe(2);
+ done();
+ })
+ .catch(done);
});
it('keeps apply button disabled with invalid custom time range inputs', done => {
createComponent();
- fillInputAndBlur('#custom-time-from', '2019-10-01abc');
- fillInputAndBlur('#custom-time-to', '2019-09-19');
-
- dateTimePicker.vm.$nextTick(() => {
- expect(applyButtonElement().getAttribute('disabled')).toBe('disabled');
- done();
- });
+ fillInputAndBlur('#custom-time-from', '2019-10-01abc')
+ .then(() => fillInputAndBlur('#custom-time-to', '2019-09-19'))
+ .then(() => {
+ expect(applyButtonElement().getAttribute('disabled')).toBe('disabled');
+ done();
+ })
+ .catch(done);
});
it('enables apply button with valid custom time range inputs', done => {
createComponent();
- fillInputAndBlur('#custom-time-from', '2019-10-01');
- fillInputAndBlur('#custom-time-to', '2019-10-19');
-
- dateTimePicker.vm.$nextTick(() => {
- expect(applyButtonElement().getAttribute('disabled')).toBeNull();
- done();
- });
+ fillInputAndBlur('#custom-time-from', '2019-10-01')
+ .then(() => fillInputAndBlur('#custom-time-to', '2019-10-19'))
+ .then(() => {
+ expect(applyButtonElement().getAttribute('disabled')).toBeNull();
+ done();
+ })
+ .catch(done.fail);
});
- it('returns an object when apply is clicked', done => {
+ it('emits dates in an object when apply is clicked', done => {
createComponent();
- fillInputAndBlur('#custom-time-from', '2019-10-01');
- fillInputAndBlur('#custom-time-to', '2019-10-19');
-
- dateTimePicker.vm.$nextTick(() => {
- jest.spyOn(dateTimePicker.vm, '$emit');
- applyButtonElement().click();
-
- expect(dateTimePicker.vm.$emit).toHaveBeenCalledWith('onApply', {
- end: '2019-10-19T00:00:00Z',
- start: '2019-10-01T00:00:00Z',
- });
- done();
- });
+ fillInputAndBlur('#custom-time-from', '2019-10-01')
+ .then(() => fillInputAndBlur('#custom-time-to', '2019-10-19'))
+ .then(() => {
+ applyButtonElement().click();
+
+ expect(dateTimePicker.emitted().apply).toHaveLength(1);
+ expect(dateTimePicker.emitted().apply[0]).toEqual([
+ {
+ end: '2019-10-19T00:00:00Z',
+ start: '2019-10-01T00:00:00Z',
+ },
+ ]);
+ done();
+ })
+ .catch(done.fail);
});
it('hides the popover with cancel button', done => {
diff --git a/spec/frontend/monitoring/components/duplicate_dashboard_form_spec.js b/spec/frontend/monitoring/components/duplicate_dashboard_form_spec.js
new file mode 100644
index 00000000000..75a488b5c7b
--- /dev/null
+++ b/spec/frontend/monitoring/components/duplicate_dashboard_form_spec.js
@@ -0,0 +1,153 @@
+import { mount } from '@vue/test-utils';
+import DuplicateDashboardForm from '~/monitoring/components/duplicate_dashboard_form.vue';
+
+import { dashboardGitResponse } from '../mock_data';
+
+describe('DuplicateDashboardForm', () => {
+ let wrapper;
+
+ const defaultBranch = 'master';
+
+ const findByRef = ref => wrapper.find({ ref });
+ const setValue = (ref, val) => {
+ findByRef(ref).setValue(val);
+ };
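+  // Simulates a user picking a radio option: check the input, then fire the
+  // click and change events a real browser would dispatch.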
+ const setChecked = value => {
+ const input = wrapper.find(`.form-check-input[value="${value}"]`);
+ input.element.checked = true;
+ input.trigger('click');
+ input.trigger('change');
+ };
+
+ beforeEach(() => {
+ // Use `mount` to render native input elements
+ wrapper = mount(DuplicateDashboardForm, {
+ propsData: {
+ dashboard: dashboardGitResponse[0],
+ defaultBranch,
+ },
+ sync: false,
+ });
+ });
+
+ it('renders correctly', () => {
+ expect(wrapper.exists()).toEqual(true);
+ });
+
+ it('renders form elements', () => {
+ expect(findByRef('fileName').exists()).toEqual(true);
+ expect(findByRef('branchName').exists()).toEqual(true);
+ expect(findByRef('branchOption').exists()).toEqual(true);
+ expect(findByRef('commitMessage').exists()).toEqual(true);
+ });
+
+ describe('validates the file name', () => {
+ const findInvalidFeedback = () => findByRef('fileNameFormGroup').find('.invalid-feedback');
+
+    it('when it is empty', done => {
+ setValue('fileName', '');
+ wrapper.vm.$nextTick(() => {
+ expect(findByRef('fileNameFormGroup').is('.is-valid')).toBe(true);
+ expect(findInvalidFeedback().exists()).toBe(false);
+ done();
+ });
+ });
+
+    it('when it is valid', done => {
+ setValue('fileName', 'my_dashboard.yml');
+ wrapper.vm.$nextTick(() => {
+ expect(findByRef('fileNameFormGroup').is('.is-valid')).toBe(true);
+ expect(findInvalidFeedback().exists()).toBe(false);
+ done();
+ });
+ });
+
+    it('when it is not valid', done => {
+ setValue('fileName', 'my_dashboard.exe');
+ wrapper.vm.$nextTick(() => {
+ expect(findByRef('fileNameFormGroup').is('.is-invalid')).toBe(true);
+ expect(findInvalidFeedback().text()).toBeTruthy();
+ done();
+ });
+ });
+ });
+
+ describe('emits `change` event', () => {
+ const lastChange = () =>
+ wrapper.vm.$nextTick().then(() => {
+ wrapper.find('form').trigger('change');
+
+ // Resolves to the last emitted change
+ const changes = wrapper.emitted().change;
+ return changes[changes.length - 1][0];
+ });
+
+    it('with the initial form values', () => {
+ expect(wrapper.emitted().change).toHaveLength(1);
+ expect(lastChange()).resolves.toEqual({
+ branch: '',
+ commitMessage: expect.any(String),
+ dashboard: dashboardGitResponse[0].path,
+ fileName: 'common_metrics.yml',
+ });
+ });
+
+ it('containing an inputted file name', () => {
+ setValue('fileName', 'my_dashboard.yml');
+
+ expect(lastChange()).resolves.toMatchObject({
+ fileName: 'my_dashboard.yml',
+ });
+ });
+
+ it('containing a default commit message when no message is set', () => {
+ setValue('commitMessage', '');
+
+ expect(lastChange()).resolves.toMatchObject({
+ commitMessage: expect.stringContaining('Create custom dashboard'),
+ });
+ });
+
+ it('containing an inputted commit message', () => {
+ setValue('commitMessage', 'My commit message');
+
+ expect(lastChange()).resolves.toMatchObject({
+ commitMessage: expect.stringContaining('My commit message'),
+ });
+ });
+
+ it('containing an inputted branch name', () => {
+ setValue('branchName', 'a-new-branch');
+
+ expect(lastChange()).resolves.toMatchObject({
+ branch: 'a-new-branch',
+ });
+ });
+
+ it('when a `default` branch option is set, branch input is invisible and ignored', done => {
+ setChecked(wrapper.vm.$options.radioVals.DEFAULT);
+ setValue('branchName', 'a-new-branch');
+
+ expect(lastChange()).resolves.toMatchObject({
+ branch: defaultBranch,
+ });
+ wrapper.vm.$nextTick(() => {
+ expect(findByRef('branchName').isVisible()).toBe(false);
+ done();
+ });
+ });
+
+ it('when `new` branch option is chosen, focuses on the branch name input', done => {
+ setChecked(wrapper.vm.$options.radioVals.NEW);
+
+ wrapper.vm
+ .$nextTick()
+ .then(() => {
+ wrapper.find('form').trigger('change');
+ expect(findByRef('branchName').is(':focus')).toBe(true);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+});
diff --git a/spec/javascripts/monitoring/components/graph_group_spec.js b/spec/frontend/monitoring/components/graph_group_spec.js
index 43ca17c3cbc..983785d0ecc 100644
--- a/spec/javascripts/monitoring/components/graph_group_spec.js
+++ b/spec/frontend/monitoring/components/graph_group_spec.js
@@ -1,9 +1,7 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import GraphGroup from '~/monitoring/components/graph_group.vue';
import Icon from '~/vue_shared/components/icon.vue';
-const localVue = createLocalVue();
-
describe('Graph group component', () => {
let wrapper;
@@ -12,10 +10,8 @@ describe('Graph group component', () => {
const findCaretIcon = () => wrapper.find(Icon);
const createComponent = propsData => {
- wrapper = shallowMount(localVue.extend(GraphGroup), {
+ wrapper = shallowMount(GraphGroup, {
propsData,
- sync: false,
- localVue,
});
};
diff --git a/spec/frontend/monitoring/init_utils.js b/spec/frontend/monitoring/init_utils.js
new file mode 100644
index 00000000000..5f229cb6ee5
--- /dev/null
+++ b/spec/frontend/monitoring/init_utils.js
@@ -0,0 +1,57 @@
+import * as types from '~/monitoring/stores/mutation_types';
+import {
+ metricsGroupsAPIResponse,
+ mockedEmptyResult,
+ mockedQueryResultPayload,
+ mockedQueryResultPayloadCoresTotal,
+ mockApiEndpoint,
+ environmentData,
+} from './mock_data';
+
+export const propsData = {
+ hasMetrics: false,
+ documentationPath: '/path/to/docs',
+ settingsPath: '/path/to/settings',
+ clustersPath: '/path/to/clusters',
+ tagsPath: '/path/to/tags',
+ projectPath: '/path/to/project',
+ defaultBranch: 'master',
+ metricsEndpoint: mockApiEndpoint,
+ deploymentsEndpoint: null,
+ emptyGettingStartedSvgPath: '/path/to/getting-started.svg',
+ emptyLoadingSvgPath: '/path/to/loading.svg',
+ emptyNoDataSvgPath: '/path/to/no-data.svg',
+ emptyNoDataSmallSvgPath: '/path/to/no-data-small.svg',
+ emptyUnableToConnectSvgPath: '/path/to/unable-to-connect.svg',
+ environmentsEndpoint: '/root/hello-prometheus/environments/35',
+ currentEnvironmentName: 'production',
+ customMetricsAvailable: false,
+ customMetricsPath: '',
+ validateQueryPath: '',
+};
+
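+// Commits the store mutations a loaded dashboard would have received, so
+// specs can start from a dashboard populated with the mocked metrics data.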
+export const setupComponentStore = wrapper => {
+ wrapper.vm.$store.commit(
+ `monitoringDashboard/${types.RECEIVE_METRICS_DATA_SUCCESS}`,
+ metricsGroupsAPIResponse,
+ );
+
+ // Load 3 panels to the dashboard, one with an empty result
+ wrapper.vm.$store.commit(
+ `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
+ mockedEmptyResult,
+ );
+ wrapper.vm.$store.commit(
+ `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
+ mockedQueryResultPayload,
+ );
+ wrapper.vm.$store.commit(
+ `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
+ mockedQueryResultPayloadCoresTotal,
+ );
+
+ wrapper.vm.$store.commit(
+ `monitoringDashboard/${types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS}`,
+ environmentData,
+ );
+};
diff --git a/spec/frontend/monitoring/mock_data.js b/spec/frontend/monitoring/mock_data.js
index 6ded22b4a3f..8ed0e232775 100644
--- a/spec/frontend/monitoring/mock_data.js
+++ b/spec/frontend/monitoring/mock_data.js
@@ -331,77 +331,80 @@ export const mockedQueryResultPayloadCoresTotal = {
],
};
-export const metricsGroupsAPIResponse = [
- {
- group: 'Response metrics (NGINX Ingress VTS)',
- priority: 10,
- panels: [
- {
- metrics: [
- {
- id: 'response_metrics_nginx_ingress_throughput_status_code',
- label: 'Status Code',
- metric_id: 1,
- prometheus_endpoint_path:
- '/root/autodevops-deploy/environments/32/prometheus/api/v1/query_range?query=sum%28rate%28nginx_upstream_responses_total%7Bupstream%3D~%22%25%7Bkube_namespace%7D-%25%7Bci_environment_slug%7D-.%2A%22%7D%5B2m%5D%29%29+by+%28status_code%29',
- query_range:
- 'sum(rate(nginx_upstream_responses_total{upstream=~"%{kube_namespace}-%{ci_environment_slug}-.*"}[2m])) by (status_code)',
- unit: 'req / sec',
- },
- ],
- title: 'Throughput',
- type: 'area-chart',
- weight: 1,
- y_label: 'Requests / Sec',
- },
- ],
- },
- {
- group: 'System metrics (Kubernetes)',
- priority: 5,
- panels: [
- {
- title: 'Memory Usage (Pod average)',
- type: 'area-chart',
- y_label: 'Memory Used per Pod',
- weight: 2,
- metrics: [
- {
- id: 'system_metrics_kubernetes_container_memory_average',
- query_range:
- 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024',
- label: 'Pod average',
- unit: 'MB',
- metric_id: 17,
- prometheus_endpoint_path:
- '/root/autodevops-deploy/environments/32/prometheus/api/v1/query_range?query=avg%28sum%28container_memory_usage_bytes%7Bcontainer_name%21%3D%22POD%22%2Cpod_name%3D~%22%5E%25%7Bci_environment_slug%7D-%28%5B%5Ec%5D.%2A%7Cc%28%5B%5Ea%5D%7Ca%28%5B%5En%5D%7Cn%28%5B%5Ea%5D%7Ca%28%5B%5Er%5D%7Cr%5B%5Ey%5D%29%29%29%29.%2A%7C%29-%28.%2A%29%22%2Cnamespace%3D%22%25%7Bkube_namespace%7D%22%7D%29+by+%28job%29%29+without+%28job%29+%2F+count%28avg%28container_memory_usage_bytes%7Bcontainer_name%21%3D%22POD%22%2Cpod_name%3D~%22%5E%25%7Bci_environment_slug%7D-%28%5B%5Ec%5D.%2A%7Cc%28%5B%5Ea%5D%7Ca%28%5B%5En%5D%7Cn%28%5B%5Ea%5D%7Ca%28%5B%5Er%5D%7Cr%5B%5Ey%5D%29%29%29%29.%2A%7C%29-%28.%2A%29%22%2Cnamespace%3D%22%25%7Bkube_namespace%7D%22%7D%29+without+%28job%29%29+%2F1024%2F1024',
- appearance: {
- line: {
- width: 2,
+export const metricsGroupsAPIResponse = {
+ dashboard: 'Environment metrics',
+ panel_groups: [
+ {
+ group: 'Response metrics (NGINX Ingress VTS)',
+ priority: 10,
+ panels: [
+ {
+ metrics: [
+ {
+ id: 'response_metrics_nginx_ingress_throughput_status_code',
+ label: 'Status Code',
+ metric_id: 1,
+ prometheus_endpoint_path:
+ '/root/autodevops-deploy/environments/32/prometheus/api/v1/query_range?query=sum%28rate%28nginx_upstream_responses_total%7Bupstream%3D~%22%25%7Bkube_namespace%7D-%25%7Bci_environment_slug%7D-.%2A%22%7D%5B2m%5D%29%29+by+%28status_code%29',
+ query_range:
+ 'sum(rate(nginx_upstream_responses_total{upstream=~"%{kube_namespace}-%{ci_environment_slug}-.*"}[2m])) by (status_code)',
+ unit: 'req / sec',
+ },
+ ],
+ title: 'Throughput',
+ type: 'area-chart',
+ weight: 1,
+ y_label: 'Requests / Sec',
+ },
+ ],
+ },
+ {
+ group: 'System metrics (Kubernetes)',
+ priority: 5,
+ panels: [
+ {
+ title: 'Memory Usage (Pod average)',
+ type: 'area-chart',
+ y_label: 'Memory Used per Pod',
+ weight: 2,
+ metrics: [
+ {
+ id: 'system_metrics_kubernetes_container_memory_average',
+ query_range:
+ 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024',
+ label: 'Pod average',
+ unit: 'MB',
+ metric_id: 17,
+ prometheus_endpoint_path:
+ '/root/autodevops-deploy/environments/32/prometheus/api/v1/query_range?query=avg%28sum%28container_memory_usage_bytes%7Bcontainer_name%21%3D%22POD%22%2Cpod_name%3D~%22%5E%25%7Bci_environment_slug%7D-%28%5B%5Ec%5D.%2A%7Cc%28%5B%5Ea%5D%7Ca%28%5B%5En%5D%7Cn%28%5B%5Ea%5D%7Ca%28%5B%5Er%5D%7Cr%5B%5Ey%5D%29%29%29%29.%2A%7C%29-%28.%2A%29%22%2Cnamespace%3D%22%25%7Bkube_namespace%7D%22%7D%29+by+%28job%29%29+without+%28job%29+%2F+count%28avg%28container_memory_usage_bytes%7Bcontainer_name%21%3D%22POD%22%2Cpod_name%3D~%22%5E%25%7Bci_environment_slug%7D-%28%5B%5Ec%5D.%2A%7Cc%28%5B%5Ea%5D%7Ca%28%5B%5En%5D%7Cn%28%5B%5Ea%5D%7Ca%28%5B%5Er%5D%7Cr%5B%5Ey%5D%29%29%29%29.%2A%7C%29-%28.%2A%29%22%2Cnamespace%3D%22%25%7Bkube_namespace%7D%22%7D%29+without+%28job%29%29+%2F1024%2F1024',
+ appearance: {
+ line: {
+ width: 2,
+ },
},
},
- },
- ],
- },
- {
- title: 'Core Usage (Total)',
- type: 'area-chart',
- y_label: 'Total Cores',
- weight: 3,
- metrics: [
- {
- id: 'system_metrics_kubernetes_container_cores_total',
- query_range:
- 'avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job)',
- label: 'Total',
- unit: 'cores',
- metric_id: 13,
- },
- ],
- },
- ],
- },
-];
+ ],
+ },
+ {
+ title: 'Core Usage (Total)',
+ type: 'area-chart',
+ y_label: 'Total Cores',
+ weight: 3,
+ metrics: [
+ {
+ id: 'system_metrics_kubernetes_container_cores_total',
+ query_range:
+ 'avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job)',
+ label: 'Total',
+ unit: 'cores',
+ metric_id: 13,
+ },
+ ],
+ },
+ ],
+ },
+ ],
+};
export const environmentData = [
{
@@ -519,6 +522,7 @@ export const dashboardGitResponse = [
default: true,
display_name: 'Default',
can_edit: false,
+ system_dashboard: true,
project_blob_path: null,
path: 'config/prometheus/common_metrics.yml',
},
@@ -526,6 +530,7 @@ export const dashboardGitResponse = [
default: false,
display_name: 'Custom Dashboard 1',
can_edit: true,
+ system_dashboard: false,
project_blob_path: `${mockProjectDir}/blob/master/dashboards/.gitlab/dashboards/dashboard_1.yml`,
path: '.gitlab/dashboards/dashboard_1.yml',
},
@@ -533,6 +538,7 @@ export const dashboardGitResponse = [
default: false,
display_name: 'Custom Dashboard 2',
can_edit: true,
+ system_dashboard: false,
project_blob_path: `${mockProjectDir}/blob/master/dashboards/.gitlab/dashboards/dashboard_2.yml`,
path: '.gitlab/dashboards/dashboard_2.yml',
},
diff --git a/spec/frontend/monitoring/panel_type_spec.js b/spec/frontend/monitoring/panel_type_spec.js
index c869d77673e..e51b69ef14d 100644
--- a/spec/frontend/monitoring/panel_type_spec.js
+++ b/spec/frontend/monitoring/panel_type_spec.js
@@ -26,8 +26,6 @@ describe('Panel Type component', () => {
...props,
},
store,
- sync: false,
- attachToDocument: true,
});
beforeEach(() => {
@@ -152,8 +150,6 @@ describe('Panel Type component', () => {
graphData: graphDataPrometheusQueryRange,
},
store,
- sync: false,
- attachToDocument: true,
});
panelType.vm.$nextTick(done);
});
diff --git a/spec/javascripts/monitoring/shared/prometheus_header_spec.js b/spec/frontend/monitoring/shared/prometheus_header_spec.js
index 9f916a4dfbb..b216bfb72d8 100644
--- a/spec/javascripts/monitoring/shared/prometheus_header_spec.js
+++ b/spec/frontend/monitoring/shared/prometheus_header_spec.js
@@ -18,7 +18,7 @@ describe('Prometheus Header component', () => {
describe('Prometheus header component', () => {
it('should show a title', () => {
- const title = prometheusHeader.vm.$el.querySelector('.js-graph-title').textContent;
+ const title = prometheusHeader.find({ ref: 'title' }).text();
expect(title).toBe('graph header');
});
diff --git a/spec/frontend/monitoring/store/actions_spec.js b/spec/frontend/monitoring/store/actions_spec.js
index f38bd4384e2..975bdd3a27a 100644
--- a/spec/frontend/monitoring/store/actions_spec.js
+++ b/spec/frontend/monitoring/store/actions_spec.js
@@ -18,6 +18,7 @@ import {
fetchPrometheusMetric,
setEndpoints,
setGettingStartedEmptyState,
+ duplicateSystemDashboard,
} from '~/monitoring/stores/actions';
import storeState from '~/monitoring/stores/state';
import {
@@ -298,7 +299,7 @@ describe('Monitoring store actions', () => {
);
expect(commit).toHaveBeenCalledWith(
types.RECEIVE_METRICS_DATA_SUCCESS,
- metricsDashboardResponse.dashboard.panel_groups,
+ metricsDashboardResponse.dashboard,
);
expect(dispatch).toHaveBeenCalledWith('fetchPrometheusMetrics', params);
});
@@ -441,7 +442,7 @@ describe('Monitoring store actions', () => {
beforeEach(() => {
state = storeState();
[metric] = metricsDashboardResponse.dashboard.panel_groups[0].panels[0].metrics;
- [data] = metricsGroupsAPIResponse[0].panels[0].metrics;
+ [data] = metricsGroupsAPIResponse.panel_groups[0].panels[0].metrics;
});
it('commits result', done => {
@@ -544,4 +545,85 @@ describe('Monitoring store actions', () => {
});
});
});
+
+ describe('duplicateSystemDashboard', () => {
+ let state;
+
+ beforeEach(() => {
+ state = storeState();
+ state.dashboardsEndpoint = '/dashboards.json';
+ });
+
+    it('Successful POST request resolves', done => {
+ mock.onPost(state.dashboardsEndpoint).reply(statusCodes.CREATED, {
+ dashboard: dashboardGitResponse[1],
+ });
+
+ testAction(duplicateSystemDashboard, {}, state, [], [])
+ .then(() => {
+ expect(mock.history.post).toHaveLength(1);
+ done();
+ })
+ .catch(done.fail);
+ });
+
+    it('Successful POST request resolves to a dashboard', done => {
+ const mockCreatedDashboard = dashboardGitResponse[1];
+
+ const params = {
+ dashboard: 'my-dashboard',
+ fileName: 'file-name.yml',
+ branch: 'my-new-branch',
+ commitMessage: 'A new commit message',
+ };
+
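+      // The action converts the camelCase params above into the snake_case
+      // payload the dashboards endpoint expects.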
+ const expectedPayload = JSON.stringify({
+ dashboard: 'my-dashboard',
+ file_name: 'file-name.yml',
+ branch: 'my-new-branch',
+ commit_message: 'A new commit message',
+ });
+
+ mock.onPost(state.dashboardsEndpoint).reply(statusCodes.CREATED, {
+ dashboard: mockCreatedDashboard,
+ });
+
+ testAction(duplicateSystemDashboard, params, state, [], [])
+ .then(result => {
+ expect(mock.history.post).toHaveLength(1);
+ expect(mock.history.post[0].data).toEqual(expectedPayload);
+ expect(result).toEqual(mockCreatedDashboard);
+
+ done();
+ })
+ .catch(done.fail);
+ });
+
+ it('Failed POST request throws an error', done => {
+ mock.onPost(state.dashboardsEndpoint).reply(statusCodes.BAD_REQUEST);
+
+ testAction(duplicateSystemDashboard, {}, state, [], []).catch(err => {
+ expect(mock.history.post).toHaveLength(1);
+ expect(err).toEqual(expect.any(String));
+
+ done();
+ });
+ });
+
+ it('Failed POST request throws an error with a description', done => {
+ const backendErrorMsg = 'This file already exists!';
+
+ mock.onPost(state.dashboardsEndpoint).reply(statusCodes.BAD_REQUEST, {
+ error: backendErrorMsg,
+ });
+
+ testAction(duplicateSystemDashboard, {}, state, [], []).catch(err => {
+ expect(mock.history.post).toHaveLength(1);
+ expect(err).toEqual(expect.any(String));
+ expect(err).toEqual(expect.stringContaining(backendErrorMsg));
+
+ done();
+ });
+ });
+ });
});
diff --git a/spec/frontend/monitoring/store/mutations_spec.js b/spec/frontend/monitoring/store/mutations_spec.js
index 60107a03674..cb53ab60bdb 100644
--- a/spec/frontend/monitoring/store/mutations_spec.js
+++ b/spec/frontend/monitoring/store/mutations_spec.js
@@ -29,8 +29,8 @@ describe('Monitoring mutations', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, payload);
const groups = getGroups();
- expect(groups[0].key).toBe('response-metrics-nginx-ingress-vts--0');
- expect(groups[1].key).toBe('system-metrics-kubernetes--1');
+ expect(groups[0].key).toBe('response-metrics-nginx-ingress-vts-0');
+ expect(groups[1].key).toBe('system-metrics-kubernetes-1');
});
it('normalizes values', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, payload);
@@ -100,12 +100,12 @@ describe('Monitoring mutations', () => {
values: [[0, 1], [1, 1], [1, 3]],
},
];
- const dashboardGroups = metricsDashboardResponse.dashboard.panel_groups;
+ const { dashboard } = metricsDashboardResponse;
const getMetric = () => stateCopy.dashboard.panel_groups[0].panels[0].metrics[0];
describe('REQUEST_METRIC_RESULT', () => {
beforeEach(() => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, dashboardGroups);
+ mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, dashboard);
});
it('stores a loading state on a metric', () => {
expect(stateCopy.showEmptyState).toBe(true);
@@ -128,7 +128,7 @@ describe('Monitoring mutations', () => {
describe('RECEIVE_METRIC_RESULT_SUCCESS', () => {
beforeEach(() => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, dashboardGroups);
+ mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, dashboard);
});
it('clears empty state', () => {
expect(stateCopy.showEmptyState).toBe(true);
@@ -161,7 +161,7 @@ describe('Monitoring mutations', () => {
describe('RECEIVE_METRIC_RESULT_FAILURE', () => {
beforeEach(() => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, dashboardGroups);
+ mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, dashboard);
});
it('maintains the loading state when a metric fails', () => {
expect(stateCopy.showEmptyState).toBe(true);
diff --git a/spec/frontend/mr_popover/__snapshots__/mr_popover_spec.js.snap b/spec/frontend/mr_popover/__snapshots__/mr_popover_spec.js.snap
index a2a7d0ee91e..3229492506a 100644
--- a/spec/frontend/mr_popover/__snapshots__/mr_popover_spec.js.snap
+++ b/spec/frontend/mr_popover/__snapshots__/mr_popover_spec.js.snap
@@ -1,7 +1,7 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`MR Popover loaded state matches the snapshot 1`] = `
-<glpopover-stub
+<gl-popover-stub
boundary="viewport"
cssclasses=""
placement="top"
@@ -35,7 +35,7 @@ exports[`MR Popover loaded state matches the snapshot 1`] = `
</span>
</div>
- <ciicon-stub
+ <ci-icon-stub
cssclasses=""
size="16"
status="[object Object]"
@@ -56,11 +56,11 @@ exports[`MR Popover loaded state matches the snapshot 1`] = `
</div>
</div>
-</glpopover-stub>
+</gl-popover-stub>
`;
exports[`MR Popover shows skeleton-loader while apollo is loading 1`] = `
-<glpopover-stub
+<gl-popover-stub
boundary="viewport"
cssclasses=""
placement="top"
@@ -71,7 +71,7 @@ exports[`MR Popover shows skeleton-loader while apollo is loading 1`] = `
class="mr-popover"
>
<div>
- <glskeletonloading-stub
+ <gl-skeleton-loading-stub
class="animation-container-small mt-1"
lines="1"
/>
@@ -91,5 +91,5 @@ exports[`MR Popover shows skeleton-loader while apollo is loading 1`] = `
</div>
</div>
-</glpopover-stub>
+</gl-popover-stub>
`;
diff --git a/spec/frontend/mr_popover/mr_popover_spec.js b/spec/frontend/mr_popover/mr_popover_spec.js
index e72b729f056..0c0d4c73d91 100644
--- a/spec/frontend/mr_popover/mr_popover_spec.js
+++ b/spec/frontend/mr_popover/mr_popover_spec.js
@@ -1,5 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import MRPopover from '~/mr_popover/components/mr_popover';
+import CiIcon from '~/vue_shared/components/ci_icon.vue';
describe('MR Popover', () => {
let wrapper;
@@ -23,7 +24,9 @@ describe('MR Popover', () => {
it('shows skeleton-loader while apollo is loading', () => {
wrapper.vm.$apollo.loading = true;
- expect(wrapper.element).toMatchSnapshot();
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
});
describe('loaded state', () => {
@@ -41,7 +44,9 @@ describe('MR Popover', () => {
},
});
- expect(wrapper.element).toMatchSnapshot();
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
});
it('does not show CI Icon if there is no pipeline data', () => {
@@ -55,7 +60,9 @@ describe('MR Popover', () => {
},
});
- expect(wrapper.contains('ciicon-stub')).toBe(false);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.contains(CiIcon)).toBe(false);
+ });
});
});
});
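Two changes recur in this spec: assertions that follow a state change move inside wrapper.vm.$nextTick() so they run against the re-rendered DOM, and the CI icon is located through the imported component rather than a 'ciicon-stub' tag name. A self-contained sketch of both patterns, using stand-in components rather than the real MR popover:

import { mount } from '@vue/test-utils';

const CiIcon = { name: 'CiIcon', render: h => h('span', { class: 'ci-icon' }) };
const MiniPopover = {
  data: () => ({ loading: true }),
  render(h) {
    return h('div', [this.loading ? h('span', 'loading') : h(CiIcon)]);
  },
};

it('asserts only after the DOM has re-rendered', () => {
  const wrapper = mount(MiniPopover);

  wrapper.vm.loading = false;

  // Returning the promise makes Jest wait for the re-render before ending the test.
  return wrapper.vm.$nextTick().then(() => {
    expect(wrapper.contains(CiIcon)).toBe(true);
  });
});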
diff --git a/spec/javascripts/namespace_select_spec.js b/spec/frontend/namespace_select_spec.js
index 07b82ce721e..399fa950769 100644
--- a/spec/javascripts/namespace_select_spec.js
+++ b/spec/frontend/namespace_select_spec.js
@@ -3,7 +3,7 @@ import NamespaceSelect from '~/namespace_select';
describe('NamespaceSelect', () => {
beforeEach(() => {
- spyOn($.fn, 'glDropdown');
+ jest.spyOn($.fn, 'glDropdown').mockImplementation(() => {});
});
it('initializes glDropdown', () => {
@@ -22,12 +22,12 @@ describe('NamespaceSelect', () => {
const dropdown = document.createElement('div');
// eslint-disable-next-line no-new
new NamespaceSelect({ dropdown });
- [glDropdownOptions] = $.fn.glDropdown.calls.argsFor(0);
+ [[glDropdownOptions]] = $.fn.glDropdown.mock.calls;
});
it('prevents click events', () => {
const dummyEvent = new Event('dummy');
- spyOn(dummyEvent, 'preventDefault');
+ jest.spyOn(dummyEvent, 'preventDefault').mockImplementation(() => {});
glDropdownOptions.clicked({ e: dummyEvent });
@@ -43,12 +43,12 @@ describe('NamespaceSelect', () => {
dropdown.dataset.isFilter = 'true';
// eslint-disable-next-line no-new
new NamespaceSelect({ dropdown });
- [glDropdownOptions] = $.fn.glDropdown.calls.argsFor(0);
+ [[glDropdownOptions]] = $.fn.glDropdown.mock.calls;
});
it('does not prevent click events', () => {
const dummyEvent = new Event('dummy');
- spyOn(dummyEvent, 'preventDefault');
+ jest.spyOn(dummyEvent, 'preventDefault').mockImplementation(() => {});
glDropdownOptions.clicked({ e: dummyEvent });
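The spy migration repeated across these files, shown side by side on a throwaway object (the names are illustrative):

it('replaces Jasmine spies with jest.spyOn', () => {
  const dropdown = { open: () => {} };

  // Jasmine: spyOn(dropdown, 'open') stubbed the method by default.
  // Jest: the mock implementation must be given explicitly, otherwise the original runs.
  jest.spyOn(dropdown, 'open').mockImplementation(() => {});

  dropdown.open('options');

  expect(dropdown.open).toHaveBeenCalledTimes(1);

  // Jasmine's calls.argsFor(0) becomes mock.calls[0].
  const [[firstArg]] = dropdown.open.mock.calls;
  expect(firstArg).toBe('options');
});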
diff --git a/spec/javascripts/new_branch_spec.js b/spec/frontend/new_branch_spec.js
index 4e3140ce4f1..cff7ec1a9ee 100644
--- a/spec/javascripts/new_branch_spec.js
+++ b/spec/frontend/new_branch_spec.js
@@ -1,8 +1,14 @@
import $ from 'jquery';
import NewBranchForm from '~/new_branch_form';
-describe('Branch', function() {
- describe('create a new branch', function() {
+describe('Branch', () => {
+ let testContext;
+
+ beforeEach(() => {
+ testContext = {};
+ });
+
+ describe('create a new branch', () => {
preloadFixtures('branches/new_branch.html');
function fillNameWith(value) {
@@ -15,30 +21,28 @@ describe('Branch', function() {
expect($('.js-branch-name-error span').text()).toEqual(error);
}
- beforeEach(function() {
+ beforeEach(() => {
loadFixtures('branches/new_branch.html');
- $('form').on('submit', function(e) {
- return e.preventDefault();
- });
- this.form = new NewBranchForm($('.js-create-branch-form'), []);
+ $('form').on('submit', e => e.preventDefault());
+ testContext.form = new NewBranchForm($('.js-create-branch-form'), []);
});
- it("can't start with a dot", function() {
+ it("can't start with a dot", () => {
fillNameWith('.foo');
expectToHaveError("can't start with '.'");
});
- it("can't start with a slash", function() {
+ it("can't start with a slash", () => {
fillNameWith('/foo');
expectToHaveError("can't start with '/'");
});
- it("can't have two consecutive dots", function() {
+ it("can't have two consecutive dots", () => {
fillNameWith('foo..bar');
expectToHaveError("can't contain '..'");
});
- it("can't have spaces anywhere", function() {
+ it("can't have spaces anywhere", () => {
fillNameWith(' foo');
expectToHaveError("can't contain spaces");
fillNameWith('foo bar');
@@ -47,7 +51,7 @@ describe('Branch', function() {
expectToHaveError("can't contain spaces");
});
- it("can't have ~ anywhere", function() {
+ it("can't have ~ anywhere", () => {
fillNameWith('~foo');
expectToHaveError("can't contain '~'");
fillNameWith('foo~bar');
@@ -56,7 +60,7 @@ describe('Branch', function() {
expectToHaveError("can't contain '~'");
});
- it("can't have tilde anwhere", function() {
+    it("can't have tilde anywhere", () => {
fillNameWith('~foo');
expectToHaveError("can't contain '~'");
fillNameWith('foo~bar');
@@ -65,7 +69,7 @@ describe('Branch', function() {
expectToHaveError("can't contain '~'");
});
- it("can't have caret anywhere", function() {
+ it("can't have caret anywhere", () => {
fillNameWith('^foo');
expectToHaveError("can't contain '^'");
fillNameWith('foo^bar');
@@ -74,7 +78,7 @@ describe('Branch', function() {
expectToHaveError("can't contain '^'");
});
- it("can't have : anywhere", function() {
+ it("can't have : anywhere", () => {
fillNameWith(':foo');
expectToHaveError("can't contain ':'");
fillNameWith('foo:bar');
@@ -83,7 +87,7 @@ describe('Branch', function() {
expectToHaveError("can't contain ':'");
});
- it("can't have question mark anywhere", function() {
+ it("can't have question mark anywhere", () => {
fillNameWith('?foo');
expectToHaveError("can't contain '?'");
fillNameWith('foo?bar');
@@ -92,7 +96,7 @@ describe('Branch', function() {
expectToHaveError("can't contain '?'");
});
- it("can't have asterisk anywhere", function() {
+ it("can't have asterisk anywhere", () => {
fillNameWith('*foo');
expectToHaveError("can't contain '*'");
fillNameWith('foo*bar');
@@ -101,7 +105,7 @@ describe('Branch', function() {
expectToHaveError("can't contain '*'");
});
- it("can't have open bracket anywhere", function() {
+ it("can't have open bracket anywhere", () => {
fillNameWith('[foo');
expectToHaveError("can't contain '['");
fillNameWith('foo[bar');
@@ -110,7 +114,7 @@ describe('Branch', function() {
expectToHaveError("can't contain '['");
});
- it("can't have a backslash anywhere", function() {
+ it("can't have a backslash anywhere", () => {
fillNameWith('\\foo');
expectToHaveError("can't contain '\\'");
fillNameWith('foo\\bar');
@@ -119,7 +123,7 @@ describe('Branch', function() {
expectToHaveError("can't contain '\\'");
});
- it("can't contain a sequence @{ anywhere", function() {
+ it("can't contain a sequence @{ anywhere", () => {
fillNameWith('@{foo');
expectToHaveError("can't contain '@{'");
fillNameWith('foo@{bar');
@@ -128,42 +132,42 @@ describe('Branch', function() {
expectToHaveError("can't contain '@{'");
});
- it("can't have consecutive slashes", function() {
+ it("can't have consecutive slashes", () => {
fillNameWith('foo//bar');
expectToHaveError("can't contain consecutive slashes");
});
- it("can't end with a slash", function() {
+ it("can't end with a slash", () => {
fillNameWith('foo/');
expectToHaveError("can't end in '/'");
});
- it("can't end with a dot", function() {
+ it("can't end with a dot", () => {
fillNameWith('foo.');
expectToHaveError("can't end in '.'");
});
- it("can't end with .lock", function() {
+ it("can't end with .lock", () => {
fillNameWith('foo.lock');
expectToHaveError("can't end in '.lock'");
});
- it("can't be the single character @", function() {
+ it("can't be the single character @", () => {
fillNameWith('@');
expectToHaveError("can't be '@'");
});
- it('concatenates all error messages', function() {
+ it('concatenates all error messages', () => {
fillNameWith('/foo bar?~.');
expectToHaveError("can't start with '/', can't contain spaces, '?', '~', can't end in '.'");
});
- it("doesn't duplicate error messages", function() {
+ it("doesn't duplicate error messages", () => {
fillNameWith('?foo?bar?zoo?');
expectToHaveError("can't contain '?'");
});
- it('removes the error message when is a valid name', function() {
+    it('removes the error message when it is a valid name', () => {
fillNameWith('foo?bar');
expect($('.js-branch-name-error span').length).toEqual(1);
@@ -172,25 +176,25 @@ describe('Branch', function() {
expect($('.js-branch-name-error span').length).toEqual(0);
});
- it('can have dashes anywhere', function() {
+ it('can have dashes anywhere', () => {
fillNameWith('-foo-bar-zoo-');
expect($('.js-branch-name-error span').length).toEqual(0);
});
- it('can have underscores anywhere', function() {
+ it('can have underscores anywhere', () => {
fillNameWith('_foo_bar_zoo_');
expect($('.js-branch-name-error span').length).toEqual(0);
});
- it('can have numbers anywhere', function() {
+ it('can have numbers anywhere', () => {
fillNameWith('1foo2bar3zoo4');
expect($('.js-branch-name-error span').length).toEqual(0);
});
- it('can be only letters', function() {
+ it('can be only letters', () => {
fillNameWith('foo');
expect($('.js-branch-name-error span').length).toEqual(0);
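The conversion above trades Jasmine-style function() callbacks, which shared state through this, for arrow functions plus an explicit testContext object. A reduced sketch of the shape (the form object is a placeholder):

describe('a migrated suite', () => {
  let testContext;

  beforeEach(() => {
    testContext = {};
    // Anything that used to hang off `this` in the Jasmine spec lives here instead.
    testContext.form = { branchName: '' };
  });

  it('reads shared state from testContext rather than this', () => {
    testContext.form.branchName = 'feature/foo';

    expect(testContext.form.branchName).toBe('feature/foo');
  });
});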
diff --git a/spec/frontend/notes/components/__snapshots__/discussion_jump_to_next_button_spec.js.snap b/spec/frontend/notes/components/__snapshots__/discussion_jump_to_next_button_spec.js.snap
index b29d093130a..1e466f266ed 100644
--- a/spec/frontend/notes/components/__snapshots__/discussion_jump_to_next_button_spec.js.snap
+++ b/spec/frontend/notes/components/__snapshots__/discussion_jump_to_next_button_spec.js.snap
@@ -7,8 +7,7 @@ exports[`JumpToNextDiscussionButton matches the snapshot 1`] = `
>
<button
class="btn btn-default discussion-next-btn"
- data-original-title="Jump to next unresolved discussion"
- title=""
+ title="Jump to next unresolved discussion"
>
<icon-stub
name="comment-next"
diff --git a/spec/frontend/notes/components/comment_form_spec.js b/spec/frontend/notes/components/comment_form_spec.js
index 7652f48474d..ceba31b1a70 100644
--- a/spec/frontend/notes/components/comment_form_spec.js
+++ b/spec/frontend/notes/components/comment_form_spec.js
@@ -37,8 +37,6 @@ describe('issue_comment_form component', () => {
noteableType,
},
store,
- sync: false,
- attachToDocument: true,
});
};
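The mount-option cleanup seen here (and in most of the specs below) drops sync: false and attachToDocument: true; under the Jest setup components are mounted with just propsData, store and the like, and DOM updates are awaited explicitly via $nextTick. Sketch with a placeholder component (the real specs import their own):

import { mount } from '@vue/test-utils';

const CommentForm = { props: ['noteableType'], render: h => h('form') };

const createWrapper = () =>
  mount(CommentForm, {
    propsData: { noteableType: 'issue' },
    // `sync: false` and `attachToDocument: true` are no longer passed.
  });

it('mounts without the legacy options', () => {
  expect(createWrapper().props('noteableType')).toBe('issue');
});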
diff --git a/spec/frontend/notes/components/diff_discussion_header_spec.js b/spec/frontend/notes/components/diff_discussion_header_spec.js
index f90147f9105..4c76f9c50fb 100644
--- a/spec/frontend/notes/components/diff_discussion_header_spec.js
+++ b/spec/frontend/notes/components/diff_discussion_header_spec.js
@@ -1,4 +1,4 @@
-import { mount, createLocalVue } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
import createStore from '~/notes/stores';
import diffDiscussionHeader from '~/notes/components/diff_discussion_header.vue';
@@ -18,12 +18,9 @@ describe('diff_discussion_header component', () => {
window.mrTabs = {};
store = createStore();
- const localVue = createLocalVue();
wrapper = mount(diffDiscussionHeader, {
store,
propsData: { discussion: discussionMock },
- localVue,
- sync: false,
});
});
@@ -38,7 +35,9 @@ describe('diff_discussion_header component', () => {
wrapper.setProps({ discussion });
- expect(wrapper.find('.user-avatar-link').exists()).toBe(true);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find('.user-avatar-link').exists()).toBe(true);
+ });
});
describe('action text', () => {
diff --git a/spec/frontend/notes/components/discussion_actions_spec.js b/spec/frontend/notes/components/discussion_actions_spec.js
index 6198f8b3c1d..2d95a86d8a6 100644
--- a/spec/frontend/notes/components/discussion_actions_spec.js
+++ b/spec/frontend/notes/components/discussion_actions_spec.js
@@ -1,4 +1,4 @@
-import { shallowMount, mount, createLocalVue } from '@vue/test-utils';
+import { shallowMount, mount } from '@vue/test-utils';
import { discussionMock } from '../../notes/mock_data';
import DiscussionActions from '~/notes/components/discussion_actions.vue';
import ReplyPlaceholder from '~/notes/components/discussion_reply_placeholder.vue';
@@ -22,12 +22,10 @@ const createUnallowedNote = () =>
describe('DiscussionActions', () => {
let wrapper;
const createComponentFactory = (shallow = true) => props => {
- const localVue = createLocalVue();
const store = createStore();
const mountFn = shallow ? shallowMount : mount;
wrapper = mountFn(DiscussionActions, {
- localVue,
store,
propsData: {
discussion: discussionMock,
@@ -37,8 +35,6 @@ describe('DiscussionActions', () => {
shouldShowJumpToNextDiscussion: true,
...props,
},
- sync: false,
- attachToDocument: true,
});
};
diff --git a/spec/javascripts/notes/components/discussion_filter_note_spec.js b/spec/frontend/notes/components/discussion_filter_note_spec.js
index 52d2e7ce947..6b5f42a84e8 100644
--- a/spec/javascripts/notes/components/discussion_filter_note_spec.js
+++ b/spec/frontend/notes/components/discussion_filter_note_spec.js
@@ -34,7 +34,7 @@ describe('DiscussionFilterNote component', () => {
describe('methods', () => {
describe('selectFilter', () => {
it('emits `dropdownSelect` event on `eventHub` with provided param', () => {
- spyOn(eventHub, '$emit');
+ jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
vm.selectFilter(1);
@@ -74,7 +74,7 @@ describe('DiscussionFilterNote component', () => {
it('clicking `Show all activity` button calls `selectFilter("all")` method', () => {
const showAllBtn = vm.$el.querySelector('.discussion-filter-actions button:first-child');
- spyOn(vm, 'selectFilter');
+ jest.spyOn(vm, 'selectFilter').mockImplementation(() => {});
showAllBtn.dispatchEvent(new Event('click'));
@@ -83,7 +83,7 @@ describe('DiscussionFilterNote component', () => {
it('clicking `Show comments only` button calls `selectFilter("comments")` method', () => {
const showAllBtn = vm.$el.querySelector('.discussion-filter-actions button:last-child');
- spyOn(vm, 'selectFilter');
+ jest.spyOn(vm, 'selectFilter').mockImplementation(() => {});
showAllBtn.dispatchEvent(new Event('click'));
diff --git a/spec/frontend/notes/components/discussion_jump_to_next_button_spec.js b/spec/frontend/notes/components/discussion_jump_to_next_button_spec.js
index 3986340b6fd..58cdf3cb57e 100644
--- a/spec/frontend/notes/components/discussion_jump_to_next_button_spec.js
+++ b/spec/frontend/notes/components/discussion_jump_to_next_button_spec.js
@@ -5,10 +5,7 @@ describe('JumpToNextDiscussionButton', () => {
let wrapper;
beforeEach(() => {
- wrapper = shallowMount(JumpToNextDiscussionButton, {
- sync: false,
- attachToDocument: true,
- });
+ wrapper = shallowMount(JumpToNextDiscussionButton);
});
afterEach(() => {
@@ -24,7 +21,9 @@ describe('JumpToNextDiscussionButton', () => {
button.trigger('click');
- expect(wrapper.emitted().onClick).toBeTruthy();
- expect(wrapper.emitted().onClick.length).toBe(1);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted().onClick).toBeTruthy();
+ expect(wrapper.emitted().onClick.length).toBe(1);
+ });
});
});
diff --git a/spec/frontend/notes/components/discussion_notes_replies_wrapper_spec.js b/spec/frontend/notes/components/discussion_notes_replies_wrapper_spec.js
index 279ca017b44..8d5ea108b50 100644
--- a/spec/frontend/notes/components/discussion_notes_replies_wrapper_spec.js
+++ b/spec/frontend/notes/components/discussion_notes_replies_wrapper_spec.js
@@ -16,7 +16,6 @@ describe('DiscussionNotesRepliesWrapper', () => {
const createComponent = (props = {}) => {
wrapper = mount(TestComponent, {
propsData: props,
- sync: false,
});
};
@@ -30,7 +29,7 @@ describe('DiscussionNotesRepliesWrapper', () => {
});
it('renders children directly', () => {
- expect(wrapper.html()).toEqual(`<ul>${TEST_CHILDREN}</ul>`);
+ expect(wrapper.element.outerHTML).toEqual(`<ul>${TEST_CHILDREN}</ul>`);
});
});
@@ -45,7 +44,7 @@ describe('DiscussionNotesRepliesWrapper', () => {
const notes = wrapper.find('li.discussion-collapsible ul.notes');
expect(notes.exists()).toBe(true);
- expect(notes.html()).toEqual(`<ul class="notes">${TEST_CHILDREN}</ul>`);
+ expect(notes.element.outerHTML).toEqual(`<ul class="notes">${TEST_CHILDREN}</ul>`);
});
});
});
diff --git a/spec/frontend/notes/components/discussion_notes_spec.js b/spec/frontend/notes/components/discussion_notes_spec.js
index 5ab26d742ca..81773752037 100644
--- a/spec/frontend/notes/components/discussion_notes_spec.js
+++ b/spec/frontend/notes/components/discussion_notes_spec.js
@@ -1,4 +1,4 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import '~/behaviors/markdown/render_gfm';
import { SYSTEM_NOTE } from '~/notes/constants';
import DiscussionNotes from '~/notes/components/discussion_notes.vue';
@@ -6,12 +6,9 @@ import NoteableNote from '~/notes/components/noteable_note.vue';
import PlaceholderNote from '~/vue_shared/components/notes/placeholder_note.vue';
import PlaceholderSystemNote from '~/vue_shared/components/notes/placeholder_system_note.vue';
import SystemNote from '~/vue_shared/components/notes/system_note.vue';
-import TimelineEntryItem from '~/vue_shared/components/notes/timeline_entry_item.vue';
import createStore from '~/notes/stores';
import { noteableDataMock, discussionMock, notesDataMock } from '../../notes/mock_data';
-const localVue = createLocalVue();
-
describe('DiscussionNotes', () => {
let wrapper;
@@ -21,7 +18,6 @@ describe('DiscussionNotes', () => {
store.dispatch('setNotesData', notesDataMock);
wrapper = shallowMount(DiscussionNotes, {
- localVue,
store,
propsData: {
discussion: discussionMock,
@@ -35,8 +31,6 @@ describe('DiscussionNotes', () => {
slots: {
'avatar-badge': '<span class="avatar-badge-slot-content" />',
},
- sync: false,
- attachToDocument: true,
});
};
@@ -48,13 +42,13 @@ describe('DiscussionNotes', () => {
it('renders an element for each note in the discussion', () => {
createComponent();
const notesCount = discussionMock.notes.length;
- const els = wrapper.findAll(TimelineEntryItem);
+ const els = wrapper.findAll(NoteableNote);
expect(els.length).toBe(notesCount);
});
  it('renders one element if replies grouping is enabled', () => {
createComponent({ shouldGroupReplies: true });
- const els = wrapper.findAll(TimelineEntryItem);
+ const els = wrapper.findAll(NoteableNote);
expect(els.length).toBe(1);
});
@@ -85,7 +79,7 @@ describe('DiscussionNotes', () => {
];
discussion.notes = notesData;
createComponent({ discussion, shouldRenderDiffs: true });
- const notes = wrapper.findAll('.notes > li');
+ const notes = wrapper.findAll('.notes > *');
expect(notes.at(0).is(PlaceholderSystemNote)).toBe(true);
expect(notes.at(1).is(PlaceholderNote)).toBe(true);
@@ -111,7 +105,14 @@ describe('DiscussionNotes', () => {
describe('events', () => {
    describe('with grouped notes and replies expanded', () => {
- const findNoteAtIndex = index => wrapper.find(`.note:nth-of-type(${index + 1}`);
+ const findNoteAtIndex = index => {
+ const noteComponents = [NoteableNote, SystemNote, PlaceholderNote, PlaceholderSystemNote];
+ const allowedNames = noteComponents.map(c => c.name);
+ return wrapper
+ .findAll('.notes *')
+ .filter(w => allowedNames.includes(w.name()))
+ .at(index);
+ };
beforeEach(() => {
createComponent({ shouldGroupReplies: true, isExpanded: true });
@@ -119,17 +120,26 @@ describe('DiscussionNotes', () => {
it('emits deleteNote when first note emits handleDeleteNote', () => {
findNoteAtIndex(0).vm.$emit('handleDeleteNote');
- expect(wrapper.emitted().deleteNote).toBeTruthy();
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted().deleteNote).toBeTruthy();
+ });
});
it('emits startReplying when first note emits startReplying', () => {
findNoteAtIndex(0).vm.$emit('startReplying');
- expect(wrapper.emitted().startReplying).toBeTruthy();
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted().startReplying).toBeTruthy();
+ });
});
it('emits deleteNote when second note emits handleDeleteNote', () => {
findNoteAtIndex(1).vm.$emit('handleDeleteNote');
- expect(wrapper.emitted().deleteNote).toBeTruthy();
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted().deleteNote).toBeTruthy();
+ });
});
});
@@ -137,12 +147,15 @@ describe('DiscussionNotes', () => {
let note;
beforeEach(() => {
createComponent();
- note = wrapper.find('.note');
+ note = wrapper.find('.notes > *');
});
it('emits deleteNote when first note emits handleDeleteNote', () => {
note.vm.$emit('handleDeleteNote');
- expect(wrapper.emitted().deleteNote).toBeTruthy();
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted().deleteNote).toBeTruthy();
+ });
});
});
});
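Several assertions above switch from CSS selectors ('.note', 'ciicon-stub') to finding by the imported component, which keeps the specs working when class names or stub tags change. A stand-in sketch of the pattern:

import { mount } from '@vue/test-utils';

const Note = { name: 'NoteableNote', props: ['note'], render: h => h('li') };
const NotesList = {
  props: ['notes'],
  render(h) {
    return h('ul', { class: 'notes' }, this.notes.map(note => h(Note, { props: { note } })));
  },
};

it('finds rendered notes by component rather than by CSS selector', () => {
  const notes = [{ id: 1 }, { id: 2 }, { id: 3 }];
  const wrapper = mount(NotesList, { propsData: { notes } });

  expect(wrapper.findAll(Note).length).toBe(notes.length);
});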
diff --git a/spec/frontend/notes/components/discussion_reply_placeholder_spec.js b/spec/frontend/notes/components/discussion_reply_placeholder_spec.js
index 3152b6ff241..a881e44a007 100644
--- a/spec/frontend/notes/components/discussion_reply_placeholder_spec.js
+++ b/spec/frontend/notes/components/discussion_reply_placeholder_spec.js
@@ -1,7 +1,6 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import ReplyPlaceholder from '~/notes/components/discussion_reply_placeholder.vue';
-const localVue = createLocalVue();
const buttonText = 'Test Button Text';
describe('ReplyPlaceholder', () => {
@@ -11,7 +10,6 @@ describe('ReplyPlaceholder', () => {
beforeEach(() => {
wrapper = shallowMount(ReplyPlaceholder, {
- localVue,
propsData: {
buttonText,
},
@@ -25,8 +23,10 @@ describe('ReplyPlaceholder', () => {
  it('emits onClick event on button click', () => {
findButton().trigger('click');
- expect(wrapper.emitted()).toEqual({
- onClick: [[]],
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted()).toEqual({
+ onClick: [[]],
+ });
});
});
diff --git a/spec/frontend/notes/components/discussion_resolve_button_spec.js b/spec/frontend/notes/components/discussion_resolve_button_spec.js
index 1fae19f4492..c64e299efc3 100644
--- a/spec/frontend/notes/components/discussion_resolve_button_spec.js
+++ b/spec/frontend/notes/components/discussion_resolve_button_spec.js
@@ -1,16 +1,13 @@
-import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import resolveDiscussionButton from '~/notes/components/discussion_resolve_button.vue';
const buttonTitle = 'Resolve discussion';
describe('resolveDiscussionButton', () => {
let wrapper;
- let localVue;
const factory = options => {
- localVue = createLocalVue();
wrapper = shallowMount(resolveDiscussionButton, {
- localVue,
...options,
});
};
@@ -33,8 +30,10 @@ describe('resolveDiscussionButton', () => {
button.trigger('click');
- expect(wrapper.emitted()).toEqual({
- onClick: [[]],
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted()).toEqual({
+ onClick: [[]],
+ });
});
});
@@ -67,7 +66,7 @@ describe('resolveDiscussionButton', () => {
const button = wrapper.find({ ref: 'isResolvingIcon' });
- localVue.nextTick(() => {
+ wrapper.vm.$nextTick(() => {
expect(button.exists()).toEqual(false);
});
});
diff --git a/spec/frontend/notes/components/note_app_spec.js b/spec/frontend/notes/components/note_app_spec.js
index 3c960adb698..f9b69e72619 100644
--- a/spec/frontend/notes/components/note_app_spec.js
+++ b/spec/frontend/notes/components/note_app_spec.js
@@ -1,7 +1,7 @@
import $ from 'helpers/jquery';
import AxiosMockAdapter from 'axios-mock-adapter';
import Vue from 'vue';
-import { mount, createLocalVue } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
import { setTestTimeout } from 'helpers/timeout';
import axios from '~/lib/utils/axios_utils';
import NotesApp from '~/notes/components/notes_app.vue';
@@ -48,7 +48,6 @@ describe('note_app', () => {
notesData: mockData.notesDataMock,
userData: mockData.userDataMock,
};
- const localVue = createLocalVue();
return mount(
{
@@ -60,11 +59,8 @@ describe('note_app', () => {
</div>`,
},
{
- attachToDocument: true,
propsData,
store,
- localVue,
- sync: false,
},
);
};
@@ -290,7 +286,10 @@ describe('note_app', () => {
it('should not render quick actions docs url', () => {
wrapper.find('.js-note-edit').trigger('click');
const { quickActionsDocsPath } = mockData.notesDataMock;
- expect(wrapper.find(`.edit-note a[href="${quickActionsDocsPath}"]`).exists()).toBe(false);
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(`.edit-note a[href="${quickActionsDocsPath}"]`).exists()).toBe(false);
+ });
});
});
diff --git a/spec/frontend/notes/components/note_edited_text_spec.js b/spec/frontend/notes/components/note_edited_text_spec.js
index e8d5a24e86a..0a5fe48ef94 100644
--- a/spec/frontend/notes/components/note_edited_text_spec.js
+++ b/spec/frontend/notes/components/note_edited_text_spec.js
@@ -1,7 +1,6 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import NoteEditedText from '~/notes/components/note_edited_text.vue';
-const localVue = createLocalVue();
const propsData = {
actionText: 'Edited',
className: 'foo-bar',
@@ -21,10 +20,7 @@ describe('NoteEditedText', () => {
beforeEach(() => {
wrapper = shallowMount(NoteEditedText, {
- localVue,
propsData,
- sync: false,
- attachToDocument: true,
});
});
diff --git a/spec/javascripts/notes/components/note_header_spec.js b/spec/frontend/notes/components/note_header_spec.js
index 6d1a7ef370f..9b432387654 100644
--- a/spec/javascripts/notes/components/note_header_spec.js
+++ b/spec/frontend/notes/components/note_header_spec.js
@@ -90,7 +90,7 @@ describe('note_header component', () => {
});
it('emits toggle event on click', done => {
- spyOn(vm, '$emit');
+ jest.spyOn(vm, '$emit').mockImplementation(() => {});
vm.$el.querySelector('.js-vue-toggle-button').click();
diff --git a/spec/javascripts/notes/stores/getters_spec.js b/spec/frontend/notes/stores/getters_spec.js
index d69f469c7c7..83417bd70ef 100644
--- a/spec/javascripts/notes/stores/getters_spec.js
+++ b/spec/frontend/notes/stores/getters_spec.js
@@ -327,7 +327,7 @@ describe('Getters Notes Store', () => {
beforeEach(() => {
neighbor = {};
- findUnresolvedDiscussionIdNeighbor = jasmine.createSpy().and.returnValue(neighbor);
+ findUnresolvedDiscussionIdNeighbor = jest.fn(() => neighbor);
localGetters = { findUnresolvedDiscussionIdNeighbor };
});
diff --git a/spec/javascripts/notes/stores/mutation_spec.js b/spec/frontend/notes/stores/mutation_spec.js
index ade4725dd68..49debe348e2 100644
--- a/spec/javascripts/notes/stores/mutation_spec.js
+++ b/spec/frontend/notes/stores/mutation_spec.js
@@ -498,7 +498,7 @@ describe('Notes Store mutations', () => {
mutations.UPDATE_RESOLVABLE_DISCUSSIONS_COUNTS(state);
expect(state).toEqual(
- jasmine.objectContaining({
+ expect.objectContaining({
resolvableDiscussionsCount: 1,
unresolvedDiscussionsCount: 1,
hasUnresolvedDiscussions: false,
@@ -535,7 +535,7 @@ describe('Notes Store mutations', () => {
mutations.UPDATE_RESOLVABLE_DISCUSSIONS_COUNTS(state);
expect(state).toEqual(
- jasmine.objectContaining({
+ expect.objectContaining({
resolvableDiscussionsCount: 4,
unresolvedDiscussionsCount: 2,
hasUnresolvedDiscussions: true,
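The matcher migration in the two store specs above, reduced to its essentials:

it('uses Jest matchers in place of the Jasmine ones', () => {
  const state = {
    resolvableDiscussionsCount: 1,
    unresolvedDiscussionsCount: 1,
    hasUnresolvedDiscussions: false,
    untouchedField: 'ignored by the matcher',
  };

  // jasmine.objectContaining(...) becomes expect.objectContaining(...).
  expect(state).toEqual(
    expect.objectContaining({
      resolvableDiscussionsCount: 1,
      unresolvedDiscussionsCount: 1,
      hasUnresolvedDiscussions: false,
    }),
  );

  // jasmine.createSpy().and.returnValue(x) becomes jest.fn(() => x).
  const findNeighbor = jest.fn(() => 'neighbor');
  expect(findNeighbor()).toBe('neighbor');
});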
diff --git a/spec/frontend/operation_settings/components/external_dashboard_spec.js b/spec/frontend/operation_settings/components/external_dashboard_spec.js
index bb6e029c808..89db03378db 100644
--- a/spec/frontend/operation_settings/components/external_dashboard_spec.js
+++ b/spec/frontend/operation_settings/components/external_dashboard_spec.js
@@ -1,4 +1,4 @@
-import { mount, shallowMount, createLocalVue } from '@vue/test-utils';
+import { mount, shallowMount } from '@vue/test-utils';
import { GlButton, GlLink, GlFormGroup, GlFormInput } from '@gitlab/ui';
import { TEST_HOST } from 'helpers/test_constants';
import ExternalDashboard from '~/operation_settings/components/external_dashboard.vue';
@@ -15,12 +15,10 @@ describe('operation settings external dashboard component', () => {
const operationsSettingsEndpoint = `${TEST_HOST}/mock/ops/settings/endpoint`;
const externalDashboardUrl = `http://mock-external-domain.com/external/dashboard/url`;
const externalDashboardHelpPagePath = `${TEST_HOST}/help/page/path`;
- const localVue = createLocalVue();
const mountComponent = (shallow = true) => {
const config = [
ExternalDashboard,
{
- localVue,
store: store({
operationsSettingsEndpoint,
externalDashboardUrl,
diff --git a/spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap b/spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap
index 78a736a9060..d5ce2c1ee24 100644
--- a/spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap
+++ b/spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap
@@ -29,7 +29,7 @@ exports[`User Operation confirmation modal renders modal with form included 1`]
value="csrf"
/>
- <glforminput-stub
+ <gl-form-input-stub
autocomplete="off"
autofocus=""
name="username"
@@ -38,26 +38,26 @@ exports[`User Operation confirmation modal renders modal with form included 1`]
/>
</form>
- <glbutton-stub
+ <gl-button-stub
variant="secondary"
>
Cancel
- </glbutton-stub>
+ </gl-button-stub>
- <glbutton-stub
+ <gl-button-stub
disabled="true"
variant="warning"
>
secondaryAction
- </glbutton-stub>
+ </gl-button-stub>
- <glbutton-stub
+ <gl-button-stub
disabled="true"
variant="danger"
>
action
- </glbutton-stub>
+ </gl-button-stub>
</div>
`;
diff --git a/spec/frontend/pages/admin/users/components/__snapshots__/user_operation_confirmation_modal_spec.js.snap b/spec/frontend/pages/admin/users/components/__snapshots__/user_operation_confirmation_modal_spec.js.snap
index 4a3989f5192..4b4e9997953 100644
--- a/spec/frontend/pages/admin/users/components/__snapshots__/user_operation_confirmation_modal_spec.js.snap
+++ b/spec/frontend/pages/admin/users/components/__snapshots__/user_operation_confirmation_modal_spec.js.snap
@@ -1,7 +1,7 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`User Operation confirmation modal renders modal with form included 1`] = `
-<glmodal-stub
+<gl-modal-stub
modalclass=""
modalid="user-operation-modal"
ok-title="action"
@@ -29,5 +29,5 @@ exports[`User Operation confirmation modal renders modal with form included 1`]
value="csrf"
/>
</form>
-</glmodal-stub>
+</gl-modal-stub>
`;
diff --git a/spec/frontend/pages/admin/users/components/delete_user_modal_spec.js b/spec/frontend/pages/admin/users/components/delete_user_modal_spec.js
index 3efebc69011..3efefa8137f 100644
--- a/spec/frontend/pages/admin/users/components/delete_user_modal_spec.js
+++ b/spec/frontend/pages/admin/users/components/delete_user_modal_spec.js
@@ -48,7 +48,6 @@ describe('User Operation confirmation modal', () => {
stubs: {
GlModal: ModalStub,
},
- sync: false,
});
};
diff --git a/spec/frontend/pages/admin/users/components/user_modal_manager_spec.js b/spec/frontend/pages/admin/users/components/user_modal_manager_spec.js
index c88a182660d..3d615d9d05f 100644
--- a/spec/frontend/pages/admin/users/components/user_modal_manager_spec.js
+++ b/spec/frontend/pages/admin/users/components/user_modal_manager_spec.js
@@ -1,4 +1,4 @@
-import { shallowMount } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
import UserModalManager from '~/pages/admin/users/components/user_modal_manager.vue';
import ModalStub from './stubs/modal_stub';
@@ -22,18 +22,13 @@ describe('Users admin page Modal Manager', () => {
let wrapper;
const createComponent = (props = {}) => {
- wrapper = shallowMount(UserModalManager, {
+ wrapper = mount(UserModalManager, {
propsData: {
actionModals,
modalConfiguration,
csrfToken: 'dummyCSRF',
...props,
},
- stubs: {
- dummyComponent1: true,
- dummyComponent2: true,
- },
- sync: false,
});
};
diff --git a/spec/frontend/pages/admin/users/components/user_operation_confirmation_modal_spec.js b/spec/frontend/pages/admin/users/components/user_operation_confirmation_modal_spec.js
index 0ecdae2618c..f3a37a255cd 100644
--- a/spec/frontend/pages/admin/users/components/user_operation_confirmation_modal_spec.js
+++ b/spec/frontend/pages/admin/users/components/user_operation_confirmation_modal_spec.js
@@ -17,7 +17,6 @@ describe('User Operation confirmation modal', () => {
method: 'method',
...props,
},
- sync: false,
});
};
diff --git a/spec/javascripts/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js b/spec/frontend/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js
index 5f4dba5ecb9..8917251d285 100644
--- a/spec/javascripts/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js
+++ b/spec/frontend/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js
@@ -6,7 +6,7 @@ import TimezoneDropdown, {
findTimezoneByIdentifier,
} from '~/pages/projects/pipeline_schedules/shared/components/timezone_dropdown';
-describe('Timezone Dropdown', function() {
+describe('Timezone Dropdown', () => {
preloadFixtures('pipeline_schedules/edit.html');
let $inputEl = null;
@@ -81,7 +81,7 @@ describe('Timezone Dropdown', function() {
});
it('will call a provided handler when a new timezone is selected', () => {
- const onSelectTimezone = jasmine.createSpy('onSelectTimezoneMock');
+ const onSelectTimezone = jest.fn();
// eslint-disable-next-line no-new
new TimezoneDropdown({
$inputEl,
@@ -111,7 +111,7 @@ describe('Timezone Dropdown', function() {
});
it('will call a provided `displayFormat` handler to format the dropdown value', () => {
- const displayFormat = jasmine.createSpy('displayFormat');
+ const displayFormat = jest.fn();
// eslint-disable-next-line no-new
new TimezoneDropdown({
$inputEl,
diff --git a/spec/frontend/performance_bar/components/add_request_spec.js b/spec/frontend/performance_bar/components/add_request_spec.js
index a0ad25744b0..c5247a43f27 100644
--- a/spec/frontend/performance_bar/components/add_request_spec.js
+++ b/spec/frontend/performance_bar/components/add_request_spec.js
@@ -19,6 +19,7 @@ describe('add request form', () => {
describe('when clicking the button', () => {
beforeEach(() => {
wrapper.find('button').trigger('click');
+ return wrapper.vm.$nextTick();
});
it('shows the form', () => {
@@ -28,6 +29,7 @@ describe('add request form', () => {
describe('when pressing escape', () => {
beforeEach(() => {
wrapper.find('input').trigger('keyup.esc');
+ return wrapper.vm.$nextTick();
});
it('hides the input', () => {
@@ -38,7 +40,10 @@ describe('add request form', () => {
describe('when submitting the form', () => {
beforeEach(() => {
wrapper.find('input').setValue('http://gitlab.example.com/users/root/calendar.json');
- wrapper.find('input').trigger('keyup.enter');
+ return wrapper.vm.$nextTick().then(() => {
+ wrapper.find('input').trigger('keyup.enter');
+ return wrapper.vm.$nextTick();
+ });
});
it('emits an event to add the request', () => {
@@ -54,8 +59,9 @@ describe('add request form', () => {
it('clears the value for next time', () => {
wrapper.find('button').trigger('click');
-
- expect(wrapper.find('input').text()).toEqual('');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find('input').text()).toEqual('');
+ });
});
});
});
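Returning the $nextTick promise from beforeEach, as the add_request spec now does, guarantees every it() runs against the updated DOM. Stand-in sketch:

import { mount } from '@vue/test-utils';

const AddRequestForm = {
  data: () => ({ open: false }),
  render(h) {
    return h('div', [
      h('button', { on: { click: () => { this.open = true; } } }),
      this.open ? h('input') : null,
    ]);
  },
};

describe('when clicking the button', () => {
  let wrapper;

  beforeEach(() => {
    wrapper = mount(AddRequestForm);
    wrapper.find('button').trigger('click');

    // Jest waits for this promise before running the it() blocks below.
    return wrapper.vm.$nextTick();
  });

  it('shows the input', () => {
    expect(wrapper.find('input').exists()).toBe(true);
  });
});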
diff --git a/spec/frontend/pipelines/graph/action_component_spec.js b/spec/frontend/pipelines/graph/action_component_spec.js
index a8fddd5fff2..43da6388efa 100644
--- a/spec/frontend/pipelines/graph/action_component_spec.js
+++ b/spec/frontend/pipelines/graph/action_component_spec.js
@@ -19,8 +19,6 @@ describe('pipeline graph action component', () => {
link: 'foo',
actionIcon: 'cancel',
},
- sync: false,
- attachToDocument: true,
});
});
@@ -30,7 +28,7 @@ describe('pipeline graph action component', () => {
});
it('should render the provided title as a bootstrap tooltip', () => {
- expect(wrapper.attributes('data-original-title')).toBe('bar');
+ expect(wrapper.attributes('title')).toBe('bar');
});
it('should update bootstrap tooltip when title changes', done => {
@@ -39,7 +37,7 @@ describe('pipeline graph action component', () => {
wrapper.vm
.$nextTick()
.then(() => {
- expect(wrapper.attributes('data-original-title')).toBe('changed');
+ expect(wrapper.attributes('title')).toBe('changed');
})
.then(done)
.catch(done.fail);
diff --git a/spec/frontend/pipelines/graph/job_item_spec.js b/spec/frontend/pipelines/graph/job_item_spec.js
index c79af95b3f3..0c64d5c9fa8 100644
--- a/spec/frontend/pipelines/graph/job_item_spec.js
+++ b/spec/frontend/pipelines/graph/job_item_spec.js
@@ -6,7 +6,9 @@ describe('pipeline graph job item', () => {
let wrapper;
const createWrapper = propsData => {
- wrapper = mount(JobItem, { sync: false, attachToDocument: true, propsData });
+ wrapper = mount(JobItem, {
+ propsData,
+ });
};
const delayedJobFixture = getJSONFixture('jobs/delayed.json');
@@ -43,9 +45,7 @@ describe('pipeline graph job item', () => {
expect(link.attributes('href')).toBe(mockJob.status.details_path);
- expect(link.attributes('data-original-title')).toEqual(
- `${mockJob.name} - ${mockJob.status.label}`,
- );
+ expect(link.attributes('title')).toEqual(`${mockJob.name} - ${mockJob.status.label}`);
expect(wrapper.find('.js-status-icon-success')).toBeDefined();
@@ -110,9 +110,7 @@ describe('pipeline graph job item', () => {
},
});
- expect(wrapper.find('.js-job-component-tooltip').attributes('data-original-title')).toBe(
- 'test',
- );
+ expect(wrapper.find('.js-job-component-tooltip').attributes('title')).toBe('test');
});
it('should not render status label when it is provided', () => {
@@ -128,7 +126,7 @@ describe('pipeline graph job item', () => {
},
});
- expect(wrapper.find('.js-job-component-tooltip').attributes('data-original-title')).toEqual(
+ expect(wrapper.find('.js-job-component-tooltip').attributes('title')).toEqual(
'test - success',
);
});
@@ -140,7 +138,7 @@ describe('pipeline graph job item', () => {
job: delayedJobFixture,
});
- expect(wrapper.find('.js-pipeline-graph-job-link').attributes('data-original-title')).toEqual(
+ expect(wrapper.find('.js-pipeline-graph-job-link').attributes('title')).toEqual(
`delayed job - delayed manual action (${wrapper.vm.remainingTime})`,
);
});
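The tooltip assertions across the pipeline specs move from data-original-title to the plain title attribute, presumably because the Bootstrap tooltip is not initialised under the Jest environment and the title attribute is left in place. Sketch with a stand-in component that simply binds a title:

import { shallowMount } from '@vue/test-utils';

const JobLink = {
  props: ['tooltipText'],
  render(h) {
    return h('a', { attrs: { title: this.tooltipText } });
  },
};

it('exposes the tooltip text through the title attribute', () => {
  const wrapper = shallowMount(JobLink, { propsData: { tooltipText: 'build - passed' } });

  expect(wrapper.attributes('title')).toBe('build - passed');
});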
diff --git a/spec/frontend/pipelines/graph/linked_pipeline_spec.js b/spec/frontend/pipelines/graph/linked_pipeline_spec.js
index c355d653203..7f49b21100d 100644
--- a/spec/frontend/pipelines/graph/linked_pipeline_spec.js
+++ b/spec/frontend/pipelines/graph/linked_pipeline_spec.js
@@ -8,6 +8,12 @@ const mockPipeline = mockData.triggered[0];
describe('Linked pipeline', () => {
let wrapper;
+ const createWrapper = propsData => {
+ wrapper = mount(LinkedPipelineComponent, {
+ propsData,
+ });
+ };
+
afterEach(() => {
wrapper.destroy();
});
@@ -15,14 +21,12 @@ describe('Linked pipeline', () => {
describe('rendered output', () => {
const props = {
pipeline: mockPipeline,
+ projectId: 20,
+ columnTitle: 'Downstream',
};
beforeEach(() => {
- wrapper = mount(LinkedPipelineComponent, {
- sync: false,
- attachToDocument: true,
- propsData: props,
- });
+ createWrapper(props);
});
it('should render a list item as the containing element', () => {
@@ -65,7 +69,7 @@ describe('Linked pipeline', () => {
it('should render the tooltip text as the title attribute', () => {
const tooltipRef = wrapper.find('.js-linked-pipeline-content');
- const titleAttr = tooltipRef.attributes('data-original-title');
+ const titleAttr = tooltipRef.attributes('title');
expect(titleAttr).toContain(mockPipeline.project.name);
expect(titleAttr).toContain(mockPipeline.details.status.label);
@@ -74,19 +78,50 @@ describe('Linked pipeline', () => {
it('does not render the loading icon when isLoading is false', () => {
expect(wrapper.find('.js-linked-pipeline-loading').exists()).toBe(false);
});
+
+ it('should not display child label when pipeline project id is not the same as triggered pipeline project id', () => {
+ const labelContainer = wrapper.find('.parent-child-label-container');
+ expect(labelContainer.exists()).toBe(false);
+ });
+ });
+
+ describe('parent/child', () => {
+ const downstreamProps = {
+ pipeline: mockPipeline,
+ projectId: 19,
+ columnTitle: 'Downstream',
+ };
+
+ const upstreamProps = {
+ ...downstreamProps,
+ columnTitle: 'Upstream',
+ };
+
+ it('parent/child label container should exist', () => {
+ createWrapper(downstreamProps);
+ expect(wrapper.find('.parent-child-label-container').exists()).toBe(true);
+ });
+
+ it('should display child label when pipeline project id is the same as triggered pipeline project id', () => {
+ createWrapper(downstreamProps);
+ expect(wrapper.find('.parent-child-label-container').text()).toContain('Child');
+ });
+
+ it('should display parent label when pipeline project id is the same as triggered_by pipeline project id', () => {
+ createWrapper(upstreamProps);
+ expect(wrapper.find('.parent-child-label-container').text()).toContain('Parent');
+ });
});
describe('when isLoading is true', () => {
const props = {
pipeline: { ...mockPipeline, isLoading: true },
+ projectId: 19,
+ columnTitle: 'Downstream',
};
beforeEach(() => {
- wrapper = mount(LinkedPipelineComponent, {
- sync: false,
- attachToDocument: true,
- propsData: props,
- });
+ createWrapper(props);
});
it('renders a loading icon', () => {
@@ -97,21 +132,19 @@ describe('Linked pipeline', () => {
describe('on click', () => {
const props = {
pipeline: mockPipeline,
+ projectId: 19,
+ columnTitle: 'Downstream',
};
beforeEach(() => {
- wrapper = mount(LinkedPipelineComponent, {
- sync: false,
- attachToDocument: true,
- propsData: props,
- });
+ createWrapper(props);
});
it('emits `pipelineClicked` event', () => {
jest.spyOn(wrapper.vm, '$emit');
wrapper.find('button').trigger('click');
- expect(wrapper.vm.$emit).toHaveBeenCalledWith('pipelineClicked');
+ expect(wrapper.emitted().pipelineClicked).toBeTruthy();
});
it('should emit `bv::hide::tooltip` to close the tooltip', () => {
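The click test above now asserts on wrapper.emitted() instead of spying on $emit, which tests the component's public contract rather than its internals. Stand-in sketch:

import { mount } from '@vue/test-utils';

const PipelineButton = {
  render(h) {
    return h('button', { on: { click: () => this.$emit('pipelineClicked') } });
  },
};

it('records emitted events without spying on $emit', () => {
  const wrapper = mount(PipelineButton);

  wrapper.find('button').trigger('click');

  expect(wrapper.emitted().pipelineClicked).toBeTruthy();
  expect(wrapper.emitted().pipelineClicked.length).toBe(1);
});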
diff --git a/spec/frontend/pipelines/graph/linked_pipelines_mock_data.js b/spec/frontend/pipelines/graph/linked_pipelines_mock_data.js
index f794b8484a7..c9a94b3101f 100644
--- a/spec/frontend/pipelines/graph/linked_pipelines_mock_data.js
+++ b/spec/frontend/pipelines/graph/linked_pipelines_mock_data.js
@@ -1,4 +1,7 @@
export default {
+ project: {
+ id: 19,
+ },
triggered_by: {
id: 129,
active: true,
@@ -63,6 +66,7 @@ export default {
path: '/gitlab-org/gitlab-foss/pipelines/132',
project: {
name: 'GitLabCE',
+ id: 19,
},
details: {
status: {
diff --git a/spec/javascripts/pipelines/nav_controls_spec.js b/spec/frontend/pipelines/nav_controls_spec.js
index 7806cdf1477..6d28da0ea2a 100644
--- a/spec/javascripts/pipelines/nav_controls_spec.js
+++ b/spec/frontend/pipelines/nav_controls_spec.js
@@ -75,7 +75,7 @@ describe('Pipelines Nav Controls', () => {
});
it('should emit postAction event when reset runner cache button is clicked', () => {
- spyOn(component, '$emit');
+ jest.spyOn(component, '$emit').mockImplementation(() => {});
component.$el.querySelector('.js-clear-cache').click();
diff --git a/spec/frontend/pipelines/pipeline_triggerer_spec.js b/spec/frontend/pipelines/pipeline_triggerer_spec.js
index e211852f74b..a8eec274487 100644
--- a/spec/frontend/pipelines/pipeline_triggerer_spec.js
+++ b/spec/frontend/pipelines/pipeline_triggerer_spec.js
@@ -24,8 +24,6 @@ describe('Pipelines Triggerer', () => {
const createComponent = () => {
wrapper = shallowMount(pipelineTriggerer, {
propsData: mockData,
- sync: false,
- attachToDocument: true,
});
};
diff --git a/spec/frontend/pipelines/pipeline_url_spec.js b/spec/frontend/pipelines/pipeline_url_spec.js
index 3c0c35e1f0f..70b94f2c8e1 100644
--- a/spec/frontend/pipelines/pipeline_url_spec.js
+++ b/spec/frontend/pipelines/pipeline_url_spec.js
@@ -10,8 +10,6 @@ describe('Pipeline Url Component', () => {
const createComponent = props => {
wrapper = shallowMount(PipelineUrlComponent, {
- sync: false,
- attachToDocument: true,
propsData: props,
});
};
@@ -105,8 +103,6 @@ describe('Pipeline Url Component', () => {
});
expect(wrapper.find('.js-pipeline-url-failure').text()).toContain('error');
- expect(wrapper.find('.js-pipeline-url-failure').attributes('data-original-title')).toContain(
- 'some reason',
- );
+ expect(wrapper.find('.js-pipeline-url-failure').attributes('title')).toContain('some reason');
});
});
diff --git a/spec/frontend/pipelines/pipelines_table_row_spec.js b/spec/frontend/pipelines/pipelines_table_row_spec.js
index 1c785ec6ffe..c43210c5350 100644
--- a/spec/frontend/pipelines/pipelines_table_row_spec.js
+++ b/spec/frontend/pipelines/pipelines_table_row_spec.js
@@ -12,7 +12,6 @@ describe('Pipelines Table Row', () => {
autoDevopsHelpPath: 'foo',
viewType: 'root',
},
- sync: false,
});
let wrapper;
diff --git a/spec/frontend/polyfills/element_spec.js b/spec/frontend/polyfills/element_spec.js
new file mode 100644
index 00000000000..64ce248ca44
--- /dev/null
+++ b/spec/frontend/polyfills/element_spec.js
@@ -0,0 +1,46 @@
+import '~/commons/polyfills/element';
+
+describe('Element polyfills', () => {
+ let testContext;
+
+ beforeEach(() => {
+ testContext = {};
+ });
+
+ beforeEach(() => {
+ testContext.element = document.createElement('ul');
+ });
+
+ describe('matches', () => {
+ it('returns true if element matches the selector', () => {
+ expect(testContext.element.matches('ul')).toBeTruthy();
+ });
+
+ it("returns false if element doesn't match the selector", () => {
+ expect(testContext.element.matches('.not-an-element')).toBeFalsy();
+ });
+ });
+
+ describe('closest', () => {
+ beforeEach(() => {
+ testContext.childElement = document.createElement('li');
+ testContext.element.appendChild(testContext.childElement);
+ });
+
+ it('returns the closest parent that matches the selector', () => {
+ expect(testContext.childElement.closest('ul').toString()).toBe(
+ testContext.element.toString(),
+ );
+ });
+
+ it('returns itself if it matches the selector', () => {
+ expect(testContext.childElement.closest('li').toString()).toBe(
+ testContext.childElement.toString(),
+ );
+ });
+
+ it('returns undefined if nothing matches the selector', () => {
+      expect(testContext.childElement.closest('.not-an-element')).toBeFalsy();
+ });
+ });
+});
diff --git a/spec/javascripts/profile/add_ssh_key_validation_spec.js b/spec/frontend/profile/add_ssh_key_validation_spec.js
index c71a2885acc..1fec864599c 100644
--- a/spec/javascripts/profile/add_ssh_key_validation_spec.js
+++ b/spec/frontend/profile/add_ssh_key_validation_spec.js
@@ -4,16 +4,18 @@ describe('AddSshKeyValidation', () => {
describe('submit', () => {
it('returns true if isValid is true', () => {
const addSshKeyValidation = new AddSshKeyValidation({});
- spyOn(AddSshKeyValidation, 'isPublicKey').and.returnValue(true);
+ jest.spyOn(AddSshKeyValidation, 'isPublicKey').mockReturnValue(true);
expect(addSshKeyValidation.submit()).toBeTruthy();
});
it('calls preventDefault and toggleWarning if isValid is false', () => {
const addSshKeyValidation = new AddSshKeyValidation({});
- const event = jasmine.createSpyObj('event', ['preventDefault']);
- spyOn(AddSshKeyValidation, 'isPublicKey').and.returnValue(false);
- spyOn(addSshKeyValidation, 'toggleWarning');
+ const event = {
+ preventDefault: jest.fn(),
+ };
+ jest.spyOn(AddSshKeyValidation, 'isPublicKey').mockReturnValue(false);
+ jest.spyOn(addSshKeyValidation, 'toggleWarning').mockImplementation(() => {});
addSshKeyValidation.submit(event);
diff --git a/spec/frontend/project_select_combo_button_spec.js b/spec/frontend/project_select_combo_button_spec.js
new file mode 100644
index 00000000000..c47db71b4ac
--- /dev/null
+++ b/spec/frontend/project_select_combo_button_spec.js
@@ -0,0 +1,140 @@
+import $ from 'jquery';
+import ProjectSelectComboButton from '~/project_select_combo_button';
+
+const fixturePath = 'static/project_select_combo_button.html';
+
+describe('Project Select Combo Button', () => {
+ let testContext;
+
+ beforeEach(() => {
+ testContext = {};
+ });
+
+ preloadFixtures(fixturePath);
+
+ beforeEach(() => {
+ testContext.defaults = {
+ label: 'Select project to create issue',
+ groupId: 12345,
+ projectMeta: {
+ name: 'My Cool Project',
+ url: 'http://mycoolproject.com',
+ },
+ newProjectMeta: {
+ name: 'My Other Cool Project',
+ url: 'http://myothercoolproject.com',
+ },
+ localStorageKey: 'group-12345-new-issue-recent-project',
+ relativePath: 'issues/new',
+ };
+
+ loadFixtures(fixturePath);
+
+ testContext.newItemBtn = document.querySelector('.new-project-item-link');
+ testContext.projectSelectInput = document.querySelector('.project-item-select');
+ });
+
+ describe('on page load when localStorage is empty', () => {
+ beforeEach(() => {
+ testContext.comboButton = new ProjectSelectComboButton(testContext.projectSelectInput);
+ });
+
+    it('newItemBtn href is empty', () => {
+ expect(testContext.newItemBtn.getAttribute('href')).toBe('');
+ });
+
+ it('newItemBtn text is the plain default label', () => {
+ expect(testContext.newItemBtn.textContent).toBe(testContext.defaults.label);
+ });
+ });
+
+ describe('on page load when localStorage is filled', () => {
+ beforeEach(() => {
+ window.localStorage.setItem(
+ testContext.defaults.localStorageKey,
+ JSON.stringify(testContext.defaults.projectMeta),
+ );
+ testContext.comboButton = new ProjectSelectComboButton(testContext.projectSelectInput);
+ });
+
+ it('newItemBtn href is correctly set', () => {
+ expect(testContext.newItemBtn.getAttribute('href')).toBe(
+ testContext.defaults.projectMeta.url,
+ );
+ });
+
+ it('newItemBtn text is the cached label', () => {
+ expect(testContext.newItemBtn.textContent).toBe(
+ `New issue in ${testContext.defaults.projectMeta.name}`,
+ );
+ });
+
+ afterEach(() => {
+ window.localStorage.clear();
+ });
+ });
+
+ describe('after selecting a new project', () => {
+ beforeEach(() => {
+ testContext.comboButton = new ProjectSelectComboButton(testContext.projectSelectInput);
+
+ // mock the effect of selecting an item from the projects dropdown (select2)
+ $('.project-item-select')
+ .val(JSON.stringify(testContext.defaults.newProjectMeta))
+ .trigger('change');
+ });
+
+ it('newItemBtn href is correctly set', () => {
+ expect(testContext.newItemBtn.getAttribute('href')).toBe(
+ 'http://myothercoolproject.com/issues/new',
+ );
+ });
+
+ it('newItemBtn text is the selected project label', () => {
+ expect(testContext.newItemBtn.textContent).toBe(
+ `New issue in ${testContext.defaults.newProjectMeta.name}`,
+ );
+ });
+
+ afterEach(() => {
+ window.localStorage.clear();
+ });
+ });
+
+ describe('deriveTextVariants', () => {
+ beforeEach(() => {
+ testContext.mockExecutionContext = {
+ resourceType: '',
+ resourceLabel: '',
+ };
+
+ testContext.comboButton = new ProjectSelectComboButton(testContext.projectSelectInput);
+
+ testContext.method = testContext.comboButton.deriveTextVariants.bind(
+ testContext.mockExecutionContext,
+ );
+ });
+
+    it('correctly derives text variants for merge requests', () => {
+ testContext.mockExecutionContext.resourceType = 'merge_requests';
+ testContext.mockExecutionContext.resourceLabel = 'New merge request';
+
+ const returnedVariants = testContext.method();
+
+ expect(returnedVariants.localStorageItemType).toBe('new-merge-request');
+ expect(returnedVariants.defaultTextPrefix).toBe('New merge request');
+ expect(returnedVariants.presetTextSuffix).toBe('merge request');
+ });
+
+ it('correctly derives text variants for issues', () => {
+ testContext.mockExecutionContext.resourceType = 'issues';
+ testContext.mockExecutionContext.resourceLabel = 'New issue';
+
+ const returnedVariants = testContext.method();
+
+ expect(returnedVariants.localStorageItemType).toBe('new-issue');
+ expect(returnedVariants.defaultTextPrefix).toBe('New issue');
+ expect(returnedVariants.presetTextSuffix).toBe('issue');
+ });
+ });
+});
diff --git a/spec/frontend/registry/list/components/__snapshots__/project_empty_state_spec.js.snap b/spec/frontend/registry/list/components/__snapshots__/project_empty_state_spec.js.snap
index 3084462f5ae..d11a9bdeb51 100644
--- a/spec/frontend/registry/list/components/__snapshots__/project_empty_state_spec.js.snap
+++ b/spec/frontend/registry/list/components/__snapshots__/project_empty_state_spec.js.snap
@@ -86,8 +86,7 @@ exports[`Registry Project Empty state to match the default snapshot 1`] = `
<button
class="btn input-group-text btn-secondary btn-default"
data-clipboard-text="docker login host"
- data-original-title="Copy login command"
- title=""
+ title="Copy login command"
type="button"
>
<svg
@@ -125,8 +124,7 @@ exports[`Registry Project Empty state to match the default snapshot 1`] = `
<button
class="btn input-group-text btn-secondary btn-default"
data-clipboard-text="docker build -t url ."
- data-original-title="Copy build command"
- title=""
+ title="Copy build command"
type="button"
>
<svg
@@ -156,8 +154,7 @@ exports[`Registry Project Empty state to match the default snapshot 1`] = `
<button
class="btn input-group-text btn-secondary btn-default"
data-clipboard-text="docker push url"
- data-original-title="Copy push command"
- title=""
+ title="Copy push command"
type="button"
>
<svg
diff --git a/spec/frontend/registry/list/components/app_spec.js b/spec/frontend/registry/list/components/app_spec.js
index 5072a285f83..c2c220b2cd2 100644
--- a/spec/frontend/registry/list/components/app_spec.js
+++ b/spec/frontend/registry/list/components/app_spec.js
@@ -1,4 +1,3 @@
-import Vue from 'vue';
import { mount } from '@vue/test-utils';
import { TEST_HOST } from 'helpers/test_constants';
import registry from '~/registry/list/components/app.vue';
@@ -35,12 +34,7 @@ describe('Registry List', () => {
};
beforeEach(() => {
- // This is needed due to console.error called by vue to emit a warning that stop the tests.
- // See https://github.com/vuejs/vue-test-utils/issues/532.
- Vue.config.silent = true;
wrapper = mount(registry, {
- attachToDocument: true,
- sync: false,
propsData,
computed: {
repos() {
@@ -52,7 +46,6 @@ describe('Registry List', () => {
});
afterEach(() => {
- Vue.config.silent = false;
wrapper.destroy();
});
@@ -67,8 +60,6 @@ describe('Registry List', () => {
describe('without data', () => {
beforeEach(() => {
wrapper = mount(registry, {
- attachToDocument: true,
- sync: false,
propsData,
computed: {
repos() {
@@ -138,7 +129,7 @@ describe('Registry List', () => {
wrapper = mount(registry, {
propsData: {
...propsData,
- endpoint: null,
+ endpoint: '',
isGroupPage,
},
methods,
@@ -146,7 +137,7 @@ describe('Registry List', () => {
});
it('call the right vuex setters', () => {
- expect(methods.setMainEndpoint).toHaveBeenLastCalledWith(null);
+ expect(methods.setMainEndpoint).toHaveBeenLastCalledWith('');
expect(methods.setIsDeleteDisabled).toHaveBeenLastCalledWith(true);
});
diff --git a/spec/frontend/registry/list/components/collapsible_container_spec.js b/spec/frontend/registry/list/components/collapsible_container_spec.js
index cba49e72588..f969f0ba9ba 100644
--- a/spec/frontend/registry/list/components/collapsible_container_spec.js
+++ b/spec/frontend/registry/list/components/collapsible_container_spec.js
@@ -1,4 +1,3 @@
-import Vue from 'vue';
import Vuex from 'vuex';
import { mount, createLocalVue } from '@vue/test-utils';
import createFlash from '~/flash';
@@ -27,15 +26,10 @@ describe('collapsible registry container', () => {
...config,
store,
localVue,
- attachToDocument: true,
- sync: false,
});
beforeEach(() => {
createFlash.mockClear();
- // This is needed due to console.error called by vue to emit a warning that stop the tests
- // see https://github.com/vuejs/vue-test-utils/issues/532
- Vue.config.silent = true;
store = new Vuex.Store({
state: {
isDeleteDisabled: false,
@@ -51,7 +45,6 @@ describe('collapsible registry container', () => {
});
afterEach(() => {
- Vue.config.silent = false;
wrapper.destroy();
});
@@ -59,6 +52,7 @@ describe('collapsible registry container', () => {
beforeEach(() => {
const fetchList = jest.fn();
wrapper.setMethods({ fetchList });
+ return wrapper.vm.$nextTick();
});
const expectIsClosed = () => {
@@ -71,44 +65,54 @@ describe('collapsible registry container', () => {
expectIsClosed();
});
- it('should be open when user clicks on closed repo', done => {
+ it('should be open when user clicks on closed repo', () => {
const toggleRepos = findToggleRepos();
toggleRepos.at(0).trigger('click');
- Vue.nextTick(() => {
+ return wrapper.vm.$nextTick().then(() => {
const container = findContainerImageTags();
expect(container.exists()).toBe(true);
expect(wrapper.vm.fetchList).toHaveBeenCalled();
- done();
});
});
- it('should be closed when the user clicks on an opened repo', done => {
+ it('should be closed when the user clicks on an opened repo', () => {
const toggleRepos = findToggleRepos();
toggleRepos.at(0).trigger('click');
- Vue.nextTick(() => {
+ return wrapper.vm.$nextTick().then(() => {
toggleRepos.at(0).trigger('click');
- Vue.nextTick(() => {
+ wrapper.vm.$nextTick(() => {
expectIsClosed();
- done();
});
});
});
});
describe('delete repo', () => {
+ beforeEach(() => {
+ const deleteItem = jest.fn().mockResolvedValue();
+ const fetchRepos = jest.fn().mockResolvedValue();
+ wrapper.setMethods({ deleteItem, fetchRepos });
+ });
+
it('should be possible to delete a repo', () => {
const deleteBtn = findDeleteBtn();
expect(deleteBtn.exists()).toBe(true);
});
it('should call deleteItem when confirming deletion', () => {
- const deleteItem = jest.fn().mockResolvedValue();
- const fetchRepos = jest.fn().mockResolvedValue();
- wrapper.setMethods({ deleteItem, fetchRepos });
wrapper.vm.handleDeleteRepository();
expect(wrapper.vm.deleteItem).toHaveBeenCalledWith(wrapper.vm.repo);
});
+ it('should show a flash with a success notice', () =>
+ wrapper.vm.handleDeleteRepository().then(() => {
+ expect(wrapper.vm.deleteImageConfirmationMessage).toContain(wrapper.vm.repo.name);
+ expect(createFlash).toHaveBeenCalledWith(
+ wrapper.vm.deleteImageConfirmationMessage,
+ 'notice',
+ );
+ }));
+
it('should show an error when there is API error', () => {
const deleteItem = jest.fn().mockRejectedValue('error');
wrapper.setMethods({ deleteItem });
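Note: the hunks above convert callback-style async specs (done + Vue.nextTick) into specs that return the wrapper.vm.$nextTick() promise so Jest awaits the DOM update itself. A minimal self-contained sketch of that pattern, using a hypothetical toy component that is not part of this diff:

import { mount } from '@vue/test-utils';

// Hypothetical component, used only to demonstrate the pattern.
const Toggle = {
  data: () => ({ open: false }),
  template: `<button @click="open = !open">{{ open ? 'open' : 'closed' }}</button>`,
};

describe('returned $nextTick instead of done callbacks', () => {
  it('waits for the re-render before asserting', () => {
    const wrapper = mount(Toggle);
    wrapper.trigger('click');
    // Returning the promise lets Jest await the DOM update; no done() is needed.
    return wrapper.vm.$nextTick().then(() => {
      expect(wrapper.text()).toBe('open');
    });
  });
});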
diff --git a/spec/frontend/registry/list/components/project_empty_state_spec.js b/spec/frontend/registry/list/components/project_empty_state_spec.js
index bd717a4eb10..d29b9e47233 100644
--- a/spec/frontend/registry/list/components/project_empty_state_spec.js
+++ b/spec/frontend/registry/list/components/project_empty_state_spec.js
@@ -6,8 +6,6 @@ describe('Registry Project Empty state', () => {
beforeEach(() => {
wrapper = mount(projectEmptyState, {
- attachToDocument: true,
- sync: false,
propsData: {
noContainersImage: 'imageUrl',
helpPagePath: 'help',
diff --git a/spec/frontend/registry/list/components/table_registry_spec.js b/spec/frontend/registry/list/components/table_registry_spec.js
index fe099adbdfb..b13797929dd 100644
--- a/spec/frontend/registry/list/components/table_registry_spec.js
+++ b/spec/frontend/registry/list/components/table_registry_spec.js
@@ -1,4 +1,3 @@
-import Vue from 'vue';
import Vuex from 'vuex';
import { mount, createLocalVue } from '@vue/test-utils';
import createFlash from '~/flash';
@@ -29,13 +28,13 @@ describe('table registry', () => {
const bulkDeletePath = 'path';
const mountWithStore = config =>
- mount(tableRegistry, { ...config, store, localVue, attachToDocument: true, sync: false });
+ mount(tableRegistry, {
+ ...config,
+ store,
+ localVue,
+ });
beforeEach(() => {
- // This is needed due to console.error called by vue to emit a warning that stop the tests
- // see https://github.com/vuejs/vue-test-utils/issues/532
- Vue.config.silent = true;
-
store = new Vuex.Store({
state: {
isDeleteDisabled: false,
@@ -52,7 +51,6 @@ describe('table registry', () => {
});
afterEach(() => {
- Vue.config.silent = false;
wrapper.destroy();
});
@@ -82,67 +80,65 @@ describe('table registry', () => {
});
describe('multi select', () => {
- it('selecting a row should enable delete button', done => {
+ it('selecting a row should enable delete button', () => {
const deleteBtn = findDeleteButton();
const checkboxes = findSelectCheckboxes();
expect(deleteBtn.attributes('disabled')).toBe('disabled');
checkboxes.at(0).trigger('click');
- Vue.nextTick(() => {
+ return wrapper.vm.$nextTick().then(() => {
expect(deleteBtn.attributes('disabled')).toEqual(undefined);
- done();
});
});
- it('selecting all checkbox should select all rows and enable delete button', done => {
+ it('selecting all checkbox should select all rows and enable delete button', () => {
const selectAll = findSelectAllCheckbox();
- const checkboxes = findSelectCheckboxes();
selectAll.trigger('click');
- Vue.nextTick(() => {
+ return wrapper.vm.$nextTick().then(() => {
+ const checkboxes = findSelectCheckboxes();
const checked = checkboxes.filter(w => w.element.checked);
expect(checked.length).toBe(checkboxes.length);
- done();
});
});
- it('deselecting select all checkbox should deselect all rows and disable delete button', done => {
+ it('deselecting select all checkbox should deselect all rows and disable delete button', () => {
const checkboxes = findSelectCheckboxes();
const selectAll = findSelectAllCheckbox();
selectAll.trigger('click');
selectAll.trigger('click');
- Vue.nextTick(() => {
+ return wrapper.vm.$nextTick().then(() => {
const checked = checkboxes.filter(w => !w.element.checked);
expect(checked.length).toBe(checkboxes.length);
- done();
});
});
- it('should delete multiple items when multiple items are selected', done => {
+ it('should delete multiple items when multiple items are selected', () => {
const multiDeleteItems = jest.fn().mockResolvedValue();
wrapper.setMethods({ multiDeleteItems });
- const selectAll = findSelectAllCheckbox();
- selectAll.trigger('click');
- Vue.nextTick(() => {
- const deleteBtn = findDeleteButton();
- expect(wrapper.vm.selectedItems).toEqual([0, 1]);
- expect(deleteBtn.attributes('disabled')).toEqual(undefined);
- wrapper.setData({ itemsToBeDeleted: [...wrapper.vm.selectedItems] });
- wrapper.vm.handleMultipleDelete();
-
- Vue.nextTick(() => {
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ const selectAll = findSelectAllCheckbox();
+ selectAll.trigger('click');
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ const deleteBtn = findDeleteButton();
+ expect(wrapper.vm.selectedItems).toEqual([0, 1]);
+ expect(deleteBtn.attributes('disabled')).toEqual(undefined);
+ wrapper.setData({ itemsToBeDeleted: [...wrapper.vm.selectedItems] });
+ wrapper.vm.handleMultipleDelete();
expect(wrapper.vm.selectedItems).toEqual([]);
expect(wrapper.vm.itemsToBeDeleted).toEqual([]);
expect(wrapper.vm.multiDeleteItems).toHaveBeenCalledWith({
path: bulkDeletePath,
items: [firstImage.tag, secondImage.tag],
});
- done();
});
- });
});
it('should show an error message if bulkDeletePath is not set', () => {
@@ -162,6 +158,7 @@ describe('table registry', () => {
describe('delete registry', () => {
beforeEach(() => {
wrapper.setData({ selectedItems: [0] });
+ return wrapper.vm.$nextTick();
});
it('should be possible to delete a registry', () => {
@@ -178,10 +175,12 @@ describe('table registry', () => {
const deleteSingleItem = jest.fn();
const deleteItem = jest.fn().mockResolvedValue();
wrapper.setMethods({ deleteSingleItem, deleteItem });
- deleteBtns.at(0).trigger('click');
- expect(wrapper.vm.deleteSingleItem).toHaveBeenCalledWith(0);
- wrapper.vm.handleSingleDelete(1);
- expect(wrapper.vm.deleteItem).toHaveBeenCalledWith(1);
+ return wrapper.vm.$nextTick().then(() => {
+ deleteBtns.at(0).trigger('click');
+ expect(wrapper.vm.deleteSingleItem).toHaveBeenCalledWith(0);
+ wrapper.vm.handleSingleDelete(1);
+ expect(wrapper.vm.deleteItem).toHaveBeenCalledWith(1);
+ });
});
});
@@ -317,6 +316,7 @@ describe('table registry', () => {
describe('single tag delete', () => {
beforeEach(() => {
wrapper.setData({ itemsToBeDeleted: [0] });
+ return wrapper.vm.$nextTick();
});
it('send an event when delete button is clicked', () => {
@@ -345,6 +345,7 @@ describe('table registry', () => {
beforeEach(() => {
const items = [0, 1, 2];
wrapper.setData({ itemsToBeDeleted: items, selectedItems: items });
+ return wrapper.vm.$nextTick();
});
it('send an event when delete button is clicked', () => {
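Note: several beforeEach blocks in this file now return wrapper.vm.$nextTick() after setData, so the rendered DOM reflects the new data before the assertions run. A condensed sketch of that shape, within the spec's existing wrapper setup (finder names follow the spec above):

describe('delete registry', () => {
  beforeEach(() => {
    wrapper.setData({ selectedItems: [0] });
    // Returning the promise makes Jest wait for the re-render
    // before any it() block in this describe runs.
    return wrapper.vm.$nextTick();
  });

  it('should be possible to delete a registry', () => {
    expect(findDeleteButton().exists()).toBe(true);
  });
});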
diff --git a/spec/frontend/registry/settings/components/__snapshots__/registry_settings_app_spec.js.snap b/spec/frontend/registry/settings/components/__snapshots__/registry_settings_app_spec.js.snap
index c6dbb1da8e9..966acdf52be 100644
--- a/spec/frontend/registry/settings/components/__snapshots__/registry_settings_app_spec.js.snap
+++ b/spec/frontend/registry/settings/components/__snapshots__/registry_settings_app_spec.js.snap
@@ -1,10 +1,10 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
-exports[`Registry List renders 1`] = `
+exports[`Registry Settings App renders 1`] = `
<div>
<p>
- Tag retention policies are designed to:
+ Tag expiration policy is designed to:
</p>
@@ -20,14 +20,6 @@ exports[`Registry List renders 1`] = `
</li>
</ul>
- <p>
- Read more about the
- <a
- href="foo"
- target="_blank"
- >
- Container Registry tag retention policies
- </a>
- </p>
+ <settings-form-stub />
</div>
`;
diff --git a/spec/frontend/registry/settings/components/__snapshots__/settings_form_spec.js.snap b/spec/frontend/registry/settings/components/__snapshots__/settings_form_spec.js.snap
new file mode 100644
index 00000000000..d26df308b97
--- /dev/null
+++ b/spec/frontend/registry/settings/components/__snapshots__/settings_form_spec.js.snap
@@ -0,0 +1,181 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Settings Form renders 1`] = `
+<form>
+ <div
+ class="card"
+ >
+ <!---->
+ <div
+ class="card-header"
+ >
+
+ Tag expiration policy
+
+ </div>
+ <div
+ class="card-body"
+ >
+ <!---->
+ <!---->
+
+ <glformgroup-stub
+ id="expiration-policy-toggle-group"
+ label="Expiration policy:"
+ label-align="right"
+ label-cols="3"
+ label-for="expiration-policy-toggle"
+ >
+ <div
+ class="d-flex align-items-start"
+ >
+ <gltoggle-stub
+ id="expiration-policy-toggle"
+ labeloff="Toggle Status: OFF"
+ labelon="Toggle Status: ON"
+ />
+
+ <span
+ class="mb-2 ml-1 lh-2"
+ >
+ Docker tag expiration policy is
+ <strong>
+ disabled
+ </strong>
+ </span>
+ </div>
+ </glformgroup-stub>
+
+ <glformgroup-stub
+ id="expiration-policy-interval-group"
+ label="Expiration interval:"
+ label-align="right"
+ label-cols="3"
+ label-for="expiration-policy-interval"
+ >
+ <glformselect-stub
+ disabled="true"
+ id="expiration-policy-interval"
+ value="bar"
+ >
+ <option
+ value="foo"
+ >
+
+ Foo
+
+ </option>
+ <option
+ value="bar"
+ >
+
+ Bar
+
+ </option>
+ </glformselect-stub>
+ </glformgroup-stub>
+
+ <glformgroup-stub
+ id="expiration-policy-schedule-group"
+ label="Expiration schedule:"
+ label-align="right"
+ label-cols="3"
+ label-for="expiration-policy-schedule"
+ >
+ <glformselect-stub
+ disabled="true"
+ id="expiration-policy-schedule"
+ value="bar"
+ >
+ <option
+ value="foo"
+ >
+
+ Foo
+
+ </option>
+ <option
+ value="bar"
+ >
+
+ Bar
+
+ </option>
+ </glformselect-stub>
+ </glformgroup-stub>
+
+ <glformgroup-stub
+ id="expiration-policy-latest-group"
+ label="Number of tags to retain:"
+ label-align="right"
+ label-cols="3"
+ label-for="expiration-policy-latest"
+ >
+ <glformselect-stub
+ disabled="true"
+ id="expiration-policy-latest"
+ value="bar"
+ >
+ <option
+ value="foo"
+ >
+
+ Foo
+
+ </option>
+ <option
+ value="bar"
+ >
+
+ Bar
+
+ </option>
+ </glformselect-stub>
+ </glformgroup-stub>
+
+ <glformgroup-stub
+ id="expiration-policy-name-matching-group"
+ invalid-feedback="The value of this input should be less than 255 characters"
+ label="Expire Docker tags that match this regex:"
+ label-align="right"
+ label-cols="3"
+ label-for="expiration-policy-name-matching"
+ >
+ <glformtextarea-stub
+ disabled="true"
+ id="expiration-policy-name-matching"
+ placeholder=".*"
+ trim=""
+ value=""
+ />
+ </glformgroup-stub>
+
+ </div>
+ <div
+ class="card-footer"
+ >
+ <div
+ class="d-flex justify-content-end"
+ >
+ <glbutton-stub
+ class="mr-2 d-block"
+ type="reset"
+ >
+ Cancel
+ </glbutton-stub>
+
+ <glbutton-stub
+ class="d-block"
+ type="submit"
+ variant="success"
+ >
+
+ Save expiration policy
+
+ </glbutton-stub>
+ </div>
+ </div>
+ <!---->
+ </div>
+</form>
+`;
diff --git a/spec/frontend/registry/settings/components/registry_settings_app_spec.js b/spec/frontend/registry/settings/components/registry_settings_app_spec.js
index 666d970aa6b..448ff2b3be9 100644
--- a/spec/frontend/registry/settings/components/registry_settings_app_spec.js
+++ b/spec/frontend/registry/settings/components/registry_settings_app_spec.js
@@ -1,29 +1,33 @@
import Vuex from 'vuex';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import component from '~/registry/settings/components/registry_settings_app.vue';
-import { createStore } from '~/registry/settings/stores/';
+import { createStore } from '~/registry/settings/store/';
const localVue = createLocalVue();
localVue.use(Vuex);
-describe('Registry List', () => {
+describe('Registry Settings App', () => {
let wrapper;
let store;
+ let fetchSpy;
- const helpPagePath = 'foo';
- const findHelpLink = () => wrapper.find({ ref: 'help-link' }).find('a');
+ const findSettingsComponent = () => wrapper.find({ ref: 'settings-form' });
+ const findLoadingComponent = () => wrapper.find({ ref: 'loading-icon' });
- const mountComponent = (options = {}) =>
- shallowMount(component, {
- sync: false,
+ const mountComponent = (options = {}) => {
+ fetchSpy = jest.fn();
+ wrapper = shallowMount(component, {
store,
+ methods: {
+ fetchSettings: fetchSpy,
+ },
...options,
});
+ };
beforeEach(() => {
store = createStore();
- store.dispatch('setInitialState', { helpPagePath });
- wrapper = mountComponent();
+ mountComponent();
});
afterEach(() => {
@@ -34,7 +38,18 @@ describe('Registry List', () => {
expect(wrapper.element).toMatchSnapshot();
});
- it('renders an help link dependant on the helphPagePath', () => {
- expect(findHelpLink().attributes('href')).toBe(helpPagePath);
+ it('calls the store function to load the data on mount', () => {
+ expect(fetchSpy).toHaveBeenCalled();
+ });
+
+ it('renders a loader if isLoading is true', () => {
+ store.dispatch('toggleLoading');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findLoadingComponent().exists()).toBe(true);
+ expect(findSettingsComponent().exists()).toBe(false);
+ });
+ });
+ it('renders the setting form', () => {
+ expect(findSettingsComponent().exists()).toBe(true);
});
});
diff --git a/spec/frontend/registry/settings/components/settings_form_spec.js b/spec/frontend/registry/settings/components/settings_form_spec.js
new file mode 100644
index 00000000000..bd733e965a4
--- /dev/null
+++ b/spec/frontend/registry/settings/components/settings_form_spec.js
@@ -0,0 +1,169 @@
+import Vuex from 'vuex';
+import { mount, createLocalVue } from '@vue/test-utils';
+import stubChildren from 'helpers/stub_children';
+import component from '~/registry/settings/components/settings_form.vue';
+import { createStore } from '~/registry/settings/store/';
+import { NAME_REGEX_LENGTH } from '~/registry/settings/constants';
+import { stringifiedFormOptions } from '../mock_data';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('Settings Form', () => {
+ let wrapper;
+ let store;
+ let saveSpy;
+ let resetSpy;
+
+ const findFormGroup = name => wrapper.find(`#expiration-policy-${name}-group`);
+ const findFormElements = (name, father = wrapper) => father.find(`#expiration-policy-${name}`);
+ const findCancelButton = () => wrapper.find({ ref: 'cancel-button' });
+ const findSaveButton = () => wrapper.find({ ref: 'save-button' });
+ const findForm = () => wrapper.find({ ref: 'form-element' });
+
+ const mountComponent = (options = {}) => {
+ saveSpy = jest.fn();
+ resetSpy = jest.fn();
+ wrapper = mount(component, {
+ stubs: {
+ ...stubChildren(component),
+ GlCard: false,
+ },
+ store,
+ methods: {
+ saveSettings: saveSpy,
+ resetSettings: resetSpy,
+ },
+ ...options,
+ });
+ };
+
+ beforeEach(() => {
+ store = createStore();
+ store.dispatch('setInitialState', stringifiedFormOptions);
+ mountComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ describe.each`
+ elementName | modelName | value | disabledByToggle
+ ${'toggle'} | ${'enabled'} | ${true} | ${'not disabled'}
+ ${'interval'} | ${'older_than'} | ${'foo'} | ${'disabled'}
+ ${'schedule'} | ${'cadence'} | ${'foo'} | ${'disabled'}
+ ${'latest'} | ${'keep_n'} | ${'foo'} | ${'disabled'}
+ ${'name-matching'} | ${'name_regex'} | ${'foo'} | ${'disabled'}
+ `('$elementName form element', ({ elementName, modelName, value, disabledByToggle }) => {
+ let formGroup;
+ beforeEach(() => {
+ formGroup = findFormGroup(elementName);
+ });
+ it(`${elementName} form group exists in the DOM`, () => {
+ expect(formGroup.exists()).toBe(true);
+ });
+
+ it(`${elementName} form group has a label-for property`, () => {
+ expect(formGroup.attributes('label-for')).toBe(`expiration-policy-${elementName}`);
+ });
+
+ it(`${elementName} form group has a label-cols property`, () => {
+ expect(formGroup.attributes('label-cols')).toBe(`${wrapper.vm.$options.labelsConfig.cols}`);
+ });
+
+ it(`${elementName} form group has a label-align property`, () => {
+ expect(formGroup.attributes('label-align')).toBe(`${wrapper.vm.$options.labelsConfig.align}`);
+ });
+
+ it(`${elementName} form group contains an input element`, () => {
+ expect(findFormElements(elementName, formGroup).exists()).toBe(true);
+ });
+
+ it(`${elementName} form element change updated ${modelName} with ${value}`, () => {
+ const element = findFormElements(elementName, formGroup);
+ const modelUpdateEvent = element.vm.$options.model
+ ? element.vm.$options.model.event
+ : 'input';
+ element.vm.$emit(modelUpdateEvent, value);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.vm[modelName]).toBe(value);
+ });
+ });
+
+ it(`${elementName} is ${disabledByToggle} when enabled is set to false`, () => {
+ store.dispatch('updateSettings', { enabled: false });
+ const expectation = disabledByToggle === 'disabled' ? 'true' : undefined;
+ expect(findFormElements(elementName, formGroup).attributes('disabled')).toBe(expectation);
+ });
+ });
+
+ describe('form actions', () => {
+ let form;
+ beforeEach(() => {
+ form = findForm();
+ });
+ it('cancel has type reset', () => {
+ expect(findCancelButton().attributes('type')).toBe('reset');
+ });
+
+ it('form reset event calls the appropriate function', () => {
+ form.trigger('reset');
+ expect(resetSpy).toHaveBeenCalled();
+ });
+
+ it('save has type submit', () => {
+ expect(findSaveButton().attributes('type')).toBe('submit');
+ });
+
+ it('form submit event calls the appropriate function', () => {
+ form.trigger('submit');
+ expect(saveSpy).toHaveBeenCalled();
+ });
+ });
+
+ describe('form validation', () => {
+ describe(`when name regex is longer than ${NAME_REGEX_LENGTH}`, () => {
+ const invalidString = new Array(NAME_REGEX_LENGTH + 2).join(',');
+ beforeEach(() => {
+ store.dispatch('updateSettings', { name_regex: invalidString });
+ });
+
+ it('save btn is disabled', () => {
+ expect(findSaveButton().attributes('disabled')).toBeTruthy();
+ });
+
+ it('nameRegexState is false', () => {
+ expect(wrapper.vm.nameRegexState).toBe(false);
+ });
+ });
+
+ it('if the user did not type, validation is null', () => {
+ store.dispatch('updateSettings', { name_regex: null });
+ expect(wrapper.vm.nameRegexState).toBe(null);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findSaveButton().attributes('disabled')).toBeFalsy();
+ });
+ });
+
+ it(`if the user typed and is less than ${NAME_REGEX_LENGTH} state is true`, () => {
+ store.dispatch('updateSettings', { name_regex: 'abc' });
+ expect(wrapper.vm.nameRegexState).toBe(true);
+ });
+ });
+
+ describe('help text', () => {
+ it('toggleDescriptionText text reflects enabled property', () => {
+ const toggleHelpText = findFormGroup('toggle').find('span');
+ expect(toggleHelpText.html()).toContain('disabled');
+ wrapper.vm.enabled = true;
+ return wrapper.vm.$nextTick().then(() => {
+ expect(toggleHelpText.html()).toContain('enabled');
+ });
+ });
+ });
+});
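Note: the form spec above drives one set of assertions across five form elements with a describe.each template-literal table. A minimal standalone sketch of the pattern (the values are placeholders, not taken from this diff):

describe.each`
  elementName     | value
  ${'toggle'}     | ${true}
  ${'interval'}   | ${'foo'}
`('$elementName form element', ({ elementName, value }) => {
  // $elementName in the describe title and ${elementName} in the it title
  // are filled in once per table row.
  it(`${elementName} receives its value`, () => {
    expect(value).toBeDefined();
  });
});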
diff --git a/spec/frontend/registry/settings/mock_data.js b/spec/frontend/registry/settings/mock_data.js
new file mode 100644
index 00000000000..411363c2c95
--- /dev/null
+++ b/spec/frontend/registry/settings/mock_data.js
@@ -0,0 +1,12 @@
+export const options = [{ key: 'foo', label: 'Foo' }, { key: 'bar', label: 'Bar', default: true }];
+export const stringifiedOptions = JSON.stringify(options);
+export const stringifiedFormOptions = {
+ cadenceOptions: stringifiedOptions,
+ keepNOptions: stringifiedOptions,
+ olderThanOptions: stringifiedOptions,
+};
+export const formOptions = {
+ cadence: options,
+ keepN: options,
+ olderThan: options,
+};
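Note: mock_data.js keeps both forms because the app receives the options as JSON strings (for example from data attributes) while the store works with parsed arrays; the SET_INITIAL_STATE spec below relies on that conversion. A hedged sketch of what such a mutation might look like; the real one lives in ~/registry/settings/store/mutations.js and may differ:

// Illustrative only: field names follow the mock data above.
const SET_INITIAL_STATE = (state, { projectId, cadenceOptions, keepNOptions, olderThanOptions }) => {
  state.projectId = projectId;
  state.formOptions = {
    cadence: JSON.parse(cadenceOptions),
    keepN: JSON.parse(keepNOptions),
    olderThan: JSON.parse(olderThanOptions),
  };
};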
diff --git a/spec/frontend/registry/settings/store/actions_spec.js b/spec/frontend/registry/settings/store/actions_spec.js
new file mode 100644
index 00000000000..80fb800ac3a
--- /dev/null
+++ b/spec/frontend/registry/settings/store/actions_spec.js
@@ -0,0 +1,124 @@
+import Api from '~/api';
+import createFlash from '~/flash';
+import testAction from 'helpers/vuex_action_helper';
+import * as actions from '~/registry/settings/store/actions';
+import * as types from '~/registry/settings/store/mutation_types';
+import {
+ UPDATE_SETTINGS_ERROR_MESSAGE,
+ FETCH_SETTINGS_ERROR_MESSAGE,
+ UPDATE_SETTINGS_SUCCESS_MESSAGE,
+} from '~/registry/settings/constants';
+
+jest.mock('~/flash');
+
+describe('Actions Registry Store', () => {
+ describe.each`
+ actionName | mutationName | payload
+ ${'setInitialState'} | ${types.SET_INITIAL_STATE} | ${'foo'}
+ ${'updateSettings'} | ${types.UPDATE_SETTINGS} | ${'foo'}
+ ${'receiveSettingsSuccess'} | ${types.SET_SETTINGS} | ${'foo'}
+ ${'toggleLoading'} | ${types.TOGGLE_LOADING} | ${undefined}
+ ${'resetSettings'} | ${types.RESET_SETTINGS} | ${undefined}
+ `('%s action invokes %s mutation with payload %s', ({ actionName, mutationName, payload }) => {
+ it('should set the initial state', done => {
+ testAction(actions[actionName], payload, {}, [{ type: mutationName, payload }], [], done);
+ });
+ });
+
+ describe.each`
+ actionName | message
+ ${'receiveSettingsError'} | ${FETCH_SETTINGS_ERROR_MESSAGE}
+ ${'updateSettingsError'} | ${UPDATE_SETTINGS_ERROR_MESSAGE}
+ `('%s action', ({ actionName, message }) => {
+ it(`should call createFlash with ${message}`, done => {
+ testAction(actions[actionName], null, null, [], [], () => {
+ expect(createFlash).toHaveBeenCalledWith(message);
+ done();
+ });
+ });
+ });
+
+ describe('fetchSettings', () => {
+ const state = {
+ projectId: 'bar',
+ };
+
+ const payload = {
+ data: {
+ container_expiration_policy: 'foo',
+ },
+ };
+
+ it('should fetch the data from the API', done => {
+ Api.project = jest.fn().mockResolvedValue(payload);
+ testAction(
+ actions.fetchSettings,
+ null,
+ state,
+ [],
+ [
+ { type: 'toggleLoading' },
+ { type: 'receiveSettingsSuccess', payload: payload.data.container_expiration_policy },
+ { type: 'toggleLoading' },
+ ],
+ done,
+ );
+ });
+
+ it('should call receiveSettingsError on error', done => {
+ Api.project = jest.fn().mockRejectedValue();
+ testAction(
+ actions.fetchSettings,
+ null,
+ state,
+ [],
+ [{ type: 'toggleLoading' }, { type: 'receiveSettingsError' }, { type: 'toggleLoading' }],
+ done,
+ );
+ });
+ });
+
+ describe('saveSettings', () => {
+ const state = {
+ projectId: 'bar',
+ settings: 'baz',
+ };
+
+ const payload = {
+ data: {
+ tag_expiration_policies: 'foo',
+ },
+ };
+
+ it('should save the settings through the API', done => {
+ Api.updateProject = jest.fn().mockResolvedValue(payload);
+ testAction(
+ actions.saveSettings,
+ null,
+ state,
+ [],
+ [
+ { type: 'toggleLoading' },
+ { type: 'receiveSettingsSuccess', payload: payload.data.container_expiration_policy },
+ { type: 'toggleLoading' },
+ ],
+ () => {
+ expect(createFlash).toHaveBeenCalledWith(UPDATE_SETTINGS_SUCCESS_MESSAGE, 'success');
+ done();
+ },
+ );
+ });
+
+ it('should call receiveSettingsError on error', done => {
+ Api.updateProject = jest.fn().mockRejectedValue();
+ testAction(
+ actions.saveSettings,
+ null,
+ state,
+ [],
+ [{ type: 'toggleLoading' }, { type: 'updateSettingsError' }, { type: 'toggleLoading' }],
+ done,
+ );
+ });
+ });
+});
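Note: the action specs above all go through the testAction helper from helpers/vuex_action_helper. The call shape, as used in these hunks, is:

// testAction(action, payload, state, expectedMutations, expectedActions, done)
// expectedMutations/expectedActions are arrays of { type, payload } objects;
// as these specs use it, the helper fails the test when the action
// commits or dispatches anything other than what is listed.
testAction(
  actions.fetchSettings,
  null,                 // payload passed to the action
  { projectId: 'bar' }, // mocked state
  [],                   // no mutations committed directly
  [
    { type: 'toggleLoading' },
    { type: 'receiveSettingsSuccess', payload: 'foo' },
    { type: 'toggleLoading' },
  ],
  done,
);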
diff --git a/spec/frontend/registry/settings/store/mutations_spec.js b/spec/frontend/registry/settings/store/mutations_spec.js
new file mode 100644
index 00000000000..1a0effbe125
--- /dev/null
+++ b/spec/frontend/registry/settings/store/mutations_spec.js
@@ -0,0 +1,58 @@
+import mutations from '~/registry/settings/store/mutations';
+import * as types from '~/registry/settings/store/mutation_types';
+import createState from '~/registry/settings/store/state';
+import { formOptions, stringifiedFormOptions } from '../mock_data';
+
+describe('Mutations Registry Store', () => {
+ let mockState;
+
+ beforeEach(() => {
+ mockState = createState();
+ });
+
+ describe('SET_INITIAL_STATE', () => {
+ it('should set the initial state', () => {
+ const expectedState = { ...mockState, projectId: 'foo', formOptions };
+ mutations[types.SET_INITIAL_STATE](mockState, {
+ projectId: 'foo',
+ ...stringifiedFormOptions,
+ });
+
+ expect(mockState.projectId).toEqual(expectedState.projectId);
+ expect(mockState.formOptions).toEqual(expectedState.formOptions);
+ });
+ });
+
+ describe('UPDATE_SETTINGS', () => {
+ it('should update the settings', () => {
+ mockState.settings = { foo: 'bar' };
+ const payload = { foo: 'baz' };
+ const expectedState = { ...mockState, settings: payload };
+ mutations[types.UPDATE_SETTINGS](mockState, payload);
+ expect(mockState.settings).toEqual(expectedState.settings);
+ });
+ });
+ describe('SET_SETTINGS', () => {
+ it('should set the settings and original', () => {
+ const payload = { foo: 'baz' };
+ const expectedState = { ...mockState, settings: payload };
+ mutations[types.SET_SETTINGS](mockState, payload);
+ expect(mockState.settings).toEqual(expectedState.settings);
+ expect(mockState.original).toEqual(expectedState.settings);
+ });
+ });
+ describe('RESET_SETTINGS', () => {
+ it('should copy original over settings', () => {
+ mockState.settings = { foo: 'bar' };
+ mockState.original = { foo: 'baz' };
+ mutations[types.RESET_SETTINGS](mockState);
+ expect(mockState.settings).toEqual(mockState.original);
+ });
+ });
+ describe('TOGGLE_LOADING', () => {
+ it('should toggle the loading', () => {
+ mutations[types.TOGGLE_LOADING](mockState);
+ expect(mockState.isLoading).toEqual(true);
+ });
+ });
+});
diff --git a/spec/frontend/registry/settings/stores/actions_spec.js b/spec/frontend/registry/settings/stores/actions_spec.js
deleted file mode 100644
index 484f1b2dc0a..00000000000
--- a/spec/frontend/registry/settings/stores/actions_spec.js
+++ /dev/null
@@ -1,20 +0,0 @@
-import testAction from 'helpers/vuex_action_helper';
-import * as actions from '~/registry/settings/stores/actions';
-import * as types from '~/registry/settings/stores/mutation_types';
-
-jest.mock('~/flash.js');
-
-describe('Actions Registry Store', () => {
- describe('setInitialState', () => {
- it('should set the initial state', done => {
- testAction(
- actions.setInitialState,
- 'foo',
- {},
- [{ type: types.SET_INITIAL_STATE, payload: 'foo' }],
- [],
- done,
- );
- });
- });
-});
diff --git a/spec/frontend/registry/settings/stores/mutations_spec.js b/spec/frontend/registry/settings/stores/mutations_spec.js
deleted file mode 100644
index 421cd3f13cb..00000000000
--- a/spec/frontend/registry/settings/stores/mutations_spec.js
+++ /dev/null
@@ -1,21 +0,0 @@
-import mutations from '~/registry/settings/stores/mutations';
-import * as types from '~/registry/settings/stores/mutation_types';
-import createState from '~/registry/settings/stores/state';
-
-describe('Mutations Registry Store', () => {
- let mockState;
-
- beforeEach(() => {
- mockState = createState();
- });
-
- describe('SET_INITIAL_STATE', () => {
- it('should set the initial state', () => {
- const payload = { helpPagePath: 'foo', registrySettingsEndpoint: 'bar' };
- const expectedState = { ...mockState, ...payload };
- mutations[types.SET_INITIAL_STATE](mockState, payload);
-
- expect(mockState.endpoint).toEqual(expectedState.endpoint);
- });
- });
-});
diff --git a/spec/frontend/releases/detail/components/app_spec.js b/spec/frontend/releases/detail/components/app_spec.js
index 4f094e8639a..fd5239ad44e 100644
--- a/spec/frontend/releases/detail/components/app_spec.js
+++ b/spec/frontend/releases/detail/components/app_spec.js
@@ -29,7 +29,9 @@ describe('Release detail component', () => {
const store = new Vuex.Store({ actions, state });
- wrapper = mount(ReleaseDetailApp, { store, sync: false, attachToDocument: true });
+ wrapper = mount(ReleaseDetailApp, {
+ store,
+ });
return wrapper.vm.$nextTick();
});
diff --git a/spec/frontend/releases/list/components/evidence_block_spec.js b/spec/frontend/releases/list/components/evidence_block_spec.js
index e8a3eace216..39f3975f665 100644
--- a/spec/frontend/releases/list/components/evidence_block_spec.js
+++ b/spec/frontend/releases/list/components/evidence_block_spec.js
@@ -1,4 +1,4 @@
-import { mount, createLocalVue } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
import { GlLink } from '@gitlab/ui';
import { truncateSha } from '~/lib/utils/text_utility';
import Icon from '~/vue_shared/components/icon.vue';
@@ -10,10 +10,7 @@ describe('Evidence Block', () => {
let wrapper;
const factory = (options = {}) => {
- const localVue = createLocalVue();
-
- wrapper = mount(localVue.extend(EvidenceBlock), {
- localVue,
+ wrapper = mount(EvidenceBlock, {
...options,
});
};
@@ -39,7 +36,7 @@ describe('Evidence Block', () => {
});
it('renders the correct hover text for the download', () => {
- expect(wrapper.find(GlLink).attributes('data-original-title')).toBe('Download evidence JSON');
+ expect(wrapper.find(GlLink).attributes('title')).toBe('Download evidence JSON');
});
it('renders the correct file link for download', () => {
@@ -53,7 +50,10 @@ describe('Evidence Block', () => {
it('renders the long sha after expansion', () => {
wrapper.find('.js-text-expander-prepend').trigger('click');
- expect(wrapper.find('.js-expanded').text()).toBe(release.evidence_sha);
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find('.js-expanded').text()).toBe(release.evidence_sha);
+ });
});
});
@@ -63,9 +63,7 @@ describe('Evidence Block', () => {
});
it('renders the correct hover text', () => {
- expect(wrapper.find(ClipboardButton).attributes('data-original-title')).toBe(
- 'Copy commit SHA',
- );
+ expect(wrapper.find(ClipboardButton).attributes('title')).toBe('Copy commit SHA');
});
it('copies the sha', () => {
diff --git a/spec/frontend/releases/list/components/release_block_footer_spec.js b/spec/frontend/releases/list/components/release_block_footer_spec.js
index 7652acbdd62..07f61303e33 100644
--- a/spec/frontend/releases/list/components/release_block_footer_spec.js
+++ b/spec/frontend/releases/list/components/release_block_footer_spec.js
@@ -27,7 +27,6 @@ describe('Release block footer', () => {
...convertObjectPropsToCamelCase(releaseClone),
...props,
},
- sync: false,
});
return wrapper.vm.$nextTick();
diff --git a/spec/frontend/releases/list/components/release_block_milestone_info_spec.js b/spec/frontend/releases/list/components/release_block_milestone_info_spec.js
index 7179ab3d3cc..8a63dbbdca7 100644
--- a/spec/frontend/releases/list/components/release_block_milestone_info_spec.js
+++ b/spec/frontend/releases/list/components/release_block_milestone_info_spec.js
@@ -14,7 +14,6 @@ describe('Release block milestone info', () => {
propsData: {
milestones: milestonesProp,
},
- sync: false,
});
return wrapper.vm.$nextTick();
@@ -61,7 +60,7 @@ describe('Release block milestone info', () => {
expect(milestoneLink.text()).toBe(m.title);
expect(milestoneLink.attributes('href')).toBe(m.web_url);
- expect(milestoneLink.attributes('data-original-title')).toBe(m.description);
+ expect(milestoneLink.attributes('title')).toBe(m.description);
});
});
diff --git a/spec/frontend/releases/list/components/release_block_spec.js b/spec/frontend/releases/list/components/release_block_spec.js
index 38c5e4fc0a2..20c25a4aac2 100644
--- a/spec/frontend/releases/list/components/release_block_spec.js
+++ b/spec/frontend/releases/list/components/release_block_spec.js
@@ -34,7 +34,6 @@ describe('Release block', () => {
...featureFlags,
},
},
- sync: false,
});
return wrapper.vm.$nextTick();
@@ -170,7 +169,7 @@ describe('Release block', () => {
releaseClone.tag_name = 'a dangerous tag name <script>alert("hello")</script>';
return factory(releaseClone).then(() => {
- expect(wrapper.attributes().id).toBe('a-dangerous-tag-name-script-alert-hello-script-');
+ expect(wrapper.attributes().id).toBe('a-dangerous-tag-name-script-alert-hello-script');
});
});
@@ -271,7 +270,7 @@ describe('Release block', () => {
expect(milestoneLink.attributes('href')).toBe(milestone.web_url);
- expect(milestoneLink.attributes('data-original-title')).toBe(milestone.description);
+ expect(milestoneLink.attributes('title')).toBe(milestone.description);
});
});
diff --git a/spec/frontend/reports/components/report_item_spec.js b/spec/frontend/reports/components/report_item_spec.js
index bacbb399513..6aac07984e3 100644
--- a/spec/frontend/reports/components/report_item_spec.js
+++ b/spec/frontend/reports/components/report_item_spec.js
@@ -1,6 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import { STATUS_SUCCESS } from '~/reports/constants';
import ReportItem from '~/reports/components/report_item.vue';
+import IssueStatusIcon from '~/reports/components/issue_status_icon.vue';
import { componentNames } from '~/reports/components/issue_body';
describe('ReportItem', () => {
@@ -15,7 +16,7 @@ describe('ReportItem', () => {
},
});
- expect(wrapper.find('issuestatusicon-stub').exists()).toBe(false);
+ expect(wrapper.find(IssueStatusIcon).exists()).toBe(false);
});
it('shows status icon when unspecified', () => {
@@ -27,7 +28,7 @@ describe('ReportItem', () => {
},
});
- expect(wrapper.find('issuestatusicon-stub').exists()).toBe(true);
+ expect(wrapper.find(IssueStatusIcon).exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/repository/components/__snapshots__/directory_download_links_spec.js.snap b/spec/frontend/repository/components/__snapshots__/directory_download_links_spec.js.snap
index 31a1cd23060..6968fb3e153 100644
--- a/spec/frontend/repository/components/__snapshots__/directory_download_links_spec.js.snap
+++ b/spec/frontend/repository/components/__snapshots__/directory_download_links_spec.js.snap
@@ -16,22 +16,22 @@ exports[`Repository directory download links component renders downloads links f
<div
class="btn-group ml-0 w-100"
>
- <gllink-stub
+ <gl-link-stub
class="btn btn-xs btn-primary"
href="http://test.com/?path=app"
>
zip
- </gllink-stub>
- <gllink-stub
+ </gl-link-stub>
+ <gl-link-stub
class="btn btn-xs"
href="http://test.com/?path=app"
>
tar
- </gllink-stub>
+ </gl-link-stub>
</div>
</div>
</section>
@@ -53,22 +53,22 @@ exports[`Repository directory download links component renders downloads links f
<div
class="btn-group ml-0 w-100"
>
- <gllink-stub
+ <gl-link-stub
class="btn btn-xs btn-primary"
href="http://test.com/?path=app/assets"
>
zip
- </gllink-stub>
- <gllink-stub
+ </gl-link-stub>
+ <gl-link-stub
class="btn btn-xs"
href="http://test.com/?path=app/assets"
>
tar
- </gllink-stub>
+ </gl-link-stub>
</div>
</div>
</section>
diff --git a/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap b/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap
index 706c26403c0..1497539a0c1 100644
--- a/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap
+++ b/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap
@@ -4,7 +4,7 @@ exports[`Repository last commit component renders commit widget 1`] = `
<div
class="info-well d-none d-sm-flex project-last-commit commit p-3"
>
- <useravatarlink-stub
+ <user-avatar-link-stub
class="avatar-cell"
imgalt=""
imgcssclasses=""
@@ -22,32 +22,32 @@ exports[`Repository last commit component renders commit widget 1`] = `
<div
class="commit-content qa-commit-content"
>
- <gllink-stub
+ <gl-link-stub
class="commit-row-message item-title"
href="https://test.com/commit/123"
>
Commit title
- </gllink-stub>
+ </gl-link-stub>
<!---->
<div
class="committer"
>
- <gllink-stub
+ <gl-link-stub
class="commit-author-link js-user-link"
href="https://test.com/test"
>
Test
- </gllink-stub>
+ </gl-link-stub>
authored
- <timeagotooltip-stub
+ <timeago-tooltip-stub
cssclass=""
time="2019-01-01"
tooltipplacement="bottom"
@@ -65,19 +65,18 @@ exports[`Repository last commit component renders commit widget 1`] = `
<div
class="ci-status-link"
>
- <gllink-stub
+ <gl-link-stub
class="js-commit-pipeline"
- data-original-title="Commit: failed"
href="https://test.com/pipeline"
- title=""
+ title="Commit: failed"
>
- <ciicon-stub
+ <ci-icon-stub
aria-label="Commit: failed"
cssclasses=""
size="24"
status="[object Object]"
/>
- </gllink-stub>
+ </gl-link-stub>
</div>
<div
@@ -91,7 +90,7 @@ exports[`Repository last commit component renders commit widget 1`] = `
</div>
- <clipboardbutton-stub
+ <clipboard-button-stub
cssclass="btn-default"
text="123456789"
title="Copy commit SHA"
@@ -107,7 +106,7 @@ exports[`Repository last commit component renders the signature HTML as returned
<div
class="info-well d-none d-sm-flex project-last-commit commit p-3"
>
- <useravatarlink-stub
+ <user-avatar-link-stub
class="avatar-cell"
imgalt=""
imgcssclasses=""
@@ -125,32 +124,32 @@ exports[`Repository last commit component renders the signature HTML as returned
<div
class="commit-content qa-commit-content"
>
- <gllink-stub
+ <gl-link-stub
class="commit-row-message item-title"
href="https://test.com/commit/123"
>
Commit title
- </gllink-stub>
+ </gl-link-stub>
<!---->
<div
class="committer"
>
- <gllink-stub
+ <gl-link-stub
class="commit-author-link js-user-link"
href="https://test.com/test"
>
Test
- </gllink-stub>
+ </gl-link-stub>
authored
- <timeagotooltip-stub
+ <timeago-tooltip-stub
cssclass=""
time="2019-01-01"
tooltipplacement="bottom"
@@ -172,19 +171,18 @@ exports[`Repository last commit component renders the signature HTML as returned
<div
class="ci-status-link"
>
- <gllink-stub
+ <gl-link-stub
class="js-commit-pipeline"
- data-original-title="Commit: failed"
href="https://test.com/pipeline"
- title=""
+ title="Commit: failed"
>
- <ciicon-stub
+ <ci-icon-stub
aria-label="Commit: failed"
cssclasses=""
size="24"
status="[object Object]"
/>
- </gllink-stub>
+ </gl-link-stub>
</div>
<div
@@ -198,7 +196,7 @@ exports[`Repository last commit component renders the signature HTML as returned
</div>
- <clipboardbutton-stub
+ <clipboard-button-stub
cssclass="btn-default"
text="123456789"
title="Copy commit SHA"
diff --git a/spec/frontend/repository/components/breadcrumbs_spec.js b/spec/frontend/repository/components/breadcrumbs_spec.js
index 707eae34793..bc2abb3db1a 100644
--- a/spec/frontend/repository/components/breadcrumbs_spec.js
+++ b/spec/frontend/repository/components/breadcrumbs_spec.js
@@ -49,7 +49,9 @@ describe('Repository breadcrumbs component', () => {
vm.setData({ userPermissions: { forkProject: false, createMergeRequestIn: false } });
- expect(vm.find(GlDropdown).exists()).toBe(false);
+ return vm.vm.$nextTick(() => {
+ expect(vm.find(GlDropdown).exists()).toBe(false);
+ });
});
it('renders add to tree dropdown when permissions are true', () => {
@@ -57,6 +59,8 @@ describe('Repository breadcrumbs component', () => {
vm.setData({ userPermissions: { forkProject: true, createMergeRequestIn: true } });
- expect(vm.find(GlDropdown).exists()).toBe(true);
+ return vm.vm.$nextTick(() => {
+ expect(vm.find(GlDropdown).exists()).toBe(true);
+ });
});
});
diff --git a/spec/frontend/repository/components/last_commit_spec.js b/spec/frontend/repository/components/last_commit_spec.js
index e07ad4cf46b..d2576ec26b7 100644
--- a/spec/frontend/repository/components/last_commit_spec.js
+++ b/spec/frontend/repository/components/last_commit_spec.js
@@ -6,7 +6,7 @@ import UserAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link
let vm;
function createCommitData(data = {}) {
- return {
+ const defaultData = {
sha: '123456789',
title: 'Commit title',
message: 'Commit message',
@@ -26,8 +26,8 @@ function createCommitData(data = {}) {
group: {},
},
},
- ...data,
};
+ return Object.assign(defaultData, data);
}
function factory(commit = createCommitData(), loading = false) {
@@ -46,6 +46,8 @@ function factory(commit = createCommitData(), loading = false) {
vm.vm.$apollo.queries.commit.loading = loading;
}
+const emptyMessageClass = 'font-italic';
+
describe('Repository last commit component', () => {
afterEach(() => {
vm.destroy();
@@ -58,59 +60,89 @@ describe('Repository last commit component', () => {
`('$label when loading icon $loading is true', ({ loading }) => {
factory(createCommitData(), loading);
- expect(vm.find(GlLoadingIcon).exists()).toBe(loading);
+ return vm.vm.$nextTick(() => {
+ expect(vm.find(GlLoadingIcon).exists()).toBe(loading);
+ });
});
it('renders commit widget', () => {
factory();
- expect(vm.element).toMatchSnapshot();
+ return vm.vm.$nextTick(() => {
+ expect(vm.element).toMatchSnapshot();
+ });
});
it('renders short commit ID', () => {
factory();
- expect(vm.find('.label-monospace').text()).toEqual('12345678');
+ return vm.vm.$nextTick(() => {
+ expect(vm.find('.label-monospace').text()).toEqual('12345678');
+ });
});
it('hides pipeline components when pipeline does not exist', () => {
factory(createCommitData({ pipeline: null }));
- expect(vm.find('.js-commit-pipeline').exists()).toBe(false);
+ return vm.vm.$nextTick(() => {
+ expect(vm.find('.js-commit-pipeline').exists()).toBe(false);
+ });
});
it('renders pipeline components', () => {
factory();
- expect(vm.find('.js-commit-pipeline').exists()).toBe(true);
+ return vm.vm.$nextTick(() => {
+ expect(vm.find('.js-commit-pipeline').exists()).toBe(true);
+ });
});
it('hides author component when author does not exist', () => {
factory(createCommitData({ author: null }));
- expect(vm.find('.js-user-link').exists()).toBe(false);
- expect(vm.find(UserAvatarLink).exists()).toBe(false);
+ return vm.vm.$nextTick(() => {
+ expect(vm.find('.js-user-link').exists()).toBe(false);
+ expect(vm.find(UserAvatarLink).exists()).toBe(false);
+ });
});
it('does not render description expander when description is null', () => {
factory(createCommitData({ description: null }));
- expect(vm.find('.text-expander').exists()).toBe(false);
- expect(vm.find('.commit-row-description').exists()).toBe(false);
+ return vm.vm.$nextTick(() => {
+ expect(vm.find('.text-expander').exists()).toBe(false);
+ expect(vm.find('.commit-row-description').exists()).toBe(false);
+ });
});
it('expands commit description when clicking expander', () => {
factory(createCommitData({ description: 'Test description' }));
- vm.find('.text-expander').vm.$emit('click');
-
- expect(vm.find('.commit-row-description').isVisible()).toBe(true);
- expect(vm.find('.text-expander').classes('open')).toBe(true);
+ return vm.vm
+ .$nextTick()
+ .then(() => {
+ vm.find('.text-expander').vm.$emit('click');
+ return vm.vm.$nextTick();
+ })
+ .then(() => {
+ expect(vm.find('.commit-row-description').isVisible()).toBe(true);
+ expect(vm.find('.text-expander').classes('open')).toBe(true);
+ });
});
it('renders the signature HTML as returned by the backend', () => {
factory(createCommitData({ signatureHtml: '<button>Verified</button>' }));
- expect(vm.element).toMatchSnapshot();
+ return vm.vm.$nextTick().then(() => {
+ expect(vm.element).toMatchSnapshot();
+ });
+ });
+
+ it('sets correct CSS class if the commit message is empty', () => {
+ factory(createCommitData({ message: '' }));
+
+ return vm.vm.$nextTick().then(() => {
+ expect(vm.find('.item-title').classes()).toContain(emptyMessageClass);
+ });
});
});
diff --git a/spec/frontend/repository/components/preview/__snapshots__/index_spec.js.snap b/spec/frontend/repository/components/preview/__snapshots__/index_spec.js.snap
index cdc7ece89f6..8eeae9b8455 100644
--- a/spec/frontend/repository/components/preview/__snapshots__/index_spec.js.snap
+++ b/spec/frontend/repository/components/preview/__snapshots__/index_spec.js.snap
@@ -15,13 +15,13 @@ exports[`Repository file preview component renders file HTML 1`] = `
class="fa fa-file-text-o fa-fw"
/>
- <gllink-stub
+ <gl-link-stub
href="http://test.com"
>
<strong>
README.md
</strong>
- </gllink-stub>
+ </gl-link-stub>
</div>
</div>
diff --git a/spec/frontend/repository/components/preview/index_spec.js b/spec/frontend/repository/components/preview/index_spec.js
index 0112e6310f4..7587ca4186c 100644
--- a/spec/frontend/repository/components/preview/index_spec.js
+++ b/spec/frontend/repository/components/preview/index_spec.js
@@ -33,7 +33,9 @@ describe('Repository file preview component', () => {
vm.setData({ readme: { html: '<div class="blob">test</div>' } });
- expect(vm.element).toMatchSnapshot();
+ return vm.vm.$nextTick(() => {
+ expect(vm.element).toMatchSnapshot();
+ });
});
it('renders loading icon', () => {
@@ -44,6 +46,8 @@ describe('Repository file preview component', () => {
vm.setData({ loading: 1 });
- expect(vm.find(GlLoadingIcon).exists()).toBe(true);
+ return vm.vm.$nextTick(() => {
+ expect(vm.find(GlLoadingIcon).exists()).toBe(true);
+ });
});
});
diff --git a/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap b/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap
index f8e65a51297..22e353dddc5 100644
--- a/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap
+++ b/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap
@@ -32,7 +32,7 @@ exports[`Repository table row component renders table row 1`] = `
<td
class="d-none d-sm-table-cell tree-commit"
>
- <glskeletonloading-stub
+ <gl-skeleton-loading-stub
class="h-auto"
lines="1"
/>
@@ -41,7 +41,7 @@ exports[`Repository table row component renders table row 1`] = `
<td
class="tree-time-ago text-right"
>
- <glskeletonloading-stub
+ <gl-skeleton-loading-stub
class="ml-auto h-auto w-50"
lines="1"
/>
diff --git a/spec/frontend/repository/components/table/index_spec.js b/spec/frontend/repository/components/table/index_spec.js
index 41450becabb..9db90839b29 100644
--- a/spec/frontend/repository/components/table/index_spec.js
+++ b/spec/frontend/repository/components/table/index_spec.js
@@ -53,9 +53,11 @@ describe('Repository table component', () => {
vm.setData({ ref });
- expect(vm.find('.table').attributes('aria-label')).toEqual(
- `Files, directories, and submodules in the path ${path} for commit reference ${ref}`,
- );
+ return vm.vm.$nextTick(() => {
+ expect(vm.find('.table').attributes('aria-label')).toEqual(
+ `Files, directories, and submodules in the path ${path} for commit reference ${ref}`,
+ );
+ });
});
it('shows loading icon', () => {
diff --git a/spec/frontend/repository/components/table/parent_row_spec.js b/spec/frontend/repository/components/table/parent_row_spec.js
index 7020055271f..439c7ff080c 100644
--- a/spec/frontend/repository/components/table/parent_row_spec.js
+++ b/spec/frontend/repository/components/table/parent_row_spec.js
@@ -1,10 +1,11 @@
import { shallowMount, RouterLinkStub } from '@vue/test-utils';
+import { GlLoadingIcon } from '@gitlab/ui';
import ParentRow from '~/repository/components/table/parent_row.vue';
let vm;
let $router;
-function factory(path) {
+function factory(path, loadingPath) {
$router = {
push: jest.fn(),
};
@@ -13,6 +14,7 @@ function factory(path) {
propsData: {
commitRef: 'master',
path,
+ loadingPath,
},
stubs: {
RouterLink: RouterLinkStub,
@@ -61,4 +63,10 @@ describe('Repository parent row component', () => {
path: '/tree/master/app',
});
});
+
+ it('renders loading icon when loading parent', () => {
+ factory('app/assets', 'app');
+
+ expect(vm.find(GlLoadingIcon).exists()).toBe(true);
+ });
});
diff --git a/spec/frontend/repository/components/table/row_spec.js b/spec/frontend/repository/components/table/row_spec.js
index 94fa8b1e363..b60560366a6 100644
--- a/spec/frontend/repository/components/table/row_spec.js
+++ b/spec/frontend/repository/components/table/row_spec.js
@@ -1,5 +1,5 @@
import { shallowMount, RouterLinkStub } from '@vue/test-utils';
-import { GlBadge, GlLink } from '@gitlab/ui';
+import { GlBadge, GlLink, GlLoadingIcon } from '@gitlab/ui';
import { visitUrl } from '~/lib/utils/url_utility';
import TableRow from '~/repository/components/table/row.vue';
import Icon from '~/vue_shared/components/icon.vue';
@@ -46,7 +46,9 @@ describe('Repository table row component', () => {
currentPath: '/',
});
- expect(vm.element).toMatchSnapshot();
+ return vm.vm.$nextTick().then(() => {
+ expect(vm.element).toMatchSnapshot();
+ });
});
it.each`
@@ -63,7 +65,9 @@ describe('Repository table row component', () => {
currentPath: '/',
});
- expect(vm.find(component).exists()).toBe(true);
+ return vm.vm.$nextTick().then(() => {
+ expect(vm.find(component).exists()).toBe(true);
+ });
});
it.each`
@@ -80,13 +84,15 @@ describe('Repository table row component', () => {
currentPath: '/',
});
- vm.trigger('click');
+ return vm.vm.$nextTick().then(() => {
+ vm.trigger('click');
- if (pushes) {
- expect($router.push).toHaveBeenCalledWith({ path: '/tree/master/test' });
- } else {
- expect($router.push).not.toHaveBeenCalled();
- }
+ if (pushes) {
+ expect($router.push).toHaveBeenCalledWith({ path: '/tree/master/test' });
+ } else {
+ expect($router.push).not.toHaveBeenCalled();
+ }
+ });
});
it.each`
@@ -103,13 +109,17 @@ describe('Repository table row component', () => {
currentPath: '/',
});
- vm.trigger('click');
+ return vm.vm.$nextTick().then(() => {
+ vm.trigger('click');
- if (pushes) {
- expect(visitUrl).not.toHaveBeenCalled();
- } else {
- expect(visitUrl).toHaveBeenCalledWith('https://test.com', undefined);
- }
+ if (pushes) {
+ expect(visitUrl).not.toHaveBeenCalled();
+ } else {
+ const [url, external] = visitUrl.mock.calls[0];
+ expect(url).toBe('https://test.com');
+ expect(external).toBeFalsy();
+ }
+ });
});
it('renders commit ID for submodule', () => {
@@ -121,7 +131,9 @@ describe('Repository table row component', () => {
currentPath: '/',
});
- expect(vm.find('.commit-sha').text()).toContain('1');
+ return vm.vm.$nextTick().then(() => {
+ expect(vm.find('.commit-sha').text()).toContain('1');
+ });
});
it('renders link with href', () => {
@@ -134,7 +146,9 @@ describe('Repository table row component', () => {
currentPath: '/',
});
- expect(vm.find('a').attributes('href')).toEqual('https://test.com');
+ return vm.vm.$nextTick().then(() => {
+ expect(vm.find('a').attributes('href')).toEqual('https://test.com');
+ });
});
it('renders LFS badge', () => {
@@ -147,7 +161,9 @@ describe('Repository table row component', () => {
lfsOid: '1',
});
- expect(vm.find(GlBadge).exists()).toBe(true);
+ return vm.vm.$nextTick().then(() => {
+ expect(vm.find(GlBadge).exists()).toBe(true);
+ });
});
it('renders commit and web links with href for submodule', () => {
@@ -161,8 +177,10 @@ describe('Repository table row component', () => {
currentPath: '/',
});
- expect(vm.find('a').attributes('href')).toEqual('https://test.com');
- expect(vm.find(GlLink).attributes('href')).toEqual('https://test.com/commit');
+ return vm.vm.$nextTick().then(() => {
+ expect(vm.find('a').attributes('href')).toEqual('https://test.com');
+ expect(vm.find(GlLink).attributes('href')).toEqual('https://test.com/commit');
+ });
});
it('renders lock icon', () => {
@@ -176,6 +194,21 @@ describe('Repository table row component', () => {
vm.setData({ commit: { lockLabel: 'Locked by Root', committedDate: '2019-01-01' } });
- expect(vm.find(Icon).exists()).toBe(true);
+ return vm.vm.$nextTick().then(() => {
+ expect(vm.find(Icon).exists()).toBe(true);
+ });
+ });
+
+ it('renders loading icon when path is loading', () => {
+ factory({
+ id: '1',
+ sha: '1',
+ path: 'test',
+ type: 'tree',
+ currentPath: '/',
+ loadingPath: 'test',
+ });
+
+ expect(vm.find(GlLoadingIcon).exists()).toBe(true);
});
});
diff --git a/spec/frontend/repository/components/tree_content_spec.js b/spec/frontend/repository/components/tree_content_spec.js
index 148e307a5d4..da892ce51d8 100644
--- a/spec/frontend/repository/components/tree_content_spec.js
+++ b/spec/frontend/repository/components/tree_content_spec.js
@@ -30,7 +30,9 @@ describe('Repository table component', () => {
vm.setData({ entries: { blobs: [{ name: 'README.md' }] } });
- expect(vm.find(FilePreview).exists()).toBe(true);
+ return vm.vm.$nextTick().then(() => {
+ expect(vm.find(FilePreview).exists()).toBe(true);
+ });
});
describe('normalizeData', () => {
diff --git a/spec/frontend/repository/utils/readme_spec.js b/spec/frontend/repository/utils/readme_spec.js
index 6b7876c8947..985d947a0af 100644
--- a/spec/frontend/repository/utils/readme_spec.js
+++ b/spec/frontend/repository/utils/readme_spec.js
@@ -1,33 +1,44 @@
import { readmeFile } from '~/repository/utils/readme';
describe('readmeFile', () => {
- describe('markdown files', () => {
- it('returns markdown file', () => {
- expect(readmeFile([{ name: 'README' }, { name: 'README.md' }])).toEqual({
- name: 'README.md',
- });
+ it('prefers README with markup over plain text README', () => {
+ expect(readmeFile([{ name: 'README' }, { name: 'README.md' }])).toEqual({
+ name: 'README.md',
+ });
+ });
- expect(readmeFile([{ name: 'README' }, { name: 'index.md' }])).toEqual({
- name: 'index.md',
- });
+ it('is case insensitive', () => {
+ expect(readmeFile([{ name: 'README' }, { name: 'readme.rdoc' }])).toEqual({
+ name: 'readme.rdoc',
});
});
- describe('plain files', () => {
- it('returns plain file', () => {
- expect(readmeFile([{ name: 'README' }, { name: 'TEST.md' }])).toEqual({
- name: 'README',
- });
+ it('returns the first README found', () => {
+ expect(readmeFile([{ name: 'INDEX.adoc' }, { name: 'README.md' }])).toEqual({
+ name: 'INDEX.adoc',
+ });
+ });
- expect(readmeFile([{ name: 'readme' }, { name: 'TEST.md' }])).toEqual({
- name: 'readme',
- });
+ it('expects extension to be separated by dot', () => {
+ expect(readmeFile([{ name: 'readmeXorg' }, { name: 'index.org' }])).toEqual({
+ name: 'index.org',
});
});
- describe('non-previewable file', () => {
- it('returns undefined', () => {
- expect(readmeFile([{ name: 'index.js' }, { name: 'TEST.md' }])).toBe(undefined);
+ it('returns plain text README when there is no README with markup', () => {
+ expect(readmeFile([{ name: 'README' }, { name: 'NOT_README.md' }])).toEqual({
+ name: 'README',
});
});
+
+ it('recognizes Readme.txt as a plain text README', () => {
+ expect(readmeFile([{ name: 'Readme.txt' }])).toEqual({
+ name: 'Readme.txt',
+ });
+ });
+
+ it('returns undefined when there are no appropriate files', () => {
+ expect(readmeFile([{ name: 'index.js' }, { name: 'md.README' }])).toBe(undefined);
+ expect(readmeFile([])).toBe(undefined);
+ });
});
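Note: the rewritten spec above pins down readmeFile's selection rules: prefer a README with a markup extension, fall back to a plain-text README, match case-insensitively, and require the extension to follow a dot. A hedged sketch consistent with those assertions; it is not the actual implementation in ~/repository/utils/readme, and the extension list is an assumption:

const MARKUP_EXTENSIONS = ['md', 'rdoc', 'adoc', 'org', 'textile', 'rst'];
const isReadme = name => /^readme(\.|$)/i.test(name) || /^index\./i.test(name);

function readmeFile(blobs) {
  const candidates = blobs.filter(({ name }) => isReadme(name));
  // Prefer the first candidate whose extension is a known markup format.
  const withMarkup = candidates.find(({ name }) =>
    MARKUP_EXTENSIONS.includes(name.split('.').pop().toLowerCase()),
  );
  // Otherwise fall back to a plain-text README (README or Readme.txt).
  return withMarkup || candidates.find(({ name }) => /^readme(\.txt)?$/i.test(name));
}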
diff --git a/spec/frontend/self_monitor/components/__snapshots__/self_monitor_spec.js.snap b/spec/frontend/self_monitor/components/__snapshots__/self_monitor_spec.js.snap
new file mode 100644
index 00000000000..1d0f0c024d6
--- /dev/null
+++ b/spec/frontend/self_monitor/components/__snapshots__/self_monitor_spec.js.snap
@@ -0,0 +1,72 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`self monitor component When the self monitor project has not been created default state to match the default snapshot 1`] = `
+<section
+ class="settings no-animate js-self-monitoring-settings"
+>
+ <div
+ class="settings-header"
+ >
+ <h4
+ class="js-section-header"
+ >
+
+ Self monitoring
+
+ </h4>
+
+ <gl-button-stub
+ class="js-settings-toggle"
+ >
+ Expand
+ </gl-button-stub>
+
+ <p
+ class="js-section-sub-header"
+ >
+
+ Enable or disable instance self monitoring
+
+ </p>
+ </div>
+
+ <div
+ class="settings-content"
+ >
+ <form
+ name="self-monitoring-form"
+ >
+ <p>
+ Enabling this feature creates a project that can be used to monitor the health of your instance.
+ </p>
+
+ <gl-form-group-stub
+ label="Create Project"
+ label-for="self-monitor-toggle"
+ >
+ <gl-toggle-stub
+ labeloff="Toggle Status: OFF"
+ labelon="Toggle Status: ON"
+ name="self-monitor-toggle"
+ />
+ </gl-form-group-stub>
+ </form>
+ </div>
+
+ <gl-modal-stub
+ cancel-title="Cancel"
+ modalclass=""
+ modalid="delete-self-monitor-modal"
+ ok-title="Delete project"
+ ok-variant="danger"
+ title="Disable self monitoring?"
+ titletag="h4"
+ >
+ <div>
+
+ Disabling this feature will delete the self monitoring project. Are you sure you want to delete the project?
+
+ </div>
+ </gl-modal-stub>
+</section>
+`;
diff --git a/spec/frontend/self_monitor/components/self_monitor_spec.js b/spec/frontend/self_monitor/components/self_monitor_spec.js
new file mode 100644
index 00000000000..b95c7514047
--- /dev/null
+++ b/spec/frontend/self_monitor/components/self_monitor_spec.js
@@ -0,0 +1,83 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlButton } from '@gitlab/ui';
+import SelfMonitor from '~/self_monitor/components/self_monitor_form.vue';
+import { createStore } from '~/self_monitor/store';
+
+describe('self monitor component', () => {
+ let wrapper;
+ let store;
+
+ describe('When the self monitor project has not been created', () => {
+ beforeEach(() => {
+ store = createStore({
+ projectEnabled: false,
+ selfMonitorProjectCreated: false,
+ createSelfMonitoringProjectPath: '/create',
+ deleteSelfMonitoringProjectPath: '/delete',
+ });
+ });
+
+ afterEach(() => {
+ if (wrapper.destroy) {
+ wrapper.destroy();
+ }
+ });
+
+ describe('default state', () => {
+ it('to match the default snapshot', () => {
+ wrapper = shallowMount(SelfMonitor, { store });
+
+ expect(wrapper.element).toMatchSnapshot();
+ });
+ });
+
+ it('renders header text', () => {
+ wrapper = shallowMount(SelfMonitor, { store });
+
+ expect(wrapper.find('.js-section-header').text()).toBe('Self monitoring');
+ });
+
+ describe('expand/collapse button', () => {
+ it('renders as an expand button by default', () => {
+ wrapper = shallowMount(SelfMonitor, { store });
+
+ const button = wrapper.find(GlButton);
+
+ expect(button.text()).toBe('Expand');
+ });
+ });
+
+ describe('sub-header', () => {
+ it('renders descriptive text', () => {
+ wrapper = shallowMount(SelfMonitor, { store });
+
+ expect(wrapper.find('.js-section-sub-header').text()).toContain(
+ 'Enable or disable instance self monitoring',
+ );
+ });
+ });
+
+ describe('settings-content', () => {
+ it('renders the form description without a link', () => {
+ wrapper = shallowMount(SelfMonitor, { store });
+
+ expect(wrapper.vm.selfMonitoringFormText).toContain(
+ 'Enabling this feature creates a project that can be used to monitor the health of your instance.',
+ );
+ });
+
+ it('renders the form description with a link', () => {
+ store = createStore({
+ projectEnabled: true,
+ selfMonitorProjectCreated: true,
+ createSelfMonitoringProjectPath: '/create',
+ deleteSelfMonitoringProjectPath: '/delete',
+ });
+
+ wrapper = shallowMount(SelfMonitor, { store });
+
+ expect(wrapper.vm.selfMonitoringFormText).toContain('<a href="http://localhost/">');
+ });
+ });
+ });
+});
diff --git a/spec/frontend/self_monitor/store/actions_spec.js b/spec/frontend/self_monitor/store/actions_spec.js
new file mode 100644
index 00000000000..344dbf11954
--- /dev/null
+++ b/spec/frontend/self_monitor/store/actions_spec.js
@@ -0,0 +1,255 @@
+import axios from 'axios';
+import MockAdapter from 'axios-mock-adapter';
+import testAction from 'helpers/vuex_action_helper';
+import statusCodes from '~/lib/utils/http_status';
+import * as actions from '~/self_monitor/store/actions';
+import * as types from '~/self_monitor/store/mutation_types';
+import createState from '~/self_monitor/store/state';
+
+describe('self monitor actions', () => {
+ let state;
+ let mock;
+
+ beforeEach(() => {
+ state = createState();
+ mock = new MockAdapter(axios);
+ });
+
+ describe('setSelfMonitor', () => {
+ it('commits the SET_ENABLED mutation', done => {
+ testAction(
+ actions.setSelfMonitor,
+ null,
+ state,
+ [{ type: types.SET_ENABLED, payload: null }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('resetAlert', () => {
+ it('commits the SET_ENABLED mutation', done => {
+ testAction(
+ actions.resetAlert,
+ null,
+ state,
+ [{ type: types.SET_SHOW_ALERT, payload: false }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('requestCreateProject', () => {
+ describe('success', () => {
+ beforeEach(() => {
+ state.createProjectEndpoint = '/create';
+ state.createProjectStatusEndpoint = '/create_status';
+ mock.onPost(state.createProjectEndpoint).reply(statusCodes.ACCEPTED, {
+ job_id: '123',
+ });
+ mock.onGet(state.createProjectStatusEndpoint).reply(statusCodes.OK, {
+ project_full_path: '/self-monitor-url',
+ });
+ });
+
+ it('dispatches status request with job data', done => {
+ testAction(
+ actions.requestCreateProject,
+ null,
+ state,
+ [
+ {
+ type: types.SET_LOADING,
+ payload: true,
+ },
+ ],
+ [
+ {
+ type: 'requestCreateProjectStatus',
+ payload: '123',
+ },
+ ],
+ done,
+ );
+ });
+
+ it('dispatches success with project path', done => {
+ testAction(
+ actions.requestCreateProjectStatus,
+ null,
+ state,
+ [],
+ [
+ {
+ type: 'requestCreateProjectSuccess',
+ payload: { project_full_path: '/self-monitor-url' },
+ },
+ ],
+ done,
+ );
+ });
+ });
+
+ describe('error', () => {
+ beforeEach(() => {
+ state.createProjectEndpoint = '/create';
+ mock.onPost(state.createProjectEndpoint).reply(500);
+ });
+
+ it('dispatches error', done => {
+ testAction(
+ actions.requestCreateProject,
+ null,
+ state,
+ [
+ {
+ type: types.SET_LOADING,
+ payload: true,
+ },
+ ],
+ [
+ {
+ type: 'requestCreateProjectError',
+ payload: new Error('Request failed with status code 500'),
+ },
+ ],
+ done,
+ );
+ });
+ });
+
+ describe('requestCreateProjectSuccess', () => {
+ it('should commit the received data', done => {
+ testAction(
+ actions.requestCreateProjectSuccess,
+ { project_full_path: '/self-monitor-url' },
+ state,
+ [
+ { type: types.SET_LOADING, payload: false },
+ { type: types.SET_PROJECT_URL, payload: '/self-monitor-url' },
+ {
+ type: types.SET_ALERT_CONTENT,
+ payload: {
+ actionName: 'viewSelfMonitorProject',
+ actionText: 'View project',
+ message: 'Self monitoring project has been successfully created.',
+ },
+ },
+ { type: types.SET_SHOW_ALERT, payload: true },
+ { type: types.SET_PROJECT_CREATED, payload: true },
+ ],
+ [],
+ done,
+ );
+ });
+ });
+ });
+
+ describe('deleteSelfMonitorProject', () => {
+ describe('success', () => {
+ beforeEach(() => {
+ state.deleteProjectEndpoint = '/delete';
+ state.deleteProjectStatusEndpoint = '/delete-status';
+ mock.onDelete(state.deleteProjectEndpoint).reply(statusCodes.ACCEPTED, {
+ job_id: '456',
+ });
+ mock.onGet(state.deleteProjectStatusEndpoint).reply(statusCodes.OK, {
+ status: 'success',
+ });
+ });
+
+ it('dispatches status request with job data', done => {
+ testAction(
+ actions.requestDeleteProject,
+ null,
+ state,
+ [
+ {
+ type: types.SET_LOADING,
+ payload: true,
+ },
+ ],
+ [
+ {
+ type: 'requestDeleteProjectStatus',
+ payload: '456',
+ },
+ ],
+ done,
+ );
+ });
+
+ it('dispatches success with status', done => {
+ testAction(
+ actions.requestDeleteProjectStatus,
+ null,
+ state,
+ [],
+ [
+ {
+ type: 'requestDeleteProjectSuccess',
+ payload: { status: 'success' },
+ },
+ ],
+ done,
+ );
+ });
+ });
+
+ describe('error', () => {
+ beforeEach(() => {
+ state.deleteProjectEndpoint = '/delete';
+ mock.onDelete(state.deleteProjectEndpoint).reply(500);
+ });
+
+ it('dispatches error', done => {
+ testAction(
+ actions.requestDeleteProject,
+ null,
+ state,
+ [
+ {
+ type: types.SET_LOADING,
+ payload: true,
+ },
+ ],
+ [
+ {
+ type: 'requestDeleteProjectError',
+ payload: new Error('Request failed with status code 500'),
+ },
+ ],
+ done,
+ );
+ });
+ });
+
+ describe('requestDeleteProjectSuccess', () => {
+ it('should commit mutations to remove previously set data', done => {
+ testAction(
+ actions.requestDeleteProjectSuccess,
+ null,
+ state,
+ [
+ { type: types.SET_PROJECT_URL, payload: '' },
+ { type: types.SET_PROJECT_CREATED, payload: false },
+ {
+ type: types.SET_ALERT_CONTENT,
+ payload: {
+ actionName: 'createProject',
+ actionText: 'Undo',
+ message: 'Self monitoring project has been successfully deleted.',
+ },
+ },
+ { type: types.SET_SHOW_ALERT, payload: true },
+ { type: types.SET_LOADING, payload: false },
+ ],
+ [],
+ done,
+ );
+ });
+ });
+ });
+});
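For orientation, a minimal sketch (not the shipped GitLab module) of an action shape that would satisfy the requestCreateProject expectations above, assuming axios-based endpoints and the commit/dispatch names used in the spec; the './mutation_types' path is illustrative:

    import axios from 'axios';
    import * as types from './mutation_types';

    // Commits SET_LOADING, then either hands the job id to the status poller
    // or reports the error, mirroring the mutations/dispatches asserted above.
    export const requestCreateProject = ({ state, commit, dispatch }) => {
      commit(types.SET_LOADING, true);

      return axios
        .post(state.createProjectEndpoint)
        .then(({ data }) => dispatch('requestCreateProjectStatus', data.job_id))
        .catch(error => dispatch('requestCreateProjectError', error));
    };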
diff --git a/spec/frontend/self_monitor/store/mutations_spec.js b/spec/frontend/self_monitor/store/mutations_spec.js
new file mode 100644
index 00000000000..5282ae3b2f5
--- /dev/null
+++ b/spec/frontend/self_monitor/store/mutations_spec.js
@@ -0,0 +1,64 @@
+import mutations from '~/self_monitor/store/mutations';
+import createState from '~/self_monitor/store/state';
+
+describe('self monitoring mutations', () => {
+ let localState;
+
+ beforeEach(() => {
+ localState = createState();
+ });
+
+ describe('SET_ENABLED', () => {
+ it('sets selfMonitor', () => {
+ mutations.SET_ENABLED(localState, true);
+
+ expect(localState.projectEnabled).toBe(true);
+ });
+ });
+
+ describe('SET_PROJECT_CREATED', () => {
+ it('sets projectCreated', () => {
+ mutations.SET_PROJECT_CREATED(localState, true);
+
+ expect(localState.projectCreated).toBe(true);
+ });
+ });
+
+ describe('SET_SHOW_ALERT', () => {
+ it('sets showAlert', () => {
+ mutations.SET_SHOW_ALERT(localState, true);
+
+ expect(localState.showAlert).toBe(true);
+ });
+ });
+
+ describe('SET_PROJECT_URL', () => {
+ it('sets projectPath', () => {
+ mutations.SET_PROJECT_URL(localState, '/url/');
+
+ expect(localState.projectPath).toBe('/url/');
+ });
+ });
+
+ describe('SET_LOADING', () => {
+ it('sets loading', () => {
+ mutations.SET_LOADING(localState, true);
+
+ expect(localState.loading).toBe(true);
+ });
+ });
+
+ describe('SET_ALERT_CONTENT', () => {
+ it('sets alertContent', () => {
+ const alertContent = {
+ message: 'success',
+ actionText: 'undo',
+ actionName: 'createProject',
+ };
+
+ mutations.SET_ALERT_CONTENT(localState, alertContent);
+
+ expect(localState.alertContent).toBe(alertContent);
+ });
+ });
+});
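A minimal sketch of a mutations module that would pass the assertions above; the state field names are taken directly from the expectations, and this is illustrative rather than the shipped file:

    // Each mutation writes one field of the self-monitor state.
    export default {
      SET_ENABLED(state, enabled) {
        state.projectEnabled = enabled;
      },
      SET_PROJECT_CREATED(state, created) {
        state.projectCreated = created;
      },
      SET_SHOW_ALERT(state, show) {
        state.showAlert = show;
      },
      SET_PROJECT_URL(state, url) {
        state.projectPath = url;
      },
      SET_LOADING(state, loading) {
        state.loading = loading;
      },
      SET_ALERT_CONTENT(state, content) {
        state.alertContent = content;
      },
    };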
diff --git a/spec/frontend/sentry_error_stack_trace/components/sentry_error_stack_trace_spec.js b/spec/frontend/sentry_error_stack_trace/components/sentry_error_stack_trace_spec.js
new file mode 100644
index 00000000000..e5f83b6fa49
--- /dev/null
+++ b/spec/frontend/sentry_error_stack_trace/components/sentry_error_stack_trace_spec.js
@@ -0,0 +1,87 @@
+import { createLocalVue, shallowMount } from '@vue/test-utils';
+import Vuex from 'vuex';
+import { GlLoadingIcon } from '@gitlab/ui';
+import Stacktrace from '~/error_tracking/components/stacktrace.vue';
+import SentryErrorStackTrace from '~/sentry_error_stack_trace/components/sentry_error_stack_trace.vue';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('Sentry Error Stack Trace', () => {
+ let actions;
+ let getters;
+ let store;
+ let wrapper;
+
+ function mountComponent({
+ stubs = {
+ stacktrace: Stacktrace,
+ },
+ } = {}) {
+ wrapper = shallowMount(SentryErrorStackTrace, {
+ localVue,
+ stubs,
+ store,
+ propsData: {
+ issueStackTracePath: '/stacktrace',
+ },
+ });
+ }
+
+ beforeEach(() => {
+ actions = {
+ startPollingStacktrace: () => {},
+ };
+
+ getters = {
+ stacktrace: () => [{ context: [1, 2], lineNo: 53, filename: 'index.js' }],
+ };
+
+ const state = {
+ stacktraceData: {},
+ loadingStacktrace: true,
+ };
+
+ store = new Vuex.Store({
+ modules: {
+ details: {
+ namespaced: true,
+ actions,
+ getters,
+ state,
+ },
+ },
+ });
+ });
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ }
+ });
+
+ describe('loading', () => {
+ it('should show spinner while loading', () => {
+ mountComponent();
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.find(Stacktrace).exists()).toBe(false);
+ });
+ });
+
+ describe('Stack trace', () => {
+ beforeEach(() => {
+ store.state.details.loadingStacktrace = false;
+ });
+
+ it('should show stacktrace', () => {
+ mountComponent({ stubs: {} });
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.find(Stacktrace).exists()).toBe(true);
+ });
+
+ it('should not show stacktrace if it does not exist', () => {
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.find(Stacktrace).exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/serverless/components/area_spec.js b/spec/frontend/serverless/components/area_spec.js
index 62005e1981a..8b6f664ae25 100644
--- a/spec/frontend/serverless/components/area_spec.js
+++ b/spec/frontend/serverless/components/area_spec.js
@@ -16,7 +16,6 @@ describe('Area component', () => {
slots: {
default: mockWidgets,
},
- sync: false,
});
});
diff --git a/spec/frontend/serverless/components/environment_row_spec.js b/spec/frontend/serverless/components/environment_row_spec.js
index 866b2165917..a59b4fdbb7b 100644
--- a/spec/frontend/serverless/components/environment_row_spec.js
+++ b/spec/frontend/serverless/components/environment_row_spec.js
@@ -1,20 +1,20 @@
-import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import environmentRowComponent from '~/serverless/components/environment_row.vue';
import { mockServerlessFunctions, mockServerlessFunctionsDiffEnv } from '../mock_data';
import { translate } from '~/serverless/utils';
-const createComponent = (localVue, env, envName) =>
- shallowMount(environmentRowComponent, { localVue, propsData: { env, envName }, sync: false }).vm;
+const createComponent = (env, envName) =>
+ shallowMount(environmentRowComponent, {
+ propsData: { env, envName },
+ }).vm;
describe('environment row component', () => {
describe('default global cluster case', () => {
- let localVue;
let vm;
beforeEach(() => {
- localVue = createLocalVue();
- vm = createComponent(localVue, translate(mockServerlessFunctions.functions)['*'], '*');
+ vm = createComponent(translate(mockServerlessFunctions.functions)['*'], '*');
});
afterEach(() => vm.$destroy());
@@ -44,15 +44,9 @@ describe('environment row component', () => {
describe('default named cluster case', () => {
let vm;
- let localVue;
beforeEach(() => {
- localVue = createLocalVue();
- vm = createComponent(
- localVue,
- translate(mockServerlessFunctionsDiffEnv.functions).test,
- 'test',
- );
+ vm = createComponent(translate(mockServerlessFunctionsDiffEnv.functions).test, 'test');
});
afterEach(() => vm.$destroy());
diff --git a/spec/frontend/serverless/components/function_details_spec.js b/spec/frontend/serverless/components/function_details_spec.js
index 27d3a43db27..40d2bbb0291 100644
--- a/spec/frontend/serverless/components/function_details_spec.js
+++ b/spec/frontend/serverless/components/function_details_spec.js
@@ -41,7 +41,6 @@ describe('functionDetailsComponent', () => {
clustersPath: '/clusters',
helpPath: '/help',
},
- sync: false,
});
expect(
@@ -69,7 +68,6 @@ describe('functionDetailsComponent', () => {
clustersPath: '/clusters',
helpPath: '/help',
},
- sync: false,
});
expect(component.vm.$el.querySelector('p').innerHTML.trim()).toContain('1 pod in use');
@@ -87,7 +85,6 @@ describe('functionDetailsComponent', () => {
clustersPath: '/clusters',
helpPath: '/help',
},
- sync: false,
});
expect(component.vm.$el.querySelector('p').innerHTML.trim()).toContain('3 pods in use');
@@ -105,7 +102,6 @@ describe('functionDetailsComponent', () => {
clustersPath: '/clusters',
helpPath: '/help',
},
- sync: false,
});
expect(
diff --git a/spec/frontend/serverless/components/function_row_spec.js b/spec/frontend/serverless/components/function_row_spec.js
index 559c55a1eb4..76a9e149302 100644
--- a/spec/frontend/serverless/components/function_row_spec.js
+++ b/spec/frontend/serverless/components/function_row_spec.js
@@ -8,7 +8,9 @@ describe('functionRowComponent', () => {
let wrapper;
const createComponent = func => {
- wrapper = shallowMount(functionRowComponent, { propsData: { func }, sync: false });
+ wrapper = shallowMount(functionRowComponent, {
+ propsData: { func },
+ });
};
afterEach(() => {
diff --git a/spec/frontend/serverless/components/functions_spec.js b/spec/frontend/serverless/components/functions_spec.js
index 29d35b5f1a6..8db04409357 100644
--- a/spec/frontend/serverless/components/functions_spec.js
+++ b/spec/frontend/serverless/components/functions_spec.js
@@ -43,7 +43,6 @@ describe('functionsComponent', () => {
helpPath: '',
statusPath: '',
},
- sync: false,
});
expect(component.find(EmptyState).exists()).toBe(true);
@@ -59,7 +58,6 @@ describe('functionsComponent', () => {
helpPath: '',
statusPath: '',
},
- sync: false,
});
expect(component.find(GlLoadingIcon).exists()).toBe(true);
@@ -75,7 +73,6 @@ describe('functionsComponent', () => {
helpPath: '',
statusPath: '',
},
- sync: false,
});
expect(
@@ -102,7 +99,6 @@ describe('functionsComponent', () => {
helpPath: '',
statusPath: '',
},
- sync: false,
});
expect(component.find('.js-functions-wrapper').exists()).toBe(true);
@@ -118,7 +114,6 @@ describe('functionsComponent', () => {
helpPath: 'helpPath',
statusPath,
},
- sync: false,
});
component.vm.$store.dispatch('receiveFunctionsSuccess', mockServerlessFunctions);
diff --git a/spec/frontend/serverless/components/missing_prometheus_spec.js b/spec/frontend/serverless/components/missing_prometheus_spec.js
index 908f534b847..896dc5b43e1 100644
--- a/spec/frontend/serverless/components/missing_prometheus_spec.js
+++ b/spec/frontend/serverless/components/missing_prometheus_spec.js
@@ -9,7 +9,6 @@ const createComponent = missingData =>
helpPath: '/help',
missingData,
},
- sync: false,
});
describe('missingPrometheusComponent', () => {
diff --git a/spec/frontend/serverless/components/pod_box_spec.js b/spec/frontend/serverless/components/pod_box_spec.js
index 8563d29c56b..495d11bd9ec 100644
--- a/spec/frontend/serverless/components/pod_box_spec.js
+++ b/spec/frontend/serverless/components/pod_box_spec.js
@@ -6,7 +6,6 @@ const createComponent = count =>
propsData: {
count,
},
- sync: false,
}).vm;
describe('podBoxComponent', () => {
diff --git a/spec/frontend/serverless/components/url_spec.js b/spec/frontend/serverless/components/url_spec.js
index 9b15df20a89..36dc9e73c74 100644
--- a/spec/frontend/serverless/components/url_spec.js
+++ b/spec/frontend/serverless/components/url_spec.js
@@ -8,7 +8,6 @@ const createComponent = uri =>
propsData: {
uri,
},
- sync: false,
});
describe('urlComponent', () => {
diff --git a/spec/javascripts/shared/popover_spec.js b/spec/frontend/shared/popover_spec.js
index cc2b2014d38..bbde936185e 100644
--- a/spec/javascripts/shared/popover_spec.js
+++ b/spec/frontend/shared/popover_spec.js
@@ -29,7 +29,7 @@ describe('popover', () => {
toggleClass: () => {},
};
- spyOn(context, 'popover').and.callFake(method => {
+ jest.spyOn(context, 'popover').mockImplementation(method => {
expect(method).toEqual('show');
done();
});
@@ -44,7 +44,7 @@ describe('popover', () => {
toggleClass: () => {},
};
- spyOn(context, 'toggleClass').and.callFake((classNames, show) => {
+ jest.spyOn(context, 'toggleClass').mockImplementation((classNames, show) => {
expect(classNames).toEqual('disable-animation js-popover-show');
expect(show).toEqual(true);
done();
@@ -80,7 +80,7 @@ describe('popover', () => {
toggleClass: () => {},
};
- spyOn(context, 'popover').and.callFake(method => {
+ jest.spyOn(context, 'popover').mockImplementation(method => {
expect(method).toEqual('hide');
done();
});
@@ -95,7 +95,7 @@ describe('popover', () => {
toggleClass: () => {},
};
- spyOn(context, 'toggleClass').and.callFake((classNames, show) => {
+ jest.spyOn(context, 'toggleClass').mockImplementation((classNames, show) => {
expect(classNames).toEqual('disable-animation js-popover-show');
expect(show).toEqual(false);
done();
@@ -112,13 +112,13 @@ describe('popover', () => {
length: 0,
};
- spyOn($.fn, 'init').and.callFake(selector =>
- selector === '.popover:hover' ? fakeJquery : $.fn,
- );
- spyOn(togglePopover, 'call');
+ jest
+ .spyOn($.fn, 'init')
+ .mockImplementation(selector => (selector === '.popover:hover' ? fakeJquery : $.fn));
+ jest.spyOn(togglePopover, 'call').mockImplementation(() => {});
mouseleave();
- expect(togglePopover.call).toHaveBeenCalledWith(jasmine.any(Object), false);
+ expect(togglePopover.call).toHaveBeenCalledWith(expect.any(Object), false);
});
it('does not call hide popover if .popover:hover is true', () => {
@@ -126,10 +126,10 @@ describe('popover', () => {
length: 1,
};
- spyOn($.fn, 'init').and.callFake(selector =>
- selector === '.popover:hover' ? fakeJquery : $.fn,
- );
- spyOn(togglePopover, 'call');
+ jest
+ .spyOn($.fn, 'init')
+ .mockImplementation(selector => (selector === '.popover:hover' ? fakeJquery : $.fn));
+ jest.spyOn(togglePopover, 'call').mockImplementation(() => {});
mouseleave();
expect(togglePopover.call).not.toHaveBeenCalledWith(false);
@@ -140,15 +140,15 @@ describe('popover', () => {
const context = {};
it('shows popover', () => {
- spyOn(togglePopover, 'call').and.returnValue(false);
+ jest.spyOn(togglePopover, 'call').mockReturnValue(false);
mouseenter.call(context);
- expect(togglePopover.call).toHaveBeenCalledWith(jasmine.any(Object), true);
+ expect(togglePopover.call).toHaveBeenCalledWith(expect.any(Object), true);
});
  it('registers mouseleave event if popover is shown', done => {
- spyOn(togglePopover, 'call').and.returnValue(true);
- spyOn($.fn, 'on').and.callFake(eventName => {
+ jest.spyOn(togglePopover, 'call').mockReturnValue(true);
+ jest.spyOn($.fn, 'on').mockImplementation(eventName => {
expect(eventName).toEqual('mouseleave');
done();
});
@@ -156,8 +156,8 @@ describe('popover', () => {
});
  it('does not register mouseleave event if popover is not shown', () => {
- spyOn(togglePopover, 'call').and.returnValue(false);
- const spy = spyOn($.fn, 'on').and.callFake(() => {});
+ jest.spyOn(togglePopover, 'call').mockReturnValue(false);
+ const spy = jest.spyOn($.fn, 'on').mockImplementation(() => {});
mouseenter.call(context);
expect(spy).not.toHaveBeenCalled();
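The Jasmine-to-Jest spy conversions in this migrated spec all follow the same pattern; a small self-contained illustration, meant to live inside a Jest test (the names are placeholders, not code from this change):

    const context = { popover: () => {} };

    // Jasmine: spyOn(context, 'popover').and.callFake(fn)
    jest.spyOn(context, 'popover').mockImplementation(() => {});

    // Jasmine: spyOn(context, 'popover').and.returnValue(true)
    jest.spyOn(context, 'popover').mockReturnValue(true);

    // Jasmine: expect(...).toHaveBeenCalledWith(jasmine.any(Object))
    context.popover({});
    expect(context.popover).toHaveBeenCalledWith(expect.any(Object));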
diff --git a/spec/frontend/sidebar/__snapshots__/todo_spec.js.snap b/spec/frontend/sidebar/__snapshots__/todo_spec.js.snap
index 1704206c4ad..0a12eb327de 100644
--- a/spec/frontend/sidebar/__snapshots__/todo_spec.js.snap
+++ b/spec/frontend/sidebar/__snapshots__/todo_spec.js.snap
@@ -26,7 +26,7 @@ exports[`SidebarTodo template renders component container element with proper da
Mark as done
</span>
- <glloadingicon-stub
+ <gl-loading-icon-stub
color="orange"
inline="true"
label="Loading"
diff --git a/spec/frontend/sidebar/assignees_spec.js b/spec/frontend/sidebar/assignees_spec.js
index 14b6da10991..0cb182b2df4 100644
--- a/spec/frontend/sidebar/assignees_spec.js
+++ b/spec/frontend/sidebar/assignees_spec.js
@@ -15,8 +15,6 @@ describe('Assignee component', () => {
const createWrapper = (propsData = getDefaultProps()) => {
wrapper = mount(Assignee, {
propsData,
- sync: false,
- attachToDocument: true,
});
};
@@ -65,7 +63,9 @@ describe('Assignee component', () => {
jest.spyOn(wrapper.vm, '$emit');
wrapper.find('.assign-yourself .btn-link').trigger('click');
- expect(wrapper.emitted('assign-self')).toBeTruthy();
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted('assign-self')).toBeTruthy();
+ });
});
});
@@ -178,7 +178,7 @@ describe('Assignee component', () => {
const userItems = wrapper.findAll('.user-list .user-item a');
expect(userItems.length).toBe(3);
- expect(userItems.at(0).attributes('data-original-title')).toBe(users[2].name);
+ expect(userItems.at(0).attributes('title')).toBe(users[2].name);
});
it('passes the sorted assignees to the collapsed-assignee-list', () => {
diff --git a/spec/frontend/sidebar/components/assignees/assignee_avatar_link_spec.js b/spec/frontend/sidebar/components/assignees/assignee_avatar_link_spec.js
index 9b2e2e38366..03d1ac3ab8d 100644
--- a/spec/frontend/sidebar/components/assignees/assignee_avatar_link_spec.js
+++ b/spec/frontend/sidebar/components/assignees/assignee_avatar_link_spec.js
@@ -23,9 +23,7 @@ describe('AssigneeAvatarLink component', () => {
};
wrapper = shallowMount(AssigneeAvatarLink, {
- attachToDocument: true,
propsData,
- sync: false,
});
}
@@ -33,7 +31,7 @@ describe('AssigneeAvatarLink component', () => {
wrapper.destroy();
});
- const findTooltipText = () => wrapper.attributes('data-original-title');
+ const findTooltipText = () => wrapper.attributes('title');
it('has the root url present in the assigneeUrl method', () => {
createComponent();
diff --git a/spec/frontend/sidebar/components/assignees/assignee_avatar_spec.js b/spec/frontend/sidebar/components/assignees/assignee_avatar_spec.js
index e925da0e4c2..7df37d11987 100644
--- a/spec/frontend/sidebar/components/assignees/assignee_avatar_spec.js
+++ b/spec/frontend/sidebar/components/assignees/assignee_avatar_spec.js
@@ -20,7 +20,6 @@ describe('AssigneeAvatar', () => {
wrapper = shallowMount(AssigneeAvatar, {
propsData,
- sync: false,
});
}
diff --git a/spec/frontend/sidebar/components/assignees/collapsed_assignee_list_spec.js b/spec/frontend/sidebar/components/assignees/collapsed_assignee_list_spec.js
index 6aa7b166804..a1e19c1dd8e 100644
--- a/spec/frontend/sidebar/components/assignees/collapsed_assignee_list_spec.js
+++ b/spec/frontend/sidebar/components/assignees/collapsed_assignee_list_spec.js
@@ -16,16 +16,14 @@ describe('CollapsedAssigneeList component', () => {
};
wrapper = shallowMount(CollapsedAssigneeList, {
- attachToDocument: true,
propsData,
- sync: false,
});
}
const findNoUsersIcon = () => wrapper.find('i[aria-label=None]');
const findAvatarCounter = () => wrapper.find('.avatar-counter');
const findAssignees = () => wrapper.findAll(CollapsedAssignee);
- const getTooltipTitle = () => wrapper.attributes('data-original-title');
+ const getTooltipTitle = () => wrapper.attributes('title');
afterEach(() => {
wrapper.destroy();
diff --git a/spec/frontend/sidebar/components/assignees/collapsed_assignee_spec.js b/spec/frontend/sidebar/components/assignees/collapsed_assignee_spec.js
index f9ca7bc1ecb..49a6d9e8ae6 100644
--- a/spec/frontend/sidebar/components/assignees/collapsed_assignee_spec.js
+++ b/spec/frontend/sidebar/components/assignees/collapsed_assignee_spec.js
@@ -18,7 +18,6 @@ describe('CollapsedAssignee assignee component', () => {
wrapper = shallowMount(CollapsedAssignee, {
propsData,
- sync: false,
});
}
diff --git a/spec/frontend/sidebar/components/assignees/uncollapsed_assignee_list_spec.js b/spec/frontend/sidebar/components/assignees/uncollapsed_assignee_list_spec.js
index 8381cc25db4..1cf0af48bef 100644
--- a/spec/frontend/sidebar/components/assignees/uncollapsed_assignee_list_spec.js
+++ b/spec/frontend/sidebar/components/assignees/uncollapsed_assignee_list_spec.js
@@ -18,8 +18,6 @@ describe('UncollapsedAssigneeList component', () => {
};
wrapper = mount(UncollapsedAssigneeList, {
- attachToDocument: true,
- sync: false,
propsData,
});
}
diff --git a/spec/frontend/sidebar/confidential_issue_sidebar_spec.js b/spec/frontend/sidebar/confidential_issue_sidebar_spec.js
index 432ec111e52..13b7c426366 100644
--- a/spec/frontend/sidebar/confidential_issue_sidebar_spec.js
+++ b/spec/frontend/sidebar/confidential_issue_sidebar_spec.js
@@ -37,7 +37,6 @@ describe('Confidential Issue Sidebar Block', () => {
service,
...propsData,
},
- sync: false,
});
};
@@ -78,21 +77,29 @@ describe('Confidential Issue Sidebar Block', () => {
it('displays the edit form when editable', () => {
wrapper.setData({ edit: false });
- wrapper.find({ ref: 'editLink' }).trigger('click');
-
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.find(EditForm).exists()).toBe(true);
- });
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ wrapper.find({ ref: 'editLink' }).trigger('click');
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ expect(wrapper.find(EditForm).exists()).toBe(true);
+ });
});
it('displays the edit form when opened from collapsed state', () => {
wrapper.setData({ edit: false });
- wrapper.find({ ref: 'collapseIcon' }).trigger('click');
-
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.find(EditForm).exists()).toBe(true);
- });
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ wrapper.find({ ref: 'collapseIcon' }).trigger('click');
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ expect(wrapper.find(EditForm).exists()).toBe(true);
+ });
});
it('tracks the event when "Edit" is clicked', () => {
diff --git a/spec/frontend/sidebar/sidebar_store_spec.js b/spec/frontend/sidebar/sidebar_store_spec.js
new file mode 100644
index 00000000000..6d063a7cfcf
--- /dev/null
+++ b/spec/frontend/sidebar/sidebar_store_spec.js
@@ -0,0 +1,168 @@
+import SidebarStore from '~/sidebar/stores/sidebar_store';
+import Mock from './mock_data';
+import UsersMockHelper from '../helpers/user_mock_data_helper';
+
+const ASSIGNEE = {
+ id: 2,
+ name: 'gitlab user 2',
+ username: 'gitlab2',
+ avatar_url: 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+};
+
+const ANOTHER_ASSIGNEE = {
+ id: 3,
+ name: 'gitlab user 3',
+ username: 'gitlab3',
+ avatar_url: 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+};
+
+const PARTICIPANT = {
+ id: 1,
+ state: 'active',
+ username: 'marcene',
+ name: 'Allie Will',
+ web_url: 'foo.com',
+ avatar_url: 'gravatar.com/avatar/xxx',
+};
+
+const PARTICIPANT_LIST = [PARTICIPANT, { ...PARTICIPANT, id: 2 }, { ...PARTICIPANT, id: 3 }];
+
+describe('Sidebar store', () => {
+ let testContext;
+
+ beforeEach(() => {
+ testContext = {};
+ });
+
+ beforeEach(() => {
+ testContext.store = new SidebarStore({
+ currentUser: {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ },
+ editable: true,
+ rootPath: '/',
+ endpoint: '/gitlab-org/gitlab-shell/issues/5.json',
+ });
+ });
+
+ afterEach(() => {
+ SidebarStore.singleton = null;
+ });
+
+ it('has default isFetching values', () => {
+ expect(testContext.store.isFetching.assignees).toBe(true);
+ });
+
+ it('adds a new assignee', () => {
+ testContext.store.addAssignee(ASSIGNEE);
+
+ expect(testContext.store.assignees.length).toEqual(1);
+ });
+
+ it('removes an assignee', () => {
+ testContext.store.removeAssignee(ASSIGNEE);
+
+ expect(testContext.store.assignees.length).toEqual(0);
+ });
+
+ it('finds an existing assignee', () => {
+ let foundAssignee;
+
+ testContext.store.addAssignee(ASSIGNEE);
+ foundAssignee = testContext.store.findAssignee(ASSIGNEE);
+
+ expect(foundAssignee).toBeDefined();
+ expect(foundAssignee).toEqual(ASSIGNEE);
+ foundAssignee = testContext.store.findAssignee(ANOTHER_ASSIGNEE);
+
+ expect(foundAssignee).toBeUndefined();
+ });
+
+ it('removes all assignees', () => {
+ testContext.store.removeAllAssignees();
+
+ expect(testContext.store.assignees.length).toEqual(0);
+ });
+
+ it('sets participants data', () => {
+ expect(testContext.store.participants.length).toEqual(0);
+
+ testContext.store.setParticipantsData({
+ participants: PARTICIPANT_LIST,
+ });
+
+ expect(testContext.store.isFetching.participants).toEqual(false);
+ expect(testContext.store.participants.length).toEqual(PARTICIPANT_LIST.length);
+ });
+
+ it('sets subscriptions data', () => {
+ expect(testContext.store.subscribed).toEqual(null);
+
+ testContext.store.setSubscriptionsData({
+ subscribed: true,
+ });
+
+ expect(testContext.store.isFetching.subscriptions).toEqual(false);
+ expect(testContext.store.subscribed).toEqual(true);
+ });
+
+ it('sets assignee data', () => {
+ const users = {
+ assignees: UsersMockHelper.createNumberRandomUsers(3),
+ };
+
+ testContext.store.setAssigneeData(users);
+
+ expect(testContext.store.isFetching.assignees).toBe(false);
+ expect(testContext.store.assignees.length).toEqual(3);
+ });
+
+ it('sets fetching state', () => {
+ expect(testContext.store.isFetching.participants).toEqual(true);
+
+ testContext.store.setFetchingState('participants', false);
+
+ expect(testContext.store.isFetching.participants).toEqual(false);
+ });
+
+ it('sets loading state', () => {
+ testContext.store.setLoadingState('assignees', true);
+
+ expect(testContext.store.isLoading.assignees).toEqual(true);
+ });
+
+ it('sets time tracking data', () => {
+ testContext.store.setTimeTrackingData(Mock.time);
+
+ expect(testContext.store.timeEstimate).toEqual(Mock.time.time_estimate);
+ expect(testContext.store.totalTimeSpent).toEqual(Mock.time.total_time_spent);
+ expect(testContext.store.humanTimeEstimate).toEqual(Mock.time.human_time_estimate);
+ expect(testContext.store.humanTotalTimeSpent).toEqual(Mock.time.human_total_time_spent);
+ });
+
+ it('sets autocomplete projects', () => {
+ const projects = [{ id: 0 }];
+ testContext.store.setAutocompleteProjects(projects);
+
+ expect(testContext.store.autocompleteProjects).toEqual(projects);
+ });
+
+ it('sets subscribed state', () => {
+ expect(testContext.store.subscribed).toEqual(null);
+
+ testContext.store.setSubscribedState(true);
+
+ expect(testContext.store.subscribed).toEqual(true);
+ });
+
+ it('sets move to project ID', () => {
+ const projectId = 7;
+ testContext.store.setMoveToProjectId(projectId);
+
+ expect(testContext.store.moveToProjectId).toEqual(projectId);
+ });
+});
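For context, a minimal sketch of the assignee handling and singleton behaviour these tests exercise; illustrative only, assuming the real SidebarStore carries many more fields than shown here:

    class SidebarStore {
      constructor(options) {
        if (!SidebarStore.singleton) {
          this.assignees = [];
          this.participants = [];
          this.subscribed = null;
          this.isFetching = { assignees: true, participants: true, subscriptions: true };
          this.isLoading = {};
          Object.assign(this, options);
          SidebarStore.singleton = this;
        }
        return SidebarStore.singleton; // every `new SidebarStore()` yields the same instance
      }

      addAssignee(assignee) {
        if (!this.findAssignee(assignee)) this.assignees.push(assignee);
      }

      findAssignee(needle) {
        return this.assignees.find(({ id }) => id === needle.id);
      }

      removeAssignee(assignee) {
        this.assignees = this.assignees.filter(({ id }) => id !== assignee.id);
      }

      removeAllAssignees() {
        this.assignees = [];
      }
    }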
diff --git a/spec/frontend/sidebar/todo_spec.js b/spec/frontend/sidebar/todo_spec.js
index 5bbb42d402d..18b621cd12d 100644
--- a/spec/frontend/sidebar/todo_spec.js
+++ b/spec/frontend/sidebar/todo_spec.js
@@ -14,7 +14,6 @@ describe('SidebarTodo', () => {
const createComponent = (props = {}) => {
wrapper = shallowMount(SidebarTodos, {
- sync: false,
propsData: {
...defaultProps,
...props,
@@ -60,7 +59,9 @@ describe('SidebarTodo', () => {
createComponent();
wrapper.find('button').trigger('click');
- expect(wrapper.emitted().toggleTodo).toBeTruthy();
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted().toggleTodo).toBeTruthy();
+ });
});
it('renders component container element with proper data attributes', () => {
diff --git a/spec/frontend/snippets/components/app_spec.js b/spec/frontend/snippets/components/app_spec.js
index f2800f9e6af..6576e5b075f 100644
--- a/spec/frontend/snippets/components/app_spec.js
+++ b/spec/frontend/snippets/components/app_spec.js
@@ -2,11 +2,10 @@ import SnippetApp from '~/snippets/components/app.vue';
import SnippetHeader from '~/snippets/components/snippet_header.vue';
import { GlLoadingIcon } from '@gitlab/ui';
-import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
describe('Snippet view app', () => {
let wrapper;
- const localVue = createLocalVue();
const defaultProps = {
snippetGid: 'gid://gitlab/PersonalSnippet/42',
};
@@ -21,9 +20,7 @@ describe('Snippet view app', () => {
};
wrapper = shallowMount(SnippetApp, {
- sync: false,
mocks: { $apollo },
- localVue,
propsData: {
...props,
},
diff --git a/spec/frontend/snippets/components/snippet_header_spec.js b/spec/frontend/snippets/components/snippet_header_spec.js
index 8847a3a6938..5cf20119189 100644
--- a/spec/frontend/snippets/components/snippet_header_spec.js
+++ b/spec/frontend/snippets/components/snippet_header_spec.js
@@ -2,11 +2,10 @@ import SnippetHeader from '~/snippets/components/snippet_header.vue';
import DeleteSnippetMutation from '~/snippets/mutations/deleteSnippet.mutation.graphql';
import { ApolloMutation } from 'vue-apollo';
import { GlButton, GlModal } from '@gitlab/ui';
-import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
describe('Snippet header component', () => {
let wrapper;
- const localVue = createLocalVue();
const snippet = {
snippet: {
id: 'gid://gitlab/PersonalSnippet/50',
@@ -62,9 +61,7 @@ describe('Snippet header component', () => {
};
wrapper = shallowMount(SnippetHeader, {
- sync: false,
mocks: { $apollo },
- localVue,
propsData: {
...defaultProps,
},
diff --git a/spec/frontend/snippets/components/snippet_title_spec.js b/spec/frontend/snippets/components/snippet_title_spec.js
new file mode 100644
index 00000000000..a7efa4ae341
--- /dev/null
+++ b/spec/frontend/snippets/components/snippet_title_spec.js
@@ -0,0 +1,71 @@
+import SnippetTitle from '~/snippets/components/snippet_title.vue';
+import { GlSprintf } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+
+describe('Snippet header component', () => {
+ let wrapper;
+ const title = 'The property of Thor';
+ const description = 'Do not touch this hammer';
+ const snippet = {
+ snippet: {
+ title,
+ description,
+ },
+ };
+
+ function createComponent({ props = snippet } = {}) {
+ const defaultProps = Object.assign({}, props);
+
+ wrapper = shallowMount(SnippetTitle, {
+ propsData: {
+ ...defaultProps,
+ },
+ });
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders itself', () => {
+ createComponent();
+ expect(wrapper.find('.snippet-header').exists()).toBe(true);
+ });
+
+ it('renders snippets title and description', () => {
+ createComponent();
+ expect(wrapper.text().trim()).toContain(title);
+ expect(wrapper.text().trim()).toContain(description);
+ });
+
+ it('does not render recent changes time stamp if there were no updates', () => {
+ createComponent();
+ expect(wrapper.find(GlSprintf).exists()).toBe(false);
+ });
+
+ it('does not render recent changes time stamp if the time for creation and updates match', () => {
+ const props = Object.assign(snippet, {
+ snippet: {
+ ...snippet.snippet,
+ createdAt: '2019-12-16T21:45:36Z',
+ updatedAt: '2019-12-16T21:45:36Z',
+ },
+ });
+ createComponent({ props });
+
+ expect(wrapper.find(GlSprintf).exists()).toBe(false);
+ });
+
+ it('renders translated string with most recent changes timestamp if changes were made', () => {
+ const props = Object.assign(snippet, {
+ snippet: {
+ ...snippet.snippet,
+ createdAt: '2019-12-16T21:45:36Z',
+ updatedAt: '2019-12-20T21:45:36Z',
+ },
+ });
+ createComponent({ props });
+
+ expect(wrapper.find(GlSprintf).exists()).toBe(true);
+ });
+});
diff --git a/spec/javascripts/syntax_highlight_spec.js b/spec/frontend/syntax_highlight_spec.js
index 99c47fa31d4..d2fb5983f7b 100644
--- a/spec/javascripts/syntax_highlight_spec.js
+++ b/spec/frontend/syntax_highlight_spec.js
@@ -3,19 +3,19 @@
import $ from 'jquery';
import syntaxHighlight from '~/syntax_highlight';
-describe('Syntax Highlighter', function() {
- const stubUserColorScheme = function(value) {
+describe('Syntax Highlighter', () => {
+ const stubUserColorScheme = value => {
if (window.gon == null) {
window.gon = {};
}
return (window.gon.user_color_scheme = value);
};
- describe('on a js-syntax-highlight element', function() {
- beforeEach(function() {
- return setFixtures('<div class="js-syntax-highlight"></div>');
+ describe('on a js-syntax-highlight element', () => {
+ beforeEach(() => {
+ setFixtures('<div class="js-syntax-highlight"></div>');
});
- it('applies syntax highlighting', function() {
+ it('applies syntax highlighting', () => {
stubUserColorScheme('monokai');
syntaxHighlight($('.js-syntax-highlight'));
@@ -23,14 +23,14 @@ describe('Syntax Highlighter', function() {
});
});
- describe('on a parent element', function() {
- beforeEach(function() {
- return setFixtures(
+ describe('on a parent element', () => {
+ beforeEach(() => {
+ setFixtures(
'<div class="parent">\n <div class="js-syntax-highlight"></div>\n <div class="foo"></div>\n <div class="js-syntax-highlight"></div>\n</div>',
);
});
- it('applies highlighting to all applicable children', function() {
+ it('applies highlighting to all applicable children', () => {
stubUserColorScheme('monokai');
syntaxHighlight($('.parent'));
@@ -38,11 +38,9 @@ describe('Syntax Highlighter', function() {
expect($('.monokai').length).toBe(2);
});
- it('prevents an infinite loop when no matches exist', function() {
+ it('prevents an infinite loop when no matches exist', () => {
setFixtures('<div></div>');
- const highlight = function() {
- return syntaxHighlight($('div'));
- };
+ const highlight = () => syntaxHighlight($('div'));
expect(highlight).not.toThrow();
});
diff --git a/spec/javascripts/task_list_spec.js b/spec/frontend/task_list_spec.js
index 563f402de58..1261833e3ec 100644
--- a/spec/javascripts/task_list_spec.js
+++ b/spec/frontend/task_list_spec.js
@@ -25,10 +25,10 @@ describe('TaskList', () => {
});
it('should call init when the class constructed', () => {
- spyOn(TaskList.prototype, 'init').and.callThrough();
- spyOn(TaskList.prototype, 'disable');
- spyOn($.prototype, 'taskList');
- spyOn($.prototype, 'on');
+ jest.spyOn(TaskList.prototype, 'init');
+ jest.spyOn(TaskList.prototype, 'disable').mockImplementation(() => {});
+ jest.spyOn($.prototype, 'taskList').mockImplementation(() => {});
+ jest.spyOn($.prototype, 'on').mockImplementation(() => {});
taskList = createTaskList();
const $taskListEl = $(taskList.taskListContainerSelector);
@@ -59,7 +59,7 @@ describe('TaskList', () => {
describe('disableTaskListItems', () => {
it('should call taskList method with disable param', () => {
- spyOn($.prototype, 'taskList');
+ jest.spyOn($.prototype, 'taskList').mockImplementation(() => {});
taskList.disableTaskListItems({ currentTarget });
@@ -69,7 +69,7 @@ describe('TaskList', () => {
describe('enableTaskListItems', () => {
it('should call taskList method with enable param', () => {
- spyOn($.prototype, 'taskList');
+ jest.spyOn($.prototype, 'taskList').mockImplementation(() => {});
taskList.enableTaskListItems({ currentTarget });
@@ -79,8 +79,8 @@ describe('TaskList', () => {
describe('disable', () => {
it('should disable task list items and off document event', () => {
- spyOn(taskList, 'disableTaskListItems');
- spyOn($.prototype, 'off');
+ jest.spyOn(taskList, 'disableTaskListItems').mockImplementation(() => {});
+ jest.spyOn($.prototype, 'off').mockImplementation(() => {});
taskList.disable();
@@ -95,10 +95,10 @@ describe('TaskList', () => {
describe('update', () => {
it('should disable task list items and make a patch request then enable them again', done => {
const response = { data: { lock_version: 3 } };
- spyOn(taskList, 'enableTaskListItems');
- spyOn(taskList, 'disableTaskListItems');
- spyOn(taskList, 'onSuccess');
- spyOn(axios, 'patch').and.returnValue(Promise.resolve(response));
+ jest.spyOn(taskList, 'enableTaskListItems').mockImplementation(() => {});
+ jest.spyOn(taskList, 'disableTaskListItems').mockImplementation(() => {});
+ jest.spyOn(taskList, 'onSuccess').mockImplementation(() => {});
+ jest.spyOn(axios, 'patch').mockReturnValue(Promise.resolve(response));
const value = 'hello world';
const endpoint = '/foo';
@@ -139,9 +139,9 @@ describe('TaskList', () => {
it('should handle request error and enable task list items', done => {
const response = { data: { error: 1 } };
- spyOn(taskList, 'enableTaskListItems');
- spyOn(taskList, 'onError');
- spyOn(axios, 'patch').and.returnValue(Promise.reject({ response })); // eslint-disable-line prefer-promise-reject-errors
+ jest.spyOn(taskList, 'enableTaskListItems').mockImplementation(() => {});
+ jest.spyOn(taskList, 'onError').mockImplementation(() => {});
+ jest.spyOn(axios, 'patch').mockReturnValue(Promise.reject({ response })); // eslint-disable-line prefer-promise-reject-errors
const event = { detail: {} };
taskList
diff --git a/spec/frontend/test_setup.js b/spec/frontend/test_setup.js
index ab42dbe7cd1..203781bb6fc 100644
--- a/spec/frontend/test_setup.js
+++ b/spec/frontend/test_setup.js
@@ -57,6 +57,11 @@ Object.assign(global, {
// custom-jquery-matchers was written for an old Jest version, we need to make it compatible
Object.entries(jqueryMatchers).forEach(([matcherName, matcherFactory]) => {
+ // Don't override existing Jest matcher
+ if (matcherName === 'toHaveLength') {
+ return;
+ }
+
expect.extend({
[matcherName]: matcherFactory().compare,
});
diff --git a/spec/frontend/version_check_image_spec.js b/spec/frontend/version_check_image_spec.js
new file mode 100644
index 00000000000..2ab157105a1
--- /dev/null
+++ b/spec/frontend/version_check_image_spec.js
@@ -0,0 +1,42 @@
+import $ from 'jquery';
+import VersionCheckImage from '~/version_check_image';
+import ClassSpecHelper from './helpers/class_spec_helper';
+
+describe('VersionCheckImage', () => {
+ let testContext;
+
+ beforeEach(() => {
+ testContext = {};
+ });
+
+ describe('bindErrorEvent', () => {
+ ClassSpecHelper.itShouldBeAStaticMethod(VersionCheckImage, 'bindErrorEvent');
+
+ beforeEach(() => {
+ testContext.imageElement = $('<div></div>');
+ });
+
+ it('registers an error event', () => {
+ jest.spyOn($.prototype, 'on').mockImplementation(() => {});
+ // eslint-disable-next-line func-names
+ jest.spyOn($.prototype, 'off').mockImplementation(function() {
+ return this;
+ });
+
+ VersionCheckImage.bindErrorEvent(testContext.imageElement);
+
+ expect($.prototype.off).toHaveBeenCalledWith('error');
+ expect($.prototype.on).toHaveBeenCalledWith('error', expect.any(Function));
+ });
+
+ it('hides the imageElement on error', () => {
+ jest.spyOn($.prototype, 'hide').mockImplementation(() => {});
+
+ VersionCheckImage.bindErrorEvent(testContext.imageElement);
+
+ testContext.imageElement.trigger('error');
+
+ expect($.prototype.hide).toHaveBeenCalled();
+ });
+ });
+});
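A minimal sketch of a VersionCheckImage helper that satisfies the two expectations above (a static method that chains off/on and hides the element on error); illustrative rather than the shipped module:

    export default class VersionCheckImage {
      // Hide the version-check badge if its image fails to load.
      static bindErrorEvent(imageElement) {
        imageElement.off('error').on('error', () => imageElement.hide());
      }
    }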
diff --git a/spec/frontend/vue_mr_widget/components/artifacts_list_app_spec.js b/spec/frontend/vue_mr_widget/components/artifacts_list_app_spec.js
index f979d173eff..1401308f7f0 100644
--- a/spec/frontend/vue_mr_widget/components/artifacts_list_app_spec.js
+++ b/spec/frontend/vue_mr_widget/components/artifacts_list_app_spec.js
@@ -29,7 +29,7 @@ describe('Merge Requests Artifacts list app', () => {
});
const createComponent = () => {
- wrapper = mount(localVue.extend(ArtifactsListApp), {
+ wrapper = mount(ArtifactsListApp, {
propsData: {
endpoint: TEST_HOST,
},
@@ -38,7 +38,6 @@ describe('Merge Requests Artifacts list app', () => {
...actionSpies,
},
localVue,
- sync: false,
});
};
diff --git a/spec/frontend/vue_mr_widget/components/artifacts_list_spec.js b/spec/frontend/vue_mr_widget/components/artifacts_list_spec.js
index 8c805faf574..1b1624e3e8f 100644
--- a/spec/frontend/vue_mr_widget/components/artifacts_list_spec.js
+++ b/spec/frontend/vue_mr_widget/components/artifacts_list_spec.js
@@ -1,23 +1,20 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import { GlLink } from '@gitlab/ui';
import ArtifactsList from '~/vue_merge_request_widget/components/artifacts_list.vue';
import { artifactsList } from './mock_data';
describe('Artifacts List', () => {
let wrapper;
- const localVue = createLocalVue();
const data = {
artifacts: artifactsList,
};
const mountComponent = props => {
- wrapper = shallowMount(localVue.extend(ArtifactsList), {
+ wrapper = shallowMount(ArtifactsList, {
propsData: {
...props,
},
- sync: false,
- localVue,
});
};
diff --git a/spec/frontend/vue_mr_widget/components/mr_collapsible_extension_spec.js b/spec/frontend/vue_mr_widget/components/mr_collapsible_extension_spec.js
index 5f101ba4cf6..a7ecb863cfb 100644
--- a/spec/frontend/vue_mr_widget/components/mr_collapsible_extension_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_collapsible_extension_spec.js
@@ -42,6 +42,7 @@ describe('Merge Request Collapsible Extension', () => {
describe('onClick', () => {
beforeEach(() => {
wrapper.find('button').trigger('click');
+ return wrapper.vm.$nextTick();
});
  it('renders the provided slot', () => {
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_container_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_container_spec.js
index 16c8c939a6f..60f970e0018 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_container_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_container_spec.js
@@ -1,4 +1,4 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import MrWidgetContainer from '~/vue_merge_request_widget/components/mr_widget_container.vue';
const BODY_HTML = '<div class="test-body">Hello World</div>';
@@ -8,10 +8,7 @@ describe('MrWidgetContainer', () => {
let wrapper;
const factory = (options = {}) => {
- const localVue = createLocalVue();
-
- wrapper = shallowMount(localVue.extend(MrWidgetContainer), {
- localVue,
+ wrapper = shallowMount(MrWidgetContainer, {
...options,
});
};
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_icon_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_icon_spec.js
index f7c2376eebf..cee0b9b0118 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_icon_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_icon_spec.js
@@ -1,4 +1,4 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import MrWidgetIcon from '~/vue_merge_request_widget/components/mr_widget_icon.vue';
import Icon from '~/vue_shared/components/icon.vue';
@@ -8,14 +8,10 @@ describe('MrWidgetIcon', () => {
let wrapper;
beforeEach(() => {
- const localVue = createLocalVue();
-
- wrapper = shallowMount(localVue.extend(MrWidgetIcon), {
+ wrapper = shallowMount(MrWidgetIcon, {
propsData: {
name: TEST_ICON,
},
- sync: false,
- localVue,
});
});
diff --git a/spec/frontend/vue_mr_widget/components/states/commit_edit_spec.js b/spec/frontend/vue_mr_widget/components/states/commit_edit_spec.js
index 994d6255324..5d09af50420 100644
--- a/spec/frontend/vue_mr_widget/components/states/commit_edit_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/commit_edit_spec.js
@@ -1,7 +1,6 @@
-import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import CommitEdit from '~/vue_merge_request_widget/components/states/commit_edit.vue';
-const localVue = createLocalVue();
const testCommitMessage = 'Test commit message';
const testLabel = 'Test label';
const testInputId = 'test-input-id';
@@ -10,9 +9,7 @@ describe('Commits edit component', () => {
let wrapper;
const createComponent = (slots = {}) => {
- wrapper = shallowMount(localVue.extend(CommitEdit), {
- localVue,
- sync: false,
+ wrapper = shallowMount(CommitEdit, {
propsData: {
value: testCommitMessage,
label: testLabel,
@@ -55,8 +52,10 @@ describe('Commits edit component', () => {
findTextarea().element.value = changedCommitMessage;
findTextarea().trigger('input');
- expect(wrapper.emitted().input[0]).toEqual([changedCommitMessage]);
- expect(findTextarea().element.value).toBe(changedCommitMessage);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted().input[0]).toEqual([changedCommitMessage]);
+ expect(findTextarea().element.value).toBe(changedCommitMessage);
+ });
});
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js
index 1f4d1e17ea0..98af44b0975 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js
@@ -10,7 +10,6 @@ describe('MRWidgetAutoMergeFailed', () => {
const createComponent = (props = {}) => {
wrapper = shallowMount(AutoMergeFailedComponent, {
- sync: false,
propsData: { ...props },
});
};
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_commit_message_dropdown_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_commit_message_dropdown_spec.js
index daf1cc2d98b..56832f82b05 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_commit_message_dropdown_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_commit_message_dropdown_spec.js
@@ -1,8 +1,7 @@
-import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import { GlDropdownItem } from '@gitlab/ui';
import CommitMessageDropdown from '~/vue_merge_request_widget/components/states/commit_message_dropdown.vue';
-const localVue = createLocalVue();
const commits = [
{
title: 'Commit 1',
@@ -25,9 +24,7 @@ describe('Commits message dropdown component', () => {
let wrapper;
const createComponent = () => {
- wrapper = shallowMount(localVue.extend(CommitMessageDropdown), {
- localVue,
- sync: false,
+ wrapper = shallowMount(CommitMessageDropdown, {
propsData: {
commits,
},
@@ -56,6 +53,8 @@ describe('Commits message dropdown component', () => {
it('should emit a commit title on selecting commit', () => {
findFirstDropdownElement().vm.$emit('click');
- expect(wrapper.emitted().input[0]).toEqual(['Update test.txt']);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted().input[0]).toEqual(['Update test.txt']);
+ });
});
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_commits_header_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_commits_header_spec.js
index 9ee2f88c78d..67746b062b9 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_commits_header_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_commits_header_spec.js
@@ -1,16 +1,12 @@
-import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import CommitsHeader from '~/vue_merge_request_widget/components/states/commits_header.vue';
import Icon from '~/vue_shared/components/icon.vue';
-const localVue = createLocalVue();
-
describe('Commits header component', () => {
let wrapper;
const createComponent = props => {
- wrapper = shallowMount(localVue.extend(CommitsHeader), {
- localVue,
- sync: false,
+ wrapper = shallowMount(CommitsHeader, {
propsData: {
isSquashEnabled: false,
targetBranch: 'master',
@@ -64,7 +60,9 @@ describe('Commits header component', () => {
createComponent();
wrapper.setData({ expanded: false });
- expect(findIcon().props('name')).toBe('chevron-right');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findIcon().props('name')).toBe('chevron-right');
+ });
});
describe('when squash is disabled', () => {
diff --git a/spec/frontend/vue_mr_widget/deployment/deployment_spec.js b/spec/frontend/vue_mr_widget/deployment/deployment_spec.js
index 78e086e473d..2902c8280dd 100644
--- a/spec/frontend/vue_mr_widget/deployment/deployment_spec.js
+++ b/spec/frontend/vue_mr_widget/deployment/deployment_spec.js
@@ -134,7 +134,7 @@ describe('Deployment component', () => {
if (status === SUCCESS) {
expect(wrapper.find(DeploymentViewButton).text()).toContain('View app');
} else {
- expect(wrapper.find(DeploymentViewButton).text()).toContain('View previous app');
+ expect(wrapper.find(DeploymentViewButton).text()).toContain('View latest app');
}
});
}
diff --git a/spec/frontend/vue_mr_widget/deployment/deployment_view_button_spec.js b/spec/frontend/vue_mr_widget/deployment/deployment_view_button_spec.js
index 6e3c6f64c68..5e0f38459b0 100644
--- a/spec/frontend/vue_mr_widget/deployment/deployment_view_button_spec.js
+++ b/spec/frontend/vue_mr_widget/deployment/deployment_view_button_spec.js
@@ -1,16 +1,18 @@
-import { mount, createLocalVue } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
import DeploymentViewButton from '~/vue_merge_request_widget/components/deployment/deployment_view_button.vue';
import ReviewAppLink from '~/vue_merge_request_widget/components/review_app_link.vue';
import deploymentMockData from './deployment_mock_data';
+const appButtonText = {
+ text: 'View app',
+ tooltip: 'View the latest successful deployment to this environment',
+};
+
describe('Deployment View App button', () => {
let wrapper;
const factory = (options = {}) => {
- const localVue = createLocalVue();
-
- wrapper = mount(localVue.extend(DeploymentViewButton), {
- localVue,
+ wrapper = mount(DeploymentViewButton, {
...options,
});
};
@@ -19,7 +21,7 @@ describe('Deployment View App button', () => {
factory({
propsData: {
deployment: deploymentMockData,
- isCurrent: true,
+ appButtonText,
},
});
});
@@ -29,25 +31,8 @@ describe('Deployment View App button', () => {
});
describe('text', () => {
- describe('when app is current', () => {
- it('shows View app', () => {
- expect(wrapper.find(ReviewAppLink).text()).toContain('View app');
- });
- });
-
- describe('when app is not current', () => {
- beforeEach(() => {
- factory({
- propsData: {
- deployment: deploymentMockData,
- isCurrent: false,
- },
- });
- });
-
- it('shows View Previous app', () => {
- expect(wrapper.find(ReviewAppLink).text()).toContain('View previous app');
- });
+ it('renders text as passed', () => {
+ expect(wrapper.find(ReviewAppLink).text()).toContain(appButtonText.text);
});
});
@@ -56,7 +41,7 @@ describe('Deployment View App button', () => {
factory({
propsData: {
deployment: { ...deploymentMockData, changes: null },
- isCurrent: false,
+ appButtonText,
},
});
});
@@ -71,7 +56,7 @@ describe('Deployment View App button', () => {
factory({
propsData: {
deployment: { ...deploymentMockData, changes: [deploymentMockData.changes[0]] },
- isCurrent: false,
+ appButtonText,
},
});
});
@@ -94,7 +79,7 @@ describe('Deployment View App button', () => {
factory({
propsData: {
deployment: deploymentMockData,
- isCurrent: false,
+ appButtonText,
},
});
});
diff --git a/spec/frontend/vue_shared/components/__snapshots__/expand_button_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/expand_button_spec.js.snap
index cf71aefebe8..3a518029702 100644
--- a/spec/frontend/vue_shared/components/__snapshots__/expand_button_spec.js.snap
+++ b/spec/frontend/vue_shared/components/__snapshots__/expand_button_spec.js.snap
@@ -1,5 +1,14 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
-exports[`Expand button on click when short text is provided renders button after text 1`] = `"<span><button aria-label=\\"Click to expand text\\" type=\\"button\\" class=\\"btn js-text-expander-prepend text-expander btn-blank btn-secondary\\" style=\\"display: none;\\"><svg aria-hidden=\\"true\\" class=\\"s12 ic-ellipsis_h\\"><use xlink:href=\\"#ellipsis_h\\"></use></svg></button> <!----> <span><p>Expanded!</p></span> <button aria-label=\\"Click to expand text\\" type=\\"button\\" class=\\"btn js-text-expander-append text-expander btn-blank btn-secondary\\" style=\\"\\"><svg aria-hidden=\\"true\\" class=\\"s12 ic-ellipsis_h\\"><use xlink:href=\\"#ellipsis_h\\"></use></svg></button></span>"`;
+exports[`Expand button on click when short text is provided renders button after text 1`] = `
+"<span><button aria-label=\\"Click to expand text\\" type=\\"button\\" class=\\"btn js-text-expander-prepend text-expander btn-blank btn-secondary\\" style=\\"display: none;\\"><svg aria-hidden=\\"true\\" class=\\"s12 ic-ellipsis_h\\"><use xlink:href=\\"#ellipsis_h\\"></use></svg></button> <!----> <span><p>Expanded!</p></span> <button aria-label=\\"Click to expand text\\" type=\\"button\\" class=\\"btn js-text-expander-append text-expander btn-blank btn-secondary\\" style=\\"\\"><svg aria-hidden=\\"true\\" class=\\"s12 ic-ellipsis_h\\">
+ <use xlink:href=\\"#ellipsis_h\\"></use>
+ </svg></button></span>"
+`;
-exports[`Expand button when short text is provided renders button before text 1`] = `"<span><button aria-label=\\"Click to expand text\\" type=\\"button\\" class=\\"btn js-text-expander-prepend text-expander btn-blank btn-secondary\\"><svg aria-hidden=\\"true\\" class=\\"s12 ic-ellipsis_h\\"><use xlink:href=\\"#ellipsis_h\\"></use></svg></button> <span><p>Short</p></span> <!----> <button aria-label=\\"Click to expand text\\" type=\\"button\\" class=\\"btn js-text-expander-append text-expander btn-blank btn-secondary\\" style=\\"display: none;\\"><svg aria-hidden=\\"true\\" class=\\"s12 ic-ellipsis_h\\"><use xlink:href=\\"#ellipsis_h\\"></use></svg></button></span>"`;
+exports[`Expand button when short text is provided renders button before text 1`] = `
+"<span><button aria-label=\\"Click to expand text\\" type=\\"button\\" class=\\"btn js-text-expander-prepend text-expander btn-blank btn-secondary\\"><svg aria-hidden=\\"true\\" class=\\"s12 ic-ellipsis_h\\"><use xlink:href=\\"#ellipsis_h\\"></use></svg></button> <span><p>Short</p></span>
+<!----> <button aria-label=\\"Click to expand text\\" type=\\"button\\" class=\\"btn js-text-expander-append text-expander btn-blank btn-secondary\\" style=\\"display: none;\\"><svg aria-hidden=\\"true\\" class=\\"s12 ic-ellipsis_h\\">
+ <use xlink:href=\\"#ellipsis_h\\"></use>
+ </svg></button></span>"
+`;
diff --git a/spec/frontend/vue_shared/components/__snapshots__/memory_graph_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/memory_graph_spec.js.snap
index a7f666ff56d..f4f9cc288f9 100644
--- a/spec/frontend/vue_shared/components/__snapshots__/memory_graph_spec.js.snap
+++ b/spec/frontend/vue_shared/components/__snapshots__/memory_graph_spec.js.snap
@@ -5,7 +5,7 @@ exports[`MemoryGraph Render chart should draw container with chart 1`] = `
class="memory-graph-container p-1"
style="width: 100px;"
>
- <glsparklinechart-stub
+ <gl-sparkline-chart-stub
data="Nov 12 2019 19:17:33,2.87,Nov 12 2019 19:18:33,2.78,Nov 12 2019 19:19:33,2.78,Nov 12 2019 19:20:33,3.01"
height="25"
tooltiplabel="MB"
diff --git a/spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap
index 530428ef27c..74f71c23d02 100644
--- a/spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap
+++ b/spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap
@@ -1,13 +1,13 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`SplitButton renders actionItems 1`] = `
-<gldropdown-stub
+<gl-dropdown-stub
menu-class="dropdown-menu-selectable "
split="true"
text="professor"
variant="secondary"
>
- <gldropdownitem-stub
+ <gl-dropdown-item-stub
active="true"
active-class="is-active"
>
@@ -18,10 +18,10 @@ exports[`SplitButton renders actionItems 1`] = `
<div>
very symphonic
</div>
- </gldropdownitem-stub>
+ </gl-dropdown-item-stub>
- <gldropdowndivider-stub />
- <gldropdownitem-stub
+ <gl-dropdown-divider-stub />
+ <gl-dropdown-item-stub
active-class="is-active"
>
<strong>
@@ -31,8 +31,8 @@ exports[`SplitButton renders actionItems 1`] = `
<div>
warp drive
</div>
- </gldropdownitem-stub>
+ </gl-dropdown-item-stub>
<!---->
-</gldropdown-stub>
+</gl-dropdown-stub>
`;
diff --git a/spec/frontend/vue_shared/components/callout_spec.js b/spec/frontend/vue_shared/components/callout_spec.js
index 91208dfb31a..7c9bb6b4650 100644
--- a/spec/frontend/vue_shared/components/callout_spec.js
+++ b/spec/frontend/vue_shared/components/callout_spec.js
@@ -1,17 +1,14 @@
-import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import Callout from '~/vue_shared/components/callout.vue';
const TEST_MESSAGE = 'This is a callout message!';
const TEST_SLOT = '<button>This is a callout slot!</button>';
-const localVue = createLocalVue();
-
describe('Callout Component', () => {
let wrapper;
const factory = options => {
- wrapper = shallowMount(localVue.extend(Callout), {
- localVue,
+ wrapper = shallowMount(Callout, {
...options,
});
};
diff --git a/spec/frontend/vue_shared/components/changed_file_icon_spec.js b/spec/frontend/vue_shared/components/changed_file_icon_spec.js
index 2fabbe3d0f6..02c4dabeffc 100644
--- a/spec/frontend/vue_shared/components/changed_file_icon_spec.js
+++ b/spec/frontend/vue_shared/components/changed_file_icon_spec.js
@@ -18,8 +18,6 @@ describe('Changed file icon', () => {
showTooltip: true,
...props,
},
- sync: false,
- attachToDocument: true,
});
};
@@ -30,7 +28,7 @@ describe('Changed file icon', () => {
const findIcon = () => wrapper.find(Icon);
const findIconName = () => findIcon().props('name');
const findIconClasses = () => findIcon().classes();
- const findTooltipText = () => wrapper.attributes('data-original-title');
+ const findTooltipText = () => wrapper.attributes('title');
it('with isCentered true, adds center class', () => {
factory({
@@ -58,10 +56,10 @@ describe('Changed file icon', () => {
describe.each`
file | iconName | tooltipText | desc
- ${changedFile()} | ${'file-modified'} | ${'Unstaged modification'} | ${'with file changed'}
+ ${changedFile()} | ${'file-modified-solid'} | ${'Unstaged modification'} | ${'with file changed'}
${stagedFile()} | ${'file-modified-solid'} | ${'Staged modification'} | ${'with file staged'}
- ${changedAndStagedFile()} | ${'file-modified'} | ${'Unstaged and staged modification'} | ${'with file changed and staged'}
- ${newFile()} | ${'file-addition'} | ${'Unstaged addition'} | ${'with file new'}
+ ${changedAndStagedFile()} | ${'file-modified-solid'} | ${'Unstaged and staged modification'} | ${'with file changed and staged'}
+ ${newFile()} | ${'file-addition-solid'} | ${'Unstaged addition'} | ${'with file new'}
`('$desc', ({ file, iconName, tooltipText }) => {
beforeEach(() => {
factory({ file });
@@ -89,7 +87,7 @@ describe('Changed file icon', () => {
});
it('does not have tooltip text', () => {
- expect(findTooltipText()).toBe('');
+ expect(findTooltipText()).toBeFalsy();
});
});
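A minimal illustrative sketch (not taken from the patch) of the tooltip assertion pattern in the hunks above and in the clipboard_button_spec.js diff that follows: after the move to Jest, these specs read the plain title attribute instead of the Bootstrap-managed data-original-title attribute. The TooltipExample component and its strings are invented here and assume a plain Jest + @vue/test-utils setup.
import { shallowMount } from '@vue/test-utils';
// Invented component: renders a span whose tooltip text lives in the title attribute.
const TooltipExample = {
  render(h) {
    return h('span', { attrs: { title: 'Copy this value' } }, 'copy');
  },
};
describe('tooltip attribute assertion (illustrative only)', () => {
  it('reads the title attribute directly from the wrapper', () => {
    const wrapper = shallowMount(TooltipExample);
    // Equivalent of the rewritten assertions above: no data-original-title lookup.
    expect(wrapper.attributes('title')).toBe('Copy this value');
    wrapper.destroy();
  });
});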
diff --git a/spec/frontend/vue_shared/components/clipboard_button_spec.js b/spec/frontend/vue_shared/components/clipboard_button_spec.js
index 4fb6924daba..37f71867ab9 100644
--- a/spec/frontend/vue_shared/components/clipboard_button_spec.js
+++ b/spec/frontend/vue_shared/components/clipboard_button_spec.js
@@ -9,8 +9,6 @@ describe('clipboard button', () => {
const createWrapper = propsData => {
wrapper = shallowMount(ClipboardButton, {
propsData,
- sync: false,
- attachToDocument: true,
});
};
@@ -35,7 +33,7 @@ describe('clipboard button', () => {
});
it('should have a tooltip with default values', () => {
- expect(wrapper.attributes('data-original-title')).toBe('Copy this value');
+ expect(wrapper.attributes('title')).toBe('Copy this value');
});
it('should render provided classname', () => {
diff --git a/spec/frontend/vue_shared/components/commit_spec.js b/spec/frontend/vue_shared/components/commit_spec.js
index 67262eec0a5..3510c9b699d 100644
--- a/spec/frontend/vue_shared/components/commit_spec.js
+++ b/spec/frontend/vue_shared/components/commit_spec.js
@@ -7,13 +7,16 @@ describe('Commit component', () => {
let props;
let wrapper;
+ const findIcon = name => {
+ const icons = wrapper.findAll(Icon).filter(c => c.attributes('name') === name);
+ return icons.length ? icons.at(0) : icons;
+ };
+
const findUserAvatar = () => wrapper.find(UserAvatarLink);
const createComponent = propsData => {
wrapper = shallowMount(CommitComponent, {
propsData,
- sync: false,
- attachToDocument: true,
});
};
@@ -71,7 +74,7 @@ describe('Commit component', () => {
});
it('should render a tag icon if it represents a tag', () => {
- expect(wrapper.find('icon-stub[name="tag"]').exists()).toBe(true);
+ expect(findIcon('tag').exists()).toBe(true);
});
it('should render a link to the ref url', () => {
@@ -89,7 +92,7 @@ describe('Commit component', () => {
});
it('should render icon for commit', () => {
- expect(wrapper.find('icon-stub[name="commit"]').exists()).toBe(true);
+ expect(findIcon('commit').exists()).toBe(true);
});
describe('Given commit title and author props', () => {
@@ -160,9 +163,9 @@ describe('Commit component', () => {
expect(refEl.attributes('href')).toBe(props.commitRef.ref_url);
- expect(refEl.attributes('data-original-title')).toBe(props.commitRef.name);
+ expect(refEl.attributes('title')).toBe(props.commitRef.name);
- expect(wrapper.find('icon-stub[name="branch"]').exists()).toBe(true);
+ expect(findIcon('branch').exists()).toBe(true);
});
});
@@ -193,9 +196,9 @@ describe('Commit component', () => {
expect(refEl.attributes('href')).toBe(props.mergeRequestRef.path);
- expect(refEl.attributes('data-original-title')).toBe(props.mergeRequestRef.title);
+ expect(refEl.attributes('title')).toBe(props.mergeRequestRef.title);
- expect(wrapper.find('icon-stub[name="git-merge"]').exists()).toBe(true);
+ expect(findIcon('git-merge').exists()).toBe(true);
});
});
diff --git a/spec/frontend/vue_shared/components/dropdown/dropdown_search_input_spec.js b/spec/frontend/vue_shared/components/dropdown/dropdown_search_input_spec.js
index 0d0e4ae4349..ffdeb25439c 100644
--- a/spec/frontend/vue_shared/components/dropdown/dropdown_search_input_spec.js
+++ b/spec/frontend/vue_shared/components/dropdown/dropdown_search_input_spec.js
@@ -49,7 +49,9 @@ describe('DropdownSearchInputComponent', () => {
wrapper.setProps({ focused: true });
- expect(inputEl.focus).toHaveBeenCalled();
+ return wrapper.vm.$nextTick().then(() => {
+ expect(inputEl.focus).toHaveBeenCalled();
+ });
});
});
});
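Several hunks in this patch swap synchronous assertions for a promise returned from wrapper.vm.$nextTick(), since DOM updates are no longer flushed synchronously once the sync mounting option is dropped. A minimal sketch of that pattern, with an invented Counter component, assuming a plain Jest + @vue/test-utils setup:
import { shallowMount } from '@vue/test-utils';
// Invented component: re-renders whenever the count prop changes.
const Counter = {
  props: {
    count: { type: Number, required: true },
  },
  render(h) {
    return h('span', String(this.count));
  },
};
describe('asserting after $nextTick (illustrative only)', () => {
  it('sees the re-rendered output once the next tick resolves', () => {
    const wrapper = shallowMount(Counter, { propsData: { count: 1 } });
    wrapper.setProps({ count: 2 });
    // Returning the promise lets Jest wait for the DOM update before asserting.
    return wrapper.vm.$nextTick().then(() => {
      expect(wrapper.text()).toBe('2');
    });
  });
});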
diff --git a/spec/frontend/vue_shared/components/expand_button_spec.js b/spec/frontend/vue_shared/components/expand_button_spec.js
index a501e6695d5..3b1c8f6219c 100644
--- a/spec/frontend/vue_shared/components/expand_button_spec.js
+++ b/spec/frontend/vue_shared/components/expand_button_spec.js
@@ -1,5 +1,5 @@
import Vue from 'vue';
-import { mount, createLocalVue } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
import ExpandButton from '~/vue_shared/components/expand_button.vue';
const text = {
@@ -14,10 +14,7 @@ describe('Expand button', () => {
const expanderAppendEl = () => wrapper.find('.js-text-expander-append');
const factory = (options = {}) => {
- const localVue = createLocalVue();
-
- wrapper = mount(localVue.extend(ExpandButton), {
- localVue,
+ wrapper = mount(ExpandButton, {
...options,
});
};
@@ -136,7 +133,10 @@ describe('Expand button', () => {
it('clicking hides itself and shows prepend', () => {
expect(expanderAppendEl().isVisible()).toBe(true);
expanderAppendEl().trigger('click');
- expect(expanderPrependEl().isVisible()).toBe(true);
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(expanderPrependEl().isVisible()).toBe(true);
+ });
});
it('clicking hides expanded text', () => {
@@ -147,12 +147,15 @@ describe('Expand button', () => {
.trim(),
).toBe(text.expanded);
expanderAppendEl().trigger('click');
- expect(
- wrapper
- .find(ExpandButton)
- .text()
- .trim(),
- ).not.toBe(text.expanded);
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(
+ wrapper
+ .find(ExpandButton)
+ .text()
+ .trim(),
+ ).not.toBe(text.expanded);
+ });
});
describe('when short text is provided', () => {
@@ -176,12 +179,15 @@ describe('Expand button', () => {
.trim(),
).toBe(text.expanded);
expanderAppendEl().trigger('click');
- expect(
- wrapper
- .find(ExpandButton)
- .text()
- .trim(),
- ).toBe(text.short);
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(
+ wrapper
+ .find(ExpandButton)
+ .text()
+ .trim(),
+ ).toBe(text.short);
+ });
});
});
});
diff --git a/spec/frontend/vue_shared/components/file_icon_spec.js b/spec/frontend/vue_shared/components/file_icon_spec.js
index f8f68a6a77a..7b7633a06d6 100644
--- a/spec/frontend/vue_shared/components/file_icon_spec.js
+++ b/spec/frontend/vue_shared/components/file_icon_spec.js
@@ -14,7 +14,6 @@ describe('File Icon component', () => {
const createComponent = (props = {}) => {
wrapper = shallowMount(FileIcon, {
- sync: false,
propsData: { ...props },
});
};
diff --git a/spec/javascripts/vue_shared/components/gl_modal_vuex_spec.js b/spec/frontend/vue_shared/components/gl_modal_vuex_spec.js
index eb78d37db3e..8437e68d73c 100644
--- a/spec/javascripts/vue_shared/components/gl_modal_vuex_spec.js
+++ b/spec/frontend/vue_shared/components/gl_modal_vuex_spec.js
@@ -33,7 +33,7 @@ describe('GlModalVuex', () => {
...options.propsData,
};
- wrapper = shallowMount(localVue.extend(GlModalVuex), {
+ wrapper = shallowMount(GlModalVuex, {
...options,
localVue,
store,
@@ -45,8 +45,8 @@ describe('GlModalVuex', () => {
state = createState();
actions = {
- show: jasmine.createSpy('show'),
- hide: jasmine.createSpy('hide'),
+ show: jest.fn(),
+ hide: jest.fn(),
};
});
@@ -81,7 +81,7 @@ describe('GlModalVuex', () => {
});
it('passes listeners through to gl-modal', () => {
- const ok = jasmine.createSpy('ok');
+ const ok = jest.fn();
factory({
listeners: { ok },
@@ -119,12 +119,12 @@ describe('GlModalVuex', () => {
state.isVisible = false;
factory();
- const rootEmit = spyOn(wrapper.vm.$root, '$emit');
+ const rootEmit = jest.spyOn(wrapper.vm.$root, '$emit');
state.isVisible = true;
- localVue
- .nextTick()
+ wrapper.vm
+ .$nextTick()
.then(() => {
expect(rootEmit).toHaveBeenCalledWith('bv::show::modal', TEST_MODAL_ID);
})
@@ -136,12 +136,12 @@ describe('GlModalVuex', () => {
state.isVisible = true;
factory();
- const rootEmit = spyOn(wrapper.vm.$root, '$emit');
+ const rootEmit = jest.spyOn(wrapper.vm.$root, '$emit');
state.isVisible = false;
- localVue
- .nextTick()
+ wrapper.vm
+ .$nextTick()
.then(() => {
expect(rootEmit).toHaveBeenCalledWith('bv::hide::modal', TEST_MODAL_ID);
})
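The gl_modal_vuex hunks above replace jasmine.createSpy and spyOn with their Jest equivalents. A small self-contained sketch of those equivalents; the event names and objects below are invented for illustration:
describe('Jest spy equivalents (illustrative only)', () => {
  it('records calls made to a jest.fn stub', () => {
    const show = jest.fn();
    show('test-modal-id');
    expect(show).toHaveBeenCalledWith('test-modal-id');
  });
  it('wraps an existing method with jest.spyOn', () => {
    const bus = {
      $emit: () => {},
    };
    // jest.spyOn replaces the Karma-era spyOn(obj, 'method') helper.
    const emitSpy = jest.spyOn(bus, '$emit');
    bus.$emit('bv::show::modal', 'test-modal-id');
    expect(emitSpy).toHaveBeenCalledWith('bv::show::modal', 'test-modal-id');
  });
});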
diff --git a/spec/frontend/vue_shared/components/gl_toggle_vuex_spec.js b/spec/frontend/vue_shared/components/gl_toggle_vuex_spec.js
index d9badffb50d..30afb044bbf 100644
--- a/spec/frontend/vue_shared/components/gl_toggle_vuex_spec.js
+++ b/spec/frontend/vue_shared/components/gl_toggle_vuex_spec.js
@@ -20,7 +20,6 @@ describe('GlToggleVuex component', () => {
stateProperty: 'toggleState',
...props,
},
- sync: false,
});
};
diff --git a/spec/frontend/vue_shared/components/issue/issue_assignees_spec.js b/spec/frontend/vue_shared/components/issue/issue_assignees_spec.js
index dcae2f12833..b00261ae067 100644
--- a/spec/frontend/vue_shared/components/issue/issue_assignees_spec.js
+++ b/spec/frontend/vue_shared/components/issue/issue_assignees_spec.js
@@ -17,8 +17,6 @@ describe('IssueAssigneesComponent', () => {
assignees: mockAssigneesList,
...props,
},
- sync: false,
- attachToDocument: true,
});
vm = wrapper.vm; // eslint-disable-line
};
@@ -66,7 +64,7 @@ describe('IssueAssigneesComponent', () => {
expect(findOverflowCounter().exists()).toBe(true);
expect(findOverflowCounter().text()).toEqual(expectedHidden.toString());
- expect(findOverflowCounter().attributes('data-original-title')).toEqual(
+ expect(findOverflowCounter().attributes('title')).toEqual(
`${hiddenCount} more assignees`,
);
});
diff --git a/spec/frontend/vue_shared/components/issue/issue_milestone_spec.js b/spec/frontend/vue_shared/components/issue/issue_milestone_spec.js
index 4a66330ac30..4c654e01f74 100644
--- a/spec/frontend/vue_shared/components/issue/issue_milestone_spec.js
+++ b/spec/frontend/vue_shared/components/issue/issue_milestone_spec.js
@@ -13,8 +13,6 @@ const createComponent = (milestone = mockMilestone) => {
propsData: {
milestone,
},
- sync: false,
- attachToDocument: true,
});
};
diff --git a/spec/frontend/vue_shared/components/issue/related_issuable_item_spec.js b/spec/frontend/vue_shared/components/issue/related_issuable_item_spec.js
index 3cc640cb00d..f7b1f041ef2 100644
--- a/spec/frontend/vue_shared/components/issue/related_issuable_item_spec.js
+++ b/spec/frontend/vue_shared/components/issue/related_issuable_item_spec.js
@@ -1,5 +1,5 @@
import Vue from 'vue';
-import { mount, createLocalVue } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
import { formatDate } from '~/lib/utils/datetime_utility';
import RelatedIssuableItem from '~/vue_shared/components/issue/related_issuable_item.vue';
import {
@@ -29,13 +29,8 @@ describe('RelatedIssuableItem', () => {
};
beforeEach(() => {
- const localVue = createLocalVue();
-
- wrapper = mount(localVue.extend(RelatedIssuableItem), {
- localVue,
+ wrapper = mount(RelatedIssuableItem, {
slots,
- sync: false,
- attachToDocument: true,
propsData: props,
});
});
@@ -192,10 +187,12 @@ describe('RelatedIssuableItem', () => {
it('triggers onRemoveRequest when clicked', () => {
removeBtn.trigger('click');
- const { relatedIssueRemoveRequest } = wrapper.emitted();
+ return wrapper.vm.$nextTick().then(() => {
+ const { relatedIssueRemoveRequest } = wrapper.emitted();
- expect(relatedIssueRemoveRequest.length).toBe(1);
- expect(relatedIssueRemoveRequest[0]).toEqual([props.idKey]);
+ expect(relatedIssueRemoveRequest.length).toBe(1);
+ expect(relatedIssueRemoveRequest[0]).toEqual([props.idKey]);
+ });
});
});
});
diff --git a/spec/frontend/vue_shared/components/loading_button_spec.js b/spec/frontend/vue_shared/components/loading_button_spec.js
new file mode 100644
index 00000000000..8bcb80d140e
--- /dev/null
+++ b/spec/frontend/vue_shared/components/loading_button_spec.js
@@ -0,0 +1,100 @@
+import { shallowMount } from '@vue/test-utils';
+import LoadingButton from '~/vue_shared/components/loading_button.vue';
+
+const LABEL = 'Hello';
+
+describe('LoadingButton', () => {
+ let wrapper;
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const buildWrapper = (propsData = {}) => {
+ wrapper = shallowMount(LoadingButton, {
+ propsData,
+ });
+ };
+ const findButtonLabel = () => wrapper.find('.js-loading-button-label');
+ const findButtonIcon = () => wrapper.find('.js-loading-button-icon');
+
+ describe('loading spinner', () => {
+ it('shown when loading', () => {
+ buildWrapper({ loading: true });
+
+ expect(findButtonIcon().exists()).toBe(true);
+ });
+ });
+
+ describe('disabled state', () => {
+ it('disabled when loading', () => {
+ buildWrapper({ loading: true });
+ expect(wrapper.attributes('disabled')).toBe('disabled');
+ });
+
+ it('not disabled when normal', () => {
+ buildWrapper({ loading: false });
+
+ expect(wrapper.attributes('disabled')).toBe(undefined);
+ });
+ });
+
+ describe('label', () => {
+ it('shown when normal', () => {
+ buildWrapper({ loading: false, label: LABEL });
+ expect(findButtonLabel().text()).toBe(LABEL);
+ });
+
+ it('shown when loading', () => {
+ buildWrapper({ loading: false, label: LABEL });
+ expect(findButtonLabel().text()).toBe(LABEL);
+ });
+ });
+
+ describe('container class', () => {
+ it('should default to btn btn-align-content', () => {
+ buildWrapper();
+
+ expect(wrapper.classes()).toContain('btn');
+ expect(wrapper.classes()).toContain('btn-align-content');
+ });
+
+ it('should be configurable through props', () => {
+ const containerClass = 'test-class';
+
+ buildWrapper({
+ containerClass,
+ });
+
+ expect(wrapper.classes()).not.toContain('btn');
+ expect(wrapper.classes()).not.toContain('btn-align-content');
+ expect(wrapper.classes()).toContain(containerClass);
+ });
+ });
+
+ describe('click callback prop', () => {
+ it('calls given callback when normal', () => {
+ buildWrapper({
+ loading: false,
+ });
+
+ wrapper.trigger('click');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted('click')).toBeTruthy();
+ });
+ });
+
+ it('does not call given callback when disabled because of loading', () => {
+ buildWrapper({
+ loading: true,
+ });
+
+ wrapper.trigger('click');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted('click')).toBeFalsy();
+ });
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/markdown/field_spec.js b/spec/frontend/vue_shared/components/markdown/field_spec.js
index 4cd0f62da0f..46e269e5071 100644
--- a/spec/frontend/vue_shared/components/markdown/field_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/field_spec.js
@@ -1,9 +1,9 @@
-import { mount, createLocalVue } from '@vue/test-utils';
-import { TEST_HOST } from 'spec/test_constants';
+import { mount } from '@vue/test-utils';
+import fieldComponent from '~/vue_shared/components/markdown/field.vue';
+import { TEST_HOST, FIXTURES_PATH } from 'spec/test_constants';
import AxiosMockAdapter from 'axios-mock-adapter';
import $ from 'jquery';
import axios from '~/lib/utils/axios_utils';
-import fieldComponent from '~/vue_shared/components/markdown/field.vue';
const markdownPreviewPath = `${TEST_HOST}/preview`;
const markdownDocsPath = `${TEST_HOST}/docs`;
@@ -19,6 +19,7 @@ function createComponent() {
propsData: {
markdownDocsPath,
markdownPreviewPath,
+ isSubmitting: false,
},
slots: {
textarea: '<textarea>testing\n123</textarea>',
@@ -27,6 +28,7 @@ function createComponent() {
<field-component
markdown-preview-path="${markdownPreviewPath}"
markdown-docs-path="${markdownDocsPath}"
+ :isSubmitting="false"
>
<textarea
slot="textarea"
@@ -35,7 +37,6 @@ function createComponent() {
</textarea>
</field-component>
`,
- sync: false,
});
return wrapper;
}
@@ -44,10 +45,10 @@ const getPreviewLink = wrapper => wrapper.find('.nav-links .js-preview-link');
const getWriteLink = wrapper => wrapper.find('.nav-links .js-write-link');
const getMarkdownButton = wrapper => wrapper.find('.js-md');
const getAllMarkdownButtons = wrapper => wrapper.findAll('.js-md');
+const getVideo = wrapper => wrapper.find('video');
describe('Markdown field component', () => {
let axiosMock;
- const localVue = createLocalVue();
beforeEach(() => {
axiosMock = new AxiosMockAdapter(axios);
@@ -59,7 +60,10 @@ describe('Markdown field component', () => {
describe('mounted', () => {
let wrapper;
- const previewHTML = '<p>markdown preview</p>';
+ const previewHTML = `
+ <p>markdown preview</p>
+ <video src="${FIXTURES_PATH}/static/mock-video.mp4" muted="muted"></video>
+ `;
let previewLink;
let writeLink;
@@ -78,7 +82,7 @@ describe('Markdown field component', () => {
previewLink = getPreviewLink(wrapper);
previewLink.trigger('click');
- return localVue.nextTick().then(() => {
+ return wrapper.vm.$nextTick().then(() => {
expect(previewLink.element.parentNode.classList.contains('active')).toBeTruthy();
});
});
@@ -88,7 +92,7 @@ describe('Markdown field component', () => {
previewLink = getPreviewLink(wrapper);
previewLink.trigger('click');
- localVue.nextTick(() => {
+ wrapper.vm.$nextTick(() => {
expect(wrapper.find('.md-preview-holder').element.textContent.trim()).toContain(
'Loading…',
);
@@ -112,9 +116,35 @@ describe('Markdown field component', () => {
previewLink.trigger('click');
- setTimeout(() => {
- expect($.fn.renderGFM).toHaveBeenCalled();
- }, 0);
+ return axios.waitFor(markdownPreviewPath).then(() => {
+ expect(wrapper.find('.md-preview-holder').element.innerHTML).toContain(previewHTML);
+ });
+ });
+
+ it('calls video.pause() on comment input when isSubmitting is changed to true', () => {
+ wrapper = createComponent();
+ previewLink = getPreviewLink(wrapper);
+ previewLink.trigger('click');
+
+ let callPause;
+
+ return axios
+ .waitFor(markdownPreviewPath)
+ .then(() => {
+ const video = getVideo(wrapper);
+ callPause = jest.spyOn(video.element, 'pause').mockImplementation(() => true);
+
+ wrapper.setProps({
+ isSubmitting: true,
+ markdownPreviewPath,
+ markdownDocsPath,
+ });
+
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ expect(callPause).toHaveBeenCalled();
+ });
});
it('clicking already active write or preview link does nothing', () => {
@@ -123,17 +153,17 @@ describe('Markdown field component', () => {
previewLink = getPreviewLink(wrapper);
writeLink.trigger('click');
- return localVue
- .nextTick()
+ return wrapper.vm
+ .$nextTick()
.then(() => assertMarkdownTabs(true, writeLink, previewLink, wrapper))
.then(() => writeLink.trigger('click'))
- .then(() => localVue.nextTick())
+ .then(() => wrapper.vm.$nextTick())
.then(() => assertMarkdownTabs(true, writeLink, previewLink, wrapper))
.then(() => previewLink.trigger('click'))
- .then(() => localVue.nextTick())
+ .then(() => wrapper.vm.$nextTick())
.then(() => assertMarkdownTabs(false, writeLink, previewLink, wrapper))
.then(() => previewLink.trigger('click'))
- .then(() => localVue.nextTick())
+ .then(() => wrapper.vm.$nextTick())
.then(() => assertMarkdownTabs(false, writeLink, previewLink, wrapper));
});
});
@@ -146,7 +176,7 @@ describe('Markdown field component', () => {
const markdownButton = getMarkdownButton(wrapper);
markdownButton.trigger('click');
- localVue.nextTick(() => {
+ wrapper.vm.$nextTick(() => {
expect(textarea.value).toContain('**testing**');
});
});
@@ -158,7 +188,7 @@ describe('Markdown field component', () => {
const markdownButton = getAllMarkdownButtons(wrapper).wrappers[5];
markdownButton.trigger('click');
- localVue.nextTick(() => {
+ wrapper.vm.$nextTick(() => {
expect(textarea.value).toContain('* testing');
});
});
@@ -170,7 +200,7 @@ describe('Markdown field component', () => {
const markdownButton = getAllMarkdownButtons(wrapper).wrappers[5];
markdownButton.trigger('click');
- localVue.nextTick(() => {
+ wrapper.vm.$nextTick(() => {
expect(textarea.value).toContain('* testing\n* 123');
});
});
diff --git a/spec/frontend/vue_shared/components/markdown/header_spec.js b/spec/frontend/vue_shared/components/markdown/header_spec.js
index 1014fbf0308..551d781d296 100644
--- a/spec/frontend/vue_shared/components/markdown/header_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/header_spec.js
@@ -12,8 +12,6 @@ describe('Markdown field header component', () => {
previewMarkdown: false,
...props,
},
- sync: false,
- attachToDocument: true,
});
};
@@ -66,11 +64,17 @@ describe('Markdown field header component', () => {
it('emits toggle markdown event when clicking preview', () => {
wrapper.find('.js-preview-link').trigger('click');
- expect(wrapper.emitted('preview-markdown').length).toEqual(1);
-
- wrapper.find('.js-write-link').trigger('click');
-
- expect(wrapper.emitted('write-markdown').length).toEqual(1);
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ expect(wrapper.emitted('preview-markdown').length).toEqual(1);
+
+ wrapper.find('.js-write-link').trigger('click');
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ expect(wrapper.emitted('write-markdown').length).toEqual(1);
+ });
});
it('does not emit toggle markdown event when triggered from another form', () => {
diff --git a/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js b/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js
index 71f9b5e3244..9b9c3d559e3 100644
--- a/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js
@@ -1,9 +1,7 @@
import { GlLoadingIcon } from '@gitlab/ui';
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import SuggestionDiffHeader from '~/vue_shared/components/markdown/suggestion_diff_header.vue';
-const localVue = createLocalVue();
-
const DEFAULT_PROPS = {
canApply: true,
isApplied: false,
@@ -14,14 +12,11 @@ describe('Suggestion Diff component', () => {
let wrapper;
const createComponent = props => {
- wrapper = shallowMount(localVue.extend(SuggestionDiffHeader), {
+ wrapper = shallowMount(SuggestionDiffHeader, {
propsData: {
...DEFAULT_PROPS,
...props,
},
- localVue,
- sync: false,
- attachToDocument: true,
});
};
diff --git a/spec/frontend/vue_shared/components/markdown/suggestion_diff_row_spec.js b/spec/frontend/vue_shared/components/markdown/suggestion_diff_row_spec.js
index c8deac1c086..97fcdc67791 100644
--- a/spec/frontend/vue_shared/components/markdown/suggestion_diff_row_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/suggestion_diff_row_spec.js
@@ -1,4 +1,4 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import SuggestionDiffRow from '~/vue_shared/components/markdown/suggestion_diff_row.vue';
const oldLine = {
@@ -7,8 +7,8 @@ const oldLine = {
meta_data: null,
new_line: null,
old_line: 5,
- rich_text: '-oldtext',
- text: '-oldtext',
+ rich_text: 'oldrichtext',
+ text: 'oldplaintext',
type: 'old',
};
@@ -18,8 +18,8 @@ const newLine = {
meta_data: null,
new_line: 6,
old_line: null,
- rich_text: '-newtext',
- text: '-newtext',
+ rich_text: 'newrichtext',
+ text: 'newplaintext',
type: 'new',
};
@@ -27,10 +27,7 @@ describe('SuggestionDiffRow', () => {
let wrapper;
const factory = (options = {}) => {
- const localVue = createLocalVue();
-
wrapper = shallowMount(SuggestionDiffRow, {
- localVue,
...options,
});
};
@@ -42,14 +39,46 @@ describe('SuggestionDiffRow', () => {
wrapper.destroy();
});
- it('renders correctly', () => {
- factory({
- propsData: {
- line: oldLine,
- },
+ describe('renders correctly', () => {
+ it('has the right classes on the wrapper', () => {
+ factory({
+ propsData: {
+ line: oldLine,
+ },
+ });
+
+ expect(wrapper.is('.line_holder')).toBe(true);
+ });
+
+ it('renders the rich text when it is available', () => {
+ factory({
+ propsData: {
+ line: newLine,
+ },
+ });
+
+ expect(wrapper.find('td.line_content').text()).toEqual('newrichtext');
+ });
+
+ it('renders the plain text when it is available but rich text is not', () => {
+ factory({
+ propsData: {
+ line: Object.assign({}, newLine, { rich_text: undefined }),
+ },
+ });
+
+ expect(wrapper.find('td.line_content').text()).toEqual('newplaintext');
});
- expect(wrapper.is('.line_holder')).toBe(true);
+ it('renders a zero-width space when it has no plain or rich texts', () => {
+ factory({
+ propsData: {
+ line: Object.assign({}, newLine, { rich_text: undefined, text: undefined }),
+ },
+ });
+
+ expect(wrapper.find('td.line_content').text()).toEqual('\u200B');
+ });
});
describe('when passed line has type old', () => {
diff --git a/spec/javascripts/vue_shared/components/markdown/suggestion_diff_spec.js b/spec/frontend/vue_shared/components/markdown/suggestion_diff_spec.js
index dc929e83eb7..3c5e7500ba7 100644
--- a/spec/javascripts/vue_shared/components/markdown/suggestion_diff_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/suggestion_diff_spec.js
@@ -92,7 +92,7 @@ describe('Suggestion Diff component', () => {
describe('applySuggestion', () => {
it('emits apply event when applySuggestion is called', () => {
const callback = () => {};
- spyOn(vm, '$emit');
+ jest.spyOn(vm, '$emit').mockImplementation(() => {});
vm.applySuggestion(callback);
expect(vm.$emit).toHaveBeenCalledWith('apply', { suggestionId: vm.suggestion.id, callback });
diff --git a/spec/frontend/vue_shared/components/modal_copy_button_spec.js b/spec/frontend/vue_shared/components/modal_copy_button_spec.js
index 3c71cb16bd5..e5a8860f42e 100644
--- a/spec/frontend/vue_shared/components/modal_copy_button_spec.js
+++ b/spec/frontend/vue_shared/components/modal_copy_button_spec.js
@@ -16,8 +16,6 @@ describe('modal copy button', () => {
text: 'copy me',
title: 'Copy this value',
},
- attachToDocument: true,
- sync: false,
});
});
@@ -29,14 +27,20 @@ describe('modal copy button', () => {
removeAllRanges: jest.fn(),
}));
wrapper.trigger('click');
- expect(wrapper.emitted().success).not.toBeEmpty();
- expect(document.execCommand).toHaveBeenCalledWith('copy');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted().success).not.toBeEmpty();
+ expect(document.execCommand).toHaveBeenCalledWith('copy');
+ });
});
it("should propagate the clipboard error event if execCommand doesn't work", () => {
document.execCommand = jest.fn(() => false);
wrapper.trigger('click');
- expect(wrapper.emitted().error).not.toBeEmpty();
- expect(document.execCommand).toHaveBeenCalledWith('copy');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted().error).not.toBeEmpty();
+ expect(document.execCommand).toHaveBeenCalledWith('copy');
+ });
});
});
});
diff --git a/spec/frontend/vue_shared/components/notes/system_note_spec.js b/spec/frontend/vue_shared/components/notes/system_note_spec.js
index 603c37c6c49..d5eac7c2aa3 100644
--- a/spec/frontend/vue_shared/components/notes/system_note_spec.js
+++ b/spec/frontend/vue_shared/components/notes/system_note_spec.js
@@ -1,12 +1,10 @@
-import { createLocalVue, mount } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
import IssueSystemNote from '~/vue_shared/components/notes/system_note.vue';
import createStore from '~/notes/stores';
import initMRPopovers from '~/mr_popover/index';
jest.mock('~/mr_popover/index', () => jest.fn());
-const localVue = createLocalVue();
-
describe('system note component', () => {
let vm;
let props;
@@ -34,10 +32,7 @@ describe('system note component', () => {
vm = mount(IssueSystemNote, {
store,
- localVue,
propsData: props,
- attachToDocument: true,
- sync: false,
});
});
diff --git a/spec/frontend/vue_shared/components/notes/timeline_entry_item_spec.js b/spec/frontend/vue_shared/components/notes/timeline_entry_item_spec.js
index be6c58f0683..f73d3edec5d 100644
--- a/spec/frontend/vue_shared/components/notes/timeline_entry_item_spec.js
+++ b/spec/frontend/vue_shared/components/notes/timeline_entry_item_spec.js
@@ -1,14 +1,11 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import TimelineEntryItem from '~/vue_shared/components/notes/timeline_entry_item.vue';
describe(`TimelineEntryItem`, () => {
let wrapper;
const factory = (options = {}) => {
- const localVue = createLocalVue();
-
wrapper = shallowMount(TimelineEntryItem, {
- localVue,
...options,
});
};
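Many of these specs also drop createLocalVue and localVue.extend and pass the component straight to shallowMount, as in the timeline_entry_item hunk above. A minimal sketch of that simplified mounting, with an invented Badge component, assuming a plain Jest + @vue/test-utils setup:
import { shallowMount } from '@vue/test-utils';
// Invented component: renders its label prop inside a span.
const Badge = {
  props: {
    label: { type: String, required: true },
  },
  render(h) {
    return h('span', { class: 'badge' }, this.label);
  },
};
describe('mounting without createLocalVue (illustrative only)', () => {
  it('renders the component passed directly to shallowMount', () => {
    const wrapper = shallowMount(Badge, { propsData: { label: 'ready' } });
    expect(wrapper.text()).toBe('ready');
    wrapper.destroy();
  });
});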
diff --git a/spec/frontend/vue_shared/components/paginated_list_spec.js b/spec/frontend/vue_shared/components/paginated_list_spec.js
index 4e1b29a4d3a..46e45296c37 100644
--- a/spec/frontend/vue_shared/components/paginated_list_spec.js
+++ b/spec/frontend/vue_shared/components/paginated_list_spec.js
@@ -26,8 +26,6 @@ describe('Pagination links component', () => {
list: [{ id: 'foo' }, { id: 'bar' }],
props,
},
- attachToDocument: true,
- sync: false,
});
[glPaginatedList] = wrapper.vm.$children;
diff --git a/spec/frontend/vue_shared/components/pagination_links_spec.js b/spec/frontend/vue_shared/components/pagination_links_spec.js
index efa5825d92f..bf004c83c4f 100644
--- a/spec/frontend/vue_shared/components/pagination_links_spec.js
+++ b/spec/frontend/vue_shared/components/pagination_links_spec.js
@@ -1,4 +1,4 @@
-import { mount, createLocalVue } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
import { GlPagination } from '@gitlab/ui';
import PaginationLinks from '~/vue_shared/components/pagination_links.vue';
import {
@@ -10,8 +10,6 @@ import {
LABEL_LAST_PAGE,
} from '~/vue_shared/components/pagination/constants';
-const localVue = createLocalVue();
-
describe('Pagination links component', () => {
const pageInfo = {
page: 3,
@@ -38,8 +36,6 @@ describe('Pagination links component', () => {
change: changeMock,
pageInfo,
},
- localVue,
- sync: false,
});
};
diff --git a/spec/frontend/vue_shared/components/recaptcha_modal_spec.js b/spec/frontend/vue_shared/components/recaptcha_modal_spec.js
index e509fe09d94..223e7187d99 100644
--- a/spec/frontend/vue_shared/components/recaptcha_modal_spec.js
+++ b/spec/frontend/vue_shared/components/recaptcha_modal_spec.js
@@ -14,7 +14,6 @@ describe('RecaptchaModal', () => {
beforeEach(() => {
wrapper = shallowMount(RecaptchaModal, {
- sync: false,
propsData: {
html: recaptchaHtml,
},
diff --git a/spec/frontend/vue_shared/components/resizable_chart_container_spec.js b/spec/frontend/vue_shared/components/resizable_chart_container_spec.js
index 552cfade7b6..3a5514ef318 100644
--- a/spec/frontend/vue_shared/components/resizable_chart_container_spec.js
+++ b/spec/frontend/vue_shared/components/resizable_chart_container_spec.js
@@ -14,7 +14,6 @@ describe('Resizable Chart Container', () => {
beforeEach(() => {
wrapper = mount(ResizableChartContainer, {
- attachToDocument: true,
scopedSlots: {
default: `
<div class="slot" slot-scope="{ width, height }">
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select/base_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select/base_spec.js
index da22034a8db..d90fafb6bf7 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select/base_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select/base_spec.js
@@ -12,8 +12,6 @@ import {
const createComponent = (config = mockConfig) =>
shallowMount(BaseComponent, {
propsData: config,
- sync: false,
- attachToDocument: true,
});
describe('BaseComponent', () => {
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_value_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_value_spec.js
index 52c0298603d..54ad96073c8 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_value_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_value_spec.js
@@ -24,8 +24,6 @@ const createComponent = (
labelFilterBasePath,
enableScopedLabels: true,
},
- attachToDocument: true,
- sync: false,
});
};
diff --git a/spec/frontend/vue_shared/components/slot_switch_spec.js b/spec/frontend/vue_shared/components/slot_switch_spec.js
index cff955c05b2..71e6087c272 100644
--- a/spec/frontend/vue_shared/components/slot_switch_spec.js
+++ b/spec/frontend/vue_shared/components/slot_switch_spec.js
@@ -14,7 +14,6 @@ describe('SlotSwitch', () => {
wrapper = shallowMount(SlotSwitch, {
propsData,
slots,
- sync: false,
});
};
diff --git a/spec/frontend/vue_shared/components/split_button_spec.js b/spec/frontend/vue_shared/components/split_button_spec.js
index 520abb02cf7..e09bc073042 100644
--- a/spec/frontend/vue_shared/components/split_button_spec.js
+++ b/spec/frontend/vue_shared/components/split_button_spec.js
@@ -22,7 +22,6 @@ describe('SplitButton', () => {
const createComponent = propsData => {
wrapper = shallowMount(SplitButton, {
propsData,
- sync: false,
});
};
@@ -75,6 +74,7 @@ describe('SplitButton', () => {
describe('emitted event', () => {
let eventHandler;
+ let changeEventHandler;
beforeEach(() => {
createComponent({ actionItems: mockActionItems });
@@ -85,6 +85,11 @@ describe('SplitButton', () => {
wrapper.vm.$once(eventName, () => eventHandler());
};
+ const addChangeEventHandler = () => {
+ changeEventHandler = jest.fn();
+ wrapper.vm.$once('change', item => changeEventHandler(item));
+ };
+
it('defaults to first actionItems event', () => {
addEventHandler(mockActionItems[0]);
@@ -100,5 +105,13 @@ describe('SplitButton', () => {
.then(() => {
expect(eventHandler).toHaveBeenCalled();
}));
+
+ it('change to selected actionItem emits change event', () => {
+ addChangeEventHandler();
+
+ return selectItem(1).then(() => {
+ expect(changeEventHandler).toHaveBeenCalledWith(mockActionItems[1]);
+ });
+ });
});
});
diff --git a/spec/frontend/vue_shared/components/table_pagination_spec.js b/spec/frontend/vue_shared/components/table_pagination_spec.js
index 8105d1fcef3..56ffffc7f0f 100644
--- a/spec/frontend/vue_shared/components/table_pagination_spec.js
+++ b/spec/frontend/vue_shared/components/table_pagination_spec.js
@@ -8,7 +8,6 @@ describe('Pagination component', () => {
const mountComponent = props => {
wrapper = shallowMount(TablePagination, {
- sync: false,
propsData: props,
});
};
diff --git a/spec/frontend/vue_shared/components/time_ago_tooltip_spec.js b/spec/frontend/vue_shared/components/time_ago_tooltip_spec.js
index f1f231c1a29..46fcb92455b 100644
--- a/spec/frontend/vue_shared/components/time_ago_tooltip_spec.js
+++ b/spec/frontend/vue_shared/components/time_ago_tooltip_spec.js
@@ -1,4 +1,4 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
import { formatDate, getTimeago } from '~/lib/utils/datetime_utility';
@@ -7,10 +7,7 @@ describe('Time ago with tooltip component', () => {
const buildVm = (propsData = {}) => {
vm = shallowMount(TimeAgoTooltip, {
- attachToDocument: true,
- sync: false,
propsData,
- localVue: createLocalVue(),
});
};
const timestamp = '2017-05-08T14:57:39.781Z';
@@ -25,7 +22,7 @@ describe('Time ago with tooltip component', () => {
});
const timeago = getTimeago();
- expect(vm.attributes('data-original-title')).toEqual(formatDate(timestamp));
+ expect(vm.attributes('title')).toEqual(formatDate(timestamp));
expect(vm.text()).toEqual(timeago.format(timestamp));
});
diff --git a/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_spec.js b/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_spec.js
index e76b2ca2d65..663d0af4cc4 100644
--- a/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_spec.js
+++ b/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_spec.js
@@ -27,7 +27,6 @@ describe('User Avatar Image Component', () => {
propsData: {
...DEFAULT_PROPS,
},
- sync: false,
});
});
@@ -54,7 +53,6 @@ describe('User Avatar Image Component', () => {
...DEFAULT_PROPS,
lazy: true,
},
- sync: false,
});
});
@@ -69,7 +67,7 @@ describe('User Avatar Image Component', () => {
describe('Initialization without src', () => {
beforeEach(() => {
- wrapper = shallowMount(UserAvatarImage, { sync: false });
+ wrapper = shallowMount(UserAvatarImage);
});
it('should have default avatar image', () => {
@@ -86,7 +84,10 @@ describe('User Avatar Image Component', () => {
};
beforeEach(() => {
- wrapper = shallowMount(UserAvatarImage, { propsData: { props }, slots, sync: false });
+ wrapper = shallowMount(UserAvatarImage, {
+ propsData: { props },
+ slots,
+ });
});
it('renders the tooltip slot', () => {
@@ -100,7 +101,7 @@ describe('User Avatar Image Component', () => {
it('does not render tooltip data attributes for on avatar image', () => {
const avatarImg = wrapper.find('img');
- expect(avatarImg.attributes('data-original-title')).toBeFalsy();
+ expect(avatarImg.attributes('title')).toBeFalsy();
expect(avatarImg.attributes('data-placement')).not.toBeDefined();
expect(avatarImg.attributes('data-container')).not.toBeDefined();
});
diff --git a/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js b/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js
index 7f5df02d51d..2f68e15b0d7 100644
--- a/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js
+++ b/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js
@@ -26,8 +26,6 @@ describe('User Avatar Link Component', () => {
...defaultProps,
...props,
},
- sync: false,
- attachToDocument: true,
});
};
@@ -99,9 +97,9 @@ describe('User Avatar Link Component', () => {
});
it('should render text tooltip for <span>', () => {
- expect(
- wrapper.find('.js-user-avatar-link-username').attributes('data-original-title'),
- ).toEqual(defaultProps.tooltipText);
+ expect(wrapper.find('.js-user-avatar-link-username').attributes('title')).toEqual(
+ defaultProps.tooltipText,
+ );
});
it('should render text tooltip placement for <span>', () => {
diff --git a/spec/javascripts/vue_shared/components/user_avatar/user_avatar_list_spec.js b/spec/frontend/vue_shared/components/user_avatar/user_avatar_list_spec.js
index 96bc3b0cc17..6f66d1cafb9 100644
--- a/spec/javascripts/vue_shared/components/user_avatar/user_avatar_list_spec.js
+++ b/spec/frontend/vue_shared/components/user_avatar/user_avatar_list_spec.js
@@ -1,4 +1,4 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import { GlButton } from '@gitlab/ui';
import { TEST_HOST } from 'spec/test_constants';
import UserAvatarList from '~/vue_shared/components/user_avatar/user_avatar_list.vue';
@@ -20,8 +20,6 @@ const createList = n =>
.fill(1)
.map((x, id) => createUser(id));
-const localVue = createLocalVue();
-
describe('UserAvatarList', () => {
let props;
let wrapper;
@@ -32,9 +30,8 @@ describe('UserAvatarList', () => {
...options.propsData,
};
- wrapper = shallowMount(localVue.extend(UserAvatarList), {
+ wrapper = shallowMount(UserAvatarList, {
...options,
- localVue,
propsData,
});
};
@@ -86,7 +83,7 @@ describe('UserAvatarList', () => {
expect(linkProps).toEqual(
items.map(x =>
- jasmine.objectContaining({
+ expect.objectContaining({
linkHref: x.web_url,
imgSrc: x.avatar_url,
imgAlt: x.name,
@@ -147,9 +144,12 @@ describe('UserAvatarList', () => {
it('with collapse clicked, it renders avatars up to breakpoint', () => {
clickButton();
- const links = wrapper.findAll(UserAvatarLink);
- expect(links.length).toEqual(TEST_BREAKPOINT);
+ return wrapper.vm.$nextTick(() => {
+ const links = wrapper.findAll(UserAvatarLink);
+
+ expect(links.length).toEqual(TEST_BREAKPOINT);
+ });
});
});
});
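The user_avatar_list hunk above also swaps jasmine.objectContaining for Jest's built-in expect.objectContaining. A small sketch of that matcher; the object literal below is invented for illustration:
describe('partial object matching (illustrative only)', () => {
  it('matches only the listed properties', () => {
    const link = {
      linkHref: '/root',
      imgSrc: '/root/avatar.png',
      imgAlt: 'Administrator',
      tooltipText: 'Administrator',
    };
    // Extra keys on the object are ignored; only the listed subset must match.
    expect(link).toEqual(
      expect.objectContaining({
        linkHref: '/root',
        imgSrc: '/root/avatar.png',
      }),
    );
  });
});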
diff --git a/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js b/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
index f2e743cc1f6..a8bbc80d2df 100644
--- a/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
+++ b/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
@@ -29,23 +29,38 @@ describe('User Popover Component', () => {
wrapper.destroy();
});
+ const findUserStatus = () => wrapper.find('.js-user-status');
+ const findTarget = () => document.querySelector('.js-user-link');
+
+ const createWrapper = (props = {}, options = {}) => {
+ wrapper = shallowMount(UserPopover, {
+ propsData: {
+ ...DEFAULT_PROPS,
+ target: findTarget(),
+ ...props,
+ },
+ ...options,
+ });
+ };
+
describe('Empty', () => {
beforeEach(() => {
- wrapper = shallowMount(UserPopover, {
- propsData: {
- target: document.querySelector('.js-user-link'),
- user: {
- name: null,
- username: null,
- location: null,
- bio: null,
- organization: null,
- status: null,
+ createWrapper(
+ {},
+ {
+ propsData: {
+ target: findTarget(),
+ user: {
+ name: null,
+ username: null,
+ location: null,
+ bio: null,
+ organization: null,
+ status: null,
+ },
},
},
- attachToDocument: true,
- sync: false,
- });
+ );
});
it('should return skeleton loaders', () => {
@@ -55,13 +70,7 @@ describe('User Popover Component', () => {
describe('basic data', () => {
it('should show basic fields', () => {
- wrapper = shallowMount(UserPopover, {
- propsData: {
- ...DEFAULT_PROPS,
- target: document.querySelector('.js-user-link'),
- },
- sync: false,
- });
+ createWrapper();
expect(wrapper.text()).toContain(DEFAULT_PROPS.user.name);
expect(wrapper.text()).toContain(DEFAULT_PROPS.user.username);
@@ -77,64 +86,38 @@ describe('User Popover Component', () => {
describe('job data', () => {
it('should show only bio if no organization is available', () => {
- const testProps = Object.assign({}, DEFAULT_PROPS);
- testProps.user.bio = 'Engineer';
+ const user = { ...DEFAULT_PROPS.user, bio: 'Engineer' };
- wrapper = shallowMount(UserPopover, {
- propsData: {
- ...testProps,
- target: document.querySelector('.js-user-link'),
- },
- sync: false,
- });
+ createWrapper({ user });
expect(wrapper.text()).toContain('Engineer');
});
it('should show only organization if no bio is available', () => {
- const testProps = Object.assign({}, DEFAULT_PROPS);
- testProps.user.organization = 'GitLab';
+ const user = { ...DEFAULT_PROPS.user, organization: 'GitLab' };
- wrapper = shallowMount(UserPopover, {
- propsData: {
- ...testProps,
- target: document.querySelector('.js-user-link'),
- },
- sync: false,
- });
+ createWrapper({ user });
expect(wrapper.text()).toContain('GitLab');
});
it('should display bio and organization in separate lines', () => {
- const testProps = Object.assign({}, DEFAULT_PROPS);
- testProps.user.bio = 'Engineer';
- testProps.user.organization = 'GitLab';
-
- wrapper = shallowMount(UserPopover, {
- propsData: {
- ...DEFAULT_PROPS,
- target: document.querySelector('.js-user-link'),
- },
- sync: false,
- });
+ const user = { ...DEFAULT_PROPS.user, bio: 'Engineer', organization: 'GitLab' };
+
+ createWrapper({ user });
expect(wrapper.find('.js-bio').text()).toContain('Engineer');
expect(wrapper.find('.js-organization').text()).toContain('GitLab');
});
it('should not encode special characters in bio and organization', () => {
- const testProps = Object.assign({}, DEFAULT_PROPS);
- testProps.user.bio = 'Manager & Team Lead';
- testProps.user.organization = 'Me & my <funky> Company';
-
- wrapper = shallowMount(UserPopover, {
- propsData: {
- ...DEFAULT_PROPS,
- target: document.querySelector('.js-user-link'),
- },
- sync: false,
- });
+ const user = {
+ ...DEFAULT_PROPS.user,
+ bio: 'Manager & Team Lead',
+ organization: 'Me & my <funky> Company',
+ };
+
+ createWrapper({ user });
expect(wrapper.find('.js-bio').text()).toContain('Manager & Team Lead');
expect(wrapper.find('.js-organization').text()).toContain('Me & my <funky> Company');
@@ -153,35 +136,41 @@ describe('User Popover Component', () => {
describe('status data', () => {
it('should show only message', () => {
- const testProps = Object.assign({}, DEFAULT_PROPS);
- testProps.user.status = { message_html: 'Hello World' };
+ const user = { ...DEFAULT_PROPS.user, status: { message_html: 'Hello World' } };
- wrapper = shallowMount(UserPopover, {
- propsData: {
- ...DEFAULT_PROPS,
- target: document.querySelector('.js-user-link'),
- },
- sync: false,
- });
+ createWrapper({ user });
+ expect(findUserStatus().exists()).toBe(true);
expect(wrapper.text()).toContain('Hello World');
});
it('should show message and emoji', () => {
- const testProps = Object.assign({}, DEFAULT_PROPS);
- testProps.user.status = { emoji: 'basketball_player', message_html: 'Hello World' };
-
- wrapper = shallowMount(UserPopover, {
- propsData: {
- ...DEFAULT_PROPS,
- target: document.querySelector('.js-user-link'),
- status: { emoji: 'basketball_player', message_html: 'Hello World' },
- },
- sync: false,
- });
+ const user = {
+ ...DEFAULT_PROPS.user,
+ status: { emoji: 'basketball_player', message_html: 'Hello World' },
+ };
+
+ createWrapper({ user });
+ expect(findUserStatus().exists()).toBe(true);
expect(wrapper.text()).toContain('Hello World');
expect(wrapper.html()).toContain('<gl-emoji data-name="basketball_player"');
});
+
+ it('hides the div when status is null', () => {
+ const user = { ...DEFAULT_PROPS.user, status: null };
+
+ createWrapper({ user });
+
+ expect(findUserStatus().exists()).toBe(false);
+ });
+
+ it('hides the div when status is empty', () => {
+ const user = { ...DEFAULT_PROPS.user, status: { emoji: '', message_html: '' } };
+
+ createWrapper({ user });
+
+ expect(findUserStatus().exists()).toBe(false);
+ });
});
});
diff --git a/spec/frontend/vue_shared/directives/track_event_spec.js b/spec/frontend/vue_shared/directives/track_event_spec.js
index d63f6ae05b4..8d867c8e3fc 100644
--- a/spec/frontend/vue_shared/directives/track_event_spec.js
+++ b/spec/frontend/vue_shared/directives/track_event_spec.js
@@ -1,5 +1,5 @@
import Vue from 'vue';
-import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import Tracking from '~/tracking';
import TrackEvent from '~/vue_shared/directives/track_event';
@@ -17,15 +17,12 @@ const Component = Vue.component('dummy-element', {
template: '<button id="trackable" v-track-event="trackingOptions"></button>',
});
-const localVue = createLocalVue();
let wrapper;
let button;
describe('Error Tracking directive', () => {
beforeEach(() => {
- wrapper = shallowMount(localVue.extend(Component), {
- localVue,
- });
+ wrapper = shallowMount(Component);
button = wrapper.find('#trackable');
});
@@ -43,7 +40,10 @@ describe('Error Tracking directive', () => {
wrapper.setData({ trackingOptions });
const { category, action, label, property, value } = trackingOptions;
- button.trigger('click');
- expect(Tracking.event).toHaveBeenCalledWith(category, action, { label, property, value });
+
+ return wrapper.vm.$nextTick(() => {
+ button.trigger('click');
+ expect(Tracking.event).toHaveBeenCalledWith(category, action, { label, property, value });
+ });
});
});
diff --git a/spec/frontend/vue_shared/droplab_dropdown_button_spec.js b/spec/frontend/vue_shared/droplab_dropdown_button_spec.js
index 22295721328..e57c730ecee 100644
--- a/spec/frontend/vue_shared/droplab_dropdown_button_spec.js
+++ b/spec/frontend/vue_shared/droplab_dropdown_button_spec.js
@@ -1,4 +1,4 @@
-import { mount, createLocalVue } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
import DroplabDropdownButton from '~/vue_shared/components/droplab_dropdown_button.vue';
@@ -18,11 +18,8 @@ const createComponent = ({
dropdownClass = '',
actions = mockActions,
defaultAction = 0,
-}) => {
- const localVue = createLocalVue();
-
- return mount(DroplabDropdownButton, {
- localVue,
+}) =>
+ mount(DroplabDropdownButton, {
propsData: {
size,
dropdownClass,
@@ -30,7 +27,6 @@ const createComponent = ({
defaultAction,
},
});
-};
describe('DroplabDropdownButton', () => {
let wrapper;
diff --git a/spec/frontend/vue_shared/mixins/gl_feature_flags_mixin_spec.js b/spec/frontend/vue_shared/mixins/gl_feature_flags_mixin_spec.js
index a3e3270a4e8..3ce12caf95a 100644
--- a/spec/frontend/vue_shared/mixins/gl_feature_flags_mixin_spec.js
+++ b/spec/frontend/vue_shared/mixins/gl_feature_flags_mixin_spec.js
@@ -1,8 +1,6 @@
-import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
-const localVue = createLocalVue();
-
describe('GitLab Feature Flags Mixin', () => {
let wrapper;
@@ -20,7 +18,6 @@ describe('GitLab Feature Flags Mixin', () => {
};
wrapper = shallowMount(component, {
- localVue,
provide: {
glFeatures: { ...(gon.features || {}) },
},
diff --git a/spec/frontend/vuex_shared/bindings_spec.js b/spec/frontend/vuex_shared/bindings_spec.js
new file mode 100644
index 00000000000..0f91a09018f
--- /dev/null
+++ b/spec/frontend/vuex_shared/bindings_spec.js
@@ -0,0 +1,79 @@
+import { shallowMount } from '@vue/test-utils';
+import { mapComputed } from '~/vuex_shared/bindings';
+
+describe('Binding utils', () => {
+ describe('mapComputed', () => {
+ const defaultArgs = [['baz'], 'bar', 'foo'];
+
+ const createDummy = (mapComputedArgs = defaultArgs) => ({
+ computed: {
+ ...mapComputed(...mapComputedArgs),
+ },
+ render() {
+ return null;
+ },
+ });
+
+ const mocks = {
+ $store: {
+ state: {
+ baz: 2,
+ foo: {
+ baz: 1,
+ },
+ },
+ getters: {
+ getBaz: 'foo',
+ },
+ dispatch: jest.fn(),
+ },
+ };
+
+ it('returns an object with keys equal to the first fn parameter ', () => {
+ const keyList = ['foo1', 'foo2'];
+ const result = mapComputed(keyList, 'foo', 'bar');
+ expect(Object.keys(result)).toEqual(keyList);
+ });
+
+ it('returned object has set and get function', () => {
+ const result = mapComputed(['baz'], 'foo', 'bar');
+ expect(result.baz.set).toBeDefined();
+ expect(result.baz.get).toBeDefined();
+ });
+
+ describe('set function', () => {
+ it('invokes $store.dispatch', () => {
+ const context = shallowMount(createDummy(), { mocks });
+ context.vm.baz = 'a';
+ expect(context.vm.$store.dispatch).toHaveBeenCalledWith('bar', { baz: 'a' });
+ });
+ it('uses updateFn in list object mode if updateFn exists', () => {
+ const context = shallowMount(createDummy([[{ key: 'foo', updateFn: 'baz' }]]), { mocks });
+ context.vm.foo = 'b';
+ expect(context.vm.$store.dispatch).toHaveBeenCalledWith('baz', { foo: 'b' });
+ });
+      it('in list object mode defaults to defaultUpdateFn if updateFn does not exist', () => {
+ const context = shallowMount(createDummy([[{ key: 'foo' }], 'defaultFn']), { mocks });
+ context.vm.foo = 'c';
+ expect(context.vm.$store.dispatch).toHaveBeenCalledWith('defaultFn', { foo: 'c' });
+ });
+ });
+
+ describe('get function', () => {
+ it('if root is set returns $store.state[root][key]', () => {
+ const context = shallowMount(createDummy(), { mocks });
+ expect(context.vm.baz).toBe(mocks.$store.state.foo.baz);
+ });
+
+ it('if root is not set returns $store.state[key]', () => {
+ const context = shallowMount(createDummy([['baz'], 'bar']), { mocks });
+ expect(context.vm.baz).toBe(mocks.$store.state.baz);
+ });
+
+      it('when using getters it invokes the appropriate getter', () => {
+ const context = shallowMount(createDummy([[{ getter: 'getBaz', key: 'baz' }]]), { mocks });
+ expect(context.vm.baz).toBe(mocks.$store.getters.getBaz);
+ });
+ });
+ });
+});
diff --git a/spec/graphql/gitlab_schema_spec.rb b/spec/graphql/gitlab_schema_spec.rb
index dcf3c989047..2ec477fc494 100644
--- a/spec/graphql/gitlab_schema_spec.rb
+++ b/spec/graphql/gitlab_schema_spec.rb
@@ -124,14 +124,26 @@ describe GitlabSchema do
describe '.object_from_id' do
context 'for subclasses of `ApplicationRecord`' do
- it 'returns the correct record' do
- user = create(:user)
+ let_it_be(:user) { create(:user) }
+ it 'returns the correct record' do
result = described_class.object_from_id(user.to_global_id.to_s)
expect(result.sync).to eq(user)
end
+ it 'returns the correct record, of the expected type' do
+ result = described_class.object_from_id(user.to_global_id.to_s, expected_type: ::User)
+
+ expect(result.sync).to eq(user)
+ end
+
+ it 'fails if the type does not match' do
+ expect do
+ described_class.object_from_id(user.to_global_id.to_s, expected_type: ::Project)
+ end.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ end
+
it 'batchloads the queries' do
user1 = create(:user)
user2 = create(:user)
diff --git a/spec/graphql/resolvers/projects/grafana_integration_resolver_spec.rb b/spec/graphql/resolvers/projects/grafana_integration_resolver_spec.rb
new file mode 100644
index 00000000000..416a90a841f
--- /dev/null
+++ b/spec/graphql/resolvers/projects/grafana_integration_resolver_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Resolvers::Projects::GrafanaIntegrationResolver do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:grafana_integration) { create(:grafana_integration, project: project) }
+
+ describe '#resolve' do
+ context 'when object is not a project' do
+ it { expect(resolve_integration(obj: current_user)).to eq nil }
+ end
+
+ context 'when object is a project' do
+ it { expect(resolve_integration(obj: project)).to eq grafana_integration }
+ end
+
+ context 'when object is nil' do
+ it { expect(resolve_integration(obj: nil)).to eq nil }
+ end
+ end
+
+ def resolve_integration(obj: project, context: { current_user: current_user })
+ resolve(described_class, obj: obj, ctx: context)
+ end
+end
diff --git a/spec/graphql/types/environment_type_spec.rb b/spec/graphql/types/environment_type_spec.rb
new file mode 100644
index 00000000000..cf30893b3ca
--- /dev/null
+++ b/spec/graphql/types/environment_type_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe GitlabSchema.types['Environment'] do
+ it { expect(described_class.graphql_name).to eq('Environment') }
+
+ it 'has the expected fields' do
+ expected_fields = %w[
+ name id
+ ]
+
+ is_expected.to have_graphql_fields(*expected_fields)
+ end
+
+ it { is_expected.to require_graphql_authorizations(:read_environment) }
+end
diff --git a/spec/graphql/types/error_tracking/sentry_detailed_error_type_spec.rb b/spec/graphql/types/error_tracking/sentry_detailed_error_type_spec.rb
index 3576adb5272..30cede6f4cf 100644
--- a/spec/graphql/types/error_tracking/sentry_detailed_error_type_spec.rb
+++ b/spec/graphql/types/error_tracking/sentry_detailed_error_type_spec.rb
@@ -30,6 +30,8 @@ describe GitlabSchema.types['SentryDetailedError'] do
lastReleaseLastCommit
firstReleaseShortVersion
lastReleaseShortVersion
+ gitlabCommit
+ gitlabCommitPath
]
is_expected.to have_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/grafana_integration_type_spec.rb b/spec/graphql/types/grafana_integration_type_spec.rb
new file mode 100644
index 00000000000..ddfedc5a75c
--- /dev/null
+++ b/spec/graphql/types/grafana_integration_type_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe GitlabSchema.types['GrafanaIntegration'] do
+ let(:expected_fields) do
+ %i[
+ id
+ grafana_url
+ token
+ enabled
+ created_at
+ updated_at
+ ]
+ end
+
+ it { expect(described_class.graphql_name).to eq('GrafanaIntegration') }
+
+ it { expect(described_class).to require_graphql_authorizations(:admin_operations) }
+
+ it { is_expected.to have_graphql_fields(*expected_fields) }
+end
diff --git a/spec/graphql/types/group_type_spec.rb b/spec/graphql/types/group_type_spec.rb
index 3dd5b602aa2..de11bad0723 100644
--- a/spec/graphql/types/group_type_spec.rb
+++ b/spec/graphql/types/group_type_spec.rb
@@ -8,4 +8,10 @@ describe GitlabSchema.types['Group'] do
it { expect(described_class.graphql_name).to eq('Group') }
it { expect(described_class).to require_graphql_authorizations(:read_group) }
+
+ it 'has the expected fields' do
+ expected_fields = %w[web_url avatar_url mentions_disabled parent]
+
+ is_expected.to include_graphql_fields(*expected_fields)
+ end
end
diff --git a/spec/graphql/types/project_type_spec.rb b/spec/graphql/types/project_type_spec.rb
index a3c51f24307..ac2d2d6f7f0 100644
--- a/spec/graphql/types/project_type_spec.rb
+++ b/spec/graphql/types/project_type_spec.rb
@@ -23,6 +23,7 @@ describe GitlabSchema.types['Project'] do
only_allow_merge_if_all_discussions_are_resolved printing_merge_request_link_enabled
namespace group statistics repository merge_requests merge_request issues
issue pipelines removeSourceBranchAfterMerge sentryDetailedError snippets
+ grafanaIntegration autocloseReferencedIssues suggestion_commit_message environments
]
is_expected.to include_graphql_fields(*expected_fields)
@@ -31,45 +32,49 @@ describe GitlabSchema.types['Project'] do
describe 'issue field' do
subject { described_class.fields['issue'] }
- it 'returns issue' do
- is_expected.to have_graphql_type(Types::IssueType)
- is_expected.to have_graphql_resolver(Resolvers::IssuesResolver.single)
- end
+ it { is_expected.to have_graphql_type(Types::IssueType) }
+ it { is_expected.to have_graphql_resolver(Resolvers::IssuesResolver.single) }
end
describe 'issues field' do
subject { described_class.fields['issues'] }
- it 'returns issue' do
- is_expected.to have_graphql_type(Types::IssueType.connection_type)
- is_expected.to have_graphql_resolver(Resolvers::IssuesResolver)
- end
+ it { is_expected.to have_graphql_type(Types::IssueType.connection_type) }
+ it { is_expected.to have_graphql_resolver(Resolvers::IssuesResolver) }
end
describe 'merge_requests field' do
subject { described_class.fields['mergeRequest'] }
- it 'returns merge requests' do
- is_expected.to have_graphql_type(Types::MergeRequestType)
- is_expected.to have_graphql_resolver(Resolvers::MergeRequestsResolver.single)
- end
+ it { is_expected.to have_graphql_type(Types::MergeRequestType) }
+ it { is_expected.to have_graphql_resolver(Resolvers::MergeRequestsResolver.single) }
end
describe 'merge_request field' do
subject { described_class.fields['mergeRequests'] }
- it 'returns merge request' do
- is_expected.to have_graphql_type(Types::MergeRequestType.connection_type)
- is_expected.to have_graphql_resolver(Resolvers::MergeRequestsResolver)
- end
+ it { is_expected.to have_graphql_type(Types::MergeRequestType.connection_type) }
+ it { is_expected.to have_graphql_resolver(Resolvers::MergeRequestsResolver) }
end
describe 'snippets field' do
subject { described_class.fields['snippets'] }
- it 'returns snippets' do
- is_expected.to have_graphql_type(Types::SnippetType.connection_type)
- is_expected.to have_graphql_resolver(Resolvers::Projects::SnippetsResolver)
- end
+ it { is_expected.to have_graphql_type(Types::SnippetType.connection_type) }
+ it { is_expected.to have_graphql_resolver(Resolvers::Projects::SnippetsResolver) }
+ end
+
+ describe 'grafana_integration field' do
+ subject { described_class.fields['grafanaIntegration'] }
+
+ it { is_expected.to have_graphql_type(Types::GrafanaIntegrationType) }
+ it { is_expected.to have_graphql_resolver(Resolvers::Projects::GrafanaIntegrationResolver) }
+ end
+
+ describe 'environments field' do
+ subject { described_class.fields['environments'] }
+
+ it { is_expected.to have_graphql_type(Types::EnvironmentType.connection_type) }
+ it { is_expected.to have_graphql_resolver(Resolvers::EnvironmentsResolver) }
end
end
diff --git a/spec/graphql/types/query_type_spec.rb b/spec/graphql/types/query_type_spec.rb
index b2d0ba27d4e..39a363cb913 100644
--- a/spec/graphql/types/query_type_spec.rb
+++ b/spec/graphql/types/query_type_spec.rb
@@ -7,7 +7,16 @@ describe GitlabSchema.types['Query'] do
expect(described_class.graphql_name).to eq('Query')
end
- it { is_expected.to have_graphql_fields(:project, :namespace, :group, :echo, :metadata, :current_user, :snippets) }
+ it do
+ is_expected.to have_graphql_fields(:project,
+ :namespace,
+ :group,
+ :echo,
+ :metadata,
+ :current_user,
+ :snippets
+ ).at_least
+ end
describe 'namespace field' do
subject { described_class.fields['namespace'] }
diff --git a/spec/helpers/application_helper_spec.rb b/spec/helpers/application_helper_spec.rb
index a0c85863150..a67475e47a3 100644
--- a/spec/helpers/application_helper_spec.rb
+++ b/spec/helpers/application_helper_spec.rb
@@ -206,6 +206,15 @@ describe ApplicationHelper do
end
end
+ context 'when @snippet is set' do
+ it 'returns the passed path' do
+ snippet = create(:snippet)
+ assign(:snippet, snippet)
+
+ expect(helper.external_storage_url_or_path('/foo/bar', project)).to eq('/foo/bar')
+ end
+ end
+
context 'when external storage is enabled' do
let(:user) { create(:user, static_object_token: 'hunter1') }
diff --git a/spec/helpers/application_settings_helper_spec.rb b/spec/helpers/application_settings_helper_spec.rb
index 8303c4eafbe..41008ff8eaf 100644
--- a/spec/helpers/application_settings_helper_spec.rb
+++ b/spec/helpers/application_settings_helper_spec.rb
@@ -59,4 +59,68 @@ describe ApplicationSettingsHelper do
expect(helper.integration_expanded?('plantuml_')).to be_falsey
end
end
+
+ describe '.self_monitoring_project_data' do
+ context 'when self monitoring project does not exist' do
+ it 'returns create_self_monitoring_project_path' do
+ expect(helper.self_monitoring_project_data).to include(
+ 'create_self_monitoring_project_path' =>
+ create_self_monitoring_project_admin_application_settings_path
+ )
+ end
+
+ it 'returns status_create_self_monitoring_project_path' do
+ expect(helper.self_monitoring_project_data).to include(
+ 'status_create_self_monitoring_project_path' =>
+ status_create_self_monitoring_project_admin_application_settings_path
+ )
+ end
+
+ it 'returns delete_self_monitoring_project_path' do
+ expect(helper.self_monitoring_project_data).to include(
+ 'delete_self_monitoring_project_path' =>
+ delete_self_monitoring_project_admin_application_settings_path
+ )
+ end
+
+ it 'returns status_delete_self_monitoring_project_path' do
+ expect(helper.self_monitoring_project_data).to include(
+ 'status_delete_self_monitoring_project_path' =>
+ status_delete_self_monitoring_project_admin_application_settings_path
+ )
+ end
+
+ it 'returns self_monitoring_project_exists false' do
+ expect(helper.self_monitoring_project_data).to include(
+ 'self_monitoring_project_exists' => "false"
+ )
+ end
+
+ it 'returns nil for project full_path' do
+ expect(helper.self_monitoring_project_data).to include(
+ 'self_monitoring_project_full_path' => nil
+ )
+ end
+ end
+
+ context 'when self monitoring project exists' do
+ let(:project) { build(:project) }
+
+ before do
+ stub_application_setting(instance_administration_project: project)
+ end
+
+ it 'returns self_monitoring_project_exists true' do
+ expect(helper.self_monitoring_project_data).to include(
+ 'self_monitoring_project_exists' => "true"
+ )
+ end
+
+ it 'returns project full_path' do
+ expect(helper.self_monitoring_project_data).to include(
+ 'self_monitoring_project_full_path' => project.full_path
+ )
+ end
+ end
+ end
end
diff --git a/spec/helpers/broadcast_messages_helper_spec.rb b/spec/helpers/broadcast_messages_helper_spec.rb
index d0f0e6f1dd5..a0682c0e278 100644
--- a/spec/helpers/broadcast_messages_helper_spec.rb
+++ b/spec/helpers/broadcast_messages_helper_spec.rb
@@ -4,24 +4,22 @@ require 'spec_helper'
describe BroadcastMessagesHelper do
describe 'broadcast_message' do
+ let(:current_broadcast_message) { BroadcastMessage.new(message: 'Current Message') }
+
it 'returns nil when no current message' do
expect(helper.broadcast_message(nil)).to be_nil
end
it 'includes the current message' do
- current = BroadcastMessage.new(message: 'Current Message')
-
allow(helper).to receive(:broadcast_message_style).and_return(nil)
- expect(helper.broadcast_message(current)).to include 'Current Message'
+ expect(helper.broadcast_message(current_broadcast_message)).to include 'Current Message'
end
it 'includes custom style' do
- current = BroadcastMessage.new(message: 'Current Message')
-
allow(helper).to receive(:broadcast_message_style).and_return('foo')
- expect(helper.broadcast_message(current)).to include 'style="foo"'
+ expect(helper.broadcast_message(current_broadcast_message)).to include 'style="foo"'
end
end
@@ -32,12 +30,18 @@ describe BroadcastMessagesHelper do
expect(helper.broadcast_message_style(broadcast_message)).to eq ''
end
- it 'allows custom style' do
- broadcast_message = double(color: '#f2dede', font: '#b94a48')
+ it 'allows custom style for banner messages' do
+ broadcast_message = BroadcastMessage.new(color: '#f2dede', font: '#b94a48', broadcast_type: "banner")
expect(helper.broadcast_message_style(broadcast_message))
.to match('background-color: #f2dede; color: #b94a48')
end
+
+ it 'does not add style for notification messages' do
+ broadcast_message = BroadcastMessage.new(color: '#f2dede', broadcast_type: "notification")
+
+ expect(helper.broadcast_message_style(broadcast_message)).to eq ''
+ end
end
describe 'broadcast_message_status' do
diff --git a/spec/helpers/container_expiration_policies_helper_spec.rb b/spec/helpers/container_expiration_policies_helper_spec.rb
index 3eb1234d82b..f7e851fb012 100644
--- a/spec/helpers/container_expiration_policies_helper_spec.rb
+++ b/spec/helpers/container_expiration_policies_helper_spec.rb
@@ -8,7 +8,7 @@ describe ContainerExpirationPoliciesHelper do
expected_result = [
{ key: 1, label: '1 tag per image name' },
{ key: 5, label: '5 tags per image name' },
- { key: 10, label: '10 tags per image name' },
+ { key: 10, label: '10 tags per image name', default: true },
{ key: 25, label: '25 tags per image name' },
{ key: 50, label: '50 tags per image name' },
{ key: 100, label: '100 tags per image name' }
@@ -21,7 +21,7 @@ describe ContainerExpirationPoliciesHelper do
describe '#cadence_options' do
it 'returns cadence options formatted for dropdown usage' do
expected_result = [
- { key: '1d', label: 'Every day' },
+ { key: '1d', label: 'Every day', default: true },
{ key: '7d', label: 'Every week' },
{ key: '14d', label: 'Every two weeks' },
{ key: '1month', label: 'Every month' },
@@ -37,7 +37,7 @@ describe ContainerExpirationPoliciesHelper do
expected_result = [
{ key: '7d', label: '7 days until tags are automatically removed' },
{ key: '14d', label: '14 days until tags are automatically removed' },
- { key: '30d', label: '30 days until tags are automatically removed' },
+ { key: '30d', label: '30 days until tags are automatically removed', default: true },
{ key: '90d', label: '90 days until tags are automatically removed' }
]
diff --git a/spec/helpers/environments_helper_spec.rb b/spec/helpers/environments_helper_spec.rb
index a50c8e9bf8e..b7a6cd4db74 100644
--- a/spec/helpers/environments_helper_spec.rb
+++ b/spec/helpers/environments_helper_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe EnvironmentsHelper do
- set(:environment) { create(:environment) }
- set(:project) { environment.project }
set(:user) { create(:user) }
+ set(:project) { create(:project, :repository) }
+ set(:environment) { create(:environment, project: project) }
describe '#metrics_data' do
before do
@@ -28,6 +28,7 @@ describe EnvironmentsHelper do
'empty-unable-to-connect-svg-path' => match_asset_path('/assets/illustrations/monitoring/unable_to_connect.svg'),
'metrics-endpoint' => additional_metrics_project_environment_path(project, environment, format: :json),
'deployments-endpoint' => project_environment_deployments_path(project, environment, format: :json),
+ 'default-branch' => 'master',
'environments-endpoint': project_environments_path(project, format: :json),
'project-path' => project_path(project),
'tags-path' => project_tags_path(project),
diff --git a/spec/helpers/gitlab_routing_helper_spec.rb b/spec/helpers/gitlab_routing_helper_spec.rb
index e76ebcb5637..1955927e2df 100644
--- a/spec/helpers/gitlab_routing_helper_spec.rb
+++ b/spec/helpers/gitlab_routing_helper_spec.rb
@@ -113,6 +113,29 @@ describe GitlabRoutingHelper do
end
end
+ context 'artifacts' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:job) { create(:ci_build, project: project, name: 'test:job', artifacts_expire_at: 1.hour.from_now) }
+
+ describe '#fast_download_project_job_artifacts_path' do
+ it 'matches the Rails download path' do
+ expect(fast_download_project_job_artifacts_path(project, job)).to eq(download_project_job_artifacts_path(project, job))
+ end
+ end
+
+ describe '#fast_keep_project_job_artifacts_path' do
+ it 'matches the Rails keep path' do
+ expect(fast_keep_project_job_artifacts_path(project, job)).to eq(keep_project_job_artifacts_path(project, job))
+ end
+ end
+
+ describe '#fast_browse_project_job_artifacts_path' do
+ it 'matches the Rails browse path' do
+ expect(fast_browse_project_job_artifacts_path(project, job)).to eq(browse_project_job_artifacts_path(project, job))
+ end
+ end
+ end
+
context 'snippets' do
let_it_be(:personal_snippet) { create(:personal_snippet) }
let_it_be(:project_snippet) { create(:project_snippet) }
diff --git a/spec/helpers/markup_helper_spec.rb b/spec/helpers/markup_helper_spec.rb
index fce0b5bd7e3..a775c69335e 100644
--- a/spec/helpers/markup_helper_spec.rb
+++ b/spec/helpers/markup_helper_spec.rb
@@ -273,16 +273,19 @@ describe MarkupHelper do
describe '#render_wiki_content' do
let(:wiki) { double('WikiPage', path: "file.#{extension}") }
+ let(:wiki_repository) { double('Repository') }
let(:context) do
{
pipeline: :wiki, project: project, project_wiki: wiki,
- page_slug: 'nested/page', issuable_state_filter_enabled: true
+ page_slug: 'nested/page', issuable_state_filter_enabled: true,
+ repository: wiki_repository
}
end
before do
expect(wiki).to receive(:content).and_return('wiki content')
expect(wiki).to receive(:slug).and_return('nested/page')
+ expect(wiki).to receive(:repository).and_return(wiki_repository)
helper.instance_variable_set(:@project_wiki, wiki)
end
@@ -354,10 +357,10 @@ describe MarkupHelper do
describe '#markup_unsafe' do
subject { helper.markup_unsafe(file_name, text, context) }
+ let_it_be(:project_base) { create(:project, :repository) }
+ let_it_be(:context) { { project: project_base } }
let(:file_name) { 'foo.bar' }
let(:text) { 'Noël' }
- let(:project_base) { build(:project, :repository) }
- let(:context) { { project: project_base } }
context 'when text is missing' do
let(:text) { nil }
@@ -380,12 +383,21 @@ describe MarkupHelper do
context 'when renderer returns an error' do
before do
- allow(Banzai).to receive(:render).and_raise("An error")
+ allow(Banzai).to receive(:render).and_raise(StandardError, "An error")
end
it 'returns html (rendered by ActionView::TextHelper)' do
is_expected.to eq('<p>Noël</p>')
end
+
+ it 'logs the error' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
+ instance_of(StandardError),
+ project_id: project.id, file_name: 'foo.md', context: context
+ )
+
+ subject
+ end
end
end
@@ -410,7 +422,7 @@ describe MarkupHelper do
end
context 'when file has an unknown type' do
- let(:file_name) { 'foo' }
+ let(:file_name) { 'foo.tex' }
it 'returns html (rendered by Gitlab::OtherMarkup)' do
expected_html = 'Noël'
diff --git a/spec/helpers/projects/error_tracking_helper_spec.rb b/spec/helpers/projects/error_tracking_helper_spec.rb
index 753144eef89..325ff32dd89 100644
--- a/spec/helpers/projects/error_tracking_helper_spec.rb
+++ b/spec/helpers/projects/error_tracking_helper_spec.rb
@@ -11,6 +11,8 @@ describe Projects::ErrorTrackingHelper do
describe '#error_tracking_data' do
let(:can_enable_error_tracking) { true }
let(:setting_path) { project_settings_operations_path(project) }
+ let(:list_path) { project_error_tracking_index_path(project) }
+ let(:project_path) { project.full_path }
let(:index_path) do
project_error_tracking_index_path(project, format: :json)
@@ -30,6 +32,8 @@ describe Projects::ErrorTrackingHelper do
'user-can-enable-error-tracking' => 'true',
'enable-error-tracking-link' => setting_path,
'error-tracking-enabled' => 'false',
+ 'list-path' => list_path,
+ 'project-path' => project_path,
'illustration-path' => match_asset_path('/assets/illustrations/cluster_popover.svg')
)
end
@@ -79,12 +83,26 @@ describe Projects::ErrorTrackingHelper do
describe '#error_details_data' do
let(:issue_id) { 1234 }
let(:route_params) { [project.owner, project, issue_id, { format: :json }] }
+ let(:list_path) { project_error_tracking_index_path(project) }
let(:details_path) { details_namespace_project_error_tracking_index_path(*route_params) }
+ let(:project_path) { project.full_path }
let(:stack_trace_path) { stack_trace_namespace_project_error_tracking_index_path(*route_params) }
let(:issues_path) { project_issues_path(project) }
let(:result) { helper.error_details_data(project, issue_id) }
+ it 'returns the correct list path' do
+ expect(result['list-path']).to eq list_path
+ end
+
+ it 'returns the correct issue id' do
+ expect(result['issue-id']).to eq issue_id
+ end
+
+ it 'returns the correct project path' do
+ expect(result['project-path']).to eq project_path
+ end
+
it 'returns the correct details path' do
expect(result['issue-details-path']).to eq details_path
end
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index 46228d0d1c2..c7e454771bb 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -332,13 +332,13 @@ describe ProjectsHelper do
end
it 'returns image tag for member avatar' do
- expect(helper).to receive(:image_tag).with(expected, { width: 16, class: ["avatar", "avatar-inline", "s16"], alt: "", "data-src" => anything })
+ expect(helper).to receive(:image_tag).with(expected, { width: 16, class: %w[avatar avatar-inline s16], alt: "", "data-src" => anything })
helper.link_to_member_avatar(user)
end
it 'returns image tag with avatar class' do
- expect(helper).to receive(:image_tag).with(expected, { width: 16, class: ["avatar", "avatar-inline", "s16", "any-avatar-class"], alt: "", "data-src" => anything })
+ expect(helper).to receive(:image_tag).with(expected, { width: 16, class: %w[avatar avatar-inline s16 any-avatar-class], alt: "", "data-src" => anything })
helper.link_to_member_avatar(user, avatar_class: "any-avatar-class")
end
diff --git a/spec/helpers/users_helper_spec.rb b/spec/helpers/users_helper_spec.rb
index 172ead158fb..8479f8509f5 100644
--- a/spec/helpers/users_helper_spec.rb
+++ b/spec/helpers/users_helper_spec.rb
@@ -7,6 +7,10 @@ describe UsersHelper do
let(:user) { create(:user) }
+ def filter_ee_badges(badges)
+ badges.reject { |badge| badge[:text] == 'Is using seat' }
+ end
+
describe '#user_link' do
subject { helper.user_link(user) }
@@ -118,7 +122,7 @@ describe UsersHelper do
badges = helper.user_badges_in_admin_section(blocked_user)
- expect(badges).to eq([text: "Blocked", variant: "danger"])
+ expect(filter_ee_badges(badges)).to eq([text: "Blocked", variant: "danger"])
end
end
@@ -128,7 +132,7 @@ describe UsersHelper do
badges = helper.user_badges_in_admin_section(admin_user)
- expect(badges).to eq([text: "Admin", variant: "success"])
+ expect(filter_ee_badges(badges)).to eq([text: "Admin", variant: "success"])
end
end
@@ -138,7 +142,7 @@ describe UsersHelper do
badges = helper.user_badges_in_admin_section(external_user)
- expect(badges).to eq([text: "External", variant: "secondary"])
+ expect(filter_ee_badges(badges)).to eq([text: "External", variant: "secondary"])
end
end
@@ -146,7 +150,7 @@ describe UsersHelper do
it 'returns the "It\'s You" badge' do
badges = helper.user_badges_in_admin_section(user)
- expect(badges).to eq([text: "It's you!", variant: nil])
+ expect(filter_ee_badges(badges)).to eq([text: "It's you!", variant: nil])
end
end
@@ -170,7 +174,7 @@ describe UsersHelper do
badges = helper.user_badges_in_admin_section(user)
- expect(badges).to be_empty
+ expect(filter_ee_badges(badges)).to be_empty
end
end
end
diff --git a/spec/initializers/database_config_spec.rb b/spec/initializers/database_config_spec.rb
index a5a074f5884..85577ce007a 100644
--- a/spec/initializers/database_config_spec.rb
+++ b/spec/initializers/database_config_spec.rb
@@ -11,13 +11,12 @@ describe 'Database config initializer' do
allow(ActiveRecord::Base).to receive(:establish_connection)
end
- context "when using Puma" do
- let(:puma) { double('puma') }
- let(:puma_options) { { max_threads: 8 } }
+ context "when using multi-threaded runtime" do
+ let(:max_threads) { 8 }
before do
- stub_const("Puma", puma)
- allow(puma).to receive_message_chain(:cli_config, :options).and_return(puma_options)
+ allow(Gitlab::Runtime).to receive(:multi_threaded?).and_return(true)
+ allow(Gitlab::Runtime).to receive(:max_threads).and_return(max_threads)
end
context "and no existing pool size is set" do
@@ -26,23 +25,23 @@ describe 'Database config initializer' do
end
it "sets it to the max number of worker threads" do
- expect { subject }.to change { Gitlab::Database.config['pool'] }.from(nil).to(8)
+ expect { subject }.to change { Gitlab::Database.config['pool'] }.from(nil).to(max_threads)
end
end
context "and the existing pool size is smaller than the max number of worker threads" do
before do
- stub_database_config(pool_size: 7)
+ stub_database_config(pool_size: max_threads - 1)
end
it "sets it to the max number of worker threads" do
- expect { subject }.to change { Gitlab::Database.config['pool'] }.from(7).to(8)
+ expect { subject }.to change { Gitlab::Database.config['pool'] }.by(1)
end
end
context "and the existing pool size is larger than the max number of worker threads" do
before do
- stub_database_config(pool_size: 9)
+ stub_database_config(pool_size: max_threads + 1)
end
it "keeps the configured pool size" do
@@ -51,11 +50,7 @@ describe 'Database config initializer' do
end
end
- context "when not using Puma" do
- before do
- stub_database_config(pool_size: 7)
- end
-
+ context "when using single-threaded runtime" do
it "does nothing" do
expect { subject }.not_to change { Gitlab::Database.config['pool'] }
end
diff --git a/spec/initializers/lograge_spec.rb b/spec/initializers/lograge_spec.rb
index 5dd296b6040..65652468d93 100644
--- a/spec/initializers/lograge_spec.rb
+++ b/spec/initializers/lograge_spec.rb
@@ -112,7 +112,7 @@ describe 'lograge', type: :request do
expect(log_data['exception.class']).to eq('RuntimeError')
expect(log_data['exception.message']).to eq('bad request')
- expect(log_data['exception.backtrace']).to eq(Gitlab::Profiler.clean_backtrace(backtrace))
+ expect(log_data['exception.backtrace']).to eq(Gitlab::BacktraceCleaner.clean_backtrace(backtrace))
end
end
end
diff --git a/spec/javascripts/behaviors/bind_in_out_spec.js b/spec/javascripts/behaviors/bind_in_out_spec.js
deleted file mode 100644
index 0c214f5886a..00000000000
--- a/spec/javascripts/behaviors/bind_in_out_spec.js
+++ /dev/null
@@ -1,192 +0,0 @@
-import BindInOut from '~/behaviors/bind_in_out';
-import ClassSpecHelper from '../helpers/class_spec_helper';
-
-describe('BindInOut', function() {
- describe('constructor', function() {
- beforeEach(function() {
- this.in = {};
- this.out = {};
-
- this.bindInOut = new BindInOut(this.in, this.out);
- });
-
- it('should set .in', function() {
- expect(this.bindInOut.in).toBe(this.in);
- });
-
- it('should set .out', function() {
- expect(this.bindInOut.out).toBe(this.out);
- });
-
- it('should set .eventWrapper', function() {
- expect(this.bindInOut.eventWrapper).toEqual({});
- });
-
- describe('if .in is an input', function() {
- beforeEach(function() {
- this.bindInOut = new BindInOut({ tagName: 'INPUT' });
- });
-
- it('should set .eventType to keyup ', function() {
- expect(this.bindInOut.eventType).toEqual('keyup');
- });
- });
-
- describe('if .in is a textarea', function() {
- beforeEach(function() {
- this.bindInOut = new BindInOut({ tagName: 'TEXTAREA' });
- });
-
- it('should set .eventType to keyup ', function() {
- expect(this.bindInOut.eventType).toEqual('keyup');
- });
- });
-
- describe('if .in is not an input or textarea', function() {
- beforeEach(function() {
- this.bindInOut = new BindInOut({ tagName: 'SELECT' });
- });
-
- it('should set .eventType to change ', function() {
- expect(this.bindInOut.eventType).toEqual('change');
- });
- });
- });
-
- describe('addEvents', function() {
- beforeEach(function() {
- this.in = jasmine.createSpyObj('in', ['addEventListener']);
-
- this.bindInOut = new BindInOut(this.in);
-
- this.addEvents = this.bindInOut.addEvents();
- });
-
- it('should set .eventWrapper.updateOut', function() {
- expect(this.bindInOut.eventWrapper.updateOut).toEqual(jasmine.any(Function));
- });
-
- it('should call .addEventListener', function() {
- expect(this.in.addEventListener).toHaveBeenCalledWith(
- this.bindInOut.eventType,
- this.bindInOut.eventWrapper.updateOut,
- );
- });
-
- it('should return the instance', function() {
- expect(this.addEvents).toBe(this.bindInOut);
- });
- });
-
- describe('updateOut', function() {
- beforeEach(function() {
- this.in = { value: 'the-value' };
- this.out = { textContent: 'not-the-value' };
-
- this.bindInOut = new BindInOut(this.in, this.out);
-
- this.updateOut = this.bindInOut.updateOut();
- });
-
- it('should set .out.textContent to .in.value', function() {
- expect(this.out.textContent).toBe(this.in.value);
- });
-
- it('should return the instance', function() {
- expect(this.updateOut).toBe(this.bindInOut);
- });
- });
-
- describe('removeEvents', function() {
- beforeEach(function() {
- this.in = jasmine.createSpyObj('in', ['removeEventListener']);
- this.updateOut = () => {};
-
- this.bindInOut = new BindInOut(this.in);
- this.bindInOut.eventWrapper.updateOut = this.updateOut;
-
- this.removeEvents = this.bindInOut.removeEvents();
- });
-
- it('should call .removeEventListener', function() {
- expect(this.in.removeEventListener).toHaveBeenCalledWith(
- this.bindInOut.eventType,
- this.updateOut,
- );
- });
-
- it('should return the instance', function() {
- expect(this.removeEvents).toBe(this.bindInOut);
- });
- });
-
- describe('initAll', function() {
- beforeEach(function() {
- this.ins = [0, 1, 2];
- this.instances = [];
-
- spyOn(document, 'querySelectorAll').and.returnValue(this.ins);
- spyOn(Array.prototype, 'map').and.callThrough();
- spyOn(BindInOut, 'init');
-
- this.initAll = BindInOut.initAll();
- });
-
- ClassSpecHelper.itShouldBeAStaticMethod(BindInOut, 'initAll');
-
- it('should call .querySelectorAll', function() {
- expect(document.querySelectorAll).toHaveBeenCalledWith('*[data-bind-in]');
- });
-
- it('should call .map', function() {
- expect(Array.prototype.map).toHaveBeenCalledWith(jasmine.any(Function));
- });
-
- it('should call .init for each element', function() {
- expect(BindInOut.init.calls.count()).toEqual(3);
- });
-
- it('should return an array of instances', function() {
- expect(this.initAll).toEqual(jasmine.any(Array));
- });
- });
-
- describe('init', function() {
- beforeEach(function() {
- spyOn(BindInOut.prototype, 'addEvents').and.callFake(function() {
- return this;
- });
- spyOn(BindInOut.prototype, 'updateOut').and.callFake(function() {
- return this;
- });
-
- this.init = BindInOut.init({}, {});
- });
-
- ClassSpecHelper.itShouldBeAStaticMethod(BindInOut, 'init');
-
- it('should call .addEvents', function() {
- expect(BindInOut.prototype.addEvents).toHaveBeenCalled();
- });
-
- it('should call .updateOut', function() {
- expect(BindInOut.prototype.updateOut).toHaveBeenCalled();
- });
-
- describe('if no anOut is provided', function() {
- beforeEach(function() {
- this.anIn = { dataset: { bindIn: 'the-data-bind-in' } };
-
- spyOn(document, 'querySelector');
-
- BindInOut.init(this.anIn);
- });
-
- it('should call .querySelector', function() {
- expect(document.querySelector).toHaveBeenCalledWith(
- `*[data-bind-out="${this.anIn.dataset.bindIn}"]`,
- );
- });
- });
- });
-});
diff --git a/spec/javascripts/breakpoints_spec.js b/spec/javascripts/breakpoints_spec.js
deleted file mode 100644
index fc0d9eb907a..00000000000
--- a/spec/javascripts/breakpoints_spec.js
+++ /dev/null
@@ -1,27 +0,0 @@
-import bp, { breakpoints } from '~/breakpoints';
-
-describe('breakpoints', () => {
- Object.keys(breakpoints).forEach(key => {
- const size = breakpoints[key];
-
- it(`returns ${key} when larger than ${size}`, () => {
- spyOn(bp, 'windowWidth').and.returnValue(size + 10);
-
- expect(bp.getBreakpointSize()).toBe(key);
- });
- });
-
- describe('isDesktop', () => {
- it('returns true when screen size is medium', () => {
- spyOn(bp, 'windowWidth').and.returnValue(breakpoints.md + 10);
-
- expect(bp.isDesktop()).toBe(true);
- });
-
- it('returns false when screen size is small', () => {
- spyOn(bp, 'windowWidth').and.returnValue(breakpoints.sm + 10);
-
- expect(bp.isDesktop()).toBe(false);
- });
- });
-});
diff --git a/spec/javascripts/create_cluster/gke_cluster/components/gke_machine_type_dropdown_spec.js b/spec/javascripts/create_cluster/gke_cluster/components/gke_machine_type_dropdown_spec.js
deleted file mode 100644
index e687040ddf9..00000000000
--- a/spec/javascripts/create_cluster/gke_cluster/components/gke_machine_type_dropdown_spec.js
+++ /dev/null
@@ -1,109 +0,0 @@
-import Vue from 'vue';
-import { mountComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
-import GkeMachineTypeDropdown from '~/create_cluster/gke_cluster/components/gke_machine_type_dropdown.vue';
-import { createStore } from '~/create_cluster/gke_cluster/store';
-import {
- SET_PROJECT,
- SET_PROJECT_BILLING_STATUS,
- SET_ZONE,
- SET_MACHINE_TYPES,
-} from '~/create_cluster/gke_cluster/store/mutation_types';
-import {
- selectedZoneMock,
- selectedProjectMock,
- selectedMachineTypeMock,
- gapiMachineTypesResponseMock,
-} from '../mock_data';
-
-const componentConfig = {
- fieldId: 'cluster_provider_gcp_attributes_gcp_machine_type',
- fieldName: 'cluster[provider_gcp_attributes][gcp_machine_type]',
-};
-
-const LABELS = {
- LOADING: 'Fetching machine types',
- DISABLED_NO_PROJECT: 'Select project and zone to choose machine type',
- DISABLED_NO_ZONE: 'Select zone to choose machine type',
- DEFAULT: 'Select machine type',
-};
-
-const createComponent = (store, props = componentConfig) => {
- const Component = Vue.extend(GkeMachineTypeDropdown);
-
- return mountComponentWithStore(Component, {
- el: null,
- props,
- store,
- });
-};
-
-describe('GkeMachineTypeDropdown', () => {
- let vm;
- let store;
-
- beforeEach(() => {
- store = createStore();
- vm = createComponent(store);
- });
-
- afterEach(() => {
- vm.$destroy();
- });
-
- describe('shows various toggle text depending on state', () => {
- it('returns disabled state toggle text when no project and zone are selected', () => {
- expect(vm.toggleText).toBe(LABELS.DISABLED_NO_PROJECT);
- });
-
- it('returns disabled state toggle text when no zone is selected', () => {
- vm.$store.commit(SET_PROJECT, selectedProjectMock);
- vm.$store.commit(SET_PROJECT_BILLING_STATUS, true);
-
- expect(vm.toggleText).toBe(LABELS.DISABLED_NO_ZONE);
- });
-
- it('returns loading toggle text', () => {
- vm.isLoading = true;
-
- expect(vm.toggleText).toBe(LABELS.LOADING);
- });
-
- it('returns default toggle text', () => {
- expect(vm.toggleText).toBe(LABELS.DISABLED_NO_PROJECT);
-
- vm.$store.commit(SET_PROJECT, selectedProjectMock);
- vm.$store.commit(SET_PROJECT_BILLING_STATUS, true);
- vm.$store.commit(SET_ZONE, selectedZoneMock);
-
- expect(vm.toggleText).toBe(LABELS.DEFAULT);
- });
-
- it('returns machine type name if machine type selected', () => {
- vm.setItem(selectedMachineTypeMock);
-
- expect(vm.toggleText).toBe(selectedMachineTypeMock);
- });
- });
-
- describe('form input', () => {
- it('reflects new value when dropdown item is clicked', done => {
- expect(vm.$el.querySelector('input').value).toBe('');
- vm.$store.commit(SET_MACHINE_TYPES, gapiMachineTypesResponseMock.items);
-
- return vm
- .$nextTick()
- .then(() => {
- vm.$el.querySelector('.dropdown-content button').click();
-
- return vm
- .$nextTick()
- .then(() => {
- expect(vm.$el.querySelector('input').value).toBe(selectedMachineTypeMock);
- done();
- })
- .catch(done.fail);
- })
- .catch(done.fail);
- });
- });
-});
diff --git a/spec/javascripts/create_cluster/gke_cluster/components/gke_project_id_dropdown_spec.js b/spec/javascripts/create_cluster/gke_cluster/components/gke_project_id_dropdown_spec.js
deleted file mode 100644
index 4c89124454e..00000000000
--- a/spec/javascripts/create_cluster/gke_cluster/components/gke_project_id_dropdown_spec.js
+++ /dev/null
@@ -1,115 +0,0 @@
-import Vue from 'vue';
-import { mountComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
-import GkeProjectIdDropdown from '~/create_cluster/gke_cluster/components/gke_project_id_dropdown.vue';
-import { createStore } from '~/create_cluster/gke_cluster/store';
-import { SET_PROJECTS } from '~/create_cluster/gke_cluster/store/mutation_types';
-import { emptyProjectMock, selectedProjectMock } from '../mock_data';
-import { gapi } from '../helpers';
-
-const componentConfig = {
- docsUrl: 'https://console.cloud.google.com/home/dashboard',
- fieldId: 'cluster_provider_gcp_attributes_gcp_project_id',
- fieldName: 'cluster[provider_gcp_attributes][gcp_project_id]',
-};
-
-const LABELS = {
- LOADING: 'Fetching projects',
- VALIDATING_PROJECT_BILLING: 'Validating project billing status',
- DEFAULT: 'Select project',
- EMPTY: 'No projects found',
-};
-
-const createComponent = (store, props = componentConfig) => {
- const Component = Vue.extend(GkeProjectIdDropdown);
-
- return mountComponentWithStore(Component, {
- el: null,
- props,
- store,
- });
-};
-
-describe('GkeProjectIdDropdown', () => {
- let vm;
- let store;
-
- let originalGapi;
- beforeAll(() => {
- originalGapi = window.gapi;
- window.gapi = gapi();
- });
-
- afterAll(() => {
- window.gapi = originalGapi;
- });
-
- beforeEach(() => {
- store = createStore();
- vm = createComponent(store);
- });
-
- afterEach(() => {
- vm.$destroy();
- });
-
- describe('toggleText', () => {
- it('returns loading toggle text', () => {
- expect(vm.toggleText).toBe(LABELS.LOADING);
- });
-
- it('returns project billing validation text', () => {
- vm.setIsValidatingProjectBilling(true);
-
- expect(vm.toggleText).toBe(LABELS.VALIDATING_PROJECT_BILLING);
- });
-
- it('returns default toggle text', done =>
- setTimeout(() => {
- vm.setItem(emptyProjectMock);
-
- expect(vm.toggleText).toBe(LABELS.DEFAULT);
-
- done();
- }));
-
- it('returns project name if project selected', done =>
- setTimeout(() => {
- vm.isLoading = false;
-
- expect(vm.toggleText).toBe(selectedProjectMock.name);
-
- done();
- }));
-
- it('returns empty toggle text', done =>
- setTimeout(() => {
- vm.$store.commit(SET_PROJECTS, null);
- vm.setItem(emptyProjectMock);
-
- expect(vm.toggleText).toBe(LABELS.EMPTY);
-
- done();
- }));
- });
-
- describe('selectItem', () => {
- it('reflects new value when dropdown item is clicked', done => {
- expect(vm.$el.querySelector('input').value).toBe('');
-
- return vm
- .$nextTick()
- .then(() => {
- vm.$el.querySelector('.dropdown-content button').click();
-
- return vm
- .$nextTick()
- .then(() => {
- expect(vm.$el.querySelector('input').value).toBe(selectedProjectMock.projectId);
- done();
- })
- .catch(done.fail);
- })
- .catch(done.fail);
- });
- });
-});
diff --git a/spec/javascripts/diffs/components/app_spec.js b/spec/javascripts/diffs/components/app_spec.js
index 48e1ed18a2f..5f97182489e 100644
--- a/spec/javascripts/diffs/components/app_spec.js
+++ b/spec/javascripts/diffs/components/app_spec.js
@@ -10,6 +10,7 @@ import CompareVersions from '~/diffs/components/compare_versions.vue';
import HiddenFilesWarning from '~/diffs/components/hidden_files_warning.vue';
import CommitWidget from '~/diffs/components/commit_widget.vue';
import TreeList from '~/diffs/components/tree_list.vue';
+import { INLINE_DIFF_VIEW_TYPE, PARALLEL_DIFF_VIEW_TYPE } from '~/diffs/constants';
import createDiffsStore from '../create_diffs_store';
import diffsMockData from '../mock_data/merge_request_diffs';
@@ -41,7 +42,6 @@ describe('diffs/components/app', () => {
changesEmptyStateIllustration: '',
dismissEndpoint: '',
showSuggestPopover: true,
- useSingleDiffStyle: false,
...props,
},
store,
@@ -53,6 +53,12 @@ describe('diffs/components/app', () => {
});
}
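+ // Returns whichever diff view type (inline or parallel) is not currently active,
+ // used to simulate a user switching diff view modes in these tests.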
+ function getOppositeViewType(currentViewType) {
+ return currentViewType === INLINE_DIFF_VIEW_TYPE
+ ? PARALLEL_DIFF_VIEW_TYPE
+ : INLINE_DIFF_VIEW_TYPE;
+ }
+
beforeEach(() => {
// setup globals (needed for component to mount :/)
window.mrTabs = jasmine.createSpyObj('mrTabs', ['resetViewContainer']);
@@ -68,17 +74,164 @@ describe('diffs/components/app', () => {
});
describe('fetch diff methods', () => {
- beforeEach(() => {
+ beforeEach(done => {
+ const fetchResolver = () => {
+ store.state.diffs.retrievingBatches = false;
+ store.state.notes.discussions = 'test';
+ return Promise.resolve({ real_size: 100 });
+ };
spyOn(window, 'requestIdleCallback').and.callFake(fn => fn());
createComponent();
- spyOn(wrapper.vm, 'fetchDiffFiles').and.callFake(() => Promise.resolve());
- spyOn(wrapper.vm, 'fetchDiffFilesMeta').and.callFake(() => Promise.resolve());
- spyOn(wrapper.vm, 'fetchDiffFilesBatch').and.callFake(() => Promise.resolve());
+ spyOn(wrapper.vm, 'fetchDiffFiles').and.callFake(fetchResolver);
+ spyOn(wrapper.vm, 'fetchDiffFilesMeta').and.callFake(fetchResolver);
+ spyOn(wrapper.vm, 'fetchDiffFilesBatch').and.callFake(fetchResolver);
spyOn(wrapper.vm, 'setDiscussions');
spyOn(wrapper.vm, 'startRenderDiffsQueue');
+ spyOn(wrapper.vm, 'unwatchDiscussions');
+ spyOn(wrapper.vm, 'unwatchRetrievingBatches');
+ store.state.diffs.retrievingBatches = true;
+ store.state.diffs.diffFiles = [];
+ wrapper.vm.$nextTick(done);
+ });
+
+ describe('when the diff view type changes and it should load a single diff view style', () => {
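+ // Diff file fixtures covering each combination of loaded line styles:
+ // neither, parallel only, inline only, and both (fully loaded).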
+ const noLinesDiff = {
+ highlighted_diff_lines: [],
+ parallel_diff_lines: [],
+ };
+ const parallelLinesDiff = {
+ highlighted_diff_lines: [],
+ parallel_diff_lines: ['line'],
+ };
+ const inlineLinesDiff = {
+ highlighted_diff_lines: ['line'],
+ parallel_diff_lines: [],
+ };
+ const fullDiff = {
+ highlighted_diff_lines: ['line'],
+ parallel_diff_lines: ['line'],
+ };
+
+ function expectFetchToOccur({
+ vueInstance,
+ done = () => {},
+ batch = false,
+ existingFiles = 1,
+ } = {}) {
+ vueInstance.$nextTick(() => {
+ expect(vueInstance.diffFiles.length).toEqual(existingFiles);
+
+ if (!batch) {
+ expect(vueInstance.fetchDiffFiles).toHaveBeenCalled();
+ expect(vueInstance.fetchDiffFilesBatch).not.toHaveBeenCalled();
+ } else {
+ expect(vueInstance.fetchDiffFiles).not.toHaveBeenCalled();
+ expect(vueInstance.fetchDiffFilesBatch).toHaveBeenCalled();
+ }
+
+ done();
+ });
+ }
+
+ beforeEach(() => {
+ wrapper.vm.glFeatures.singleMrDiffView = true;
+ });
+
+ it('fetches diffs if it has none', done => {
+ wrapper.vm.isLatestVersion = () => false;
+
+ store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
+
+ expectFetchToOccur({ vueInstance: wrapper.vm, batch: false, existingFiles: 0, done });
+ });
+
+ it('fetches diffs if it has both view styles, but no lines in either', done => {
+ wrapper.vm.isLatestVersion = () => false;
+
+ store.state.diffs.diffFiles.push(noLinesDiff);
+ store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
+
+ expectFetchToOccur({ vueInstance: wrapper.vm, done });
+ });
+
+ it('fetches diffs if it only has inline view style', done => {
+ wrapper.vm.isLatestVersion = () => false;
+
+ store.state.diffs.diffFiles.push(inlineLinesDiff);
+ store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
+
+ expectFetchToOccur({ vueInstance: wrapper.vm, done });
+ });
+
+ it('fetches diffs if it only has parallel view style', done => {
+ wrapper.vm.isLatestVersion = () => false;
+
+ store.state.diffs.diffFiles.push(parallelLinesDiff);
+ store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
+
+ expectFetchToOccur({ vueInstance: wrapper.vm, done });
+ });
+
+ it('fetches batch diffs if it has none', done => {
+ wrapper.vm.glFeatures.diffsBatchLoad = true;
+
+ store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
+
+ expectFetchToOccur({ vueInstance: wrapper.vm, batch: true, existingFiles: 0, done });
+ });
+
+ it('fetches batch diffs if it has both view styles, but no lines in either', done => {
+ wrapper.vm.glFeatures.diffsBatchLoad = true;
+
+ store.state.diffs.diffFiles.push(noLinesDiff);
+ store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
+
+ expectFetchToOccur({ vueInstance: wrapper.vm, batch: true, done });
+ });
+
+ it('fetches batch diffs if it only has inline view style', done => {
+ wrapper.vm.glFeatures.diffsBatchLoad = true;
+
+ store.state.diffs.diffFiles.push(inlineLinesDiff);
+ store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
+
+ expectFetchToOccur({ vueInstance: wrapper.vm, batch: true, done });
+ });
+
+ it('fetches batch diffs if it only has parallel view style', done => {
+ wrapper.vm.glFeatures.diffsBatchLoad = true;
+
+ store.state.diffs.diffFiles.push(parallelLinesDiff);
+ store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
+
+ expectFetchToOccur({ vueInstance: wrapper.vm, batch: true, done });
+ });
+
+ it('does not fetch diffs if it has already fetched both styles of diff', () => {
+ wrapper.vm.glFeatures.diffsBatchLoad = false;
+
+ store.state.diffs.diffFiles.push(fullDiff);
+ store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
+
+ expect(wrapper.vm.diffFiles.length).toEqual(1);
+ expect(wrapper.vm.fetchDiffFiles).not.toHaveBeenCalled();
+ expect(wrapper.vm.fetchDiffFilesBatch).not.toHaveBeenCalled();
+ });
+
+ it('does not fetch batch diffs if it has already fetched both styles of diff', () => {
+ wrapper.vm.glFeatures.diffsBatchLoad = true;
+
+ store.state.diffs.diffFiles.push(fullDiff);
+ store.state.diffs.diffViewType = getOppositeViewType(wrapper.vm.diffViewType);
+
+ expect(wrapper.vm.diffFiles.length).toEqual(1);
+ expect(wrapper.vm.fetchDiffFiles).not.toHaveBeenCalled();
+ expect(wrapper.vm.fetchDiffFilesBatch).not.toHaveBeenCalled();
+ });
});
it('calls fetchDiffFiles if diffsBatchLoad is not enabled', done => {
+ expect(wrapper.vm.diffFilesLength).toEqual(0);
wrapper.vm.glFeatures.diffsBatchLoad = false;
wrapper.vm.fetchData(false);
@@ -87,33 +240,46 @@ describe('diffs/components/app', () => {
expect(wrapper.vm.startRenderDiffsQueue).toHaveBeenCalled();
expect(wrapper.vm.fetchDiffFilesMeta).not.toHaveBeenCalled();
expect(wrapper.vm.fetchDiffFilesBatch).not.toHaveBeenCalled();
+ expect(wrapper.vm.unwatchDiscussions).toHaveBeenCalled();
+ expect(wrapper.vm.diffFilesLength).toEqual(100);
+ expect(wrapper.vm.unwatchRetrievingBatches).toHaveBeenCalled();
done();
});
});
- it('calls batch methods if diffsBatchLoad is enabled, and not latest version', () => {
+ it('calls batch methods if diffsBatchLoad is enabled, and not latest version', done => {
+ expect(wrapper.vm.diffFilesLength).toEqual(0);
wrapper.vm.glFeatures.diffsBatchLoad = true;
wrapper.vm.isLatestVersion = () => false;
wrapper.vm.fetchData(false);
expect(wrapper.vm.fetchDiffFiles).not.toHaveBeenCalled();
- wrapper.vm.$nextTick(() => {
+ setTimeout(() => {
expect(wrapper.vm.startRenderDiffsQueue).toHaveBeenCalled();
expect(wrapper.vm.fetchDiffFilesMeta).toHaveBeenCalled();
expect(wrapper.vm.fetchDiffFilesBatch).toHaveBeenCalled();
+ expect(wrapper.vm.unwatchDiscussions).toHaveBeenCalled();
+ expect(wrapper.vm.diffFilesLength).toEqual(100);
+ expect(wrapper.vm.unwatchRetrievingBatches).toHaveBeenCalled();
+ done();
});
});
- it('calls batch methods if diffsBatchLoad is enabled, and latest version', () => {
+ it('calls batch methods if diffsBatchLoad is enabled, and latest version', done => {
+ expect(wrapper.vm.diffFilesLength).toEqual(0);
wrapper.vm.glFeatures.diffsBatchLoad = true;
wrapper.vm.fetchData(false);
expect(wrapper.vm.fetchDiffFiles).not.toHaveBeenCalled();
- wrapper.vm.$nextTick(() => {
+ setTimeout(() => {
expect(wrapper.vm.startRenderDiffsQueue).toHaveBeenCalled();
expect(wrapper.vm.fetchDiffFilesMeta).toHaveBeenCalled();
expect(wrapper.vm.fetchDiffFilesBatch).toHaveBeenCalled();
+ expect(wrapper.vm.unwatchDiscussions).toHaveBeenCalled();
+ expect(wrapper.vm.diffFilesLength).toEqual(100);
+ expect(wrapper.vm.unwatchRetrievingBatches).toHaveBeenCalled();
+ done();
});
});
});
diff --git a/spec/javascripts/diffs/components/compare_versions_dropdown_spec.js b/spec/javascripts/diffs/components/compare_versions_dropdown_spec.js
index 8a3834d542f..df160d7a363 100644
--- a/spec/javascripts/diffs/components/compare_versions_dropdown_spec.js
+++ b/spec/javascripts/diffs/components/compare_versions_dropdown_spec.js
@@ -25,7 +25,6 @@ describe('CompareVersionsDropdown', () => {
const createComponent = (props = {}) => {
wrapper = shallowMount(localVue.extend(CompareVersionsDropdown), {
localVue,
- sync: false,
propsData: { ...props },
});
};
diff --git a/spec/javascripts/diffs/components/diff_discussions_spec.js b/spec/javascripts/diffs/components/diff_discussions_spec.js
index f7f0ab83c21..1b924bb947d 100644
--- a/spec/javascripts/diffs/components/diff_discussions_spec.js
+++ b/spec/javascripts/diffs/components/diff_discussions_spec.js
@@ -24,7 +24,6 @@ describe('DiffDiscussions', () => {
...props,
},
localVue,
- sync: false,
});
};
diff --git a/spec/javascripts/diffs/store/actions_spec.js b/spec/javascripts/diffs/store/actions_spec.js
index b23334d38dc..af2dd7b4f93 100644
--- a/spec/javascripts/diffs/store/actions_spec.js
+++ b/spec/javascripts/diffs/store/actions_spec.js
@@ -120,7 +120,7 @@ describe('DiffsStoreActions', () => {
describe('fetchDiffFiles', () => {
it('should fetch diff files', done => {
- const endpoint = '/fetch/diff/files?w=1';
+ const endpoint = '/fetch/diff/files?view=inline&w=1';
const mock = new MockAdapter(axios);
const res = { diff_files: 1, merge_request_diffs: [] };
mock.onGet(endpoint).reply(200, res);
@@ -128,7 +128,7 @@ describe('DiffsStoreActions', () => {
testAction(
fetchDiffFiles,
{},
- { endpoint },
+ { endpoint, diffFiles: [], showWhitespace: false, diffViewType: 'inline' },
[
{ type: types.SET_LOADING, payload: true },
{ type: types.SET_LOADING, payload: false },
@@ -141,6 +141,13 @@ describe('DiffsStoreActions', () => {
done();
},
);
+
+ fetchDiffFiles({ state: { endpoint }, commit: () => null })
+ .then(data => {
+ expect(data).toEqual(res);
+ done();
+ })
+ .catch(done.fail);
});
});
@@ -163,10 +170,12 @@ describe('DiffsStoreActions', () => {
{ endpointBatch },
[
{ type: types.SET_BATCH_LOADING, payload: true },
+ { type: types.SET_RETRIEVING_BATCHES, payload: true },
{ type: types.SET_DIFF_DATA_BATCH, payload: { diff_files: res1.diff_files } },
{ type: types.SET_BATCH_LOADING, payload: false },
{ type: types.SET_DIFF_DATA_BATCH, payload: { diff_files: [] } },
{ type: types.SET_BATCH_LOADING, payload: false },
+ { type: types.SET_RETRIEVING_BATCHES, payload: false },
],
[],
() => {
@@ -215,6 +224,8 @@ describe('DiffsStoreActions', () => {
describe('assignDiscussionsToDiff', () => {
it('should merge discussions into diffs', done => {
+ window.location.hash = 'ABC_123';
+
const state = {
diffFiles: [
{
diff --git a/spec/javascripts/diffs/store/getters_spec.js b/spec/javascripts/diffs/store/getters_spec.js
index eab5703dfb2..9e628fdd540 100644
--- a/spec/javascripts/diffs/store/getters_spec.js
+++ b/spec/javascripts/diffs/store/getters_spec.js
@@ -263,14 +263,6 @@ describe('Diffs Module Getters', () => {
});
});
- describe('diffFilesLength', () => {
- it('returns length of diff files', () => {
- localState.diffFiles.push('test', 'test 2');
-
- expect(getters.diffFilesLength(localState)).toBe(2);
- });
- });
-
describe('currentDiffIndex', () => {
it('returns index of currently selected diff in diffList', () => {
localState.diffFiles = [{ file_hash: '111' }, { file_hash: '222' }, { file_hash: '333' }];
diff --git a/spec/javascripts/diffs/store/mutations_spec.js b/spec/javascripts/diffs/store/mutations_spec.js
index 13f16e4f9a6..24405dcc796 100644
--- a/spec/javascripts/diffs/store/mutations_spec.js
+++ b/spec/javascripts/diffs/store/mutations_spec.js
@@ -40,9 +40,26 @@ describe('DiffsStoreMutations', () => {
});
});
+ describe('SET_RETRIEVING_BATCHES', () => {
+ it('should set retrievingBatches state', () => {
+ const state = {};
+
+ mutations[types.SET_RETRIEVING_BATCHES](state, false);
+
+ expect(state.retrievingBatches).toEqual(false);
+ });
+ });
+
describe('SET_DIFF_DATA', () => {
it('should set diff data type properly', () => {
- const state = {};
+ const state = {
+ diffFiles: [
+ {
+ content_sha: diffFileMockData.content_sha,
+ file_hash: diffFileMockData.file_hash,
+ },
+ ],
+ };
const diffMock = {
diff_files: [diffFileMockData],
};
@@ -52,9 +69,41 @@ describe('DiffsStoreMutations', () => {
const firstLine = state.diffFiles[0].parallel_diff_lines[0];
expect(firstLine.right.text).toBeUndefined();
+ expect(state.diffFiles.length).toEqual(1);
expect(state.diffFiles[0].renderIt).toEqual(true);
expect(state.diffFiles[0].collapsed).toEqual(false);
});
+
+ describe('given diffsBatchLoad feature flag is enabled', () => {
+ beforeEach(() => {
+ gon.features = { diffsBatchLoad: true };
+ });
+
+ afterEach(() => {
+ delete gon.features;
+ });
+
+ it('should not modify the existing state', () => {
+ const state = {
+ diffFiles: [
+ {
+ content_sha: diffFileMockData.content_sha,
+ file_hash: diffFileMockData.file_hash,
+ highlighted_diff_lines: [],
+ },
+ ],
+ };
+ const diffMock = {
+ diff_files: [diffFileMockData],
+ };
+
+ mutations[types.SET_DIFF_DATA](state, diffMock);
+
+ // When batch loading is enabled, SET_DIFF_DATA should not process the existing
+ // state object, so no parallel_diff_lines should have been added to it.
+ expect(state.diffFiles[0].parallel_diff_lines).toBeUndefined();
+ });
+ });
});
describe('SET_DIFF_DATA_BATCH_DATA', () => {
@@ -158,11 +207,17 @@ describe('DiffsStoreMutations', () => {
it('should update the state with the given data for the given file hash', () => {
const fileHash = 123;
const state = {
- diffFiles: [{}, { file_hash: fileHash, existing_field: 0 }],
+ diffFiles: [{}, { content_sha: 'abc', file_hash: fileHash, existing_field: 0 }],
};
const data = {
diff_files: [
- { file_hash: fileHash, extra_field: 1, existing_field: 1, viewer: { name: 'text' } },
+ {
+ content_sha: 'abc',
+ file_hash: fileHash,
+ extra_field: 1,
+ existing_field: 1,
+ viewer: { name: 'text' },
+ },
],
};
@@ -198,7 +253,7 @@ describe('DiffsStoreMutations', () => {
discussions: [],
},
right: {
- line_code: 'ABC_1',
+ line_code: 'ABC_2',
discussions: [],
},
},
@@ -264,7 +319,7 @@ describe('DiffsStoreMutations', () => {
discussions: [],
},
right: {
- line_code: 'ABC_1',
+ line_code: 'ABC_2',
discussions: [],
},
},
@@ -342,7 +397,7 @@ describe('DiffsStoreMutations', () => {
discussions: [],
},
right: {
- line_code: 'ABC_1',
+ line_code: 'ABC_2',
discussions: [],
},
},
@@ -438,6 +493,7 @@ describe('DiffsStoreMutations', () => {
discussions: [],
},
],
+ parallel_diff_lines: [],
},
],
};
diff --git a/spec/javascripts/diffs/store/utils_spec.js b/spec/javascripts/diffs/store/utils_spec.js
index 65eb4c9d2a3..638b4510221 100644
--- a/spec/javascripts/diffs/store/utils_spec.js
+++ b/spec/javascripts/diffs/store/utils_spec.js
@@ -314,11 +314,29 @@ describe('DiffsStoreUtils', () => {
});
describe('prepareDiffData', () => {
+ let mock;
let preparedDiff;
+ let splitInlineDiff;
+ let splitParallelDiff;
+ let completedDiff;
beforeEach(() => {
- preparedDiff = { diff_files: [getDiffFileMock()] };
+ mock = getDiffFileMock();
+ preparedDiff = { diff_files: [mock] };
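+ // The "split" fixtures each omit one line style; completedDiff is later prepared
+ // together with the existing mock file so both styles end up populated.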
+ splitInlineDiff = {
+ diff_files: [Object.assign({}, mock, { parallel_diff_lines: undefined })],
+ };
+ splitParallelDiff = {
+ diff_files: [Object.assign({}, mock, { highlighted_diff_lines: undefined })],
+ };
+ completedDiff = {
+ diff_files: [Object.assign({}, mock, { highlighted_diff_lines: undefined })],
+ };
+
utils.prepareDiffData(preparedDiff);
+ utils.prepareDiffData(splitInlineDiff);
+ utils.prepareDiffData(splitParallelDiff);
+ utils.prepareDiffData(completedDiff, [mock]);
});
it('sets the renderIt and collapsed attribute on files', () => {
@@ -359,6 +377,19 @@ describe('DiffsStoreUtils', () => {
expect(firstLine.line_code).toEqual(firstLine.right.line_code);
});
+
+ it('guarantees an empty array for both diff styles', () => {
+ expect(splitInlineDiff.diff_files[0].parallel_diff_lines.length).toEqual(0);
+ expect(splitInlineDiff.diff_files[0].highlighted_diff_lines.length).toBeGreaterThan(0);
+ expect(splitParallelDiff.diff_files[0].parallel_diff_lines.length).toBeGreaterThan(0);
+ expect(splitParallelDiff.diff_files[0].highlighted_diff_lines.length).toEqual(0);
+ });
+
+ it('merges existing diff files with newly loaded diff files to ensure split diffs are eventually completed', () => {
+ expect(completedDiff.diff_files.length).toEqual(1);
+ expect(completedDiff.diff_files[0].parallel_diff_lines.length).toBeGreaterThan(0);
+ expect(completedDiff.diff_files[0].highlighted_diff_lines.length).toBeGreaterThan(0);
+ });
});
describe('isDiscussionApplicableToLine', () => {
diff --git a/spec/javascripts/droplab/constants_spec.js b/spec/javascripts/droplab/constants_spec.js
deleted file mode 100644
index 23b69defec6..00000000000
--- a/spec/javascripts/droplab/constants_spec.js
+++ /dev/null
@@ -1,39 +0,0 @@
-import * as constants from '~/droplab/constants';
-
-describe('constants', function() {
- describe('DATA_TRIGGER', function() {
- it('should be `data-dropdown-trigger`', function() {
- expect(constants.DATA_TRIGGER).toBe('data-dropdown-trigger');
- });
- });
-
- describe('DATA_DROPDOWN', function() {
- it('should be `data-dropdown`', function() {
- expect(constants.DATA_DROPDOWN).toBe('data-dropdown');
- });
- });
-
- describe('SELECTED_CLASS', function() {
- it('should be `droplab-item-selected`', function() {
- expect(constants.SELECTED_CLASS).toBe('droplab-item-selected');
- });
- });
-
- describe('ACTIVE_CLASS', function() {
- it('should be `droplab-item-active`', function() {
- expect(constants.ACTIVE_CLASS).toBe('droplab-item-active');
- });
- });
-
- describe('TEMPLATE_REGEX', function() {
- it('should be a handlebars templating syntax regex', function() {
- expect(constants.TEMPLATE_REGEX).toEqual(/\{\{(.+?)\}\}/g);
- });
- });
-
- describe('IGNORE_CLASS', function() {
- it('should be `droplab-item-ignore`', function() {
- expect(constants.IGNORE_CLASS).toBe('droplab-item-ignore');
- });
- });
-});
diff --git a/spec/javascripts/droplab/drop_down_spec.js b/spec/javascripts/droplab/drop_down_spec.js
index 18ab03653f4..22346c10547 100644
--- a/spec/javascripts/droplab/drop_down_spec.js
+++ b/spec/javascripts/droplab/drop_down_spec.js
@@ -398,14 +398,21 @@ describe('DropLab DropDown', function() {
describe('render', function() {
beforeEach(function() {
- this.list = { querySelector: () => {}, dispatchEvent: () => {} };
- this.dropdown = { renderChildren: () => {}, list: this.list };
this.renderableList = {};
+ this.list = {
+ querySelector: q => {
+ if (q === '.filter-dropdown-loading') {
+ return false;
+ }
+ return this.renderableList;
+ },
+ dispatchEvent: () => {},
+ };
+ this.dropdown = { renderChildren: () => {}, list: this.list };
this.data = [0, 1];
this.customEvent = {};
spyOn(this.dropdown, 'renderChildren').and.callFake(data => data);
- spyOn(this.list, 'querySelector').and.returnValue(this.renderableList);
spyOn(this.list, 'dispatchEvent');
spyOn(this.data, 'map').and.callThrough();
spyOn(window, 'CustomEvent').and.returnValue(this.customEvent);
diff --git a/spec/javascripts/dropzone_input_spec.js b/spec/javascripts/dropzone_input_spec.js
index 8d0f0d20d89..6f6f20ccca2 100644
--- a/spec/javascripts/dropzone_input_spec.js
+++ b/spec/javascripts/dropzone_input_spec.js
@@ -1,6 +1,7 @@
import $ from 'jquery';
import { TEST_HOST } from 'spec/test_constants';
import dropzoneInput from '~/dropzone_input';
+import PasteMarkdownTable from '~/behaviors/markdown/paste_markdown_table';
const TEST_FILE = new File([], 'somefile.jpg');
TEST_FILE.upload = {};
@@ -25,6 +26,34 @@ describe('dropzone_input', () => {
expect(dropzone.version).toBeTruthy();
});
+ describe('handlePaste', () => {
+ beforeEach(() => {
+ loadFixtures('issues/new-issue.html');
+
+ const form = $('#new_issue');
+ form.data('uploads-path', TEST_UPLOAD_PATH);
+ dropzoneInput(form);
+ });
+
+ it('pastes Markdown tables', () => {
+ const event = $.Event('paste');
+ const origEvent = new Event('paste');
+ const pasteData = new DataTransfer();
+ pasteData.setData('text/plain', 'Hello World');
+ pasteData.setData('text/html', '<table><tr><td>Hello World</td></tr></table>');
+ origEvent.clipboardData = pasteData;
+ event.originalEvent = origEvent;
+
+ spyOn(PasteMarkdownTable.prototype, 'isTable').and.callThrough();
+ spyOn(PasteMarkdownTable.prototype, 'convertToTableMarkdown').and.callThrough();
+
+ $('.js-gfm-input').trigger(event);
+
+ expect(PasteMarkdownTable.prototype.isTable).toHaveBeenCalled();
+ expect(PasteMarkdownTable.prototype.convertToTableMarkdown).toHaveBeenCalled();
+ });
+ });
+
describe('shows error message', () => {
let form;
let dropzone;
diff --git a/spec/javascripts/filtered_search/dropdown_utils_spec.js b/spec/javascripts/filtered_search/dropdown_utils_spec.js
index 62d1bd69635..6eda4f391a4 100644
--- a/spec/javascripts/filtered_search/dropdown_utils_spec.js
+++ b/spec/javascripts/filtered_search/dropdown_utils_spec.js
@@ -222,7 +222,7 @@ describe('Dropdown Utils', () => {
hasAttribute: () => false,
};
- DropdownUtils.setDataValueIfSelected(null, selected);
+ DropdownUtils.setDataValueIfSelected(null, '=', selected);
expect(FilteredSearchDropdownManager.addWordToInput.calls.count()).toEqual(1);
});
@@ -233,9 +233,11 @@ describe('Dropdown Utils', () => {
hasAttribute: () => false,
};
- const result = DropdownUtils.setDataValueIfSelected(null, selected);
+ const result = DropdownUtils.setDataValueIfSelected(null, '=', selected);
+ const result2 = DropdownUtils.setDataValueIfSelected(null, '!=', selected);
expect(result).toBe(true);
+ expect(result2).toBe(true);
});
it('returns false when dataValue does not exist', () => {
@@ -243,9 +245,11 @@ describe('Dropdown Utils', () => {
getAttribute: () => null,
};
- const result = DropdownUtils.setDataValueIfSelected(null, selected);
+ const result = DropdownUtils.setDataValueIfSelected(null, '=', selected);
+ const result2 = DropdownUtils.setDataValueIfSelected(null, '!=', selected);
expect(result).toBe(false);
+ expect(result2).toBe(false);
});
});
@@ -349,7 +353,7 @@ describe('Dropdown Utils', () => {
beforeEach(() => {
loadFixtures(issueListFixture);
- authorToken = FilteredSearchSpecHelper.createFilterVisualToken('author', '@user');
+ authorToken = FilteredSearchSpecHelper.createFilterVisualToken('author', '=', '@user');
const searchTermToken = FilteredSearchSpecHelper.createSearchVisualToken('search term');
const tokensContainer = document.querySelector('.tokens-container');
@@ -364,7 +368,7 @@ describe('Dropdown Utils', () => {
const searchQuery = DropdownUtils.getSearchQuery();
- expect(searchQuery).toBe(' search term author:original dance');
+ expect(searchQuery).toBe(' search term author:=original dance');
});
});
});
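With operators in play, the spec above expects a filter token to serialize as `<name>:<operator><value>`, hence the expected string 'author:=original dance'. A tiny sketch of that expectation, not the real DropdownUtils code:

// Hypothetical serializer matching the string the spec asserts on.
const tokenToQuery = ({ name, operator, value }) => `${name}:${operator}${value}`;

tokenToQuery({ name: 'author', operator: '=', value: 'original dance' });
// => 'author:=original dance'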
diff --git a/spec/javascripts/filtered_search/filtered_search_dropdown_manager_spec.js b/spec/javascripts/filtered_search/filtered_search_dropdown_manager_spec.js
index 8c5a0961a02..853f6b3b7b8 100644
--- a/spec/javascripts/filtered_search/filtered_search_dropdown_manager_spec.js
+++ b/spec/javascripts/filtered_search/filtered_search_dropdown_manager_spec.js
@@ -27,7 +27,7 @@ describe('Filtered Search Dropdown Manager', () => {
describe('input has no existing value', () => {
it('should add just tokenName', () => {
- FilteredSearchDropdownManager.addWordToInput('milestone');
+ FilteredSearchDropdownManager.addWordToInput({ tokenName: 'milestone' });
const token = document.querySelector('.tokens-container .js-visual-token');
@@ -36,8 +36,8 @@ describe('Filtered Search Dropdown Manager', () => {
expect(getInputValue()).toBe('');
});
- it('should add tokenName and tokenValue', () => {
- FilteredSearchDropdownManager.addWordToInput('label');
+ it('should add tokenName, tokenOperator, and tokenValue', () => {
+ FilteredSearchDropdownManager.addWordToInput({ tokenName: 'label' });
let token = document.querySelector('.tokens-container .js-visual-token');
@@ -45,13 +45,27 @@ describe('Filtered Search Dropdown Manager', () => {
expect(token.querySelector('.name').innerText).toBe('label');
expect(getInputValue()).toBe('');
- FilteredSearchDropdownManager.addWordToInput('label', 'none');
+ FilteredSearchDropdownManager.addWordToInput({ tokenName: 'label', tokenOperator: '=' });
+
+ token = document.querySelector('.tokens-container .js-visual-token');
+
+ expect(token.classList.contains('filtered-search-token')).toEqual(true);
+ expect(token.querySelector('.name').innerText).toBe('label');
+ expect(token.querySelector('.operator').innerText).toBe('=');
+ expect(getInputValue()).toBe('');
+
+ FilteredSearchDropdownManager.addWordToInput({
+ tokenName: 'label',
+ tokenOperator: '=',
+ tokenValue: 'none',
+ });
// We have to get that reference again
// Because FilteredSearchDropdownManager deletes the previous token
token = document.querySelector('.tokens-container .js-visual-token');
expect(token.classList.contains('filtered-search-token')).toEqual(true);
expect(token.querySelector('.name').innerText).toBe('label');
+ expect(token.querySelector('.operator').innerText).toBe('=');
expect(token.querySelector('.value').innerText).toBe('none');
expect(getInputValue()).toBe('');
});
@@ -60,7 +74,7 @@ describe('Filtered Search Dropdown Manager', () => {
describe('input has existing value', () => {
it('should be able to just add tokenName', () => {
setInputValue('a');
- FilteredSearchDropdownManager.addWordToInput('author');
+ FilteredSearchDropdownManager.addWordToInput({ tokenName: 'author' });
const token = document.querySelector('.tokens-container .js-visual-token');
@@ -70,29 +84,40 @@ describe('Filtered Search Dropdown Manager', () => {
});
it('should replace tokenValue', () => {
- FilteredSearchDropdownManager.addWordToInput('author');
+ FilteredSearchDropdownManager.addWordToInput({ tokenName: 'author' });
+ FilteredSearchDropdownManager.addWordToInput({ tokenName: 'author', tokenOperator: '=' });
setInputValue('roo');
- FilteredSearchDropdownManager.addWordToInput(null, '@root');
+ FilteredSearchDropdownManager.addWordToInput({
+ tokenName: null,
+ tokenOperator: '=',
+ tokenValue: '@root',
+ });
const token = document.querySelector('.tokens-container .js-visual-token');
expect(token.classList.contains('filtered-search-token')).toEqual(true);
expect(token.querySelector('.name').innerText).toBe('author');
+ expect(token.querySelector('.operator').innerText).toBe('=');
expect(token.querySelector('.value').innerText).toBe('@root');
expect(getInputValue()).toBe('');
});
it('should add tokenValues containing spaces', () => {
- FilteredSearchDropdownManager.addWordToInput('label');
+ FilteredSearchDropdownManager.addWordToInput({ tokenName: 'label' });
setInputValue('"test ');
- FilteredSearchDropdownManager.addWordToInput('label', '~\'"test me"\'');
+ FilteredSearchDropdownManager.addWordToInput({
+ tokenName: 'label',
+ tokenOperator: '=',
+ tokenValue: '~\'"test me"\'',
+ });
const token = document.querySelector('.tokens-container .js-visual-token');
expect(token.classList.contains('filtered-search-token')).toEqual(true);
expect(token.querySelector('.name').innerText).toBe('label');
+ expect(token.querySelector('.operator').innerText).toBe('=');
expect(token.querySelector('.value').innerText).toBe('~\'"test me"\'');
expect(getInputValue()).toBe('');
});
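The changes above track addWordToInput moving from positional arguments to a single options object that also carries the token operator. A usage sketch based solely on the calls exercised in this spec:

// Name only, then name + operator, then the full token.
FilteredSearchDropdownManager.addWordToInput({ tokenName: 'label' });
FilteredSearchDropdownManager.addWordToInput({ tokenName: 'label', tokenOperator: '=' });
FilteredSearchDropdownManager.addWordToInput({
  tokenName: 'label',
  tokenOperator: '=',
  tokenValue: 'none',
});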
diff --git a/spec/javascripts/filtered_search/filtered_search_manager_spec.js b/spec/javascripts/filtered_search/filtered_search_manager_spec.js
index e076120f5cc..e5d1d1d690e 100644
--- a/spec/javascripts/filtered_search/filtered_search_manager_spec.js
+++ b/spec/javascripts/filtered_search/filtered_search_manager_spec.js
@@ -201,8 +201,8 @@ describe('Filtered Search Manager', function() {
it('removes duplicated tokens', done => {
tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(`
- ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '~bug')}
- ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '~bug')}
+ ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '=', '~bug')}
+ ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '=', '~bug')}
`);
spyOnDependency(FilteredSearchManager, 'visitUrl').and.callFake(url => {
@@ -234,7 +234,7 @@ describe('Filtered Search Manager', function() {
it('should not render placeholder when there are tokens and no input', () => {
tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(
- FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '~bug'),
+ FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '=', '~bug'),
);
const event = new Event('input');
@@ -252,7 +252,7 @@ describe('Filtered Search Manager', function() {
describe('tokens and no input', () => {
beforeEach(() => {
tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(
- FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '~bug'),
+ FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '=', '~bug'),
);
});
@@ -306,7 +306,7 @@ describe('Filtered Search Manager', function() {
it('removes token even when it is already selected', () => {
tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(
- FilteredSearchSpecHelper.createFilterVisualTokenHTML('milestone', 'none', true),
+ FilteredSearchSpecHelper.createFilterVisualTokenHTML('milestone', '=', 'none', true),
);
tokensContainer.querySelector('.js-visual-token .remove-token').click();
@@ -319,7 +319,7 @@ describe('Filtered Search Manager', function() {
spyOn(FilteredSearchManager.prototype, 'removeSelectedToken').and.callThrough();
tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(
- FilteredSearchSpecHelper.createFilterVisualTokenHTML('milestone', 'none'),
+ FilteredSearchSpecHelper.createFilterVisualTokenHTML('milestone', '=', 'none'),
);
tokensContainer.querySelector('.js-visual-token .remove-token').click();
});
@@ -338,7 +338,7 @@ describe('Filtered Search Manager', function() {
beforeEach(() => {
initializeManager();
tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(
- FilteredSearchSpecHelper.createFilterVisualTokenHTML('milestone', 'none', true),
+ FilteredSearchSpecHelper.createFilterVisualTokenHTML('milestone', '=', 'none', true),
);
});
@@ -424,7 +424,7 @@ describe('Filtered Search Manager', function() {
});
it('Clicking the "x" clear button, clears the input', () => {
- const inputValue = 'label:~bug ';
+ const inputValue = 'label:=~bug';
manager.filteredSearchInput.value = inputValue;
manager.filteredSearchInput.dispatchEvent(new Event('input'));
diff --git a/spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js b/spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js
index 0ee13faf841..fda078bd41c 100644
--- a/spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js
+++ b/spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js
@@ -6,9 +6,10 @@ describe('Filtered Search Visual Tokens', () => {
const findElements = tokenElement => {
const tokenNameElement = tokenElement.querySelector('.name');
+ const tokenOperatorElement = tokenElement.querySelector('.operator');
const tokenValueContainer = tokenElement.querySelector('.value-container');
const tokenValueElement = tokenValueContainer.querySelector('.value');
- return { tokenNameElement, tokenValueContainer, tokenValueElement };
+ return { tokenNameElement, tokenOperatorElement, tokenValueContainer, tokenValueElement };
};
let tokensContainer;
@@ -23,8 +24,8 @@ describe('Filtered Search Visual Tokens', () => {
`);
tokensContainer = document.querySelector('.tokens-container');
- authorToken = FilteredSearchSpecHelper.createFilterVisualToken('author', '@user');
- bugLabelToken = FilteredSearchSpecHelper.createFilterVisualToken('label', '~bug');
+ authorToken = FilteredSearchSpecHelper.createFilterVisualToken('author', '=', '@user');
+ bugLabelToken = FilteredSearchSpecHelper.createFilterVisualToken('label', '=', '~bug');
});
describe('getLastVisualTokenBeforeInput', () => {
@@ -62,7 +63,7 @@ describe('Filtered Search Visual Tokens', () => {
tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(`
${bugLabelToken.outerHTML}
${FilteredSearchSpecHelper.createSearchVisualTokenHTML('search term')}
- ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('author', '@root')}
+ ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('author', '=', '@root')}
`);
const { lastVisualToken, isLastVisualTokenValid } = subject.getLastVisualTokenBeforeInput();
@@ -92,7 +93,7 @@ describe('Filtered Search Visual Tokens', () => {
tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(`
${bugLabelToken.outerHTML}
${FilteredSearchSpecHelper.createInputHTML()}
- ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('author', '@root')}
+ ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('author', '=', '@root')}
`);
const { lastVisualToken, isLastVisualTokenValid } = subject.getLastVisualTokenBeforeInput();
@@ -105,7 +106,7 @@ describe('Filtered Search Visual Tokens', () => {
tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(`
${FilteredSearchSpecHelper.createNameFilterVisualTokenHTML('label')}
${FilteredSearchSpecHelper.createInputHTML()}
- ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('author', '@root')}
+ ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('author', '=', '@root')}
`);
const { lastVisualToken, isLastVisualTokenValid } = subject.getLastVisualTokenBeforeInput();
@@ -150,8 +151,8 @@ describe('Filtered Search Visual Tokens', () => {
it('removes the selected class from buttons', () => {
tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(`
- ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('author', '@author')}
- ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('milestone', '%123', true)}
+ ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('author', '=', '@author')}
+ ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('milestone', '=', '%123', true)}
`);
const selected = tokensContainer.querySelector('.js-visual-token .selected');
@@ -169,7 +170,7 @@ describe('Filtered Search Visual Tokens', () => {
tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(`
${bugLabelToken.outerHTML}
${FilteredSearchSpecHelper.createSearchVisualTokenHTML('search term')}
- ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '~awesome')}
+ ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '=', '~awesome')}
`);
});
@@ -206,7 +207,7 @@ describe('Filtered Search Visual Tokens', () => {
describe('removeSelectedToken', () => {
it('does not remove when there are no selected tokens', () => {
tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(
- FilteredSearchSpecHelper.createFilterVisualTokenHTML('milestone', 'none'),
+ FilteredSearchSpecHelper.createFilterVisualTokenHTML('milestone', '=', 'none'),
);
expect(tokensContainer.querySelector('.js-visual-token .selectable')).not.toEqual(null);
@@ -218,7 +219,7 @@ describe('Filtered Search Visual Tokens', () => {
it('removes selected token', () => {
tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(
- FilteredSearchSpecHelper.createFilterVisualTokenHTML('milestone', 'none', true),
+ FilteredSearchSpecHelper.createFilterVisualTokenHTML('milestone', '=', 'none', true),
);
expect(tokensContainer.querySelector('.js-visual-token .selectable')).not.toEqual(null);
@@ -281,16 +282,22 @@ describe('Filtered Search Visual Tokens', () => {
describe('addVisualTokenElement', () => {
it('renders search visual tokens', () => {
- subject.addVisualTokenElement('search term', null, { isSearchTerm: true });
+ subject.addVisualTokenElement({
+ name: 'search term',
+ operator: '=',
+ value: null,
+ options: { isSearchTerm: true },
+ });
const token = tokensContainer.querySelector('.js-visual-token');
expect(token.classList.contains('filtered-search-term')).toEqual(true);
expect(token.querySelector('.name').innerText).toEqual('search term');
+ expect(token.querySelector('.operator').innerText).toEqual('=');
expect(token.querySelector('.value')).toEqual(null);
});
it('renders filter visual token name', () => {
- subject.addVisualTokenElement('milestone');
+ subject.addVisualTokenElement({ name: 'milestone' });
const token = tokensContainer.querySelector('.js-visual-token');
expect(token.classList.contains('search-token-milestone')).toEqual(true);
@@ -299,22 +306,23 @@ describe('Filtered Search Visual Tokens', () => {
expect(token.querySelector('.value')).toEqual(null);
});
- it('renders filter visual token name and value', () => {
- subject.addVisualTokenElement('label', 'Frontend');
+ it('renders filter visual token name, operator, and value', () => {
+ subject.addVisualTokenElement({ name: 'label', operator: '!=', value: 'Frontend' });
const token = tokensContainer.querySelector('.js-visual-token');
expect(token.classList.contains('search-token-label')).toEqual(true);
expect(token.classList.contains('filtered-search-token')).toEqual(true);
expect(token.querySelector('.name').innerText).toEqual('label');
+ expect(token.querySelector('.operator').innerText).toEqual('!=');
expect(token.querySelector('.value').innerText).toEqual('Frontend');
});
it('inserts visual token before input', () => {
tokensContainer.appendChild(
- FilteredSearchSpecHelper.createFilterVisualToken('assignee', '@root'),
+ FilteredSearchSpecHelper.createFilterVisualToken('assignee', '=', '@root'),
);
- subject.addVisualTokenElement('label', 'Frontend');
+ subject.addVisualTokenElement({ name: 'label', operator: '!=', value: 'Frontend' });
const tokens = tokensContainer.querySelectorAll('.js-visual-token');
const labelToken = tokens[0];
const assigneeToken = tokens[1];
@@ -323,18 +331,20 @@ describe('Filtered Search Visual Tokens', () => {
expect(labelToken.classList.contains('filtered-search-token')).toEqual(true);
expect(labelToken.querySelector('.name').innerText).toEqual('label');
expect(labelToken.querySelector('.value').innerText).toEqual('Frontend');
+ expect(labelToken.querySelector('.operator').innerText).toEqual('!=');
expect(assigneeToken.classList.contains('search-token-assignee')).toEqual(true);
expect(assigneeToken.classList.contains('filtered-search-token')).toEqual(true);
expect(assigneeToken.querySelector('.name').innerText).toEqual('assignee');
expect(assigneeToken.querySelector('.value').innerText).toEqual('@root');
+ expect(assigneeToken.querySelector('.operator').innerText).toEqual('=');
});
});
describe('addValueToPreviousVisualTokenElement', () => {
it('does not add when previous visual token element has no value', () => {
tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(
- FilteredSearchSpecHelper.createFilterVisualTokenHTML('author', '@root'),
+ FilteredSearchSpecHelper.createFilterVisualTokenHTML('author', '=', '@root'),
);
const original = tokensContainer.innerHTML;
@@ -345,7 +355,7 @@ describe('Filtered Search Visual Tokens', () => {
it('does not add when previous visual token element is a search', () => {
tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(`
- ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('author', '@root')}
+ ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('author', '=', '@root')}
${FilteredSearchSpecHelper.createSearchVisualTokenHTML('search term')}
`);
@@ -357,7 +367,7 @@ describe('Filtered Search Visual Tokens', () => {
it('adds value to previous visual filter token', () => {
tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(
- FilteredSearchSpecHelper.createNameFilterVisualTokenHTML('label'),
+ FilteredSearchSpecHelper.createNameOperatorFilterVisualTokenHTML('label', '='),
);
const original = tokensContainer.innerHTML;
@@ -377,25 +387,28 @@ describe('Filtered Search Visual Tokens', () => {
expect(token.classList.contains('filtered-search-token')).toEqual(true);
expect(token.querySelector('.name').innerText).toEqual('milestone');
+ expect(token.querySelector('.operator')).toEqual(null);
expect(token.querySelector('.value')).toEqual(null);
});
it('creates visual token with just tokenValue', () => {
- subject.addFilterVisualToken('milestone');
+ subject.addFilterVisualToken('milestone', '=');
subject.addFilterVisualToken('%8.17');
const token = tokensContainer.querySelector('.js-visual-token');
expect(token.classList.contains('filtered-search-token')).toEqual(true);
expect(token.querySelector('.name').innerText).toEqual('milestone');
+ expect(token.querySelector('.operator').innerText).toEqual('=');
expect(token.querySelector('.value').innerText).toEqual('%8.17');
});
it('creates full visual token', () => {
- subject.addFilterVisualToken('assignee', '@john');
+ subject.addFilterVisualToken('assignee', '=', '@john');
const token = tokensContainer.querySelector('.js-visual-token');
expect(token.classList.contains('filtered-search-token')).toEqual(true);
expect(token.querySelector('.name').innerText).toEqual('assignee');
+ expect(token.querySelector('.operator').innerText).toEqual('=');
expect(token.querySelector('.value').innerText).toEqual('@john');
});
});
@@ -412,7 +425,7 @@ describe('Filtered Search Visual Tokens', () => {
it('appends to previous search visual token if previous token was a search token', () => {
tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(`
- ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('author', '@root')}
+ ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('author', '=', '@root')}
${FilteredSearchSpecHelper.createSearchVisualTokenHTML('search term')}
`);
@@ -467,7 +480,11 @@ describe('Filtered Search Visual Tokens', () => {
describe('removeLastTokenPartial', () => {
it('should remove the last token value if it exists', () => {
tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(
- FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '~"Community Contribution"'),
+ FilteredSearchSpecHelper.createFilterVisualTokenHTML(
+ 'label',
+ '=',
+ '~"Community Contribution"',
+ ),
);
expect(tokensContainer.querySelector('.js-visual-token .value')).not.toEqual(null);
@@ -507,7 +524,7 @@ describe('Filtered Search Visual Tokens', () => {
it('adds search visual token if previous visual token is valid', () => {
tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(
- FilteredSearchSpecHelper.createFilterVisualTokenHTML('assignee', 'none'),
+ FilteredSearchSpecHelper.createFilterVisualTokenHTML('assignee', '=', 'none'),
);
const input = document.querySelector('.filtered-search');
@@ -523,7 +540,7 @@ describe('Filtered Search Visual Tokens', () => {
it('adds value to previous visual token element if previous visual token is invalid', () => {
tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(
- FilteredSearchSpecHelper.createNameFilterVisualTokenHTML('assignee'),
+ FilteredSearchSpecHelper.createNameOperatorFilterVisualTokenHTML('assignee', '='),
);
const input = document.querySelector('.filtered-search');
@@ -534,6 +551,7 @@ describe('Filtered Search Visual Tokens', () => {
expect(input.value).toEqual('');
expect(updatedToken.querySelector('.name').innerText).toEqual('assignee');
+ expect(updatedToken.querySelector('.operator').innerText).toEqual('=');
expect(updatedToken.querySelector('.value').innerText).toEqual('@john');
});
});
@@ -544,9 +562,9 @@ describe('Filtered Search Visual Tokens', () => {
beforeEach(() => {
tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(`
- ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', 'none')}
+ ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '=', 'none')}
${FilteredSearchSpecHelper.createSearchVisualTokenHTML('search')}
- ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('milestone', 'upcoming')}
+ ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('milestone', '=', 'upcoming')}
`);
input = document.querySelector('.filtered-search');
@@ -614,7 +632,7 @@ describe('Filtered Search Visual Tokens', () => {
describe('moveInputTotheRight', () => {
it('does nothing if the input is already the right most element', () => {
tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(
- FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', 'none'),
+ FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '=', 'none'),
);
spyOn(subject, 'tokenizeInput').and.callFake(() => {});
@@ -628,12 +646,12 @@ describe('Filtered Search Visual Tokens', () => {
it("tokenize's input", () => {
tokensContainer.innerHTML = `
- ${FilteredSearchSpecHelper.createNameFilterVisualTokenHTML('label')}
+ ${FilteredSearchSpecHelper.createNameOperatorFilterVisualTokenHTML('label', '=')}
${FilteredSearchSpecHelper.createInputHTML()}
${bugLabelToken.outerHTML}
`;
- document.querySelector('.filtered-search').value = 'none';
+ tokensContainer.querySelector('.filtered-search').value = 'none';
subject.moveInputToTheRight();
const value = tokensContainer.querySelector('.js-visual-token .value');
@@ -643,7 +661,7 @@ describe('Filtered Search Visual Tokens', () => {
it('converts input into search term token if last token is valid', () => {
tokensContainer.innerHTML = `
- ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', 'none')}
+ ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '=', 'none')}
${FilteredSearchSpecHelper.createInputHTML()}
${bugLabelToken.outerHTML}
`;
@@ -658,7 +676,7 @@ describe('Filtered Search Visual Tokens', () => {
it('moves the input to the right most element', () => {
tokensContainer.innerHTML = `
- ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', 'none')}
+ ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '=', 'none')}
${FilteredSearchSpecHelper.createInputHTML()}
${bugLabelToken.outerHTML}
`;
@@ -670,8 +688,8 @@ describe('Filtered Search Visual Tokens', () => {
it('tokenizes input even if input is the right most element', () => {
tokensContainer.innerHTML = `
- ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', 'none')}
- ${FilteredSearchSpecHelper.createNameFilterVisualTokenHTML('label')}
+ ${FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '=', 'none')}
+ ${FilteredSearchSpecHelper.createNameOperatorFilterVisualTokenHTML('label', '=')}
${FilteredSearchSpecHelper.createInputHTML('', '~bug')}
`;
diff --git a/spec/javascripts/filtered_search/issues_filtered_search_token_keys_spec.js b/spec/javascripts/filtered_search/issues_filtered_search_token_keys_spec.js
new file mode 100644
index 00000000000..c7be900ba2c
--- /dev/null
+++ b/spec/javascripts/filtered_search/issues_filtered_search_token_keys_spec.js
@@ -0,0 +1,148 @@
+import IssuableFilteredSearchTokenKeys from '~/filtered_search/issuable_filtered_search_token_keys';
+
+describe('Issues Filtered Search Token Keys', () => {
+ describe('get', () => {
+ let tokenKeys;
+
+ beforeEach(() => {
+ tokenKeys = IssuableFilteredSearchTokenKeys.get();
+ });
+
+ it('should return tokenKeys', () => {
+ expect(tokenKeys).not.toBeNull();
+ });
+
+ it('should return tokenKeys as an array', () => {
+ expect(tokenKeys instanceof Array).toBe(true);
+ });
+
+ it('should always return the same array', () => {
+ const tokenKeys2 = IssuableFilteredSearchTokenKeys.get();
+
+ expect(tokenKeys).toEqual(tokenKeys2);
+ });
+
+ it('should return assignee as a string', () => {
+ const assignee = tokenKeys.find(tokenKey => tokenKey.key === 'assignee');
+
+ expect(assignee.type).toEqual('string');
+ });
+ });
+
+ describe('getKeys', () => {
+ it('should return keys', () => {
+ const getKeys = IssuableFilteredSearchTokenKeys.getKeys();
+ const keys = IssuableFilteredSearchTokenKeys.get().map(i => i.key);
+
+ keys.forEach((key, i) => {
+ expect(key).toEqual(getKeys[i]);
+ });
+ });
+ });
+
+ describe('getConditions', () => {
+ let conditions;
+
+ beforeEach(() => {
+ conditions = IssuableFilteredSearchTokenKeys.getConditions();
+ });
+
+ it('should return conditions', () => {
+ expect(conditions).not.toBeNull();
+ });
+
+ it('should return conditions as an array', () => {
+ expect(conditions instanceof Array).toBe(true);
+ });
+ });
+
+ describe('searchByKey', () => {
+ it('should return null when key not found', () => {
+ const tokenKey = IssuableFilteredSearchTokenKeys.searchByKey('notakey');
+
+ expect(tokenKey).toBeNull();
+ });
+
+ it('should return tokenKey when found by key', () => {
+ const tokenKeys = IssuableFilteredSearchTokenKeys.get();
+ const result = IssuableFilteredSearchTokenKeys.searchByKey(tokenKeys[0].key);
+
+ expect(result).toEqual(tokenKeys[0]);
+ });
+ });
+
+ describe('searchBySymbol', () => {
+ it('should return null when symbol not found', () => {
+ const tokenKey = IssuableFilteredSearchTokenKeys.searchBySymbol('notasymbol');
+
+ expect(tokenKey).toBeNull();
+ });
+
+ it('should return tokenKey when found by symbol', () => {
+ const tokenKeys = IssuableFilteredSearchTokenKeys.get();
+ const result = IssuableFilteredSearchTokenKeys.searchBySymbol(tokenKeys[0].symbol);
+
+ expect(result).toEqual(tokenKeys[0]);
+ });
+ });
+
+ describe('searchByKeyParam', () => {
+ it('should return null when key param not found', () => {
+ const tokenKey = IssuableFilteredSearchTokenKeys.searchByKeyParam('notakeyparam');
+
+ expect(tokenKey).toBeNull();
+ });
+
+ it('should return tokenKey when found by key param', () => {
+ const tokenKeys = IssuableFilteredSearchTokenKeys.get();
+ const result = IssuableFilteredSearchTokenKeys.searchByKeyParam(
+ `${tokenKeys[0].key}_${tokenKeys[0].param}`,
+ );
+
+ expect(result).toEqual(tokenKeys[0]);
+ });
+
+ it('should return alternative tokenKey when found by key param', () => {
+ const tokenKeys = IssuableFilteredSearchTokenKeys.getAlternatives();
+ const result = IssuableFilteredSearchTokenKeys.searchByKeyParam(
+ `${tokenKeys[0].key}_${tokenKeys[0].param}`,
+ );
+
+ expect(result).toEqual(tokenKeys[0]);
+ });
+ });
+
+ describe('searchByConditionUrl', () => {
+ it('should return null when condition url not found', () => {
+ const condition = IssuableFilteredSearchTokenKeys.searchByConditionUrl(null);
+
+ expect(condition).toBeNull();
+ });
+
+ it('should return condition when found by url', () => {
+ const conditions = IssuableFilteredSearchTokenKeys.getConditions();
+ const result = IssuableFilteredSearchTokenKeys.searchByConditionUrl(conditions[0].url);
+
+ expect(result).toBe(conditions[0]);
+ });
+ });
+
+ describe('searchByConditionKeyValue', () => {
+ it('should return null when condition tokenKey and value not found', () => {
+ const condition = IssuableFilteredSearchTokenKeys.searchByConditionKeyValue(null, null);
+
+ expect(condition).toBeNull();
+ });
+
+ it('should return condition when found by tokenKey and value', () => {
+ const conditions = IssuableFilteredSearchTokenKeys.getConditions();
+ const result = IssuableFilteredSearchTokenKeys.searchByConditionKeyValue(
+ conditions[0].tokenKey,
+ conditions[0].operator,
+ conditions[0].value,
+ );
+
+ expect(result).toEqual(conditions[0]);
+ });
+ });
+});
diff --git a/spec/javascripts/filtered_search/visual_token_value_spec.js b/spec/javascripts/filtered_search/visual_token_value_spec.js
index 5863005de1e..a039e280028 100644
--- a/spec/javascripts/filtered_search/visual_token_value_spec.js
+++ b/spec/javascripts/filtered_search/visual_token_value_spec.js
@@ -10,9 +10,11 @@ describe('Filtered Search Visual Tokens', () => {
const tokenNameElement = tokenElement.querySelector('.name');
const tokenValueContainer = tokenElement.querySelector('.value-container');
const tokenValueElement = tokenValueContainer.querySelector('.value');
+ const tokenOperatorElement = tokenElement.querySelector('.operator');
const tokenType = tokenNameElement.innerText.toLowerCase();
const tokenValue = tokenValueElement.innerText;
- const subject = new VisualTokenValue(tokenValue, tokenType);
+ const tokenOperator = tokenOperatorElement.innerText;
+ const subject = new VisualTokenValue(tokenValue, tokenType, tokenOperator);
return { subject, tokenValueContainer, tokenValueElement };
};
@@ -28,8 +30,8 @@ describe('Filtered Search Visual Tokens', () => {
`);
tokensContainer = document.querySelector('.tokens-container');
- authorToken = FilteredSearchSpecHelper.createFilterVisualToken('author', '@user');
- bugLabelToken = FilteredSearchSpecHelper.createFilterVisualToken('label', '~bug');
+ authorToken = FilteredSearchSpecHelper.createFilterVisualToken('author', '=', '@user');
+ bugLabelToken = FilteredSearchSpecHelper.createFilterVisualToken('label', '=', '~bug');
});
describe('updateUserTokenAppearance', () => {
@@ -140,10 +142,12 @@ describe('Filtered Search Visual Tokens', () => {
const missingLabelToken = FilteredSearchSpecHelper.createFilterVisualToken(
'label',
+ '=',
'~doesnotexist',
);
const spaceLabelToken = FilteredSearchSpecHelper.createFilterVisualToken(
'label',
+ '=',
'~"some space"',
);
diff --git a/spec/javascripts/fly_out_nav_spec.js b/spec/javascripts/fly_out_nav_spec.js
index 4772f754937..afcf132bea3 100644
--- a/spec/javascripts/fly_out_nav_spec.js
+++ b/spec/javascripts/fly_out_nav_spec.js
@@ -1,3 +1,4 @@
+import { GlBreakpointInstance } from '@gitlab/ui/dist/utils';
import {
calculateTop,
showSubLevelItems,
@@ -15,7 +16,6 @@ import {
subItemsMouseLeave,
} from '~/fly_out_nav';
import { SIDEBAR_COLLAPSED_CLASS } from '~/contextual_sidebar';
-import bp from '~/breakpoints';
describe('Fly out sidebar navigation', () => {
let el;
@@ -26,7 +26,7 @@ describe('Fly out sidebar navigation', () => {
el.style.position = 'relative';
document.body.appendChild(el);
- spyOn(bp, 'getBreakpointSize').and.callFake(() => breakpointSize);
+ spyOn(GlBreakpointInstance, 'getBreakpointSize').and.callFake(() => breakpointSize);
setOpenMenu(null);
});
diff --git a/spec/javascripts/frequent_items/components/frequent_items_list_item_spec.js b/spec/javascripts/frequent_items/components/frequent_items_list_item_spec.js
index 9bf3e02557f..e3f05e89a2d 100644
--- a/spec/javascripts/frequent_items/components/frequent_items_list_item_spec.js
+++ b/spec/javascripts/frequent_items/components/frequent_items_list_item_spec.js
@@ -18,7 +18,6 @@ describe('FrequentItemsListItemComponent', () => {
avatarUrl: mockProject.avatarUrl,
...props,
},
- sync: false,
localVue,
});
};
diff --git a/spec/javascripts/frequent_items/utils_spec.js b/spec/javascripts/frequent_items/utils_spec.js
index cd27d79b29a..2480af5b31d 100644
--- a/spec/javascripts/frequent_items/utils_spec.js
+++ b/spec/javascripts/frequent_items/utils_spec.js
@@ -1,10 +1,16 @@
-import bp from '~/breakpoints';
+import { GlBreakpointInstance as bp } from '@gitlab/ui/dist/utils';
import { isMobile, getTopFrequentItems, updateExistingFrequentItem } from '~/frequent_items/utils';
import { HOUR_IN_MS, FREQUENT_ITEMS } from '~/frequent_items/constants';
import { mockProject, unsortedFrequentItems, sortedFrequentItems } from './mock_data';
describe('Frequent Items utils spec', () => {
describe('isMobile', () => {
+ it('returns true when the screen is medium', () => {
+ spyOn(bp, 'getBreakpointSize').and.returnValue('md');
+
+ expect(isMobile()).toBe(true);
+ });
+
it('returns true when the screen is small ', () => {
spyOn(bp, 'getBreakpointSize').and.returnValue('sm');
@@ -17,8 +23,8 @@ describe('Frequent Items utils spec', () => {
expect(isMobile()).toBe(true);
});
- it('returns false when the screen is larger than small ', () => {
- spyOn(bp, 'getBreakpointSize').and.returnValue('md');
+ it('returns false when the screen is larger than medium', () => {
+ spyOn(bp, 'getBreakpointSize').and.returnValue('lg');
expect(isMobile()).toBe(false);
});
@@ -32,21 +38,21 @@ describe('Frequent Items utils spec', () => {
});
it('returns correct amount of items for mobile', () => {
- spyOn(bp, 'getBreakpointSize').and.returnValue('sm');
+ spyOn(bp, 'getBreakpointSize').and.returnValue('md');
const result = getTopFrequentItems(unsortedFrequentItems);
expect(result.length).toBe(FREQUENT_ITEMS.LIST_COUNT_MOBILE);
});
it('returns correct amount of items for desktop', () => {
- spyOn(bp, 'getBreakpointSize').and.returnValue('lg');
+ spyOn(bp, 'getBreakpointSize').and.returnValue('xl');
const result = getTopFrequentItems(unsortedFrequentItems);
expect(result.length).toBe(FREQUENT_ITEMS.LIST_COUNT_DESKTOP);
});
it('sorts frequent items in order of frequency and lastAccessedOn', () => {
- spyOn(bp, 'getBreakpointSize').and.returnValue('lg');
+ spyOn(bp, 'getBreakpointSize').and.returnValue('xl');
const result = getTopFrequentItems(unsortedFrequentItems);
const expectedResult = sortedFrequentItems.slice(0, FREQUENT_ITEMS.LIST_COUNT_DESKTOP);
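These breakpoint updates make 'md' count as mobile alongside 'sm' and 'xs', while 'lg' and 'xl' do not. A small sketch of the behaviour the spec describes, assuming the GlBreakpointInstance API used above; the real ~/frequent_items/utils may differ:

import { GlBreakpointInstance as bp } from '@gitlab/ui/dist/utils';

// 'md', 'sm' and 'xs' are treated as mobile; larger breakpoints are not.
export const isMobile = () => ['md', 'sm', 'xs'].includes(bp.getBreakpointSize());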
diff --git a/spec/javascripts/helpers/filtered_search_spec_helper.js b/spec/javascripts/helpers/filtered_search_spec_helper.js
index fd06bb1f324..ceb7982bbc3 100644
--- a/spec/javascripts/helpers/filtered_search_spec_helper.js
+++ b/spec/javascripts/helpers/filtered_search_spec_helper.js
@@ -1,15 +1,17 @@
export default class FilteredSearchSpecHelper {
- static createFilterVisualTokenHTML(name, value, isSelected) {
- return FilteredSearchSpecHelper.createFilterVisualToken(name, value, isSelected).outerHTML;
+ static createFilterVisualTokenHTML(name, operator, value, isSelected) {
+ return FilteredSearchSpecHelper.createFilterVisualToken(name, operator, value, isSelected)
+ .outerHTML;
}
- static createFilterVisualToken(name, value, isSelected = false) {
+ static createFilterVisualToken(name, operator, value, isSelected = false) {
const li = document.createElement('li');
li.classList.add('js-visual-token', 'filtered-search-token', `search-token-${name}`);
li.innerHTML = `
<div class="selectable ${isSelected ? 'selected' : ''}" role="button">
<div class="name">${name}</div>
+ <div class="operator">${operator}</div>
<div class="value-container">
<div class="value">${value}</div>
<div class="remove-token" role="button">
@@ -30,6 +32,15 @@ export default class FilteredSearchSpecHelper {
`;
}
+ static createNameOperatorFilterVisualTokenHTML(name, operator) {
+ return `
+ <li class="js-visual-token filtered-search-token">
+ <div class="name">${name}</div>
+ <div class="operator">${operator}</div>
+ </li>
+ `;
+ }
+
static createSearchVisualToken(name) {
const li = document.createElement('li');
li.classList.add('js-visual-token', 'filtered-search-term');
diff --git a/spec/javascripts/ide/components/commit_sidebar/form_spec.js b/spec/javascripts/ide/components/commit_sidebar/form_spec.js
index fdbabf84e25..e984389bd46 100644
--- a/spec/javascripts/ide/components/commit_sidebar/form_spec.js
+++ b/spec/javascripts/ide/components/commit_sidebar/form_spec.js
@@ -33,6 +33,12 @@ describe('IDE commit form', () => {
});
describe('compact', () => {
+ beforeEach(done => {
+ vm.isCompact = true;
+
+ vm.$nextTick(done);
+ });
+
it('renders commit button in compact mode', () => {
expect(vm.$el.querySelector('.btn-primary')).not.toBeNull();
expect(vm.$el.querySelector('.btn-primary').textContent).toContain('Commit');
@@ -61,7 +67,7 @@ describe('IDE commit form', () => {
});
});
- it('toggles activity bar vie when clicking commit button', done => {
+ it('toggles activity bar view when clicking commit button', done => {
vm.$el.querySelector('.btn-primary').click();
vm.$nextTick(() => {
@@ -70,6 +76,25 @@ describe('IDE commit form', () => {
done();
});
});
+
+ it('collapses if lastCommitMsg is set to empty and current view is not commit view', done => {
+ store.state.lastCommitMsg = 'abc';
+ store.state.currentActivityView = activityBarViews.edit;
+
+ vm.$nextTick(() => {
+ // if commit message is set, form is uncollapsed
+ expect(vm.isCompact).toBe(false);
+
+ store.state.lastCommitMsg = '';
+
+ vm.$nextTick(() => {
+ // collapsed when set to empty
+ expect(vm.isCompact).toBe(true);
+
+ done();
+ });
+ });
+ });
});
describe('full', () => {
@@ -104,6 +129,17 @@ describe('IDE commit form', () => {
});
});
+ it('always opens itself in full view if current activity view is not commit view when clicking commit button', done => {
+ vm.$el.querySelector('.btn-primary').click();
+
+ vm.$nextTick(() => {
+ expect(store.state.currentActivityView).toBe(activityBarViews.commit);
+ expect(vm.isCompact).toBe(false);
+
+ done();
+ });
+ });
+
describe('discard draft button', () => {
it('hidden when commitMessage is empty', () => {
expect(vm.$el.querySelector('.btn-default').textContent).toContain('Collapse');
diff --git a/spec/javascripts/ide/components/commit_sidebar/list_item_spec.js b/spec/javascripts/ide/components/commit_sidebar/list_item_spec.js
index caf06b5e1d8..63ba6b95619 100644
--- a/spec/javascripts/ide/components/commit_sidebar/list_item_spec.js
+++ b/spec/javascripts/ide/components/commit_sidebar/list_item_spec.js
@@ -20,7 +20,6 @@ describe('Multi-file editor commit sidebar list item', () => {
vm = createComponentWithStore(Component, store, {
file: f,
- actionComponent: 'stage-button',
activeFileKey: `staged-${f.key}`,
}).$mount();
diff --git a/spec/javascripts/ide/components/commit_sidebar/list_spec.js b/spec/javascripts/ide/components/commit_sidebar/list_spec.js
index 81120f6d277..5a1682523d8 100644
--- a/spec/javascripts/ide/components/commit_sidebar/list_spec.js
+++ b/spec/javascripts/ide/components/commit_sidebar/list_spec.js
@@ -17,7 +17,6 @@ describe('Multi-file editor commit sidebar list', () => {
action: 'stageAllChanges',
actionBtnText: 'stage all',
actionBtnIcon: 'history',
- itemActionComponent: 'stage-button',
activeFileKey: 'staged-testing',
keyPrefix: 'staged',
});
diff --git a/spec/javascripts/ide/components/commit_sidebar/stage_button_spec.js b/spec/javascripts/ide/components/commit_sidebar/stage_button_spec.js
deleted file mode 100644
index e09ccbe2a63..00000000000
--- a/spec/javascripts/ide/components/commit_sidebar/stage_button_spec.js
+++ /dev/null
@@ -1,46 +0,0 @@
-import Vue from 'vue';
-import store from '~/ide/stores';
-import stageButton from '~/ide/components/commit_sidebar/stage_button.vue';
-import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper';
-import { file, resetStore } from '../../helpers';
-
-describe('IDE stage file button', () => {
- let vm;
- let f;
-
- beforeEach(() => {
- const Component = Vue.extend(stageButton);
- f = file();
-
- vm = createComponentWithStore(Component, store, {
- path: f.path,
- });
-
- spyOn(vm, 'stageChange');
- spyOn(vm, 'discardFileChanges');
-
- vm.$mount();
- });
-
- afterEach(() => {
- vm.$destroy();
-
- resetStore(vm.$store);
- });
-
- it('renders button to discard & stage', () => {
- expect(vm.$el.querySelectorAll('.btn-blank').length).toBe(2);
- });
-
- it('calls store with stage button', () => {
- vm.$el.querySelectorAll('.btn')[0].click();
-
- expect(vm.stageChange).toHaveBeenCalledWith(f.path);
- });
-
- it('calls store with discard button', () => {
- vm.$el.querySelector('.btn-danger').click();
-
- expect(vm.discardFileChanges).toHaveBeenCalledWith(f.path);
- });
-});
diff --git a/spec/javascripts/ide/components/commit_sidebar/unstage_button_spec.js b/spec/javascripts/ide/components/commit_sidebar/unstage_button_spec.js
deleted file mode 100644
index 917bbb9fb46..00000000000
--- a/spec/javascripts/ide/components/commit_sidebar/unstage_button_spec.js
+++ /dev/null
@@ -1,39 +0,0 @@
-import Vue from 'vue';
-import store from '~/ide/stores';
-import unstageButton from '~/ide/components/commit_sidebar/unstage_button.vue';
-import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper';
-import { file, resetStore } from '../../helpers';
-
-describe('IDE unstage file button', () => {
- let vm;
- let f;
-
- beforeEach(() => {
- const Component = Vue.extend(unstageButton);
- f = file();
-
- vm = createComponentWithStore(Component, store, {
- path: f.path,
- });
-
- spyOn(vm, 'unstageChange');
-
- vm.$mount();
- });
-
- afterEach(() => {
- vm.$destroy();
-
- resetStore(vm.$store);
- });
-
- it('renders button to unstage', () => {
- expect(vm.$el.querySelectorAll('.btn').length).toBe(1);
- });
-
- it('calls store with unnstage button', () => {
- vm.$el.querySelector('.btn').click();
-
- expect(vm.unstageChange).toHaveBeenCalledWith(f.path);
- });
-});
diff --git a/spec/javascripts/ide/components/new_dropdown/modal_spec.js b/spec/javascripts/ide/components/new_dropdown/modal_spec.js
index a1c00e99927..0ea767e087d 100644
--- a/spec/javascripts/ide/components/new_dropdown/modal_spec.js
+++ b/spec/javascripts/ide/components/new_dropdown/modal_spec.js
@@ -52,19 +52,6 @@ describe('new file modal component', () => {
expect(templateFilesEl instanceof Element).toBeTruthy();
}
});
-
- describe('createEntryInStore', () => {
- it('$emits create', () => {
- spyOn(vm, 'createTempEntry');
-
- vm.submitForm();
-
- expect(vm.createTempEntry).toHaveBeenCalledWith({
- name: 'testing',
- type,
- });
- });
- });
});
});
@@ -145,31 +132,19 @@ describe('new file modal component', () => {
vm = createComponentWithStore(Component, store).$mount();
const flashSpy = spyOnDependency(modal, 'flash');
- vm.submitForm();
- expect(flashSpy).toHaveBeenCalled();
- });
+ expect(flashSpy).not.toHaveBeenCalled();
- it('calls createTempEntry when target path does not exist', () => {
- const store = createStore();
- store.state.entryModal = {
- type: 'rename',
- path: 'test-path/test',
- entry: {
- name: 'test',
- type: 'blob',
- path: 'test-path1/test',
- },
- };
-
- vm = createComponentWithStore(Component, store).$mount();
- spyOn(vm, 'createTempEntry').and.callFake(() => Promise.resolve());
vm.submitForm();
- expect(vm.createTempEntry).toHaveBeenCalledWith({
- name: 'test-path1',
- type: 'tree',
- });
+ expect(flashSpy).toHaveBeenCalledWith(
+ 'The name "test-path/test" is already taken in this directory.',
+ 'alert',
+ jasmine.anything(),
+ null,
+ false,
+ true,
+ );
});
});
});
diff --git a/spec/javascripts/ide/components/repo_editor_spec.js b/spec/javascripts/ide/components/repo_editor_spec.js
index 21fb5449858..8935d8f56fc 100644
--- a/spec/javascripts/ide/components/repo_editor_spec.js
+++ b/spec/javascripts/ide/components/repo_editor_spec.js
@@ -52,6 +52,18 @@ describe('RepoEditor', () => {
state.rightPanelCollapsed = !state.rightPanelCollapsed;
};
+ it('sets renderWhitespace to `all`', () => {
+ vm.$store.state.renderWhitespaceInCode = true;
+
+ expect(vm.editorOptions.renderWhitespace).toEqual('all');
+ });
+
+ it('sets renderWhitespace to `none`', () => {
+ vm.$store.state.renderWhitespaceInCode = false;
+
+ expect(vm.editorOptions.renderWhitespace).toEqual('none');
+ });
+
it('renders an ide container', () => {
expect(vm.shouldHideEditor).toBeFalsy();
expect(vm.showEditor).toBe(true);
diff --git a/spec/javascripts/ide/components/repo_tab_spec.js b/spec/javascripts/ide/components/repo_tab_spec.js
index 3b52f279bf2..7466ed5468b 100644
--- a/spec/javascripts/ide/components/repo_tab_spec.js
+++ b/spec/javascripts/ide/components/repo_tab_spec.js
@@ -93,13 +93,13 @@ describe('RepoTab', () => {
Vue.nextTick()
.then(() => {
- expect(vm.$el.querySelector('.file-modified')).toBeNull();
+ expect(vm.$el.querySelector('.file-modified-solid')).toBeNull();
vm.$el.dispatchEvent(new Event('mouseout'));
})
.then(Vue.nextTick)
.then(() => {
- expect(vm.$el.querySelector('.file-modified')).not.toBeNull();
+ expect(vm.$el.querySelector('.file-modified-solid')).not.toBeNull();
done();
})
diff --git a/spec/javascripts/ide/lib/editor_spec.js b/spec/javascripts/ide/lib/editor_spec.js
index c2cb964ea87..f1973f7798f 100644
--- a/spec/javascripts/ide/lib/editor_spec.js
+++ b/spec/javascripts/ide/lib/editor_spec.js
@@ -67,6 +67,7 @@ describe('Multi-file editor library', () => {
},
readOnly: true,
scrollBeyondLastLine: false,
+ renderWhitespace: 'none',
quickSuggestions: false,
occurrencesHighlight: false,
wordWrap: 'on',
diff --git a/spec/javascripts/ide/stores/actions/merge_request_spec.js b/spec/javascripts/ide/stores/actions/merge_request_spec.js
index a8894c644be..ca8f33407fd 100644
--- a/spec/javascripts/ide/stores/actions/merge_request_spec.js
+++ b/spec/javascripts/ide/stores/actions/merge_request_spec.js
@@ -348,6 +348,8 @@ describe('IDE store merge request actions', () => {
let testMergeRequest;
let testMergeRequestChanges;
+ const mockGetters = { findBranch: () => ({ commit: { id: 'abcd2322' } }) };
+
beforeEach(() => {
testMergeRequest = {
source_branch: 'abcbranch',
@@ -406,8 +408,8 @@ describe('IDE store merge request actions', () => {
);
});
- it('dispatch actions for merge request data', done => {
- openMergeRequest(store, mr)
+ it('dispatches actions for merge request data', done => {
+ openMergeRequest({ state: store.state, dispatch: store.dispatch, getters: mockGetters }, mr)
.then(() => {
expect(store.dispatch.calls.allArgs()).toEqual([
['getMergeRequestData', mr],
@@ -424,6 +426,7 @@ describe('IDE store merge request actions', () => {
{
projectId: mr.projectId,
branchId: testMergeRequest.source_branch,
+ ref: 'abcd2322',
},
],
['getMergeRequestVersions', mr],
@@ -449,7 +452,7 @@ describe('IDE store merge request actions', () => {
{ new_path: 'bar', path: 'bar' },
];
- openMergeRequest(store, mr)
+ openMergeRequest({ state: store.state, dispatch: store.dispatch, getters: mockGetters }, mr)
.then(() => {
expect(store.dispatch).toHaveBeenCalledWith(
'updateActivityBarView',
diff --git a/spec/javascripts/ide/stores/actions/project_spec.js b/spec/javascripts/ide/stores/actions/project_spec.js
index bcc7b5d5e46..bd51222ac3c 100644
--- a/spec/javascripts/ide/stores/actions/project_spec.js
+++ b/spec/javascripts/ide/stores/actions/project_spec.js
@@ -201,35 +201,30 @@ describe('IDE store project actions', () => {
});
describe('showEmptyState', () => {
- it('commits proper mutations when supplied error is 404', done => {
+ it('creates a blank tree and sets loading state to false', done => {
testAction(
showEmptyState,
- {
- err: {
- response: {
- status: 404,
- },
- },
- projectId: 'abc/def',
- branchId: 'master',
- },
+ { projectId: 'abc/def', branchId: 'master' },
store.state,
[
- {
- type: 'CREATE_TREE',
- payload: {
- treePath: 'abc/def/master',
- },
- },
+ { type: 'CREATE_TREE', payload: { treePath: 'abc/def/master' } },
{
type: 'TOGGLE_LOADING',
- payload: {
- entry: store.state.trees['abc/def/master'],
- forceValue: false,
- },
+ payload: { entry: store.state.trees['abc/def/master'], forceValue: false },
},
],
- [],
+ jasmine.any(Object),
+ done,
+ );
+ });
+
+ it('sets the currentBranchId to the branchId that was passed', done => {
+ testAction(
+ showEmptyState,
+ { projectId: 'abc/def', branchId: 'master' },
+ store.state,
+ jasmine.any(Object),
+ [{ type: 'setCurrentBranchId', payload: 'master' }],
done,
);
});
@@ -285,16 +280,21 @@ describe('IDE store project actions', () => {
describe('loadBranch', () => {
const projectId = 'abc/def';
const branchId = '123-lorem';
+ const ref = 'abcd2322';
it('fetches branch data', done => {
+ const mockGetters = { findBranch: () => ({ commit: { id: ref } }) };
spyOn(store, 'dispatch').and.returnValue(Promise.resolve());
- loadBranch(store, { projectId, branchId })
+ loadBranch(
+ { getters: mockGetters, state: store.state, dispatch: store.dispatch },
+ { projectId, branchId },
+ )
.then(() => {
expect(store.dispatch.calls.allArgs()).toEqual([
['getBranchData', { projectId, branchId }],
['getMergeRequestsForBranch', { projectId, branchId }],
- ['getFiles', { projectId, branchId }],
+ ['getFiles', { projectId, branchId, ref }],
]);
})
.then(done)
diff --git a/spec/javascripts/ide/stores/actions/tree_spec.js b/spec/javascripts/ide/stores/actions/tree_spec.js
index e2d8cc195ae..be350b6f6cc 100644
--- a/spec/javascripts/ide/stores/actions/tree_spec.js
+++ b/spec/javascripts/ide/stores/actions/tree_spec.js
@@ -17,6 +17,7 @@ describe('Multi-file store tree actions', () => {
projectId: 'abcproject',
branch: 'master',
branchId: 'master',
+ ref: '12345678',
};
beforeEach(() => {
@@ -29,14 +30,6 @@ describe('Multi-file store tree actions', () => {
store.state.currentBranchId = 'master';
store.state.projects.abcproject = {
web_url: '',
- branches: {
- master: {
- workingReference: '12345678',
- commit: {
- id: '12345678',
- },
- },
- },
};
});
diff --git a/spec/javascripts/ide/stores/actions_spec.js b/spec/javascripts/ide/stores/actions_spec.js
index 0ee114cb70d..d582462d542 100644
--- a/spec/javascripts/ide/stores/actions_spec.js
+++ b/spec/javascripts/ide/stores/actions_spec.js
@@ -18,19 +18,19 @@ import axios from '~/lib/utils/axios_utils';
import { createStore } from '~/ide/stores';
import * as types from '~/ide/stores/mutation_types';
import router from '~/ide/ide_router';
-import { resetStore, file } from '../helpers';
+import { file } from '../helpers';
import testAction from '../../helpers/vuex_action_helper';
import eventHub from '~/ide/eventhub';
-const store = createStore();
-
describe('Multi-file store actions', () => {
+ let store;
+
beforeEach(() => {
- spyOn(router, 'push');
- });
+ store = createStore();
- afterEach(() => {
- resetStore(store);
+ spyOn(store, 'commit').and.callThrough();
+ spyOn(store, 'dispatch').and.callThrough();
+ spyOn(router, 'push');
});
describe('redirectToUrl', () => {
@@ -61,24 +61,25 @@ describe('Multi-file store actions', () => {
});
describe('discardAllChanges', () => {
- let f;
+ const paths = ['to_discard', 'another_one_to_discard'];
+
beforeEach(() => {
- f = file('discardAll');
- f.changed = true;
+ paths.forEach(path => {
+ const f = file(path);
+ f.changed = true;
- store.state.openFiles.push(f);
- store.state.changedFiles.push(f);
- store.state.entries[f.path] = f;
+ store.state.openFiles.push(f);
+ store.state.changedFiles.push(f);
+ store.state.entries[f.path] = f;
+ });
});
- it('discards changes in file', done => {
- store
- .dispatch('discardAllChanges')
- .then(() => {
- expect(store.state.openFiles.changed).toBeFalsy();
- })
- .then(done)
- .catch(done.fail);
+ it('discards all changes in file', () => {
+ const expectedCalls = paths.map(path => ['restoreOriginalFile', path]);
+
+ discardAllChanges(store);
+
+ expect(store.dispatch.calls.allArgs()).toEqual(jasmine.arrayContaining(expectedCalls));
});
it('removes all files from changedFiles state', done => {
@@ -86,64 +87,11 @@ describe('Multi-file store actions', () => {
.dispatch('discardAllChanges')
.then(() => {
expect(store.state.changedFiles.length).toBe(0);
- expect(store.state.openFiles.length).toBe(1);
+ expect(store.state.openFiles.length).toBe(2);
})
.then(done)
.catch(done.fail);
});
-
- it('closes the temp file and deletes it if it was open', done => {
- f.tempFile = true;
-
- testAction(
- discardAllChanges,
- undefined,
- store.state,
- [{ type: types.REMOVE_ALL_CHANGES_FILES }],
- [
- { type: 'closeFile', payload: jasmine.objectContaining({ path: 'discardAll' }) },
- { type: 'deleteEntry', payload: 'discardAll' },
- ],
- done,
- );
- });
-
- it('renames the file to its original name and closes it if it was open', done => {
- Object.assign(f, {
- prevPath: 'parent/path/old_name',
- prevName: 'old_name',
- prevParentPath: 'parent/path',
- });
-
- testAction(
- discardAllChanges,
- undefined,
- store.state,
- [{ type: types.REMOVE_ALL_CHANGES_FILES }],
- [
- { type: 'closeFile', payload: jasmine.objectContaining({ path: 'discardAll' }) },
- {
- type: 'renameEntry',
- payload: { path: 'discardAll', name: 'old_name', parentPath: 'parent/path' },
- },
- ],
- done,
- );
- });
-
- it('discards file changes on all other files', done => {
- testAction(
- discardAllChanges,
- undefined,
- store.state,
- [
- { type: types.DISCARD_FILE_CHANGES, payload: 'discardAll' },
- { type: types.REMOVE_ALL_CHANGES_FILES },
- ],
- [],
- done,
- );
- });
});
describe('closeAllFiles', () => {
@@ -258,13 +206,17 @@ describe('Multi-file store actions', () => {
describe('blob', () => {
it('creates temp file', done => {
+ const name = 'test';
+
store
.dispatch('createTempEntry', {
- name: 'test',
+ name,
branchId: 'mybranch',
type: 'blob',
})
- .then(f => {
+ .then(() => {
+ const f = store.state.entries[name];
+
expect(f.tempFile).toBeTruthy();
expect(store.state.trees['abcproject/mybranch'].tree.length).toBe(1);
@@ -273,14 +225,47 @@ describe('Multi-file store actions', () => {
.catch(done.fail);
});
+ describe('when `gon.features.stageAllByDefault` is true', () => {
+ const originalGonFeatures = Object.assign({}, gon.features);
+
+ beforeAll(() => {
+ gon.features = { stageAllByDefault: true };
+ });
+
+ afterAll(() => {
+ gon.features = originalGonFeatures;
+ });
+
+ it('adds tmp file to staged files', done => {
+ const name = 'test';
+
+ store
+ .dispatch('createTempEntry', {
+ name,
+ branchId: 'mybranch',
+ type: 'blob',
+ })
+ .then(() => {
+ expect(store.state.stagedFiles).toEqual([jasmine.objectContaining({ name })]);
+
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
it('adds tmp file to open files', done => {
+ const name = 'test';
+
store
.dispatch('createTempEntry', {
- name: 'test',
+ name,
branchId: 'mybranch',
type: 'blob',
})
- .then(f => {
+ .then(() => {
+ const f = store.state.entries[name];
+
expect(store.state.openFiles.length).toBe(1);
expect(store.state.openFiles[0].name).toBe(f.name);
@@ -290,46 +275,34 @@ describe('Multi-file store actions', () => {
});
it('adds tmp file to changed files', done => {
+ const name = 'test';
+
store
.dispatch('createTempEntry', {
- name: 'test',
+ name,
branchId: 'mybranch',
type: 'blob',
})
- .then(f => {
- expect(store.state.changedFiles.length).toBe(1);
- expect(store.state.changedFiles[0].name).toBe(f.name);
+ .then(() => {
+ expect(store.state.changedFiles).toEqual([
+ jasmine.objectContaining({ name, tempFile: true }),
+ ]);
done();
})
.catch(done.fail);
});
- it('sets tmp file as active', done => {
- testAction(
- createTempEntry,
- {
- name: 'test',
- branchId: 'mybranch',
- type: 'blob',
- },
- store.state,
- [
- { type: types.CREATE_TMP_ENTRY, payload: jasmine.any(Object) },
- { type: types.TOGGLE_FILE_OPEN, payload: 'test' },
- { type: types.ADD_FILE_TO_CHANGED, payload: 'test' },
- ],
- [
- {
- type: 'setFileActive',
- payload: 'test',
- },
- {
- type: 'triggerFilesChange',
- },
- ],
- done,
+ it('sets tmp file as active', () => {
+ const dispatch = jasmine.createSpy();
+ const commit = jasmine.createSpy();
+
+ createTempEntry(
+ { state: store.state, getters: store.getters, dispatch, commit },
+ { name: 'test', branchId: 'mybranch', type: 'blob' },
);
+
+ expect(dispatch).toHaveBeenCalledWith('setFileActive', 'test');
});
it('creates flash message if file already exists', done => {
@@ -344,7 +317,24 @@ describe('Multi-file store actions', () => {
type: 'blob',
})
.then(() => {
- expect(document.querySelector('.flash-alert')).not.toBeNull();
+ expect(document.querySelector('.flash-alert')?.textContent.trim()).toEqual(
+ `The name "${f.name}" is already taken in this directory.`,
+ );
+
+ done();
+ })
+ .catch(done.fail);
+ });
+
+ it('bursts unused seal', done => {
+ store
+ .dispatch('createTempEntry', {
+ name: 'test',
+ branchId: 'mybranch',
+ type: 'blob',
+ })
+ .then(() => {
+ expect(store.state.unusedSeal).toBe(false);
done();
})
@@ -375,58 +365,82 @@ describe('Multi-file store actions', () => {
});
});
- describe('stageAllChanges', () => {
- it('adds all files from changedFiles to stagedFiles', done => {
- const openFile = { ...file(), path: 'test' };
+ describe('stage/unstageAllChanges', () => {
+ let file1;
+ let file2;
- store.state.openFiles.push(openFile);
- store.state.stagedFiles.push(openFile);
- store.state.changedFiles.push(openFile, file('new'));
+ beforeEach(() => {
+ file1 = { ...file('test'), content: 'changed test', raw: 'test' };
+ file2 = { ...file('test2'), content: 'changed test2', raw: 'test2' };
- testAction(
- stageAllChanges,
- null,
- store.state,
- [
- { type: types.SET_LAST_COMMIT_MSG, payload: '' },
- { type: types.STAGE_CHANGE, payload: store.state.changedFiles[0].path },
- { type: types.STAGE_CHANGE, payload: store.state.changedFiles[1].path },
- ],
- [
- {
- type: 'openPendingTab',
- payload: { file: openFile, keyPrefix: 'staged' },
- },
- ],
- done,
- );
+ store.state.openFiles = [file1];
+ store.state.changedFiles = [file1];
+ store.state.stagedFiles = [{ ...file2, content: 'staged test' }];
+
+ store.state.entries = {
+ [file1.path]: { ...file1 },
+ [file2.path]: { ...file2 },
+ };
});
- });
- describe('unstageAllChanges', () => {
- it('removes all files from stagedFiles after unstaging', done => {
- const openFile = { ...file(), path: 'test' };
+ describe('stageAllChanges', () => {
+ it('adds all files from changedFiles to stagedFiles', () => {
+ stageAllChanges(store);
+
+ expect(store.commit.calls.allArgs()).toEqual([
+ [types.SET_LAST_COMMIT_MSG, ''],
+ [types.STAGE_CHANGE, jasmine.objectContaining({ path: file1.path })],
+ ]);
+ });
- store.state.openFiles.push(openFile);
- store.state.changedFiles.push(openFile);
- store.state.stagedFiles.push(openFile, file('new'));
+ it('opens pending tab if a change exists in that file', () => {
+ stageAllChanges(store);
- testAction(
- unstageAllChanges,
- null,
- store.state,
- [
- { type: types.UNSTAGE_CHANGE, payload: store.state.stagedFiles[0].path },
- { type: types.UNSTAGE_CHANGE, payload: store.state.stagedFiles[1].path },
- ],
- [
- {
- type: 'openPendingTab',
- payload: { file: openFile, keyPrefix: 'unstaged' },
- },
- ],
- done,
- );
+ expect(store.dispatch.calls.allArgs()).toEqual([
+ [
+ 'openPendingTab',
+ { file: { ...file1, staged: true, changed: true }, keyPrefix: 'staged' },
+ ],
+ ]);
+ });
+
+ it('does not open pending tab if no change exists in that file', () => {
+ store.state.entries[file1.path].content = 'test';
+ store.state.stagedFiles = [file1];
+ store.state.changedFiles = [store.state.entries[file1.path]];
+
+ stageAllChanges(store);
+
+ expect(store.dispatch).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('unstageAllChanges', () => {
+ it('removes all files from stagedFiles after unstaging', () => {
+ unstageAllChanges(store);
+
+ expect(store.commit.calls.allArgs()).toEqual([
+ [types.UNSTAGE_CHANGE, jasmine.objectContaining({ path: file2.path })],
+ ]);
+ });
+
+ it('opens pending tab if a change exists in that file', () => {
+ unstageAllChanges(store);
+
+ expect(store.dispatch.calls.allArgs()).toEqual([
+ ['openPendingTab', { file: file1, keyPrefix: 'unstaged' }],
+ ]);
+ });
+
+ it('does not open pending tab if no change exists in that file', () => {
+ store.state.entries[file1.path].content = 'test';
+ store.state.stagedFiles = [file1];
+ store.state.changedFiles = [store.state.entries[file1.path]];
+
+ unstageAllChanges(store);
+
+ expect(store.dispatch).not.toHaveBeenCalled();
+ });
});
});
@@ -617,36 +631,111 @@ describe('Multi-file store actions', () => {
);
});
- it('if renamed, reverts the rename before deleting', () => {
- const testEntry = {
- path: 'test',
- name: 'test',
- prevPath: 'lorem/ipsum',
- prevName: 'ipsum',
- prevParentPath: 'lorem',
- };
+ describe('when renamed', () => {
+ let testEntry;
- store.state.entries = { test: testEntry };
- testAction(
- deleteEntry,
- testEntry.path,
- store.state,
- [],
- [
- {
- type: 'renameEntry',
- payload: {
- path: testEntry.path,
- name: testEntry.prevName,
- parentPath: testEntry.prevParentPath,
- },
- },
- {
- type: 'deleteEntry',
- payload: testEntry.prevPath,
- },
- ],
- );
+ beforeEach(() => {
+ testEntry = {
+ path: 'test',
+ name: 'test',
+ prevPath: 'test_old',
+ prevName: 'test_old',
+ prevParentPath: '',
+ };
+
+ store.state.entries = { test: testEntry };
+ });
+
+ describe('and previous does not exist', () => {
+ it('reverts the rename before deleting', done => {
+ testAction(
+ deleteEntry,
+ testEntry.path,
+ store.state,
+ [],
+ [
+ {
+ type: 'renameEntry',
+ payload: {
+ path: testEntry.path,
+ name: testEntry.prevName,
+ parentPath: testEntry.prevParentPath,
+ },
+ },
+ {
+ type: 'deleteEntry',
+ payload: testEntry.prevPath,
+ },
+ ],
+ done,
+ );
+ });
+ });
+
+ describe('and previous exists', () => {
+ beforeEach(() => {
+ const oldEntry = {
+ path: testEntry.prevPath,
+ name: testEntry.prevName,
+ };
+
+ store.state.entries[oldEntry.path] = oldEntry;
+ });
+
+ it('does not revert rename before deleting', done => {
+ testAction(
+ deleteEntry,
+ testEntry.path,
+ store.state,
+ [{ type: types.DELETE_ENTRY, payload: testEntry.path }],
+ [
+ { type: 'burstUnusedSeal' },
+ { type: 'stageChange', payload: testEntry.path },
+ { type: 'triggerFilesChange' },
+ ],
+ done,
+ );
+ });
+
+ it('when previous is deleted, it reverts rename before deleting', done => {
+ store.state.entries[testEntry.prevPath].deleted = true;
+
+ testAction(
+ deleteEntry,
+ testEntry.path,
+ store.state,
+ [],
+ [
+ {
+ type: 'renameEntry',
+ payload: {
+ path: testEntry.path,
+ name: testEntry.prevName,
+ parentPath: testEntry.prevParentPath,
+ },
+ },
+ {
+ type: 'deleteEntry',
+ payload: testEntry.prevPath,
+ },
+ ],
+ done,
+ );
+ });
+ });
+ });
+
+ it('bursts unused seal', done => {
+ store.state.entries.test = file('test');
+
+ store
+ .dispatch('deleteEntry', 'test')
+ .then(() => {
+ expect(store.state.unusedSeal).toBe(false);
+
+ done();
+ })
+ .catch(done.fail);
});
});
@@ -724,8 +813,31 @@ describe('Multi-file store actions', () => {
});
});
- afterEach(() => {
- resetStore(store);
+ describe('when `gon.features.stageAllByDefault` is true', () => {
+ const originalGonFeatures = Object.assign({}, gon.features);
+
+ beforeAll(() => {
+ gon.features = { stageAllByDefault: true };
+ });
+
+ afterAll(() => {
+ gon.features = originalGonFeatures;
+ });
+
+ it('by default renames an entry and stages it', () => {
+ const dispatch = jasmine.createSpy();
+ const commit = jasmine.createSpy();
+
+ renameEntry(
+ { dispatch, commit, state: store.state, getters: store.getters },
+ { path: 'orig', name: 'renamed' },
+ );
+
+ expect(commit.calls.allArgs()).toEqual([
+ [types.RENAME_ENTRY, { path: 'orig', name: 'renamed', parentPath: undefined }],
+ [types.STAGE_CHANGE, jasmine.objectContaining({ path: 'renamed' })],
+ ]);
+ });
});
it('by default renames an entry and adds to changed', done => {
@@ -747,12 +859,12 @@ describe('Multi-file store actions', () => {
payload: 'renamed',
},
],
- [{ type: 'triggerFilesChange' }],
+ jasmine.any(Object),
done,
);
});
- it('if not changed, completely unstages entry if renamed to original', done => {
+ it('if not changed, completely unstages and discards entry if renamed to original', done => {
testAction(
renameEntry,
{ path: 'renamed', name: 'orig' },
@@ -807,6 +919,20 @@ describe('Multi-file store actions', () => {
.then(done)
.catch(done.fail);
});
+
+ it('bursts unused seal', done => {
+ store
+ .dispatch('renameEntry', {
+ path: 'orig',
+ name: 'renamed',
+ })
+ .then(() => {
+ expect(store.state.unusedSeal).toBe(false);
+
+ done();
+ })
+ .catch(done.fail);
+ });
});
describe('folder', () => {
@@ -908,6 +1034,103 @@ describe('Multi-file store actions', () => {
.then(done)
.catch(done.fail);
});
+
+ describe('with file in directory', () => {
+ const parentPath = 'original-dir';
+ const newParentPath = 'new-dir';
+ const fileName = 'test.md';
+ const filePath = `${parentPath}/${fileName}`;
+
+ let rootDir;
+
+ beforeEach(() => {
+ const parentEntry = file(parentPath, parentPath, 'tree');
+ const fileEntry = file(filePath, filePath, 'blob', parentEntry);
+ rootDir = {
+ tree: [],
+ };
+
+ Object.assign(store.state, {
+ entries: {
+ [parentPath]: {
+ ...parentEntry,
+ tree: [fileEntry],
+ },
+ [filePath]: fileEntry,
+ },
+ trees: {
+ '/': rootDir,
+ },
+ });
+ });
+
+ it('creates new directory', done => {
+ expect(store.state.entries[newParentPath]).toBeUndefined();
+
+ store
+ .dispatch('renameEntry', { path: filePath, name: fileName, parentPath: newParentPath })
+ .then(() => {
+ expect(store.state.entries[newParentPath]).toEqual(
+ jasmine.objectContaining({
+ path: newParentPath,
+ type: 'tree',
+ tree: jasmine.arrayContaining([
+ store.state.entries[`${newParentPath}/${fileName}`],
+ ]),
+ }),
+ );
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ describe('when new directory exists', () => {
+ let newDir;
+
+ beforeEach(() => {
+ newDir = file(newParentPath, newParentPath, 'tree');
+
+ store.state.entries[newDir.path] = newDir;
+ rootDir.tree.push(newDir);
+ });
+
+ it('inserts in new directory', done => {
+ expect(newDir.tree).toEqual([]);
+
+ store
+ .dispatch('renameEntry', {
+ path: filePath,
+ name: fileName,
+ parentPath: newParentPath,
+ })
+ .then(() => {
+ expect(newDir.tree).toEqual([store.state.entries[`${newParentPath}/${fileName}`]]);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('when new directory is deleted, it undeletes it', done => {
+ store.dispatch('deleteEntry', newParentPath);
+
+ expect(store.state.entries[newParentPath].deleted).toBe(true);
+ expect(rootDir.tree.some(x => x.path === newParentPath)).toBe(false);
+
+ store
+ .dispatch('renameEntry', {
+ path: filePath,
+ name: fileName,
+ parentPath: newParentPath,
+ })
+ .then(() => {
+ expect(store.state.entries[newParentPath].deleted).toBe(false);
+ expect(rootDir.tree.some(x => x.path === newParentPath)).toBe(true);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+ });
});
});
@@ -924,18 +1147,19 @@ describe('Multi-file store actions', () => {
describe('error', () => {
let dispatch;
- const callParams = [
- {
- commit() {},
- state: store.state,
- },
- {
- projectId: 'abc/def',
- branchId: 'master-testing',
- },
- ];
+ let callParams;
beforeEach(() => {
+ callParams = [
+ {
+ commit() {},
+ state: store.state,
+ },
+ {
+ projectId: 'abc/def',
+ branchId: 'master-testing',
+ },
+ ];
dispatch = jasmine.createSpy('dispatchSpy');
document.body.innerHTML += '<div class="flash-container"></div>';
});
diff --git a/spec/javascripts/jobs/components/manual_variables_form_spec.js b/spec/javascripts/jobs/components/manual_variables_form_spec.js
index 1f2bf8674c1..547f146cf88 100644
--- a/spec/javascripts/jobs/components/manual_variables_form_spec.js
+++ b/spec/javascripts/jobs/components/manual_variables_form_spec.js
@@ -20,7 +20,6 @@ describe('Manual Variables Form', () => {
wrapper = shallowMount(localVue.extend(Form), {
propsData: props,
localVue,
- sync: false,
});
};
diff --git a/spec/javascripts/lib/utils/common_utils_spec.js b/spec/javascripts/lib/utils/common_utils_spec.js
index e471be608c8..504d4a3e01a 100644
--- a/spec/javascripts/lib/utils/common_utils_spec.js
+++ b/spec/javascripts/lib/utils/common_utils_spec.js
@@ -1,8 +1,8 @@
import MockAdapter from 'axios-mock-adapter';
+import { GlBreakpointInstance as breakpointInstance } from '@gitlab/ui/dist/utils';
import axios from '~/lib/utils/axios_utils';
import * as commonUtils from '~/lib/utils/common_utils';
import { faviconDataUrl, overlayDataUrl, faviconWithOverlayDataUrl } from './mock_data';
-import breakpointInstance from '~/breakpoints';
const PIXEL_TOLERANCE = 0.2;
@@ -88,10 +88,12 @@ describe('common_utils', () => {
describe('handleLocationHash', () => {
beforeEach(() => {
spyOn(window.document, 'getElementById').and.callThrough();
+ jasmine.clock().install();
});
afterEach(() => {
window.history.pushState({}, null, '');
+ jasmine.clock().uninstall();
});
function expectGetElementIdToHaveBeenCalledWith(elementId) {
@@ -171,6 +173,7 @@ describe('common_utils', () => {
window.history.pushState({}, null, '#test');
commonUtils.handleLocationHash();
+ jasmine.clock().tick(1);
expectGetElementIdToHaveBeenCalledWith('test');
expectGetElementIdToHaveBeenCalledWith('user-content-test');
diff --git a/spec/javascripts/merge_request_tabs_spec.js b/spec/javascripts/merge_request_tabs_spec.js
index 73b1ea4d36f..019aa191dc0 100644
--- a/spec/javascripts/merge_request_tabs_spec.js
+++ b/spec/javascripts/merge_request_tabs_spec.js
@@ -3,7 +3,6 @@ import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import MergeRequestTabs from '~/merge_request_tabs';
import '~/commit/pipelines/pipelines_bundle';
-import '~/breakpoints';
import '~/lib/utils/common_utils';
import 'vendor/jquery.scrollTo';
import initMrPage from './helpers/init_vue_mr_page_helper';
diff --git a/spec/javascripts/monitoring/components/dashboard_resize_spec.js b/spec/javascripts/monitoring/components/dashboard_resize_spec.js
new file mode 100644
index 00000000000..46a6679da18
--- /dev/null
+++ b/spec/javascripts/monitoring/components/dashboard_resize_spec.js
@@ -0,0 +1,141 @@
+import Vue from 'vue';
+import { createLocalVue } from '@vue/test-utils';
+import MockAdapter from 'axios-mock-adapter';
+import Dashboard from '~/monitoring/components/dashboard.vue';
+import * as types from '~/monitoring/stores/mutation_types';
+import { createStore } from '~/monitoring/stores';
+import axios from '~/lib/utils/axios_utils';
+import {
+ metricsGroupsAPIResponse,
+ mockedEmptyResult,
+ mockedQueryResultPayload,
+ mockedQueryResultPayloadCoresTotal,
+ mockApiEndpoint,
+ environmentData,
+} from '../mock_data';
+
+const localVue = createLocalVue();
+const propsData = {
+ hasMetrics: false,
+ documentationPath: '/path/to/docs',
+ settingsPath: '/path/to/settings',
+ clustersPath: '/path/to/clusters',
+ tagsPath: '/path/to/tags',
+ projectPath: '/path/to/project',
+ defaultBranch: 'master',
+ metricsEndpoint: mockApiEndpoint,
+ deploymentsEndpoint: null,
+ emptyGettingStartedSvgPath: '/path/to/getting-started.svg',
+ emptyLoadingSvgPath: '/path/to/loading.svg',
+ emptyNoDataSvgPath: '/path/to/no-data.svg',
+ emptyNoDataSmallSvgPath: '/path/to/no-data-small.svg',
+ emptyUnableToConnectSvgPath: '/path/to/unable-to-connect.svg',
+ environmentsEndpoint: '/root/hello-prometheus/environments/35',
+ currentEnvironmentName: 'production',
+ customMetricsAvailable: false,
+ customMetricsPath: '',
+ validateQueryPath: '',
+};
+
+function setupComponentStore(component) {
+ // Load 2 panel groups
+ component.$store.commit(
+ `monitoringDashboard/${types.RECEIVE_METRICS_DATA_SUCCESS}`,
+ metricsGroupsAPIResponse,
+ );
+
+ // Load 3 panels to the dashboard, one with an empty result
+ component.$store.commit(
+ `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
+ mockedEmptyResult,
+ );
+ component.$store.commit(
+ `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
+ mockedQueryResultPayload,
+ );
+ component.$store.commit(
+ `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
+ mockedQueryResultPayloadCoresTotal,
+ );
+
+ component.$store.commit(
+ `monitoringDashboard/${types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS}`,
+ environmentData,
+ );
+}
+
+describe('Dashboard', () => {
+ let DashboardComponent;
+ let mock;
+ let store;
+ let component;
+ let wrapper;
+
+ beforeEach(() => {
+ setFixtures(`
+ <div class="prometheus-graphs"></div>
+ <div class="layout-page"></div>
+ `);
+
+ store = createStore();
+ mock = new MockAdapter(axios);
+ DashboardComponent = localVue.extend(Dashboard);
+ });
+
+ afterEach(() => {
+ if (component) {
+ component.$destroy();
+ }
+ if (wrapper) {
+ wrapper.destroy();
+ }
+ mock.restore();
+ });
+
+ describe('responds to window resizes', () => {
+ let promPanel;
+ let promGroup;
+ let panelToggle;
+ let chart;
+ beforeEach(() => {
+ mock.onGet(mockApiEndpoint).reply(200, metricsGroupsAPIResponse);
+
+ component = new DashboardComponent({
+ el: document.querySelector('.prometheus-graphs'),
+ propsData: {
+ ...propsData,
+ hasMetrics: true,
+ showPanels: true,
+ },
+ store,
+ });
+
+ setupComponentStore(component);
+
+ return Vue.nextTick().then(() => {
+ [, promPanel] = component.$el.querySelectorAll('.prometheus-panel');
+ promGroup = promPanel.querySelector('.prometheus-graph-group');
+ panelToggle = promPanel.querySelector('.js-graph-group-toggle');
+ chart = promGroup.querySelector('.position-relative svg');
+ });
+ });
+
+ it('sets chart size to zero when panel group is hidden', () => {
+ expect(promGroup.style.display).toBe('');
+ expect(chart.clientWidth).toBeGreaterThan(0);
+
+ panelToggle.click();
+ return Vue.nextTick().then(() => {
+ expect(promGroup.style.display).toBe('none');
+ expect(chart.clientWidth).toBe(0);
+ promPanel.style.width = '500px';
+ });
+ });
+
+ it('expanding chart panel group after resize displays chart', () => {
+ panelToggle.click();
+
+ expect(chart.clientWidth).toBeGreaterThan(0);
+ });
+ });
+});
diff --git a/spec/javascripts/monitoring/components/dashboard_spec.js b/spec/javascripts/monitoring/components/dashboard_spec.js
deleted file mode 100644
index b29bac21820..00000000000
--- a/spec/javascripts/monitoring/components/dashboard_spec.js
+++ /dev/null
@@ -1,729 +0,0 @@
-import Vue from 'vue';
-import { shallowMount, createLocalVue } from '@vue/test-utils';
-import { GlToast } from '@gitlab/ui';
-import VueDraggable from 'vuedraggable';
-import MockAdapter from 'axios-mock-adapter';
-import Dashboard from '~/monitoring/components/dashboard.vue';
-import { metricStates } from '~/monitoring/constants';
-import GroupEmptyState from '~/monitoring/components/group_empty_state.vue';
-import * as types from '~/monitoring/stores/mutation_types';
-import { createStore } from '~/monitoring/stores';
-import axios from '~/lib/utils/axios_utils';
-import {
- metricsGroupsAPIResponse,
- mockedEmptyResult,
- mockedQueryResultPayload,
- mockedQueryResultPayloadCoresTotal,
- mockApiEndpoint,
- environmentData,
- dashboardGitResponse,
-} from '../mock_data';
-
-const localVue = createLocalVue();
-const propsData = {
- hasMetrics: false,
- documentationPath: '/path/to/docs',
- settingsPath: '/path/to/settings',
- clustersPath: '/path/to/clusters',
- tagsPath: '/path/to/tags',
- projectPath: '/path/to/project',
- metricsEndpoint: mockApiEndpoint,
- deploymentsEndpoint: null,
- emptyGettingStartedSvgPath: '/path/to/getting-started.svg',
- emptyLoadingSvgPath: '/path/to/loading.svg',
- emptyNoDataSvgPath: '/path/to/no-data.svg',
- emptyNoDataSmallSvgPath: '/path/to/no-data-small.svg',
- emptyUnableToConnectSvgPath: '/path/to/unable-to-connect.svg',
- environmentsEndpoint: '/root/hello-prometheus/environments/35',
- currentEnvironmentName: 'production',
- customMetricsAvailable: false,
- customMetricsPath: '',
- validateQueryPath: '',
-};
-
-const resetSpy = spy => {
- if (spy) {
- spy.calls.reset();
- }
-};
-
-let expectedPanelCount;
-
-function setupComponentStore(component) {
- // Load 2 panel groups
- component.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRICS_DATA_SUCCESS}`,
- metricsGroupsAPIResponse,
- );
-
- // Load 3 panels to the dashboard, one with an empty result
- component.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
- mockedEmptyResult,
- );
- component.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
- mockedQueryResultPayload,
- );
- component.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
- mockedQueryResultPayloadCoresTotal,
- );
-
- expectedPanelCount = 2;
-
- component.$store.commit(
- `monitoringDashboard/${types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS}`,
- environmentData,
- );
-}
-
-describe('Dashboard', () => {
- let DashboardComponent;
- let mock;
- let store;
- let component;
- let wrapper;
-
- const createComponentWrapper = (props = {}, options = {}) => {
- wrapper = shallowMount(localVue.extend(DashboardComponent), {
- localVue,
- sync: false,
- propsData: { ...propsData, ...props },
- store,
- ...options,
- });
- };
-
- beforeEach(() => {
- setFixtures(`
- <div class="prometheus-graphs"></div>
- <div class="layout-page"></div>
- `);
-
- store = createStore();
- mock = new MockAdapter(axios);
- DashboardComponent = localVue.extend(Dashboard);
- });
-
- afterEach(() => {
- if (component) {
- component.$destroy();
- }
- if (wrapper) {
- wrapper.destroy();
- }
- mock.restore();
- });
-
- describe('no metrics are available yet', () => {
- beforeEach(() => {
- component = new DashboardComponent({
- el: document.querySelector('.prometheus-graphs'),
- propsData: { ...propsData },
- store,
- });
- });
-
- it('shows a getting started empty state when no metrics are present', () => {
- expect(component.$el.querySelector('.prometheus-graphs')).toBe(null);
- expect(component.emptyState).toEqual('gettingStarted');
- });
-
- it('shows the environment selector', () => {
- expect(component.$el.querySelector('.js-environments-dropdown')).toBeTruthy();
- });
- });
-
- describe('no data found', () => {
- it('shows the environment selector dropdown', () => {
- createComponentWrapper();
-
- expect(wrapper.find('.js-environments-dropdown').exists()).toBeTruthy();
- });
- });
-
- describe('cluster health', () => {
- beforeEach(done => {
- createComponentWrapper({ hasMetrics: true });
-
- // all_dashboards is not defined in health dashboards
- wrapper.vm.$store.commit(`monitoringDashboard/${types.SET_ALL_DASHBOARDS}`, undefined);
- wrapper.vm.$nextTick(done);
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('renders correctly', () => {
- expect(wrapper.isVueInstance()).toBe(true);
- expect(wrapper.exists()).toBe(true);
- });
- });
-
- describe('requests information to the server', () => {
- let spy;
- beforeEach(() => {
- mock.onGet(mockApiEndpoint).reply(200, metricsGroupsAPIResponse);
- });
-
- afterEach(() => {
- resetSpy(spy);
- });
-
- it('shows up a loading state', done => {
- component = new DashboardComponent({
- el: document.querySelector('.prometheus-graphs'),
- propsData: { ...propsData, hasMetrics: true },
- store,
- });
-
- Vue.nextTick(() => {
- expect(component.emptyState).toEqual('loading');
- done();
- });
- });
-
- it('hides the group panels when showPanels is false', done => {
- component = new DashboardComponent({
- el: document.querySelector('.prometheus-graphs'),
- propsData: {
- ...propsData,
- hasMetrics: true,
- showPanels: false,
- },
- store,
- });
-
- setupComponentStore(component);
-
- Vue.nextTick()
- .then(() => {
- expect(component.showEmptyState).toEqual(false);
- expect(component.$el.querySelector('.prometheus-panel')).toEqual(null);
- expect(component.$el.querySelector('.prometheus-graph-group')).toBeTruthy();
-
- done();
- })
- .catch(done.fail);
- });
-
- describe('when all the requests have been commited by the store', () => {
- beforeEach(() => {
- component = new DashboardComponent({
- el: document.querySelector('.prometheus-graphs'),
- propsData: {
- ...propsData,
- hasMetrics: true,
- },
- store,
- });
-
- setupComponentStore(component);
- });
-
- it('renders the environments dropdown with a number of environments', done => {
- Vue.nextTick()
- .then(() => {
- const dropdownMenuEnvironments = component.$el.querySelectorAll(
- '.js-environments-dropdown .dropdown-item',
- );
-
- expect(component.environments.length).toEqual(environmentData.length);
- expect(dropdownMenuEnvironments.length).toEqual(component.environments.length);
-
- Array.from(dropdownMenuEnvironments).forEach((value, index) => {
- if (environmentData[index].metrics_path) {
- expect(value).toHaveAttr('href', environmentData[index].metrics_path);
- }
- });
-
- done();
- })
- .catch(done.fail);
- });
-
- it('renders the environments dropdown with a single active element', done => {
- Vue.nextTick()
- .then(() => {
- const dropdownItems = component.$el.querySelectorAll(
- '.js-environments-dropdown .dropdown-item.active',
- );
-
- expect(dropdownItems.length).toEqual(1);
- done();
- })
- .catch(done.fail);
- });
- });
-
- it('hides the environments dropdown list when there is no environments', done => {
- component = new DashboardComponent({
- el: document.querySelector('.prometheus-graphs'),
- propsData: {
- ...propsData,
- hasMetrics: true,
- },
- store,
- });
-
- component.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRICS_DATA_SUCCESS}`,
- metricsGroupsAPIResponse,
- );
- component.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
- mockedQueryResultPayload,
- );
-
- Vue.nextTick()
- .then(() => {
- const dropdownMenuEnvironments = component.$el.querySelectorAll(
- '.js-environments-dropdown .dropdown-item',
- );
-
- expect(dropdownMenuEnvironments.length).toEqual(0);
- done();
- })
- .catch(done.fail);
- });
-
- it('renders the datetimepicker dropdown', done => {
- component = new DashboardComponent({
- el: document.querySelector('.prometheus-graphs'),
- propsData: {
- ...propsData,
- hasMetrics: true,
- showPanels: false,
- },
- store,
- });
-
- setupComponentStore(component);
-
- Vue.nextTick()
- .then(() => {
- expect(component.$el.querySelector('.js-time-window-dropdown')).not.toBeNull();
- done();
- })
- .catch(done.fail);
- });
-
- it('fetches the metrics data with proper time window', done => {
- component = new DashboardComponent({
- el: document.querySelector('.prometheus-graphs'),
- propsData: {
- ...propsData,
- hasMetrics: true,
- showPanels: false,
- },
- store,
- });
-
- spyOn(component.$store, 'dispatch').and.stub();
- const getTimeDiffSpy = spyOnDependency(Dashboard, 'getTimeDiff').and.callThrough();
-
- component.$store.commit(
- `monitoringDashboard/${types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS}`,
- environmentData,
- );
-
- component.$mount();
-
- Vue.nextTick()
- .then(() => {
- expect(component.$store.dispatch).toHaveBeenCalled();
- expect(getTimeDiffSpy).toHaveBeenCalled();
-
- done();
- })
- .catch(done.fail);
- });
-
- it('shows a specific time window selected from the url params', done => {
- const start = '2019-10-01T18:27:47.000Z';
- const end = '2019-10-01T18:57:47.000Z';
- spyOnDependency(Dashboard, 'getTimeDiff').and.returnValue({
- start,
- end,
- });
- spyOnDependency(Dashboard, 'getParameterValues').and.callFake(param => {
- if (param === 'start') return [start];
- if (param === 'end') return [end];
- return [];
- });
-
- component = new DashboardComponent({
- el: document.querySelector('.prometheus-graphs'),
- propsData: { ...propsData, hasMetrics: true },
- store,
- sync: false,
- });
-
- setupComponentStore(component);
-
- Vue.nextTick()
- .then(() => {
- const selectedTimeWindow = component.$el.querySelector(
- '.js-time-window-dropdown .active',
- );
-
- expect(selectedTimeWindow.textContent.trim()).toEqual('30 minutes');
- done();
- })
- .catch(done.fail);
- });
-
- it('shows an error message if invalid url parameters are passed', done => {
- spyOnDependency(Dashboard, 'getParameterValues').and.returnValue([
- '<script>alert("XSS")</script>',
- ]);
-
- component = new DashboardComponent({
- el: document.querySelector('.prometheus-graphs'),
- propsData: { ...propsData, hasMetrics: true },
- store,
- });
-
- spy = spyOn(component, 'showInvalidDateError');
- component.$mount();
-
- component.$nextTick(() => {
- expect(component.showInvalidDateError).toHaveBeenCalled();
- done();
- });
- });
- });
-
- describe('when one of the metrics is missing', () => {
- beforeEach(() => {
- mock.onGet(mockApiEndpoint).reply(200, metricsGroupsAPIResponse);
- });
-
- beforeEach(done => {
- createComponentWrapper({ hasMetrics: true });
- setupComponentStore(wrapper.vm);
-
- wrapper.vm.$nextTick(done);
- });
-
- it('shows a group empty area', () => {
- const emptyGroup = wrapper.findAll({ ref: 'empty-group' });
-
- expect(emptyGroup).toHaveLength(1);
- expect(emptyGroup.is(GroupEmptyState)).toBe(true);
- });
-
- it('group empty area displays a NO_DATA state', () => {
- expect(
- wrapper
- .findAll({ ref: 'empty-group' })
- .at(0)
- .props('selectedState'),
- ).toEqual(metricStates.NO_DATA);
- });
- });
-
- describe('drag and drop function', () => {
- const findDraggables = () => wrapper.findAll(VueDraggable);
- const findEnabledDraggables = () => findDraggables().filter(f => !f.attributes('disabled'));
- const findDraggablePanels = () => wrapper.findAll('.js-draggable-panel');
- const findRearrangeButton = () => wrapper.find('.js-rearrange-button');
-
- beforeEach(() => {
- mock.onGet(mockApiEndpoint).reply(200, metricsGroupsAPIResponse);
- });
-
- beforeEach(done => {
- createComponentWrapper({ hasMetrics: true }, { attachToDocument: true });
-
- setupComponentStore(wrapper.vm);
-
- wrapper.vm.$nextTick(done);
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('wraps vuedraggable', () => {
- expect(findDraggablePanels().exists()).toBe(true);
- expect(findDraggablePanels().length).toEqual(expectedPanelCount);
- });
-
- it('is disabled by default', () => {
- expect(findRearrangeButton().exists()).toBe(false);
- expect(findEnabledDraggables().length).toBe(0);
- });
-
- describe('when rearrange is enabled', () => {
- beforeEach(done => {
- wrapper.setProps({ rearrangePanelsAvailable: true });
- wrapper.vm.$nextTick(done);
- });
-
- it('displays rearrange button', () => {
- expect(findRearrangeButton().exists()).toBe(true);
- });
-
- describe('when rearrange button is clicked', () => {
- const findFirstDraggableRemoveButton = () =>
- findDraggablePanels()
- .at(0)
- .find('.js-draggable-remove');
-
- beforeEach(done => {
- findRearrangeButton().vm.$emit('click');
- wrapper.vm.$nextTick(done);
- });
-
- it('it enables draggables', () => {
- expect(findRearrangeButton().attributes('pressed')).toBeTruthy();
- expect(findEnabledDraggables()).toEqual(findDraggables());
- });
-
- it('metrics can be swapped', done => {
- const firstDraggable = findDraggables().at(0);
- const mockMetrics = [...metricsGroupsAPIResponse[1].panels];
-
- const firstTitle = mockMetrics[0].title;
- const secondTitle = mockMetrics[1].title;
-
- // swap two elements and `input` them
- [mockMetrics[0], mockMetrics[1]] = [mockMetrics[1], mockMetrics[0]];
- firstDraggable.vm.$emit('input', mockMetrics);
-
- wrapper.vm.$nextTick(() => {
- const { panels } = wrapper.vm.dashboard.panel_groups[1];
-
- expect(panels[1].title).toEqual(firstTitle);
- expect(panels[0].title).toEqual(secondTitle);
- done();
- });
- });
-
- it('shows a remove button, which removes a panel', done => {
- expect(findFirstDraggableRemoveButton().isEmpty()).toBe(false);
-
- expect(findDraggablePanels().length).toEqual(expectedPanelCount);
- findFirstDraggableRemoveButton().trigger('click');
-
- wrapper.vm.$nextTick(() => {
- expect(findDraggablePanels().length).toEqual(expectedPanelCount - 1);
- done();
- });
- });
-
- it('it disables draggables when clicked again', done => {
- findRearrangeButton().vm.$emit('click');
- wrapper.vm.$nextTick(() => {
- expect(findRearrangeButton().attributes('pressed')).toBeFalsy();
- expect(findEnabledDraggables().length).toBe(0);
- done();
- });
- });
- });
- });
- });
-
- // https://gitlab.com/gitlab-org/gitlab-ce/issues/66922
- // eslint-disable-next-line jasmine/no-disabled-tests
- xdescribe('link to chart', () => {
- const currentDashboard = 'TEST_DASHBOARD';
- localVue.use(GlToast);
- const link = () => wrapper.find('.js-chart-link');
- const clipboardText = () => link().element.dataset.clipboardText;
-
- beforeEach(done => {
- mock.onGet(mockApiEndpoint).reply(200, metricsGroupsAPIResponse);
-
- createComponentWrapper({ hasMetrics: true, currentDashboard }, { attachToDocument: true });
-
- setTimeout(done);
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('adds a copy button to the dropdown', () => {
- expect(link().text()).toContain('Generate link to chart');
- });
-
- it('contains a link to the dashboard', () => {
- expect(clipboardText()).toContain(`dashboard=${currentDashboard}`);
- expect(clipboardText()).toContain(`group=`);
- expect(clipboardText()).toContain(`title=`);
- expect(clipboardText()).toContain(`y_label=`);
- });
-
- it('undefined parameter is stripped', done => {
- wrapper.setProps({ currentDashboard: undefined });
-
- wrapper.vm.$nextTick(() => {
- expect(clipboardText()).not.toContain(`dashboard=`);
- expect(clipboardText()).toContain(`y_label=`);
- done();
- });
- });
-
- it('null parameter is stripped', done => {
- wrapper.setProps({ currentDashboard: null });
-
- wrapper.vm.$nextTick(() => {
- expect(clipboardText()).not.toContain(`dashboard=`);
- expect(clipboardText()).toContain(`y_label=`);
- done();
- });
- });
-
- it('creates a toast when clicked', () => {
- spyOn(wrapper.vm.$toast, 'show').and.stub();
-
- link().vm.$emit('click');
-
- expect(wrapper.vm.$toast.show).toHaveBeenCalled();
- });
- });
-
- describe('responds to window resizes', () => {
- let promPanel;
- let promGroup;
- let panelToggle;
- let chart;
- beforeEach(() => {
- mock.onGet(mockApiEndpoint).reply(200, metricsGroupsAPIResponse);
-
- component = new DashboardComponent({
- el: document.querySelector('.prometheus-graphs'),
- propsData: {
- ...propsData,
- hasMetrics: true,
- showPanels: true,
- },
- store,
- });
-
- setupComponentStore(component);
-
- return Vue.nextTick().then(() => {
- [, promPanel] = component.$el.querySelectorAll('.prometheus-panel');
- promGroup = promPanel.querySelector('.prometheus-graph-group');
- panelToggle = promPanel.querySelector('.js-graph-group-toggle');
- chart = promGroup.querySelector('.position-relative svg');
- });
- });
-
- it('setting chart size to zero when panel group is hidden', () => {
- expect(promGroup.style.display).toBe('');
- expect(chart.clientWidth).toBeGreaterThan(0);
-
- panelToggle.click();
- return Vue.nextTick().then(() => {
- expect(promGroup.style.display).toBe('none');
- expect(chart.clientWidth).toBe(0);
- promPanel.style.width = '500px';
- });
- });
-
- it('expanding chart panel group after resize displays chart', () => {
- panelToggle.click();
-
- expect(chart.clientWidth).toBeGreaterThan(0);
- });
- });
-
- describe('dashboard edit link', () => {
- const findEditLink = () => wrapper.find('.js-edit-link');
-
- beforeEach(done => {
- mock.onGet(mockApiEndpoint).reply(200, metricsGroupsAPIResponse);
-
- createComponentWrapper({ hasMetrics: true }, { attachToDocument: true });
-
- wrapper.vm.$store.commit(
- `monitoringDashboard/${types.SET_ALL_DASHBOARDS}`,
- dashboardGitResponse,
- );
- wrapper.vm.$nextTick(done);
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('is not present for the default dashboard', () => {
- expect(findEditLink().exists()).toBe(false);
- });
-
- it('is present for a custom dashboard, and links to its edit_path', done => {
- const dashboard = dashboardGitResponse[1]; // non-default dashboard
- const currentDashboard = dashboard.path;
-
- wrapper.setProps({ currentDashboard });
- wrapper.vm.$nextTick(() => {
- expect(findEditLink().exists()).toBe(true);
- expect(findEditLink().attributes('href')).toBe(dashboard.project_blob_path);
- done();
- });
- });
- });
-
- describe('external dashboard link', () => {
- beforeEach(() => {
- mock.onGet(mockApiEndpoint).reply(200, metricsGroupsAPIResponse);
-
- component = new DashboardComponent({
- el: document.querySelector('.prometheus-graphs'),
- propsData: {
- ...propsData,
- hasMetrics: true,
- showPanels: false,
- showTimeWindowDropdown: false,
- externalDashboardUrl: '/mockUrl',
- },
- store,
- });
- });
-
- it('shows the link', done => {
- setTimeout(() => {
- expect(component.$el.querySelector('.js-external-dashboard-link').innerText).toContain(
- 'View full dashboard',
- );
- done();
- });
- });
- });
-
- describe('Dashboard dropdown', () => {
- beforeEach(() => {
- mock.onGet(mockApiEndpoint).reply(200, metricsGroupsAPIResponse);
-
- component = new DashboardComponent({
- el: document.querySelector('.prometheus-graphs'),
- propsData: {
- ...propsData,
- hasMetrics: true,
- showPanels: false,
- },
- store,
- });
-
- component.$store.commit(
- `monitoringDashboard/${types.SET_ALL_DASHBOARDS}`,
- dashboardGitResponse,
- );
- });
-
- it('shows the dashboard dropdown', done => {
- setTimeout(() => {
- const dashboardDropdown = component.$el.querySelector('.js-dashboards-dropdown');
-
- expect(dashboardDropdown).not.toEqual(null);
- done();
- });
- });
- });
-});
diff --git a/spec/javascripts/monitoring/helpers.js b/spec/javascripts/monitoring/helpers.js
deleted file mode 100644
index 672e3b948c4..00000000000
--- a/spec/javascripts/monitoring/helpers.js
+++ /dev/null
@@ -1,8 +0,0 @@
-// eslint-disable-next-line import/prefer-default-export
-export const resetStore = store => {
- store.replaceState({
- showEmptyState: true,
- emptyState: 'loading',
- groups: [],
- });
-};
diff --git a/spec/javascripts/notes/components/discussion_resolve_with_issue_button_spec.js b/spec/javascripts/notes/components/discussion_resolve_with_issue_button_spec.js
index 3be1f0be0d0..4348445f7ca 100644
--- a/spec/javascripts/notes/components/discussion_resolve_with_issue_button_spec.js
+++ b/spec/javascripts/notes/components/discussion_resolve_with_issue_button_spec.js
@@ -12,7 +12,6 @@ describe('ResolveWithIssueButton', () => {
beforeEach(() => {
wrapper = shallowMount(ResolveWithIssueButton, {
localVue,
- sync: false,
propsData: {
url,
},
diff --git a/spec/javascripts/notes/components/note_actions/reply_button_spec.js b/spec/javascripts/notes/components/note_actions/reply_button_spec.js
index aa39ab15833..720ab10b270 100644
--- a/spec/javascripts/notes/components/note_actions/reply_button_spec.js
+++ b/spec/javascripts/notes/components/note_actions/reply_button_spec.js
@@ -10,7 +10,6 @@ describe('ReplyButton', () => {
beforeEach(() => {
wrapper = mount(localVue.extend(ReplyButton), {
- sync: false,
localVue,
});
});
diff --git a/spec/javascripts/notes/components/note_actions_spec.js b/spec/javascripts/notes/components/note_actions_spec.js
index 2e0694869ba..259122597fb 100644
--- a/spec/javascripts/notes/components/note_actions_spec.js
+++ b/spec/javascripts/notes/components/note_actions_spec.js
@@ -16,7 +16,6 @@ describe('noteActions', () => {
store,
propsData,
localVue,
- sync: false,
});
};
diff --git a/spec/javascripts/notes/components/note_form_spec.js b/spec/javascripts/notes/components/note_form_spec.js
index 35283e14dc5..8ab8bce9027 100644
--- a/spec/javascripts/notes/components/note_form_spec.js
+++ b/spec/javascripts/notes/components/note_form_spec.js
@@ -19,7 +19,6 @@ describe('issue_note_form component', () => {
propsData: props,
// see https://gitlab.com/gitlab-org/gitlab-foss/issues/56317 for the following
localVue,
- sync: false,
});
};
diff --git a/spec/javascripts/notes/components/noteable_discussion_spec.js b/spec/javascripts/notes/components/noteable_discussion_spec.js
index 5e359759afc..6efc6485b9c 100644
--- a/spec/javascripts/notes/components/noteable_discussion_spec.js
+++ b/spec/javascripts/notes/components/noteable_discussion_spec.js
@@ -5,8 +5,15 @@ import ReplyPlaceholder from '~/notes/components/discussion_reply_placeholder.vu
import ResolveWithIssueButton from '~/notes/components/discussion_resolve_with_issue_button.vue';
import NoteForm from '~/notes/components/note_form.vue';
import '~/behaviors/markdown/render_gfm';
-import { noteableDataMock, discussionMock, notesDataMock } from '../mock_data';
+import {
+ noteableDataMock,
+ discussionMock,
+ notesDataMock,
+ loggedOutnoteableData,
+ userDataMock,
+} from '../mock_data';
import mockDiffFile from '../../diffs/mock_data/diff_file';
+import { trimText } from '../../helpers/text_helper';
const discussionWithTwoUnresolvedNotes = 'merge_requests/resolved_diff_discussion.json';
@@ -15,6 +22,7 @@ const localVue = createLocalVue();
describe('noteable_discussion component', () => {
let store;
let wrapper;
+ let originalGon;
preloadFixtures(discussionWithTwoUnresolvedNotes);
@@ -28,7 +36,6 @@ describe('noteable_discussion component', () => {
store,
propsData: { discussion: discussionMock },
localVue,
- sync: false,
});
});
@@ -167,4 +174,53 @@ describe('noteable_discussion component', () => {
expect(button.exists()).toBe(true);
});
});
+
+ describe('signout widget', () => {
+ beforeEach(() => {
+ originalGon = Object.assign({}, window.gon);
+ window.gon = window.gon || {};
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ window.gon = originalGon;
+ });
+
+ describe('user is logged in', () => {
+ beforeEach(() => {
+ window.gon.current_user_id = userDataMock.id;
+ store.dispatch('setUserData', userDataMock);
+
+ wrapper = mount(localVue.extend(noteableDiscussion), {
+ store,
+ propsData: { discussion: discussionMock },
+ localVue,
+ });
+ });
+
+ it('should not render signed out widget', () => {
+ expect(Boolean(wrapper.vm.isLoggedIn)).toBe(true);
+ expect(trimText(wrapper.text())).not.toContain('Please register or sign in to reply');
+ });
+ });
+
+ describe('user is not logged in', () => {
+ beforeEach(() => {
+ window.gon.current_user_id = null;
+ store.dispatch('setNoteableData', loggedOutnoteableData);
+ store.dispatch('setNotesData', notesDataMock);
+
+ wrapper = mount(localVue.extend(noteableDiscussion), {
+ store,
+ propsData: { discussion: discussionMock },
+ localVue,
+ });
+ });
+
+ it('should render signed out widget', () => {
+ expect(Boolean(wrapper.vm.isLoggedIn)).toBe(false);
+ expect(trimText(wrapper.text())).toContain('Please register or sign in to reply');
+ });
+ });
+ });
});
diff --git a/spec/javascripts/notes/components/noteable_note_spec.js b/spec/javascripts/notes/components/noteable_note_spec.js
index 72a13afe498..5fbac7faefd 100644
--- a/spec/javascripts/notes/components/noteable_note_spec.js
+++ b/spec/javascripts/notes/components/noteable_note_spec.js
@@ -23,7 +23,6 @@ describe('issue_note', () => {
propsData: {
note,
},
- sync: false,
localVue,
});
});
diff --git a/spec/javascripts/pipelines/graph/graph_component_spec.js b/spec/javascripts/pipelines/graph/graph_component_spec.js
index 5effbaabcd1..fa6a5f57410 100644
--- a/spec/javascripts/pipelines/graph/graph_component_spec.js
+++ b/spec/javascripts/pipelines/graph/graph_component_spec.js
@@ -190,6 +190,7 @@ describe('graph component', () => {
describe('on click', () => {
it('should emit `onClickTriggered`', () => {
spyOn(component, '$emit');
+ spyOn(component, 'calculateMarginTop').and.callFake(() => '16px');
component.$el.querySelector('#js-linked-pipeline-34993051').click();
diff --git a/spec/javascripts/pipelines/graph/linked_pipelines_column_spec.js b/spec/javascripts/pipelines/graph/linked_pipelines_column_spec.js
index fe7039da9e4..613ab2a906f 100644
--- a/spec/javascripts/pipelines/graph/linked_pipelines_column_spec.js
+++ b/spec/javascripts/pipelines/graph/linked_pipelines_column_spec.js
@@ -9,6 +9,7 @@ describe('Linked Pipelines Column', () => {
columnTitle: 'Upstream',
linkedPipelines: mockData.triggered,
graphPosition: 'right',
+ projectId: 19,
};
let vm;
diff --git a/spec/javascripts/pipelines/header_component_spec.js b/spec/javascripts/pipelines/header_component_spec.js
index 556a0976b29..8c033447ce4 100644
--- a/spec/javascripts/pipelines/header_component_spec.js
+++ b/spec/javascripts/pipelines/header_component_spec.js
@@ -34,6 +34,7 @@ describe('Pipeline details header', () => {
avatar_url: 'link',
},
retry_path: 'path',
+ delete_path: 'path',
},
isLoading: false,
};
@@ -55,12 +56,22 @@ describe('Pipeline details header', () => {
});
describe('action buttons', () => {
- it('should call postAction when button action is clicked', () => {
+ it('should call postAction when retry button action is clicked', done => {
eventHub.$on('headerPostAction', action => {
expect(action.path).toEqual('path');
+ done();
});
- vm.$el.querySelector('button').click();
+ vm.$el.querySelector('.js-retry-button').click();
+ });
+
+ it('should fire modal event when delete button action is clicked', done => {
+ vm.$root.$on('bv::modal::show', action => {
+ expect(action.componentId).toEqual('pipeline-delete-modal');
+ done();
+ });
+
+ vm.$el.querySelector('.js-btn-delete-pipeline').click();
});
});
});
diff --git a/spec/javascripts/pipelines/linked_pipelines_mock.json b/spec/javascripts/pipelines/linked_pipelines_mock.json
index b498903f804..60e214ddc32 100644
--- a/spec/javascripts/pipelines/linked_pipelines_mock.json
+++ b/spec/javascripts/pipelines/linked_pipelines_mock.json
@@ -341,6 +341,9 @@
"commit_url": "https://gitlab.com/gitlab-org/gitlab-runner/commit/8083eb0a920572214d0dccedd7981f05d535ad46",
"commit_path": "/gitlab-org/gitlab-runner/commit/8083eb0a920572214d0dccedd7981f05d535ad46"
},
+ "project": {
+ "id": 1794617
+ },
"triggered_by": {
"id": 12,
"user": {
diff --git a/spec/javascripts/polyfills/element_spec.js b/spec/javascripts/polyfills/element_spec.js
deleted file mode 100644
index d35df595c72..00000000000
--- a/spec/javascripts/polyfills/element_spec.js
+++ /dev/null
@@ -1,36 +0,0 @@
-import '~/commons/polyfills/element';
-
-describe('Element polyfills', function() {
- beforeEach(() => {
- this.element = document.createElement('ul');
- });
-
- describe('matches', () => {
- it('returns true if element matches the selector', () => {
- expect(this.element.matches('ul')).toBeTruthy();
- });
-
- it("returns false if element doesn't match the selector", () => {
- expect(this.element.matches('.not-an-element')).toBeFalsy();
- });
- });
-
- describe('closest', () => {
- beforeEach(() => {
- this.childElement = document.createElement('li');
- this.element.appendChild(this.childElement);
- });
-
- it('returns the closest parent that matches the selector', () => {
- expect(this.childElement.closest('ul').toString()).toBe(this.element.toString());
- });
-
- it('returns itself if it matches the selector', () => {
- expect(this.childElement.closest('li').toString()).toBe(this.childElement.toString());
- });
-
- it('returns undefined if nothing matches the selector', () => {
- expect(this.childElement.closest('.no-an-element')).toBeFalsy();
- });
- });
-});
diff --git a/spec/javascripts/project_select_combo_button_spec.js b/spec/javascripts/project_select_combo_button_spec.js
deleted file mode 100644
index dc85292c23e..00000000000
--- a/spec/javascripts/project_select_combo_button_spec.js
+++ /dev/null
@@ -1,124 +0,0 @@
-import $ from 'jquery';
-import ProjectSelectComboButton from '~/project_select_combo_button';
-
-const fixturePath = 'static/project_select_combo_button.html';
-
-describe('Project Select Combo Button', function() {
- preloadFixtures(fixturePath);
-
- beforeEach(function() {
- this.defaults = {
- label: 'Select project to create issue',
- groupId: 12345,
- projectMeta: {
- name: 'My Cool Project',
- url: 'http://mycoolproject.com',
- },
- newProjectMeta: {
- name: 'My Other Cool Project',
- url: 'http://myothercoolproject.com',
- },
- localStorageKey: 'group-12345-new-issue-recent-project',
- relativePath: 'issues/new',
- };
-
- loadFixtures(fixturePath);
-
- this.newItemBtn = document.querySelector('.new-project-item-link');
- this.projectSelectInput = document.querySelector('.project-item-select');
- });
-
- describe('on page load when localStorage is empty', function() {
- beforeEach(function() {
- this.comboButton = new ProjectSelectComboButton(this.projectSelectInput);
- });
-
- it('newItemBtn href is null', function() {
- expect(this.newItemBtn.getAttribute('href')).toBe('');
- });
-
- it('newItemBtn text is the plain default label', function() {
- expect(this.newItemBtn.textContent).toBe(this.defaults.label);
- });
- });
-
- describe('on page load when localStorage is filled', function() {
- beforeEach(function() {
- window.localStorage.setItem(
- this.defaults.localStorageKey,
- JSON.stringify(this.defaults.projectMeta),
- );
- this.comboButton = new ProjectSelectComboButton(this.projectSelectInput);
- });
-
- it('newItemBtn href is correctly set', function() {
- expect(this.newItemBtn.getAttribute('href')).toBe(this.defaults.projectMeta.url);
- });
-
- it('newItemBtn text is the cached label', function() {
- expect(this.newItemBtn.textContent).toBe(`New issue in ${this.defaults.projectMeta.name}`);
- });
-
- afterEach(function() {
- window.localStorage.clear();
- });
- });
-
- describe('after selecting a new project', function() {
- beforeEach(function() {
- this.comboButton = new ProjectSelectComboButton(this.projectSelectInput);
-
- // mock the effect of selecting an item from the projects dropdown (select2)
- $('.project-item-select')
- .val(JSON.stringify(this.defaults.newProjectMeta))
- .trigger('change');
- });
-
- it('newItemBtn href is correctly set', function() {
- expect(this.newItemBtn.getAttribute('href')).toBe('http://myothercoolproject.com/issues/new');
- });
-
- it('newItemBtn text is the selected project label', function() {
- expect(this.newItemBtn.textContent).toBe(`New issue in ${this.defaults.newProjectMeta.name}`);
- });
-
- afterEach(function() {
- window.localStorage.clear();
- });
- });
-
- describe('deriveTextVariants', function() {
- beforeEach(function() {
- this.mockExecutionContext = {
- resourceType: '',
- resourceLabel: '',
- };
-
- this.comboButton = new ProjectSelectComboButton(this.projectSelectInput);
-
- this.method = this.comboButton.deriveTextVariants.bind(this.mockExecutionContext);
- });
-
- it('correctly derives test variants for merge requests', function() {
- this.mockExecutionContext.resourceType = 'merge_requests';
- this.mockExecutionContext.resourceLabel = 'New merge request';
-
- const returnedVariants = this.method();
-
- expect(returnedVariants.localStorageItemType).toBe('new-merge-request');
- expect(returnedVariants.defaultTextPrefix).toBe('New merge request');
- expect(returnedVariants.presetTextSuffix).toBe('merge request');
- });
-
- it('correctly derives text variants for issues', function() {
- this.mockExecutionContext.resourceType = 'issues';
- this.mockExecutionContext.resourceLabel = 'New issue';
-
- const returnedVariants = this.method();
-
- expect(returnedVariants.localStorageItemType).toBe('new-issue');
- expect(returnedVariants.defaultTextPrefix).toBe('New issue');
- expect(returnedVariants.presetTextSuffix).toBe('issue');
- });
- });
-});
diff --git a/spec/javascripts/projects/project_import_gitlab_project_spec.js b/spec/javascripts/projects/project_import_gitlab_project_spec.js
index 126f73103e0..3c94934699d 100644
--- a/spec/javascripts/projects/project_import_gitlab_project_spec.js
+++ b/spec/javascripts/projects/project_import_gitlab_project_spec.js
@@ -1,25 +1,59 @@
import projectImportGitlab from '~/projects/project_import_gitlab_project';
describe('Import Gitlab project', () => {
- let projectName;
- beforeEach(() => {
- projectName = 'project';
- window.history.pushState({}, null, `?path=${projectName}`);
+ const pathName = 'my-project';
+ const projectName = 'My Project';
+
+ const setTestFixtures = url => {
+ window.history.pushState({}, null, url);
setFixtures(`
<input class="js-path-name" />
+ <input class="js-project-name" />
`);
projectImportGitlab();
+ };
+
+ beforeEach(() => {
+ setTestFixtures(`?name=${projectName}&path=${pathName}`);
});
afterEach(() => {
window.history.pushState({}, null, '');
});
- describe('path name', () => {
+ describe('project name', () => {
it('should fill in the project name derived from the previously filled project name', () => {
- expect(document.querySelector('.js-path-name').value).toEqual(projectName);
+ expect(document.querySelector('.js-project-name').value).toEqual(projectName);
+ });
+
+ describe('empty path name', () => {
+ it('derives the path name from the previously filled project name', () => {
+ const alternateProjectName = 'My Alt Project';
+ const alternatePathName = 'my-alt-project';
+
+ setTestFixtures(`?name=${alternateProjectName}`);
+
+ expect(document.querySelector('.js-path-name').value).toEqual(alternatePathName);
+ });
+ });
+ });
+
+ describe('path name', () => {
+ it('should fill in the path name derived from the previously filled path name', () => {
+ expect(document.querySelector('.js-path-name').value).toEqual(pathName);
+ });
+
+ describe('empty project name', () => {
+ it('derives the project name from the previously filled path name', () => {
+ const alternateProjectName = 'My Alt Project';
+ const alternatePathName = 'my-alt-project';
+
+ setTestFixtures(`?path=${alternatePathName}`);
+
+ expect(document.querySelector('.js-project-name').value).toEqual(alternateProjectName);
+ });
});
});
});
diff --git a/spec/javascripts/projects/project_new_spec.js b/spec/javascripts/projects/project_new_spec.js
index 106a3ba94e4..7c6ff90aff6 100644
--- a/spec/javascripts/projects/project_new_spec.js
+++ b/spec/javascripts/projects/project_new_spec.js
@@ -172,4 +172,34 @@ describe('New Project', () => {
expect($projectPath.val()).toEqual('my-dash-delimited-awesome-project');
});
});
+
+ describe('derivesProjectNameFromSlug', () => {
+ const dummyProjectPath = 'my-awesome-project';
+ const dummyProjectName = 'Original Awesome Project';
+
+ beforeEach(() => {
+ projectNew.bindEvents();
+ $projectPath.val('').change();
+ });
+
+ it('converts slug to humanized project name', () => {
+ $projectPath.val(dummyProjectPath);
+
+ projectNew.onProjectPathChange($projectName, $projectPath);
+
+ expect($projectName.val()).toEqual('My Awesome Project');
+ });
+
+ it('does not convert slug to humanized project name if a project name already exists', () => {
+ $projectName.val(dummyProjectName);
+ $projectPath.val(dummyProjectPath);
+ projectNew.onProjectPathChange(
+ $projectName,
+ $projectPath,
+ $projectName.val().trim().length > 0,
+ );
+
+ expect($projectName.val()).toEqual(dummyProjectName);
+ });
+ });
});
diff --git a/spec/javascripts/related_merge_requests/components/related_merge_requests_spec.js b/spec/javascripts/related_merge_requests/components/related_merge_requests_spec.js
index cf3ab4d4a68..d8bdf69dfee 100644
--- a/spec/javascripts/related_merge_requests/components/related_merge_requests_spec.js
+++ b/spec/javascripts/related_merge_requests/components/related_merge_requests_spec.js
@@ -22,7 +22,6 @@ describe('RelatedMergeRequests', () => {
wrapper = mount(localVue.extend(RelatedMergeRequests), {
localVue,
- sync: false,
store: createStore(),
propsData: {
endpoint: API_ENDPOINT,
diff --git a/spec/javascripts/sidebar/participants_spec.js b/spec/javascripts/sidebar/participants_spec.js
index 8d8ec5884f6..7e80e86f8ca 100644
--- a/spec/javascripts/sidebar/participants_spec.js
+++ b/spec/javascripts/sidebar/participants_spec.js
@@ -182,4 +182,21 @@ describe('Participants', function() {
expect(vm.$emit).toHaveBeenCalledWith('toggleSidebar');
});
});
+
+ describe('when not showing participants label', () => {
+ beforeEach(() => {
+ vm = mountComponent(Participants, {
+ participants: PARTICIPANT_LIST,
+ showParticipantLabel: false,
+ });
+ });
+
+ it('does not show sidebar collapsed icon', () => {
+ expect(vm.$el.querySelector('.sidebar-collapsed-icon')).not.toBeTruthy();
+ });
+
+ it('does not show participants label title', () => {
+ expect(vm.$el.querySelector('.title')).not.toBeTruthy();
+ });
+ });
});
diff --git a/spec/javascripts/sidebar/sidebar_store_spec.js b/spec/javascripts/sidebar/sidebar_store_spec.js
deleted file mode 100644
index 85ff70fffbd..00000000000
--- a/spec/javascripts/sidebar/sidebar_store_spec.js
+++ /dev/null
@@ -1,162 +0,0 @@
-import SidebarStore from '~/sidebar/stores/sidebar_store';
-import Mock from './mock_data';
-import UsersMockHelper from '../helpers/user_mock_data_helper';
-
-const ASSIGNEE = {
- id: 2,
- name: 'gitlab user 2',
- username: 'gitlab2',
- avatar_url: 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
-};
-
-const ANOTHER_ASSIGNEE = {
- id: 3,
- name: 'gitlab user 3',
- username: 'gitlab3',
- avatar_url: 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
-};
-
-const PARTICIPANT = {
- id: 1,
- state: 'active',
- username: 'marcene',
- name: 'Allie Will',
- web_url: 'foo.com',
- avatar_url: 'gravatar.com/avatar/xxx',
-};
-
-const PARTICIPANT_LIST = [PARTICIPANT, { ...PARTICIPANT, id: 2 }, { ...PARTICIPANT, id: 3 }];
-
-describe('Sidebar store', function() {
- beforeEach(() => {
- this.store = new SidebarStore({
- currentUser: {
- id: 1,
- name: 'Administrator',
- username: 'root',
- avatar_url:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- },
- editable: true,
- rootPath: '/',
- endpoint: '/gitlab-org/gitlab-shell/issues/5.json',
- });
- });
-
- afterEach(() => {
- SidebarStore.singleton = null;
- });
-
- it('has default isFetching values', () => {
- expect(this.store.isFetching.assignees).toBe(true);
- });
-
- it('adds a new assignee', () => {
- this.store.addAssignee(ASSIGNEE);
-
- expect(this.store.assignees.length).toEqual(1);
- });
-
- it('removes an assignee', () => {
- this.store.removeAssignee(ASSIGNEE);
-
- expect(this.store.assignees.length).toEqual(0);
- });
-
- it('finds an existent assignee', () => {
- let foundAssignee;
-
- this.store.addAssignee(ASSIGNEE);
- foundAssignee = this.store.findAssignee(ASSIGNEE);
-
- expect(foundAssignee).toBeDefined();
- expect(foundAssignee).toEqual(ASSIGNEE);
-    foundAssignee = this.store.findAssignee(ANOTHER_ASSIGNEE);
-
- expect(foundAssignee).toBeUndefined();
- });
-
- it('removes all assignees', () => {
- this.store.removeAllAssignees();
-
- expect(this.store.assignees.length).toEqual(0);
- });
-
- it('sets participants data', () => {
- expect(this.store.participants.length).toEqual(0);
-
- this.store.setParticipantsData({
- participants: PARTICIPANT_LIST,
- });
-
- expect(this.store.isFetching.participants).toEqual(false);
- expect(this.store.participants.length).toEqual(PARTICIPANT_LIST.length);
- });
-
-  it('sets subscriptions data', () => {
- expect(this.store.subscribed).toEqual(null);
-
- this.store.setSubscriptionsData({
- subscribed: true,
- });
-
- expect(this.store.isFetching.subscriptions).toEqual(false);
- expect(this.store.subscribed).toEqual(true);
- });
-
- it('set assigned data', () => {
- const users = {
- assignees: UsersMockHelper.createNumberRandomUsers(3),
- };
-
- this.store.setAssigneeData(users);
-
- expect(this.store.isFetching.assignees).toBe(false);
- expect(this.store.assignees.length).toEqual(3);
- });
-
- it('sets fetching state', () => {
- expect(this.store.isFetching.participants).toEqual(true);
-
- this.store.setFetchingState('participants', false);
-
- expect(this.store.isFetching.participants).toEqual(false);
- });
-
- it('sets loading state', () => {
- this.store.setLoadingState('assignees', true);
-
- expect(this.store.isLoading.assignees).toEqual(true);
- });
-
- it('set time tracking data', () => {
- this.store.setTimeTrackingData(Mock.time);
-
- expect(this.store.timeEstimate).toEqual(Mock.time.time_estimate);
- expect(this.store.totalTimeSpent).toEqual(Mock.time.total_time_spent);
- expect(this.store.humanTimeEstimate).toEqual(Mock.time.human_time_estimate);
- expect(this.store.humanTotalTimeSpent).toEqual(Mock.time.human_total_time_spent);
- });
-
- it('set autocomplete projects', () => {
- const projects = [{ id: 0 }];
- this.store.setAutocompleteProjects(projects);
-
- expect(this.store.autocompleteProjects).toEqual(projects);
- });
-
- it('sets subscribed state', () => {
- expect(this.store.subscribed).toEqual(null);
-
- this.store.setSubscribedState(true);
-
- expect(this.store.subscribed).toEqual(true);
- });
-
- it('set move to project ID', () => {
- const projectId = 7;
- this.store.setMoveToProjectId(projectId);
-
- expect(this.store.moveToProjectId).toEqual(projectId);
- });
-});
diff --git a/spec/javascripts/version_check_image_spec.js b/spec/javascripts/version_check_image_spec.js
deleted file mode 100644
index 0e69fcc4c5f..00000000000
--- a/spec/javascripts/version_check_image_spec.js
+++ /dev/null
@@ -1,35 +0,0 @@
-import $ from 'jquery';
-import VersionCheckImage from '~/version_check_image';
-import ClassSpecHelper from './helpers/class_spec_helper';
-
-describe('VersionCheckImage', function() {
- describe('bindErrorEvent', function() {
- ClassSpecHelper.itShouldBeAStaticMethod(VersionCheckImage, 'bindErrorEvent');
-
- beforeEach(function() {
- this.imageElement = $('<div></div>');
- });
-
- it('registers an error event', function() {
- spyOn($.prototype, 'on');
- spyOn($.prototype, 'off').and.callFake(function() {
- return this;
- });
-
- VersionCheckImage.bindErrorEvent(this.imageElement);
-
- expect($.prototype.off).toHaveBeenCalledWith('error');
- expect($.prototype.on).toHaveBeenCalledWith('error', jasmine.any(Function));
- });
-
- it('hides the imageElement on error', function() {
- spyOn($.prototype, 'hide');
-
- VersionCheckImage.bindErrorEvent(this.imageElement);
-
- this.imageElement.trigger('error');
-
- expect($.prototype.hide).toHaveBeenCalled();
- });
- });
-});
diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_alert_message_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_alert_message_spec.js
index bd567f1f93a..f78fcfb52b4 100644
--- a/spec/javascripts/vue_mr_widget/components/mr_widget_alert_message_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/mr_widget_alert_message_spec.js
@@ -11,7 +11,6 @@ describe('MrWidgetAlertMessage', () => {
wrapper = shallowMount(localVue.extend(MrWidgetAlertMessage), {
propsData: {},
localVue,
- sync: false,
});
});
diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_container_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_container_spec.js
index 2d6d22d66aa..76827cde093 100644
--- a/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_container_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_container_spec.js
@@ -16,7 +16,6 @@ describe('MrWidgetPipelineContainer', () => {
...props,
},
localVue,
- sync: false,
});
};
diff --git a/spec/javascripts/vue_mr_widget/components/review_app_link_spec.js b/spec/javascripts/vue_mr_widget/components/review_app_link_spec.js
index bd481f93413..242193c7b3d 100644
--- a/spec/javascripts/vue_mr_widget/components/review_app_link_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/review_app_link_spec.js
@@ -8,7 +8,10 @@ describe('review app link', () => {
const props = {
link: '/review',
cssClass: 'js-link',
- isCurrent: true,
+ display: {
+ text: 'View app',
+ tooltip: '',
+ },
};
let vm;
let el;
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
index a2fa098bf6b..6c44ffc6ec9 100644
--- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
@@ -634,20 +634,18 @@ describe('ReadyToMerge', () => {
});
describe('when user can merge and can delete branch', () => {
- let customVm;
-
beforeEach(() => {
- customVm = createComponent({
+ vm = createComponent({
mr: { canRemoveSourceBranch: true },
});
});
it('isRemoveSourceBranchButtonDisabled should be false', () => {
- expect(customVm.isRemoveSourceBranchButtonDisabled).toBe(false);
+ expect(vm.isRemoveSourceBranchButtonDisabled).toBe(false);
});
- it('should be enabled in rendered output', () => {
- const checkboxElement = customVm.$el.querySelector('#remove-source-branch-input');
+      it('remove source branch checkbox should be enabled in rendered output', () => {
+ const checkboxElement = vm.$el.querySelector('#remove-source-branch-input');
expect(checkboxElement).not.toBeNull();
});
@@ -926,22 +924,36 @@ describe('ReadyToMerge', () => {
});
describe('Commit message area', () => {
- it('when using merge commits, should show "Modify commit message" button', () => {
- const customVm = createComponent({
- mr: { ffOnlyEnabled: false },
+ describe('when using merge commits', () => {
+ beforeEach(() => {
+ vm = createComponent({
+ mr: { ffOnlyEnabled: false },
+ });
+ });
+
+ it('should not show fast forward message', () => {
+ expect(vm.$el.querySelector('.mr-fast-forward-message')).toBeNull();
});
- expect(customVm.$el.querySelector('.mr-fast-forward-message')).toBeNull();
- expect(customVm.$el.querySelector('.js-modify-commit-message-button')).toBeDefined();
+ it('should show "Modify commit message" button', () => {
+ expect(vm.$el.querySelector('.js-modify-commit-message-button')).toBeDefined();
+ });
});
- it('when fast-forward merge is enabled, only show fast-forward message', () => {
- const customVm = createComponent({
- mr: { ffOnlyEnabled: true },
+ describe('when fast-forward merge is enabled', () => {
+ beforeEach(() => {
+ vm = createComponent({
+ mr: { ffOnlyEnabled: true },
+ });
+ });
+
+ it('should show fast forward message', () => {
+ expect(vm.$el.querySelector('.mr-fast-forward-message')).toBeDefined();
});
- expect(customVm.$el.querySelector('.mr-fast-forward-message')).toBeDefined();
- expect(customVm.$el.querySelector('.js-modify-commit-message-button')).toBeNull();
+ it('should not show "Modify commit message" button', () => {
+ expect(vm.$el.querySelector('.js-modify-commit-message-button')).toBeNull();
+ });
});
});
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_squash_before_merge_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_squash_before_merge_spec.js
index cb656525f06..b70d580ed04 100644
--- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_squash_before_merge_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_squash_before_merge_spec.js
@@ -9,7 +9,6 @@ describe('Squash before merge component', () => {
const createComponent = props => {
wrapper = shallowMount(localVue.extend(SquashBeforeMerge), {
localVue,
- sync: false,
propsData: {
...props,
},
diff --git a/spec/javascripts/vue_shared/components/header_ci_component_spec.js b/spec/javascripts/vue_shared/components/header_ci_component_spec.js
index 7bd5e5a64b1..ea2eed2886a 100644
--- a/spec/javascripts/vue_shared/components/header_ci_component_spec.js
+++ b/spec/javascripts/vue_shared/components/header_ci_component_spec.js
@@ -31,17 +31,9 @@ describe('Header CI Component', () => {
{
label: 'Retry',
path: 'path',
- type: 'button',
cssClass: 'btn',
isLoading: false,
},
- {
- label: 'Go',
- path: 'path',
- type: 'link',
- cssClass: 'link',
- isLoading: false,
- },
],
hasSidebarButton: true,
};
@@ -77,11 +69,10 @@ describe('Header CI Component', () => {
});
it('should render provided actions', () => {
- expect(vm.$el.querySelector('.btn').tagName).toEqual('BUTTON');
- expect(vm.$el.querySelector('.btn').textContent.trim()).toEqual(props.actions[0].label);
- expect(vm.$el.querySelector('.link').tagName).toEqual('A');
- expect(vm.$el.querySelector('.link').textContent.trim()).toEqual(props.actions[1].label);
- expect(vm.$el.querySelector('.link').getAttribute('href')).toEqual(props.actions[0].path);
+ const btn = vm.$el.querySelector('.btn');
+
+ expect(btn.tagName).toEqual('BUTTON');
+ expect(btn.textContent.trim()).toEqual(props.actions[0].label);
});
it('should show loading icon', done => {
diff --git a/spec/javascripts/vue_shared/components/issue/related_issuable_mock_data.js b/spec/javascripts/vue_shared/components/issue/related_issuable_mock_data.js
index 26bfdd7551e..92080cb9bd5 100644
--- a/spec/javascripts/vue_shared/components/issue/related_issuable_mock_data.js
+++ b/spec/javascripts/vue_shared/components/issue/related_issuable_mock_data.js
@@ -6,40 +6,43 @@ export const defaultProps = {
export const issuable1 = {
id: 200,
- epic_issue_id: 1,
+ epicIssueId: 1,
confidential: false,
reference: 'foo/bar#123',
displayReference: '#123',
title: 'some title',
path: '/foo/bar/issues/123',
state: 'opened',
+ linkType: 'relates_to',
};
export const issuable2 = {
id: 201,
- epic_issue_id: 2,
+ epicIssueId: 2,
confidential: false,
reference: 'foo/bar#124',
displayReference: '#124',
title: 'some other thing',
path: '/foo/bar/issues/124',
state: 'opened',
+ linkType: 'blocks',
};
export const issuable3 = {
id: 202,
- epic_issue_id: 3,
+ epicIssueId: 3,
confidential: false,
reference: 'foo/bar#125',
displayReference: '#125',
title: 'some other other thing',
path: '/foo/bar/issues/125',
state: 'opened',
+ linkType: 'is_blocked_by',
};
export const issuable4 = {
id: 203,
- epic_issue_id: 4,
+ epicIssueId: 4,
confidential: false,
reference: 'foo/bar#126',
displayReference: '#126',
@@ -50,7 +53,7 @@ export const issuable4 = {
export const issuable5 = {
id: 204,
- epic_issue_id: 5,
+ epicIssueId: 5,
confidential: false,
reference: 'foo/bar#127',
displayReference: '#127',
diff --git a/spec/javascripts/vue_shared/components/loading_button_spec.js b/spec/javascripts/vue_shared/components/loading_button_spec.js
deleted file mode 100644
index 6b03c354e01..00000000000
--- a/spec/javascripts/vue_shared/components/loading_button_spec.js
+++ /dev/null
@@ -1,111 +0,0 @@
-import Vue from 'vue';
-import mountComponent from 'spec/helpers/vue_mount_component_helper';
-import loadingButton from '~/vue_shared/components/loading_button.vue';
-
-const LABEL = 'Hello';
-
-describe('LoadingButton', function() {
- let vm;
- let LoadingButton;
-
- beforeEach(() => {
- LoadingButton = Vue.extend(loadingButton);
- });
-
- afterEach(() => {
- vm.$destroy();
- });
-
- describe('loading spinner', () => {
- it('shown when loading', () => {
- vm = mountComponent(LoadingButton, {
- loading: true,
- });
-
- expect(vm.$el.querySelector('.js-loading-button-icon')).toBeDefined();
- });
- });
-
- describe('disabled state', () => {
- it('disabled when loading', () => {
- vm = mountComponent(LoadingButton, {
- loading: true,
- });
-
- expect(vm.$el.disabled).toEqual(true);
- });
-
- it('not disabled when normal', () => {
- vm = mountComponent(LoadingButton, {
- loading: false,
- });
-
- expect(vm.$el.disabled).toEqual(false);
- });
- });
-
- describe('label', () => {
- it('shown when normal', () => {
- vm = mountComponent(LoadingButton, {
- loading: false,
- label: LABEL,
- });
- const label = vm.$el.querySelector('.js-loading-button-label');
-
- expect(label.textContent.trim()).toEqual(LABEL);
- });
-
- it('shown when loading', () => {
- vm = mountComponent(LoadingButton, {
- loading: true,
- label: LABEL,
- });
- const label = vm.$el.querySelector('.js-loading-button-label');
-
- expect(label.textContent.trim()).toEqual(LABEL);
- });
- });
-
- describe('container class', () => {
- it('should default to btn btn-align-content', () => {
- vm = mountComponent(LoadingButton, {});
-
- expect(vm.$el.classList.contains('btn')).toEqual(true);
- expect(vm.$el.classList.contains('btn-align-content')).toEqual(true);
- });
-
- it('should be configurable through props', () => {
- vm = mountComponent(LoadingButton, {
- containerClass: 'test-class',
- });
-
- expect(vm.$el.classList.contains('btn')).toEqual(false);
- expect(vm.$el.classList.contains('btn-align-content')).toEqual(false);
- expect(vm.$el.classList.contains('test-class')).toEqual(true);
- });
- });
-
- describe('click callback prop', () => {
- it('calls given callback when normal', () => {
- vm = mountComponent(LoadingButton, {
- loading: false,
- });
- spyOn(vm, '$emit');
-
- vm.$el.click();
-
- expect(vm.$emit).toHaveBeenCalledWith('click', jasmine.any(Object));
- });
-
- it('does not call given callback when disabled because of loading', () => {
- vm = mountComponent(LoadingButton, {
- loading: true,
- });
- spyOn(vm, '$emit');
-
- vm.$el.click();
-
- expect(vm.$emit).not.toHaveBeenCalled();
- });
- });
-});
diff --git a/spec/javascripts/vue_shared/components/pagination/graphql_pagination_spec.js b/spec/javascripts/vue_shared/components/pagination/graphql_pagination_spec.js
index 204c0decfd8..9e72a0e2480 100644
--- a/spec/javascripts/vue_shared/components/pagination/graphql_pagination_spec.js
+++ b/spec/javascripts/vue_shared/components/pagination/graphql_pagination_spec.js
@@ -11,7 +11,6 @@ describe('Graphql Pagination component', () => {
hasNextPage,
hasPreviousPage,
},
- sync: false,
localVue,
});
}
diff --git a/spec/javascripts/vue_shared/components/project_selector/project_list_item_spec.js b/spec/javascripts/vue_shared/components/project_selector/project_list_item_spec.js
index 271ae1b645f..e73fb97b741 100644
--- a/spec/javascripts/vue_shared/components/project_selector/project_list_item_spec.js
+++ b/spec/javascripts/vue_shared/components/project_selector/project_list_item_spec.js
@@ -18,7 +18,6 @@ describe('ProjectListItem component', () => {
project,
selected: false,
},
- sync: false,
localVue,
};
});
diff --git a/spec/javascripts/vue_shared/components/project_selector/project_selector_spec.js b/spec/javascripts/vue_shared/components/project_selector/project_selector_spec.js
index 6815da31436..2b60ea0fd74 100644
--- a/spec/javascripts/vue_shared/components/project_selector/project_selector_spec.js
+++ b/spec/javascripts/vue_shared/components/project_selector/project_selector_spec.js
@@ -33,7 +33,6 @@ describe('ProjectSelector component', () => {
showLoadingIndicator: false,
showSearchErrorMessage: false,
},
- sync: false,
attachToDocument: true,
});
diff --git a/spec/javascripts/vue_shared/components/tooltip_on_truncate_spec.js b/spec/javascripts/vue_shared/components/tooltip_on_truncate_spec.js
index ad8d5a53291..a8d39b7b5fe 100644
--- a/spec/javascripts/vue_shared/components/tooltip_on_truncate_spec.js
+++ b/spec/javascripts/vue_shared/components/tooltip_on_truncate_spec.js
@@ -15,7 +15,6 @@ describe('TooltipOnTruncate component', () => {
const createComponent = ({ propsData, ...options } = {}) => {
wrapper = shallowMount(localVue.extend(TooltipOnTruncate), {
localVue,
- sync: false,
attachToDocument: true,
propsData: {
title: TEST_TITLE,
diff --git a/spec/lib/api/helpers/pagination_spec.rb b/spec/lib/api/helpers/pagination_spec.rb
index 2d5bec2e752..796c753d6c4 100644
--- a/spec/lib/api/helpers/pagination_spec.rb
+++ b/spec/lib/api/helpers/pagination_spec.rb
@@ -5,70 +5,14 @@ require 'spec_helper'
describe API::Helpers::Pagination do
subject { Class.new.include(described_class).new }
- let(:expected_result) { double("result", to_a: double) }
- let(:relation) { double("relation") }
- let(:params) { {} }
+ let(:paginator) { double('paginator') }
+ let(:relation) { double('relation') }
+ let(:expected_result) { double('expected result') }
- before do
- allow(subject).to receive(:params).and_return(params)
- end
-
- describe '#paginate' do
- let(:offset_pagination) { double("offset pagination") }
-
- it 'delegates to OffsetPagination' do
- expect(::Gitlab::Pagination::OffsetPagination).to receive(:new).with(subject).and_return(offset_pagination)
- expect(offset_pagination).to receive(:paginate).with(relation).and_return(expected_result)
-
- result = subject.paginate(relation)
-
- expect(result).to eq(expected_result)
- end
- end
-
- describe '#paginate_and_retrieve!' do
- context 'for offset pagination' do
- before do
- allow(Gitlab::Pagination::Keyset).to receive(:available?).and_return(false)
- end
-
- it 'delegates to paginate' do
- expect(subject).to receive(:paginate).with(relation).and_return(expected_result)
-
- result = subject.paginate_and_retrieve!(relation)
-
- expect(result).to eq(expected_result.to_a)
- end
- end
-
- context 'for keyset pagination' do
- let(:params) { { pagination: 'keyset' } }
- let(:request_context) { double('request context') }
-
- before do
- allow(Gitlab::Pagination::Keyset::RequestContext).to receive(:new).with(subject).and_return(request_context)
- end
-
- context 'when keyset pagination is available' do
- it 'delegates to KeysetPagination' do
- expect(Gitlab::Pagination::Keyset).to receive(:available?).and_return(true)
- expect(Gitlab::Pagination::Keyset).to receive(:paginate).with(request_context, relation).and_return(expected_result)
-
- result = subject.paginate_and_retrieve!(relation)
-
- expect(result).to eq(expected_result.to_a)
- end
- end
-
- context 'when keyset pagination is not available' do
- it 'renders a 501 error if keyset pagination isnt available yet' do
- expect(Gitlab::Pagination::Keyset).to receive(:available?).with(request_context, relation).and_return(false)
- expect(Gitlab::Pagination::Keyset).not_to receive(:paginate)
- expect(subject).to receive(:error!).with(/not yet available/, 405)
+ it 'delegates to OffsetPagination' do
+ expect(Gitlab::Pagination::OffsetPagination).to receive(:new).with(subject).and_return(paginator)
+ expect(paginator).to receive(:paginate).with(relation).and_return(expected_result)
- subject.paginate_and_retrieve!(relation)
- end
- end
- end
+ expect(subject.paginate(relation)).to eq(expected_result)
end
end
diff --git a/spec/lib/api/helpers/pagination_strategies_spec.rb b/spec/lib/api/helpers/pagination_strategies_spec.rb
new file mode 100644
index 00000000000..a418c09a824
--- /dev/null
+++ b/spec/lib/api/helpers/pagination_strategies_spec.rb
@@ -0,0 +1,97 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe API::Helpers::PaginationStrategies do
+ subject { Class.new.include(described_class).new }
+
+ let(:expected_result) { double("result") }
+ let(:relation) { double("relation") }
+ let(:params) { {} }
+
+ before do
+ allow(subject).to receive(:params).and_return(params)
+ end
+
+ describe '#paginate_with_strategies' do
+ let(:paginator) { double("paginator", paginate: expected_result, finalize: nil) }
+
+ before do
+ allow(subject).to receive(:paginator).with(relation).and_return(paginator)
+ end
+
+ it 'yields paginated relation' do
+ expect { |b| subject.paginate_with_strategies(relation, &b) }.to yield_with_args(expected_result)
+ end
+
+ it 'calls #finalize with first value returned from block' do
+ return_value = double
+ expect(paginator).to receive(:finalize).with(return_value)
+
+ subject.paginate_with_strategies(relation) do |records|
+ some_options = {}
+ [return_value, some_options]
+ end
+ end
+
+ it 'returns whatever the block returns' do
+ return_value = [double, double]
+
+ result = subject.paginate_with_strategies(relation) do |records|
+ return_value
+ end
+
+ expect(result).to eq(return_value)
+ end
+ end
+
+ describe '#paginator' do
+ context 'offset pagination' do
+ let(:paginator) { double("paginator") }
+
+ before do
+ allow(subject).to receive(:keyset_pagination_enabled?).and_return(false)
+ end
+
+ it 'delegates to OffsetPagination' do
+ expect(Gitlab::Pagination::OffsetPagination).to receive(:new).with(subject).and_return(paginator)
+
+ expect(subject.paginator(relation)).to eq(paginator)
+ end
+ end
+
+ context 'for keyset pagination' do
+ let(:params) { { pagination: 'keyset' } }
+ let(:request_context) { double('request context') }
+ let(:pager) { double('pager') }
+
+ before do
+ allow(subject).to receive(:keyset_pagination_enabled?).and_return(true)
+ allow(Gitlab::Pagination::Keyset::RequestContext).to receive(:new).with(subject).and_return(request_context)
+ end
+
+ context 'when keyset pagination is available' do
+ before do
+ allow(Gitlab::Pagination::Keyset).to receive(:available?).and_return(true)
+ allow(Gitlab::Pagination::Keyset::Pager).to receive(:new).with(request_context).and_return(pager)
+ end
+
+ it 'delegates to Pager' do
+ expect(subject.paginator(relation)).to eq(pager)
+ end
+ end
+
+ context 'when keyset pagination is not available' do
+ before do
+ allow(Gitlab::Pagination::Keyset).to receive(:available?).with(request_context, relation).and_return(false)
+ end
+
+        it 'renders a 405 error' do
+ expect(subject).to receive(:error!).with(/not yet available/, 405)
+
+ subject.paginator(relation)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/banzai/filter/abstract_reference_filter_spec.rb b/spec/lib/banzai/filter/abstract_reference_filter_spec.rb
index 3e8b0ea113f..798112d0f53 100644
--- a/spec/lib/banzai/filter/abstract_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/abstract_reference_filter_spec.rb
@@ -3,30 +3,27 @@
require 'spec_helper'
describe Banzai::Filter::AbstractReferenceFilter do
- let(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
+
+ let(:doc) { Nokogiri::HTML.fragment('') }
+ let(:filter) { described_class.new(doc, project: project) }
describe '#references_per_parent' do
- it 'returns a Hash containing references grouped per parent paths' do
- doc = Nokogiri::HTML.fragment("#1 #{project.full_path}#2")
- filter = described_class.new(doc, project: project)
+ let(:doc) { Nokogiri::HTML.fragment("#1 #{project.full_path}#2 #2") }
- expect(filter).to receive(:object_class).exactly(4).times.and_return(Issue)
- expect(filter).to receive(:object_sym).twice.and_return(:issue)
+ it 'returns a Hash containing references grouped per parent paths' do
+ expect(described_class).to receive(:object_class).exactly(6).times.and_return(Issue)
refs = filter.references_per_parent
- expect(refs).to be_an_instance_of(Hash)
- expect(refs[project.full_path]).to eq(Set.new(%w[1 2]))
+ expect(refs).to match(a_hash_including(project.full_path => contain_exactly(1, 2)))
end
end
describe '#parent_per_reference' do
it 'returns a Hash containing projects grouped per parent paths' do
- doc = Nokogiri::HTML.fragment('')
- filter = described_class.new(doc, project: project)
-
expect(filter).to receive(:references_per_parent)
- .and_return({ project.full_path => Set.new(%w[1]) })
+ .and_return({ project.full_path => Set.new([1]) })
expect(filter.parent_per_reference)
.to eq({ project.full_path => project })
@@ -34,9 +31,6 @@ describe Banzai::Filter::AbstractReferenceFilter do
end
describe '#find_for_paths' do
- let(:doc) { Nokogiri::HTML.fragment('') }
- let(:filter) { described_class.new(doc, project: project) }
-
context 'with RequestStore disabled' do
it 'returns a list of Projects for a list of paths' do
expect(filter.find_for_paths([project.full_path]))
diff --git a/spec/lib/banzai/filter/plantuml_filter_spec.rb b/spec/lib/banzai/filter/plantuml_filter_spec.rb
index 713bab4527b..abe525ac47a 100644
--- a/spec/lib/banzai/filter/plantuml_filter_spec.rb
+++ b/spec/lib/banzai/filter/plantuml_filter_spec.rb
@@ -26,7 +26,7 @@ describe Banzai::Filter::PlantumlFilter do
it 'does not replace plantuml pre tag with img tag if url is invalid' do
stub_application_setting(plantuml_enabled: true, plantuml_url: "invalid")
input = '<pre><code lang="plantuml">Bob -> Sara : Hello</code></pre>'
- output = '<div class="listingblock"><div class="content"><pre class="plantuml plantuml-error"> PlantUML Error: cannot connect to PlantUML server at "invalid"</pre></div></div>'
+ output = '<div class="listingblock"><div class="content"><pre class="plantuml plantuml-error"> Error: cannot connect to PlantUML server at "invalid"</pre></div></div>'
doc = filter(input)
expect(doc.to_s).to eq output
diff --git a/spec/lib/banzai/filter/relative_link_filter_spec.rb b/spec/lib/banzai/filter/repository_link_filter_spec.rb
index 9f467d7a6fd..c87f452a3df 100644
--- a/spec/lib/banzai/filter/relative_link_filter_spec.rb
+++ b/spec/lib/banzai/filter/repository_link_filter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Banzai::Filter::RelativeLinkFilter do
+describe Banzai::Filter::RepositoryLinkFilter do
include GitHelpers
include RepoHelpers
@@ -128,11 +128,6 @@ describe Banzai::Filter::RelativeLinkFilter do
expect { filter(act) }.not_to raise_error
end
- it 'does not raise an exception on URIs containing invalid utf-8 byte sequences in uploads' do
- act = link("/uploads/%FF")
- expect { filter(act) }.not_to raise_error
- end
-
it 'does not raise an exception on URIs containing invalid utf-8 byte sequences in context requested path' do
expect { filter(link("files/test.md"), requested_path: '%FF') }.not_to raise_error
end
@@ -147,11 +142,6 @@ describe Banzai::Filter::RelativeLinkFilter do
expect { filter(act) }.not_to raise_error
end
- it 'does not raise an exception with a space in the path' do
- act = link("/uploads/d18213acd3732630991986120e167e3d/Landscape_8.jpg \nBut here's some more unexpected text :smile:)")
- expect { filter(act) }.not_to raise_error
- end
-
it 'ignores ref if commit is passed' do
doc = filter(link('non/existent.file'), commit: project.commit('empty-branch') )
expect(doc.at_css('a')['href'])
@@ -350,166 +340,4 @@ describe Banzai::Filter::RelativeLinkFilter do
include_examples :valid_repository
end
-
- context 'with a /upload/ URL' do
- # not needed
- let(:commit) { nil }
- let(:ref) { nil }
- let(:requested_path) { nil }
- let(:upload_path) { '/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg' }
- let(:relative_path) { "/#{project.full_path}#{upload_path}" }
-
- context 'to a project upload' do
- shared_examples 'rewrite project uploads' do
- context 'with an absolute URL' do
- let(:absolute_path) { Gitlab.config.gitlab.url + relative_path }
- let(:only_path) { false }
-
- it 'rewrites the link correctly' do
- doc = filter(link(upload_path))
-
- expect(doc.at_css('a')['href']).to eq(absolute_path)
- end
- end
-
- it 'rebuilds relative URL for a link' do
- doc = filter(link(upload_path))
- expect(doc.at_css('a')['href']).to eq(relative_path)
-
- doc = filter(nested(link(upload_path)))
- expect(doc.at_css('a')['href']).to eq(relative_path)
- end
-
- it 'rebuilds relative URL for an image' do
- doc = filter(image(upload_path))
- expect(doc.at_css('img')['src']).to eq(relative_path)
-
- doc = filter(nested(image(upload_path)))
- expect(doc.at_css('img')['src']).to eq(relative_path)
- end
-
- it 'does not modify absolute URL' do
- doc = filter(link('http://example.com'))
- expect(doc.at_css('a')['href']).to eq 'http://example.com'
- end
-
- it 'supports unescaped Unicode filenames' do
- path = '/uploads/한글.png'
- doc = filter(link(path))
-
- expect(doc.at_css('a')['href']).to eq("/#{project.full_path}/uploads/%ED%95%9C%EA%B8%80.png")
- end
-
- it 'supports escaped Unicode filenames' do
- path = '/uploads/한글.png'
- escaped = Addressable::URI.escape(path)
- doc = filter(image(escaped))
-
- expect(doc.at_css('img')['src']).to eq("/#{project.full_path}/uploads/%ED%95%9C%EA%B8%80.png")
- end
- end
-
- context 'without project repository access' do
- let(:project) { create(:project, :repository, repository_access_level: ProjectFeature::PRIVATE) }
-
- it_behaves_like 'rewrite project uploads'
- end
-
- context 'with project repository access' do
- it_behaves_like 'rewrite project uploads'
- end
- end
-
- context 'to a group upload' do
- let(:upload_link) { link('/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg') }
- let(:group) { create(:group) }
- let(:project) { nil }
- let(:relative_path) { "/groups/#{group.full_path}/-/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg" }
-
- context 'with an absolute URL' do
- let(:absolute_path) { Gitlab.config.gitlab.url + relative_path }
- let(:only_path) { false }
-
- it 'rewrites the link correctly' do
- doc = filter(upload_link)
-
- expect(doc.at_css('a')['href']).to eq(absolute_path)
- end
- end
-
- it 'rewrites the link correctly' do
- doc = filter(upload_link)
-
- expect(doc.at_css('a')['href']).to eq(relative_path)
- end
-
- it 'rewrites the link correctly for subgroup' do
- group.update!(parent: create(:group))
-
- doc = filter(upload_link)
-
- expect(doc.at_css('a')['href']).to eq(relative_path)
- end
-
- it 'does not modify absolute URL' do
- doc = filter(link('http://example.com'))
-
- expect(doc.at_css('a')['href']).to eq 'http://example.com'
- end
- end
-
- context 'to a personal snippet' do
- let(:group) { nil }
- let(:project) { nil }
- let(:relative_path) { '/uploads/-/system/personal_snippet/6/674e4f07fbf0a7736c3439212896e51a/example.tar.gz' }
-
- context 'with an absolute URL' do
- let(:absolute_path) { Gitlab.config.gitlab.url + relative_path }
- let(:only_path) { false }
-
- it 'rewrites the link correctly' do
- doc = filter(link(relative_path))
-
- expect(doc.at_css('a')['href']).to eq(absolute_path)
- end
- end
-
- context 'with a relative URL root' do
- let(:gitlab_root) { '/gitlab' }
- let(:absolute_path) { Gitlab.config.gitlab.url + gitlab_root + relative_path }
-
- before do
- stub_config_setting(relative_url_root: gitlab_root)
- end
-
- context 'with an absolute URL' do
- let(:only_path) { false }
-
- it 'rewrites the link correctly' do
- doc = filter(link(relative_path))
-
- expect(doc.at_css('a')['href']).to eq(absolute_path)
- end
- end
-
- it 'rewrites the link correctly' do
- doc = filter(link(relative_path))
-
- expect(doc.at_css('a')['href']).to eq(gitlab_root + relative_path)
- end
- end
-
- it 'rewrites the link correctly' do
- doc = filter(link(relative_path))
-
- expect(doc.at_css('a')['href']).to eq(relative_path)
- end
-
- it 'does not modify absolute URL' do
- doc = filter(link('http://example.com'))
-
- expect(doc.at_css('a')['href']).to eq 'http://example.com'
- end
- end
- end
end
diff --git a/spec/lib/banzai/filter/upload_link_filter_spec.rb b/spec/lib/banzai/filter/upload_link_filter_spec.rb
new file mode 100644
index 00000000000..3f181dce7bc
--- /dev/null
+++ b/spec/lib/banzai/filter/upload_link_filter_spec.rb
@@ -0,0 +1,221 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Banzai::Filter::UploadLinkFilter do
+ def filter(doc, contexts = {})
+ contexts.reverse_merge!(
+ project: project,
+ group: group,
+ only_path: only_path
+ )
+
+ described_class.call(doc, contexts)
+ end
+
+ def image(path)
+ %(<img src="#{path}" />)
+ end
+
+ def video(path)
+ %(<video src="#{path}"></video>)
+ end
+
+ def audio(path)
+ %(<audio src="#{path}"></audio>)
+ end
+
+ def link(path)
+ %(<a href="#{path}">#{path}</a>)
+ end
+
+ def nested(element)
+ %(<div>#{element}</div>)
+ end
+
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:user) { create(:user) }
+ let(:group) { nil }
+ let(:project_path) { project.full_path }
+ let(:only_path) { true }
+ let(:upload_path) { '/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg' }
+ let(:relative_path) { "/#{project.full_path}#{upload_path}" }
+
+ context 'to a project upload' do
+ context 'with an absolute URL' do
+ let(:absolute_path) { Gitlab.config.gitlab.url + relative_path }
+ let(:only_path) { false }
+
+ it 'rewrites the link correctly' do
+ doc = filter(link(upload_path))
+
+ expect(doc.at_css('a')['href']).to eq(absolute_path)
+ expect(doc.at_css('a').classes).to include('gfm')
+ end
+ end
+
+ it 'rebuilds relative URL for a link' do
+ doc = filter(link(upload_path))
+
+ expect(doc.at_css('a')['href']).to eq(relative_path)
+ expect(doc.at_css('a').classes).to include('gfm')
+
+ doc = filter(nested(link(upload_path)))
+
+ expect(doc.at_css('a')['href']).to eq(relative_path)
+ expect(doc.at_css('a').classes).to include('gfm')
+ end
+
+ it 'rebuilds relative URL for an image' do
+ doc = filter(image(upload_path))
+
+ expect(doc.at_css('img')['src']).to eq(relative_path)
+ expect(doc.at_css('img').classes).to include('gfm')
+
+ doc = filter(nested(image(upload_path)))
+
+ expect(doc.at_css('img')['src']).to eq(relative_path)
+ expect(doc.at_css('img').classes).to include('gfm')
+ end
+
+ it 'does not modify absolute URL' do
+ doc = filter(link('http://example.com'))
+
+ expect(doc.at_css('a')['href']).to eq 'http://example.com'
+ expect(doc.at_css('a').classes).not_to include('gfm')
+ end
+
+ it 'supports unescaped Unicode filenames' do
+ path = '/uploads/한글.png'
+ doc = filter(link(path))
+
+ expect(doc.at_css('a')['href']).to eq("/#{project.full_path}/uploads/%ED%95%9C%EA%B8%80.png")
+ expect(doc.at_css('a').classes).to include('gfm')
+ end
+
+ it 'supports escaped Unicode filenames' do
+ path = '/uploads/한글.png'
+ escaped = Addressable::URI.escape(path)
+ doc = filter(image(escaped))
+
+ expect(doc.at_css('img')['src']).to eq("/#{project.full_path}/uploads/%ED%95%9C%EA%B8%80.png")
+ expect(doc.at_css('img').classes).to include('gfm')
+ end
+ end
+
+ context 'to a group upload' do
+ let(:upload_link) { link('/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg') }
+ let_it_be(:group) { create(:group) }
+ let(:project) { nil }
+ let(:relative_path) { "/groups/#{group.full_path}/-/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg" }
+
+ context 'with an absolute URL' do
+ let(:absolute_path) { Gitlab.config.gitlab.url + relative_path }
+ let(:only_path) { false }
+
+ it 'rewrites the link correctly' do
+ doc = filter(upload_link)
+
+ expect(doc.at_css('a')['href']).to eq(absolute_path)
+ expect(doc.at_css('a').classes).to include('gfm')
+ end
+ end
+
+ it 'rewrites the link correctly' do
+ doc = filter(upload_link)
+
+ expect(doc.at_css('a')['href']).to eq(relative_path)
+ expect(doc.at_css('a').classes).to include('gfm')
+ end
+
+ it 'rewrites the link correctly for subgroup' do
+ group.update!(parent: create(:group))
+
+ doc = filter(upload_link)
+
+ expect(doc.at_css('a')['href']).to eq(relative_path)
+ expect(doc.at_css('a').classes).to include('gfm')
+ end
+
+ it 'does not modify absolute URL' do
+ doc = filter(link('http://example.com'))
+
+ expect(doc.at_css('a')['href']).to eq 'http://example.com'
+ expect(doc.at_css('a').classes).not_to include('gfm')
+ end
+ end
+
+ context 'to a personal snippet' do
+ let(:group) { nil }
+ let(:project) { nil }
+ let(:relative_path) { '/uploads/-/system/personal_snippet/6/674e4f07fbf0a7736c3439212896e51a/example.tar.gz' }
+
+ context 'with an absolute URL' do
+ let(:absolute_path) { Gitlab.config.gitlab.url + relative_path }
+ let(:only_path) { false }
+
+ it 'rewrites the link correctly' do
+ doc = filter(link(relative_path))
+
+ expect(doc.at_css('a')['href']).to eq(absolute_path)
+ expect(doc.at_css('a').classes).to include('gfm')
+ end
+ end
+
+ context 'with a relative URL root' do
+ let(:gitlab_root) { '/gitlab' }
+ let(:absolute_path) { Gitlab.config.gitlab.url + gitlab_root + relative_path }
+
+ before do
+ stub_config_setting(relative_url_root: gitlab_root)
+ end
+
+ context 'with an absolute URL' do
+ let(:only_path) { false }
+
+ it 'rewrites the link correctly' do
+ doc = filter(link(relative_path))
+
+ expect(doc.at_css('a')['href']).to eq(absolute_path)
+ expect(doc.at_css('a').classes).to include('gfm')
+ end
+ end
+
+ it 'rewrites the link correctly' do
+ doc = filter(link(relative_path))
+
+ expect(doc.at_css('a')['href']).to eq(gitlab_root + relative_path)
+ expect(doc.at_css('a').classes).to include('gfm')
+ end
+ end
+
+ it 'rewrites the link correctly' do
+ doc = filter(link(relative_path))
+
+ expect(doc.at_css('a')['href']).to eq(relative_path)
+ expect(doc.at_css('a').classes).to include('gfm')
+ end
+
+ it 'does not modify absolute URL' do
+ doc = filter(link('http://example.com'))
+
+ expect(doc.at_css('a')['href']).to eq 'http://example.com'
+ expect(doc.at_css('a').classes).not_to include('gfm')
+ end
+ end
+
+ context 'invalid input' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:name, :href) do
+ 'invalid URI' | '://foo'
+ 'invalid UTF-8 byte sequences' | '%FF'
+ 'garbled path' | 'open(/var/tmp/):%20/location%0Afrom:%20/test'
+ 'whitespace' | "d18213acd3732630991986120e167e3d/Landscape_8.jpg\nand more"
+ end
+
+ with_them do
+ it { expect { filter(link("/uploads/#{href}")) }.not_to raise_error }
+ end
+ end
+end
diff --git a/spec/lib/banzai/pipeline/post_process_pipeline_spec.rb b/spec/lib/banzai/pipeline/post_process_pipeline_spec.rb
new file mode 100644
index 00000000000..ab72354edcf
--- /dev/null
+++ b/spec/lib/banzai/pipeline/post_process_pipeline_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Banzai::Pipeline::PostProcessPipeline do
+ context 'when a document only has upload links' do
+ it 'does not make any Gitaly calls', :request_store do
+ markdown = <<-MARKDOWN.strip_heredoc
+ [Relative Upload Link](/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg)
+
+ ![Relative Upload Image](/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg)
+ MARKDOWN
+
+ context = {
+ project: create(:project, :public, :repository),
+ ref: 'master'
+ }
+
+ Gitlab::GitalyClient.reset_counts
+
+ described_class.call(markdown, context)
+
+ expect(Gitlab::GitalyClient.get_request_count).to eq(0)
+ end
+ end
+end
diff --git a/spec/lib/banzai/pipeline/wiki_pipeline_spec.rb b/spec/lib/banzai/pipeline/wiki_pipeline_spec.rb
index e1814ea403e..8c009bc409b 100644
--- a/spec/lib/banzai/pipeline/wiki_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/wiki_pipeline_spec.rb
@@ -230,7 +230,7 @@ describe Banzai::Pipeline::WikiPipeline do
]
invalid_slugs.each do |slug|
- context "with the invalid slug #{slug}" do
+ context "with the invalid slug #{slug.delete("\000")}" do
invalid_js_links.each do |link|
it "doesn't include a prohibited slug in a (.) relative link '#{link}'" do
output = described_class.to_html(
diff --git a/spec/lib/banzai/reference_parser/base_parser_spec.rb b/spec/lib/banzai/reference_parser/base_parser_spec.rb
index 7897164d985..b1002c1db25 100644
--- a/spec/lib/banzai/reference_parser/base_parser_spec.rb
+++ b/spec/lib/banzai/reference_parser/base_parser_spec.rb
@@ -312,6 +312,12 @@ describe Banzai::ReferenceParser::BaseParser do
expect(subject.collection_objects_for_ids(Project, [project.id]))
.to eq([project])
end
+
+ it 'will not overflow the stack' do
+ ids = 1.upto(1_000_000).to_a
+
+ expect { subject.collection_objects_for_ids(User, ids) }.not_to raise_error
+ end
end
end
diff --git a/spec/lib/expand_variables_spec.rb b/spec/lib/expand_variables_spec.rb
index 394efa85701..1b8ec2b1979 100644
--- a/spec/lib/expand_variables_spec.rb
+++ b/spec/lib/expand_variables_spec.rb
@@ -100,7 +100,7 @@ describe ExpandVariables do
end
with_them do
- subject { ExpandVariables.expand(value, variables) } # rubocop:disable RSpec/DescribedClass
+ subject { ExpandVariables.expand(value, variables) }
it { is_expected.to eq(result) }
end
diff --git a/spec/lib/feature_spec.rb b/spec/lib/feature_spec.rb
index 3d59b1f35a9..2525dd17b89 100644
--- a/spec/lib/feature_spec.rb
+++ b/spec/lib/feature_spec.rb
@@ -171,6 +171,13 @@ describe Feature do
end
end
+ it 'returns the default value when the database does not exist' do
+ fake_default = double('fake default')
+ expect(ActiveRecord::Base).to receive(:connection) { raise ActiveRecord::NoDatabaseError, "No database" }
+
+ expect(described_class.enabled?(:a_feature, default_enabled: fake_default)).to eq(fake_default)
+ end
+
context 'cached feature flag', :request_store do
let(:flag) { :some_feature_flag }
diff --git a/spec/lib/gitlab/app_json_logger_spec.rb b/spec/lib/gitlab/app_json_logger_spec.rb
new file mode 100644
index 00000000000..22a398f8bca
--- /dev/null
+++ b/spec/lib/gitlab/app_json_logger_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::AppJsonLogger do
+ subject { described_class.new('/dev/null') }
+
+ let(:hash_message) { { 'message' => 'Message', 'project_id' => '123' } }
+ let(:string_message) { 'Information' }
+
+ it 'logs a hash as a JSON' do
+ expect(JSON.parse(subject.format_message('INFO', Time.now, nil, hash_message))).to include(hash_message)
+ end
+
+ it 'logs a string as a JSON' do
+ expect(JSON.parse(subject.format_message('INFO', Time.now, nil, string_message))).to include('message' => string_message)
+ end
+end
diff --git a/spec/lib/gitlab/app_logger_spec.rb b/spec/lib/gitlab/app_logger_spec.rb
index 3b21104b15d..132a10b9409 100644
--- a/spec/lib/gitlab/app_logger_spec.rb
+++ b/spec/lib/gitlab/app_logger_spec.rb
@@ -2,13 +2,21 @@
require 'spec_helper'
-describe Gitlab::AppLogger, :request_store do
+describe Gitlab::AppLogger do
subject { described_class }
- it 'builds a logger once' do
- expect(::Logger).to receive(:new).and_call_original
+ it 'builds a Gitlab::Logger object twice' do
+ expect(Gitlab::Logger).to receive(:new)
+ .exactly(described_class.loggers.size)
+ .and_call_original
- subject.info('hello world')
- subject.error('hello again')
+ subject.info('Hello World!')
+ end
+
+ it 'logs info to AppLogger and AppJsonLogger' do
+ expect_any_instance_of(Gitlab::AppTextLogger).to receive(:info).and_call_original
+ expect_any_instance_of(Gitlab::AppJsonLogger).to receive(:info).and_call_original
+
+ subject.info('Hello World!')
end
end
diff --git a/spec/lib/gitlab/app_text_logger_spec.rb b/spec/lib/gitlab/app_text_logger_spec.rb
new file mode 100644
index 00000000000..c84b986ce40
--- /dev/null
+++ b/spec/lib/gitlab/app_text_logger_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::AppTextLogger do
+ subject { described_class.new('/dev/null') }
+
+ let(:hash_message) { { message: 'Message', project_id: 123 } }
+ let(:string_message) { 'Information' }
+
+ it 'logs a hash as string' do
+ expect(subject.format_message('INFO', Time.now, nil, hash_message )).to include(hash_message.to_s)
+ end
+
+ it 'logs a string unchanged' do
+ expect(subject.format_message('INFO', Time.now, nil, string_message)).to include(string_message)
+ end
+
+ it 'logs time in UTC with ISO8601.3 standard' do
+ Timecop.freeze do
+ expect(subject.format_message('INFO', Time.now, nil, string_message))
+ .to include(Time.now.utc.iso8601(3))
+ end
+ end
+end
diff --git a/spec/lib/gitlab/application_context_spec.rb b/spec/lib/gitlab/application_context_spec.rb
new file mode 100644
index 00000000000..482bf0dc192
--- /dev/null
+++ b/spec/lib/gitlab/application_context_spec.rb
@@ -0,0 +1,104 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ApplicationContext do
+ describe '.with_context' do
+ it 'yields the block' do
+ expect { |b| described_class.with_context({}, &b) }.to yield_control
+ end
+
+ it 'passes the expected context on to labkit' do
+ fake_proc = duck_type(:call)
+ expected_context = hash_including(user: fake_proc, project: fake_proc, root_namespace: fake_proc)
+
+ expect(Labkit::Context).to receive(:with_context).with(expected_context)
+
+ described_class.with_context(
+ user: build(:user),
+ project: build(:project),
+ namespace: build(:namespace)) {}
+ end
+
+ it 'raises an error when passing invalid options' do
+ expect { described_class.with_context(no: 'option') {} }.to raise_error(ArgumentError)
+ end
+ end
+
+ describe '.push' do
+ it 'passes the expected context on to labkit' do
+ fake_proc = duck_type(:call)
+ expected_context = { user: fake_proc }
+
+ expect(Labkit::Context).to receive(:push).with(expected_context)
+
+ described_class.push(user: build(:user))
+ end
+
+ it 'raises an error when passing invalid options' do
+ expect { described_class.push(no: 'option')}.to raise_error(ArgumentError)
+ end
+ end
+
+ describe '#to_lazy_hash' do
+ let(:user) { build(:user) }
+ let(:project) { build(:project) }
+ let(:namespace) { build(:group) }
+ let(:subgroup) { build(:group, parent: namespace) }
+
+ def result(context)
+ context.to_lazy_hash.transform_values { |v| v.call }
+ end
+
+ it 'does not call the attributes until needed' do
+ fake_proc = double('Proc')
+
+ expect(fake_proc).not_to receive(:call)
+
+ described_class.new(user: fake_proc, project: fake_proc, namespace: fake_proc).to_lazy_hash
+ end
+
+ it 'correctly loads the expected values when they are wrapped in a block' do
+ context = described_class.new(user: -> { user }, project: -> { project }, namespace: -> { subgroup })
+
+ expect(result(context))
+ .to include(user: user.username, project: project.full_path, root_namespace: namespace.full_path)
+ end
+
+ it 'correctly loads the expected values when passed directly' do
+ context = described_class.new(user: user, project: project, namespace: subgroup)
+
+ expect(result(context))
+ .to include(user: user.username, project: project.full_path, root_namespace: namespace.full_path)
+ end
+
+    it "falls back to a project's namespace when a project is passed but no namespace" do
+ context = described_class.new(project: project)
+
+ expect(result(context))
+ .to include(project: project.full_path, root_namespace: project.full_path_components.first)
+ end
+
+ context 'only include values for which an option was specified' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:provided_options, :expected_context_keys) do
+ [:user, :namespace, :project] | [:user, :project, :root_namespace]
+ [:user, :project] | [:user, :project, :root_namespace]
+ [:user, :namespace] | [:user, :root_namespace]
+ [:user] | [:user]
+ [] | []
+ end
+
+ with_them do
+ it do
+ # Build a hash that has all `provided_options` as keys, and `nil` as value
+ provided_values = provided_options.map { |key| [key, nil] }.to_h
+ context = described_class.new(provided_values)
+
+ expect(context.to_lazy_hash.keys).to contain_exactly(*expected_context_keys)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/asciidoc_spec.rb b/spec/lib/gitlab/asciidoc_spec.rb
index 38ec04ebe81..c8d159d1e84 100644
--- a/spec/lib/gitlab/asciidoc_spec.rb
+++ b/spec/lib/gitlab/asciidoc_spec.rb
@@ -481,7 +481,6 @@ module Gitlab
['../sample.adoc', 'doc/sample.adoc', 'relative path to a file up one directory'],
['../../sample.adoc', 'sample.adoc', 'relative path for a file up multiple directories']
].each do |include_path_, file_path_, desc|
-
context "the file is specified by #{desc}" do
let(:include_path) { include_path_ }
let(:file_path) { file_path_ }
@@ -519,6 +518,28 @@ module Gitlab
end
end
+ context 'when repository is passed into the context' do
+ let(:wiki_repo) { project.wiki.repository }
+ let(:include_path) { 'wiki_file.adoc' }
+
+ before do
+ project.create_wiki
+ context.merge!(repository: wiki_repo)
+ end
+
+ context 'when the file exists' do
+ before do
+ create_file(include_path, 'Content from wiki', repository: wiki_repo)
+ end
+
+ it { is_expected.to include('<p>Content from wiki</p>') }
+ end
+
+ context 'when the file does not exist' do
+ it { is_expected.to include("[ERROR: include::#{include_path}[] - unresolved directive]")}
+ end
+ end
+
context 'recursive includes with relative paths' do
let(:input) do
<<~ADOC
@@ -563,8 +584,8 @@ module Gitlab
end
end
- def create_file(path, content)
- project.repository.create_file(project.creator, path, content,
+ def create_file(path, content, repository: project.repository)
+ repository.create_file(project.creator, path, content,
message: "Add #{path}", branch_name: 'asciidoc')
end
end
diff --git a/spec/lib/gitlab/auth/auth_finders_spec.rb b/spec/lib/gitlab/auth/auth_finders_spec.rb
index 82ff8e7f76c..bffaaef4ed4 100644
--- a/spec/lib/gitlab/auth/auth_finders_spec.rb
+++ b/spec/lib/gitlab/auth/auth_finders_spec.rb
@@ -446,6 +446,93 @@ describe Gitlab::Auth::AuthFinders do
end
end
+ describe '#find_user_from_job_token' do
+ let(:job) { create(:ci_build, user: user) }
+ let(:route_authentication_setting) { { job_token_allowed: true } }
+
+ subject { find_user_from_job_token }
+
+ context 'when the job token is in the headers' do
+ it 'returns the user if valid job token' do
+ env[described_class::JOB_TOKEN_HEADER] = job.token
+
+ is_expected.to eq(user)
+ expect(@current_authenticated_job).to eq(job)
+ end
+
+ it 'returns nil without job token' do
+ env[described_class::JOB_TOKEN_HEADER] = ''
+
+ is_expected.to be_nil
+ end
+
+      it 'raises an error if the job token is invalid' do
+ env[described_class::JOB_TOKEN_HEADER] = 'invalid token'
+
+ expect { subject }.to raise_error(Gitlab::Auth::UnauthorizedError)
+ end
+
+ context 'when route is not allowed to be authenticated' do
+ let(:route_authentication_setting) { { job_token_allowed: false } }
+
+ it 'sets current_user to nil' do
+ env[described_class::JOB_TOKEN_HEADER] = job.token
+
+ allow_any_instance_of(Gitlab::UserAccess).to receive(:allowed?).and_return(true)
+
+ is_expected.to be_nil
+ end
+ end
+ end
+
+ context 'when the job token is in the params' do
+ shared_examples 'job token params' do |token_key_name|
+ before do
+ set_param(token_key_name, token)
+ end
+
+ context 'with valid job token' do
+ let(:token) { job.token }
+
+ it 'returns the user' do
+ is_expected.to eq(user)
+ expect(@current_authenticated_job).to eq(job)
+ end
+ end
+
+ context 'with empty job token' do
+ let(:token) { '' }
+
+ it 'returns nil' do
+ is_expected.to be_nil
+ end
+ end
+
+ context 'with invalid job token' do
+ let(:token) { 'invalid token' }
+
+          it 'raises an error' do
+ expect { subject }.to raise_error(Gitlab::Auth::UnauthorizedError)
+ end
+ end
+
+ context 'when route is not allowed to be authenticated' do
+ let(:route_authentication_setting) { { job_token_allowed: false } }
+ let(:token) { job.token }
+
+ it 'sets current_user to nil' do
+ allow_any_instance_of(Gitlab::UserAccess).to receive(:allowed?).and_return(true)
+
+ is_expected.to be_nil
+ end
+ end
+ end
+
+ it_behaves_like 'job token params', described_class::JOB_TOKEN_PARAM
+ it_behaves_like 'job token params', described_class::RUNNER_JOB_TOKEN_PARAM
+ end
+ end
+
describe '#find_runner_from_token' do
let(:runner) { create(:ci_runner) }
diff --git a/spec/lib/gitlab/auth/request_authenticator_spec.rb b/spec/lib/gitlab/auth/request_authenticator_spec.rb
index 4dbcd0df302..87c96803c3a 100644
--- a/spec/lib/gitlab/auth/request_authenticator_spec.rb
+++ b/spec/lib/gitlab/auth/request_authenticator_spec.rb
@@ -42,6 +42,8 @@ describe Gitlab::Auth::RequestAuthenticator do
describe '#find_sessionless_user' do
let!(:access_token_user) { build(:user) }
let!(:feed_token_user) { build(:user) }
+ let!(:static_object_token_user) { build(:user) }
+ let!(:job_token_user) { build(:user) }
it 'returns access_token user first' do
allow_any_instance_of(described_class).to receive(:find_user_from_web_access_token).and_return(access_token_user)
@@ -56,6 +58,22 @@ describe Gitlab::Auth::RequestAuthenticator do
expect(subject.find_sessionless_user([:api])).to eq feed_token_user
end
+ it 'returns static_object_token user if no feed_token user found' do
+ allow_any_instance_of(described_class)
+ .to receive(:find_user_from_static_object_token)
+ .and_return(static_object_token_user)
+
+ expect(subject.find_sessionless_user([:api])).to eq static_object_token_user
+ end
+
+ it 'returns job_token user if no static_object_token user found' do
+ allow_any_instance_of(described_class)
+ .to receive(:find_user_from_job_token)
+ .and_return(job_token_user)
+
+ expect(subject.find_sessionless_user([:api])).to eq job_token_user
+ end
+
it 'returns nil if no user found' do
expect(subject.find_sessionless_user([:api])).to be_blank
end
@@ -67,6 +85,39 @@ describe Gitlab::Auth::RequestAuthenticator do
end
end
+ describe '#find_user_from_job_token' do
+ let!(:user) { build(:user) }
+ let!(:job) { build(:ci_build, user: user) }
+
+ before do
+ env[Gitlab::Auth::AuthFinders::JOB_TOKEN_HEADER] = 'token'
+ end
+
+ context 'with API requests' do
+ before do
+ env['SCRIPT_NAME'] = '/api/endpoint'
+ end
+
+ it 'tries to find the user' do
+ expect(::Ci::Build).to receive(:find_by_token).and_return(job)
+
+ expect(subject.find_sessionless_user([:api])).to eq user
+ end
+ end
+
+ context 'without API requests' do
+ before do
+ env['SCRIPT_NAME'] = '/web/endpoint'
+ end
+
+ it 'does not search for job users' do
+ expect(::Ci::Build).not_to receive(:find_by_token)
+
+ expect(subject.find_sessionless_user([:api])).to be_nil
+ end
+ end
+ end
+
describe '#runner' do
let!(:runner) { build(:ci_runner) }
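
The ordering asserted above roughly corresponds to a lookup chain like the following (a sketch under the assumption that each finder returns a user or nil; the real method may differ in detail):

  # Sketch of the sessionless lookup order implied by these examples.
  def find_sessionless_user(request_format)
    find_user_from_web_access_token(request_format) ||
      find_user_from_feed_token(request_format) ||
      find_user_from_static_object_token(request_format) ||
      find_user_from_job_token
  rescue Gitlab::Auth::AuthenticationError
    nil
  end
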
diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb
index 311cbd4dd7e..1f943bebbec 100644
--- a/spec/lib/gitlab/auth_spec.rb
+++ b/spec/lib/gitlab/auth_spec.rb
@@ -130,6 +130,15 @@ describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
gl_auth.find_for_git_client(user.username, user.password, project: nil, ip: 'ip')
end
+ it 'rate limits a user by unique IPs' do
+ expect_next_instance_of(Gitlab::Auth::IpRateLimiter) do |rate_limiter|
+ expect(rate_limiter).to receive(:reset!)
+ end
+ expect(Gitlab::Auth::UniqueIpsLimiter).to receive(:limit_user!).twice.and_call_original
+
+ gl_auth.find_for_git_client(user.username, user.password, project: nil, ip: 'ip')
+ end
+
it 'registers failure for failed auth' do
expect_next_instance_of(Gitlab::Auth::IpRateLimiter) do |rate_limiter|
expect(rate_limiter).to receive(:register_fail!)
@@ -415,6 +424,12 @@ describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
.to eq(auth_success)
end
+ it 'does not attempt to rate limit unique IPs for a deploy token' do
+ expect(Gitlab::Auth::UniqueIpsLimiter).not_to receive(:limit_user!)
+
+ gl_auth.find_for_git_client(login, deploy_token.token, project: project, ip: 'ip')
+ end
+
it 'fails when login is not valid' do
expect(gl_auth.find_for_git_client('random_login', deploy_token.token, project: project, ip: 'ip'))
.to eq(auth_failure)
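
As a rough illustration of the behaviour under test (not the actual implementation): password-based lookups go through the unique-IP limiter, while deploy-token lookups bypass it because no signed-in user is involved.

  # Sketch only. `limit_user!` yields the block, tracks the distinct IPs seen
  # for the resolved user, and raises once the configured limit is exceeded.
  def rate_limited_user_lookup(login)
    Gitlab::Auth::UniqueIpsLimiter.limit_user! { User.find_by_username(login) }
  end

  def deploy_token_lookup(token_string)
    DeployToken.active.find_by_token(token_string) # no UniqueIpsLimiter call
  end
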
diff --git a/spec/lib/gitlab/background_migration/activate_prometheus_services_for_shared_cluster_applications_spec.rb b/spec/lib/gitlab/background_migration/activate_prometheus_services_for_shared_cluster_applications_spec.rb
new file mode 100644
index 00000000000..0edf87e1354
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/activate_prometheus_services_for_shared_cluster_applications_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::ActivatePrometheusServicesForSharedClusterApplications, :migration, schema: 2020_01_14_113341 do
+ include MigrationHelpers::PrometheusServiceHelpers
+
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:services) { table(:services) }
+ let(:namespace) { namespaces.create(name: 'user', path: 'user') }
+ let(:project) { projects.create(namespace_id: namespace.id) }
+
+ let(:columns) do
+ %w(project_id active properties type template push_events
+ issues_events merge_requests_events tag_push_events
+ note_events category default wiki_page_events pipeline_events
+ confidential_issues_events commit_events job_events
+ confidential_note_events deployment_events)
+ end
+
+ describe '#perform' do
+ it 'is idempotent' do
+ expect { subject.perform(project.id) }.to change { services.order(:id).map { |row| row.attributes } }
+
+ expect { subject.perform(project.id) }.not_to change { services.order(:id).map { |row| row.attributes } }
+ end
+
+ context 'non prometheus services' do
+ it 'does not change them' do
+ other_type = 'SomeOtherService'
+ services.create(service_params_for(project.id, active: true, type: other_type))
+
+ expect { subject.perform(project.id) }.not_to change { services.where(type: other_type).order(:id).map { |row| row.attributes } }
+ end
+ end
+
+ context 'prometheus services are configured manually' do
+ it 'does not change them' do
+ properties = '{"api_url":"http://test.dev","manual_configuration":"1"}'
+ services.create(service_params_for(project.id, properties: properties, active: false))
+
+ expect { subject.perform(project.id) }.not_to change { services.order(:id).map { |row| row.attributes } }
+ end
+ end
+
+ context 'prometheus integration services do not exist' do
+ it 'creates missing services entries' do
+ subject.perform(project.id)
+
+ rows = services.order(:id).map { |row| row.attributes.slice(*columns).symbolize_keys }
+
+ expect(rows).to eq([service_params_for(project.id, active: true)])
+ end
+ end
+
+ context 'prometheus integration services exist' do
+ context 'in active state' do
+ it 'does not change them' do
+ services.create(service_params_for(project.id, active: true))
+
+ expect { subject.perform(project.id) }.not_to change { services.order(:id).map { |row| row.attributes } }
+ end
+ end
+
+ context 'not in active state' do
+ it 'sets active attribute to true' do
+ service = services.create(service_params_for(project.id))
+
+ expect { subject.perform(project.id) }.to change { service.reload.active? }.from(false).to(true)
+ end
+ end
+ end
+ end
+end
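
A condensed sketch of what `#perform` is expected to do per project, assuming a hypothetical `services_table` handle and simplified attribute handling (the shipped migration deals with more columns and batching):

  # Sketch: activate or create the Prometheus integration row, leaving
  # manually configured and already-active rows untouched.
  def perform(project_id)
    row = services_table.find_by(project_id: project_id, type: 'PrometheusService')

    if row.nil?
      services_table.create!(project_id: project_id, type: 'PrometheusService', active: true)
    elsif !row.active && !manually_configured?(row)
      row.update!(active: true)
    end
  end

  def manually_configured?(row)
    JSON.parse(row.properties.to_s)['manual_configuration'].to_s == '1'
  rescue JSON::ParserError
    false
  end
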
diff --git a/spec/lib/gitlab/background_migration/migrate_fingerprint_sha256_within_keys_spec.rb b/spec/lib/gitlab/background_migration/migrate_fingerprint_sha256_within_keys_spec.rb
new file mode 100644
index 00000000000..3ccb2379936
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/migrate_fingerprint_sha256_within_keys_spec.rb
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::MigrateFingerprintSha256WithinKeys, :migration, schema: 20200106071113 do
+ subject(:fingerprint_migrator) { described_class.new }
+
+ let(:key_table) { table(:keys) }
+
+ before do
+ generate_fingerprints!
+ end
+
+ it 'correctly creates a sha256 fingerprint for a key' do
+ key_1 = Key.find(1017)
+ key_2 = Key.find(1027)
+
+ expect(key_1.fingerprint_md5).to eq('ba:81:59:68:d7:6c:cd:02:02:bf:6a:9b:55:4e:af:d1')
+ expect(key_1.fingerprint_sha256).to eq(nil)
+
+ expect(key_2.fingerprint_md5).to eq('39:e3:64:a6:24:ea:45:a2:8c:55:2a:e9:4d:4f:1f:b4')
+ expect(key_2.fingerprint_sha256).to eq(nil)
+
+ query_count = ActiveRecord::QueryRecorder.new do
+ fingerprint_migrator.perform(1, 10000)
+ end.count
+
+ expect(query_count).to eq(8)
+
+ key_1.reload
+ key_2.reload
+
+ expect(key_1.fingerprint_md5).to eq('ba:81:59:68:d7:6c:cd:02:02:bf:6a:9b:55:4e:af:d1')
+ expect(key_1.fingerprint_sha256).to eq('nUhzNyftwADy8AH3wFY31tAKs7HufskYTte2aXo/lCg')
+
+ expect(key_2.fingerprint_md5).to eq('39:e3:64:a6:24:ea:45:a2:8c:55:2a:e9:4d:4f:1f:b4')
+ expect(key_2.fingerprint_sha256).to eq('zMNbLekgdjtcgDv8VSC0z5lpdACMG3Q4PUoIz5+H2jM')
+ end
+
+ context 'with invalid keys' do
+ before do
+ key = Key.find(1017)
+ # a double space after "ssh-rsa" leads to an
+ # OpenSSL::PKey::PKeyError in Net::SSH::KeyFactory.load_data_public_key
+ key.update_column(:key, key.key.gsub('ssh-rsa ', 'ssh-rsa  '))
+ end
+
+ it 'ignores errors and does not set the fingerprint' do
+ fingerprint_migrator.perform(1, 10000)
+
+ key_1 = Key.find(1017)
+ key_2 = Key.find(1027)
+
+ expect(key_1.fingerprint_sha256).to be_nil
+ expect(key_2.fingerprint_sha256).not_to be_nil
+ end
+ end
+
+ it 'migrates all keys' do
+ expect(Key.where(fingerprint_sha256: nil).count).to eq(Key.all.count)
+
+ fingerprint_migrator.perform(1, 10000)
+
+ expect(Key.where(fingerprint_sha256: nil).count).to eq(0)
+ end
+
+ def generate_fingerprints!
+ values = ""
+ (1000..2000).to_a.each do |record|
+ key = base_key_for(record)
+ fingerprint = fingerprint_for(key)
+
+ values += "(#{record}, #{record}, 'test-#{record}', '#{key}', '#{fingerprint}'),"
+ end
+
+ update_query = <<~SQL
+ INSERT INTO keys ( id, user_id, title, key, fingerprint )
+ VALUES
+ #{values.chomp(",")};
+ SQL
+
+ ActiveRecord::Base.connection.execute(update_query)
+ end
+
+ def base_key_for(record)
+ 'ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAIEAiPWx6WM4lhHNedGfBpPJNPpZ7yKu+dnn1SJejgt0000k6YjzGGphH2TUxwKzxcKDKKezwkpfnxPkSMkuEspGRt/aZZ9wa++Oi7Qkr8prgHc4soW6NUlfDzpvZK2H5E7eQaSeP3SAwGmQKUFHCddNaP0L+hM7zhFNzjFvpaMgJw0='
+ .gsub("0000", "%04d" % (record - 1)) # generate arbitrary keys with placeholder 0000 within the key above
+ end
+
+ def fingerprint_for(key)
+ Gitlab::SSHPublicKey.new(key).fingerprint("md5")
+ end
+end
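
The fingerprint assertions above can be reproduced with a small helper along these lines; whether `Gitlab::SSHPublicKey#fingerprint` accepts a `'sha256'` argument is an assumption extrapolated from the `'md5'` call in `fingerprint_for`, and unparseable keys are treated as yielding no fingerprints:

  # Sketch: derive both fingerprint formats for a public key string,
  # returning nils when the key cannot be parsed.
  def fingerprints_for(key_text)
    public_key = Gitlab::SSHPublicKey.new(key_text)

    {
      md5: public_key.fingerprint('md5'),
      sha256: public_key.fingerprint('sha256')
    }
  rescue OpenSSL::PKey::PKeyError
    { md5: nil, sha256: nil }
  end
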
diff --git a/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb b/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb
new file mode 100644
index 00000000000..664e3810fc9
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb
@@ -0,0 +1,320 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData, :migration, schema: 20190924152703 do
+ let(:services) { table(:services) }
+
+ # we need to define the classes due to encryption
+ class IssueTrackerData < ApplicationRecord
+ self.table_name = 'issue_tracker_data'
+
+ def self.encryption_options
+ {
+ key: Settings.attr_encrypted_db_key_base_32,
+ encode: true,
+ mode: :per_attribute_iv,
+ algorithm: 'aes-256-gcm'
+ }
+ end
+
+ attr_encrypted :project_url, encryption_options
+ attr_encrypted :issues_url, encryption_options
+ attr_encrypted :new_issue_url, encryption_options
+ end
+
+ class JiraTrackerData < ApplicationRecord
+ self.table_name = 'jira_tracker_data'
+
+ def self.encryption_options
+ {
+ key: Settings.attr_encrypted_db_key_base_32,
+ encode: true,
+ mode: :per_attribute_iv,
+ algorithm: 'aes-256-gcm'
+ }
+ end
+
+ attr_encrypted :url, encryption_options
+ attr_encrypted :api_url, encryption_options
+ attr_encrypted :username, encryption_options
+ attr_encrypted :password, encryption_options
+ end
+
+ let(:url) { 'http://base-url.tracker.com' }
+ let(:new_issue_url) { 'http://base-url.tracker.com/new_issue' }
+ let(:issues_url) { 'http://base-url.tracker.com/issues' }
+ let(:api_url) { 'http://api.tracker.com' }
+ let(:password) { 'passw1234' }
+ let(:username) { 'user9' }
+ let(:title) { 'Issue tracker' }
+ let(:description) { 'Issue tracker description' }
+
+ let(:jira_properties) do
+ {
+ 'api_url' => api_url,
+ 'jira_issue_transition_id' => '5',
+ 'password' => password,
+ 'url' => url,
+ 'username' => username,
+ 'title' => title,
+ 'description' => description,
+ 'other_field' => 'something'
+ }
+ end
+
+ let(:tracker_properties) do
+ {
+ 'project_url' => url,
+ 'new_issue_url' => new_issue_url,
+ 'issues_url' => issues_url,
+ 'title' => title,
+ 'description' => description,
+ 'other_field' => 'something'
+ }
+ end
+
+ let(:tracker_properties_no_url) do
+ {
+ 'new_issue_url' => new_issue_url,
+ 'issues_url' => issues_url,
+ 'title' => title,
+ 'description' => description
+ }
+ end
+
+ subject { described_class.new.perform(1, 100) }
+
+ shared_examples 'handle properties' do
+ it 'does not clear the properties' do
+ expect { subject }.not_to change { service.reload.properties }
+ end
+ end
+
+ context 'with jira service' do
+ let!(:service) do
+ services.create(id: 10, type: 'JiraService', title: nil, properties: jira_properties.to_json, category: 'issue_tracker')
+ end
+
+ it_behaves_like 'handle properties'
+
+ it 'migrates data' do
+ expect { subject }.to change { JiraTrackerData.count }.by(1)
+
+ service.reload
+ data = JiraTrackerData.find_by(service_id: service.id)
+
+ expect(data.url).to eq(url)
+ expect(data.api_url).to eq(api_url)
+ expect(data.username).to eq(username)
+ expect(data.password).to eq(password)
+ expect(service.title).to eq(title)
+ expect(service.description).to eq(description)
+ end
+ end
+
+ context 'with bugzilla service' do
+ let!(:service) do
+ services.create(id: 11, type: 'BugzillaService', title: nil, properties: tracker_properties.to_json, category: 'issue_tracker')
+ end
+
+ it_behaves_like 'handle properties'
+
+ it 'migrates data' do
+ expect { subject }.to change { IssueTrackerData.count }.by(1)
+
+ service.reload
+ data = IssueTrackerData.find_by(service_id: service.id)
+
+ expect(data.project_url).to eq(url)
+ expect(data.issues_url).to eq(issues_url)
+ expect(data.new_issue_url).to eq(new_issue_url)
+ expect(service.title).to eq(title)
+ expect(service.description).to eq(description)
+ end
+ end
+
+ context 'with youtrack service' do
+ let!(:service) do
+ services.create(id: 12, type: 'YoutrackService', title: nil, properties: tracker_properties_no_url.to_json, category: 'issue_tracker')
+ end
+
+ it_behaves_like 'handle properties'
+
+ it 'migrates data' do
+ expect { subject }.to change { IssueTrackerData.count }.by(1)
+
+ service.reload
+ data = IssueTrackerData.find_by(service_id: service.id)
+
+ expect(data.project_url).to be_nil
+ expect(data.issues_url).to eq(issues_url)
+ expect(data.new_issue_url).to eq(new_issue_url)
+ expect(service.title).to eq(title)
+ expect(service.description).to eq(description)
+ end
+ end
+
+ context 'with gitlab service with no properties' do
+ let!(:service) do
+ services.create(id: 13, type: 'GitlabIssueTrackerService', title: nil, properties: {}, category: 'issue_tracker')
+ end
+
+ it_behaves_like 'handle properties'
+
+ it 'does not migrate data' do
+ expect { subject }.not_to change { IssueTrackerData.count }
+ end
+ end
+
+ context 'with redmine service already with data fields' do
+ let!(:service) do
+ services.create(id: 14, type: 'RedmineService', title: nil, properties: tracker_properties_no_url.to_json, category: 'issue_tracker').tap do |service|
+ IssueTrackerData.create!(service_id: service.id, project_url: url, new_issue_url: new_issue_url, issues_url: issues_url)
+ end
+ end
+
+ it_behaves_like 'handle properties'
+
+ it 'does not create new data fields record' do
+ expect { subject }.not_to change { IssueTrackerData.count }
+ end
+ end
+
+ context 'with custom issue tracker which has data fields record inconsistent with properties field' do
+ let!(:service) do
+ services.create(id: 15, type: 'CustomIssueTrackerService', title: 'Existing title', properties: jira_properties.to_json, category: 'issue_tracker').tap do |service|
+ IssueTrackerData.create!(service_id: service.id, project_url: 'http://other_url', new_issue_url: 'http://other_url/new_issue', issues_url: 'http://other_url/issues')
+ end
+ end
+
+ it_behaves_like 'handle properties'
+
+ it 'does not update the data fields record' do
+ expect { subject }.not_to change { IssueTrackerData.count }
+
+ service.reload
+ data = IssueTrackerData.find_by(service_id: service.id)
+
+ expect(data.project_url).to eq('http://other_url')
+ expect(data.issues_url).to eq('http://other_url/issues')
+ expect(data.new_issue_url).to eq('http://other_url/new_issue')
+ expect(service.title).to eq('Existing title')
+ end
+ end
+
+ context 'with jira service which has data fields record inconsistent with properties field' do
+ let!(:service) do
+ services.create(id: 16, type: 'CustomIssueTrackerService', description: 'Existing description', properties: jira_properties.to_json, category: 'issue_tracker').tap do |service|
+ JiraTrackerData.create!(service_id: service.id, url: 'http://other_jira_url')
+ end
+ end
+
+ it_behaves_like 'handle properties'
+
+ it 'does not update the data fields record' do
+ expect { subject }.not_to change { JiraTrackerData.count }
+
+ service.reload
+ data = JiraTrackerData.find_by(service_id: service.id)
+
+ expect(data.url).to eq('http://other_jira_url')
+ expect(data.password).to be_nil
+ expect(data.username).to be_nil
+ expect(data.api_url).to be_nil
+ expect(service.description).to eq('Existing description')
+ end
+ end
+
+ context 'non issue tracker service' do
+ let!(:service) do
+ services.create(id: 17, title: nil, description: nil, type: 'OtherService', properties: tracker_properties.to_json)
+ end
+
+ it_behaves_like 'handle properties'
+
+ it 'does not migrate any data' do
+ expect { subject }.not_to change { IssueTrackerData.count }
+
+ service.reload
+ expect(service.title).to be_nil
+ expect(service.description).to be_nil
+ end
+ end
+
+ context 'jira service with empty properties' do
+ let!(:service) do
+ services.create(id: 18, type: 'JiraService', properties: '', category: 'issue_tracker')
+ end
+
+ it_behaves_like 'handle properties'
+
+ it 'does not migrate any data' do
+ expect { subject }.not_to change { JiraTrackerData.count }
+ end
+ end
+
+ context 'jira service with nil properties' do
+ let!(:service) do
+ services.create(id: 18, type: 'JiraService', properties: nil, category: 'issue_tracker')
+ end
+
+ it_behaves_like 'handle properties'
+
+ it 'does not migrate any data' do
+ expect { subject }.not_to change { JiraTrackerData.count }
+ end
+ end
+
+ context 'jira service with invalid properties' do
+ let!(:service) do
+ services.create(id: 18, type: 'JiraService', properties: 'invalid data', category: 'issue_tracker')
+ end
+
+ it_behaves_like 'handle properties'
+
+ it 'does not migrate any data' do
+ expect { subject }.not_to change { JiraTrackerData.count }
+ end
+ end
+
+ context 'with jira service with invalid properties, valid jira service and valid bugzilla service' do
+ let!(:jira_service_invalid) do
+ services.create(id: 19, title: 'invalid - title', description: 'invalid - description', type: 'JiraService', properties: 'invalid data', category: 'issue_tracker')
+ end
+ let!(:jira_service_valid) do
+ services.create(id: 20, type: 'JiraService', properties: jira_properties.to_json, category: 'issue_tracker')
+ end
+ let!(:bugzilla_service_valid) do
+ services.create(id: 11, type: 'BugzillaService', title: nil, properties: tracker_properties.to_json, category: 'issue_tracker')
+ end
+
+ it 'migrates data for the valid service' do
+ subject
+
+ jira_service_invalid.reload
+ expect(JiraTrackerData.find_by(service_id: jira_service_invalid.id)).to be_nil
+ expect(jira_service_invalid.title).to eq('invalid - title')
+ expect(jira_service_invalid.description).to eq('invalid - description')
+ expect(jira_service_invalid.properties).to eq('invalid data')
+
+ jira_service_valid.reload
+ data = JiraTrackerData.find_by(service_id: jira_service_valid.id)
+
+ expect(data.url).to eq(url)
+ expect(data.api_url).to eq(api_url)
+ expect(data.username).to eq(username)
+ expect(data.password).to eq(password)
+ expect(jira_service_valid.title).to eq(title)
+ expect(jira_service_valid.description).to eq(description)
+
+ bugzilla_service_valid.reload
+ data = IssueTrackerData.find_by(service_id: bugzilla_service_valid.id)
+
+ expect(data.project_url).to eq(url)
+ expect(data.issues_url).to eq(issues_url)
+ expect(data.new_issue_url).to eq(new_issue_url)
+ expect(bugzilla_service_valid.title).to eq(title)
+ expect(bugzilla_service_valid.description).to eq(description)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration_spec.rb b/spec/lib/gitlab/background_migration_spec.rb
index 8960ac706e6..66a0b11606f 100644
--- a/spec/lib/gitlab/background_migration_spec.rb
+++ b/spec/lib/gitlab/background_migration_spec.rb
@@ -152,6 +152,17 @@ describe Gitlab::BackgroundMigration do
described_class.perform('Foo', [10, 20])
end
+
+ context 'backward compatibility' do
+ it 'performs a background migration for fully-qualified job classes' do
+ expect(migration).to receive(:perform).with(10, 20).once
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_and_raise_for_dev_exception)
+ .with(instance_of(StandardError), hash_including(:class_name))
+
+ described_class.perform('Gitlab::BackgroundMigration::Foo', [10, 20])
+ end
+ end
end
describe '.exists?' do
diff --git a/spec/lib/gitlab/backtrace_cleaner_spec.rb b/spec/lib/gitlab/backtrace_cleaner_spec.rb
new file mode 100644
index 00000000000..f3aded9faad
--- /dev/null
+++ b/spec/lib/gitlab/backtrace_cleaner_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::BacktraceCleaner do
+ describe '.clean_backtrace' do
+ it 'uses the Rails backtrace cleaner' do
+ backtrace = []
+
+ expect(Rails.backtrace_cleaner).to receive(:clean).with(backtrace)
+
+ described_class.clean_backtrace(backtrace)
+ end
+
+ it 'removes lines from IGNORE_BACKTRACES' do
+ backtrace = [
+ "lib/gitlab/gitaly_client.rb:294:in `block (2 levels) in migrate'",
+ "lib/gitlab/gitaly_client.rb:331:in `allow_n_plus_1_calls'",
+ "lib/gitlab/gitaly_client.rb:280:in `block in migrate'",
+ "lib/gitlab/metrics/influx_db.rb:103:in `measure'",
+ "lib/gitlab/gitaly_client.rb:278:in `migrate'",
+ "lib/gitlab/git/repository.rb:1451:in `gitaly_migrate'",
+ "lib/gitlab/git/commit.rb:66:in `find'",
+ "app/models/repository.rb:1047:in `find_commit'",
+ "lib/gitlab/metrics/instrumentation.rb:159:in `block in find_commit'",
+ "lib/gitlab/metrics/method_call.rb:36:in `measure'",
+ "lib/gitlab/metrics/instrumentation.rb:159:in `find_commit'",
+ "app/models/repository.rb:113:in `commit'",
+ "lib/gitlab/i18n.rb:50:in `with_locale'",
+ "lib/gitlab/middleware/multipart.rb:95:in `call'",
+ "lib/gitlab/request_profiler/middleware.rb:14:in `call'",
+ "ee/lib/gitlab/database/load_balancing/rack_middleware.rb:37:in `call'",
+ "ee/lib/gitlab/jira/middleware.rb:15:in `call'"
+ ]
+
+ expect(described_class.clean_backtrace(backtrace))
+ .to eq([
+ "lib/gitlab/gitaly_client.rb:294:in `block (2 levels) in migrate'",
+ "lib/gitlab/gitaly_client.rb:331:in `allow_n_plus_1_calls'",
+ "lib/gitlab/gitaly_client.rb:280:in `block in migrate'",
+ "lib/gitlab/gitaly_client.rb:278:in `migrate'",
+ "lib/gitlab/git/repository.rb:1451:in `gitaly_migrate'",
+ "lib/gitlab/git/commit.rb:66:in `find'",
+ "app/models/repository.rb:1047:in `find_commit'",
+ "app/models/repository.rb:113:in `commit'",
+ "ee/lib/gitlab/jira/middleware.rb:15:in `call'"
+ ])
+ end
+ end
+end
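
A minimal sketch of a cleaner with this shape, assuming an ignore list of path prefixes (the values below are illustrative, chosen to match the frames removed in the example):

  # Sketch, not the shipped class: run the Rails cleaner first, then drop
  # frames whose paths match instrumentation and middleware prefixes.
  module BacktraceCleanerSketch
    IGNORE_BACKTRACES = %w[
      lib/gitlab/metrics/
      lib/gitlab/middleware/
      lib/gitlab/request_profiler/
      lib/gitlab/i18n.rb
      ee/lib/gitlab/database/load_balancing/
    ].freeze

    def self.clean_backtrace(backtrace)
      return unless backtrace

      Rails.backtrace_cleaner.clean(backtrace).reject do |line|
        IGNORE_BACKTRACES.any? { |ignore| line.start_with?(ignore) }
      end
    end
  end
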
diff --git a/spec/lib/gitlab/badge/coverage/report_spec.rb b/spec/lib/gitlab/badge/coverage/report_spec.rb
index eee3f96ab85..560072a3d83 100644
--- a/spec/lib/gitlab/badge/coverage/report_spec.rb
+++ b/spec/lib/gitlab/badge/coverage/report_spec.rb
@@ -102,7 +102,7 @@ describe Gitlab::Badge::Coverage::Report do
create(:ci_pipeline, opts).tap do |pipeline|
yield pipeline
- pipeline.update_status
+ pipeline.update_legacy_status
end
end
end
diff --git a/spec/lib/gitlab/ci/build/policy/refs_spec.rb b/spec/lib/gitlab/ci/build/policy/refs_spec.rb
index 8fc1e0a4e88..c32fdc5c72e 100644
--- a/spec/lib/gitlab/ci/build/policy/refs_spec.rb
+++ b/spec/lib/gitlab/ci/build/policy/refs_spec.rb
@@ -98,6 +98,34 @@ describe Gitlab::Ci::Build::Policy::Refs do
.not_to be_satisfied_by(pipeline)
end
end
+
+ context 'when source is pipeline' do
+ let(:pipeline) { build_stubbed(:ci_pipeline, source: :pipeline) }
+
+ it 'is satisfied with only: pipelines' do
+ expect(described_class.new(%w[pipelines]))
+ .to be_satisfied_by(pipeline)
+ end
+
+ it 'is satisfied with only: pipeline' do
+ expect(described_class.new(%w[pipeline]))
+ .to be_satisfied_by(pipeline)
+ end
+ end
+
+ context 'when source is parent_pipeline' do
+ let(:pipeline) { build_stubbed(:ci_pipeline, source: :parent_pipeline) }
+
+ it 'is satisfied with only: parent_pipelines' do
+ expect(described_class.new(%w[parent_pipelines]))
+ .to be_satisfied_by(pipeline)
+ end
+
+ it 'is satisfied with only: parent_pipeline' do
+ expect(described_class.new(%w[parent_pipeline]))
+ .to be_satisfied_by(pipeline)
+ end
+ end
end
context 'when matching a ref by a regular expression' do
diff --git a/spec/lib/gitlab/ci/config/entry/cache_spec.rb b/spec/lib/gitlab/ci/config/entry/cache_spec.rb
index 4fa0a57dc82..f7b14360af3 100644
--- a/spec/lib/gitlab/ci/config/entry/cache_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/cache_spec.rb
@@ -31,13 +31,13 @@ describe Gitlab::Ci::Config::Entry::Cache do
it_behaves_like 'hash key value'
context 'with files' do
- let(:key) { { files: ['a-file', 'other-file'] } }
+ let(:key) { { files: %w[a-file other-file] } }
it_behaves_like 'hash key value'
end
context 'with files and prefix' do
- let(:key) { { files: ['a-file', 'other-file'], prefix: 'prefix-value' } }
+ let(:key) { { files: %w[a-file other-file], prefix: 'prefix-value' } }
it_behaves_like 'hash key value'
end
@@ -55,7 +55,7 @@ describe Gitlab::Ci::Config::Entry::Cache do
it { is_expected.to be_valid }
context 'with files' do
- let(:key) { { files: ['a-file', 'other-file'] } }
+ let(:key) { { files: %w[a-file other-file] } }
it { is_expected.to be_valid }
end
diff --git a/spec/lib/gitlab/ci/config/entry/job_spec.rb b/spec/lib/gitlab/ci/config/entry/job_spec.rb
index cc1ee63ff04..649689f7d3b 100644
--- a/spec/lib/gitlab/ci/config/entry/job_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/job_spec.rb
@@ -24,7 +24,7 @@ describe Gitlab::Ci::Config::Entry::Job do
let(:result) do
%i[before_script script stage type after_script cache
image services only except rules needs variables artifacts
- environment coverage retry interruptible timeout tags]
+ environment coverage retry interruptible timeout release tags]
end
it { is_expected.to match_array result }
@@ -122,6 +122,21 @@ describe Gitlab::Ci::Config::Entry::Job do
it { expect(entry).to be_valid }
end
+
+ context 'when it is a release' do
+ let(:config) do
+ {
+ script: ["make changelog | tee release_changelog.txt"],
+ release: {
+ tag_name: "v0.06",
+ name: "Release $CI_TAG_NAME",
+ description: "./release_changelog.txt"
+ }
+ }
+ end
+
+ it { expect(entry).to be_valid }
+ end
end
end
@@ -443,6 +458,25 @@ describe Gitlab::Ci::Config::Entry::Job do
expect(entry.timeout).to eq('1m 1s')
end
end
+
+ context 'when it is a release' do
+ context 'when `release:description` is missing' do
+ let(:config) do
+ {
+ script: ["make changelog | tee release_changelog.txt"],
+ release: {
+ tag_name: "v0.06",
+ name: "Release $CI_TAG_NAME"
+ }
+ }
+ end
+
+ it "returns error" do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include "release description can't be blank"
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/release/assets/link_spec.rb b/spec/lib/gitlab/ci/config/entry/release/assets/link_spec.rb
new file mode 100644
index 00000000000..0e346de3d9e
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/release/assets/link_spec.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Config::Entry::Release::Assets::Link do
+ let(:entry) { described_class.new(config) }
+
+ describe 'validation' do
+ context 'when entry config value is correct' do
+ let(:config) do
+ {
+ name: "cool-app.zip",
+ url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.zip"
+ }
+ end
+
+ describe '#value' do
+ it 'returns link configuration' do
+ expect(entry.value).to eq config
+ end
+ end
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+ end
+
+ context 'when entry value is not correct' do
+ describe '#errors' do
+ context 'when name is not a string' do
+ let(:config) { { name: 123, url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.zip" } }
+
+ it 'reports error' do
+ expect(entry.errors)
+ .to include 'link name should be a string'
+ end
+ end
+
+ context 'when name is not present' do
+ let(:config) { { url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.zip" } }
+
+ it 'reports error' do
+ expect(entry.errors)
+ .to include "link name can't be blank"
+ end
+ end
+
+ context 'when url is not addressable' do
+ let(:config) { { name: "cool-app.zip", url: "xyz" } }
+
+ it 'reports error' do
+ expect(entry.errors)
+ .to include "link url is blocked: only allowed schemes are http, https"
+ end
+ end
+
+ context 'when url is not present' do
+ let(:config) { { name: "cool-app.zip" } }
+
+ it 'reports error' do
+ expect(entry.errors)
+ .to include "link url can't be blank"
+ end
+ end
+
+ context 'when there is an unknown key present' do
+ let(:config) { { test: 100 } }
+
+ it 'reports error' do
+ expect(entry.errors)
+ .to include 'link config contains unknown keys: test'
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/entry/release/assets/links_spec.rb b/spec/lib/gitlab/ci/config/entry/release/assets/links_spec.rb
new file mode 100644
index 00000000000..d12e8d966ab
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/release/assets/links_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Config::Entry::Release::Assets::Links do
+ let(:entry) { described_class.new(config) }
+
+ describe 'validation' do
+ context 'when entry config value is correct' do
+ let(:config) do
+ [
+ {
+ name: "cool-app.zip",
+ url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.zip"
+ },
+ {
+ name: "cool-app.exe",
+ url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.exe"
+ }
+ ]
+ end
+
+ describe '#value' do
+ it 'returns links configuration' do
+ expect(entry.value).to eq config
+ end
+ end
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+ end
+
+ context 'when entry value is not correct' do
+ describe '#errors' do
+ context 'when value of link is invalid' do
+ let(:config) { { link: 'xyz' } }
+
+ it 'reports error' do
+ expect(entry.errors)
+ .to include 'links config should be a array'
+ end
+ end
+
+ context 'when the value of link is empty' do
+ let(:config) { { link: [] } }
+
+ it 'reports error' do
+ expect(entry.errors)
+ .to include "links config should be a array"
+ end
+ end
+
+ context 'when there is an unknown key present' do
+ let(:config) { { test: 100 } }
+
+ it 'reports error' do
+ expect(entry.errors)
+ .to include 'links config should be a array'
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/entry/release/assets_spec.rb b/spec/lib/gitlab/ci/config/entry/release/assets_spec.rb
new file mode 100644
index 00000000000..08ad5764eaa
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/release/assets_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Config::Entry::Release::Assets do
+ let(:entry) { described_class.new(config) }
+
+ describe 'validation' do
+ context 'when entry config value is correct' do
+ let(:config) do
+ {
+ links: [
+ {
+ name: "cool-app.zip",
+ url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.zip"
+ },
+ {
+ name: "cool-app.exe",
+ url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.exe"
+ }
+ ]
+ }
+ end
+
+ describe '#value' do
+ it 'returns assets configuration' do
+ expect(entry.value).to eq config
+ end
+ end
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+ end
+
+ context 'when entry value is not correct' do
+ describe '#errors' do
+ context 'when value of assets is invalid' do
+ let(:config) { { links: 'xyz' } }
+
+ it 'reports error' do
+ expect(entry.errors)
+ .to include 'assets links should be an array of hashes'
+ end
+ end
+
+ context 'when value of assets:links is empty' do
+ let(:config) { { links: [] } }
+
+ it 'reports error' do
+ expect(entry.errors)
+ .to include "assets links can't be blank"
+ end
+ end
+
+ context 'when there is an unknown key present' do
+ let(:config) { { test: 100 } }
+
+ it 'reports error' do
+ expect(entry.errors)
+ .to include 'assets config contains unknown keys: test'
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/entry/release_spec.rb b/spec/lib/gitlab/ci/config/entry/release_spec.rb
new file mode 100644
index 00000000000..500897569e9
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/release_spec.rb
@@ -0,0 +1,114 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Config::Entry::Release do
+ let(:entry) { described_class.new(config) }
+
+ describe 'validation' do
+ context 'when entry config value is correct' do
+ let(:config) { { tag_name: 'v0.06', description: "./release_changelog.txt" } }
+
+ describe '#value' do
+ it 'returns release configuration' do
+ expect(entry.value).to eq config
+ end
+ end
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+ end
+
+ context "when value includes 'assets' keyword" do
+ let(:config) do
+ {
+ tag_name: 'v0.06',
+ description: "./release_changelog.txt",
+ assets: [
+ {
+ name: "cool-app.zip",
+ url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.zip"
+ }
+ ]
+ }
+ end
+
+ describe '#value' do
+ it 'returns release configuration' do
+ expect(entry.value).to eq config
+ end
+ end
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+ end
+
+ context "when value includes 'name' keyword" do
+ let(:config) do
+ {
+ tag_name: 'v0.06',
+ description: "./release_changelog.txt",
+ name: "Release $CI_TAG_NAME"
+ }
+ end
+
+ describe '#value' do
+ it 'returns release configuration' do
+ expect(entry.value).to eq config
+ end
+ end
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+ end
+
+ context 'when entry value is not correct' do
+ describe '#errors' do
+ context 'when value of attribute is invalid' do
+ let(:config) { { description: 10 } }
+
+ it 'reports error' do
+ expect(entry.errors)
+ .to include 'release description should be a string'
+ end
+ end
+
+ context 'when release description is missing' do
+ let(:config) { { tag_name: 'v0.06' } }
+
+ it 'reports error' do
+ expect(entry.errors)
+ .to include "release description can't be blank"
+ end
+ end
+
+ context 'when release tag_name is missing' do
+ let(:config) { { description: "./release_changelog.txt" } }
+
+ it 'reports error' do
+ expect(entry.errors)
+ .to include "release tag name can't be blank"
+ end
+ end
+
+ context 'when there is an unknown key present' do
+ let(:config) { { test: 100 } }
+
+ it 'reports error' do
+ expect(entry.errors)
+ .to include 'release config contains unknown keys: test'
+ end
+ end
+ end
+ end
+ end
+end
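
Putting the job and release entries together, a release job is described by a hash roughly like the one below (mirroring the valid fixtures above; construction details are simplified):

  # Sketch: the shape of a job config using the `release` keyword validated
  # by the entries in these specs.
  release_job_config = {
    script: ['make changelog | tee release_changelog.txt'],
    release: {
      tag_name: 'v0.06',
      name: 'Release $CI_TAG_NAME',
      description: './release_changelog.txt'
    }
  }

  entry = Gitlab::Ci::Config::Entry::Job.new(release_job_config, name: :release)
  entry.compose!
  entry.valid? # => true, per the job entry examples above
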
diff --git a/spec/lib/gitlab/ci/config/entry/reports_spec.rb b/spec/lib/gitlab/ci/config/entry/reports_spec.rb
index 3c352c30e55..8562885c90c 100644
--- a/spec/lib/gitlab/ci/config/entry/reports_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/reports_spec.rb
@@ -41,6 +41,7 @@ describe Gitlab::Ci::Config::Entry::Reports do
:container_scanning | 'gl-container-scanning-report.json'
:dast | 'gl-dast-report.json'
:license_management | 'gl-license-management-report.json'
+ :license_scanning | 'gl-license-scanning-report.json'
:performance | 'performance.json'
end
diff --git a/spec/lib/gitlab/ci/config/entry/root_spec.rb b/spec/lib/gitlab/ci/config/entry/root_spec.rb
index 43bd53b780f..95a5b8e88fb 100644
--- a/spec/lib/gitlab/ci/config/entry/root_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/root_spec.rb
@@ -27,16 +27,29 @@ describe Gitlab::Ci::Config::Entry::Root do
context 'when configuration is valid' do
context 'when top-level entries are defined' do
let(:hash) do
- { before_script: %w(ls pwd),
+ {
+ before_script: %w(ls pwd),
image: 'ruby:2.2',
default: {},
services: ['postgres:9.1', 'mysql:5.5'],
variables: { VAR: 'value' },
after_script: ['make clean'],
- stages: %w(build pages),
+ stages: %w(build pages release),
cache: { key: 'k', untracked: true, paths: ['public/'] },
rspec: { script: %w[rspec ls] },
- spinach: { before_script: [], variables: {}, script: 'spinach' } }
+ spinach: { before_script: [], variables: {}, script: 'spinach' },
+ release: {
+ stage: 'release',
+ before_script: [],
+ after_script: [],
+ script: ["make changelog | tee release_changelog.txt"],
+ release: {
+ tag_name: 'v0.06',
+ name: "Release $CI_TAG_NAME",
+ description: "./release_changelog.txt"
+ }
+ }
+ }
end
describe '#compose!' do
@@ -87,7 +100,7 @@ describe Gitlab::Ci::Config::Entry::Root do
describe '#stages_value' do
context 'when stages key defined' do
it 'returns array of stages' do
- expect(root.stages_value).to eq %w[build pages]
+ expect(root.stages_value).to eq %w[build pages release]
end
end
@@ -105,8 +118,9 @@ describe Gitlab::Ci::Config::Entry::Root do
describe '#jobs_value' do
it 'returns jobs configuration' do
- expect(root.jobs_value).to eq(
- rspec: { name: :rspec,
+ expect(root.jobs_value.keys).to eq([:rspec, :spinach, :release])
+ expect(root.jobs_value[:rspec]).to eq(
+ { name: :rspec,
script: %w[rspec ls],
before_script: %w(ls pwd),
image: { name: 'ruby:2.2' },
@@ -116,8 +130,10 @@ describe Gitlab::Ci::Config::Entry::Root do
variables: {},
ignore: false,
after_script: ['make clean'],
- only: { refs: %w[branches tags] } },
- spinach: { name: :spinach,
+ only: { refs: %w[branches tags] } }
+ )
+ expect(root.jobs_value[:spinach]).to eq(
+ { name: :spinach,
before_script: [],
script: %w[spinach],
image: { name: 'ruby:2.2' },
@@ -129,6 +145,20 @@ describe Gitlab::Ci::Config::Entry::Root do
after_script: ['make clean'],
only: { refs: %w[branches tags] } }
)
+ expect(root.jobs_value[:release]).to eq(
+ { name: :release,
+ stage: 'release',
+ before_script: [],
+ script: ["make changelog | tee release_changelog.txt"],
+ release: { name: "Release $CI_TAG_NAME", tag_name: 'v0.06', description: "./release_changelog.txt" },
+ image: { name: "ruby:2.2" },
+ services: [{ name: "postgres:9.1" }, { name: "mysql:5.5" }],
+ cache: { key: "k", untracked: true, paths: ["public/"], policy: "pull-push" },
+ only: { refs: %w(branches tags) },
+ variables: {},
+ after_script: [],
+ ignore: false }
+ )
end
end
end
@@ -261,7 +291,7 @@ describe Gitlab::Ci::Config::Entry::Root do
# despite the fact, that key is present. See issue #18775 for more
# details.
#
- context 'when entires specified but not defined' do
+ context 'when entries are specified but not defined' do
before do
root.compose!
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/config/content_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/config/content_spec.rb
index 7ebe5842fd0..4c4359ad5d2 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/config/content_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/config/content_spec.rb
@@ -15,6 +15,42 @@ describe Gitlab::Ci::Pipeline::Chain::Config::Content do
stub_feature_flags(ci_root_config_content: false)
end
+ context 'when bridge job is passed in as parameter' do
+ let(:ci_config_path) { nil }
+ let(:bridge) { create(:ci_bridge) }
+
+ before do
+ command.bridge = bridge
+ end
+
+ context 'when bridge job has downstream yaml' do
+ before do
+ allow(bridge).to receive(:yaml_for_downstream).and_return('the-yaml')
+ end
+
+ it 'returns the content already available in command' do
+ subject.perform!
+
+ expect(pipeline.config_source).to eq 'bridge_source'
+ expect(command.config_content).to eq 'the-yaml'
+ end
+ end
+
+ context 'when bridge job does not have downstream yaml' do
+ before do
+ allow(bridge).to receive(:yaml_for_downstream).and_return(nil)
+ end
+
+ it 'returns the next available source' do
+ subject.perform!
+
+ expect(pipeline.config_source).to eq 'auto_devops_source'
+ template = Gitlab::Template::GitlabCiYmlTemplate.find('Beta/Auto-DevOps')
+ expect(command.config_content).to eq(template.content)
+ end
+ end
+ end
+
context 'when config is defined in a custom path in the repository' do
let(:ci_config_path) { 'path/to/config.yml' }
@@ -29,6 +65,7 @@ describe Gitlab::Ci::Pipeline::Chain::Config::Content do
subject.perform!
expect(pipeline.config_source).to eq 'repository_source'
+ expect(pipeline.pipeline_config).to be_nil
expect(command.config_content).to eq('the-content')
end
end
@@ -40,7 +77,8 @@ describe Gitlab::Ci::Pipeline::Chain::Config::Content do
subject.perform!
expect(pipeline.config_source).to eq 'auto_devops_source'
- template = Gitlab::Template::GitlabCiYmlTemplate.find('Auto-DevOps')
+ expect(pipeline.pipeline_config).to be_nil
+ template = Gitlab::Template::GitlabCiYmlTemplate.find('Beta/Auto-DevOps')
expect(command.config_content).to eq(template.content)
end
end
@@ -52,7 +90,8 @@ describe Gitlab::Ci::Pipeline::Chain::Config::Content do
subject.perform!
expect(pipeline.config_source).to eq 'auto_devops_source'
- template = Gitlab::Template::GitlabCiYmlTemplate.find('Auto-DevOps')
+ expect(pipeline.pipeline_config).to be_nil
+ template = Gitlab::Template::GitlabCiYmlTemplate.find('Beta/Auto-DevOps')
expect(command.config_content).to eq(template.content)
end
end
@@ -71,6 +110,7 @@ describe Gitlab::Ci::Pipeline::Chain::Config::Content do
subject.perform!
expect(pipeline.config_source).to eq 'repository_source'
+ expect(pipeline.pipeline_config).to be_nil
expect(command.config_content).to eq('the-content')
end
end
@@ -82,12 +122,34 @@ describe Gitlab::Ci::Pipeline::Chain::Config::Content do
expect(project).to receive(:auto_devops_enabled?).and_return(true)
end
- it 'returns the content of AutoDevops template' do
- subject.perform!
+ context 'when beta is enabled' do
+ before do
+ stub_feature_flags(auto_devops_beta: true)
+ end
- expect(pipeline.config_source).to eq 'auto_devops_source'
- template = Gitlab::Template::GitlabCiYmlTemplate.find('Auto-DevOps')
- expect(command.config_content).to eq(template.content)
+ it 'returns the content of AutoDevops template' do
+ subject.perform!
+
+ expect(pipeline.config_source).to eq 'auto_devops_source'
+ expect(pipeline.pipeline_config).to be_nil
+ template = Gitlab::Template::GitlabCiYmlTemplate.find('Beta/Auto-DevOps')
+ expect(command.config_content).to eq(template.content)
+ end
+ end
+
+ context 'when beta is disabled' do
+ before do
+ stub_feature_flags(auto_devops_beta: false)
+ end
+
+ it 'returns the content of AutoDevops template' do
+ subject.perform!
+
+ expect(pipeline.config_source).to eq 'auto_devops_source'
+ expect(pipeline.pipeline_config).to be_nil
+ template = Gitlab::Template::GitlabCiYmlTemplate.find('Auto-DevOps')
+ expect(command.config_content).to eq(template.content)
+ end
end
end
@@ -102,14 +164,39 @@ describe Gitlab::Ci::Pipeline::Chain::Config::Content do
subject.perform!
expect(pipeline.config_source).to eq('unknown_source')
+ expect(pipeline.pipeline_config).to be_nil
expect(command.config_content).to be_nil
expect(pipeline.errors.full_messages).to include('Missing CI config file')
end
end
end
+ context 'when bridge job is passed in as parameter' do
+ let(:ci_config_path) { nil }
+ let(:bridge) { create(:ci_bridge) }
+
+ before do
+ command.bridge = bridge
+ allow(bridge).to receive(:yaml_for_downstream).and_return('the-yaml')
+ end
+
+ it 'returns the content already available in command' do
+ subject.perform!
+
+ expect(pipeline.config_source).to eq 'bridge_source'
+ expect(command.config_content).to eq 'the-yaml'
+ end
+ end
+
context 'when config is defined in a custom path in the repository' do
let(:ci_config_path) { 'path/to/config.yml' }
+ let(:config_content_result) do
+ <<~EOY
+ ---
+ include:
+ - local: #{ci_config_path}
+ EOY
+ end
before do
expect(project.repository)
@@ -122,47 +209,59 @@ describe Gitlab::Ci::Pipeline::Chain::Config::Content do
subject.perform!
expect(pipeline.config_source).to eq 'repository_source'
- expect(command.config_content).to eq(<<~EOY)
- ---
- include:
- - local: #{ci_config_path}
- EOY
+ expect(pipeline.pipeline_config.content).to eq(config_content_result)
+ expect(command.config_content).to eq(config_content_result)
end
end
context 'when config is defined remotely' do
let(:ci_config_path) { 'http://example.com/path/to/ci/config.yml' }
+ let(:config_content_result) do
+ <<~EOY
+ ---
+ include:
+ - remote: #{ci_config_path}
+ EOY
+ end
it 'builds root config including the remote config' do
subject.perform!
expect(pipeline.config_source).to eq 'remote_source'
- expect(command.config_content).to eq(<<~EOY)
- ---
- include:
- - remote: #{ci_config_path}
- EOY
+ expect(pipeline.pipeline_config.content).to eq(config_content_result)
+ expect(command.config_content).to eq(config_content_result)
end
end
context 'when config is defined in a separate repository' do
let(:ci_config_path) { 'path/to/.gitlab-ci.yml@another-group/another-repo' }
-
- it 'builds root config including the path to another repository' do
- subject.perform!
-
- expect(pipeline.config_source).to eq 'external_project_source'
- expect(command.config_content).to eq(<<~EOY)
+ let(:config_content_result) do
+ <<~EOY
---
include:
- project: another-group/another-repo
file: path/to/.gitlab-ci.yml
EOY
end
+
+ it 'builds root config including the path to another repository' do
+ subject.perform!
+
+ expect(pipeline.config_source).to eq 'external_project_source'
+ expect(pipeline.pipeline_config.content).to eq(config_content_result)
+ expect(command.config_content).to eq(config_content_result)
+ end
end
context 'when config is defined in the default .gitlab-ci.yml' do
let(:ci_config_path) { nil }
+ let(:config_content_result) do
+ <<~EOY
+ ---
+ include:
+ - local: ".gitlab-ci.yml"
+ EOY
+ end
before do
expect(project.repository)
@@ -175,30 +274,59 @@ describe Gitlab::Ci::Pipeline::Chain::Config::Content do
subject.perform!
expect(pipeline.config_source).to eq 'repository_source'
- expect(command.config_content).to eq(<<~EOY)
- ---
- include:
- - local: ".gitlab-ci.yml"
- EOY
+ expect(pipeline.pipeline_config.content).to eq(config_content_result)
+ expect(command.config_content).to eq(config_content_result)
end
end
context 'when config is the Auto-Devops template' do
let(:ci_config_path) { nil }
+ let(:config_content_result) do
+ <<~EOY
+ ---
+ include:
+ - template: Beta/Auto-DevOps.gitlab-ci.yml
+ EOY
+ end
before do
expect(project).to receive(:auto_devops_enabled?).and_return(true)
end
- it 'builds root config including the auto-devops template' do
- subject.perform!
+ context 'when beta is enabled' do
+ before do
+ stub_feature_flags(auto_devops_beta: true)
+ end
- expect(pipeline.config_source).to eq 'auto_devops_source'
- expect(command.config_content).to eq(<<~EOY)
- ---
- include:
- - template: Auto-DevOps.gitlab-ci.yml
- EOY
+ it 'builds root config including the auto-devops template' do
+ subject.perform!
+
+ expect(pipeline.config_source).to eq 'auto_devops_source'
+ expect(pipeline.pipeline_config.content).to eq(config_content_result)
+ expect(command.config_content).to eq(config_content_result)
+ end
+ end
+
+ context 'when beta is disabled' do
+ before do
+ stub_feature_flags(auto_devops_beta: false)
+ end
+
+ let(:config_content_result) do
+ <<~EOY
+ ---
+ include:
+ - template: Auto-DevOps.gitlab-ci.yml
+ EOY
+ end
+
+ it 'builds root config including the auto-devops template' do
+ subject.perform!
+
+ expect(pipeline.config_source).to eq 'auto_devops_source'
+ expect(pipeline.pipeline_config.content).to eq(config_content_result)
+ expect(command.config_content).to eq(config_content_result)
+ end
end
end
@@ -213,6 +341,7 @@ describe Gitlab::Ci::Pipeline::Chain::Config::Content do
subject.perform!
expect(pipeline.config_source).to eq('unknown_source')
+ expect(pipeline.pipeline_config).to be_nil
expect(command.config_content).to be_nil
expect(pipeline.errors.full_messages).to include('Missing CI config file')
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/abilities_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/abilities_spec.rb
index ac370433955..24d3beb35b9 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/validate/abilities_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/validate/abilities_spec.rb
@@ -76,45 +76,8 @@ describe Gitlab::Ci::Pipeline::Chain::Validate::Abilities do
end
end
- context 'when pipeline triggered by legacy trigger' do
- let(:user) { nil }
- let(:trigger_request) do
- build_stubbed(:ci_trigger_request, trigger: build_stubbed(:ci_trigger, owner: nil))
- end
-
- context 'when :use_legacy_pipeline_triggers feature flag is enabled' do
- before do
- stub_feature_flags(use_legacy_pipeline_triggers: true)
- step.perform!
- end
-
- it 'allows legacy triggers to create a pipeline' do
- expect(pipeline).to be_valid
- end
-
- it 'does not break the chain' do
- expect(step.break?).to eq false
- end
- end
-
- context 'when :use_legacy_pipeline_triggers feature flag is disabled' do
- before do
- stub_feature_flags(use_legacy_pipeline_triggers: false)
- step.perform!
- end
-
- it 'prevents legacy triggers from creating a pipeline' do
- expect(pipeline.errors.to_a).to include /Trigger token is invalid/
- end
-
- it 'breaks the pipeline builder chain' do
- expect(step.break?).to eq true
- end
- end
- end
-
- describe '#allowed_to_create?' do
- subject { step.allowed_to_create? }
+ describe '#allowed_to_write_ref?' do
+ subject { step.send(:allowed_to_write_ref?) }
context 'when user is a developer' do
before do
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build/resource_group_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build/resource_group_spec.rb
new file mode 100644
index 00000000000..bf6985156d3
--- /dev/null
+++ b/spec/lib/gitlab/ci/pipeline/seed/build/resource_group_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Pipeline::Seed::Build::ResourceGroup do
+ let_it_be(:project) { create(:project) }
+ let(:job) { build(:ci_build, project: project) }
+ let(:seed) { described_class.new(job, resource_group_key) }
+
+ describe '#to_resource' do
+ subject { seed.to_resource }
+
+ context 'when resource group key is specified' do
+ let(:resource_group_key) { 'iOS' }
+
+ it 'returns a resource group object' do
+ is_expected.to be_a(Ci::ResourceGroup)
+ expect(subject.key).to eq('iOS')
+ end
+
+ context 'when the resource group key is invalid' do
+ let(:resource_group_key) { ':::' }
+
+ it 'returns nothing' do
+ is_expected.to be_nil
+ end
+ end
+
+ context 'when there is a resource group already' do
+ let!(:resource_group) { create(:ci_resource_group, project: project, key: 'iOS') }
+
+ it 'does not create a new resource group' do
+ expect { subject }.not_to change { Ci::ResourceGroup.count }
+ end
+ end
+ end
+
+ context 'when resource group key is nil' do
+ let(:resource_group_key) { nil }
+
+ it 'returns nothing' do
+ is_expected.to be_nil
+ end
+ end
+ end
+end
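
As a usage sketch (the exact call site in the pipeline seeding chain is not part of this diff), a build seed hands its `resource_group` key to this class and attaches whatever comes back:

  # Sketch: nil is returned for a blank or invalid key, so assignment is
  # conditional; an existing group with the same key is reused.
  seed = Gitlab::Ci::Pipeline::Seed::Build::ResourceGroup.new(job, 'iOS')
  resource_group = seed.to_resource
  job.resource_group = resource_group if resource_group
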
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
index 2ae513aea1b..5526ec9e16f 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
@@ -231,6 +231,15 @@ describe Gitlab::Ci::Pipeline::Seed::Build do
end
end
end
+
+ context 'when job belongs to a resource group' do
+ let(:attributes) { { name: 'rspec', ref: 'master', resource_group_key: 'iOS' } }
+
+ it 'returns a job with resource group' do
+ expect(subject.resource_group).not_to be_nil
+ expect(subject.resource_group.key).to eq('iOS')
+ end
+ end
end
context 'when job is a bridge' do
diff --git a/spec/lib/gitlab/ci/pipeline/seed/deployment_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/deployment_spec.rb
index 4e63f60ea6b..90f4b06cea0 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/deployment_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/deployment_spec.rb
@@ -3,8 +3,13 @@
require 'spec_helper'
describe Gitlab::Ci::Pipeline::Seed::Deployment do
- let_it_be(:project) { create(:project) }
- let(:job) { build(:ci_build, project: project) }
+ let_it_be(:project) { create(:project, :repository) }
+ let(:pipeline) do
+ create(:ci_pipeline, project: project,
+ sha: 'b83d6e391c22777fca1ed3012fce84f633d7fed0')
+ end
+
+ let(:job) { build(:ci_build, project: project, pipeline: pipeline) }
let(:seed) { described_class.new(job) }
let(:attributes) { {} }
diff --git a/spec/lib/gitlab/ci/status/external/factory_spec.rb b/spec/lib/gitlab/ci/status/external/factory_spec.rb
index 9d7dfc42848..9c11e42fc5a 100644
--- a/spec/lib/gitlab/ci/status/external/factory_spec.rb
+++ b/spec/lib/gitlab/ci/status/external/factory_spec.rb
@@ -22,7 +22,7 @@ describe Gitlab::Ci::Status::External::Factory do
end
let(:expected_status) do
- Gitlab::Ci::Status.const_get(simple_status.capitalize, false)
+ Gitlab::Ci::Status.const_get(simple_status.to_s.camelize, false)
end
it "fabricates a core status #{simple_status}" do
diff --git a/spec/lib/gitlab/ci/status/factory_spec.rb b/spec/lib/gitlab/ci/status/factory_spec.rb
index c6d7a1ec5d9..219eb53d9df 100644
--- a/spec/lib/gitlab/ci/status/factory_spec.rb
+++ b/spec/lib/gitlab/ci/status/factory_spec.rb
@@ -13,7 +13,7 @@ describe Gitlab::Ci::Status::Factory do
let(:resource) { double('resource', status: simple_status) }
let(:expected_status) do
- Gitlab::Ci::Status.const_get(simple_status.capitalize, false)
+ Gitlab::Ci::Status.const_get(simple_status.to_s.camelize, false)
end
it "fabricates a core status #{simple_status}" do
diff --git a/spec/lib/gitlab/ci/status/pipeline/factory_spec.rb b/spec/lib/gitlab/ci/status/pipeline/factory_spec.rb
index 3acc767ab7a..838154759cb 100644
--- a/spec/lib/gitlab/ci/status/pipeline/factory_spec.rb
+++ b/spec/lib/gitlab/ci/status/pipeline/factory_spec.rb
@@ -18,7 +18,7 @@ describe Gitlab::Ci::Status::Pipeline::Factory do
let(:pipeline) { create(:ci_pipeline, status: simple_status) }
let(:expected_status) do
- Gitlab::Ci::Status.const_get(simple_status.capitalize, false)
+ Gitlab::Ci::Status.const_get(simple_status.camelize, false)
end
it "matches correct core status for #{simple_status}" do
diff --git a/spec/lib/gitlab/ci/status/stage/factory_spec.rb b/spec/lib/gitlab/ci/status/stage/factory_spec.rb
index dcb53712157..317756ea13c 100644
--- a/spec/lib/gitlab/ci/status/stage/factory_spec.rb
+++ b/spec/lib/gitlab/ci/status/stage/factory_spec.rb
@@ -34,7 +34,7 @@ describe Gitlab::Ci::Status::Stage::Factory do
it "fabricates a core status #{core_status}" do
expect(status).to be_a(
- Gitlab::Ci::Status.const_get(core_status.capitalize, false))
+ Gitlab::Ci::Status.const_get(core_status.camelize, false))
end
it 'extends core status with common stage methods' do
diff --git a/spec/lib/gitlab/ci/status/waiting_for_resource_spec.rb b/spec/lib/gitlab/ci/status/waiting_for_resource_spec.rb
new file mode 100644
index 00000000000..ed00dac8560
--- /dev/null
+++ b/spec/lib/gitlab/ci/status/waiting_for_resource_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Status::WaitingForResource do
+ subject do
+ described_class.new(double('subject'), double('user'))
+ end
+
+ describe '#text' do
+ it { expect(subject.text).to eq 'waiting' }
+ end
+
+ describe '#label' do
+ it { expect(subject.label).to eq 'waiting for resource' }
+ end
+
+ describe '#icon' do
+ it { expect(subject.icon).to eq 'status_pending' }
+ end
+
+ describe '#favicon' do
+ it { expect(subject.favicon).to eq 'favicon_pending' }
+ end
+
+ describe '#group' do
+ it { expect(subject.group).to eq 'waiting-for-resource' }
+ end
+end
diff --git a/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb
index c2f9930056a..12600d97b2f 100644
--- a/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb
@@ -9,7 +9,7 @@ describe 'Auto-DevOps.gitlab-ci.yml' do
let(:user) { create(:admin) }
let(:default_branch) { 'master' }
let(:pipeline_branch) { default_branch }
- let(:project) { create(:project, :custom_repo, files: { 'README.md' => '' }) }
+ let(:project) { create(:project, :auto_devops, :custom_repo, files: { 'README.md' => '' }) }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
let(:pipeline) { service.execute!(:push) }
let(:build_names) { pipeline.builds.pluck(:name) }
@@ -107,4 +107,52 @@ describe 'Auto-DevOps.gitlab-ci.yml' do
end
end
end
+
+ describe 'build-pack detection' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:case_name, :files, :variables, :include_build_names, :not_include_build_names) do
+ 'No match' | { 'README.md' => '' } | {} | %w() | %w(build test)
+ 'Buildpack' | { 'README.md' => '' } | { 'BUILDPACK_URL' => 'http://example.com' } | %w(build test) | %w()
+ 'Explicit set' | { 'README.md' => '' } | { 'AUTO_DEVOPS_EXPLICITLY_ENABLED' => '1' } | %w(build test) | %w()
+ 'Explicit unset' | { 'README.md' => '' } | { 'AUTO_DEVOPS_EXPLICITLY_ENABLED' => '0' } | %w() | %w(build test)
+ 'Dockerfile' | { 'Dockerfile' => '' } | {} | %w(build test) | %w()
+ 'Clojure' | { 'project.clj' => '' } | {} | %w(build test) | %w()
+ 'Go modules' | { 'go.mod' => '' } | {} | %w(build test) | %w()
+ 'Go gb' | { 'src/gitlab.com/gopackage.go' => '' } | {} | %w(build test) | %w()
+ 'Gradle' | { 'gradlew' => '' } | {} | %w(build test) | %w()
+ 'Java' | { 'pom.xml' => '' } | {} | %w(build test) | %w()
+ 'Multi-buildpack' | { '.buildpacks' => '' } | {} | %w(build test) | %w()
+ 'NodeJS' | { 'package.json' => '' } | {} | %w(build test) | %w()
+ 'PHP' | { 'composer.json' => '' } | {} | %w(build test) | %w()
+ 'Play' | { 'conf/application.conf' => '' } | {} | %w(build test) | %w()
+ 'Python' | { 'Pipfile' => '' } | {} | %w(build test) | %w()
+ 'Ruby' | { 'Gemfile' => '' } | {} | %w(build test) | %w()
+ 'Scala' | { 'build.sbt' => '' } | {} | %w(build test) | %w()
+ 'Static' | { '.static' => '' } | {} | %w(build test) | %w()
+ end
+
+ with_them do
+ subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('Beta/Auto-DevOps') }
+
+ let(:user) { create(:admin) }
+ let(:project) { create(:project, :custom_repo, files: files) }
+ let(:service) { Ci::CreatePipelineService.new(project, user, ref: 'master' ) }
+ let(:pipeline) { service.execute(:push) }
+ let(:build_names) { pipeline.builds.pluck(:name) }
+
+ before do
+ stub_ci_pipeline_yaml_file(template.content)
+ allow_any_instance_of(Ci::BuildScheduleWorker).to receive(:perform).and_return(true)
+ variables.each do |(key, value)|
+ create(:ci_variable, project: project, key: key, value: value)
+ end
+ end
+
+ it 'creates a pipeline with the expected jobs' do
+ expect(build_names).to include(*include_build_names)
+ expect(build_names).not_to include(*not_include_build_names)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/trace_spec.rb b/spec/lib/gitlab/ci/trace_spec.rb
index f7bc5686b68..574c2b73722 100644
--- a/spec/lib/gitlab/ci/trace_spec.rb
+++ b/spec/lib/gitlab/ci/trace_spec.rb
@@ -26,4 +26,66 @@ describe Gitlab::Ci::Trace, :clean_gitlab_redis_shared_state do
it_behaves_like 'trace with enabled live trace feature'
end
+
+ describe '#update_interval' do
+ context 'it is not being watched' do
+ it 'returns 30 seconds' do
+ expect(trace.update_interval).to eq(30.seconds)
+ end
+ end
+
+ context 'it is being watched' do
+ before do
+ trace.being_watched!
+ end
+
+ it 'returns 3 seconds' do
+ expect(trace.update_interval).to eq(3.seconds)
+ end
+ end
+ end
+
+ describe '#being_watched!' do
+ let(:cache_key) { "gitlab:ci:trace:#{build.id}:watched" }
+
+ it 'sets gitlab:ci:trace:<job.id>:watched in redis' do
+ trace.being_watched!
+
+ result = Gitlab::Redis::SharedState.with do |redis|
+ redis.exists(cache_key)
+ end
+
+ expect(result).to eq(true)
+ end
+
+ it 'updates the expiry of gitlab:ci:trace:<job.id>:watched in redis', :clean_gitlab_redis_shared_state do
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.set(cache_key, true, ex: 4.seconds)
+ end
+
+ expect do
+ trace.being_watched!
+ end.to change { Gitlab::Redis::SharedState.with { |redis| redis.pttl(cache_key) } }
+ end
+ end
+
+ describe '#being_watched?' do
+ context 'gitlab:ci:trace:<job.id>:watched in redis is set', :clean_gitlab_redis_shared_state do
+ before do
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.set("gitlab:ci:trace:#{build.id}:watched", true)
+ end
+ end
+
+ it 'returns true' do
+ expect(trace.being_watched?).to be(true)
+ end
+ end
+
+ context 'gitlab:ci:trace:<job.id>:watched in redis is not set' do
+ it 'returns false' do
+ expect(trace.being_watched?).to be(false)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index fea8073f999..11168a969fc 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -241,6 +241,21 @@ module Gitlab
end
end
end
+
+ describe 'resource group' do
+ context 'when resource group is defined' do
+ let(:config) do
+ YAML.dump(rspec: {
+ script: 'test',
+ resource_group: 'iOS'
+ })
+ end
+
+ it 'has the attributes' do
+ expect(subject[:resource_group_key]).to eq 'iOS'
+ end
+ end
+ end
end
describe '#stages_attributes' do
@@ -1270,6 +1285,59 @@ module Gitlab
end
end
+ describe "release" do
+ let(:processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
+ let(:config) do
+ {
+ stages: ["build", "test", "release"], # rubocop:disable Style/WordArray
+ release: {
+ stage: "release",
+ only: ["tags"],
+ script: ["make changelog | tee release_changelog.txt"],
+ release: {
+ tag_name: "$CI_COMMIT_TAG",
+ name: "Release $CI_TAG_NAME",
+ description: "./release_changelog.txt",
+ assets: {
+ links: [
+ {
+ name: "cool-app.zip",
+ url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.zip"
+ },
+ {
+ name: "cool-app.exe",
+ url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.exe"
+ }
+ ]
+ }
+ }
+ }
+ }
+ end
+
+ context 'with feature flag active' do
+ before do
+ stub_feature_flags(ci_release_generation: true)
+ end
+
+ it "returns release info" do
+ expect(processor.stage_builds_attributes('release').first[:options])
+ .to eq(config[:release].except(:stage, :only))
+ end
+ end
+
+ context 'with feature flag inactive' do
+ before do
+ stub_feature_flags(ci_release_generation: false)
+ end
+
+ it 'raises error' do
+ expect { processor }.to raise_error(
+ 'jobs:release config release features are not enabled: release')
+ end
+ end
+ end
+
describe '#environment' do
let(:config) do
{
@@ -1667,6 +1735,39 @@ module Gitlab
it { expect { subject }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'jobs:test1 dependencies the build2 should be part of needs') }
end
+
+ context 'needs with a Hash type and dependencies with a string type that are mismatching' do
+ let(:needs) do
+ [
+ "build1",
+ { job: "build2" }
+ ]
+ end
+ let(:dependencies) { %w(build3) }
+
+ it { expect { subject }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'jobs:test1 dependencies the build3 should be part of needs') }
+ end
+
+ context 'needs with an array type and dependency with a string type' do
+ let(:needs) { %w(build1) }
+ let(:dependencies) { 'deploy' }
+
+ it { expect { subject }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'jobs:test1 dependencies should be an array of strings') }
+ end
+
+ context 'needs with a string type and dependency with an array type' do
+ let(:needs) { 'build1' }
+ let(:dependencies) { %w(deploy) }
+
+ it { expect { subject }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'jobs:test1:needs config can only be a hash or an array') }
+ end
+
+ context 'needs with a Hash type and dependency with a string type' do
+ let(:needs) { { job: 'build1' } }
+ let(:dependencies) { 'deploy' }
+
+ it { expect { subject }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'jobs:test1 dependencies should be an array of strings') }
+ end
end
context 'with when/rules conflict' do
diff --git a/spec/lib/gitlab/closing_issue_extractor_spec.rb b/spec/lib/gitlab/closing_issue_extractor_spec.rb
index fdd01f58c9d..510876a5945 100644
--- a/spec/lib/gitlab/closing_issue_extractor_spec.rb
+++ b/spec/lib/gitlab/closing_issue_extractor_spec.rb
@@ -438,6 +438,17 @@ describe Gitlab::ClosingIssueExtractor do
.to match_array([issue])
end
end
+
+ context "with autoclose referenced issues disabled" do
+ before do
+ project.update!(autoclose_referenced_issues: false)
+ end
+
+ it do
+ message = "Awesome commit (Closes #{reference})"
+ expect(subject.closed_by_message(message)).to eq([])
+ end
+ end
end
def urls
diff --git a/spec/lib/gitlab/config/entry/attributable_spec.rb b/spec/lib/gitlab/config/entry/attributable_spec.rb
index 6b548d5c4a8..bc29a194181 100644
--- a/spec/lib/gitlab/config/entry/attributable_spec.rb
+++ b/spec/lib/gitlab/config/entry/attributable_spec.rb
@@ -59,7 +59,7 @@ describe Gitlab::Config::Entry::Attributable do
end
end
- expectation.to raise_error(ArgumentError, 'Method already defined!')
+ expectation.to raise_error(ArgumentError, 'Method already defined: length')
end
end
end
diff --git a/spec/lib/gitlab/cycle_analytics/production_stage_spec.rb b/spec/lib/gitlab/cycle_analytics/production_stage_spec.rb
index 4d0cc91a318..eceea474988 100644
--- a/spec/lib/gitlab/cycle_analytics/production_stage_spec.rb
+++ b/spec/lib/gitlab/cycle_analytics/production_stage_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
require 'lib/gitlab/cycle_analytics/shared_stage_spec'
describe Gitlab::CycleAnalytics::ProductionStage do
- let(:stage_name) { :production }
+ let(:stage_name) { 'Total' }
it_behaves_like 'base stage'
end
diff --git a/spec/lib/gitlab/danger/changelog_spec.rb b/spec/lib/gitlab/danger/changelog_spec.rb
index 689957993ec..64f87ec8cd3 100644
--- a/spec/lib/gitlab/danger/changelog_spec.rb
+++ b/spec/lib/gitlab/danger/changelog_spec.rb
@@ -106,18 +106,6 @@ describe Gitlab::Danger::Changelog do
end
end
- describe '#sanitized_mr_title' do
- subject { changelog.sanitized_mr_title }
-
- [
- 'WIP: My MR title',
- 'My MR title'
- ].each do |mr_title|
- let(:mr_json) { { "title" => mr_title } }
- it { is_expected.to eq("My MR title") }
- end
- end
-
describe '#ee_changelog?' do
context 'is ee changelog' do
[
diff --git a/spec/lib/gitlab/danger/commit_linter_spec.rb b/spec/lib/gitlab/danger/commit_linter_spec.rb
new file mode 100644
index 00000000000..0cf7ac64e43
--- /dev/null
+++ b/spec/lib/gitlab/danger/commit_linter_spec.rb
@@ -0,0 +1,315 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rspec-parameterized'
+require_relative 'danger_spec_helper'
+
+require 'gitlab/danger/commit_linter'
+
+describe Gitlab::Danger::CommitLinter do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:total_files_changed) { 2 }
+ let(:total_lines_changed) { 10 }
+ let(:stats) { { total: { files: total_files_changed, lines: total_lines_changed } } }
+ let(:diff_parent) { Struct.new(:stats).new(stats) }
+ let(:commit_class) do
+ Struct.new(:message, :sha, :diff_parent)
+ end
+ let(:commit_message) { 'A commit message' }
+ let(:commit_sha) { 'abcd1234' }
+ let(:commit) { commit_class.new(commit_message, commit_sha, diff_parent) }
+
+ subject(:commit_linter) { described_class.new(commit) }
+
+ describe '#fixup?' do
+ where(:commit_message, :is_fixup) do
+ 'A commit message' | false
+ 'fixup!' | true
+ 'fixup! A commit message' | true
+ 'squash!' | true
+ 'squash! A commit message' | true
+ end
+
+ with_them do
+ it 'is true when commit message starts with "fixup!" or "squash!"' do
+ expect(commit_linter.fixup?).to be(is_fixup)
+ end
+ end
+ end
+
+ describe '#suggestion?' do
+ where(:commit_message, :is_suggestion) do
+ 'A commit message' | false
+ 'Apply suggestion to' | true
+ 'Apply suggestion to "A commit message"' | true
+ end
+
+ with_them do
+ it 'is true when commit message starts with "Apply suggestion to"' do
+ expect(commit_linter.suggestion?).to be(is_suggestion)
+ end
+ end
+ end
+
+ describe '#merge?' do
+ where(:commit_message, :is_merge) do
+ 'A commit message' | false
+ 'Merge branch' | true
+ 'Merge branch "A commit message"' | true
+ end
+
+ with_them do
+ it 'is true when commit message starts with "Merge branch"' do
+ expect(commit_linter.merge?).to be(is_merge)
+ end
+ end
+ end
+
+ describe '#revert?' do
+ where(:commit_message, :is_revert) do
+ 'A commit message' | false
+ 'Revert' | false
+ 'Revert "' | true
+ 'Revert "A commit message"' | true
+ end
+
+ with_them do
+ it 'is true when commit message starts with "Revert \""' do
+ expect(commit_linter.revert?).to be(is_revert)
+ end
+ end
+ end
+
+ describe '#multi_line?' do
+ where(:commit_message, :is_multi_line) do
+ "A commit message" | false
+ "A commit message\n" | false
+ "A commit message\n\n" | false
+ "A commit message\n\nWith details" | true
+ end
+
+ with_them do
+ it 'is true when commit message contains details' do
+ expect(commit_linter.multi_line?).to be(is_multi_line)
+ end
+ end
+ end
+
+ describe '#failed?' do
+ context 'with no failures' do
+ it { expect(commit_linter).not_to be_failed }
+ end
+
+ context 'with failures' do
+ before do
+ commit_linter.add_problem(:details_line_too_long)
+ end
+
+ it { expect(commit_linter).to be_failed }
+ end
+ end
+
+ describe '#add_problem' do
+ it 'stores messages in #problems' do
+ commit_linter.add_problem(:details_line_too_long)
+
+ expect(commit_linter.problems).to eq({ details_line_too_long: described_class::PROBLEMS[:details_line_too_long] })
+ end
+ end
+
+ shared_examples 'a valid commit' do
+ it 'does not have any problem' do
+ commit_linter.lint
+
+ expect(commit_linter.problems).to be_empty
+ end
+ end
+
+ describe '#lint' do
+ describe 'subject' do
+ context 'when subject valid' do
+ it_behaves_like 'a valid commit'
+ end
+
+ context 'when subject is too short' do
+ let(:commit_message) { 'A B' }
+
+ it 'adds a problem' do
+ expect(commit_linter).to receive(:add_problem).with(:subject_too_short, described_class::DEFAULT_SUBJECT_DESCRIPTION)
+
+ commit_linter.lint
+ end
+ end
+
+ context 'when subject is too long' do
+ let(:commit_message) { 'A B ' + 'C' * described_class::MAX_LINE_LENGTH }
+
+ it 'adds a problem' do
+ expect(commit_linter).to receive(:add_problem).with(:subject_too_long, described_class::DEFAULT_SUBJECT_DESCRIPTION)
+
+ commit_linter.lint
+ end
+ end
+
+ context 'when subject is too short and too long' do
+ let(:commit_message) { 'A ' + 'B' * described_class::MAX_LINE_LENGTH }
+
+ it 'adds a problem' do
+ expect(commit_linter).to receive(:add_problem).with(:subject_too_short, described_class::DEFAULT_SUBJECT_DESCRIPTION)
+ expect(commit_linter).to receive(:add_problem).with(:subject_too_long, described_class::DEFAULT_SUBJECT_DESCRIPTION)
+
+ commit_linter.lint
+ end
+ end
+
+ context 'when subject is above warning' do
+ let(:commit_message) { 'A B ' + 'C' * described_class::WARN_SUBJECT_LENGTH }
+
+ it 'adds a problem' do
+ expect(commit_linter).to receive(:add_problem).with(:subject_above_warning, described_class::DEFAULT_SUBJECT_DESCRIPTION)
+
+ commit_linter.lint
+ end
+ end
+
+ context 'when subject starts with lowercase' do
+ let(:commit_message) { 'a B C' }
+
+ it 'adds a problem' do
+ expect(commit_linter).to receive(:add_problem).with(:subject_starts_with_lowercase, described_class::DEFAULT_SUBJECT_DESCRIPTION)
+
+ commit_linter.lint
+ end
+ end
+
+ context 'when subject ends with a period' do
+ let(:commit_message) { 'A B C.' }
+
+ it 'adds a problem' do
+ expect(commit_linter).to receive(:add_problem).with(:subject_ends_with_a_period, described_class::DEFAULT_SUBJECT_DESCRIPTION)
+
+ commit_linter.lint
+ end
+ end
+ end
+
+ describe 'separator' do
+ context 'when there are no details and no separator is required' do
+ let(:commit_message) { "A B C\n" }
+
+ it_behaves_like 'a valid commit'
+ end
+
+ context 'when separator is a blank line' do
+ let(:commit_message) { "A B C\n\nMore details." }
+
+ it_behaves_like 'a valid commit'
+ end
+
+ context 'when separator is missing' do
+ let(:commit_message) { "A B C\nMore details." }
+
+ it 'adds a problem' do
+ expect(commit_linter).to receive(:add_problem).with(:separator_missing)
+
+ commit_linter.lint
+ end
+ end
+ end
+
+ describe 'details' do
+ context 'when details are valid' do
+ let(:commit_message) { "A B C\n\nMore details." }
+
+ it_behaves_like 'a valid commit'
+ end
+
+ context 'when no details are given and many files are changed' do
+ let(:total_files_changed) { described_class::MAX_CHANGED_FILES_IN_COMMIT + 1 }
+
+ it_behaves_like 'a valid commit'
+ end
+
+ context 'when no details are given and many lines are changed' do
+ let(:total_lines_changed) { described_class::MAX_CHANGED_LINES_IN_COMMIT + 1 }
+
+ it_behaves_like 'a valid commit'
+ end
+
+ context 'when no details are given and many files and lines are changed' do
+ let(:total_files_changed) { described_class::MAX_CHANGED_FILES_IN_COMMIT + 1 }
+ let(:total_lines_changed) { described_class::MAX_CHANGED_LINES_IN_COMMIT + 1 }
+
+ it 'adds a problem' do
+ expect(commit_linter).to receive(:add_problem).with(:details_too_many_changes)
+
+ commit_linter.lint
+ end
+ end
+
+ context 'when details exceeds the max line length' do
+ let(:commit_message) { "A B C\n\n" + 'D' * (described_class::MAX_LINE_LENGTH + 1) }
+
+ it 'adds a problem' do
+ expect(commit_linter).to receive(:add_problem).with(:details_line_too_long)
+
+ commit_linter.lint
+ end
+ end
+
+ context 'when details exceeds the max line length including a URL' do
+ let(:commit_message) { "A B C\n\nhttps://gitlab.com" + 'D' * described_class::MAX_LINE_LENGTH }
+
+ it_behaves_like 'a valid commit'
+ end
+ end
+
+ describe 'message' do
+ context 'when message includes a text emoji' do
+ let(:commit_message) { "A commit message :+1:" }
+
+ it 'adds a problem' do
+ expect(commit_linter).to receive(:add_problem).with(:message_contains_text_emoji)
+
+ commit_linter.lint
+ end
+ end
+
+ context 'when message includes a unicode emoji' do
+ let(:commit_message) { "A commit message 🚀" }
+
+ it 'adds a problem' do
+ expect(commit_linter).to receive(:add_problem).with(:message_contains_unicode_emoji)
+
+ commit_linter.lint
+ end
+ end
+
+ context 'when message includes a short reference' do
+ [
+ 'A commit message to fix #1234',
+ 'A commit message to fix !1234',
+ 'A commit message to fix &1234',
+ 'A commit message to fix %1234',
+ 'A commit message to fix gitlab#1234',
+ 'A commit message to fix gitlab!1234',
+ 'A commit message to fix gitlab&1234',
+ 'A commit message to fix gitlab%1234',
+ 'A commit message to fix gitlab-org/gitlab#1234',
+ 'A commit message to fix gitlab-org/gitlab!1234',
+ 'A commit message to fix gitlab-org/gitlab&1234',
+ 'A commit message to fix gitlab-org/gitlab%1234'
+ ].each do |message|
+ let(:commit_message) { message }
+
+ it 'adds a problem' do
+ expect(commit_linter).to receive(:add_problem).with(:message_contains_short_reference)
+
+ commit_linter.lint
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/danger/emoji_checker_spec.rb b/spec/lib/gitlab/danger/emoji_checker_spec.rb
new file mode 100644
index 00000000000..0cdc18ce626
--- /dev/null
+++ b/spec/lib/gitlab/danger/emoji_checker_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rspec-parameterized'
+
+require 'gitlab/danger/emoji_checker'
+
+describe Gitlab::Danger::EmojiChecker do
+ using RSpec::Parameterized::TableSyntax
+
+ describe '#includes_text_emoji?' do
+ where(:text, :includes_emoji) do
+ 'Hello World!' | false
+ ':+1:' | true
+ 'Hello World! :+1:' | true
+ end
+
+ with_them do
+ it 'is true when text includes a text emoji' do
+ expect(subject.includes_text_emoji?(text)).to be(includes_emoji)
+ end
+ end
+ end
+
+ describe '#includes_unicode_emoji?' do
+ where(:text, :includes_emoji) do
+ 'Hello World!' | false
+ '🚀' | true
+ 'Hello World! 🚀' | true
+ end
+
+ with_them do
+ it 'is true when text includes a unicode emoji' do
+ expect(subject.includes_unicode_emoji?(text)).to be(includes_emoji)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/danger/helper_spec.rb b/spec/lib/gitlab/danger/helper_spec.rb
index edcd020a10f..ae0fcf443c5 100644
--- a/spec/lib/gitlab/danger/helper_spec.rb
+++ b/spec/lib/gitlab/danger/helper_spec.rb
@@ -313,6 +313,19 @@ describe Gitlab::Danger::Helper do
end
end
+ describe '#sanitize_mr_title' do
+ where(:mr_title, :expected_mr_title) do
+ 'My MR title' | 'My MR title'
+ 'WIP: My MR title' | 'My MR title'
+ end
+
+ with_them do
+ subject { helper.sanitize_mr_title(mr_title) }
+
+ it { is_expected.to eq(expected_mr_title) }
+ end
+ end
+
describe '#security_mr?' do
it 'returns false when `gitlab_helper` is unavailable' do
expect(helper).to receive(:gitlab_helper).and_return(nil)
diff --git a/spec/lib/gitlab/data_builder/note_spec.rb b/spec/lib/gitlab/data_builder/note_spec.rb
index 3c26daba5a5..4b799c23de8 100644
--- a/spec/lib/gitlab/data_builder/note_spec.rb
+++ b/spec/lib/gitlab/data_builder/note_spec.rb
@@ -137,7 +137,7 @@ describe Gitlab::DataBuilder::Note do
it 'returns the note and project snippet data' do
expect(data).to have_key(:snippet)
expect(data[:snippet].except('updated_at'))
- .to eq(snippet.reload.hook_attrs.except('updated_at'))
+ .to eq(snippet.hook_attrs.except('updated_at'))
expect(data[:snippet]['updated_at'])
.to be >= snippet.hook_attrs['updated_at']
end
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index cac6908f4b4..e0b4c8ae1f7 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -325,6 +325,67 @@ describe Gitlab::Database::MigrationHelpers do
end
end
end
+
+ describe 'validate option' do
+ let(:args) { [:projects, :users] }
+ let(:options) { { column: :user_id, on_delete: nil } }
+
+ context 'when validate is supplied with a falsey value' do
+ it_behaves_like 'skips validation', validate: false
+ it_behaves_like 'skips validation', validate: nil
+ end
+
+ context 'when validate is supplied with a truthy value' do
+ it_behaves_like 'performs validation', validate: true
+ it_behaves_like 'performs validation', validate: :whatever
+ end
+
+ context 'when validate is not supplied' do
+ it_behaves_like 'performs validation', {}
+ end
+ end
+ end
+ end
+
+ describe '#validate_foreign_key' do
+ context 'when name is provided' do
+ it 'does not infer the foreign key constraint name' do
+ expect(model).to receive(:foreign_key_exists?).with(:projects, name: :foo).and_return(true)
+
+ aggregate_failures do
+ expect(model).not_to receive(:concurrent_foreign_key_name)
+ expect(model).to receive(:disable_statement_timeout).and_call_original
+ expect(model).to receive(:execute).with(/statement_timeout/)
+ expect(model).to receive(:execute).ordered.with(/ALTER TABLE projects VALIDATE CONSTRAINT/)
+ expect(model).to receive(:execute).ordered.with(/RESET ALL/)
+ end
+
+ model.validate_foreign_key(:projects, :user_id, name: :foo)
+ end
+ end
+
+ context 'when name is not provided' do
+ it 'infers the foreign key constraint name' do
+ expect(model).to receive(:foreign_key_exists?).with(:projects, name: anything).and_return(true)
+
+ aggregate_failures do
+ expect(model).to receive(:concurrent_foreign_key_name)
+ expect(model).to receive(:disable_statement_timeout).and_call_original
+ expect(model).to receive(:execute).with(/statement_timeout/)
+ expect(model).to receive(:execute).ordered.with(/ALTER TABLE projects VALIDATE CONSTRAINT/)
+ expect(model).to receive(:execute).ordered.with(/RESET ALL/)
+ end
+
+ model.validate_foreign_key(:projects, :user_id)
+ end
+
+ context 'when the inferred foreign key constraint does not exist' do
+ it 'raises an error' do
+ expect(model).to receive(:foreign_key_exists?).and_return(false)
+
+ expect { model.validate_foreign_key(:projects, :user_id) }.to raise_error(/cannot find/)
+ end
+ end
end
end
@@ -1414,7 +1475,11 @@ describe Gitlab::Database::MigrationHelpers do
describe '#index_exists_by_name?' do
it 'returns true if an index exists' do
- expect(model.index_exists_by_name?(:projects, 'index_projects_on_path'))
+ ActiveRecord::Base.connection.execute(
+ 'CREATE INDEX test_index_for_index_exists ON projects (path);'
+ )
+
+ expect(model.index_exists_by_name?(:projects, 'test_index_for_index_exists'))
.to be_truthy
end
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1_spec.rb
index b09258ae227..56767c21ab7 100644
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1_spec.rb
+++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1_spec.rb
@@ -6,12 +6,12 @@ shared_examples 'renames child namespaces' do |type|
it 'renames namespaces' do
rename_namespaces = double
expect(described_class::RenameNamespaces)
- .to receive(:new).with(['first-path', 'second-path'], subject)
+ .to receive(:new).with(%w[first-path second-path], subject)
.and_return(rename_namespaces)
expect(rename_namespaces).to receive(:rename_namespaces)
.with(type: :child)
- subject.rename_wildcard_paths(['first-path', 'second-path'])
+ subject.rename_wildcard_paths(%w[first-path second-path])
end
end
diff --git a/spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb b/spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb
new file mode 100644
index 00000000000..97f4a7eec75
--- /dev/null
+++ b/spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb
@@ -0,0 +1,164 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::DatabaseImporters::InstanceAdministrators::CreateGroup do
+ describe '#execute' do
+ let(:result) { subject.execute }
+
+ context 'without application_settings' do
+ it 'returns error' do
+ expect(subject).to receive(:log_error).and_call_original
+ expect(result).to eq(
+ status: :error,
+ message: 'No application_settings found',
+ last_step: :validate_application_settings
+ )
+
+ expect(Group.count).to eq(0)
+ end
+ end
+
+ context 'without admin users' do
+ let(:application_setting) { Gitlab::CurrentSettings.current_application_settings }
+
+ before do
+ allow(ApplicationSetting).to receive(:current_without_cache) { application_setting }
+ end
+
+ it 'returns error' do
+ expect(subject).to receive(:log_error).and_call_original
+ expect(result).to eq(
+ status: :error,
+ message: 'No active admin user found',
+ last_step: :validate_admins
+ )
+
+ expect(Group.count).to eq(0)
+ end
+ end
+
+ context 'with application settings and admin users' do
+ let(:group) { result[:group] }
+ let(:application_setting) { Gitlab::CurrentSettings.current_application_settings }
+
+ let!(:user) { create(:user, :admin) }
+
+ before do
+ allow(ApplicationSetting).to receive(:current_without_cache) { application_setting }
+ end
+
+ it 'returns correct keys' do
+ expect(result.keys).to contain_exactly(
+ :status, :group
+ )
+ end
+
+ it "tracks successful install" do
+ expect(::Gitlab::Tracking).to receive(:event).with(
+ 'instance_administrators_group', 'group_created'
+ )
+
+ result
+ end
+
+ it 'creates group' do
+ expect(result[:status]).to eq(:success)
+ expect(group).to be_persisted
+ expect(group.name).to eq('GitLab Instance Administrators')
+ expect(group.path).to start_with('gitlab-instance-administrators')
+ expect(group.path.split('-').last.length).to eq(8)
+ expect(group.visibility_level).to eq(described_class::VISIBILITY_LEVEL)
+ end
+
+ it 'adds all admins as maintainers' do
+ admin1 = create(:user, :admin)
+ admin2 = create(:user, :admin)
+ create(:user)
+
+ expect(result[:status]).to eq(:success)
+ expect(group.members.collect(&:user)).to contain_exactly(user, admin1, admin2)
+ expect(group.members.collect(&:access_level)).to contain_exactly(
+ Gitlab::Access::OWNER,
+ Gitlab::Access::MAINTAINER,
+ Gitlab::Access::MAINTAINER
+ )
+ end
+
+ it 'saves the group id' do
+ expect(result[:status]).to eq(:success)
+ expect(application_setting.instance_administrators_group_id).to eq(group.id)
+ end
+
+ it 'returns error when saving group ID fails' do
+ allow(application_setting).to receive(:save) { false }
+
+ expect(result).to eq(
+ status: :error,
+ message: 'Could not save group ID',
+ last_step: :save_group_id
+ )
+ end
+
+ context 'when group already exists' do
+ let(:existing_group) { create(:group) }
+
+ before do
+ admin1 = create(:user, :admin)
+ admin2 = create(:user, :admin)
+
+ existing_group.add_owner(user)
+ existing_group.add_users([admin1, admin2], Gitlab::Access::MAINTAINER)
+
+ application_setting.instance_administrators_group_id = existing_group.id
+ end
+
+ it 'returns success' do
+ expect(result).to eq(
+ status: :success,
+ group: existing_group
+ )
+
+ expect(Group.count).to eq(1)
+ end
+ end
+
+ context 'when group cannot be created' do
+ let(:group) { build(:group) }
+
+ before do
+ group.errors.add(:base, "Test error")
+
+ expect_next_instance_of(::Groups::CreateService) do |group_create_service|
+ expect(group_create_service).to receive(:execute)
+ .and_return(group)
+ end
+ end
+
+ it 'returns error' do
+ expect(subject).to receive(:log_error).and_call_original
+ expect(result).to eq(
+ status: :error,
+ message: 'Could not create group',
+ last_step: :create_group
+ )
+ end
+ end
+
+ context 'when user cannot be added to group' do
+ before do
+ subject.instance_variable_set(:@instance_admins, [user, build(:user, :admin)])
+ end
+
+ it 'returns error' do
+ expect(subject).to receive(:log_error).and_call_original
+ expect(result).to eq(
+ status: :error,
+ message: 'Could not add admins as members',
+ last_step: :add_group_members
+ )
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb b/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb
index ee3c99afdf1..10efdd44f20 100644
--- a/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb
+++ b/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService do
describe '#execute' do
- let(:result) { subject.execute! }
+ let(:result) { subject.execute }
let(:prometheus_settings) do
{
@@ -18,10 +18,12 @@ describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService do
end
context 'without application_settings' do
- it 'does not fail' do
+ it 'returns error' do
expect(subject).to receive(:log_error).and_call_original
expect(result).to eq(
- status: :success
+ status: :error,
+ message: 'No application_settings found',
+ last_step: :validate_application_settings
)
expect(Project.count).to eq(0)
@@ -36,10 +38,11 @@ describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService do
allow(ApplicationSetting).to receive(:current_without_cache) { application_setting }
end
- it 'does not fail' do
- expect(subject).to receive(:log_error).and_call_original
+ it 'returns error' do
expect(result).to eq(
- status: :success
+ status: :error,
+ message: 'No active admin user found',
+ last_step: :create_group
)
expect(Project.count).to eq(0)
@@ -47,7 +50,7 @@ describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService do
end
end
- context 'with admin users' do
+ context 'with application settings and admin users' do
let(:project) { result[:project] }
let(:group) { result[:group] }
let(:application_setting) { Gitlab::CurrentSettings.current_application_settings }
@@ -73,13 +76,16 @@ describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService do
it_behaves_like 'has prometheus service', 'http://localhost:9090'
+ it "tracks successful install" do
+ expect(::Gitlab::Tracking).to receive(:event).twice
+ expect(::Gitlab::Tracking).to receive(:event).with('self_monitoring', 'project_created')
+
+ result
+ end
+
it 'creates group' do
expect(result[:status]).to eq(:success)
expect(group).to be_persisted
- expect(group.name).to eq('GitLab Instance Administrators')
- expect(group.path).to start_with('gitlab-instance-administrators')
- expect(group.path.split('-').last.length).to eq(8)
- expect(group.visibility_level).to eq(described_class::VISIBILITY_LEVEL)
end
it 'creates project with internal visibility' do
@@ -109,19 +115,9 @@ describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService do
expect(File).to exist("doc/#{path}.md")
end
- it 'adds all admins as maintainers' do
- admin1 = create(:user, :admin)
- admin2 = create(:user, :admin)
- create(:user)
-
+ it 'creates project with group as owner' do
expect(result[:status]).to eq(:success)
expect(project.owner).to eq(group)
- expect(group.members.collect(&:user)).to contain_exactly(user, admin1, admin2)
- expect(group.members.collect(&:access_level)).to contain_exactly(
- Gitlab::Access::OWNER,
- Gitlab::Access::MAINTAINER,
- Gitlab::Access::MAINTAINER
- )
end
it 'saves the project id' do
@@ -130,9 +126,16 @@ describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService do
end
it 'returns error when saving project ID fails' do
- allow(application_setting).to receive(:save) { false }
+ allow(application_setting).to receive(:update).and_call_original
+ allow(application_setting).to receive(:update)
+ .with(instance_administration_project_id: anything)
+ .and_return(false)
- expect { result }.to raise_error(StandardError, 'Could not save project ID')
+ expect(result).to eq(
+ status: :error,
+ message: 'Could not save project ID',
+ last_step: :save_project_id
+ )
end
context 'when project already exists' do
@@ -140,18 +143,12 @@ describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService do
let(:existing_project) { create(:project, namespace: existing_group) }
before do
- admin1 = create(:user, :admin)
- admin2 = create(:user, :admin)
-
- existing_group.add_owner(user)
- existing_group.add_users([admin1, admin2], Gitlab::Access::MAINTAINER)
-
+ application_setting.instance_administrators_group_id = existing_group.id
application_setting.instance_administration_project_id = existing_project.id
end
- it 'does not fail' do
- expect(subject).to receive(:log_error).and_call_original
- expect(result[:status]).to eq(:success)
+ it 'returns success' do
+ expect(result).to include(status: :success)
expect(Project.count).to eq(1)
expect(Group.count).to eq(1)
@@ -250,18 +247,11 @@ describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService do
it 'returns error' do
expect(subject).to receive(:log_error).and_call_original
- expect { result }.to raise_error(StandardError, 'Could not create project')
- end
- end
-
- context 'when user cannot be added to project' do
- before do
- subject.instance_variable_set(:@instance_admins, [user, build(:user, :admin)])
- end
-
- it 'returns error' do
- expect(subject).to receive(:log_error).and_call_original
- expect { result }.to raise_error(StandardError, 'Could not add admins as members')
+ expect(result).to eq(
+ status: :error,
+ message: 'Could not create project',
+ last_step: :create_project
+ )
end
end
@@ -275,15 +265,13 @@ describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService do
it 'returns error' do
expect(subject).to receive(:log_error).and_call_original
- expect { result }.to raise_error(StandardError, 'Could not save prometheus manual configuration')
+ expect(result).to eq(
+ status: :error,
+ message: 'Could not save prometheus manual configuration',
+ last_step: :add_prometheus_manual_configuration
+ )
end
end
end
-
- it "tracks successful install" do
- expect(Gitlab::Tracking).to receive(:event).with("self_monitoring", "project_created")
-
- result
- end
end
end
diff --git a/spec/lib/gitlab/database_importers/self_monitoring/project/delete_service_spec.rb b/spec/lib/gitlab/database_importers/self_monitoring/project/delete_service_spec.rb
new file mode 100644
index 00000000000..6446ab1beb4
--- /dev/null
+++ b/spec/lib/gitlab/database_importers/self_monitoring/project/delete_service_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::DatabaseImporters::SelfMonitoring::Project::DeleteService do
+ describe '#execute' do
+ let!(:application_setting) { create(:application_setting) }
+ let(:result) { subject.execute }
+
+ context 'when project does not exist' do
+ it 'returns error' do
+ expect(result).to eq(
+ status: :error,
+ message: 'Self monitoring project does not exist',
+ last_step: :validate_self_monitoring_project_exists
+ )
+ end
+ end
+
+ context 'when self monitoring project exists' do
+ let(:group) { create(:group) }
+ let(:project) { create(:project, namespace: group) }
+
+ let(:application_setting) do
+ create(
+ :application_setting,
+ instance_administration_project_id: project.id,
+ instance_administrators_group_id: group.id
+ )
+ end
+
+ it 'destroys project' do
+ subject.execute
+
+ expect { project.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+
+ it 'deletes project ID from application settings' do
+ subject.execute
+
+ expect(application_setting.reload.instance_administration_project_id).to be_nil
+ end
+
+ it 'does not delete group' do
+ subject.execute
+
+ expect(application_setting.instance_administrators_group).to eq(group)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index 3db8900ed8e..4a0eab3ea27 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -396,6 +396,20 @@ describe Gitlab::Database do
end
end
+ describe '.exists?' do
+ it 'returns true if `ActiveRecord::Base.connection` succeeds' do
+ expect(ActiveRecord::Base).to receive(:connection)
+
+ expect(described_class.exists?).to be(true)
+ end
+
+ it 'returns false if `ActiveRecord::Base.connection` fails' do
+ expect(ActiveRecord::Base).to receive(:connection) { raise ActiveRecord::NoDatabaseError, 'broken' }
+
+ expect(described_class.exists?).to be(false)
+ end
+ end
+
describe '#true_value' do
it 'returns correct value' do
expect(described_class.true_value).to eq "'t'"
diff --git a/spec/lib/gitlab/dependency_linker/cargo_toml_linker_spec.rb b/spec/lib/gitlab/dependency_linker/cargo_toml_linker_spec.rb
new file mode 100644
index 00000000000..86d5bc93bf7
--- /dev/null
+++ b/spec/lib/gitlab/dependency_linker/cargo_toml_linker_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::DependencyLinker::CargoTomlLinker do
+ describe '.support?' do
+ it 'supports Cargo.toml' do
+ expect(described_class.support?('Cargo.toml')).to be_truthy
+ end
+
+ it 'does not support other files' do
+ expect(described_class.support?('cargo.yaml')).to be_falsey
+ end
+ end
+
+ describe '#link' do
+ let(:file_name) { "Cargo.toml" }
+
+ let(:file_content) do
+ <<-CONTENT.strip_heredoc
+ # See https://doc.rust-lang.org/cargo/reference/manifest.html
+ [package]
+ # Package shouldn't be matched
+ name = "gitlab-test"
+ version = "0.0.1"
+ authors = ["Some User <some.user@example.org>"]
+ description = "A GitLab test Cargo.toml."
+ keywords = ["gitlab", "test", "rust", "crago"]
+ readme = "README.md"
+
+ [dependencies]
+ # Default dependencies format with fixed version and version range
+ chrono = "0.4.7"
+ xml-rs = ">=0.8.0"
+
+ [dependencies.memchr]
+ # Specific dependency with optional info
+ version = "2.2.1"
+ optional = true
+
+ [dev-dependencies]
+ # Dev dependency with version modifier
+ commandspec = "~0.12.2"
+
+ [build-dependencies]
+ # Build dependency with version wildcard
+ thread_local = "0.3.*"
+ CONTENT
+ end
+
+ subject { Gitlab::Highlight.highlight(file_name, file_content) }
+
+ def link(name, url)
+ %{<a href="#{url}" rel="nofollow noreferrer noopener" target="_blank">#{name}</a>}
+ end
+
+ it 'links dependencies' do
+ expect(subject).to include(link('chrono', 'https://crates.io/crates/chrono'))
+ expect(subject).to include(link('xml-rs', 'https://crates.io/crates/xml-rs'))
+ expect(subject).to include(link('memchr', 'https://crates.io/crates/memchr'))
+ expect(subject).to include(link('commandspec', 'https://crates.io/crates/commandspec'))
+ expect(subject).to include(link('thread_local', 'https://crates.io/crates/thread_local'))
+ end
+
+ it 'does not link package metadata keys such as version' do
+ expect(subject).not_to include(link('version', 'https://crates.io/crates/version'))
+ end
+ end
+end
diff --git a/spec/lib/gitlab/dependency_linker_spec.rb b/spec/lib/gitlab/dependency_linker_spec.rb
index 3ea3334caf0..570a994f520 100644
--- a/spec/lib/gitlab/dependency_linker_spec.rb
+++ b/spec/lib/gitlab/dependency_linker_spec.rb
@@ -83,5 +83,13 @@ describe Gitlab::DependencyLinker do
described_class.link(blob_name, nil, nil)
end
+
+ it 'links using CargoTomlLinker' do
+ blob_name = 'Cargo.toml'
+
+ expect(described_class::CargoTomlLinker).to receive(:link)
+
+ described_class.link(blob_name, nil, nil)
+ end
end
end
diff --git a/spec/lib/gitlab/diff/file_spec.rb b/spec/lib/gitlab/diff/file_spec.rb
index 716fc8ae987..c468af4db68 100644
--- a/spec/lib/gitlab/diff/file_spec.rb
+++ b/spec/lib/gitlab/diff/file_spec.rb
@@ -347,6 +347,16 @@ describe Gitlab::Diff::File do
end
describe '#simple_viewer' do
+ context 'when the file is collapsed' do
+ before do
+ allow(diff_file).to receive(:collapsed?).and_return(true)
+ end
+
+ it 'returns a Collapsed viewer' do
+ expect(diff_file.simple_viewer).to be_a(DiffViewer::Collapsed)
+ end
+ end
+
context 'when the file is not diffable' do
before do
allow(diff_file).to receive(:diffable?).and_return(false)
diff --git a/spec/lib/gitlab/email/attachment_uploader_spec.rb b/spec/lib/gitlab/email/attachment_uploader_spec.rb
index d66a746284d..c69b2f1eabc 100644
--- a/spec/lib/gitlab/email/attachment_uploader_spec.rb
+++ b/spec/lib/gitlab/email/attachment_uploader_spec.rb
@@ -9,7 +9,7 @@ describe Gitlab::Email::AttachmentUploader do
let(:message) { Mail::Message.new(message_raw) }
it "uploads all attachments and returns their links" do
- links = described_class.new(message).execute(project)
+ links = described_class.new(message).execute(upload_parent: project, uploader_class: FileUploader)
link = links.first
expect(link).not_to be_nil
diff --git a/spec/lib/gitlab/email/handler/create_note_handler_spec.rb b/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
index 50e473c459e..909a7618df4 100644
--- a/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
@@ -181,10 +181,21 @@ describe Gitlab::Email::Handler::CreateNoteHandler do
it_behaves_like 'a reply to existing comment'
it "adds all attachments" do
+ expect_next_instance_of(Gitlab::Email::AttachmentUploader) do |uploader|
+ expect(uploader).to receive(:execute).with(upload_parent: project, uploader_class: FileUploader).and_return(
+ [
+ {
+ url: "uploads/image.png",
+ alt: "image",
+ markdown: markdown
+ }
+ ]
+ )
+ end
+
receiver.execute
note = noteable.notes.last
-
expect(note.note).to include(markdown)
end
diff --git a/spec/lib/gitlab/email/receiver_spec.rb b/spec/lib/gitlab/email/receiver_spec.rb
index 43c73242f5f..018219e5647 100644
--- a/spec/lib/gitlab/email/receiver_spec.rb
+++ b/spec/lib/gitlab/email/receiver_spec.rb
@@ -5,22 +5,27 @@ require 'spec_helper'
describe Gitlab::Email::Receiver do
include_context :email_shared_context
- context "when the email contains a valid email address in a Delivered-To header" do
- let(:email_raw) { fixture_file('emails/forwarded_new_issue.eml') }
+ context 'when the email contains a valid email address in a header' do
let(:handler) { double(:handler) }
before do
- stub_incoming_email_setting(enabled: true, address: "incoming+%{key}@appmail.adventuretime.ooo")
-
allow(handler).to receive(:execute)
allow(handler).to receive(:metrics_params)
allow(handler).to receive(:metrics_event)
+
+ stub_incoming_email_setting(enabled: true, address: "incoming+%{key}@appmail.example.com")
+ end
+
+ context 'when in a Delivered-To header' do
+ let(:email_raw) { fixture_file('emails/forwarded_new_issue.eml') }
+
+ it_behaves_like 'correctly finds the mail key'
end
- it "finds the mail key" do
- expect(Gitlab::Email::Handler).to receive(:for).with(an_instance_of(Mail::Message), 'gitlabhq/gitlabhq+auth_token').and_return(handler)
+ context 'when in an Envelope-To header' do
+ let(:email_raw) { fixture_file('emails/envelope_to_header.eml') }
- receiver.execute
+ it_behaves_like 'correctly finds the mail key'
end
end
diff --git a/spec/lib/gitlab/experimentation_spec.rb b/spec/lib/gitlab/experimentation_spec.rb
index b8be72cf8d7..e4624accd58 100644
--- a/spec/lib/gitlab/experimentation_spec.rb
+++ b/spec/lib/gitlab/experimentation_spec.rb
@@ -54,7 +54,7 @@ describe Gitlab::Experimentation do
describe '#experiment_enabled?' do
context 'cookie is not present' do
it 'calls Gitlab::Experimentation.enabled_for_user? with the name of the experiment and an experimentation_subject_index of nil' do
- expect(Gitlab::Experimentation).to receive(:enabled_for_user?).with(:test_experiment, nil) # rubocop:disable RSpec/DescribedClass
+ expect(Gitlab::Experimentation).to receive(:enabled_for_user?).with(:test_experiment, nil)
controller.experiment_enabled?(:test_experiment)
end
end
@@ -67,7 +67,7 @@ describe Gitlab::Experimentation do
it 'calls Gitlab::Experimentation.enabled_for_user? with the name of the experiment and an experimentation_subject_index of the modulo 100 of the hex value of the uuid' do
# 'abcd1234'.hex % 100 = 76
- expect(Gitlab::Experimentation).to receive(:enabled_for_user?).with(:test_experiment, 76) # rubocop:disable RSpec/DescribedClass
+ expect(Gitlab::Experimentation).to receive(:enabled_for_user?).with(:test_experiment, 76)
controller.experiment_enabled?(:test_experiment)
end
end
diff --git a/spec/lib/gitlab/file_detector_spec.rb b/spec/lib/gitlab/file_detector_spec.rb
index 23f7deba7f7..3972bd24e80 100644
--- a/spec/lib/gitlab/file_detector_spec.rb
+++ b/spec/lib/gitlab/file_detector_spec.rb
@@ -16,23 +16,30 @@ describe Gitlab::FileDetector do
end
describe '.type_of' do
- it 'returns the type of a README file' do
- filenames = Gitlab::MarkupHelper::PLAIN_FILENAMES + Gitlab::MarkupHelper::PLAIN_FILENAMES.map(&:upcase)
- extensions = Gitlab::MarkupHelper::EXTENSIONS + Gitlab::MarkupHelper::EXTENSIONS.map(&:upcase)
+ it 'returns the type of a README without extension' do
+ expect(described_class.type_of('README')).to eq(:readme)
+ expect(described_class.type_of('INDEX')).to eq(:readme)
+ end
- filenames.each do |filename|
- expect(described_class.type_of(filename)).to eq(:readme)
+ it 'returns the type of a README file with a recognized extension' do
+ extensions = ['txt', *Gitlab::MarkupHelper::EXTENSIONS]
- extensions.each do |extname|
- expect(described_class.type_of("#{filename}.#{extname}")).to eq(:readme)
+ extensions.each do |ext|
+ %w(index readme).each do |file|
+ expect(described_class.type_of("#{file}.#{ext}")).to eq(:readme)
end
end
end
- it 'returns nil for a README.rb file' do
+ it 'returns nil for a README with unrecognized extension' do
expect(described_class.type_of('README.rb')).to be_nil
end
+ it 'is case insensitive' do
+ expect(described_class.type_of('ReadMe')).to eq(:readme)
+ expect(described_class.type_of('index.TXT')).to eq(:readme)
+ end
+
it 'returns nil for a README file in a directory' do
expect(described_class.type_of('foo/README.md')).to be_nil
end
diff --git a/spec/lib/gitlab/file_finder_spec.rb b/spec/lib/gitlab/file_finder_spec.rb
index 6cc5141a6fe..90aa759671a 100644
--- a/spec/lib/gitlab/file_finder_spec.rb
+++ b/spec/lib/gitlab/file_finder_spec.rb
@@ -30,5 +30,11 @@ describe Gitlab::FileFinder do
expect(results.count).to eq(1)
end
+
+ it 'does not cause an N+1 query' do
+ expect(Gitlab::GitalyClient).to receive(:call).at_most(10).times.and_call_original
+
+ subject.find(': filename:wm.svg')
+ end
end
end
diff --git a/spec/lib/gitlab/plugin_spec.rb b/spec/lib/gitlab/file_hook_spec.rb
index 5d9f6d04caa..d184eb483d4 100644
--- a/spec/lib/gitlab/plugin_spec.rb
+++ b/spec/lib/gitlab/file_hook_spec.rb
@@ -2,11 +2,11 @@
require 'spec_helper'
-describe Gitlab::Plugin do
- let(:plugin) { Rails.root.join('plugins', 'test.rb') }
- let(:tmp_file) { Tempfile.new('plugin-dump') }
+describe Gitlab::FileHook do
+ let(:file_hook) { Rails.root.join('plugins', 'test.rb') }
+ let(:tmp_file) { Tempfile.new('file_hook-dump') }
- let(:plugin_source) do
+ let(:file_hook_source) do
<<~EOS
#!/usr/bin/env ruby
x = STDIN.read
@@ -14,13 +14,13 @@ describe Gitlab::Plugin do
EOS
end
- context 'with plugins present' do
+ context 'with file_hooks present' do
before do
- File.write(plugin, plugin_source)
+ File.write(file_hook, file_hook_source)
end
after do
- FileUtils.rm(plugin)
+ FileUtils.rm(file_hook)
end
describe '.any?' do
@@ -30,13 +30,13 @@ describe Gitlab::Plugin do
end
describe '.files?' do
- it 'returns a list of plugins' do
- expect(described_class.files).to match_array([plugin.to_s])
+ it 'returns a list of file_hooks' do
+ expect(described_class.files).to match_array([file_hook.to_s])
end
end
end
- context 'without any plugins' do
+ context 'without any file_hooks' do
describe '.any?' do
it 'returns false' do
expect(described_class.any?).to be false
@@ -52,21 +52,21 @@ describe Gitlab::Plugin do
describe '.execute' do
let(:data) { Gitlab::DataBuilder::Push::SAMPLE_DATA }
- let(:result) { described_class.execute(plugin.to_s, data) }
+ let(:result) { described_class.execute(file_hook.to_s, data) }
let(:success) { result.first }
let(:message) { result.last }
before do
- File.write(plugin, plugin_source)
+ File.write(file_hook, file_hook_source)
end
after do
- FileUtils.rm(plugin)
+ FileUtils.rm(file_hook)
end
context 'successful execution' do
before do
- File.chmod(0o777, plugin)
+ File.chmod(0o777, file_hook)
end
after do
@@ -76,7 +76,7 @@ describe Gitlab::Plugin do
it { expect(success).to be true }
it { expect(message).to be_empty }
- it 'ensures plugin received data via stdin' do
+ it 'ensures file_hook received data via stdin' do
result
expect(File.read(tmp_file.path)).to eq(data.to_json)
@@ -89,7 +89,7 @@ describe Gitlab::Plugin do
end
context 'non-zero exit' do
- let(:plugin_source) do
+ let(:file_hook_source) do
<<~EOS
#!/usr/bin/env ruby
exit 1
@@ -97,7 +97,7 @@ describe Gitlab::Plugin do
end
before do
- File.chmod(0o777, plugin)
+ File.chmod(0o777, file_hook)
end
it { expect(success).to be false }
diff --git a/spec/lib/gitlab/git/branch_spec.rb b/spec/lib/gitlab/git/branch_spec.rb
index cc26b7e7fcd..cb3f4df2dbd 100644
--- a/spec/lib/gitlab/git/branch_spec.rb
+++ b/spec/lib/gitlab/git/branch_spec.rb
@@ -71,9 +71,7 @@ describe Gitlab::Git::Branch, :seed_helper do
end
let(:user) { create(:user) }
- let(:committer) do
- Gitlab::Git.committer_hash(email: user.email, name: user.name)
- end
+ let(:committer) { { email: user.email, name: user.name } }
let(:params) do
parents = [rugged.head.target]
tree = parents.first.tree
diff --git a/spec/lib/gitlab/git/commit_spec.rb b/spec/lib/gitlab/git/commit_spec.rb
index 7ec655eb113..c2fc228d34a 100644
--- a/spec/lib/gitlab/git/commit_spec.rb
+++ b/spec/lib/gitlab/git/commit_spec.rb
@@ -57,7 +57,7 @@ describe Gitlab::Git::Commit, :seed_helper do
it { expect(@commit.different_committer?).to be_truthy }
it { expect(@commit.parents).to eq(@gitlab_parents) }
it { expect(@commit.parent_id).to eq(@parents.first.oid) }
- it { expect(@commit.no_commit_message).to eq("--no commit message") }
+ it { expect(@commit.no_commit_message).to eq("No commit message") }
after do
# Erase the new commit so other tests get the original repo
diff --git a/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb b/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
index 474240cf620..9b29046fce9 100644
--- a/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
+++ b/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
@@ -53,30 +53,46 @@ describe Gitlab::Git::RuggedImpl::UseRugged, :seed_helper do
allow(Feature).to receive(:persisted?).with(feature_flag).and_return(false)
end
- it 'returns true when gitaly matches disk' do
- expect(subject.use_rugged?(repository, feature_flag_name)).to be true
+ context 'when running puma with multiple threads' do
+ before do
+ allow(subject).to receive(:running_puma_with_multiple_threads?).and_return(true)
+ end
+
+ it 'returns false' do
+ expect(subject.use_rugged?(repository, feature_flag_name)).to be false
+ end
end
- it 'returns false when disk access fails' do
- allow(Gitlab::GitalyClient).to receive(:storage_metadata_file_path).and_return("/fake/path/doesnt/exist")
+ context 'when not running puma with multiple threads' do
+ before do
+ allow(subject).to receive(:running_puma_with_multiple_threads?).and_return(false)
+ end
- expect(subject.use_rugged?(repository, feature_flag_name)).to be false
- end
+ it 'returns true when gitaly matches disk' do
+ expect(subject.use_rugged?(repository, feature_flag_name)).to be true
+ end
- it "returns false when gitaly doesn't match disk" do
- allow(Gitlab::GitalyClient).to receive(:storage_metadata_file_path).and_return(temp_gitaly_metadata_file)
+ it 'returns false when disk access fails' do
+ allow(Gitlab::GitalyClient).to receive(:storage_metadata_file_path).and_return("/fake/path/doesnt/exist")
- expect(subject.use_rugged?(repository, feature_flag_name)).to be_falsey
+ expect(subject.use_rugged?(repository, feature_flag_name)).to be false
+ end
- File.delete(temp_gitaly_metadata_file)
- end
+ it "returns false when gitaly doesn't match disk" do
+ allow(Gitlab::GitalyClient).to receive(:storage_metadata_file_path).and_return(temp_gitaly_metadata_file)
- it "doesn't lead to a second rpc call because gitaly client should use the cached value" do
- expect(subject.use_rugged?(repository, feature_flag_name)).to be true
+ expect(subject.use_rugged?(repository, feature_flag_name)).to be_falsey
- expect(Gitlab::GitalyClient).not_to receive(:filesystem_id)
+ File.delete(temp_gitaly_metadata_file)
+ end
- subject.use_rugged?(repository, feature_flag_name)
+ it "doesn't lead to a second rpc call because gitaly client should use the cached value" do
+ expect(subject.use_rugged?(repository, feature_flag_name)).to be true
+
+ expect(Gitlab::GitalyClient).not_to receive(:filesystem_id)
+
+ subject.use_rugged?(repository, feature_flag_name)
+ end
end
end
@@ -99,6 +115,37 @@ describe Gitlab::Git::RuggedImpl::UseRugged, :seed_helper do
end
end
+ describe '#running_puma_with_multiple_threads?' do
+ context 'when using Puma' do
+ before do
+ stub_const('::Puma', class_double('Puma'))
+ allow(Gitlab::Runtime).to receive(:puma?).and_return(true)
+ end
+
+ it 'returns false for single thread Puma' do
+ allow(::Puma).to receive_message_chain(:cli_config, :options).and_return(max_threads: 1)
+
+ expect(subject.running_puma_with_multiple_threads?).to be false
+ end
+
+ it 'returns true for multi-threaded Puma' do
+ allow(::Puma).to receive_message_chain(:cli_config, :options).and_return(max_threads: 2)
+
+ expect(subject.running_puma_with_multiple_threads?).to be true
+ end
+ end
+
+ context 'when not using Puma' do
+ before do
+ allow(Gitlab::Runtime).to receive(:puma?).and_return(false)
+ end
+
+ it 'returns false' do
+ expect(subject.running_puma_with_multiple_threads?).to be false
+ end
+ end
+ end
+
def create_temporary_gitaly_metadata_file
tmp = Tempfile.new('.gitaly-metadata')
gitaly_metadata = {
diff --git a/spec/lib/gitlab/git_spec.rb b/spec/lib/gitlab/git_spec.rb
index fbc49e05c37..d6d12b84724 100644
--- a/spec/lib/gitlab/git_spec.rb
+++ b/spec/lib/gitlab/git_spec.rb
@@ -6,32 +6,6 @@ describe Gitlab::Git do
let(:committer_email) { 'user@example.org' }
let(:committer_name) { 'John Doe' }
- describe 'committer_hash' do
- it "returns a hash containing the given email and name" do
- committer_hash = described_class.committer_hash(email: committer_email, name: committer_name)
-
- expect(committer_hash[:email]).to eq(committer_email)
- expect(committer_hash[:name]).to eq(committer_name)
- expect(committer_hash[:time]).to be_a(Time)
- end
-
- context 'when email is nil' do
- it "returns nil" do
- committer_hash = described_class.committer_hash(email: nil, name: committer_name)
-
- expect(committer_hash).to be_nil
- end
- end
-
- context 'when name is nil' do
- it "returns nil" do
- committer_hash = described_class.committer_hash(email: committer_email, name: nil)
-
- expect(committer_hash).to be_nil
- end
- end
- end
-
describe '.ref_name' do
it 'ensure ref is a valid UTF-8 string' do
utf8_invalid_ref = Gitlab::Git::BRANCH_REF_PREFIX + "an_invalid_ref_\xE5"
@@ -73,7 +47,8 @@ describe Gitlab::Git do
[sha, short_sha, true],
[sha, sha.reverse, false],
[sha, too_short_sha, false],
- [sha, nil, false]
+ [sha, nil, false],
+ [nil, nil, true]
]
end
diff --git a/spec/lib/gitlab/gitaly_client/blob_service_spec.rb b/spec/lib/gitlab/gitaly_client/blob_service_spec.rb
index 887a6baf659..fc6ac491671 100644
--- a/spec/lib/gitlab/gitaly_client/blob_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/blob_service_spec.rb
@@ -12,7 +12,7 @@ describe Gitlab::GitalyClient::BlobService do
describe '#get_new_lfs_pointers' do
let(:revision) { 'master' }
let(:limit) { 5 }
- let(:not_in) { ['branch-a', 'branch-b'] }
+ let(:not_in) { %w[branch-a branch-b] }
let(:expected_params) do
{ revision: revision, limit: limit, not_in_refs: not_in, not_in_all: false }
end
diff --git a/spec/lib/gitlab/gitaly_client/remote_service_spec.rb b/spec/lib/gitlab/gitaly_client/remote_service_spec.rb
index 929ff5dee5d..73ae4cd95ce 100644
--- a/spec/lib/gitlab/gitaly_client/remote_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/remote_service_spec.rb
@@ -69,7 +69,7 @@ describe Gitlab::GitalyClient::RemoteService do
describe '#update_remote_mirror' do
let(:ref_name) { 'remote_mirror_1' }
- let(:only_branches_matching) { ['my-branch', 'master'] }
+ let(:only_branches_matching) { %w[my-branch master] }
let(:ssh_key) { 'KEY' }
let(:known_hosts) { 'KNOWN HOSTS' }
diff --git a/spec/lib/gitlab/gitaly_client_spec.rb b/spec/lib/gitlab/gitaly_client_spec.rb
index 4b69b4734f1..ebf56c0ae66 100644
--- a/spec/lib/gitlab/gitaly_client_spec.rb
+++ b/spec/lib/gitlab/gitaly_client_spec.rb
@@ -26,7 +26,7 @@ describe Gitlab::GitalyClient do
context 'running in Unicorn' do
before do
- stub_const('Unicorn', 1)
+ allow(Gitlab::Runtime).to receive(:unicorn?).and_return(true)
end
it { expect(subject.long_timeout).to eq(55) }
@@ -34,7 +34,7 @@ describe Gitlab::GitalyClient do
context 'running in Puma' do
before do
- stub_const('Puma', 1)
+ allow(Gitlab::Runtime).to receive(:puma?).and_return(true)
end
it { expect(subject.long_timeout).to eq(55) }
@@ -229,6 +229,59 @@ describe Gitlab::GitalyClient do
end
end
end
+
+ context 'deadlines', :request_store do
+ let(:request_deadline) { real_time + 10.0 }
+
+ before do
+ allow(Gitlab::RequestContext.instance).to receive(:request_deadline).and_return(request_deadline)
+ end
+
+ it 'includes the deadline information' do
+ kword_args = described_class.request_kwargs('default', timeout: 2)
+
+ expect(kword_args[:deadline])
+ .to be_within(1).of(real_time + 2)
+ expect(kword_args[:metadata][:deadline_type]).to eq("regular")
+ end
+
+ it 'limits the deadline to the request deadline if that is closer', :aggregate_failures do
+ kword_args = described_class.request_kwargs('default', timeout: 15)
+
+ expect(kword_args[:deadline]).to eq(request_deadline)
+ expect(kword_args[:metadata][:deadline_type]).to eq("limited")
+ end
+
+ it 'does not limit calls in sidekiq' do
+ expect(Sidekiq).to receive(:server?).and_return(true)
+
+ kword_args = described_class.request_kwargs('default', timeout: 6.hours.to_i)
+
+ expect(kword_args[:deadline]).to be_within(1).of(real_time + 6.hours.to_i)
+ expect(kword_args[:metadata][:deadline_type]).to be_nil
+ end
+
+ it 'does not limit calls in sidekiq when allowed unlimited' do
+ expect(Sidekiq).to receive(:server?).and_return(true)
+
+ kword_args = described_class.request_kwargs('default', timeout: 0)
+
+ expect(kword_args[:deadline]).to be_nil
+ expect(kword_args[:metadata][:deadline_type]).to be_nil
+ end
+
+ it 'includes only the deadline specified by the timeout when there was no deadline' do
+ allow(Gitlab::RequestContext.instance).to receive(:request_deadline).and_return(nil)
+ kword_args = described_class.request_kwargs('default', timeout: 6.hours.to_i)
+
+ expect(kword_args[:deadline]).to be_within(1).of(Gitlab::Metrics::System.real_time + 6.hours.to_i)
+ expect(kword_args[:metadata][:deadline_type]).to be_nil
+ end
+
+ def real_time
+ Gitlab::Metrics::System.real_time
+ end
+ end
end
describe 'enforce_gitaly_request_limits?' do
diff --git a/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb
index 877b4d4bbaf..bffae9e2ba0 100644
--- a/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb
@@ -50,6 +50,10 @@ describe Gitlab::GithubImport::Importer::PullRequestImporter, :clean_gitlab_redi
.and_return([mr, false])
expect(importer)
+ .to receive(:set_merge_request_assignees)
+ .with(mr)
+
+ expect(importer)
.to receive(:insert_git_data)
.with(mr, false)
@@ -75,11 +79,6 @@ describe Gitlab::GithubImport::Importer::PullRequestImporter, :clean_gitlab_redi
.to receive(:author_id_for)
.with(pull_request)
.and_return([user.id, true])
-
- allow(importer.user_finder)
- .to receive(:assignee_id_for)
- .with(pull_request)
- .and_return(user.id)
end
it 'imports the pull request with the pull request author as the merge request author' do
@@ -97,7 +96,6 @@ describe Gitlab::GithubImport::Importer::PullRequestImporter, :clean_gitlab_redi
state_id: 3,
milestone_id: milestone.id,
author_id: user.id,
- assignee_id: user.id,
created_at: created_at,
updated_at: updated_at
},
@@ -114,20 +112,72 @@ describe Gitlab::GithubImport::Importer::PullRequestImporter, :clean_gitlab_redi
expect(mr).to be_instance_of(MergeRequest)
expect(exists).to eq(false)
end
+
+ context 'when the source and target branch are identical' do
+ before do
+ allow(pull_request).to receive_messages(
+ source_repository_id: pull_request.target_repository_id,
+ source_branch: 'master'
+ )
+ end
+
+ it 'uses a generated source branch name for the merge request' do
+ expect(importer)
+ .to receive(:insert_and_return_id)
+ .with(
+ {
+ iid: 42,
+ title: 'My Pull Request',
+ description: 'This is my pull request',
+ source_project_id: project.id,
+ target_project_id: project.id,
+ source_branch: 'master-42',
+ target_branch: 'master',
+ state_id: 3,
+ milestone_id: milestone.id,
+ author_id: user.id,
+ created_at: created_at,
+ updated_at: updated_at
+ },
+ project.merge_requests
+ )
+ .and_call_original
+
+ importer.create_merge_request
+ end
+ end
+
+ context 'when the import fails due to a foreign key error' do
+ it 'does not raise any errors' do
+ expect(importer)
+ .to receive(:insert_and_return_id)
+ .and_raise(ActiveRecord::InvalidForeignKey, 'invalid foreign key')
+
+ expect { importer.create_merge_request }.not_to raise_error
+ end
+ end
+
+ context 'when the merge request already exists' do
+ it 'returns the existing merge request' do
+ mr1, exists1 = importer.create_merge_request
+ mr2, exists2 = importer.create_merge_request
+
+ expect(mr2).to eq(mr1)
+ expect(exists1).to eq(false)
+ expect(exists2).to eq(true)
+ end
+ end
end
context 'when the author could not be found' do
- it 'imports the pull request with the project creator as the merge request author' do
+ before do
allow(importer.user_finder)
.to receive(:author_id_for)
.with(pull_request)
.and_return([project.creator_id, false])
+ end
- allow(importer.user_finder)
- .to receive(:assignee_id_for)
- .with(pull_request)
- .and_return(user.id)
-
+ it 'imports the pull request with the project creator as the merge request author' do
expect(importer)
.to receive(:insert_and_return_id)
.with(
@@ -142,7 +192,6 @@ describe Gitlab::GithubImport::Importer::PullRequestImporter, :clean_gitlab_redi
state_id: 3,
milestone_id: milestone.id,
author_id: project.creator_id,
- assignee_id: user.id,
created_at: created_at,
updated_at: updated_at
},
@@ -153,93 +202,33 @@ describe Gitlab::GithubImport::Importer::PullRequestImporter, :clean_gitlab_redi
importer.create_merge_request
end
end
+ end
- context 'when the source and target branch are identical' do
- it 'uses a generated source branch name for the merge request' do
- allow(importer.user_finder)
- .to receive(:author_id_for)
- .with(pull_request)
- .and_return([user.id, true])
-
- allow(importer.user_finder)
- .to receive(:assignee_id_for)
- .with(pull_request)
- .and_return(user.id)
-
- allow(pull_request)
- .to receive(:source_repository_id)
- .and_return(pull_request.target_repository_id)
-
- allow(pull_request)
- .to receive(:source_branch)
- .and_return('master')
+ describe '#set_merge_request_assignees' do
+ let_it_be(:merge_request) { create(:merge_request) }
- expect(importer)
- .to receive(:insert_and_return_id)
- .with(
- {
- iid: 42,
- title: 'My Pull Request',
- description: 'This is my pull request',
- source_project_id: project.id,
- target_project_id: project.id,
- source_branch: 'master-42',
- target_branch: 'master',
- state_id: 3,
- milestone_id: milestone.id,
- author_id: user.id,
- assignee_id: user.id,
- created_at: created_at,
- updated_at: updated_at
- },
- project.merge_requests
- )
- .and_call_original
+ before do
+ allow(importer.user_finder)
+ .to receive(:assignee_id_for)
+ .with(pull_request)
+ .and_return(user_id)
- importer.create_merge_request
- end
+ importer.set_merge_request_assignees(merge_request)
end
- context 'when the import fails due to a foreign key error' do
- it 'does not raise any errors' do
- allow(importer.user_finder)
- .to receive(:author_id_for)
- .with(pull_request)
- .and_return([user.id, true])
-
- allow(importer.user_finder)
- .to receive(:assignee_id_for)
- .with(pull_request)
- .and_return(user.id)
-
- expect(importer)
- .to receive(:insert_and_return_id)
- .and_raise(ActiveRecord::InvalidForeignKey, 'invalid foreign key')
+ context 'when pull request has an assignee' do
+ let(:user_id) { user.id }
- expect { importer.create_merge_request }.not_to raise_error
+ it 'sets merge request assignees' do
+ expect(merge_request.assignee_ids).to eq [user.id]
end
end
- context 'when the merge request already exists' do
- before do
- allow(importer.user_finder)
- .to receive(:author_id_for)
- .with(pull_request)
- .and_return([user.id, true])
-
- allow(importer.user_finder)
- .to receive(:assignee_id_for)
- .with(pull_request)
- .and_return(user.id)
- end
-
- it 'returns the existing merge request' do
- mr1, exists1 = importer.create_merge_request
- mr2, exists2 = importer.create_merge_request
+ context 'when pull request does not have any assignees' do
+ let(:user_id) { nil }
- expect(mr2).to eq(mr1)
- expect(exists1).to eq(false)
- expect(exists2).to eq(true)
+ it 'does not set merge request assignees' do
+ expect(merge_request.assignee_ids).to eq []
end
end
end
@@ -255,11 +244,6 @@ describe Gitlab::GithubImport::Importer::PullRequestImporter, :clean_gitlab_redi
.to receive(:author_id_for)
.with(pull_request)
.and_return([user.id, true])
-
- allow(importer.user_finder)
- .to receive(:assignee_id_for)
- .with(pull_request)
- .and_return(user.id)
end
it 'does not create the source branch if merge request is merged' do
diff --git a/spec/lib/gitlab/gpg_spec.rb b/spec/lib/gitlab/gpg_spec.rb
index 8600ef223c6..27a3010eeed 100644
--- a/spec/lib/gitlab/gpg_spec.rb
+++ b/spec/lib/gitlab/gpg_spec.rb
@@ -236,7 +236,7 @@ describe Gitlab::Gpg do
context 'when running in Sidekiq' do
before do
- allow(Sidekiq).to receive(:server?).and_return(true)
+ allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true)
end
it_behaves_like 'multiple deletion attempts of the tmp-dir', described_class::BG_CLEANUP_RUNTIME_S
diff --git a/spec/lib/gitlab/grape_logging/loggers/exception_logger_spec.rb b/spec/lib/gitlab/grape_logging/loggers/exception_logger_spec.rb
index 0cfda80b854..c9021e2f436 100644
--- a/spec/lib/gitlab/grape_logging/loggers/exception_logger_spec.rb
+++ b/spec/lib/gitlab/grape_logging/loggers/exception_logger_spec.rb
@@ -39,7 +39,7 @@ describe Gitlab::GrapeLogging::Loggers::ExceptionLogger do
before do
current_backtrace = caller
allow(exception).to receive(:backtrace).and_return(current_backtrace)
- expected['exception.backtrace'] = Gitlab::Profiler.clean_backtrace(current_backtrace)
+ expected['exception.backtrace'] = Gitlab::BacktraceCleaner.clean_backtrace(current_backtrace)
end
it 'includes the backtrace' do
diff --git a/spec/lib/gitlab/graphql/connections/externally_paginated_array_connection_spec.rb b/spec/lib/gitlab/graphql/connections/externally_paginated_array_connection_spec.rb
new file mode 100644
index 00000000000..83c94ed6260
--- /dev/null
+++ b/spec/lib/gitlab/graphql/connections/externally_paginated_array_connection_spec.rb
@@ -0,0 +1,87 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Graphql::Connections::ExternallyPaginatedArrayConnection do
+ let(:prev_cursor) { 1 }
+ let(:next_cursor) { 6 }
+ let(:values) { [2, 3, 4, 5] }
+ let(:all_nodes) { Gitlab::Graphql::ExternallyPaginatedArray.new(prev_cursor, next_cursor, *values) }
+ let(:arguments) { {} }
+
+ subject(:connection) do
+ described_class.new(all_nodes, arguments)
+ end
+
+ describe '#sliced_nodes' do
+ let(:sliced_nodes) { connection.sliced_nodes }
+
+ it 'returns all the nodes' do
+ expect(connection.sliced_nodes).to eq(values)
+ end
+ end
+
+ describe '#paged_nodes' do
+ let(:paged_nodes) { connection.send(:paged_nodes) }
+
+ it_behaves_like "connection with paged nodes" do
+ let(:paged_nodes_size) { values.size }
+ end
+ end
+
+ describe '#start_cursor' do
+ it 'returns the prev cursor' do
+ expect(connection.start_cursor).to eq(prev_cursor)
+ end
+
+ context 'when there is none' do
+ let(:prev_cursor) { nil }
+
+ it 'returns nil' do
+ expect(connection.start_cursor).to eq(nil)
+ end
+ end
+ end
+
+ describe '#end_cursor' do
+ it 'returns the next cursor' do
+ expect(connection.end_cursor).to eq(next_cursor)
+ end
+
+ context 'when there is none' do
+ let(:next_cursor) { nil }
+
+ it 'returns nil' do
+ expect(connection.end_cursor).to eq(nil)
+ end
+ end
+ end
+
+ describe '#has_next_page' do
+ it 'returns true when there is an end cursor' do
+ expect(connection.has_next_page).to eq(true)
+ end
+
+ context 'when there is no end cursor' do
+ let(:next_cursor) { nil }
+
+ it 'returns false' do
+ expect(connection.has_next_page).to eq(false)
+ end
+ end
+ end
+
+ describe '#has_previous_page' do
+ it 'returns true when there is a start cursor' do
+ expect(connection.has_previous_page).to eq(true)
+ end
+
+ context 'when there is no start cursor' do
+ let(:prev_cursor) { nil }
+
+ it 'returns false' do
+ expect(connection.has_previous_page).to eq(false)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/graphql/connections/filterable_array_connection_spec.rb b/spec/lib/gitlab/graphql/connections/filterable_array_connection_spec.rb
index 20e87daa0d6..b2f0862be62 100644
--- a/spec/lib/gitlab/graphql/connections/filterable_array_connection_spec.rb
+++ b/spec/lib/gitlab/graphql/connections/filterable_array_connection_spec.rb
@@ -14,7 +14,9 @@ describe Gitlab::Graphql::Connections::FilterableArrayConnection do
describe '#paged_nodes' do
let(:paged_nodes) { subject.paged_nodes }
- it_behaves_like "connection with paged nodes"
+ it_behaves_like "connection with paged nodes" do
+ let(:paged_nodes_size) { 3 }
+ end
context 'when callback filters some nodes' do
let(:callback) { proc { |nodes| nodes[1..-1] } }
diff --git a/spec/lib/gitlab/graphql/connections/keyset/connection_spec.rb b/spec/lib/gitlab/graphql/connections/keyset/connection_spec.rb
index bd0fcbbdeb2..f617e8b3ce7 100644
--- a/spec/lib/gitlab/graphql/connections/keyset/connection_spec.rb
+++ b/spec/lib/gitlab/graphql/connections/keyset/connection_spec.rb
@@ -232,7 +232,9 @@ describe Gitlab::Graphql::Connections::Keyset::Connection do
let_it_be(:all_nodes) { create_list(:project, 5) }
let(:paged_nodes) { subject.paged_nodes }
- it_behaves_like "connection with paged nodes"
+ it_behaves_like "connection with paged nodes" do
+ let(:paged_nodes_size) { 3 }
+ end
context 'when both are passed' do
let(:arguments) { { first: 2, last: 2 } }
diff --git a/spec/lib/gitlab/group_search_results_spec.rb b/spec/lib/gitlab/group_search_results_spec.rb
index 570b0cb7401..746f505c877 100644
--- a/spec/lib/gitlab/group_search_results_spec.rb
+++ b/spec/lib/gitlab/group_search_results_spec.rb
@@ -67,5 +67,11 @@ describe Gitlab::GroupSearchResults do
expect(result).to eq []
end
+
+ it 'sets include_subgroups flag by default' do
+ result = described_class.new(user, anything, group, 'gob')
+
+ expect(result.issuable_params[:include_subgroups]).to eq(true)
+ end
end
end
diff --git a/spec/lib/gitlab/health_checks/puma_check_spec.rb b/spec/lib/gitlab/health_checks/puma_check_spec.rb
index dd052a4dd2c..93ef81978a8 100644
--- a/spec/lib/gitlab/health_checks/puma_check_spec.rb
+++ b/spec/lib/gitlab/health_checks/puma_check_spec.rb
@@ -22,6 +22,7 @@ describe Gitlab::HealthChecks::PumaCheck do
context 'when Puma is not loaded' do
before do
+ allow(Gitlab::Runtime).to receive(:puma?).and_return(false)
hide_const('Puma')
end
@@ -33,6 +34,7 @@ describe Gitlab::HealthChecks::PumaCheck do
context 'when Puma is loaded' do
before do
+ allow(Gitlab::Runtime).to receive(:puma?).and_return(true)
stub_const('Puma', Module.new)
end
diff --git a/spec/lib/gitlab/health_checks/unicorn_check_spec.rb b/spec/lib/gitlab/health_checks/unicorn_check_spec.rb
index 931b61cb168..7c57b6f1ca5 100644
--- a/spec/lib/gitlab/health_checks/unicorn_check_spec.rb
+++ b/spec/lib/gitlab/health_checks/unicorn_check_spec.rb
@@ -26,6 +26,7 @@ describe Gitlab::HealthChecks::UnicornCheck do
context 'when Unicorn is not loaded' do
before do
+ allow(Gitlab::Runtime).to receive(:unicorn?).and_return(false)
hide_const('Unicorn')
end
@@ -39,6 +40,7 @@ describe Gitlab::HealthChecks::UnicornCheck do
let(:http_server_class) { Struct.new(:worker_processes) }
before do
+ allow(Gitlab::Runtime).to receive(:unicorn?).and_return(true)
stub_const('Unicorn::HttpServer', http_server_class)
end
diff --git a/spec/lib/gitlab/highlight_spec.rb b/spec/lib/gitlab/highlight_spec.rb
index 5a45d724b83..2140cbae488 100644
--- a/spec/lib/gitlab/highlight_spec.rb
+++ b/spec/lib/gitlab/highlight_spec.rb
@@ -111,7 +111,7 @@ describe Gitlab::Highlight do
end
it 'utilizes longer timeout for sidekiq' do
- allow(Sidekiq).to receive(:server?).and_return(true)
+ allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true)
expect(Timeout).to receive(:timeout).with(described_class::TIMEOUT_BACKGROUND).and_call_original
subject.highlight("Content")
diff --git a/spec/lib/gitlab/import/merge_request_helpers_spec.rb b/spec/lib/gitlab/import/merge_request_helpers_spec.rb
index 42515888d4f..2b165994152 100644
--- a/spec/lib/gitlab/import/merge_request_helpers_spec.rb
+++ b/spec/lib/gitlab/import/merge_request_helpers_spec.rb
@@ -19,8 +19,7 @@ describe Gitlab::Import::MergeRequestHelpers, type: :helper do
source_branch: 'master-42',
target_branch: 'master',
state_id: 3,
- author_id: user.id,
- assignee_id: user.id
+ author_id: user.id
}
end
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 817aedc19b0..08e57e541a4 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -6,8 +6,11 @@ issues:
- assignees
- updated_by
- milestone
+- issue_milestones
+- milestones
- notes
- resource_label_events
+- resource_weight_events
- sentry_issue
- label_links
- labels
@@ -35,6 +38,8 @@ issues:
- vulnerability_links
- related_vulnerabilities
- user_mentions
+- blocked_by_issue_links
+- blocked_by_issues
events:
- author
- project
@@ -77,6 +82,8 @@ milestone:
- boards
- milestone_releases
- releases
+- issue_milestones
+- merge_request_milestones
snippets:
- author
- project
@@ -105,6 +112,8 @@ merge_requests:
- assignee
- updated_by
- milestone
+- merge_request_milestones
+- milestones
- notes
- resource_label_events
- label_links
@@ -145,6 +154,12 @@ merge_requests:
- deployment_merge_requests
- deployments
- user_mentions
+issue_milestones:
+- milestone
+- issue
+merge_request_milestones:
+- milestone
+- merge_request
external_pull_requests:
- project
merge_request_diff:
@@ -188,16 +203,20 @@ ci_pipelines:
- sourced_pipelines
- triggered_by_pipeline
- triggered_pipelines
+- child_pipelines
+- parent_pipeline
- downstream_bridges
- job_artifacts
- vulnerabilities_occurrence_pipelines
- vulnerability_findings
+- pipeline_config
pipeline_variables:
- pipeline
stages:
- project
- pipeline
- statuses
+- processables
- builds
- bridges
statuses:
@@ -446,6 +465,8 @@ project:
- service_desk_setting
- import_failures
- container_expiration_policy
+- resource_groups
+- autoclose_referenced_issues
award_emoji:
- awardable
- user
@@ -560,3 +581,30 @@ zoom_meetings:
sentry_issue:
- issue
design_versions: *version
+epic:
+- subscriptions
+- award_emoji
+- description_versions
+- author
+- assignee
+- issues
+- epic_issues
+- milestone
+- notes
+- label_links
+- labels
+- todos
+- metrics
+- group
+- parent
+- children
+- updated_by
+- last_edited_by
+- closed_by
+- start_date_sourcing_milestone
+- due_date_sourcing_milestone
+- start_date_sourcing_epic
+- due_date_sourcing_epic
+- events
+- resource_label_events
+- user_mentions
\ No newline at end of file
diff --git a/spec/lib/gitlab/import_export/base_object_builder_spec.rb b/spec/lib/gitlab/import_export/base_object_builder_spec.rb
new file mode 100644
index 00000000000..fbb3b08cf56
--- /dev/null
+++ b/spec/lib/gitlab/import_export/base_object_builder_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ImportExport::BaseObjectBuilder do
+ let(:project) do
+ create(:project, :repository,
+ :builds_disabled,
+ :issues_disabled,
+ name: 'project',
+ path: 'project')
+ end
+ let(:klass) { Milestone }
+ let(:attributes) { { 'title' => 'Test BaseObjectBuilder Milestone', 'project' => project } }
+
+ subject { described_class.build(klass, attributes) }
+
+ describe '#build' do
+ context 'when object exists' do
+ context 'when where_clauses are implemented' do
+ before do
+ allow_next_instance_of(described_class) do |object_builder|
+ allow(object_builder).to receive(:where_clauses).and_return([klass.arel_table['title'].eq(attributes['title'])])
+ end
+ end
+
+ let!(:milestone) { create(:milestone, title: attributes['title'], project: project) }
+
+ it 'finds existing object instead of creating one' do
+ expect(subject).to eq(milestone)
+ end
+ end
+
+ context 'when where_clauses are not implemented' do
+ it 'raises NotImplementedError' do
+ expect { subject }.to raise_error(NotImplementedError)
+ end
+ end
+ end
+
+ context 'when object does not exist' do
+ before do
+ allow_next_instance_of(described_class) do |object_builder|
+ allow(object_builder).to receive(:find_object).and_return(nil)
+ end
+ end
+
+ it 'creates new object' do
+ expect { subject }.to change { Milestone.count }.from(0).to(1)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/base_relation_factory_spec.rb b/spec/lib/gitlab/import_export/base_relation_factory_spec.rb
new file mode 100644
index 00000000000..def3e43de9b
--- /dev/null
+++ b/spec/lib/gitlab/import_export/base_relation_factory_spec.rb
@@ -0,0 +1,145 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ImportExport::BaseRelationFactory do
+ let(:user) { create(:admin) }
+ let(:project) { create(:project) }
+ let(:members_mapper) { double('members_mapper').as_null_object }
+ let(:relation_sym) { :project_snippets }
+ let(:merge_requests_mapping) { {} }
+ let(:relation_hash) { {} }
+ let(:excluded_keys) { [] }
+
+ subject do
+ described_class.create(relation_sym: relation_sym,
+ relation_hash: relation_hash,
+ object_builder: Gitlab::ImportExport::GroupProjectObjectBuilder,
+ members_mapper: members_mapper,
+ merge_requests_mapping: merge_requests_mapping,
+ user: user,
+ importable: project,
+ excluded_keys: excluded_keys)
+ end
+
+ describe '#create' do
+ context 'when relation is invalid' do
+ before do
+ expect_next_instance_of(described_class) do |relation_factory|
+ expect(relation_factory).to receive(:invalid_relation?).and_return(true)
+ end
+ end
+
+ it 'returns without creating new relations' do
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'when #setup_models is not implemented' do
+ it 'raises NotImplementedError' do
+ expect { subject }.to raise_error(NotImplementedError)
+ end
+ end
+
+ context 'when #setup_models is implemented' do
+ let(:relation_sym) { :notes }
+ let(:relation_hash) do
+ {
+ "id" => 4947,
+ "note" => "merged",
+ "noteable_type" => "MergeRequest",
+ "author_id" => 999,
+ "created_at" => "2016-11-18T09:29:42.634Z",
+ "updated_at" => "2016-11-18T09:29:42.634Z",
+ "project_id" => 1,
+ "attachment" => {
+ "url" => nil
+ },
+ "noteable_id" => 377,
+ "system" => true,
+ "events" => []
+ }
+ end
+
+ before do
+ expect_next_instance_of(described_class) do |relation_factory|
+ expect(relation_factory).to receive(:setup_models).and_return(true)
+ end
+ end
+
+ it 'creates imported object' do
+ expect(subject).to be_instance_of(Note)
+ end
+
+ context 'when relation contains user references' do
+ let(:new_user) { create(:user) }
+ let(:exported_member) do
+ {
+ "id" => 111,
+ "access_level" => 30,
+ "source_id" => 1,
+ "source_type" => "Project",
+ "user_id" => 3,
+ "notification_level" => 3,
+ "created_at" => "2016-11-18T09:29:42.634Z",
+ "updated_at" => "2016-11-18T09:29:42.634Z",
+ "user" => {
+ "id" => 999,
+ "email" => new_user.email,
+ "username" => new_user.username
+ }
+ }
+ end
+
+ let(:members_mapper) do
+ Gitlab::ImportExport::MembersMapper.new(
+ exported_members: [exported_member],
+ user: user,
+ importable: project)
+ end
+
+ it 'maps the right author to the imported note' do
+ expect(subject.author).to eq(new_user)
+ end
+ end
+
+ context 'when relation contains token attributes' do
+ let(:relation_sym) { 'ProjectHook' }
+ let(:relation_hash) { { token: 'secret' } }
+
+ it 'removes token attributes' do
+ expect(subject.token).to be_nil
+ end
+ end
+
+ context 'when relation contains encrypted attributes' do
+ let(:relation_sym) { 'Ci::Variable' }
+ let(:relation_hash) do
+ create(:ci_variable).as_json
+ end
+
+ it 'removes encrypted attributes' do
+ expect(subject.value).to be_nil
+ end
+ end
+ end
+ end
+
+ describe '.relation_class' do
+ context 'when relation name is pluralized' do
+ let(:relation_name) { 'MergeRequest::Metrics' }
+
+ it 'returns constantized class' do
+ expect(described_class.relation_class(relation_name)).to eq(MergeRequest::Metrics)
+ end
+ end
+
+ context 'when relation name is singularized' do
+ let(:relation_name) { 'Badge' }
+
+ it 'returns constantized class' do
+ expect(described_class.relation_class(relation_name)).to eq(Badge)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/group_project_object_builder_spec.rb b/spec/lib/gitlab/import_export/group_project_object_builder_spec.rb
index 0d0a2df4423..355757654da 100644
--- a/spec/lib/gitlab/import_export/group_project_object_builder_spec.rb
+++ b/spec/lib/gitlab/import_export/group_project_object_builder_spec.rb
@@ -12,6 +12,59 @@ describe Gitlab::ImportExport::GroupProjectObjectBuilder do
group: create(:group))
end
+ let(:lru_cache) { subject.send(:lru_cache) }
+ let(:cache_key) { subject.send(:cache_key) }
+
+ context 'request store is not active' do
+ subject do
+ described_class.new(Label,
+ 'title' => 'group label',
+ 'project' => project,
+ 'group' => project.group)
+ end
+
+ it 'does not initialize the cache' do
+ expect(lru_cache).to be_nil
+ expect(cache_key).to be_nil
+ end
+ end
+
+ context 'request store is active', :request_store do
+ subject do
+ described_class.new(Label,
+ 'title' => 'group label',
+ 'project' => project,
+ 'group' => project.group)
+ end
+
+ it 'initializes the cache in memory' do
+ expect(lru_cache).not_to be_nil
+ expect(cache_key).not_to be_nil
+ end
+
+ it 'caches the object the first time it is found' do
+ group_label = create(:group_label, name: 'group label', group: project.group)
+
+ expect(subject).to receive(:find_object).and_call_original
+ expect { subject.find }
+ .to change { lru_cache[cache_key] }
+ .from(nil).to(group_label)
+
+ expect(subject.find).to eq(group_label)
+ end
+
+ it 'reads from the cache when the object has been cached' do
+ group_label = create(:group_label, name: 'group label', group: project.group)
+
+ subject.find
+
+ expect(subject).not_to receive(:find_object)
+ expect { subject.find }.not_to change { lru_cache[cache_key] }
+
+ expect(subject.find).to eq(group_label)
+ end
+ end
+
context 'labels' do
it 'finds the existing group label' do
group_label = create(:group_label, name: 'group label', group: project.group)
diff --git a/spec/lib/gitlab/import_export/import_failure_service_spec.rb b/spec/lib/gitlab/import_export/import_failure_service_spec.rb
new file mode 100644
index 00000000000..0351f88afdb
--- /dev/null
+++ b/spec/lib/gitlab/import_export/import_failure_service_spec.rb
@@ -0,0 +1,107 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ImportExport::ImportFailureService do
+ let(:importable) { create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project') }
+ let(:label) { create(:label) }
+ let(:subject) { described_class.new(importable) }
+ let(:relation_key) { "labels" }
+ let(:relation_index) { 0 }
+
+ describe '#log_import_failure' do
+ let(:standard_error_message) { "StandardError message" }
+ let(:exception) { StandardError.new(standard_error_message) }
+ let(:correlation_id) { 'my-correlation-id' }
+ let(:retry_count) { 2 }
+ let(:log_import_failure) do
+ subject.log_import_failure(relation_key, relation_index, exception, retry_count)
+ end
+
+ before do
+ # Import is running from the rake task, `correlation_id` is not assigned
+ allow(Labkit::Correlation::CorrelationId).to receive(:current_or_new_id).and_return(correlation_id)
+ end
+
+ context 'when importable is a group' do
+ let(:importable) { create(:group) }
+
+ it_behaves_like 'log import failure', :group_id
+ end
+
+ context 'when importable is a project' do
+ it_behaves_like 'log import failure', :project_id
+ end
+
+ context 'when ImportFailure does not support importable class' do
+ let(:importable) { create(:merge_request) }
+
+ it 'raises an exception' do
+ expect { subject }.to raise_exception(ActiveRecord::AssociationNotFoundError, "Association named 'import_failures' was not found on MergeRequest; perhaps you misspelled it?")
+ end
+ end
+ end
+
+ describe '#with_retry' do
+ let(:perform_retry) do
+ subject.with_retry(relation_key, relation_index) do
+ label.save!
+ end
+ end
+
+ context 'when exceptions are retriable' do
+ where(:exception) { Gitlab::ImportExport::ImportFailureService::RETRIABLE_EXCEPTIONS }
+
+ with_them do
+ context 'when retry succeeds' do
+ before do
+ expect(label).to receive(:save!).and_raise(exception.new)
+ expect(label).to receive(:save!).and_return(true)
+ end
+
+ it 'retries and logs import failure once with correct params' do
+ expect(subject).to receive(:log_import_failure).with(relation_key, relation_index, instance_of(exception), 1).once
+
+ perform_retry
+ end
+ end
+
+ context 'when retry continues to fail with intermittent errors' do
+ let(:maximum_retry_count) do
+ Retriable.config.tries
+ end
+
+ before do
+ expect(label).to receive(:save!)
+ .exactly(maximum_retry_count).times
+ .and_raise(exception.new)
+ end
+
+ it 'retries the allowed number of times and raises the exception', :aggregate_failures do
+ expect { perform_retry }.to raise_exception(exception)
+ end
+
+ it 'logs the import failure each time and raises the exception', :aggregate_failures do
+ maximum_retry_count.times do |index|
+ retry_count = index + 1
+
+ expect(subject).to receive(:log_import_failure).with(relation_key, relation_index, instance_of(exception), retry_count)
+ end
+
+ expect { perform_retry }.to raise_exception(exception)
+ end
+ end
+ end
+ end
+
+ context 'when exception is not retriable' do
+ let(:exception) { StandardError.new }
+
+ it 'raises the exception', :aggregate_failures do
+ expect(label).to receive(:save!).once.and_raise(exception)
+ expect(subject).not_to receive(:log_import_failure)
+ expect { perform_retry }.to raise_exception(exception)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/import_test_coverage_spec.rb b/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
new file mode 100644
index 00000000000..97d5ce07d47
--- /dev/null
+++ b/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
@@ -0,0 +1,132 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+# We want to test Import on a "complete" data set,
+# which means that every relation (as in our Import/Export definition) is covered.
+# Fixture JSONs we use for testing Import, such as
+# `spec/fixtures/lib/gitlab/import_export/complex/project.json`,
+# should include all of these relations, each being non-empty.
+describe 'Test coverage of the Project Import' do
+ include ConfigurationHelper
+
+ # `MUTED_RELATIONS` is a technical debt.
+ # This list is expected to be empty or used as a workaround
+ # in case this spec blocks an important, urgent MR.
+ # It is also expected that adding a relation to this list should lead to
+ # opening a follow-up issue to fix it.
+ MUTED_RELATIONS = %w[
+ project.milestones.events.push_event_payload
+ project.issues.events
+ project.issues.events.push_event_payload
+ project.issues.notes.events
+ project.issues.notes.events.push_event_payload
+ project.issues.milestone.events.push_event_payload
+ project.issues.issue_milestones
+ project.issues.issue_milestones.milestone
+ project.issues.resource_label_events.label.priorities
+ project.issues.designs.notes
+ project.issues.designs.notes.author
+ project.issues.designs.notes.events
+ project.issues.designs.notes.events.push_event_payload
+ project.merge_requests.metrics
+ project.merge_requests.notes.events.push_event_payload
+ project.merge_requests.events.push_event_payload
+ project.merge_requests.timelogs
+ project.merge_requests.label_links
+ project.merge_requests.label_links.label
+ project.merge_requests.label_links.label.priorities
+ project.merge_requests.milestone
+ project.merge_requests.milestone.events
+ project.merge_requests.milestone.events.push_event_payload
+ project.merge_requests.merge_request_milestones
+ project.merge_requests.merge_request_milestones.milestone
+ project.merge_requests.resource_label_events.label
+ project.merge_requests.resource_label_events.label.priorities
+ project.ci_pipelines.notes.events
+ project.ci_pipelines.notes.events.push_event_payload
+ project.protected_branches.unprotect_access_levels
+ project.prometheus_metrics
+ project.metrics_setting
+ project.boards.lists.label.priorities
+ project.service_desk_setting
+ ].freeze
+
+ # A list of JSON fixture files we use to test Import.
+ # Note that we use a separate fixture to test ee-only features.
+ # Most of the relations are present in `complex/project.json`
+ # which is our main fixture.
+ PROJECT_JSON_FIXTURES_EE =
+ if Gitlab.ee?
+ ['ee/spec/fixtures/lib/gitlab/import_export/designs/project.json'].freeze
+ else
+ []
+ end
+
+ PROJECT_JSON_FIXTURES = [
+ 'spec/fixtures/lib/gitlab/import_export/complex/project.json',
+ 'spec/fixtures/lib/gitlab/import_export/group/project.json',
+ 'spec/fixtures/lib/gitlab/import_export/light/project.json',
+ 'spec/fixtures/lib/gitlab/import_export/milestone-iid/project.json'
+ ].freeze + PROJECT_JSON_FIXTURES_EE
+
+ it 'ensures that all imported/exported relations are present in test JSONs' do
+ not_tested_relations = (relations_from_config - tested_relations) - MUTED_RELATIONS
+
+ expect(not_tested_relations).to be_empty, failure_message(not_tested_relations)
+ end
+
+ def relations_from_config
+ relation_paths_for(:project)
+ .map { |relation_names| relation_names.join(".") }
+ .to_set
+ end
+
+ def tested_relations
+ PROJECT_JSON_FIXTURES.flat_map(&method(:relations_from_json)).to_set
+ end
+
+ def relations_from_json(json_file)
+ json = ActiveSupport::JSON.decode(IO.read(json_file))
+
+ Gitlab::ImportExport::RelationRenameService.rename(json)
+
+ [].tap {|res| gather_relations({ project: json }, res, [])}
+ .map {|relation_names| relation_names.join('.')}
+ end
+
+ def gather_relations(item, res, path)
+ case item
+ when Hash
+ item.each do |k, v|
+ if (v.is_a?(Array) || v.is_a?(Hash)) && v.present?
+ new_path = path + [k]
+ res << new_path
+ gather_relations(v, res, new_path)
+ end
+ end
+ when Array
+ item.each {|i| gather_relations(i, res, path)}
+ end
+ end
+
+ def failure_message(not_tested_relations)
+ <<~MSG
+ These relations seem to have been added recently and
+ they are expected to be covered in our Import specs: #{not_tested_relations}.
+
+ To do that, expand one of the files listed in `PROJECT_JSON_FIXTURES`
+ (or expand the list if you consider adding a new fixture file).
+
+ After that, add a new spec into
+ `spec/lib/gitlab/import_export/project_tree_restorer_spec.rb`
+ to check that the relation is being imported correctly.
+
+ In case the spec breaks master or there is a sense of urgency,
+ you could include the relations in the `MUTED_RELATIONS` list.
+
+ Muting relations is considered a temporary solution, so please
+ open a follow-up issue and try to fix it when possible.
+ MSG
+ end
+end
diff --git a/spec/lib/gitlab/import_export/relation_factory_spec.rb b/spec/lib/gitlab/import_export/project_relation_factory_spec.rb
index 41d6e6f24fc..0ade7ac4fc7 100644
--- a/spec/lib/gitlab/import_export/relation_factory_spec.rb
+++ b/spec/lib/gitlab/import_export/project_relation_factory_spec.rb
@@ -2,8 +2,9 @@
require 'spec_helper'
-describe Gitlab::ImportExport::RelationFactory do
- let(:project) { create(:project) }
+describe Gitlab::ImportExport::ProjectRelationFactory do
+ let(:group) { create(:group) }
+ let(:project) { create(:project, :repository, group: group) }
let(:members_mapper) { double('members_mapper').as_null_object }
let(:merge_requests_mapping) { {} }
let(:user) { create(:admin) }
@@ -11,10 +12,11 @@ describe Gitlab::ImportExport::RelationFactory do
let(:created_object) do
described_class.create(relation_sym: relation_sym,
relation_hash: relation_hash,
+ object_builder: Gitlab::ImportExport::GroupProjectObjectBuilder,
members_mapper: members_mapper,
merge_requests_mapping: merge_requests_mapping,
user: user,
- project: project,
+ importable: project,
excluded_keys: excluded_keys)
end
@@ -59,7 +61,7 @@ describe Gitlab::ImportExport::RelationFactory do
end
it 'has the new project_id' do
- expect(created_object.project_id).to eq(project.id)
+ expect(created_object.project_id).to eql(project.id)
end
it 'has a nil token' do
@@ -96,6 +98,100 @@ describe Gitlab::ImportExport::RelationFactory do
end
end
+ context 'merge_request object' do
+ let(:relation_sym) { :merge_requests }
+
+ let(:exported_member) do
+ {
+ "id" => 111,
+ "access_level" => 30,
+ "source_id" => 1,
+ "source_type" => "Project",
+ "user_id" => 3,
+ "notification_level" => 3,
+ "created_at" => "2016-11-18T09:29:42.634Z",
+ "updated_at" => "2016-11-18T09:29:42.634Z",
+ "user" => {
+ "id" => user.id,
+ "email" => user.email,
+ "username" => user.username
+ }
+ }
+ end
+
+ let(:members_mapper) do
+ Gitlab::ImportExport::MembersMapper.new(
+ exported_members: [exported_member],
+ user: user,
+ importable: project)
+ end
+
+ let(:relation_hash) do
+ {
+ 'id' => 27,
+ 'target_branch' => "feature",
+ 'source_branch' => "feature_conflict",
+ 'source_project_id' => project.id,
+ 'target_project_id' => project.id,
+ 'author_id' => user.id,
+ 'assignee_id' => user.id,
+ 'updated_by_id' => user.id,
+ 'title' => "MR1",
+ 'created_at' => "2016-06-14T15:02:36.568Z",
+ 'updated_at' => "2016-06-14T15:02:56.815Z",
+ 'state' => "opened",
+ 'merge_status' => "unchecked",
+ 'description' => "Description",
+ 'position' => 0,
+ 'source_branch_sha' => "ABCD",
+ 'target_branch_sha' => "DCBA",
+ 'merge_when_pipeline_succeeds' => true
+ }
+ end
+
+ it 'has preloaded author' do
+ expect(created_object.author).to equal(user)
+ end
+
+ it 'has preloaded updated_by' do
+ expect(created_object.updated_by).to equal(user)
+ end
+
+ it 'has preloaded source project' do
+ expect(created_object.source_project).to equal(project)
+ end
+
+ it 'has preloaded target project' do
+ expect(created_object.target_project).to equal(project)
+ end
+ end
+
+ context 'label object' do
+ let(:relation_sym) { :labels }
+ let(:relation_hash) do
+ {
+ "id": 3,
+ "title": "test3",
+ "color": "#428bca",
+ "group_id": project.group.id,
+ "created_at": "2016-07-22T08:55:44.161Z",
+ "updated_at": "2016-07-22T08:55:44.161Z",
+ "template": false,
+ "description": "",
+ "project_id": project.id,
+ "type": "GroupLabel"
+ }
+ end
+
+ it 'has preloaded project' do
+ expect(created_object.project).to equal(project)
+ end
+
+ it 'has preloaded group' do
+ expect(created_object.group).to equal(project.group)
+ end
+ end
+
# `project_id`, `described_class.USER_REFERENCES`, noteable_id, target_id, and some project IDs are already
# re-assigned by described_class.
context 'Potentially hazardous foreign keys' do
@@ -118,6 +214,10 @@ describe Gitlab::ImportExport::RelationFactory do
attr_accessor :service_id, :moved_to_id, :namespace_id, :ci_id, :random_project_id, :random_id, :milestone_id, :project_id
end
+ before do
+ allow(HazardousFooModel).to receive(:reflect_on_association).and_return(nil)
+ end
+
it 'does not preserve any foreign key IDs' do
expect(created_object.values).not_to include(99)
end
@@ -145,11 +245,15 @@ describe Gitlab::ImportExport::RelationFactory do
context 'Project references' do
let(:relation_sym) { :project_foo_model }
let(:relation_hash) do
- Gitlab::ImportExport::RelationFactory::PROJECT_REFERENCES.map { |ref| { ref => 99 } }.inject(:merge)
+ Gitlab::ImportExport::ProjectRelationFactory::PROJECT_REFERENCES.map { |ref| { ref => 99 } }.inject(:merge)
end
class ProjectFooModel < FooModel
- attr_accessor(*Gitlab::ImportExport::RelationFactory::PROJECT_REFERENCES)
+ attr_accessor(*Gitlab::ImportExport::ProjectRelationFactory::PROJECT_REFERENCES)
+ end
+
+ before do
+ allow(ProjectFooModel).to receive(:reflect_on_association).and_return(nil)
end
it 'does not preserve any project foreign key IDs' do
diff --git a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb
index ec1b935ad63..ac9a63e8414 100644
--- a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb
@@ -36,10 +36,6 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
end
context 'JSON' do
- before do
- stub_feature_flags(use_legacy_pipeline_triggers: false)
- end
-
it 'restores models based on JSON' do
expect(@restored_project_json).to be_truthy
end
@@ -120,6 +116,15 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
expect(Issue.find_by(title: 'Issue without assignees').assignees).to be_empty
end
+ it 'restores timelogs for issues' do
+ timelog = Issue.find_by(title: 'issue_with_timelogs').timelogs.last
+
+ aggregate_failures do
+ expect(timelog.time_spent).to eq(72000)
+ expect(timelog.spent_at).to eq("2019-12-27T00:00:00.000Z")
+ end
+ end
+
it 'contains the merge access levels on a protected branch' do
expect(ProtectedBranch.first.merge_access_levels).not_to be_empty
end
@@ -219,10 +224,25 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
expect(award_emoji.map(&:name)).to contain_exactly('thumbsup', 'coffee')
end
+ it 'snippet has notes' do
+ expect(@project.snippets.first.notes.count).to eq(1)
+ end
+
+ it 'snippet has award emojis on notes' do
+ award_emoji = @project.snippets.first.notes.first.award_emoji.first
+
+ expect(award_emoji.name).to eq('thumbsup')
+ end
+
it 'restores `ci_cd_settings` : `group_runners_enabled` setting' do
expect(@project.ci_cd_settings.group_runners_enabled?).to eq(false)
end
+ it 'restores `auto_devops`' do
+ expect(@project.auto_devops_enabled?).to eq(true)
+ expect(@project.auto_devops.deploy_strategy).to eq('continuous')
+ end
+
it 'restores the correct service' do
expect(CustomIssueTrackerService.first).not_to be_nil
end
@@ -240,6 +260,18 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
expect(sentry_issue.sentry_issue_identifier).to eq(1234567891)
end
+ it 'has award emoji for an issue' do
+ award_emoji = @project.issues.first.award_emoji.first
+
+ expect(award_emoji.name).to eq('musical_keyboard')
+ end
+
+ it 'has award emoji for a note in an issue' do
+ award_emoji = @project.issues.first.notes.first.award_emoji.first
+
+ expect(award_emoji.name).to eq('clapper')
+ end
+
it 'restores container_expiration_policy' do
policy = Project.find_by_path('project').container_expiration_policy
@@ -250,6 +282,55 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
end
end
+ it 'restores error_tracking_setting' do
+ setting = @project.error_tracking_setting
+
+ aggregate_failures do
+ expect(setting.api_url).to eq("https://gitlab.example.com/api/0/projects/sentry-org/sentry-project")
+ expect(setting.project_name).to eq("Sentry Project")
+ expect(setting.organization_name).to eq("Sentry Org")
+ end
+ end
+
+ it 'restores external pull requests' do
+ external_pr = @project.external_pull_requests.last
+
+ aggregate_failures do
+ expect(external_pr.pull_request_iid).to eq(4)
+ expect(external_pr.source_branch).to eq("feature")
+ expect(external_pr.target_branch).to eq("master")
+ expect(external_pr.status).to eq("open")
+ end
+ end
+
+ it 'restores pipeline schedules' do
+ pipeline_schedule = @project.pipeline_schedules.last
+
+ aggregate_failures do
+ expect(pipeline_schedule.description).to eq('Schedule Description')
+ expect(pipeline_schedule.ref).to eq('master')
+ expect(pipeline_schedule.cron).to eq('0 4 * * 0')
+ expect(pipeline_schedule.cron_timezone).to eq('UTC')
+ expect(pipeline_schedule.active).to eq(true)
+ end
+ end
+
+ it 'restores releases with links' do
+ release = @project.releases.last
+ link = release.links.last
+
+ aggregate_failures do
+ expect(release.tag).to eq('release-1.1')
+ expect(release.description).to eq('Some release notes')
+ expect(release.name).to eq('release-1.1')
+ expect(release.sha).to eq('901de3a8bd5573f4a049b1457d28bc1592ba6bf9')
+ expect(release.released_at).to eq('2019-12-26T10:17:14.615Z')
+
+ expect(link.url).to eq('http://localhost/namespace6/project6/-/jobs/140463678/artifacts/download')
+ expect(link.name).to eq('release-1.1.dmg')
+ end
+ end
+
context 'Merge requests' do
it 'always has the new project as a target' do
expect(MergeRequest.find_by_title('MR1').target_project).to eq(@project)
@@ -266,6 +347,20 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
it 'has no source if source/target differ' do
expect(MergeRequest.find_by_title('MR2').source_project_id).to be_nil
end
+
+ it 'has award emoji' do
+ award_emoji = MergeRequest.find_by_title('MR1').award_emoji
+
+ expect(award_emoji.map(&:name)).to contain_exactly('thumbsup', 'drum')
+ end
+
+ context 'notes' do
+ it 'has award emoji' do
+ award_emoji = MergeRequest.find_by_title('MR1').notes.first.award_emoji.first
+
+ expect(award_emoji.name).to eq('tada')
+ end
+ end
end
context 'tokens are regenerated' do
@@ -289,9 +384,9 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
end
it 'has the correct number of pipelines and statuses' do
- expect(@project.ci_pipelines.size).to eq(6)
+ expect(@project.ci_pipelines.size).to eq(7)
- @project.ci_pipelines.order(:id).zip([2, 2, 2, 2, 2, 0])
+ @project.ci_pipelines.order(:id).zip([2, 2, 2, 2, 2, 0, 0])
.each do |(pipeline, expected_status_size)|
expect(pipeline.statuses.size).to eq(expected_status_size)
end
@@ -300,7 +395,7 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
context 'when restoring hierarchy of pipeline, stages and jobs' do
it 'restores pipelines' do
- expect(Ci::Pipeline.all.count).to be 6
+ expect(Ci::Pipeline.all.count).to be 7
end
it 'restores pipeline stages' do
@@ -326,6 +421,12 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
it 'restores a Hash for CommitStatus options' do
expect(CommitStatus.all.map(&:options).compact).to all(be_a(Hash))
end
+
+ it 'restores external pull request for the restored pipeline' do
+ pipeline_with_external_pr = @project.ci_pipelines.order(:id).last
+
+ expect(pipeline_with_external_pr.external_pull_request).to be_persisted
+ end
end
end
end
@@ -466,7 +567,7 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
end
it_behaves_like 'restores project successfully',
- issues: 2,
+ issues: 3,
labels: 2,
label_with_priorities: 'A project label',
milestones: 2,
@@ -479,7 +580,7 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
it 'restores issue states' do
expect(project.issues.with_state(:closed).count).to eq(1)
- expect(project.issues.with_state(:opened).count).to eq(1)
+ expect(project.issues.with_state(:opened).count).to eq(2)
end
end
@@ -654,13 +755,10 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
let(:user) { create(:user) }
let!(:project) { create(:project, :builds_disabled, :issues_disabled, name: 'project', path: 'project') }
let(:project_tree_restorer) { described_class.new(user: user, shared: shared, project: project) }
- let(:correlation_id) { 'my-correlation-id' }
before do
setup_import_export_config('with_invalid_records')
- # Import is running from the rake task, `correlation_id` is not assigned
- expect(Labkit::Correlation::CorrelationId).to receive(:new_id).and_return(correlation_id)
subject
end
@@ -682,7 +780,7 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
expect(import_failure.relation_index).to be_present
expect(import_failure.exception_class).to eq('ActiveRecord::RecordInvalid')
expect(import_failure.exception_message).to be_present
- expect(import_failure.correlation_id_value).to eq('my-correlation-id')
+ expect(import_failure.correlation_id_value).not_to be_empty
expect(import_failure.created_at).to be_present
end
end
diff --git a/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb
index c761f9652ab..edb2c0a131a 100644
--- a/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb
@@ -27,6 +27,7 @@ describe Gitlab::ImportExport::RelationTreeRestorer do
shared: shared,
tree_hash: tree_hash,
importable: importable,
+ object_builder: object_builder,
members_mapper: members_mapper,
relation_factory: relation_factory,
reader: reader
@@ -38,7 +39,8 @@ describe Gitlab::ImportExport::RelationTreeRestorer do
context 'when restoring a project' do
let(:path) { 'spec/fixtures/lib/gitlab/import_export/complex/project.json' }
let(:importable) { create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project') }
- let(:relation_factory) { Gitlab::ImportExport::RelationFactory }
+ let(:object_builder) { Gitlab::ImportExport::GroupProjectObjectBuilder }
+ let(:relation_factory) { Gitlab::ImportExport::ProjectRelationFactory }
let(:reader) { Gitlab::ImportExport::Reader.new(shared: shared) }
let(:tree_hash) { importable_hash }
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index 79442c35797..ad363233bfe 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -333,6 +333,7 @@ CommitStatus:
- scheduled_at
- upstream_pipeline_id
- interruptible
+- processed
Ci::Variable:
- id
- project_id
@@ -534,6 +535,8 @@ Project:
- pages_https_only
- merge_requests_disable_committers_approval
- require_password_to_approve
+- autoclose_referenced_issues
+- suggestion_commit_message
ProjectTracingSetting:
- external_url
Author:
@@ -542,6 +545,7 @@ ProjectFeature:
- id
- project_id
- merge_requests_access_level
+- forking_access_level
- issues_access_level
- wiki_access_level
- snippets_access_level
@@ -764,3 +768,33 @@ ContainerExpirationPolicy:
- older_than
- keep_n
- enabled
+Epic:
+ - id
+ - milestone_id
+ - group_id
+ - author_id
+ - assignee_id
+ - iid
+ - updated_by_id
+ - last_edited_by_id
+ - lock_version
+ - start_date
+ - end_date
+ - last_edited_at
+ - created_at
+ - updated_at
+ - title
+ - description
+ - start_date_sourcing_milestone_id
+ - due_date_sourcing_milestone_id
+ - start_date_fixed
+ - due_date_fixed
+ - start_date_is_fixed
+ - due_date_is_fixed
+ - closed_by_id
+ - closed_at
+ - parent_id
+ - relative_position
+ - state_id
+ - start_date_sourcing_epic_id
+ - due_date_sourcing_epic_id
diff --git a/spec/lib/gitlab/kubernetes/helm/api_spec.rb b/spec/lib/gitlab/kubernetes/helm/api_spec.rb
index 5d9beec093a..e493acd7bad 100644
--- a/spec/lib/gitlab/kubernetes/helm/api_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/api_spec.rb
@@ -6,7 +6,8 @@ describe Gitlab::Kubernetes::Helm::Api do
let(:client) { double('kubernetes client') }
let(:helm) { described_class.new(client) }
let(:gitlab_namespace) { Gitlab::Kubernetes::Helm::NAMESPACE }
- let(:namespace) { Gitlab::Kubernetes::Namespace.new(gitlab_namespace, client) }
+ let(:gitlab_namespace_labels) { Gitlab::Kubernetes::Helm::NAMESPACE_LABELS }
+ let(:namespace) { Gitlab::Kubernetes::Namespace.new(gitlab_namespace, client, labels: gitlab_namespace_labels) }
let(:application_name) { 'app-name' }
let(:rbac) { false }
let(:files) { {} }
@@ -23,13 +24,17 @@ describe Gitlab::Kubernetes::Helm::Api do
subject { helm }
before do
- allow(Gitlab::Kubernetes::Namespace).to receive(:new).with(gitlab_namespace, client).and_return(namespace)
+ allow(Gitlab::Kubernetes::Namespace).to(
+ receive(:new).with(gitlab_namespace, client, labels: gitlab_namespace_labels).and_return(namespace)
+ )
allow(client).to receive(:create_config_map)
end
describe '#initialize' do
it 'creates a namespace object' do
- expect(Gitlab::Kubernetes::Namespace).to receive(:new).with(gitlab_namespace, client)
+ expect(Gitlab::Kubernetes::Namespace).to(
+ receive(:new).with(gitlab_namespace, client, labels: gitlab_namespace_labels)
+ )
subject
end
diff --git a/spec/lib/gitlab/kubernetes/kube_client_spec.rb b/spec/lib/gitlab/kubernetes/kube_client_spec.rb
index 59e81d89a50..e08981a3415 100644
--- a/spec/lib/gitlab/kubernetes/kube_client_spec.rb
+++ b/spec/lib/gitlab/kubernetes/kube_client_spec.rb
@@ -136,6 +136,20 @@ describe Gitlab::Kubernetes::KubeClient do
end
end
+ describe '#istio_client' do
+ subject { client.istio_client }
+
+ it_behaves_like 'a Kubeclient'
+
+ it 'has the Istio API group endpoint' do
+ expect(subject.api_endpoint.to_s).to match(%r{\/apis\/networking.istio.io\Z})
+ end
+
+ it 'has the api_version' do
+ expect(subject.instance_variable_get(:@api_version)).to eq('v1alpha3')
+ end
+ end
+
describe '#knative_client' do
subject { client.knative_client }
@@ -233,6 +247,29 @@ describe Gitlab::Kubernetes::KubeClient do
end
end
+ describe 'istio API group' do
+ let(:istio_client) { client.istio_client }
+
+ [
+ :create_gateway,
+ :get_gateway,
+ :update_gateway
+ ].each do |method|
+ describe "##{method}" do
+ include_examples 'redirection not allowed', method
+ include_examples 'dns rebinding not allowed', method
+
+ it 'delegates to the istio client' do
+ expect(client).to delegate_method(method).to(:istio_client)
+ end
+
+ it 'responds to the method' do
+ expect(client).to respond_to method
+ end
+ end
+ end
+ end
+
describe 'non-entity methods' do
it 'does not proxy for non-entity methods' do
expect(client).not_to respond_to :proxy_url
diff --git a/spec/lib/gitlab/kubernetes/namespace_spec.rb b/spec/lib/gitlab/kubernetes/namespace_spec.rb
index 16634cc48e6..d44a803410f 100644
--- a/spec/lib/gitlab/kubernetes/namespace_spec.rb
+++ b/spec/lib/gitlab/kubernetes/namespace_spec.rb
@@ -5,8 +5,9 @@ require 'spec_helper'
describe Gitlab::Kubernetes::Namespace do
let(:name) { 'a_namespace' }
let(:client) { double('kubernetes client') }
+ let(:labels) { nil }
- subject { described_class.new(name, client) }
+ subject { described_class.new(name, client, labels: labels) }
it { expect(subject.name).to eq(name) }
@@ -49,6 +50,17 @@ describe Gitlab::Kubernetes::Namespace do
expect { subject.create! }.not_to raise_error
end
+
+ context 'with labels' do
+ let(:labels) { { foo: :bar } }
+
+ it 'creates a namespace with labels' do
+ matcher = have_attributes(metadata: have_attributes(name: name, labels: have_attributes(foo: :bar)))
+ expect(client).to receive(:create_namespace).with(matcher).once
+
+ expect { subject.create! }.not_to raise_error
+ end
+ end
end
describe '#ensure_exists!' do
diff --git a/spec/lib/gitlab/metrics/dashboard/processor_spec.rb b/spec/lib/gitlab/metrics/dashboard/processor_spec.rb
index 4fa136bc405..e186a383059 100644
--- a/spec/lib/gitlab/metrics/dashboard/processor_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/processor_spec.rb
@@ -86,6 +86,16 @@ describe Gitlab::Metrics::Dashboard::Processor do
expect(metrics).to eq %w(metric_b metric_a2 metric_a1)
end
end
+
+ context 'when sample_metrics are requested' do
+ let(:process_params) { [project, dashboard_yml, sequence, { environment: environment, sample_metrics: true }] }
+
+ it 'includes a sample metrics path for the prometheus endpoint with each metric' do
+ expect(all_metrics).to satisfy_all do |metric|
+ metric[:prometheus_endpoint_path] == sample_metrics_path(metric[:id])
+ end
+ end
+ end
end
shared_examples_for 'errors with message' do |expected_message|
@@ -147,4 +157,12 @@ describe Gitlab::Metrics::Dashboard::Processor do
query: query
)
end
+
+ def sample_metrics_path(metric)
+ Gitlab::Routing.url_helpers.sample_metrics_project_environment_path(
+ project,
+ environment,
+ identifier: metric
+ )
+ end
end
diff --git a/spec/lib/gitlab/metrics/samplers/influx_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/influx_sampler_spec.rb
index 2d4b27a6ac1..939c057c342 100644
--- a/spec/lib/gitlab/metrics/samplers/influx_sampler_spec.rb
+++ b/spec/lib/gitlab/metrics/samplers/influx_sampler_spec.rb
@@ -63,7 +63,7 @@ describe Gitlab::Metrics::Samplers::InfluxSampler do
describe '#add_metric' do
it 'prefixes the series name for a Rails process' do
- expect(sampler).to receive(:sidekiq?).and_return(false)
+ expect(Gitlab::Runtime).to receive(:sidekiq?).and_return(false)
expect(Gitlab::Metrics::Metric).to receive(:new)
.with('rails_cats', { value: 10 }, {})
@@ -73,7 +73,7 @@ describe Gitlab::Metrics::Samplers::InfluxSampler do
end
it 'prefixes the series name for a Sidekiq process' do
- expect(sampler).to receive(:sidekiq?).and_return(true)
+ expect(Gitlab::Runtime).to receive(:sidekiq?).and_return(true)
expect(Gitlab::Metrics::Metric).to receive(:new)
.with('sidekiq_cats', { value: 10 }, {})
diff --git a/spec/lib/gitlab/middleware/request_context_spec.rb b/spec/lib/gitlab/middleware/request_context_spec.rb
new file mode 100644
index 00000000000..1ed06a97c1e
--- /dev/null
+++ b/spec/lib/gitlab/middleware/request_context_spec.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+require 'fast_spec_helper'
+require 'rack'
+require 'request_store'
+require_relative '../../../support/helpers/next_instance_of'
+
+describe Gitlab::Middleware::RequestContext do
+ include NextInstanceOf
+
+ let(:app) { -> (env) {} }
+ let(:env) { {} }
+
+ around do |example|
+ RequestStore.begin!
+ example.run
+ RequestStore.end!
+ RequestStore.clear!
+ end
+
+ describe '#call' do
+ context 'setting the client ip' do
+ subject { Gitlab::RequestContext.instance.client_ip }
+
+ context 'with X-Forwarded-For headers' do
+ let(:load_balancer_ip) { '1.2.3.4' }
+ let(:headers) do
+ {
+ 'HTTP_X_FORWARDED_FOR' => "#{load_balancer_ip}, 127.0.0.1",
+ 'REMOTE_ADDR' => '127.0.0.1'
+ }
+ end
+
+ let(:env) { Rack::MockRequest.env_for("/").merge(headers) }
+
+ it 'returns the load balancer IP' do
+ endpoint = proc do
+ [200, {}, ["Hello"]]
+ end
+
+ described_class.new(endpoint).call(env)
+
+ expect(subject).to eq(load_balancer_ip)
+ end
+ end
+
+ context 'request' do
+ let(:ip) { '192.168.1.11' }
+
+ before do
+ allow_next_instance_of(Rack::Request) do |instance|
+ allow(instance).to receive(:ip).and_return(ip)
+ end
+ described_class.new(app).call(env)
+ end
+
+ it { is_expected.to eq(ip) }
+ end
+
+ context 'before RequestContext middleware run' do
+ it { is_expected.to be_nil }
+ end
+ end
+ end
+
+ context 'setting the thread cpu time' do
+ it 'sets the `start_thread_cpu_time`' do
+ expect { described_class.new(app).call(env) }
+ .to change { Gitlab::RequestContext.instance.start_thread_cpu_time }.from(nil).to(Float)
+ end
+ end
+
+ context 'setting the request start time' do
+ it 'sets the `request_start_time`' do
+ expect { described_class.new(app).call(env) }
+ .to change { Gitlab::RequestContext.instance.request_start_time }.from(nil).to(Float)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/multi_destination_logger_spec.rb b/spec/lib/gitlab/multi_destination_logger_spec.rb
new file mode 100644
index 00000000000..7acd7906a26
--- /dev/null
+++ b/spec/lib/gitlab/multi_destination_logger_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+class FakeLogger
+end
+
+class LoggerA < Gitlab::Logger
+ def self.file_name_noext
+ 'loggerA'
+ end
+end
+
+class LoggerB < Gitlab::JsonLogger
+ def self.file_name_noext
+ 'loggerB'
+ end
+end
+
+class TestLogger < Gitlab::MultiDestinationLogger
+ LOGGERS = [LoggerA, LoggerB].freeze
+
+ def self.loggers
+ LOGGERS
+ end
+end
+
+class EmptyLogger < Gitlab::MultiDestinationLogger
+ def self.loggers
+ []
+ end
+end
+
+describe Gitlab::MultiDestinationLogger do
+ after(:all) do
+ TestLogger.loggers.each do |logger|
+ log_file_path = "#{Rails.root}/log/#{logger.file_name}"
+ File.delete(log_file_path)
+ end
+ end
+
+ context 'with no primary logger set' do
+ subject { EmptyLogger }
+
+    it 'raises an error when primary_logger is called' do
+ expect { subject.primary_logger }.to raise_error(NotImplementedError)
+ end
+ end
+
+ context 'with 2 loggers set' do
+ subject { TestLogger }
+
+ it 'logs info to 2 loggers' do
+ expect(subject.loggers).to all(receive(:build).and_call_original)
+
+ subject.info('Hello World')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/pages_spec.rb b/spec/lib/gitlab/pages_spec.rb
index aecbc74385e..5889689cb81 100644
--- a/spec/lib/gitlab/pages_spec.rb
+++ b/spec/lib/gitlab/pages_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
describe Gitlab::Pages do
+ using RSpec::Parameterized::TableSyntax
+
let(:pages_secret) { SecureRandom.random_bytes(Gitlab::Pages::SECRET_LENGTH) }
before do
@@ -26,4 +28,24 @@ describe Gitlab::Pages do
expect(described_class.verify_api_request(headers)).to eq([{ "iss" => "gitlab-pages" }, { "alg" => "HS256" }])
end
end
+
+ describe '.access_control_is_forced?' do
+ subject { described_class.access_control_is_forced? }
+
+ where(:access_control_is_enabled, :access_control_is_forced, :result) do
+ false | false | false
+ false | true | false
+ true | false | false
+ true | true | true
+ end
+
+ with_them do
+ before do
+ stub_pages_setting(access_control: access_control_is_enabled)
+ stub_application_setting(force_pages_access_control: access_control_is_forced)
+ end
+
+ it { is_expected.to eq(result) }
+ end
+ end
end
diff --git a/spec/lib/gitlab/pagination/keyset/page_spec.rb b/spec/lib/gitlab/pagination/keyset/page_spec.rb
index 5c03224c05a..c5ca27231d8 100644
--- a/spec/lib/gitlab/pagination/keyset/page_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset/page_spec.rb
@@ -30,16 +30,14 @@ describe Gitlab::Pagination::Keyset::Page do
end
describe '#next' do
- let(:page) { described_class.new(order_by: order_by, lower_bounds: lower_bounds, per_page: per_page, end_reached: end_reached) }
- subject { page.next(new_lower_bounds, new_end_reached) }
+ let(:page) { described_class.new(order_by: order_by, lower_bounds: lower_bounds, per_page: per_page) }
+ subject { page.next(new_lower_bounds) }
let(:order_by) { { id: :desc } }
let(:lower_bounds) { { id: 42 } }
let(:per_page) { 10 }
- let(:end_reached) { false }
let(:new_lower_bounds) { { id: 21 } }
- let(:new_end_reached) { true }
it 'copies over order_by' do
expect(subject.order_by).to eq(page.order_by)
@@ -57,10 +55,5 @@ describe Gitlab::Pagination::Keyset::Page do
expect(subject.lower_bounds).to eq(new_lower_bounds)
expect(page.lower_bounds).to eq(lower_bounds)
end
-
- it 'sets end_reached only on new instance' do
- expect(subject.end_reached?).to eq(new_end_reached)
- expect(page.end_reached?).to eq(end_reached)
- end
end
end
diff --git a/spec/lib/gitlab/pagination/keyset/pager_spec.rb b/spec/lib/gitlab/pagination/keyset/pager_spec.rb
index 6d23fe2adcc..3ad1bee7225 100644
--- a/spec/lib/gitlab/pagination/keyset/pager_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset/pager_spec.rb
@@ -15,15 +15,37 @@ describe Gitlab::Pagination::Keyset::Pager do
describe '#paginate' do
subject { described_class.new(request).paginate(relation) }
- it 'loads the result relation only once' do
+ it 'does not execute a query' do
expect do
subject
- end.not_to exceed_query_limit(1)
+ end.not_to exceed_query_limit(0)
end
+ it 'applies a LIMIT' do
+ expect(subject.limit_value).to eq(page.per_page)
+ end
+
+ it 'returns the limited relation' do
+ expect(subject).to eq(relation.limit(page.per_page))
+ end
+
+ context 'validating the order clause' do
+ let(:page) { Gitlab::Pagination::Keyset::Page.new(order_by: { created_at: :asc }, per_page: 3) }
+
+      it 'raises an error if it has a different order clause than the page' do
+ expect { subject }.to raise_error(ArgumentError, /order_by does not match/)
+ end
+ end
+ end
+
+ describe '#finalize' do
+ let(:records) { relation.limit(page.per_page).load }
+
+ subject { described_class.new(request).finalize(records) }
+
it 'passes information about next page to request' do
- lower_bounds = relation.limit(page.per_page).last.slice(:id)
- expect(page).to receive(:next).with(lower_bounds, false).and_return(next_page)
+ lower_bounds = records.last.slice(:id)
+ expect(page).to receive(:next).with(lower_bounds).and_return(next_page)
expect(request).to receive(:apply_headers).with(next_page)
subject
@@ -32,10 +54,10 @@ describe Gitlab::Pagination::Keyset::Pager do
context 'when retrieving the last page' do
let(:relation) { Project.where('id > ?', Project.maximum(:id) - page.per_page).order(id: :asc) }
- it 'indicates this is the last page' do
- expect(request).to receive(:apply_headers) do |next_page|
- expect(next_page.end_reached?).to be_truthy
- end
+ it 'indicates there is another (likely empty) page' do
+ lower_bounds = records.last.slice(:id)
+ expect(page).to receive(:next).with(lower_bounds).and_return(next_page)
+ expect(request).to receive(:apply_headers).with(next_page)
subject
end
@@ -45,24 +67,10 @@ describe Gitlab::Pagination::Keyset::Pager do
let(:relation) { Project.where('id > ?', Project.maximum(:id) + 1).order(id: :asc) }
it 'indicates this is the last page' do
- expect(request).to receive(:apply_headers) do |next_page|
- expect(next_page.end_reached?).to be_truthy
- end
+ expect(request).not_to receive(:apply_headers)
subject
end
end
-
- it 'returns an array with the loaded records' do
- expect(subject).to eq(relation.limit(page.per_page).to_a)
- end
-
- context 'validating the order clause' do
- let(:page) { Gitlab::Pagination::Keyset::Page.new(order_by: { created_at: :asc }, per_page: 3) }
-
- it 'raises an error if has a different order clause than the page' do
- expect { subject }.to raise_error(ArgumentError, /order_by does not match/)
- end
- end
end
end
diff --git a/spec/lib/gitlab/pagination/keyset/request_context_spec.rb b/spec/lib/gitlab/pagination/keyset/request_context_spec.rb
index 344ef90efa3..6cd5ccc3c19 100644
--- a/spec/lib/gitlab/pagination/keyset/request_context_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset/request_context_spec.rb
@@ -53,7 +53,7 @@ describe Gitlab::Pagination::Keyset::RequestContext do
let(:request) { double('request', url: "http://#{Gitlab.config.gitlab.host}/api/v4/projects?foo=bar") }
let(:params) { { foo: 'bar' } }
let(:request_context) { double('request context', params: params, request: request) }
- let(:next_page) { double('next page', order_by: { id: :asc }, lower_bounds: { id: 42 }, end_reached?: false) }
+ let(:next_page) { double('next page', order_by: { id: :asc }, lower_bounds: { id: 42 }) }
subject { described_class.new(request_context).apply_headers(next_page) }
@@ -92,7 +92,7 @@ describe Gitlab::Pagination::Keyset::RequestContext do
end
context 'with descending order' do
- let(:next_page) { double('next page', order_by: { id: :desc }, lower_bounds: { id: 42 }, end_reached?: false) }
+ let(:next_page) { double('next page', order_by: { id: :desc }, lower_bounds: { id: 42 }) }
it 'sets Links header with a link to the next page' do
orig_uri = URI.parse(request_context.request.url)
diff --git a/spec/lib/gitlab/pagination/keyset_spec.rb b/spec/lib/gitlab/pagination/keyset_spec.rb
index 5c2576d7b45..bde280c5fca 100644
--- a/spec/lib/gitlab/pagination/keyset_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset_spec.rb
@@ -3,22 +3,6 @@
require 'spec_helper'
describe Gitlab::Pagination::Keyset do
- describe '.paginate' do
- subject { described_class.paginate(request_context, relation) }
-
- let(:request_context) { double }
- let(:relation) { double }
- let(:pager) { double }
- let(:result) { double }
-
- it 'uses Pager to paginate the relation' do
- expect(Gitlab::Pagination::Keyset::Pager).to receive(:new).with(request_context).and_return(pager)
- expect(pager).to receive(:paginate).with(relation).and_return(result)
-
- expect(subject).to eq(result)
- end
- end
-
describe '.available?' do
subject { described_class }
diff --git a/spec/lib/gitlab/patch/action_dispatch_journey_formatter_spec.rb b/spec/lib/gitlab/patch/action_dispatch_journey_formatter_spec.rb
new file mode 100644
index 00000000000..5f0e1f40231
--- /dev/null
+++ b/spec/lib/gitlab/patch/action_dispatch_journey_formatter_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Patch::ActionDispatchJourneyFormatter do
+ let(:group) { create(:group) }
+ let(:project) { create(:project, namespace: group) }
+ let(:pipeline) { create(:ci_empty_pipeline, project: project) }
+ let(:url) { Gitlab::Routing.url_helpers.project_pipeline_url(project, pipeline) }
+ let(:expected_path) { "#{project.full_path}/pipelines/#{pipeline.id}" }
+
+ context 'custom implementation of #missing_keys' do
+ before do
+ expect_any_instance_of(Gitlab::Patch::ActionDispatchJourneyFormatter).to receive(:missing_keys)
+ end
+
+ it 'generates correct url' do
+ expect(url).to end_with(expected_path)
+ end
+ end
+
+ context 'original implementation of #missing_keys' do
+ before do
+ allow_any_instance_of(Gitlab::Patch::ActionDispatchJourneyFormatter).to receive(:missing_keys) do |instance, route, parts|
+ instance.send(:old_missing_keys, route, parts) # test the old implementation
+ end
+ end
+
+ it 'generates correct url' do
+ expect(url).to end_with(expected_path)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/phabricator_import/conduit/user_spec.rb b/spec/lib/gitlab/phabricator_import/conduit/user_spec.rb
index e88eec2c393..f3928f390bc 100644
--- a/spec/lib/gitlab/phabricator_import/conduit/user_spec.rb
+++ b/spec/lib/gitlab/phabricator_import/conduit/user_spec.rb
@@ -15,13 +15,13 @@ describe Gitlab::PhabricatorImport::Conduit::User do
it 'calls the api with the correct params' do
expected_params = {
- constraints: { phids: ['phid-1', 'phid-2'] }
+ constraints: { phids: %w[phid-1 phid-2] }
}
expect(fake_client).to receive(:get).with('user.search',
params: expected_params)
- user_client.users(['phid-1', 'phid-2'])
+ user_client.users(%w[phid-1 phid-2])
end
it 'returns an array of parsed responses' do
@@ -43,7 +43,7 @@ describe Gitlab::PhabricatorImport::Conduit::User do
expect(fake_client).to receive(:get).with('user.search',
params: second_params).once
- user_client.users(['phid-1', 'phid-2'])
+ user_client.users(%w[phid-1 phid-2])
end
end
end
diff --git a/spec/lib/gitlab/phabricator_import/user_finder_spec.rb b/spec/lib/gitlab/phabricator_import/user_finder_spec.rb
index 14a00deeb16..f260e38b7c8 100644
--- a/spec/lib/gitlab/phabricator_import/user_finder_spec.rb
+++ b/spec/lib/gitlab/phabricator_import/user_finder_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe Gitlab::PhabricatorImport::UserFinder, :clean_gitlab_redis_cache do
let(:project) { create(:project, namespace: create(:group)) }
- subject(:finder) { described_class.new(project, ['first-phid', 'second-phid']) }
+ subject(:finder) { described_class.new(project, %w[first-phid second-phid]) }
before do
project.namespace.add_developer(existing_user)
diff --git a/spec/lib/gitlab/profiler_spec.rb b/spec/lib/gitlab/profiler_spec.rb
index a19392f4bcb..8f6fb6eda65 100644
--- a/spec/lib/gitlab/profiler_spec.rb
+++ b/spec/lib/gitlab/profiler_spec.rb
@@ -84,7 +84,7 @@ describe Gitlab::Profiler do
expect(severity).to eq(Logger::DEBUG)
expect(message).to include('public').and include(described_class::FILTERED_STRING)
expect(message).not_to include(private_token)
- end.twice
+ end.at_least(1) # This spec could be wrapped in more blocks in the future
custom_logger.debug("public #{private_token}")
end
@@ -120,51 +120,6 @@ describe Gitlab::Profiler do
end
end
- describe '.clean_backtrace' do
- it 'uses the Rails backtrace cleaner' do
- backtrace = []
-
- expect(Rails.backtrace_cleaner).to receive(:clean).with(backtrace)
-
- described_class.clean_backtrace(backtrace)
- end
-
- it 'removes lines from IGNORE_BACKTRACES' do
- backtrace = [
- "lib/gitlab/gitaly_client.rb:294:in `block (2 levels) in migrate'",
- "lib/gitlab/gitaly_client.rb:331:in `allow_n_plus_1_calls'",
- "lib/gitlab/gitaly_client.rb:280:in `block in migrate'",
- "lib/gitlab/metrics/influx_db.rb:103:in `measure'",
- "lib/gitlab/gitaly_client.rb:278:in `migrate'",
- "lib/gitlab/git/repository.rb:1451:in `gitaly_migrate'",
- "lib/gitlab/git/commit.rb:66:in `find'",
- "app/models/repository.rb:1047:in `find_commit'",
- "lib/gitlab/metrics/instrumentation.rb:159:in `block in find_commit'",
- "lib/gitlab/metrics/method_call.rb:36:in `measure'",
- "lib/gitlab/metrics/instrumentation.rb:159:in `find_commit'",
- "app/models/repository.rb:113:in `commit'",
- "lib/gitlab/i18n.rb:50:in `with_locale'",
- "lib/gitlab/middleware/multipart.rb:95:in `call'",
- "lib/gitlab/request_profiler/middleware.rb:14:in `call'",
- "ee/lib/gitlab/database/load_balancing/rack_middleware.rb:37:in `call'",
- "ee/lib/gitlab/jira/middleware.rb:15:in `call'"
- ]
-
- expect(described_class.clean_backtrace(backtrace))
- .to eq([
- "lib/gitlab/gitaly_client.rb:294:in `block (2 levels) in migrate'",
- "lib/gitlab/gitaly_client.rb:331:in `allow_n_plus_1_calls'",
- "lib/gitlab/gitaly_client.rb:280:in `block in migrate'",
- "lib/gitlab/gitaly_client.rb:278:in `migrate'",
- "lib/gitlab/git/repository.rb:1451:in `gitaly_migrate'",
- "lib/gitlab/git/commit.rb:66:in `find'",
- "app/models/repository.rb:1047:in `find_commit'",
- "app/models/repository.rb:113:in `commit'",
- "ee/lib/gitlab/jira/middleware.rb:15:in `call'"
- ])
- end
- end
-
describe '.with_custom_logger' do
context 'when the logger is set' do
it 'uses the replacement logger for the duration of the block' do
diff --git a/spec/lib/gitlab/project_search_results_spec.rb b/spec/lib/gitlab/project_search_results_spec.rb
index 6f4844d4543..ae4c14e4deb 100644
--- a/spec/lib/gitlab/project_search_results_spec.rb
+++ b/spec/lib/gitlab/project_search_results_spec.rb
@@ -86,8 +86,7 @@ describe Gitlab::ProjectSearchResults do
it "loads all blobs for path matches in single batch" do
expect(Gitlab::Git::Blob).to receive(:batch).once.and_call_original
- expected = project.repository.search_files_by_name(query, 'master')
- expect(results.map(&:path)).to include(*expected)
+ results.map(&:data)
end
it 'finds by content' do
diff --git a/spec/services/prometheus/adapter_service_spec.rb b/spec/lib/gitlab/prometheus/adapter_spec.rb
index 52e035e1f70..202bf65f92b 100644
--- a/spec/services/prometheus/adapter_service_spec.rb
+++ b/spec/lib/gitlab/prometheus/adapter_spec.rb
@@ -2,14 +2,13 @@
require 'spec_helper'
-describe Prometheus::AdapterService do
- let(:project) { create(:project) }
+describe Gitlab::Prometheus::Adapter do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:cluster, reload: true) { create(:cluster, :provided_by_user, environment_scope: '*', projects: [project]) }
- subject { described_class.new(project) }
+ subject { described_class.new(project, cluster) }
describe '#prometheus_adapter' do
- let(:cluster) { create(:cluster, :provided_by_user, environment_scope: '*', projects: [project]) }
-
context 'prometheus service can execute queries' do
let(:prometheus_service) { double(:prometheus_service, can_query?: true) }
diff --git a/spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb b/spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb
index 4bdc57c8c04..15edc649702 100644
--- a/spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb
+++ b/spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb
@@ -8,7 +8,8 @@ describe Gitlab::Prometheus::Queries::AdditionalMetricsDeploymentQuery do
end
include_examples 'additional metrics query' do
- let(:deployment) { create(:deployment, environment: environment) }
+ let(:project) { create(:project, :repository) }
+ let(:deployment) { create(:deployment, environment: environment, project: project) }
let(:query_params) { [deployment.id] }
it 'queries using a specific time' do
diff --git a/spec/lib/gitlab/quick_actions/dsl_spec.rb b/spec/lib/gitlab/quick_actions/dsl_spec.rb
index c98c36622f5..1145a7edc85 100644
--- a/spec/lib/gitlab/quick_actions/dsl_spec.rb
+++ b/spec/lib/gitlab/quick_actions/dsl_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe Gitlab::QuickActions::Dsl do
before :all do
DummyClass = Struct.new(:project) do
- include Gitlab::QuickActions::Dsl # rubocop:disable RSpec/DescribedClass
+ include Gitlab::QuickActions::Dsl
desc 'A command with no args'
command :no_args, :none do
diff --git a/spec/lib/gitlab/repository_cache_spec.rb b/spec/lib/gitlab/repository_cache_spec.rb
index 6a684595eb8..1b7dd1766da 100644
--- a/spec/lib/gitlab/repository_cache_spec.rb
+++ b/spec/lib/gitlab/repository_cache_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe Gitlab::RepositoryCache do
+ let_it_be(:project) { create(:project) }
let(:backend) { double('backend').as_null_object }
- let(:project) { create(:project) }
let(:repository) { project.repository }
let(:namespace) { "#{repository.full_path}:#{project.id}" }
let(:cache) { described_class.new(repository, backend: backend) }
diff --git a/spec/lib/gitlab/repository_set_cache_spec.rb b/spec/lib/gitlab/repository_set_cache_spec.rb
index 87e51f801e5..de0f3602346 100644
--- a/spec/lib/gitlab/repository_set_cache_spec.rb
+++ b/spec/lib/gitlab/repository_set_cache_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::RepositorySetCache, :clean_gitlab_redis_cache do
- let(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
let(:repository) { project.repository }
let(:namespace) { "#{repository.full_path}:#{project.id}" }
let(:cache) { described_class.new(repository) }
diff --git a/spec/lib/gitlab/request_context_spec.rb b/spec/lib/gitlab/request_context_spec.rb
index 87b8029de2e..5785dbfd850 100644
--- a/spec/lib/gitlab/request_context_spec.rb
+++ b/spec/lib/gitlab/request_context_spec.rb
@@ -2,59 +2,58 @@
require 'spec_helper'
-describe Gitlab::RequestContext do
- describe '#client_ip' do
- subject { described_class.client_ip }
+describe Gitlab::RequestContext, :request_store do
+ subject { described_class.instance }
- let(:app) { -> (env) {} }
- let(:env) { Hash.new }
+ it { is_expected.to have_attributes(client_ip: nil, start_thread_cpu_time: nil, request_start_time: nil) }
- context 'with X-Forwarded-For headers', :request_store do
- let(:load_balancer_ip) { '1.2.3.4' }
- let(:headers) do
- {
- 'HTTP_X_FORWARDED_FOR' => "#{load_balancer_ip}, 127.0.0.1",
- 'REMOTE_ADDR' => '127.0.0.1'
- }
- end
+ describe '#request_deadline' do
+ let(:request_start_time) { 1575982156.206008 }
- let(:env) { Rack::MockRequest.env_for("/").merge(headers) }
+ before do
+ allow(subject).to receive(:request_start_time).and_return(request_start_time)
+ end
+
+ it "sets the time to #{Settings.gitlab.max_request_duration_seconds} seconds in the future" do
+ expect(subject.request_deadline).to eq(request_start_time + Settings.gitlab.max_request_duration_seconds)
+ expect(subject.request_deadline).to be_a(Float)
+ end
+
+ it 'returns nil if there is no start time' do
+ allow(subject).to receive(:request_start_time).and_return(nil)
+
+ expect(subject.request_deadline).to be_nil
+ end
- it 'returns the load balancer IP' do
- client_ip = nil
+ it 'only checks the feature once per request-instance' do
+ expect(Feature).to receive(:enabled?).with(:request_deadline).once
- endpoint = proc do
- client_ip = Gitlab::SafeRequestStore[:client_ip]
- [200, {}, ["Hello"]]
- end
+ 2.times { subject.request_deadline }
+ end
- described_class.new(endpoint).call(env)
+ it 'returns nil when the feature is disabled' do
+ stub_feature_flags(request_deadline: false)
- expect(client_ip).to eq(load_balancer_ip)
- end
+ expect(subject.request_deadline).to be_nil
end
+ end
- context 'when RequestStore::Middleware is used' do
- around do |example|
- RequestStore::Middleware.new(-> (env) { example.run }).call({})
- end
+  describe '#ensure_deadline_not_exceeded!' do
+ it 'does not raise an error when there was no deadline' do
+ expect(subject).to receive(:request_deadline).and_return(nil)
+ expect { subject.ensure_deadline_not_exceeded! }.not_to raise_error
+ end
- context 'request' do
- let(:ip) { '192.168.1.11' }
+ it 'does not raise an error if the deadline is in the future' do
+ allow(subject).to receive(:request_deadline).and_return(Gitlab::Metrics::System.real_time + 10)
- before do
- allow_next_instance_of(Rack::Request) do |instance|
- allow(instance).to receive(:ip).and_return(ip)
- end
- described_class.new(app).call(env)
- end
+ expect { subject.ensure_deadline_not_exceeded! }.not_to raise_error
+ end
- it { is_expected.to eq(ip) }
- end
+ it 'raises an error when the deadline is in the past' do
+ allow(subject).to receive(:request_deadline).and_return(Gitlab::Metrics::System.real_time - 10)
- context 'before RequestContext middleware run' do
- it { is_expected.to be_nil }
- end
+ expect { subject.ensure_deadline_not_exceeded! }.to raise_error(described_class::RequestDeadlineExceeded)
end
end
end
diff --git a/spec/lib/gitlab/runtime_spec.rb b/spec/lib/gitlab/runtime_spec.rb
new file mode 100644
index 00000000000..194ed49bb32
--- /dev/null
+++ b/spec/lib/gitlab/runtime_spec.rb
@@ -0,0 +1,163 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Runtime do
+ before do
+ allow(described_class).to receive(:process_name).and_return('ruby')
+ end
+
+ context "when unknown" do
+ it "raises an exception when trying to identify" do
+ expect { subject.identify }.to raise_error(subject::UnknownProcessError)
+ end
+ end
+
+ context "on multiple matches" do
+ before do
+ stub_const('::Puma', double)
+ stub_const('::Rails::Console', double)
+ end
+
+ it "raises an exception when trying to identify" do
+ expect { subject.identify }.to raise_error(subject::AmbiguousProcessError)
+ end
+ end
+
+ context "puma" do
+ let(:puma_type) { double('::Puma') }
+ let(:options) do
+ {
+ max_threads: 2
+ }
+ end
+
+ before do
+ stub_const('::Puma', puma_type)
+ allow(puma_type).to receive_message_chain(:cli_config, :options).and_return(options)
+ end
+
+ it "identifies itself" do
+ expect(subject.identify).to eq(:puma)
+ expect(subject.puma?).to be(true)
+ end
+
+ it "does not identify as others" do
+ expect(subject.unicorn?).to be(false)
+ expect(subject.sidekiq?).to be(false)
+ expect(subject.console?).to be(false)
+ expect(subject.rake?).to be(false)
+ expect(subject.rspec?).to be(false)
+ end
+
+ it "reports its maximum concurrency" do
+ expect(subject.max_threads).to eq(2)
+ end
+ end
+
+ context "unicorn" do
+ let(:unicorn_type) { Module.new }
+ let(:unicorn_server_type) { Class.new }
+
+ before do
+ stub_const('::Unicorn', unicorn_type)
+ stub_const('::Unicorn::HttpServer', unicorn_server_type)
+ end
+
+ it "identifies itself" do
+ expect(subject.identify).to eq(:unicorn)
+ expect(subject.unicorn?).to be(true)
+ end
+
+ it "does not identify as others" do
+ expect(subject.puma?).to be(false)
+ expect(subject.sidekiq?).to be(false)
+ expect(subject.console?).to be(false)
+ expect(subject.rake?).to be(false)
+ expect(subject.rspec?).to be(false)
+ end
+
+ it "reports its maximum concurrency" do
+ expect(subject.max_threads).to eq(1)
+ end
+ end
+
+ context "sidekiq" do
+ let(:sidekiq_type) { double('::Sidekiq') }
+ let(:options) do
+ {
+ concurrency: 2
+ }
+ end
+
+ before do
+ stub_const('::Sidekiq', sidekiq_type)
+ allow(sidekiq_type).to receive(:server?).and_return(true)
+ allow(sidekiq_type).to receive(:options).and_return(options)
+ end
+
+ it "identifies itself" do
+ expect(subject.identify).to eq(:sidekiq)
+ expect(subject.sidekiq?).to be(true)
+ end
+
+ it "does not identify as others" do
+ expect(subject.unicorn?).to be(false)
+ expect(subject.puma?).to be(false)
+ expect(subject.console?).to be(false)
+ expect(subject.rake?).to be(false)
+ expect(subject.rspec?).to be(false)
+ end
+
+ it "reports its maximum concurrency" do
+ expect(subject.max_threads).to eq(2)
+ end
+ end
+
+ context "console" do
+ let(:console_type) { double('::Rails::Console') }
+
+ before do
+ stub_const('::Rails::Console', console_type)
+ end
+
+ it "identifies itself" do
+ expect(subject.identify).to eq(:console)
+ expect(subject.console?).to be(true)
+ end
+
+ it "does not identify as others" do
+ expect(subject.unicorn?).to be(false)
+ expect(subject.sidekiq?).to be(false)
+ expect(subject.puma?).to be(false)
+ expect(subject.rake?).to be(false)
+ expect(subject.rspec?).to be(false)
+ end
+
+ it "reports its maximum concurrency" do
+ expect(subject.max_threads).to eq(1)
+ end
+ end
+
+ context "rspec" do
+ before do
+ allow(described_class).to receive(:process_name).and_return('rspec')
+ end
+
+ it "identifies itself" do
+ expect(subject.identify).to eq(:rspec)
+ expect(subject.rspec?).to be(true)
+ end
+
+ it "does not identify as others" do
+ expect(subject.unicorn?).to be(false)
+ expect(subject.sidekiq?).to be(false)
+ expect(subject.rake?).to be(false)
+ expect(subject.puma?).to be(false)
+ end
+
+ it "reports its maximum concurrency" do
+ expect(subject.max_threads).to eq(1)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_logging/exception_handler_spec.rb b/spec/lib/gitlab/sidekiq_logging/exception_handler_spec.rb
index 24b6090cb19..a79a0678e2b 100644
--- a/spec/lib/gitlab/sidekiq_logging/exception_handler_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/exception_handler_spec.rb
@@ -33,7 +33,7 @@ describe Gitlab::SidekiqLogging::ExceptionHandler do
error_class: 'RuntimeError',
error_message: exception_message,
context: 'Test',
- error_backtrace: Gitlab::Profiler.clean_backtrace(backtrace)
+ error_backtrace: Gitlab::BacktraceCleaner.clean_backtrace(backtrace)
)
expect(logger).to receive(:warn).with(expected_data)
diff --git a/spec/lib/gitlab/sidekiq_logging/json_formatter_spec.rb b/spec/lib/gitlab/sidekiq_logging/json_formatter_spec.rb
index a2cb38ec5b1..f2092334117 100644
--- a/spec/lib/gitlab/sidekiq_logging/json_formatter_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/json_formatter_spec.rb
@@ -3,28 +3,53 @@
require 'spec_helper'
describe Gitlab::SidekiqLogging::JSONFormatter do
- let(:hash_input) { { foo: 1, bar: 'test' } }
let(:message) { 'This is a test' }
- let(:timestamp) { Time.now }
-
- it 'wraps a Hash' do
- result = subject.call('INFO', timestamp, 'my program', hash_input)
-
- data = JSON.parse(result)
- expected_output = hash_input.stringify_keys
- expected_output['severity'] = 'INFO'
- expected_output['time'] = timestamp.utc.iso8601(3)
-
- expect(data).to eq(expected_output)
+ let(:now) { Time.now }
+ let(:timestamp) { now.utc.to_f }
+ let(:timestamp_iso8601) { now.iso8601(3) }
+
+ describe 'with a Hash' do
+ let(:hash_input) do
+ {
+ foo: 1,
+ 'bar' => 'test',
+ 'created_at' => timestamp,
+ 'enqueued_at' => timestamp,
+ 'started_at' => timestamp,
+ 'retried_at' => timestamp,
+ 'failed_at' => timestamp,
+ 'completed_at' => timestamp_iso8601
+ }
+ end
+
+ it 'properly formats timestamps into ISO 8601 form' do
+ result = subject.call('INFO', now, 'my program', hash_input)
+
+ data = JSON.parse(result)
+ expected_output = hash_input.stringify_keys.merge!(
+ {
+ 'severity' => 'INFO',
+ 'time' => timestamp_iso8601,
+ 'created_at' => timestamp_iso8601,
+ 'enqueued_at' => timestamp_iso8601,
+ 'started_at' => timestamp_iso8601,
+ 'retried_at' => timestamp_iso8601,
+ 'failed_at' => timestamp_iso8601,
+ 'completed_at' => timestamp_iso8601
+ }
+ )
+
+ expect(data).to eq(expected_output)
+ end
end
it 'wraps a String' do
- result = subject.call('DEBUG', timestamp, 'my string', message)
+ result = subject.call('DEBUG', now, 'my string', message)
data = JSON.parse(result)
expected_output = {
severity: 'DEBUG',
- time: timestamp.utc.iso8601(3),
+ time: timestamp_iso8601,
message: message
}
diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
index cb870cc996b..43cdb998091 100644
--- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
@@ -1,10 +1,10 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper'
describe Gitlab::SidekiqLogging::StructuredLogger do
describe '#call' do
- let(:timestamp) { Time.iso8601('2018-01-01T12:00:00Z') }
+ let(:timestamp) { Time.iso8601('2018-01-01T12:00:00.000Z') }
let(:created_at) { timestamp - 1.second }
let(:scheduling_latency_s) { 1.0 }
@@ -30,8 +30,8 @@ describe Gitlab::SidekiqLogging::StructuredLogger do
'message' => 'TestWorker JID-da883554ee4fe414012f5f42: start',
'job_status' => 'start',
'pid' => Process.pid,
- 'created_at' => created_at.iso8601(6),
- 'enqueued_at' => created_at.iso8601(6),
+ 'created_at' => created_at.to_f,
+ 'enqueued_at' => created_at.to_f,
'scheduling_latency_s' => scheduling_latency_s
)
end
@@ -40,8 +40,10 @@ describe Gitlab::SidekiqLogging::StructuredLogger do
'message' => 'TestWorker JID-da883554ee4fe414012f5f42: done: 0.0 sec',
'job_status' => 'done',
'duration' => 0.0,
- "completed_at" => timestamp.iso8601(6),
- "cpu_s" => 1.111112
+ 'completed_at' => timestamp.to_f,
+ 'cpu_s' => 1.111112,
+ 'db_duration' => 0,
+ 'db_duration_s' => 0
)
end
let(:exception_payload) do
@@ -145,7 +147,7 @@ describe Gitlab::SidekiqLogging::StructuredLogger do
end
context 'with latency' do
- let(:created_at) { Time.iso8601('2018-01-01T10:00:00Z') }
+ let(:created_at) { Time.iso8601('2018-01-01T10:00:00.000Z') }
let(:scheduling_latency_s) { 7200.0 }
it 'logs with scheduling latency' do
@@ -183,22 +185,59 @@ describe Gitlab::SidekiqLogging::StructuredLogger do
end
end
end
+
+ context 'when the job performs database queries' do
+ before do
+ allow(Time).to receive(:now).and_return(timestamp)
+ allow(Process).to receive(:clock_gettime).and_call_original
+ end
+
+ let(:expected_start_payload) { start_payload.except('args') }
+
+ let(:expected_end_payload) do
+ end_payload.except('args').merge('cpu_s' => a_value > 0)
+ end
+
+ let(:expected_end_payload_with_db) do
+ expected_end_payload.merge(
+ 'db_duration' => a_value >= 100,
+ 'db_duration_s' => a_value >= 0.1
+ )
+ end
+
+ it 'logs the database time' do
+ expect(logger).to receive(:info).with(expected_start_payload).ordered
+ expect(logger).to receive(:info).with(expected_end_payload_with_db).ordered
+
+ subject.call(job, 'test_queue') { ActiveRecord::Base.connection.execute('SELECT pg_sleep(0.1);') }
+ end
+
+ it 'prevents database time from leaking to the next job' do
+ expect(logger).to receive(:info).with(expected_start_payload).ordered
+ expect(logger).to receive(:info).with(expected_end_payload_with_db).ordered
+ expect(logger).to receive(:info).with(expected_start_payload).ordered
+ expect(logger).to receive(:info).with(expected_end_payload).ordered
+
+ subject.call(job, 'test_queue') { ActiveRecord::Base.connection.execute('SELECT pg_sleep(0.1);') }
+ subject.call(job, 'test_queue') { }
+ end
+ end
end
describe '#add_time_keys!' do
let(:time) { { duration: 0.1231234, cputime: 1.2342345 } }
let(:payload) { { 'class' => 'my-class', 'message' => 'my-message', 'job_status' => 'my-job-status' } }
- let(:current_utc_time) { '2019-09-23 10:00:58 UTC' }
- let(:payload_with_time_keys) { { 'class' => 'my-class', 'message' => 'my-message', 'job_status' => 'my-job-status', 'duration' => 0.123123, 'cpu_s' => 1.234235, 'completed_at' => current_utc_time } }
+ let(:current_utc_time) { Time.now.utc }
+ let(:payload_with_time_keys) { { 'class' => 'my-class', 'message' => 'my-message', 'job_status' => 'my-job-status', 'duration' => 0.123123, 'cpu_s' => 1.234235, 'completed_at' => current_utc_time.to_f } }
subject { described_class.new }
it 'updates the payload correctly' do
- expect(Time).to receive_message_chain(:now, :utc).and_return(current_utc_time)
+ Timecop.freeze(current_utc_time) do
+ subject.send(:add_time_keys!, time, payload)
- subject.send(:add_time_keys!, time, payload)
-
- expect(payload).to eq(payload_with_time_keys)
+ expect(payload).to eq(payload_with_time_keys)
+ end
end
end
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb
new file mode 100644
index 00000000000..6516016e67f
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::SidekiqMiddleware::ClientMetrics do
+ context "with worker attribution" do
+ subject { described_class.new }
+
+ let(:queue) { :test }
+ let(:worker_class) { worker.class }
+ let(:job) { {} }
+ let(:default_labels) { { queue: queue.to_s, boundary: "", external_dependencies: "no", feature_category: "", latency_sensitive: "no" } }
+
+ shared_examples "a metrics client middleware" do
+ context "with mocked prometheus" do
+ let(:enqueued_jobs_metric) { double('enqueued jobs metric', increment: true) }
+
+ before do
+ allow(Gitlab::Metrics).to receive(:counter).with(described_class::ENQUEUED, anything).and_return(enqueued_jobs_metric)
+ end
+
+ describe '#call' do
+ it 'yields block' do
+ expect { |b| subject.call(worker, job, :test, double, &b) }.to yield_control.once
+ end
+
+ it 'increments enqueued jobs metric' do
+ expect(enqueued_jobs_metric).to receive(:increment).with(labels, 1)
+
+ subject.call(worker, job, :test, double) { nil }
+ end
+ end
+ end
+ end
+
+ context "when workers are not attributed" do
+ class TestNonAttributedWorker
+ include Sidekiq::Worker
+ end
+
+ it_behaves_like "a metrics client middleware" do
+ let(:worker) { TestNonAttributedWorker.new }
+ let(:labels) { default_labels }
+ end
+ end
+
+ context "when workers are attributed" do
+ def create_attributed_worker_class(latency_sensitive, external_dependencies, resource_boundary, category)
+ Class.new do
+ include Sidekiq::Worker
+ include WorkerAttributes
+
+ latency_sensitive_worker! if latency_sensitive
+ worker_has_external_dependencies! if external_dependencies
+ worker_resource_boundary resource_boundary unless resource_boundary == :unknown
+ feature_category category unless category.nil?
+ end
+ end
+
+ let(:latency_sensitive) { false }
+ let(:external_dependencies) { false }
+ let(:resource_boundary) { :unknown }
+ let(:feature_category) { nil }
+ let(:worker_class) { create_attributed_worker_class(latency_sensitive, external_dependencies, resource_boundary, feature_category) }
+ let(:worker) { worker_class.new }
+
+ context "latency sensitive" do
+ it_behaves_like "a metrics client middleware" do
+ let(:latency_sensitive) { true }
+ let(:labels) { default_labels.merge(latency_sensitive: "yes") }
+ end
+ end
+
+ context "external dependencies" do
+ it_behaves_like "a metrics client middleware" do
+ let(:external_dependencies) { true }
+ let(:labels) { default_labels.merge(external_dependencies: "yes") }
+ end
+ end
+
+ context "cpu boundary" do
+ it_behaves_like "a metrics client middleware" do
+ let(:resource_boundary) { :cpu }
+ let(:labels) { default_labels.merge(boundary: "cpu") }
+ end
+ end
+
+ context "memory boundary" do
+ it_behaves_like "a metrics client middleware" do
+ let(:resource_boundary) { :memory }
+ let(:labels) { default_labels.merge(boundary: "memory") }
+ end
+ end
+
+ context "feature category" do
+ it_behaves_like "a metrics client middleware" do
+ let(:feature_category) { :authentication }
+ let(:labels) { default_labels.merge(feature_category: "authentication") }
+ end
+ end
+
+ context "combined" do
+ it_behaves_like "a metrics client middleware" do
+ let(:latency_sensitive) { true }
+ let(:external_dependencies) { true }
+ let(:resource_boundary) { :cpu }
+ let(:feature_category) { :authentication }
+ let(:labels) { default_labels.merge(latency_sensitive: "yes", external_dependencies: "yes", boundary: "cpu", feature_category: "authentication") }
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_middleware/correlation_injector_spec.rb b/spec/lib/gitlab/sidekiq_middleware/correlation_injector_spec.rb
deleted file mode 100644
index d5ed939485a..00000000000
--- a/spec/lib/gitlab/sidekiq_middleware/correlation_injector_spec.rb
+++ /dev/null
@@ -1,49 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe Gitlab::SidekiqMiddleware::CorrelationInjector do
- class TestWorker
- include ApplicationWorker
- end
-
- before do |example|
- Sidekiq.client_middleware do |chain|
- chain.add described_class
- end
- end
-
- after do |example|
- Sidekiq.client_middleware do |chain|
- chain.remove described_class
- end
-
- Sidekiq::Queues.clear_all
- end
-
- around do |example|
- Sidekiq::Testing.fake! do
- example.run
- end
- end
-
- it 'injects into payload the correlation id' do
- expect_next_instance_of(described_class) do |instance|
- expect(instance).to receive(:call).and_call_original
- end
-
- Labkit::Correlation::CorrelationId.use_id('new-correlation-id') do
- TestWorker.perform_async(1234)
- end
-
- expected_job_params = {
- "class" => "TestWorker",
- "args" => [1234],
- "correlation_id" => "new-correlation-id"
- }
-
- expect(Sidekiq::Queues.jobs_by_worker).to a_hash_including(
- "TestWorker" => a_collection_containing_exactly(
- a_hash_including(expected_job_params)))
- end
-end
diff --git a/spec/lib/gitlab/sidekiq_middleware/correlation_logger_spec.rb b/spec/lib/gitlab/sidekiq_middleware/correlation_logger_spec.rb
deleted file mode 100644
index 27eea963402..00000000000
--- a/spec/lib/gitlab/sidekiq_middleware/correlation_logger_spec.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe Gitlab::SidekiqMiddleware::CorrelationLogger do
- class TestWorker
- include ApplicationWorker
- end
-
- before do |example|
- Sidekiq::Testing.server_middleware do |chain|
- chain.add described_class
- end
- end
-
- after do |example|
- Sidekiq::Testing.server_middleware do |chain|
- chain.remove described_class
- end
- end
-
- it 'injects into payload the correlation id', :sidekiq_might_not_need_inline do
- expect_any_instance_of(described_class).to receive(:call).and_call_original
-
- expect_any_instance_of(TestWorker).to receive(:perform).with(1234) do
- expect(Labkit::Correlation::CorrelationId.current_id).to eq('new-correlation-id')
- end
-
- Sidekiq::Client.push(
- 'queue' => 'test',
- 'class' => TestWorker,
- 'args' => [1234],
- 'correlation_id' => 'new-correlation-id')
- end
-end
diff --git a/spec/lib/gitlab/sidekiq_middleware/metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
index 36c6f377bde..65a961b34f8 100644
--- a/spec/lib/gitlab/sidekiq_middleware/metrics_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
@@ -1,8 +1,8 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper'
-describe Gitlab::SidekiqMiddleware::Metrics do
+describe Gitlab::SidekiqMiddleware::ServerMetrics do
context "with worker attribution" do
subject { described_class.new }
diff --git a/spec/lib/gitlab/sidekiq_middleware_spec.rb b/spec/lib/gitlab/sidekiq_middleware_spec.rb
index aef472e0648..473d85c0143 100644
--- a/spec/lib/gitlab/sidekiq_middleware_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware_spec.rb
@@ -38,10 +38,10 @@ describe Gitlab::SidekiqMiddleware do
[
Gitlab::SidekiqMiddleware::Monitor,
Gitlab::SidekiqMiddleware::BatchLoader,
- Gitlab::SidekiqMiddleware::CorrelationLogger,
+ Labkit::Middleware::Sidekiq::Server,
Gitlab::SidekiqMiddleware::InstrumentationLogger,
Gitlab::SidekiqStatus::ServerMiddleware,
- Gitlab::SidekiqMiddleware::Metrics,
+ Gitlab::SidekiqMiddleware::ServerMetrics,
Gitlab::SidekiqMiddleware::ArgumentsLogger,
Gitlab::SidekiqMiddleware::MemoryKiller,
Gitlab::SidekiqMiddleware::RequestStoreMiddleware
@@ -74,7 +74,7 @@ describe Gitlab::SidekiqMiddleware do
let(:request_store) { false }
let(:disabled_sidekiq_middlewares) do
[
- Gitlab::SidekiqMiddleware::Metrics,
+ Gitlab::SidekiqMiddleware::ServerMetrics,
Gitlab::SidekiqMiddleware::ArgumentsLogger,
Gitlab::SidekiqMiddleware::MemoryKiller,
Gitlab::SidekiqMiddleware::RequestStoreMiddleware
@@ -120,7 +120,7 @@ describe Gitlab::SidekiqMiddleware do
# This test ensures that this does not happen
it "invokes the chain" do
expect_any_instance_of(Gitlab::SidekiqStatus::ClientMiddleware).to receive(:call).with(*middleware_expected_args).once.and_call_original
- expect_any_instance_of(Gitlab::SidekiqMiddleware::CorrelationInjector).to receive(:call).with(*middleware_expected_args).once.and_call_original
+ expect_any_instance_of(Labkit::Middleware::Sidekiq::Client).to receive(:call).with(*middleware_expected_args).once.and_call_original
expect { |b| chain.invoke(worker_class_arg, job, queue, redis_pool, &b) }.to yield_control.once
end
diff --git a/spec/lib/gitlab/slash_commands/command_spec.rb b/spec/lib/gitlab/slash_commands/command_spec.rb
index 73b93589fac..9849cf78b2f 100644
--- a/spec/lib/gitlab/slash_commands/command_spec.rb
+++ b/spec/lib/gitlab/slash_commands/command_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::SlashCommands::Command do
- let(:project) { create(:project) }
+ let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
let(:chat_name) { double(:chat_name, user: user) }
diff --git a/spec/lib/gitlab/slash_commands/deploy_spec.rb b/spec/lib/gitlab/slash_commands/deploy_spec.rb
index 93a724d8e12..fb9969800a2 100644
--- a/spec/lib/gitlab/slash_commands/deploy_spec.rb
+++ b/spec/lib/gitlab/slash_commands/deploy_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
describe Gitlab::SlashCommands::Deploy do
describe '#execute' do
- let(:project) { create(:project) }
+ let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
let(:chat_name) { double(:chat_name, user: user) }
let(:regex_match) { described_class.match('deploy staging to production') }
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index 6ab23b00d5c..cf1dacd088e 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -17,8 +17,8 @@ describe Gitlab::UsageData do
create(:service, project: projects[0], type: 'SlackSlashCommandsService', active: true)
create(:service, project: projects[1], type: 'SlackService', active: true)
create(:service, project: projects[2], type: 'SlackService', active: true)
- create(:service, project: projects[2], type: 'MattermostService', active: true)
- create(:service, project: projects[2], type: 'JenkinsService', active: true)
+ create(:service, project: projects[2], type: 'MattermostService', active: false)
+ create(:service, project: projects[2], type: 'MattermostService', active: true, template: true)
create(:service, project: projects[2], type: 'CustomIssueTrackerService', active: true)
create(:project_error_tracking_setting, project: projects[0])
create(:project_error_tracking_setting, project: projects[1], enabled: false)
@@ -168,13 +168,15 @@ describe Gitlab::UsageData do
pool_repositories
projects
projects_imported_from_github
+ projects_asana_active
projects_jira_active
projects_jira_server_active
projects_jira_cloud_active
projects_slack_notifications_active
projects_slack_slash_active
+ projects_slack_active
+ projects_slack_slash_commands_active
projects_custom_issue_tracker_active
- projects_jenkins_active
projects_mattermost_active
projects_prometheus_active
projects_with_repositories_enabled
@@ -203,15 +205,17 @@ describe Gitlab::UsageData do
count_data = subject[:counts]
expect(count_data[:projects]).to eq(4)
+ expect(count_data[:projects_asana_active]).to eq(0)
expect(count_data[:projects_prometheus_active]).to eq(1)
expect(count_data[:projects_jira_active]).to eq(4)
expect(count_data[:projects_jira_server_active]).to eq(2)
expect(count_data[:projects_jira_cloud_active]).to eq(2)
expect(count_data[:projects_slack_notifications_active]).to eq(2)
expect(count_data[:projects_slack_slash_active]).to eq(1)
+ expect(count_data[:projects_slack_active]).to eq(2)
+ expect(count_data[:projects_slack_slash_commands_active]).to eq(1)
expect(count_data[:projects_custom_issue_tracker_active]).to eq(1)
- expect(count_data[:projects_jenkins_active]).to eq(1)
- expect(count_data[:projects_mattermost_active]).to eq(1)
+ expect(count_data[:projects_mattermost_active]).to eq(0)
expect(count_data[:projects_with_repositories_enabled]).to eq(3)
expect(count_data[:projects_with_error_tracking_enabled]).to eq(1)
expect(count_data[:issues_created_from_gitlab_error_tracking_ui]).to eq(1)
@@ -339,12 +343,6 @@ describe Gitlab::UsageData do
expect(described_class.count(relation)).to eq(1)
end
- it 'returns the count for count_by when provided' do
- allow(relation).to receive(:count).with(:creator_id).and_return(2)
-
- expect(described_class.count(relation, count_by: :creator_id)).to eq(2)
- end
-
it 'returns the fallback value when counting fails' do
allow(relation).to receive(:count).and_raise(ActiveRecord::StatementInvalid.new(''))
diff --git a/spec/lib/gitlab/utils/lazy_attributes_spec.rb b/spec/lib/gitlab/utils/lazy_attributes_spec.rb
new file mode 100644
index 00000000000..c0005c194c4
--- /dev/null
+++ b/spec/lib/gitlab/utils/lazy_attributes_spec.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+require 'fast_spec_helper'
+require 'active_support/concern'
+
+describe Gitlab::Utils::LazyAttributes do
+ subject(:klass) do
+ Class.new do
+ include Gitlab::Utils::LazyAttributes
+
+ lazy_attr_reader :number, type: Numeric
+ lazy_attr_reader :reader_1, :reader_2
+ lazy_attr_accessor :incorrect_type, :string_attribute, :accessor_2, type: String
+
+ def initialize
+ @number = -> { 1 }
+ @reader_1, @reader_2 = 'reader_1', -> { 'reader_2' }
+ @incorrect_type, @accessor_2 = -> { :incorrect_type }, -> { 'accessor_2' }
+ end
+ end
+ end
+
+ context 'class methods' do
+ it { is_expected.to respond_to(:lazy_attr_reader, :lazy_attr_accessor) }
+ it { is_expected.not_to respond_to(:define_lazy_reader, :define_lazy_writer) }
+ end
+
+ context 'instance methods' do
+ subject(:instance) { klass.new }
+
+ it do
+ is_expected.to respond_to(:number, :reader_1, :reader_2, :incorrect_type,
+ :incorrect_type=, :accessor_2, :accessor_2=,
+ :string_attribute, :string_attribute=)
+ end
+
+ context 'reading attributes' do
+ it 'returns the correct values for procs', :aggregate_failures do
+ expect(instance.number).to eq(1)
+ expect(instance.reader_2).to eq('reader_2')
+ expect(instance.accessor_2).to eq('accessor_2')
+ end
+
+ it 'does not return the value if the type did not match what was specified' do
+ expect(instance.incorrect_type).to be_nil
+ end
+
+ it 'only calls the block once even if it returned `nil`', :aggregate_failures do
+ expect(instance.instance_variable_get('@number')).to receive(:call).once.and_call_original
+ expect(instance.instance_variable_get('@accessor_2')).to receive(:call).once.and_call_original
+ expect(instance.instance_variable_get('@incorrect_type')).to receive(:call).once.and_call_original
+
+ 2.times do
+ instance.number
+ instance.incorrect_type
+ instance.accessor_2
+ end
+ end
+ end
+
+ context 'writing attributes' do
+ it 'sets the correct values', :aggregate_failures do
+ instance.string_attribute = -> { 'updated 1' }
+ instance.accessor_2 = nil
+
+ expect(instance.string_attribute).to eq('updated 1')
+ expect(instance.accessor_2).to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/utils_spec.rb b/spec/lib/gitlab/utils_spec.rb
index 890918d4a7c..85a536ee6ad 100644
--- a/spec/lib/gitlab/utils_spec.rb
+++ b/spec/lib/gitlab/utils_spec.rb
@@ -3,8 +3,9 @@
require 'spec_helper'
describe Gitlab::Utils do
- delegate :to_boolean, :boolean_to_yes_no, :slugify, :random_string, :which, :ensure_array_from_string,
- :bytes_to_megabytes, :append_path, :check_path_traversal!, to: :described_class
+ delegate :to_boolean, :boolean_to_yes_no, :slugify, :random_string, :which,
+ :ensure_array_from_string, :to_exclusive_sentence, :bytes_to_megabytes,
+ :append_path, :check_path_traversal!, to: :described_class
describe '.check_path_traversal!' do
it 'detects path traversal at the start of the string' do
@@ -46,6 +47,36 @@ describe Gitlab::Utils do
end
end
+ describe '.to_exclusive_sentence' do
+ it 'calls #to_sentence on the array' do
+ array = double
+
+ expect(array).to receive(:to_sentence)
+
+ to_exclusive_sentence(array)
+ end
+
+ it 'joins arrays with two elements correctly' do
+ array = %w(foo bar)
+
+ expect(to_exclusive_sentence(array)).to eq('foo or bar')
+ end
+
+ it 'joins arrays with more than two elements correctly' do
+ array = %w(foo bar baz)
+
+ expect(to_exclusive_sentence(array)).to eq('foo, bar, or baz')
+ end
+
+ it 'localizes the connector words' do
+ array = %w(foo bar baz)
+
+ expect(described_class).to receive(:_).with(' or ').and_return(' <1> ')
+ expect(described_class).to receive(:_).with(', or ').and_return(', <2> ')
+ expect(to_exclusive_sentence(array)).to eq('foo, bar, <2> baz')
+ end
+ end
+
describe '.nlbr' do
it 'replaces new lines with <br>' do
expect(described_class.nlbr("<b>hello</b>\n<i>world</i>".freeze)).to eq("hello<br>world")
diff --git a/spec/lib/prometheus/pid_provider_spec.rb b/spec/lib/prometheus/pid_provider_spec.rb
index 6fdc11b14c4..5a17f25f144 100644
--- a/spec/lib/prometheus/pid_provider_spec.rb
+++ b/spec/lib/prometheus/pid_provider_spec.rb
@@ -6,16 +6,13 @@ describe Prometheus::PidProvider do
describe '.worker_id' do
subject { described_class.worker_id }
- let(:sidekiq_module) { Module.new }
-
before do
- allow(sidekiq_module).to receive(:server?).and_return(false)
- stub_const('Sidekiq', sidekiq_module)
+ allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(false)
end
context 'when running in Sidekiq server mode' do
before do
- expect(Sidekiq).to receive(:server?).and_return(true)
+ allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true)
end
context 'in a clustered setup' do
@@ -33,8 +30,7 @@ describe Prometheus::PidProvider do
context 'when running in Unicorn mode' do
before do
- stub_const('Unicorn::Worker', Class.new)
- hide_const('Puma')
+ allow(Gitlab::Runtime).to receive(:unicorn?).and_return(true)
expect(described_class).to receive(:process_name)
.at_least(:once)
@@ -94,8 +90,7 @@ describe Prometheus::PidProvider do
context 'when running in Puma mode' do
before do
- stub_const('Puma', Module.new)
- hide_const('Unicorn::Worker')
+ allow(Gitlab::Runtime).to receive(:puma?).and_return(true)
expect(described_class).to receive(:process_name)
.at_least(:once)
@@ -116,11 +111,6 @@ describe Prometheus::PidProvider do
end
context 'when running in unknown mode' do
- before do
- hide_const('Puma')
- hide_const('Unicorn::Worker')
- end
-
it { is_expected.to eq "process_#{Process.pid}" }
end
end
diff --git a/spec/lib/quality/helm_client_spec.rb b/spec/lib/quality/helm_client_spec.rb
index 795aa43b849..8d199fe3531 100644
--- a/spec/lib/quality/helm_client_spec.rb
+++ b/spec/lib/quality/helm_client_spec.rb
@@ -110,7 +110,7 @@ RSpec.describe Quality::HelmClient do
end
context 'with multiple release names' do
- let(:release_name) { ['my-release', 'my-release-2'] }
+ let(:release_name) { %w[my-release my-release-2] }
it 'raises an error if the Helm command fails' do
expect(Gitlab::Popen).to receive(:popen_with_detail)
diff --git a/spec/lib/quality/kubernetes_client_spec.rb b/spec/lib/quality/kubernetes_client_spec.rb
index 59d4a977d5e..6a62ef456c1 100644
--- a/spec/lib/quality/kubernetes_client_spec.rb
+++ b/spec/lib/quality/kubernetes_client_spec.rb
@@ -46,7 +46,7 @@ RSpec.describe Quality::KubernetesClient do
end
context 'with multiple releases' do
- let(:release_name) { ['my-release', 'my-release-2'] }
+ let(:release_name) { %w[my-release my-release-2] }
it 'raises an error if the Kubernetes command fails' do
expect(Gitlab::Popen).to receive(:popen_with_detail)
diff --git a/spec/lib/sentry/api_urls_spec.rb b/spec/lib/sentry/api_urls_spec.rb
new file mode 100644
index 00000000000..78455f8d51f
--- /dev/null
+++ b/spec/lib/sentry/api_urls_spec.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Sentry::ApiUrls do
+ let(:sentry_url) { 'https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project/' }
+ let(:token) { 'test-token' }
+ let(:issue_id) { '123456' }
+ let(:issue_id_with_reserved_chars) { '123$%' }
+ let(:escaped_issue_id) { '123%24%25' }
+ let(:api_urls) { Sentry::ApiUrls.new(sentry_url) }
+
+ # Sentry API returns 404 if there are extra slashes in the URL!
+ shared_examples 'correct url with extra slashes' do
+ let(:sentry_url) { 'https://sentrytest.gitlab.com/api/0/projects//sentry-org/sentry-project/' }
+
+ it_behaves_like 'correct url'
+ end
+
+ shared_examples 'correctly escapes issue ID' do
+    context 'when the param is a string with reserved chars' do
+ let(:issue_id) { issue_id_with_reserved_chars }
+
+ it { expect(subject.to_s).to include(escaped_issue_id) }
+ end
+
+    context 'when the param is a symbol with reserved chars' do
+ let(:issue_id) { issue_id_with_reserved_chars.to_sym }
+
+ it { expect(subject.to_s).to include(escaped_issue_id) }
+ end
+
+    context 'when the param is an integer' do
+ let(:issue_id) { 12345678 }
+
+ it { expect(subject.to_s).to include(issue_id.to_s) }
+ end
+ end
+
+ describe '#issues_url' do
+ subject { api_urls.issues_url }
+
+ shared_examples 'correct url' do
+ it { is_expected.to eq_uri('https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project/issues/') }
+ end
+
+ it_behaves_like 'correct url'
+ it_behaves_like 'correct url with extra slashes'
+ end
+
+ describe '#issue_url' do
+ subject { api_urls.issue_url(issue_id) }
+
+ shared_examples 'correct url' do
+ it { is_expected.to eq_uri("https://sentrytest.gitlab.com/api/0/issues/#{issue_id}/") }
+ end
+
+ it_behaves_like 'correct url'
+ it_behaves_like 'correct url with extra slashes'
+ it_behaves_like 'correctly escapes issue ID'
+ end
+
+ describe '#projects_url' do
+ subject { api_urls.projects_url }
+
+ shared_examples 'correct url' do
+ it { is_expected.to eq_uri('https://sentrytest.gitlab.com/api/0/projects/') }
+ end
+
+ it_behaves_like 'correct url'
+ it_behaves_like 'correct url with extra slashes'
+ end
+
+ describe '#issue_latest_event_url' do
+ subject { api_urls.issue_latest_event_url(issue_id) }
+
+ shared_examples 'correct url' do
+ it { is_expected.to eq_uri("https://sentrytest.gitlab.com/api/0/issues/#{issue_id}/events/latest/") }
+ end
+
+ it_behaves_like 'correct url'
+ it_behaves_like 'correct url with extra slashes'
+ it_behaves_like 'correctly escapes issue ID'
+ end
+end
diff --git a/spec/lib/sentry/client/event_spec.rb b/spec/lib/sentry/client/event_spec.rb
new file mode 100644
index 00000000000..c8604d72ada
--- /dev/null
+++ b/spec/lib/sentry/client/event_spec.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Sentry::Client do
+ include SentryClientHelpers
+
+ let(:sentry_url) { 'https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project' }
+ let(:token) { 'test-token' }
+ let(:default_httparty_options) do
+ {
+ follow_redirects: false,
+ headers: { "Authorization" => "Bearer test-token" }
+ }
+ end
+ let(:client) { described_class.new(sentry_url, token) }
+
+ describe '#issue_latest_event' do
+ let(:sample_response) do
+ Gitlab::Utils.deep_indifferent_access(
+ JSON.parse(fixture_file('sentry/issue_latest_event_sample_response.json'))
+ )
+ end
+ let(:issue_id) { '1234' }
+ let(:sentry_api_response) { sample_response }
+ let(:sentry_url) { 'https://sentrytest.gitlab.com/api/0' }
+ let(:sentry_request_url) { "#{sentry_url}/issues/#{issue_id}/events/latest/" }
+ let!(:sentry_api_request) { stub_sentry_request(sentry_request_url, body: sentry_api_response) }
+
+ subject { client.issue_latest_event(issue_id: issue_id) }
+
+ it_behaves_like 'calls sentry api'
+
+ it 'has correct return type' do
+ expect(subject).to be_a(Gitlab::ErrorTracking::ErrorEvent)
+ end
+
+ shared_examples 'assigns error tracking event correctly' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:event_object, :sentry_response) do
+ :issue_id | :groupID
+ :date_received | :dateReceived
+ end
+
+ with_them do
+ it { expect(subject.public_send(event_object)).to eq(sentry_api_response.dig(*sentry_response)) }
+ end
+ end
+
+ context 'error object created from sentry response' do
+ it_behaves_like 'assigns error tracking event correctly'
+
+ it 'parses the stack trace' do
+ expect(subject.stack_trace_entries).to be_a Array
+ expect(subject.stack_trace_entries).not_to be_empty
+ end
+
+ context 'error without stack trace' do
+ before do
+ sample_response['entries'] = []
+ stub_sentry_request(sentry_request_url, body: sample_response)
+ end
+
+ it_behaves_like 'assigns error tracking event correctly'
+
+ it 'returns an empty array for stack_trace_entries' do
+ expect(subject.stack_trace_entries).to eq []
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/sentry/client/issue_link_spec.rb b/spec/lib/sentry/client/issue_link_spec.rb
new file mode 100644
index 00000000000..35a69be6de5
--- /dev/null
+++ b/spec/lib/sentry/client/issue_link_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Sentry::Client::IssueLink do
+ include SentryClientHelpers
+
+ let(:error_tracking_setting) { create(:project_error_tracking_setting, api_url: sentry_url) }
+ let(:sentry_url) { 'https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project' }
+ let(:client) { error_tracking_setting.sentry_client }
+
+ let(:issue_link_sample_response) { JSON.parse(fixture_file('sentry/issue_link_sample_response.json')) }
+
+ describe '#create_issue_link' do
+ let(:integration_id) { 44444 }
+ let(:sentry_issue_id) { 11111111 }
+ let(:issue) { create(:issue, project: error_tracking_setting.project) }
+
+ let(:sentry_issue_link_url) { "https://sentrytest.gitlab.com/api/0/groups/#{sentry_issue_id}/integrations/#{integration_id}/" }
+ let(:sentry_api_response) { issue_link_sample_response }
+ let!(:sentry_api_request) { stub_sentry_request(sentry_issue_link_url, :put, body: sentry_api_response, status: 201) }
+
+ subject { client.create_issue_link(integration_id, sentry_issue_id, issue) }
+
+ it_behaves_like 'calls sentry api'
+
+ it { is_expected.to be_present }
+
+ context 'redirects' do
+ let(:sentry_api_url) { sentry_issue_link_url }
+
+ it_behaves_like 'no Sentry redirects', :put
+ end
+
+ context 'when exception is raised' do
+ let(:sentry_request_url) { sentry_issue_link_url }
+
+ it_behaves_like 'maps Sentry exceptions', :put
+ end
+ end
+end
diff --git a/spec/lib/sentry/client/issue_spec.rb b/spec/lib/sentry/client/issue_spec.rb
new file mode 100644
index 00000000000..061ebcfdc06
--- /dev/null
+++ b/spec/lib/sentry/client/issue_spec.rb
@@ -0,0 +1,299 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Sentry::Client::Issue do
+ include SentryClientHelpers
+
+ let(:token) { 'test-token' }
+ let(:sentry_url) { 'https://sentrytest.gitlab.com/api/0' }
+ let(:client) { Sentry::Client.new(sentry_url, token) }
+ let(:issue_id) { 503504 }
+
+ describe '#list_issues' do
+ shared_examples 'issues have correct return type' do |klass|
+ it "returns objects of type #{klass}" do
+ expect(subject[:issues]).to all( be_a(klass) )
+ end
+ end
+
+ shared_examples 'issues have correct length' do |length|
+ it { expect(subject[:issues].length).to eq(length) }
+ end
+
+ let(:issues_sample_response) do
+ Gitlab::Utils.deep_indifferent_access(
+ JSON.parse(fixture_file('sentry/issues_sample_response.json'))
+ )
+ end
+
+ let(:default_httparty_options) do
+ {
+ follow_redirects: false,
+ headers: { 'Content-Type' => 'application/json', 'Authorization' => "Bearer test-token" }
+ }
+ end
+
+ let(:sentry_url) { 'https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project' }
+ let(:issue_status) { 'unresolved' }
+ let(:limit) { 20 }
+ let(:search_term) { '' }
+ let(:cursor) { nil }
+ let(:sort) { 'last_seen' }
+ let(:sentry_api_response) { issues_sample_response }
+ let(:sentry_request_url) { sentry_url + '/issues/?limit=20&query=is:unresolved' }
+ let!(:sentry_api_request) { stub_sentry_request(sentry_request_url, body: sentry_api_response) }
+
+ subject { client.list_issues(issue_status: issue_status, limit: limit, search_term: search_term, sort: sort, cursor: cursor) }
+
+ it_behaves_like 'calls sentry api'
+
+ it_behaves_like 'issues have correct return type', Gitlab::ErrorTracking::Error
+ it_behaves_like 'issues have correct length', 1
+
+ shared_examples 'has correct external_url' do
+ context 'external_url' do
+ it 'is constructed correctly' do
+ expect(subject[:issues][0].external_url).to eq('https://sentrytest.gitlab.com/sentry-org/sentry-project/issues/11')
+ end
+ end
+ end
+
+ context 'when the response has pagination info' do
+ let(:headers) do
+ {
+ link: '<https://sentrytest.gitlab.com>; rel="previous"; results="true"; cursor="1573556671000:0:1", <https://sentrytest.gitlab.com>; rel="next"; results="true"; cursor="1572959139000:0:0"'
+ }
+ end
+ let!(:sentry_api_request) { stub_sentry_request(sentry_request_url, body: sentry_api_response, headers: headers) }
+
+ it 'parses the pagination' do
+ expect(subject[:pagination]).to eq(
+ 'previous' => { 'cursor' => '1573556671000:0:1' },
+ 'next' => { 'cursor' => '1572959139000:0:0' }
+ )
+ end
+ end
+
+ context 'error object created from sentry response' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:error_object, :sentry_response) do
+ :id | :id
+ :first_seen | :firstSeen
+ :last_seen | :lastSeen
+ :title | :title
+ :type | :type
+ :user_count | :userCount
+ :count | :count
+ :message | [:metadata, :value]
+ :culprit | :culprit
+ :short_id | :shortId
+ :status | :status
+ :frequency | [:stats, '24h']
+ :project_id | [:project, :id]
+ :project_name | [:project, :name]
+ :project_slug | [:project, :slug]
+ end
+
+ with_them do
+ it { expect(subject[:issues][0].public_send(error_object)).to eq(sentry_api_response[0].dig(*sentry_response)) }
+ end
+
+ it_behaves_like 'has correct external_url'
+ end
+
+ context 'redirects' do
+ let(:sentry_api_url) { sentry_url + '/issues/?limit=20&query=is:unresolved' }
+
+ it_behaves_like 'no Sentry redirects'
+ end
+
+ context 'requests with sort parameter in sentry api' do
+ let(:sentry_request_url) do
+ 'https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project/' \
+ 'issues/?limit=20&query=is:unresolved&sort=freq'
+ end
+ let!(:sentry_api_request) { stub_sentry_request(sentry_request_url, body: sentry_api_response) }
+
+ subject { client.list_issues(issue_status: issue_status, limit: limit, sort: 'frequency') }
+
+ it 'calls the sentry api with sort params' do
+ expect(Gitlab::HTTP).to receive(:get).with(
+ URI("#{sentry_url}/issues/"),
+ default_httparty_options.merge(query: { limit: 20, query: "is:unresolved", sort: "freq" })
+ ).and_call_original
+
+ subject
+
+ expect(sentry_api_request).to have_been_requested
+ end
+ end
+
+ context 'with invalid sort params' do
+ subject { client.list_issues(issue_status: issue_status, limit: limit, sort: 'fish') }
+
+ it 'throws an error' do
+ expect { subject }.to raise_error(Sentry::Client::BadRequestError, 'Invalid value for sort param')
+ end
+ end
+
+ context 'Older sentry versions where keys are not present' do
+ let(:sentry_api_response) do
+ issues_sample_response[0...1].map do |issue|
+ issue[:project].delete(:id)
+ issue
+ end
+ end
+
+ it_behaves_like 'calls sentry api'
+
+ it_behaves_like 'issues have correct return type', Gitlab::ErrorTracking::Error
+ it_behaves_like 'issues have correct length', 1
+
+ it_behaves_like 'has correct external_url'
+ end
+
+ context 'essential keys missing in API response' do
+ let(:sentry_api_response) do
+ issues_sample_response[0...1].map do |issue|
+ issue.except(:id)
+ end
+ end
+
+ it 'raises exception' do
+ expect { subject }.to raise_error(Sentry::Client::MissingKeysError, 'Sentry API response is missing keys. key not found: "id"')
+ end
+ end
+
+ context 'sentry api response too large' do
+ it 'raises exception' do
+ deep_size = double('Gitlab::Utils::DeepSize', valid?: false)
+ allow(Gitlab::Utils::DeepSize).to receive(:new).with(sentry_api_response).and_return(deep_size)
+
+ expect { subject }.to raise_error(Sentry::Client::ResponseInvalidSizeError, 'Sentry API response is too big. Limit is 1 MB.')
+ end
+ end
+
+ it_behaves_like 'maps Sentry exceptions'
+
+ context 'when search term is present' do
+ let(:search_term) { 'NoMethodError' }
+ let(:sentry_request_url) { "#{sentry_url}/issues/?limit=20&query=is:unresolved NoMethodError" }
+
+ it_behaves_like 'calls sentry api'
+
+ it_behaves_like 'issues have correct return type', Gitlab::ErrorTracking::Error
+ it_behaves_like 'issues have correct length', 1
+ end
+
+ context 'when cursor is present' do
+ let(:cursor) { '1572959139000:0:0' }
+ let(:sentry_request_url) { "#{sentry_url}/issues/?limit=20&cursor=#{cursor}&query=is:unresolved" }
+
+ it_behaves_like 'calls sentry api'
+
+ it_behaves_like 'issues have correct return type', Gitlab::ErrorTracking::Error
+ it_behaves_like 'issues have correct length', 1
+ end
+ end
+
+ describe '#issue_details' do
+ let(:issue_sample_response) do
+ Gitlab::Utils.deep_indifferent_access(
+ JSON.parse(fixture_file('sentry/issue_sample_response.json'))
+ )
+ end
+
+ let(:sentry_request_url) { "#{sentry_url}/issues/#{issue_id}/" }
+ let!(:sentry_api_request) { stub_sentry_request(sentry_request_url, body: issue_sample_response) }
+
+ subject { client.issue_details(issue_id: issue_id) }
+
+ context 'error object created from sentry response' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:error_object, :sentry_response) do
+ :id | :id
+ :first_seen | :firstSeen
+ :last_seen | :lastSeen
+ :title | :title
+ :type | :type
+ :user_count | :userCount
+ :count | :count
+ :message | [:metadata, :value]
+ :culprit | :culprit
+ :short_id | :shortId
+ :status | :status
+ :frequency | [:stats, '24h']
+ :project_id | [:project, :id]
+ :project_name | [:project, :name]
+ :project_slug | [:project, :slug]
+ :first_release_last_commit | [:firstRelease, :lastCommit]
+ :last_release_last_commit | [:lastRelease, :lastCommit]
+ :first_release_short_version | [:firstRelease, :shortVersion]
+ :last_release_short_version | [:lastRelease, :shortVersion]
+ :first_release_version | [:firstRelease, :version]
+ end
+
+ with_them do
+ it do
+ expect(subject.public_send(error_object)).to eq(issue_sample_response.dig(*sentry_response))
+ end
+ end
+
+ it 'has a correct external URL' do
+ expect(subject.external_url).to eq('https://sentrytest.gitlab.com/api/0/issues/503504')
+ end
+
+ it 'has a correct external base url' do
+ expect(subject.external_base_url).to eq('https://sentrytest.gitlab.com/api/0')
+ end
+
+ it 'has a correct GitLab issue url' do
+ expect(subject.gitlab_issue).to eq('https://gitlab.com/gitlab-org/gitlab/issues/1')
+ end
+
+ it 'has the correct tags' do
+ expect(subject.tags).to eq({ level: issue_sample_response['level'], logger: issue_sample_response['logger'] })
+ end
+ end
+ end
+
+ describe '#update_issue' do
+ let(:sentry_url) { 'https://sentrytest.gitlab.com/api/0' }
+ let(:sentry_request_url) { "#{sentry_url}/issues/#{issue_id}/" }
+
+ before do
+ stub_sentry_request(sentry_request_url, :put)
+ end
+
+ let(:params) do
+ {
+ status: 'resolved'
+ }
+ end
+
+ subject { client.update_issue(issue_id: issue_id, params: params) }
+
+ it_behaves_like 'calls sentry api' do
+ let(:sentry_api_request) { stub_sentry_request(sentry_request_url, :put) }
+ end
+
+ it 'returns a truthy result' do
+ expect(subject).to be_truthy
+ end
+
+ context 'error encountered' do
+ let(:error) { StandardError.new('error') }
+
+ before do
+ allow(client).to receive(:update_issue).and_raise(error)
+ end
+
+ it 'raises the error' do
+ expect { subject }.to raise_error(error)
+ end
+ end
+ end
+end
diff --git a/spec/lib/sentry/client/projects_spec.rb b/spec/lib/sentry/client/projects_spec.rb
index 462f74eaac9..6183d4c5816 100644
--- a/spec/lib/sentry/client/projects_spec.rb
+++ b/spec/lib/sentry/client/projects_spec.rb
@@ -91,25 +91,6 @@ describe Sentry::Client::Projects do
it_behaves_like 'no Sentry redirects'
end
- # Sentry API returns 404 if there are extra slashes in the URL!
- context 'extra slashes in URL' do
- let(:sentry_url) { 'https://sentrytest.gitlab.com/api//0/projects//' }
- let!(:valid_req_stub) do
- stub_sentry_request(sentry_list_projects_url)
- end
-
- it 'removes extra slashes in api url' do
- expect(Gitlab::HTTP).to receive(:get).with(
- URI(sentry_list_projects_url),
- anything
- ).and_call_original
-
- subject
-
- expect(valid_req_stub).to have_been_requested
- end
- end
-
context 'when exception is raised' do
let(:sentry_request_url) { sentry_list_projects_url }
diff --git a/spec/lib/sentry/client/repo_spec.rb b/spec/lib/sentry/client/repo_spec.rb
new file mode 100644
index 00000000000..7bc2811ef03
--- /dev/null
+++ b/spec/lib/sentry/client/repo_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Sentry::Client::Repo do
+ include SentryClientHelpers
+
+ let(:sentry_url) { 'https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project' }
+ let(:token) { 'test-token' }
+ let(:client) { Sentry::Client.new(sentry_url, token) }
+ let(:repos_sample_response) { JSON.parse(fixture_file('sentry/repos_sample_response.json')) }
+
+ describe '#repos' do
+ let(:organization_slug) { 'gitlab' }
+ let(:sentry_repos_url) { "https://sentrytest.gitlab.com/api/0/organizations/#{organization_slug}/repos/" }
+ let(:sentry_api_response) { repos_sample_response }
+ let!(:sentry_api_request) { stub_sentry_request(sentry_repos_url, body: sentry_api_response) }
+
+ subject { client.repos(organization_slug) }
+
+ it_behaves_like 'calls sentry api'
+
+ it { is_expected.to all( be_a(Gitlab::ErrorTracking::Repo)) }
+
+ it { expect(subject.length).to eq(1) }
+
+ context 'redirects' do
+ let(:sentry_api_url) { sentry_repos_url }
+
+ it_behaves_like 'no Sentry redirects'
+ end
+
+ context 'when exception is raised' do
+ let(:sentry_request_url) { sentry_repos_url }
+
+ it_behaves_like 'maps Sentry exceptions'
+ end
+ end
+end
diff --git a/spec/lib/sentry/client_spec.rb b/spec/lib/sentry/client_spec.rb
index cff06bf4a5f..e2da4564ca1 100644
--- a/spec/lib/sentry/client_spec.rb
+++ b/spec/lib/sentry/client_spec.rb
@@ -3,219 +3,15 @@
require 'spec_helper'
describe Sentry::Client do
- include SentryClientHelpers
-
let(:sentry_url) { 'https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project' }
let(:token) { 'test-token' }
- let(:default_httparty_options) do
- {
- follow_redirects: false,
- headers: { "Authorization" => "Bearer test-token" }
- }
- end
-
- let(:issues_sample_response) do
- Gitlab::Utils.deep_indifferent_access(
- JSON.parse(fixture_file('sentry/issues_sample_response.json'))
- )
- end
-
- subject(:client) { described_class.new(sentry_url, token) }
-
- shared_examples 'issues has correct return type' do |klass|
- it "returns objects of type #{klass}" do
- expect(subject[:issues]).to all( be_a(klass) )
- end
- end
-
- shared_examples 'issues has correct length' do |length|
- it { expect(subject[:issues].length).to eq(length) }
- end
-
- describe '#list_issues' do
- let(:issue_status) { 'unresolved' }
- let(:limit) { 20 }
- let(:search_term) { '' }
- let(:cursor) { nil }
- let(:sort) { 'last_seen' }
- let(:sentry_api_response) { issues_sample_response }
- let(:sentry_request_url) { sentry_url + '/issues/?limit=20&query=is:unresolved' }
-
- let!(:sentry_api_request) { stub_sentry_request(sentry_request_url, body: sentry_api_response) }
-
- subject { client.list_issues(issue_status: issue_status, limit: limit, search_term: search_term, sort: sort, cursor: cursor) }
-
- it_behaves_like 'calls sentry api'
-
- it_behaves_like 'issues has correct return type', Gitlab::ErrorTracking::Error
- it_behaves_like 'issues has correct length', 1
-
- shared_examples 'has correct external_url' do
- context 'external_url' do
- it 'is constructed correctly' do
- expect(subject[:issues][0].external_url).to eq('https://sentrytest.gitlab.com/sentry-org/sentry-project/issues/11')
- end
- end
- end
-
- context 'when response has a pagination info' do
- let(:headers) do
- {
- link: '<https://sentrytest.gitlab.com>; rel="previous"; results="true"; cursor="1573556671000:0:1", <https://sentrytest.gitlab.com>; rel="next"; results="true"; cursor="1572959139000:0:0"'
- }
- end
- let!(:sentry_api_request) { stub_sentry_request(sentry_request_url, body: sentry_api_response, headers: headers) }
-
- it 'parses the pagination' do
- expect(subject[:pagination]).to eq(
- 'previous' => { 'cursor' => '1573556671000:0:1' },
- 'next' => { 'cursor' => '1572959139000:0:0' }
- )
- end
- end
-
- context 'error object created from sentry response' do
- using RSpec::Parameterized::TableSyntax
-
- where(:error_object, :sentry_response) do
- :id | :id
- :first_seen | :firstSeen
- :last_seen | :lastSeen
- :title | :title
- :type | :type
- :user_count | :userCount
- :count | :count
- :message | [:metadata, :value]
- :culprit | :culprit
- :short_id | :shortId
- :status | :status
- :frequency | [:stats, '24h']
- :project_id | [:project, :id]
- :project_name | [:project, :name]
- :project_slug | [:project, :slug]
- end
-
- with_them do
- it { expect(subject[:issues][0].public_send(error_object)).to eq(sentry_api_response[0].dig(*sentry_response)) }
- end
-
- it_behaves_like 'has correct external_url'
- end
-
- context 'redirects' do
- let(:sentry_api_url) { sentry_url + '/issues/?limit=20&query=is:unresolved' }
-
- it_behaves_like 'no Sentry redirects'
- end
-
- # Sentry API returns 404 if there are extra slashes in the URL!
- context 'extra slashes in URL' do
- let(:sentry_url) { 'https://sentrytest.gitlab.com/api/0/projects//sentry-org/sentry-project/' }
-
- let(:sentry_request_url) do
- 'https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project/' \
- 'issues/?limit=20&query=is:unresolved'
- end
-
- it 'removes extra slashes in api url' do
- expect(client.url).to eq(sentry_url)
- expect(Gitlab::HTTP).to receive(:get).with(
- URI('https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project/issues/'),
- anything
- ).and_call_original
-
- subject
-
- expect(sentry_api_request).to have_been_requested
- end
- end
-
- context 'requests with sort parameter in sentry api' do
- let(:sentry_request_url) do
- 'https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project/' \
- 'issues/?limit=20&query=is:unresolved&sort=freq'
- end
- let!(:sentry_api_request) { stub_sentry_request(sentry_request_url, body: sentry_api_response) }
-
- subject { client.list_issues(issue_status: issue_status, limit: limit, sort: 'frequency') }
-
- it 'calls the sentry api with sort params' do
- expect(Gitlab::HTTP).to receive(:get).with(
- URI("#{sentry_url}/issues/"),
- default_httparty_options.merge(query: { limit: 20, query: "is:unresolved", sort: "freq" })
- ).and_call_original
-
- subject
-
- expect(sentry_api_request).to have_been_requested
- end
- end
-
- context 'with invalid sort params' do
- subject { client.list_issues(issue_status: issue_status, limit: limit, sort: 'fish') }
-
- it 'throws an error' do
- expect { subject }.to raise_error(Sentry::Client::BadRequestError, 'Invalid value for sort param')
- end
- end
-
- context 'Older sentry versions where keys are not present' do
- let(:sentry_api_response) do
- issues_sample_response[0...1].map do |issue|
- issue[:project].delete(:id)
- issue
- end
- end
-
- it_behaves_like 'calls sentry api'
-
- it_behaves_like 'issues has correct return type', Gitlab::ErrorTracking::Error
- it_behaves_like 'issues has correct length', 1
-
- it_behaves_like 'has correct external_url'
- end
-
- context 'essential keys missing in API response' do
- let(:sentry_api_response) do
- issues_sample_response[0...1].map do |issue|
- issue.except(:id)
- end
- end
-
- it 'raises exception' do
- expect { subject }.to raise_error(Sentry::Client::MissingKeysError, 'Sentry API response is missing keys. key not found: "id"')
- end
- end
-
- context 'sentry api response too large' do
- it 'raises exception' do
- deep_size = double('Gitlab::Utils::DeepSize', valid?: false)
- allow(Gitlab::Utils::DeepSize).to receive(:new).with(sentry_api_response).and_return(deep_size)
-
- expect { subject }.to raise_error(Sentry::Client::ResponseInvalidSizeError, 'Sentry API response is too big. Limit is 1 MB.')
- end
- end
-
- it_behaves_like 'maps Sentry exceptions'
-
- context 'when search term is present' do
- let(:search_term) { 'NoMethodError' }
- let(:sentry_request_url) { "#{sentry_url}/issues/?limit=20&query=is:unresolved NoMethodError" }
-
- it_behaves_like 'calls sentry api'
-
- it_behaves_like 'issues has correct return type', Gitlab::ErrorTracking::Error
- it_behaves_like 'issues has correct length', 1
- end
-
- context 'when cursor is present' do
- let(:cursor) { '1572959139000:0:0' }
- let(:sentry_request_url) { "#{sentry_url}/issues/?limit=20&cursor=#{cursor}&query=is:unresolved" }
- it_behaves_like 'calls sentry api'
+ subject { Sentry::Client.new(sentry_url, token) }
- it_behaves_like 'issues has correct return type', Gitlab::ErrorTracking::Error
- it_behaves_like 'issues has correct length', 1
- end
- end
+ it { is_expected.to respond_to :projects }
+ it { is_expected.to respond_to :list_issues }
+ it { is_expected.to respond_to :issue_details }
+ it { is_expected.to respond_to :issue_latest_event }
+ it { is_expected.to respond_to :repos }
+ it { is_expected.to respond_to :create_issue_link }
end
diff --git a/spec/migrations/20190924152703_migrate_issue_trackers_data_spec.rb b/spec/migrations/20190924152703_migrate_issue_trackers_data_spec.rb
new file mode 100644
index 00000000000..53c176fc46f
--- /dev/null
+++ b/spec/migrations/20190924152703_migrate_issue_trackers_data_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20190924152703_migrate_issue_trackers_data.rb')
+
+describe MigrateIssueTrackersData, :migration, :sidekiq do
+ let(:services) { table(:services) }
+ let(:migration_class) { Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData }
+ let(:migration_name) { migration_class.to_s.demodulize }
+
+ let(:properties) do
+ {
+ 'url' => 'http://example.com'
+ }
+ end
+ let!(:jira_service) do
+ services.create(id: 10, type: 'JiraService', properties: properties, category: 'issue_tracker')
+ end
+ let!(:jira_service_nil) do
+ services.create(id: 11, type: 'JiraService', properties: nil, category: 'issue_tracker')
+ end
+ let!(:bugzilla_service) do
+ services.create(id: 12, type: 'BugzillaService', properties: properties, category: 'issue_tracker')
+ end
+ let!(:youtrack_service) do
+ services.create(id: 13, type: 'YoutrackService', properties: properties, category: 'issue_tracker')
+ end
+ let!(:youtrack_service_empty) do
+ services.create(id: 14, type: 'YoutrackService', properties: '', category: 'issue_tracker')
+ end
+ let!(:gitlab_service) do
+ services.create(id: 15, type: 'GitlabIssueTrackerService', properties: properties, category: 'issue_tracker')
+ end
+ let!(:gitlab_service_empty) do
+ services.create(id: 16, type: 'GitlabIssueTrackerService', properties: {}, category: 'issue_tracker')
+ end
+ let!(:other_service) do
+ services.create(id: 17, type: 'OtherService', properties: properties, category: 'other_category')
+ end
+
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 2)
+ end
+
+ it 'schedules background migrations at correct time' do
+ Sidekiq::Testing.fake! do
+ Timecop.freeze do
+ migrate!
+
+ expect(migration_name).to be_scheduled_delayed_migration(3.minutes, jira_service.id, bugzilla_service.id)
+ expect(migration_name).to be_scheduled_delayed_migration(6.minutes, youtrack_service.id, gitlab_service.id)
+ expect(BackgroundMigrationWorker.jobs.size).to eq(2)
+ end
+ end
+ end
+end
diff --git a/spec/migrations/20191204114127_delete_legacy_triggers_spec.rb b/spec/migrations/20191204114127_delete_legacy_triggers_spec.rb
new file mode 100644
index 00000000000..c2660d699ca
--- /dev/null
+++ b/spec/migrations/20191204114127_delete_legacy_triggers_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20191204114127_delete_legacy_triggers.rb')
+
+describe DeleteLegacyTriggers, :migration, schema: 2019_11_25_140458 do
+ let(:ci_trigger_table) { table(:ci_triggers) }
+ let(:user) { table(:users).create!(name: 'test', email: 'test@example.com', projects_limit: 1) }
+
+ before do
+ @trigger_with_user = ci_trigger_table.create!(owner_id: user.id)
+ ci_trigger_table.create!(owner_id: nil)
+ ci_trigger_table.create!(owner_id: nil)
+ end
+
+ it 'removes legacy triggers which have a null owner_id' do
+ expect do
+ migrate!
+ end.to change(ci_trigger_table, :count).by(-2)
+
+ expect(ci_trigger_table.all).to eq([@trigger_with_user])
+ end
+end
diff --git a/spec/migrations/20200107172020_add_timestamp_softwarelicensespolicy_spec.rb b/spec/migrations/20200107172020_add_timestamp_softwarelicensespolicy_spec.rb
new file mode 100644
index 00000000000..b0d2aea7015
--- /dev/null
+++ b/spec/migrations/20200107172020_add_timestamp_softwarelicensespolicy_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'migrate', '20200107172020_add_timestamp_softwarelicensespolicy.rb')
+
+describe AddTimestampSoftwarelicensespolicy, :migration do
+ let(:software_licenses_policy) { table(:software_license_policies) }
+ let(:projects) { table(:projects) }
+ let(:licenses) { table(:software_licenses) }
+
+ before do
+ projects.create!(name: 'gitlab', path: 'gitlab-org/gitlab-ce', namespace_id: 1)
+ licenses.create!(name: 'MIT')
+ software_licenses_policy.create!(project_id: projects.first.id, software_license_id: licenses.first.id)
+ end
+
+ it 'creates timestamps' do
+ migrate!
+
+ expect(software_licenses_policy.first.created_at).to be_nil
+ expect(software_licenses_policy.first.updated_at).to be_nil
+ end
+end
diff --git a/spec/migrations/add_temporary_partial_index_on_project_id_to_services_spec.rb b/spec/migrations/add_temporary_partial_index_on_project_id_to_services_spec.rb
new file mode 100644
index 00000000000..2d12fec5cb3
--- /dev/null
+++ b/spec/migrations/add_temporary_partial_index_on_project_id_to_services_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200114112932_add_temporary_partial_index_on_project_id_to_services.rb')
+
+describe AddTemporaryPartialIndexOnProjectIdToServices, :migration do
+ let(:migration) { described_class.new }
+
+ describe '#up' do
+ it 'creates a temporary partial index on project_id' do
+ expect { migration.up }.to change { migration.index_exists?(:services, :project_id, name: described_class::INDEX_NAME) }.from(false).to(true)
+ end
+ end
+
+ describe '#down' do
+ it 'removes the temporary partial index on project_id' do
+ migration.up
+
+ expect { migration.down }.to change { migration.index_exists?(:services, :project_id, name: described_class::INDEX_NAME) }.from(true).to(false)
+ end
+ end
+end
diff --git a/spec/migrations/backfill_operations_feature_flags_active_spec.rb b/spec/migrations/backfill_operations_feature_flags_active_spec.rb
new file mode 100644
index 00000000000..ad69b776052
--- /dev/null
+++ b/spec/migrations/backfill_operations_feature_flags_active_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'migrate', '20191213184609_backfill_operations_feature_flags_active.rb')
+
+describe BackfillOperationsFeatureFlagsActive, :migration do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:flags) { table(:operations_feature_flags) }
+
+ def setup
+ namespace = namespaces.create!(name: 'foo', path: 'foo')
+ project = projects.create!(namespace_id: namespace.id)
+
+ project
+ end
+
+ it 'executes successfully when there are no flags in the table' do
+ setup
+
+ disable_migrations_output { migrate! }
+
+ expect(flags.count).to eq(0)
+ end
+
+ it 'updates active to true' do
+ project = setup
+ flag = flags.create!(project_id: project.id, name: 'test_flag', active: false)
+
+ disable_migrations_output { migrate! }
+
+ expect(flag.reload.active).to eq(true)
+ end
+
+ it 'updates active to true for multiple flags' do
+ project = setup
+ flag_a = flags.create!(project_id: project.id, name: 'test_flag', active: false)
+ flag_b = flags.create!(project_id: project.id, name: 'other_flag', active: false)
+
+ disable_migrations_output { migrate! }
+
+ expect(flag_a.reload.active).to eq(true)
+ expect(flag_b.reload.active).to eq(true)
+ end
+
+ it 'leaves active true if it is already true' do
+ project = setup
+ flag = flags.create!(project_id: project.id, name: 'test_flag', active: true)
+
+ disable_migrations_output { migrate! }
+
+ expect(flag.reload.active).to eq(true)
+ end
+end
diff --git a/spec/migrations/drop_project_ci_cd_settings_merge_trains_enabled_spec.rb b/spec/migrations/drop_project_ci_cd_settings_merge_trains_enabled_spec.rb
new file mode 100644
index 00000000000..1b0e6e140ca
--- /dev/null
+++ b/spec/migrations/drop_project_ci_cd_settings_merge_trains_enabled_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20191128162854_drop_project_ci_cd_settings_merge_trains_enabled.rb')
+
+describe DropProjectCiCdSettingsMergeTrainsEnabled, :migration do
+ let!(:project_ci_cd_setting) { table(:project_ci_cd_settings) }
+
+ it 'correctly migrates up and down' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(project_ci_cd_setting.column_names).to include("merge_trains_enabled")
+ }
+
+ migration.after -> {
+ project_ci_cd_setting.reset_column_information
+ expect(project_ci_cd_setting.column_names).not_to include("merge_trains_enabled")
+ }
+ end
+ end
+end
diff --git a/spec/migrations/fix_max_pages_size_spec.rb b/spec/migrations/fix_max_pages_size_spec.rb
new file mode 100644
index 00000000000..36b5445603e
--- /dev/null
+++ b/spec/migrations/fix_max_pages_size_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'migrate', '20191213120427_fix_max_pages_size.rb')
+
+describe FixMaxPagesSize, :migration do
+ let(:application_settings) { table(:application_settings) }
+ let!(:default_setting) { application_settings.create! }
+ let!(:max_possible_setting) { application_settings.create!(max_pages_size: described_class::MAX_SIZE) }
+ let!(:higher_than_maximum_setting) { application_settings.create!(max_pages_size: described_class::MAX_SIZE + 1) }
+
+ it 'correctly updates settings only if needed' do
+ migrate!
+
+ expect(default_setting.reload.max_pages_size).to eq(100)
+ expect(max_possible_setting.reload.max_pages_size).to eq(described_class::MAX_SIZE)
+ expect(higher_than_maximum_setting.reload.max_pages_size).to eq(described_class::MAX_SIZE)
+ end
+end
diff --git a/spec/migrations/patch_prometheus_services_for_shared_cluster_applications_spec.rb b/spec/migrations/patch_prometheus_services_for_shared_cluster_applications_spec.rb
new file mode 100644
index 00000000000..83f994c2a94
--- /dev/null
+++ b/spec/migrations/patch_prometheus_services_for_shared_cluster_applications_spec.rb
@@ -0,0 +1,134 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200114113341_patch_prometheus_services_for_shared_cluster_applications.rb')
+
+describe PatchPrometheusServicesForSharedClusterApplications, :migration, :sidekiq do
+ include MigrationHelpers::PrometheusServiceHelpers
+
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:services) { table(:services) }
+ let(:clusters) { table(:clusters) }
+ let(:cluster_groups) { table(:cluster_groups) }
+ let(:clusters_applications_prometheus) { table(:clusters_applications_prometheus) }
+ let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
+
+ let(:application_statuses) do
+ {
+ errored: -1,
+ installed: 3,
+ updated: 5
+ }
+ end
+
+ let(:cluster_types) do
+ {
+ instance_type: 1,
+ group_type: 2
+ }
+ end
+
+ describe '#up' do
+ let!(:project_with_missing_service) { projects.create!(name: 'gitlab', path: 'gitlab-ce', namespace_id: namespace.id) }
+ let(:project_with_inactive_service) { projects.create!(name: 'gitlab', path: 'gitlab-ee', namespace_id: namespace.id) }
+ let(:project_with_active_service) { projects.create!(name: 'gitlab', path: 'gitlab-ee', namespace_id: namespace.id) }
+ let(:project_with_manual_active_service) { projects.create!(name: 'gitlab', path: 'gitlab-ee', namespace_id: namespace.id) }
+ let(:project_with_manual_inactive_service) { projects.create!(name: 'gitlab', path: 'gitlab-ee', namespace_id: namespace.id) }
+ let(:project_with_active_not_prometheus_service) { projects.create!(name: 'gitlab', path: 'gitlab-ee', namespace_id: namespace.id) }
+ let(:project_with_inactive_not_prometheus_service) { projects.create!(name: 'gitlab', path: 'gitlab-ee', namespace_id: namespace.id) }
+
+ before do
+ services.create(service_params_for(project_with_inactive_service.id, active: false))
+ services.create(service_params_for(project_with_active_service.id, active: true))
+ services.create(service_params_for(project_with_active_not_prometheus_service.id, active: true, type: 'other'))
+ services.create(service_params_for(project_with_inactive_not_prometheus_service.id, active: false, type: 'other'))
+ services.create(service_params_for(project_with_manual_inactive_service.id, active: false, properties: { some: 'data' }.to_json))
+ services.create(service_params_for(project_with_manual_active_service.id, active: true, properties: { some: 'data' }.to_json))
+ end
+
+ shared_examples 'patch prometheus services post migration' do
+ context 'prometheus application is installed on the cluster' do
+ it 'schedules a background migration' do
+ clusters_applications_prometheus.create(cluster_id: cluster.id, status: application_statuses[:installed], version: '123')
+
+ Sidekiq::Testing.fake! do
+ Timecop.freeze do
+ background_migrations = [["ActivatePrometheusServicesForSharedClusterApplications", project_with_missing_service.id],
+ ["ActivatePrometheusServicesForSharedClusterApplications", project_with_inactive_service.id],
+ ["ActivatePrometheusServicesForSharedClusterApplications", project_with_active_not_prometheus_service.id],
+ ["ActivatePrometheusServicesForSharedClusterApplications", project_with_inactive_not_prometheus_service.id]]
+
+ migrate!
+
+ enqueued_migrations = BackgroundMigrationWorker.jobs.map { |job| job['args'] }
+ expect(enqueued_migrations).to match_array(background_migrations)
+ end
+ end
+ end
+ end
+
+ context 'prometheus application was recently updated on the cluster' do
+ it 'schedules a background migration' do
+ clusters_applications_prometheus.create(cluster_id: cluster.id, status: application_statuses[:updated], version: '123')
+
+ Sidekiq::Testing.fake! do
+ Timecop.freeze do
+ background_migrations = [["ActivatePrometheusServicesForSharedClusterApplications", project_with_missing_service.id],
+ ["ActivatePrometheusServicesForSharedClusterApplications", project_with_inactive_service.id],
+ ["ActivatePrometheusServicesForSharedClusterApplications", project_with_active_not_prometheus_service.id],
+ ["ActivatePrometheusServicesForSharedClusterApplications", project_with_inactive_not_prometheus_service.id]]
+
+ migrate!
+
+ enqueued_migrations = BackgroundMigrationWorker.jobs.map { |job| job['args'] }
+ expect(enqueued_migrations).to match_array(background_migrations)
+ end
+ end
+ end
+ end
+
+ context 'prometheus application failed to install on the cluster' do
+ it 'does not schedule a background migration' do
+ clusters_applications_prometheus.create(cluster_id: cluster.id, status: application_statuses[:errored], version: '123')
+
+ Sidekiq::Testing.fake! do
+ Timecop.freeze do
+ migrate!
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq 0
+ end
+ end
+ end
+ end
+
+ context 'prometheus application is NOT installed on the cluster' do
+ it 'does not schedule a background migration' do
+ Sidekiq::Testing.fake! do
+ Timecop.freeze do
+ migrate!
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq 0
+ end
+ end
+ end
+ end
+ end
+
+ context 'Cluster is group_type' do
+ let(:cluster) { clusters.create(name: 'cluster', cluster_type: cluster_types[:group_type]) }
+
+ before do
+ cluster_groups.create(group_id: namespace.id, cluster_id: cluster.id)
+ end
+
+ it_behaves_like 'patch prometheus services post migration'
+ end
+
+ context 'Cluster is instance_type' do
+ let(:cluster) { clusters.create(name: 'cluster', cluster_type: cluster_types[:instance_type]) }
+
+ it_behaves_like 'patch prometheus services post migration'
+ end
+ end
+end
diff --git a/spec/migrations/update_fingerprint_sha256_within_keys_spec.rb b/spec/migrations/update_fingerprint_sha256_within_keys_spec.rb
new file mode 100644
index 00000000000..bdb661af904
--- /dev/null
+++ b/spec/migrations/update_fingerprint_sha256_within_keys_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require Rails.root.join('db', 'post_migrate', '20200106071113_update_fingerprint_sha256_within_keys.rb')
+
+describe UpdateFingerprintSha256WithinKeys, :sidekiq, :migration do
+ let(:key_table) { table(:keys) }
+
+ describe '#up' do
+ it 'schedules the BackgroundMigrationWorker to populate fingerprint_sha256' do
+ key_table.create!(
+ id: 1,
+ user_id: 1,
+ title: 'test',
+ key: 'ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAIEAiPWx6WM4lhHNedGfBpPJNPpZ7yKu+dnn1SJejgt1016k6YjzGGphH2TUxwKzxcKDKKezwkpfnxPkSMkuEspGRt/aZZ9wa++Oi7Qkr8prgHc4soW6NUlfDzpvZK2H5E7eQaSeP3SAwGmQKUFHCddNaP0L+hM7zhFNzjFvpaMgJw0=',
+ fingerprint: 'ba:81:59:68:d7:6c:cd:02:02:bf:6a:9b:55:4e:af:d1',
+ fingerprint_sha256: nil
+ )
+
+ expect(Key.first.fingerprint_sha256).to eq(nil)
+
+ described_class.new.up
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(1)
+ expect(BackgroundMigrationWorker.jobs.first["args"][0]).to eq("MigrateFingerprintSha256WithinKeys")
+ expect(BackgroundMigrationWorker.jobs.first["args"][1]).to eq([1, 1])
+ end
+ end
+end
diff --git a/spec/models/active_session_spec.rb b/spec/models/active_session_spec.rb
index 072d0fa86e5..bff3ac313c4 100644
--- a/spec/models/active_session_spec.rb
+++ b/spec/models/active_session_spec.rb
@@ -44,6 +44,19 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
end
end
+ describe '#public_id' do
+ it 'returns an encrypted, url-encoded session id' do
+ original_session_id = "!*'();:@&\n=+$,/?%abcd#123[4567]8"
+ active_session = ActiveSession.new(session_id: original_session_id)
+ encrypted_encoded_id = active_session.public_id
+
+ encrypted_id = CGI.unescape(encrypted_encoded_id)
+ derived_session_id = Gitlab::CryptoHelper.aes256_gcm_decrypt(encrypted_id)
+
+ expect(original_session_id).to eq derived_session_id
+ end
+ end
+
describe '.list' do
it 'returns all sessions by user' do
Gitlab::Redis::SharedState.with do |redis|
@@ -139,7 +152,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
redis = double(:redis)
expect(Gitlab::Redis::SharedState).to receive(:with).and_yield(redis)
- sessions = ['session-a', 'session-b']
+ sessions = %w[session-a session-b]
mget_responses = sessions.map { |session| [Marshal.dump(session)]}
expect(redis).to receive(:mget).twice.and_return(*mget_responses)
@@ -173,8 +186,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
device_name: 'iPhone 6',
device_type: 'smartphone',
created_at: Time.zone.parse('2018-03-12 09:06'),
- updated_at: Time.zone.parse('2018-03-12 09:06'),
- session_id: '6919a6f1bb119dd7396fadc38fd18d0d'
+ updated_at: Time.zone.parse('2018-03-12 09:06')
)
end
end
@@ -244,6 +256,40 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
end
end
+ describe '.destroy_with_public_id' do
+ it 'receives a user and public id and destroys the associated session' do
+ ActiveSession.set(user, request)
+ session = ActiveSession.list(user).first
+
+ ActiveSession.destroy_with_public_id(user, session.public_id)
+
+ total_sessions = ActiveSession.list(user).count
+ expect(total_sessions).to eq 0
+ end
+
+ it 'handles invalid input for public id' do
+ expect do
+ ActiveSession.destroy_with_public_id(user, nil)
+ end.not_to raise_error
+
+ expect do
+ ActiveSession.destroy_with_public_id(user, "")
+ end.not_to raise_error
+
+ expect do
+ ActiveSession.destroy_with_public_id(user, "aaaaaaaa")
+ end.not_to raise_error
+ end
+
+ it 'does not attempt to destroy session when given invalid input for public id' do
+ expect(ActiveSession).not_to receive(:destroy)
+
+ ActiveSession.destroy_with_public_id(user, nil)
+ ActiveSession.destroy_with_public_id(user, "")
+ ActiveSession.destroy_with_public_id(user, "aaaaaaaa")
+ end
+ end
+
describe '.cleanup' do
before do
stub_const("ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS", 5)
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index a403aa296d4..bbd50f1c0ef 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -67,6 +67,13 @@ describe ApplicationSetting do
it { is_expected.not_to allow_value(nil).for(:push_event_activities_limit) }
it { is_expected.to validate_numericality_of(:snippet_size_limit).only_integer.is_greater_than(0) }
+ it { is_expected.to validate_presence_of(:max_artifacts_size) }
+ it do
+ is_expected.to validate_numericality_of(:max_pages_size).only_integer.is_greater_than(0)
+ .is_less_than(::Gitlab::Pages::MAX_SIZE / 1.megabyte)
+ end
+ it { is_expected.to validate_numericality_of(:max_artifacts_size).only_integer.is_greater_than(0) }
+ it { is_expected.to validate_numericality_of(:max_pages_size).only_integer.is_greater_than(0) }
it { is_expected.not_to allow_value(7).for(:minimum_password_length) }
it { is_expected.not_to allow_value(129).for(:minimum_password_length) }
@@ -312,6 +319,11 @@ describe ApplicationSetting do
end
context 'gitaly timeouts' do
+ it "validates that the default_timeout is lower than the max_request_duration" do
+ is_expected.to validate_numericality_of(:gitaly_timeout_default)
+ .is_less_than_or_equal_to(Settings.gitlab.max_request_duration_seconds)
+ end
+
[:gitaly_timeout_default, :gitaly_timeout_medium, :gitaly_timeout_fast].each do |timeout_name|
it do
is_expected.to validate_presence_of(timeout_name)
diff --git a/spec/models/blob_spec.rb b/spec/models/blob_spec.rb
index 2c141cae98d..c7ca0625b77 100644
--- a/spec/models/blob_spec.rb
+++ b/spec/models/blob_spec.rb
@@ -424,6 +424,7 @@ describe Blob do
describe 'policy' do
let(:project) { build(:project) }
+
subject { described_class.new(fake_blob(path: 'foo'), project) }
it 'works with policy' do
diff --git a/spec/models/blob_viewer/changelog_spec.rb b/spec/models/blob_viewer/changelog_spec.rb
index 0fcc94182af..b71531ff3c2 100644
--- a/spec/models/blob_viewer/changelog_spec.rb
+++ b/spec/models/blob_viewer/changelog_spec.rb
@@ -7,6 +7,7 @@ describe BlobViewer::Changelog do
let(:project) { create(:project, :repository) }
let(:blob) { fake_blob(path: 'CHANGELOG') }
+
subject { described_class.new(blob) }
describe '#render_error' do
diff --git a/spec/models/blob_viewer/composer_json_spec.rb b/spec/models/blob_viewer/composer_json_spec.rb
index eda34779679..a6bb64ba121 100644
--- a/spec/models/blob_viewer/composer_json_spec.rb
+++ b/spec/models/blob_viewer/composer_json_spec.rb
@@ -15,6 +15,7 @@ describe BlobViewer::ComposerJson do
SPEC
end
let(:blob) { fake_blob(path: 'composer.json', data: data) }
+
subject { described_class.new(blob) }
describe '#package_name' do
diff --git a/spec/models/blob_viewer/gemspec_spec.rb b/spec/models/blob_viewer/gemspec_spec.rb
index b6cc82c03ba..291d14e2d72 100644
--- a/spec/models/blob_viewer/gemspec_spec.rb
+++ b/spec/models/blob_viewer/gemspec_spec.rb
@@ -15,6 +15,7 @@ describe BlobViewer::Gemspec do
SPEC
end
let(:blob) { fake_blob(path: 'activerecord.gemspec', data: data) }
+
subject { described_class.new(blob) }
describe '#package_name' do
diff --git a/spec/models/blob_viewer/gitlab_ci_yml_spec.rb b/spec/models/blob_viewer/gitlab_ci_yml_spec.rb
index db405ceb4f1..02993052124 100644
--- a/spec/models/blob_viewer/gitlab_ci_yml_spec.rb
+++ b/spec/models/blob_viewer/gitlab_ci_yml_spec.rb
@@ -12,6 +12,7 @@ describe BlobViewer::GitlabCiYml do
let(:data) { File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml')) }
let(:blob) { fake_blob(path: '.gitlab-ci.yml', data: data) }
let(:sha) { sample_commit.id }
+
subject { described_class.new(blob) }
describe '#validation_message' do
diff --git a/spec/models/blob_viewer/license_spec.rb b/spec/models/blob_viewer/license_spec.rb
index e02bfae3829..b0426401932 100644
--- a/spec/models/blob_viewer/license_spec.rb
+++ b/spec/models/blob_viewer/license_spec.rb
@@ -7,6 +7,7 @@ describe BlobViewer::License do
let(:project) { create(:project, :repository) }
let(:blob) { fake_blob(path: 'LICENSE') }
+
subject { described_class.new(blob) }
describe '#license' do
diff --git a/spec/models/blob_viewer/package_json_spec.rb b/spec/models/blob_viewer/package_json_spec.rb
index b317278f3c8..7f7b1dcfcb3 100644
--- a/spec/models/blob_viewer/package_json_spec.rb
+++ b/spec/models/blob_viewer/package_json_spec.rb
@@ -15,6 +15,7 @@ describe BlobViewer::PackageJson do
SPEC
end
let(:blob) { fake_blob(path: 'package.json', data: data) }
+
subject { described_class.new(blob) }
describe '#package_name' do
@@ -54,6 +55,7 @@ describe BlobViewer::PackageJson do
SPEC
end
let(:blob) { fake_blob(path: 'package.json', data: data) }
+
subject { described_class.new(blob) }
describe '#package_url' do
diff --git a/spec/models/blob_viewer/podspec_json_spec.rb b/spec/models/blob_viewer/podspec_json_spec.rb
index 7f1fb8666fd..dd5ed03b77d 100644
--- a/spec/models/blob_viewer/podspec_json_spec.rb
+++ b/spec/models/blob_viewer/podspec_json_spec.rb
@@ -15,6 +15,7 @@ describe BlobViewer::PodspecJson do
SPEC
end
let(:blob) { fake_blob(path: 'AFNetworking.podspec.json', data: data) }
+
subject { described_class.new(blob) }
describe '#package_name' do
diff --git a/spec/models/blob_viewer/podspec_spec.rb b/spec/models/blob_viewer/podspec_spec.rb
index 527ae79d766..2d9b184c5cb 100644
--- a/spec/models/blob_viewer/podspec_spec.rb
+++ b/spec/models/blob_viewer/podspec_spec.rb
@@ -15,6 +15,7 @@ describe BlobViewer::Podspec do
SPEC
end
let(:blob) { fake_blob(path: 'Reachability.podspec', data: data) }
+
subject { described_class.new(blob) }
describe '#package_name' do
diff --git a/spec/models/blob_viewer/readme_spec.rb b/spec/models/blob_viewer/readme_spec.rb
index 958927bddb4..6586adbc373 100644
--- a/spec/models/blob_viewer/readme_spec.rb
+++ b/spec/models/blob_viewer/readme_spec.rb
@@ -7,6 +7,7 @@ describe BlobViewer::Readme do
let(:project) { create(:project, :repository, :wiki_repo) }
let(:blob) { fake_blob(path: 'README.md') }
+
subject { described_class.new(blob) }
describe '#render_error' do
diff --git a/spec/models/blob_viewer/route_map_spec.rb b/spec/models/blob_viewer/route_map_spec.rb
index f7ce873c9d1..6c703df5c4c 100644
--- a/spec/models/blob_viewer/route_map_spec.rb
+++ b/spec/models/blob_viewer/route_map_spec.rb
@@ -14,6 +14,7 @@ describe BlobViewer::RouteMap do
MAP
end
let(:blob) { fake_blob(path: '.gitlab/route-map.yml', data: data) }
+
subject { described_class.new(blob) }
describe '#validation_message' do
diff --git a/spec/models/board_spec.rb b/spec/models/board_spec.rb
index f6eee67e539..0987c8e2b65 100644
--- a/spec/models/board_spec.rb
+++ b/spec/models/board_spec.rb
@@ -3,6 +3,9 @@
require 'spec_helper'
describe Board do
+ let(:project) { create(:project) }
+ let(:other_project) { create(:project) }
+
describe 'relationships' do
it { is_expected.to belong_to(:project) }
it { is_expected.to have_many(:lists).order(list_type: :asc, position: :asc).dependent(:delete_all) }
@@ -11,4 +14,28 @@ describe Board do
describe 'validations' do
it { is_expected.to validate_presence_of(:project) }
end
+
+ describe '#order_by_name_asc' do
+ let!(:second_board) { create(:board, name: 'Secondary board', project: project) }
+ let!(:first_board) { create(:board, name: 'First board', project: project) }
+
+ it 'returns in alphabetical order' do
+ expect(project.boards.order_by_name_asc).to eq [first_board, second_board]
+ end
+ end
+
+ describe '#first_board' do
+ let!(:other_board) { create(:board, name: 'Other board', project: other_project) }
+ let!(:second_board) { create(:board, name: 'Secondary board', project: project) }
+ let!(:first_board) { create(:board, name: 'First board', project: project) }
+
+ it 'returns the first alphabetical board as a relation' do
+ expect(project.boards.first_board).to eq [first_board]
+ end
+
+ # BoardsActions#board expects this behavior
+ it 'raises an error when find is done on a non-existent record' do
+ expect { project.boards.first_board.find(second_board.id) }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 900e0feaccc..38e15fc4582 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -341,6 +341,36 @@ describe Ci::Build do
end
end
+ describe '#enqueue_preparing' do
+ let(:build) { create(:ci_build, :preparing) }
+
+ subject { build.enqueue_preparing }
+
+ before do
+ allow(build).to receive(:any_unmet_prerequisites?).and_return(has_unmet_prerequisites)
+ end
+
+ context 'build completed prerequisites' do
+ let(:has_unmet_prerequisites) { false }
+
+ it 'transitions to pending' do
+ subject
+
+ expect(build).to be_pending
+ end
+ end
+
+ context 'build did not complete prerequisites' do
+ let(:has_unmet_prerequisites) { true }
+
+ it 'remains in preparing' do
+ subject
+
+ expect(build).to be_preparing
+ end
+ end
+ end
+
describe '#actionize' do
context 'when build is a created' do
before do
@@ -610,6 +640,7 @@ describe Ci::Build do
context 'artifacts archive is a zip file and metadata exists' do
let(:build) { create(:ci_build, :artifacts) }
+
it { is_expected.to be_truthy }
end
end
@@ -1053,7 +1084,7 @@ describe Ci::Build do
end
describe 'state transition as a deployable' do
- let!(:build) { create(:ci_build, :with_deployment, :start_review_app) }
+ let!(:build) { create(:ci_build, :with_deployment, :start_review_app, project: project, pipeline: pipeline) }
let(:deployment) { build.deployment }
let(:environment) { deployment.environment }
@@ -1118,6 +1149,60 @@ describe Ci::Build do
end
end
+ describe 'state transition with resource group' do
+ let(:resource_group) { create(:ci_resource_group, project: project) }
+
+ context 'when build status is created' do
+ let(:build) { create(:ci_build, :created, project: project, resource_group: resource_group) }
+
+ it 'is waiting for resource when build is enqueued' do
+ expect(Ci::ResourceGroups::AssignResourceFromResourceGroupWorker).to receive(:perform_async).with(resource_group.id)
+
+ expect { build.enqueue! }.to change { build.status }.from('created').to('waiting_for_resource')
+
+ expect(build.waiting_for_resource_at).not_to be_nil
+ end
+
+ context 'when build is waiting for resource' do
+ before do
+ build.update_column(:status, 'waiting_for_resource')
+ end
+
+ it 'is enqueued when build requests resource' do
+ expect { build.enqueue_waiting_for_resource! }.to change { build.status }.from('waiting_for_resource').to('pending')
+ end
+
+ it 'releases a resource when build finished' do
+ expect(build.resource_group).to receive(:release_resource_from).with(build).and_call_original
+ expect(Ci::ResourceGroups::AssignResourceFromResourceGroupWorker).to receive(:perform_async).with(build.resource_group_id)
+
+ build.enqueue_waiting_for_resource!
+ build.success!
+ end
+
+ context 'when build has prerequisites' do
+ before do
+ allow(build).to receive(:any_unmet_prerequisites?) { true }
+ end
+
+ it 'is preparing when build is enqueued' do
+ expect { build.enqueue_waiting_for_resource! }.to change { build.status }.from('waiting_for_resource').to('preparing')
+ end
+ end
+
+ context 'when there are no available resources' do
+ before do
+ resource_group.assign_resource_to(create(:ci_build))
+ end
+
+ it 'stays as waiting for resource when build requests resource' do
+ expect { build.enqueue_waiting_for_resource }.not_to change { build.status }
+ end
+ end
+ end
+ end
+ end
+
describe '#on_stop' do
subject { build.on_stop }
@@ -1408,6 +1493,7 @@ describe Ci::Build do
describe '#erased?' do
let!(:build) { create(:ci_build, :trace_artifact, :success, :artifacts) }
+
subject { build.erased? }
context 'job has not been erased' do
@@ -1469,6 +1555,7 @@ describe Ci::Build do
describe '#first_pending' do
let!(:first) { create(:ci_build, pipeline: pipeline, status: 'pending', created_at: Date.yesterday) }
let!(:second) { create(:ci_build, pipeline: pipeline, status: 'pending') }
+
subject { described_class.first_pending }
it { is_expected.to be_a(described_class) }
@@ -1553,6 +1640,12 @@ describe Ci::Build do
it { is_expected.to be_cancelable }
end
+
+ context 'when build is waiting for resource' do
+ let(:build) { create(:ci_build, :waiting_for_resource) }
+
+ it { is_expected.to be_cancelable }
+ end
end
context 'when build is not cancelable' do
@@ -2296,6 +2389,7 @@ describe Ci::Build do
{ key: 'CI_BUILD_STAGE', value: 'test', public: true, masked: false },
{ key: 'CI', value: 'true', public: true, masked: false },
{ key: 'GITLAB_CI', value: 'true', public: true, masked: false },
+ { key: 'CI_SERVER_URL', value: Gitlab.config.gitlab.url, public: true, masked: false },
{ key: 'CI_SERVER_HOST', value: Gitlab.config.gitlab.host, public: true, masked: false },
{ key: 'CI_SERVER_NAME', value: 'GitLab', public: true, masked: false },
{ key: 'CI_SERVER_VERSION', value: Gitlab::VERSION, public: true, masked: false },
@@ -3894,7 +3988,7 @@ describe Ci::Build do
end
context 'when build is a last deployment' do
- let(:build) { create(:ci_build, :success, environment: 'production') }
+ let(:build) { create(:ci_build, :success, environment: 'production', pipeline: pipeline, project: project) }
let(:environment) { create(:environment, name: 'production', project: build.project) }
let!(:deployment) { create(:deployment, :success, environment: environment, project: environment.project, deployable: build) }
@@ -3902,7 +3996,7 @@ describe Ci::Build do
end
context 'when there is a newer build with deployment' do
- let(:build) { create(:ci_build, :success, environment: 'production') }
+ let(:build) { create(:ci_build, :success, environment: 'production', pipeline: pipeline, project: project) }
let(:environment) { create(:environment, name: 'production', project: build.project) }
let!(:deployment) { create(:deployment, :success, environment: environment, project: environment.project, deployable: build) }
let!(:last_deployment) { create(:deployment, :success, environment: environment, project: environment.project) }
@@ -3911,7 +4005,7 @@ describe Ci::Build do
end
context 'when build with deployment has failed' do
- let(:build) { create(:ci_build, :failed, environment: 'production') }
+ let(:build) { create(:ci_build, :failed, environment: 'production', pipeline: pipeline, project: project) }
let(:environment) { create(:environment, name: 'production', project: build.project) }
let!(:deployment) { create(:deployment, :success, environment: environment, project: environment.project, deployable: build) }
@@ -3919,7 +4013,7 @@ describe Ci::Build do
end
context 'when build with deployment is running' do
- let(:build) { create(:ci_build, environment: 'production') }
+ let(:build) { create(:ci_build, environment: 'production', pipeline: pipeline, project: project) }
let(:environment) { create(:environment, name: 'production', project: build.project) }
let!(:deployment) { create(:deployment, :success, environment: environment, project: environment.project, deployable: build) }
diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb
index 96d81f4cc49..69fd167e0c8 100644
--- a/spec/models/ci/build_trace_chunk_spec.rb
+++ b/spec/models/ci/build_trace_chunk_spec.rb
@@ -604,7 +604,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
context 'when traces are archived' do
let(:subject) do
project.builds.each do |build|
- build.success!
+ build.reset.success!
end
end
diff --git a/spec/models/ci/pipeline_config_spec.rb b/spec/models/ci/pipeline_config_spec.rb
new file mode 100644
index 00000000000..25f514ee5ab
--- /dev/null
+++ b/spec/models/ci/pipeline_config_spec.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::PipelineConfig, type: :model do
+ it { is_expected.to belong_to(:pipeline) }
+
+ it { is_expected.to validate_presence_of(:pipeline) }
+ it { is_expected.to validate_presence_of(:content) }
+end
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 3f9e882ea52..013581c0d94 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -35,6 +35,7 @@ describe Ci::Pipeline, :mailer do
it { is_expected.to have_one(:source_pipeline) }
it { is_expected.to have_one(:triggered_by_pipeline) }
it { is_expected.to have_one(:source_job) }
+ it { is_expected.to have_one(:pipeline_config) }
it { is_expected.to validate_presence_of(:sha) }
it { is_expected.to validate_presence_of(:status) }
@@ -1007,22 +1008,22 @@ describe Ci::Pipeline, :mailer do
end
end
- describe '#duration', :sidekiq_might_not_need_inline do
+ describe '#duration', :sidekiq_inline do
context 'when multiple builds are finished' do
before do
travel_to(current + 30) do
build.run!
- build.success!
+ build.reload.success!
build_b.run!
build_c.run!
end
travel_to(current + 40) do
- build_b.drop!
+ build_b.reload.drop!
end
travel_to(current + 70) do
- build_c.success!
+ build_c.reload.success!
end
end
@@ -1043,7 +1044,7 @@ describe Ci::Pipeline, :mailer do
end
travel_to(current + 5.minutes) do
- build.success!
+ build.reload.success!
end
end
@@ -1182,6 +1183,38 @@ describe Ci::Pipeline, :mailer do
end
end
+ describe 'auto devops pipeline metrics' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:pipeline) { create(:ci_empty_pipeline, config_source: config_source) }
+ let(:config_source) { :auto_devops_source }
+
+ where(:action, :status) do
+ :succeed | 'success'
+ :drop | 'failed'
+ :skip | 'skipped'
+ :cancel | 'canceled'
+ end
+
+ with_them do
+ context "when pipeline receives action '#{params[:action]}'" do
+ subject { pipeline.public_send(action) }
+
+ it { expect { subject }.to change { auto_devops_pipelines_completed_total(status) }.by(1) }
+
+ context 'when not auto_devops_source?' do
+ let(:config_source) { :repository_source }
+
+ it { expect { subject }.not_to change { auto_devops_pipelines_completed_total(status) } }
+ end
+ end
+ end
+
+ def auto_devops_pipelines_completed_total(status)
+ Gitlab::Metrics.counter(:auto_devops_pipelines_completed_total, 'Number of completed auto devops pipelines').get(status: status)
+ end
+ end
+
def create_build(name, *traits, queued_at: current, started_from: 0, **opts)
create(:ci_build, *traits,
name: name,
@@ -1552,6 +1585,30 @@ describe Ci::Pipeline, :mailer do
end
end
+ describe '#needs_processing?' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { pipeline.needs_processing? }
+
+ where(:processed, :result) do
+ nil | true
+ false | true
+ true | false
+ end
+
+ with_them do
+ let(:build) do
+ create(:ci_build, :success, pipeline: pipeline, name: 'rubocop')
+ end
+
+ before do
+ build.update_column(:processed, processed)
+ end
+
+ it { is_expected.to eq(result) }
+ end
+ end
+
shared_context 'with some outdated pipelines' do
before do
create_pipeline(:canceled, 'ref', 'A', project)
@@ -1749,16 +1806,27 @@ describe Ci::Pipeline, :mailer do
subject { described_class.bridgeable_statuses }
it { is_expected.to be_an(Array) }
- it { is_expected.not_to include('created', 'preparing', 'pending') }
+ it { is_expected.not_to include('created', 'waiting_for_resource', 'preparing', 'pending') }
end
- describe '#status', :sidekiq_might_not_need_inline do
+ describe '#status', :sidekiq_inline do
let(:build) do
create(:ci_build, :created, pipeline: pipeline, name: 'test')
end
subject { pipeline.reload.status }
+ context 'on waiting for resource' do
+ before do
+ allow(build).to receive(:requires_resource?) { true }
+ allow(Ci::ResourceGroups::AssignResourceFromResourceGroupWorker).to receive(:perform_async)
+
+ build.enqueue
+ end
+
+ it { is_expected.to eq('waiting_for_resource') }
+ end
+
context 'on prepare' do
before do
# Prevent skipping directly to 'pending'
@@ -1782,7 +1850,7 @@ describe Ci::Pipeline, :mailer do
context 'on run' do
before do
build.enqueue
- build.run
+ build.reload.run
end
it { is_expected.to eq('running') }
@@ -1841,7 +1909,7 @@ describe Ci::Pipeline, :mailer do
it 'updates does not change pipeline status' do
expect(pipeline.statuses.latest.slow_composite_status).to be_nil
- expect { pipeline.update_status }
+ expect { pipeline.update_legacy_status }
.to change { pipeline.reload.status }
.from('created')
.to('skipped')
@@ -1854,7 +1922,7 @@ describe Ci::Pipeline, :mailer do
end
it 'updates pipeline status to running' do
- expect { pipeline.update_status }
+ expect { pipeline.update_legacy_status }
.to change { pipeline.reload.status }
.from('created')
.to('running')
@@ -1867,7 +1935,7 @@ describe Ci::Pipeline, :mailer do
end
it 'updates pipeline status to scheduled' do
- expect { pipeline.update_status }
+ expect { pipeline.update_legacy_status }
.to change { pipeline.reload.status }
.from('created')
.to('scheduled')
@@ -1882,7 +1950,7 @@ describe Ci::Pipeline, :mailer do
end
it 'raises an exception' do
- expect { pipeline.update_status }
+ expect { pipeline.update_legacy_status }
.to raise_error(HasStatus::UnknownStatusError)
end
end
@@ -2170,11 +2238,11 @@ describe Ci::Pipeline, :mailer do
stub_full_request(hook.url, method: :post)
end
- context 'with multiple builds', :sidekiq_might_not_need_inline do
+ context 'with multiple builds', :sidekiq_inline do
context 'when build is queued' do
before do
- build_a.enqueue
- build_b.enqueue
+ build_a.reload.enqueue
+ build_b.reload.enqueue
end
it 'receives a pending event once' do
@@ -2184,10 +2252,10 @@ describe Ci::Pipeline, :mailer do
context 'when build is run' do
before do
- build_a.enqueue
- build_a.run
- build_b.enqueue
- build_b.run
+ build_a.reload.enqueue
+ build_a.reload.run!
+ build_b.reload.enqueue
+ build_b.reload.run!
end
it 'receives a running event once' do
@@ -2248,6 +2316,7 @@ describe Ci::Pipeline, :mailer do
:created,
pipeline: pipeline,
name: name,
+ stage: "stage:#{stage_idx}",
stage_idx: stage_idx)
end
end
@@ -2704,4 +2773,114 @@ describe Ci::Pipeline, :mailer do
end
end
end
+
+ describe '#parent_pipeline' do
+ let(:project) { create(:project) }
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+
+ context 'when pipeline is triggered by a pipeline from the same project' do
+ let(:upstream_pipeline) { create(:ci_pipeline, project: pipeline.project) }
+
+ before do
+ create(:ci_sources_pipeline,
+ source_pipeline: upstream_pipeline,
+ source_project: project,
+ pipeline: pipeline,
+ project: project)
+ end
+
+ it 'returns the parent pipeline' do
+ expect(pipeline.parent_pipeline).to eq(upstream_pipeline)
+ end
+
+ it 'is child' do
+ expect(pipeline).to be_child
+ end
+ end
+
+ context 'when pipeline is triggered by a pipeline from another project' do
+ let(:upstream_pipeline) { create(:ci_pipeline) }
+
+ before do
+ create(:ci_sources_pipeline,
+ source_pipeline: upstream_pipeline,
+ source_project: upstream_pipeline.project,
+ pipeline: pipeline,
+ project: project)
+ end
+
+ it 'returns nil' do
+ expect(pipeline.parent_pipeline).to be_nil
+ end
+
+ it 'is not child' do
+ expect(pipeline).not_to be_child
+ end
+ end
+
+ context 'when pipeline is not triggered by a pipeline' do
+ it 'returns nil' do
+ expect(pipeline.parent_pipeline).to be_nil
+ end
+
+ it 'is not child' do
+ expect(pipeline).not_to be_child
+ end
+ end
+ end
+
+ describe '#child_pipelines' do
+ let(:project) { create(:project) }
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+
+ context 'when pipeline triggered other pipelines on same project' do
+ let(:downstream_pipeline) { create(:ci_pipeline, project: pipeline.project) }
+
+ before do
+ create(:ci_sources_pipeline,
+ source_pipeline: pipeline,
+ source_project: pipeline.project,
+ pipeline: downstream_pipeline,
+ project: pipeline.project)
+ end
+
+ it 'returns the child pipelines' do
+ expect(pipeline.child_pipelines).to eq [downstream_pipeline]
+ end
+
+ it 'is parent' do
+ expect(pipeline).to be_parent
+ end
+ end
+
+ context 'when pipeline triggered other pipelines on another project' do
+ let(:downstream_pipeline) { create(:ci_pipeline) }
+
+ before do
+ create(:ci_sources_pipeline,
+ source_pipeline: pipeline,
+ source_project: pipeline.project,
+ pipeline: downstream_pipeline,
+ project: downstream_pipeline.project)
+ end
+
+ it 'returns empty array' do
+ expect(pipeline.child_pipelines).to be_empty
+ end
+
+ it 'is not parent' do
+ expect(pipeline).not_to be_parent
+ end
+ end
+
+ context 'when pipeline did not trigger any pipelines' do
+ it 'returns empty array' do
+ expect(pipeline.child_pipelines).to be_empty
+ end
+
+ it 'is not parent' do
+ expect(pipeline).not_to be_parent
+ end
+ end
+ end
end
diff --git a/spec/models/ci/processable_spec.rb b/spec/models/ci/processable_spec.rb
new file mode 100644
index 00000000000..87dbcbf870e
--- /dev/null
+++ b/spec/models/ci/processable_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::Processable do
+ set(:project) { create(:project) }
+ set(:pipeline) { create(:ci_pipeline, project: project) }
+
+ describe '#aggregated_needs_names' do
+ let(:with_aggregated_needs) { pipeline.processables.select_with_aggregated_needs(project) }
+
+ context 'with created status' do
+ let!(:processable) { create(:ci_build, :created, project: project, pipeline: pipeline) }
+
+ context 'with needs' do
+ before do
+ create(:ci_build_need, build: processable, name: 'test1')
+ create(:ci_build_need, build: processable, name: 'test2')
+ end
+
+ it 'returns all processables' do
+ expect(with_aggregated_needs).to contain_exactly(processable)
+ end
+
+ it 'returns all needs' do
+ expect(with_aggregated_needs.first.aggregated_needs_names).to contain_exactly('test1', 'test2')
+ end
+
+ context 'with ci_dag_support disabled' do
+ before do
+ stub_feature_flags(ci_dag_support: false)
+ end
+
+ it 'returns all processables' do
+ expect(with_aggregated_needs).to contain_exactly(processable)
+ end
+
+ it 'returns empty needs' do
+ expect(with_aggregated_needs.first.aggregated_needs_names).to be_nil
+ end
+ end
+ end
+
+ context 'without needs' do
+ it 'returns all processables' do
+ expect(with_aggregated_needs).to contain_exactly(processable)
+ end
+
+ it 'returns empty needs' do
+ expect(with_aggregated_needs.first.aggregated_needs_names).to be_nil
+ end
+ end
+ end
+ end
+end
diff --git a/spec/models/ci/resource_group_spec.rb b/spec/models/ci/resource_group_spec.rb
new file mode 100644
index 00000000000..ce8b03282bc
--- /dev/null
+++ b/spec/models/ci/resource_group_spec.rb
@@ -0,0 +1,88 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::ResourceGroup do
+ describe 'validation' do
+ it 'is valid when the key includes only allowed characters' do
+ resource_group = build(:ci_resource_group, key: 'test')
+
+ expect(resource_group).to be_valid
+ end
+
+ it 'is invalid when the key includes invalid characters' do
+ resource_group = build(:ci_resource_group, key: ':::')
+
+ expect(resource_group).not_to be_valid
+ end
+ end
+
+ describe '#ensure_resource' do
+ it 'creates one resource when resource group is created' do
+ resource_group = create(:ci_resource_group)
+
+ expect(resource_group.resources.count).to eq(1)
+ expect(resource_group.resources.all?(&:persisted?)).to eq(true)
+ end
+ end
+
+ describe '#assign_resource_to' do
+ subject { resource_group.assign_resource_to(build) }
+
+ let(:build) { create(:ci_build) }
+ let(:resource_group) { create(:ci_resource_group) }
+
+ it 'retains resource for the build' do
+ expect(resource_group.resources.first.build).to be_nil
+
+ is_expected.to eq(true)
+
+ expect(resource_group.resources.first.build).to eq(build)
+ end
+
+ context 'when there are no free resources' do
+ before do
+ resource_group.assign_resource_to(create(:ci_build))
+ end
+
+ it 'fails to retain resource' do
+ is_expected.to eq(false)
+ end
+ end
+
+ context 'when the build has already retained a resource' do
+ let!(:another_resource) { create(:ci_resource, resource_group: resource_group, build: build) }
+
+ it 'fails to retain resource' do
+ expect { subject }.to raise_error(ActiveRecord::RecordNotUnique)
+ end
+ end
+ end
+
+ describe '#release_resource_from' do
+ subject { resource_group.release_resource_from(build) }
+
+ let(:build) { create(:ci_build) }
+ let(:resource_group) { create(:ci_resource_group) }
+
+ context 'when the build has already retained a resource' do
+ before do
+ resource_group.assign_resource_to(build)
+ end
+
+ it 'releases resource from the build' do
+ expect(resource_group.resources.first.build).to eq(build)
+
+ is_expected.to eq(true)
+
+ expect(resource_group.resources.first.build).to be_nil
+ end
+ end
+
+ context 'when the build has already released a resource' do
+ it 'fails to release resource' do
+ is_expected.to eq(false)
+ end
+ end
+ end
+end
diff --git a/spec/models/ci/resource_spec.rb b/spec/models/ci/resource_spec.rb
new file mode 100644
index 00000000000..27e512e2c45
--- /dev/null
+++ b/spec/models/ci/resource_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::Resource do
+ describe '.free' do
+ subject { described_class.free }
+
+ let(:resource_group) { create(:ci_resource_group) }
+ let!(:free_resource) { resource_group.resources.take }
+ let!(:retained_resource) { create(:ci_resource, :retained, resource_group: resource_group) }
+
+ it 'returns free resources' do
+ is_expected.to eq([free_resource])
+ end
+ end
+
+ describe '.retained_by' do
+ subject { described_class.retained_by(build) }
+
+ let(:build) { create(:ci_build) }
+ let!(:resource) { create(:ci_resource, build: build) }
+
+ it 'returns retained resources' do
+ is_expected.to eq([resource])
+ end
+ end
+end
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index ac438f7d473..5c9a03a26ec 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -755,11 +755,13 @@ describe Ci::Runner do
context 'when group runner' do
let(:runner) { create(:ci_runner, :group, description: 'Group runner', groups: [group]) }
let(:group) { create(:group) }
+
it { is_expected.to be_falsey }
end
context 'when shared runner' do
let(:runner) { create(:ci_runner, :instance, description: 'Shared runner') }
+
it { is_expected.to be_falsey }
end
diff --git a/spec/models/ci/stage_spec.rb b/spec/models/ci/stage_spec.rb
index c997f1ef405..3aeaa27abce 100644
--- a/spec/models/ci/stage_spec.rb
+++ b/spec/models/ci/stage_spec.rb
@@ -63,7 +63,7 @@ describe Ci::Stage, :models do
end
it 'updates stage status correctly' do
- expect { stage.update_status }
+ expect { stage.update_legacy_status }
.to change { stage.reload.status }
.to eq 'running'
end
@@ -87,7 +87,7 @@ describe Ci::Stage, :models do
end
it 'updates status to skipped' do
- expect { stage.update_status }
+ expect { stage.update_legacy_status }
.to change { stage.reload.status }
.to eq 'skipped'
end
@@ -99,15 +99,27 @@ describe Ci::Stage, :models do
end
it 'updates status to scheduled' do
- expect { stage.update_status }
+ expect { stage.update_legacy_status }
.to change { stage.reload.status }
.to 'scheduled'
end
end
+ context 'when build is waiting for resource' do
+ before do
+ create(:ci_build, :waiting_for_resource, stage_id: stage.id)
+ end
+
+ it 'updates status to waiting for resource' do
+ expect { stage.update_legacy_status }
+ .to change { stage.reload.status }
+ .to 'waiting_for_resource'
+ end
+ end
+
context 'when stage is skipped because is empty' do
it 'updates status to skipped' do
- expect { stage.update_status }
+ expect { stage.update_legacy_status }
.to change { stage.reload.status }
.to eq('skipped')
end
@@ -121,7 +133,7 @@ describe Ci::Stage, :models do
it 'retries a lock to update a stage status' do
stage.lock_version = 100
- stage.update_status
+ stage.update_legacy_status
expect(stage.reload).to be_failed
end
@@ -135,7 +147,7 @@ describe Ci::Stage, :models do
end
it 'raises an exception' do
- expect { stage.update_status }
+ expect { stage.update_legacy_status }
.to raise_error(HasStatus::UnknownStatusError)
end
end
@@ -146,6 +158,7 @@ describe Ci::Stage, :models do
let(:user) { create(:user) }
let(:stage) { create(:ci_stage_entity, status: :created) }
+
subject { stage.detailed_status(user) }
where(:statuses, :label) do
@@ -166,7 +179,7 @@ describe Ci::Stage, :models do
stage_id: stage.id,
status: status)
- stage.update_status
+ stage.update_legacy_status
end
end
@@ -183,7 +196,7 @@ describe Ci::Stage, :models do
status: :failed,
allow_failure: true)
- stage.update_status
+ stage.update_legacy_status
end
it 'is passed with warnings' do
@@ -230,7 +243,7 @@ describe Ci::Stage, :models do
it 'recalculates index before updating status' do
expect(stage.reload.position).to be_nil
- stage.update_status
+ stage.update_legacy_status
expect(stage.reload.position).to eq 10
end
@@ -240,7 +253,7 @@ describe Ci::Stage, :models do
it 'fallbacks to zero' do
expect(stage.reload.position).to be_nil
- stage.update_status
+ stage.update_legacy_status
expect(stage.reload.position).to eq 0
end
diff --git a/spec/models/ci/trigger_spec.rb b/spec/models/ci/trigger_spec.rb
index 5b5d6f51b33..5b0815f8156 100644
--- a/spec/models/ci/trigger_spec.rb
+++ b/spec/models/ci/trigger_spec.rb
@@ -11,6 +11,10 @@ describe Ci::Trigger do
it { is_expected.to have_many(:trigger_requests) }
end
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:owner) }
+ end
+
describe 'before_validation' do
it 'sets an random token if none provided' do
trigger = create(:ci_trigger_without_token, project: project)
@@ -35,63 +39,22 @@ describe Ci::Trigger do
end
end
- describe '#legacy?' do
- let(:trigger) { create(:ci_trigger, owner: owner, project: project) }
-
- subject { trigger }
-
- context 'when owner is blank' do
- let(:owner) { nil }
-
- it { is_expected.to be_legacy }
- end
-
- context 'when owner is set' do
- let(:owner) { create(:user) }
-
- it { is_expected.not_to be_legacy }
- end
- end
-
describe '#can_access_project?' do
let(:owner) { create(:user) }
let(:trigger) { create(:ci_trigger, owner: owner, project: project) }
- context 'when owner is blank' do
+ subject { trigger.can_access_project? }
+
+ context 'and is member of the project' do
before do
- stub_feature_flags(use_legacy_pipeline_triggers: false)
- trigger.update_attribute(:owner, nil)
+ project.add_developer(owner)
end
- subject { trigger.can_access_project? }
-
- it { is_expected.to eq(false) }
-
- context 'when :use_legacy_pipeline_triggers feature flag is enabled' do
- before do
- stub_feature_flags(use_legacy_pipeline_triggers: true)
- end
-
- subject { trigger.can_access_project? }
-
- it { is_expected.to eq(true) }
- end
+ it { is_expected.to eq(true) }
end
- context 'when owner is set' do
- subject { trigger.can_access_project? }
-
- context 'and is member of the project' do
- before do
- project.add_developer(owner)
- end
-
- it { is_expected.to eq(true) }
- end
-
- context 'and is not member of the project' do
- it { is_expected.to eq(false) }
- end
+ context 'and is not member of the project' do
+ it { is_expected.to eq(false) }
end
end
end
diff --git a/spec/models/clusters/applications/elastic_stack_spec.rb b/spec/models/clusters/applications/elastic_stack_spec.rb
index d0e0dd5ad57..d336dc752c8 100644
--- a/spec/models/clusters/applications/elastic_stack_spec.rb
+++ b/spec/models/clusters/applications/elastic_stack_spec.rb
@@ -10,45 +10,8 @@ describe Clusters::Applications::ElasticStack do
include_examples 'cluster application version specs', :clusters_applications_elastic_stack
include_examples 'cluster application helm specs', :clusters_applications_elastic_stack
- describe '#can_uninstall?' do
- let(:ingress) { create(:clusters_applications_ingress, :installed, external_hostname: 'localhost.localdomain') }
- let(:elastic_stack) { create(:clusters_applications_elastic_stack, cluster: ingress.cluster) }
-
- subject { elastic_stack.can_uninstall? }
-
- it { is_expected.to be_truthy }
- end
-
- describe '#set_initial_status' do
- before do
- elastic_stack.set_initial_status
- end
-
- context 'when ingress is not installed' do
- let(:cluster) { create(:cluster, :provided_by_gcp) }
- let(:elastic_stack) { create(:clusters_applications_elastic_stack, cluster: cluster) }
-
- it { expect(elastic_stack).to be_not_installable }
- end
-
- context 'when ingress is installed and external_ip is assigned' do
- let(:ingress) { create(:clusters_applications_ingress, :installed, external_ip: '127.0.0.1') }
- let(:elastic_stack) { create(:clusters_applications_elastic_stack, cluster: ingress.cluster) }
-
- it { expect(elastic_stack).to be_installable }
- end
-
- context 'when ingress is installed and external_hostname is assigned' do
- let(:ingress) { create(:clusters_applications_ingress, :installed, external_hostname: 'localhost.localdomain') }
- let(:elastic_stack) { create(:clusters_applications_elastic_stack, cluster: ingress.cluster) }
-
- it { expect(elastic_stack).to be_installable }
- end
- end
-
describe '#install_command' do
- let!(:ingress) { create(:clusters_applications_ingress, :installed, external_ip: '127.0.0.1') }
- let!(:elastic_stack) { create(:clusters_applications_elastic_stack, cluster: ingress.cluster) }
+ let!(:elastic_stack) { create(:clusters_applications_elastic_stack) }
subject { elastic_stack.install_command }
@@ -80,8 +43,7 @@ describe Clusters::Applications::ElasticStack do
end
describe '#uninstall_command' do
- let!(:ingress) { create(:clusters_applications_ingress, :installed, external_ip: '127.0.0.1') }
- let!(:elastic_stack) { create(:clusters_applications_elastic_stack, cluster: ingress.cluster) }
+ let!(:elastic_stack) { create(:clusters_applications_elastic_stack) }
subject { elastic_stack.uninstall_command }
@@ -100,19 +62,6 @@ describe Clusters::Applications::ElasticStack do
end
end
- describe '#files' do
- let!(:ingress) { create(:clusters_applications_ingress, :installed, external_ip: '127.0.0.1') }
- let!(:elastic_stack) { create(:clusters_applications_elastic_stack, cluster: ingress.cluster) }
-
- let(:values) { subject[:'values.yaml'] }
-
- subject { elastic_stack.files }
-
- it 'includes elastic stack specific keys in the values.yaml file' do
- expect(values).to include('ELASTICSEARCH_HOSTS')
- end
- end
-
describe '#elasticsearch_client' do
context 'cluster is nil' do
it 'returns nil' do
@@ -123,6 +72,7 @@ describe Clusters::Applications::ElasticStack do
context "cluster doesn't have kubeclient" do
let(:cluster) { create(:cluster) }
+
subject { create(:clusters_applications_elastic_stack, cluster: cluster) }
it 'returns nil' do
diff --git a/spec/models/clusters/applications/helm_spec.rb b/spec/models/clusters/applications/helm_spec.rb
index 64f58155a66..87454e1d3e2 100644
--- a/spec/models/clusters/applications/helm_spec.rb
+++ b/spec/models/clusters/applications/helm_spec.rb
@@ -52,6 +52,7 @@ describe Clusters::Applications::Helm do
describe '#issue_client_cert' do
let(:application) { create(:clusters_applications_helm) }
+
subject { application.issue_client_cert }
it 'returns a new cert' do
diff --git a/spec/models/clusters/applications/ingress_spec.rb b/spec/models/clusters/applications/ingress_spec.rb
index d7ad7867e1a..c1158698601 100644
--- a/spec/models/clusters/applications/ingress_spec.rb
+++ b/spec/models/clusters/applications/ingress_spec.rb
@@ -142,11 +142,11 @@ describe Clusters::Applications::Ingress do
let(:project) { build(:project) }
let(:cluster) { build(:cluster, projects: [project]) }
- context 'when ingress_modsecurity is enabled' do
+ context 'when modsecurity_enabled is enabled' do
before do
- stub_feature_flags(ingress_modsecurity: true)
-
allow(subject).to receive(:cluster).and_return(cluster)
+
+ allow(subject).to receive(:modsecurity_enabled).and_return(true)
end
it 'includes modsecurity module enablement' do
@@ -173,10 +173,8 @@ describe Clusters::Applications::Ingress do
end
end
- context 'when ingress_modsecurity is disabled' do
+ context 'when modsecurity_enabled is disabled' do
before do
- stub_feature_flags(ingress_modsecurity: false)
-
allow(subject).to receive(:cluster).and_return(cluster)
end
diff --git a/spec/models/clusters/applications/jupyter_spec.rb b/spec/models/clusters/applications/jupyter_spec.rb
index 0ec9333d6a7..3bc5088d1ab 100644
--- a/spec/models/clusters/applications/jupyter_spec.rb
+++ b/spec/models/clusters/applications/jupyter_spec.rb
@@ -57,7 +57,8 @@ describe Clusters::Applications::Jupyter do
it 'is initialized with 4 arguments' do
expect(subject.name).to eq('jupyter')
expect(subject.chart).to eq('jupyter/jupyterhub')
- expect(subject.version).to eq('0.9-174bbd5')
+ expect(subject.version).to eq('0.9.0-beta.2')
+
expect(subject).to be_rbac
expect(subject.repository).to eq('https://jupyterhub.github.io/helm-chart/')
expect(subject.files).to eq(jupyter.files)
@@ -75,7 +76,7 @@ describe Clusters::Applications::Jupyter do
let(:jupyter) { create(:clusters_applications_jupyter, :errored, version: '0.0.1') }
it 'is initialized with the locked version' do
- expect(subject.version).to eq('0.9-174bbd5')
+ expect(subject.version).to eq('0.9.0-beta.2')
end
end
end
diff --git a/spec/models/clusters/applications/knative_spec.rb b/spec/models/clusters/applications/knative_spec.rb
index c1057af5f80..68ac3f0d483 100644
--- a/spec/models/clusters/applications/knative_spec.rb
+++ b/spec/models/clusters/applications/knative_spec.rb
@@ -131,6 +131,7 @@ describe Clusters::Applications::Knative do
describe '#update_command' do
let!(:current_installed_version) { knative.version = '0.1.0' }
+
subject { knative.update_command }
it 'is initialized with current version' do
diff --git a/spec/models/clusters/applications/prometheus_spec.rb b/spec/models/clusters/applications/prometheus_spec.rb
index d588ce3bc38..cf33d2b4273 100644
--- a/spec/models/clusters/applications/prometheus_spec.rb
+++ b/spec/models/clusters/applications/prometheus_spec.rb
@@ -66,6 +66,7 @@ describe Clusters::Applications::Prometheus do
context "cluster doesn't have kubeclient" do
let(:cluster) { create(:cluster) }
+
subject { create(:clusters_applications_prometheus, cluster: cluster) }
it 'returns nil' do
@@ -116,6 +117,12 @@ describe Clusters::Applications::Prometheus do
let(:exception) { Errno::ECONNRESET }
end
end
+
+ context 'when the network is unreachable' do
+ it_behaves_like 'exception caught for prometheus client' do
+ let(:exception) { Errno::ENETUNREACH }
+ end
+ end
end
end
@@ -129,7 +136,7 @@ describe Clusters::Applications::Prometheus do
it 'is initialized with 3 arguments' do
expect(subject.name).to eq('prometheus')
expect(subject.chart).to eq('stable/prometheus')
- expect(subject.version).to eq('6.7.3')
+ expect(subject.version).to eq('9.5.2')
expect(subject).to be_rbac
expect(subject.files).to eq(prometheus.files)
end
@@ -146,7 +153,7 @@ describe Clusters::Applications::Prometheus do
let(:prometheus) { create(:clusters_applications_prometheus, :errored, version: '2.0.0') }
it 'is initialized with the locked version' do
- expect(subject.version).to eq('6.7.3')
+ expect(subject.version).to eq('9.5.2')
end
end
@@ -217,7 +224,7 @@ describe Clusters::Applications::Prometheus do
it 'is initialized with 3 arguments' do
expect(patch_command.name).to eq('prometheus')
expect(patch_command.chart).to eq('stable/prometheus')
- expect(patch_command.version).to eq('6.7.3')
+ expect(patch_command.version).to eq('9.5.2')
expect(patch_command.files).to eq(prometheus.files)
end
end
diff --git a/spec/models/commit_spec.rb b/spec/models/commit_spec.rb
index 1c1b550c69b..782d1ac4552 100644
--- a/spec/models/commit_spec.rb
+++ b/spec/models/commit_spec.rb
@@ -63,6 +63,20 @@ describe Commit do
end
end
+ describe '#diff_refs' do
+ it 'is equal to itself' do
+ expect(commit.diff_refs).to eq(commit.diff_refs)
+ end
+
+ context 'from a factory' do
+ let(:commit) { create(:commit) }
+
+ it 'is equal to itself' do
+ expect(commit.diff_refs).to eq(commit.diff_refs)
+ end
+ end
+ end
+
describe '#author', :request_store do
it 'looks up the author in a case-insensitive way' do
user = create(:user, email: commit.author_email.upcase)
@@ -263,7 +277,7 @@ describe Commit do
describe '#title' do
it "returns no_commit_message when safe_message is blank" do
allow(commit).to receive(:safe_message).and_return('')
- expect(commit.title).to eq("--no commit message")
+ expect(commit.title).to eq("No commit message")
end
it 'truncates a message without a newline at natural break to 80 characters' do
@@ -294,7 +308,7 @@ eos
describe '#full_title' do
it "returns no_commit_message when safe_message is blank" do
allow(commit).to receive(:safe_message).and_return('')
- expect(commit.full_title).to eq("--no commit message")
+ expect(commit.full_title).to eq("No commit message")
end
it "returns entire message if there is no newline" do
@@ -316,7 +330,7 @@ eos
it 'returns no_commit_message when safe_message is blank' do
allow(commit).to receive(:safe_message).and_return(nil)
- expect(commit.description).to eq('--no commit message')
+ expect(commit.description).to eq('No commit message')
end
it 'returns description of commit message if title less than 100 characters' do
@@ -376,6 +390,17 @@ eos
expect(commit.closes_issues).to include(issue)
expect(commit.closes_issues).to include(other_issue)
end
+
+ it 'ignores referenced issues when auto-close is disabled' do
+ project.update!(autoclose_referenced_issues: false)
+
+ allow(commit).to receive_messages(
+ safe_message: "Fixes ##{issue.iid}",
+ committer_email: committer.email
+ )
+
+ expect(commit.closes_issues).to be_empty
+ end
end
it_behaves_like 'a mentionable' do
diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb
index 31aebac54e1..40652614101 100644
--- a/spec/models/commit_status_spec.rb
+++ b/spec/models/commit_status_spec.rb
@@ -63,6 +63,42 @@ describe CommitStatus do
end
end
+ describe '#processed' do
+ subject { commit_status.processed }
+
+ context 'when ci_atomic_processing is disabled' do
+ before do
+ stub_feature_flags(ci_atomic_processing: false)
+
+ commit_status.save!
+ end
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when ci_atomic_processing is enabled' do
+ before do
+ stub_feature_flags(ci_atomic_processing: true)
+ end
+
+ context 'status is latest' do
+ before do
+ commit_status.update!(retried: false, status: :pending)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'status is retried' do
+ before do
+ commit_status.update!(retried: true, status: :pending)
+ end
+
+ it { is_expected.to be_truthy }
+ end
+ end
+ end
+
describe '#started?' do
subject { commit_status.started? }
@@ -634,6 +670,30 @@ describe CommitStatus do
end
end
+ describe '#all_met_to_become_pending?' do
+ subject { commit_status.all_met_to_become_pending? }
+
+ let(:commit_status) { create(:commit_status) }
+
+ it { is_expected.to eq(true) }
+
+ context 'when build requires a resource' do
+ before do
+ allow(commit_status).to receive(:requires_resource?) { true }
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when build has a prerequisite' do
+ before do
+ allow(commit_status).to receive(:any_unmet_prerequisites?) { true }
+ end
+
+ it { is_expected.to eq(false) }
+ end
+ end
+
describe '#enqueue' do
let!(:current_time) { Time.new(2018, 4, 5, 14, 0, 0) }
@@ -654,12 +714,6 @@ describe CommitStatus do
it_behaves_like 'commit status enqueued'
end
- context 'when initial state is :preparing' do
- let(:commit_status) { create(:commit_status, :preparing) }
-
- it_behaves_like 'commit status enqueued'
- end
-
context 'when initial state is :skipped' do
let(:commit_status) { create(:commit_status, :skipped) }
diff --git a/spec/models/concerns/atomic_internal_id_spec.rb b/spec/models/concerns/atomic_internal_id_spec.rb
index 0605392c0aa..93bf7ec10dd 100644
--- a/spec/models/concerns/atomic_internal_id_spec.rb
+++ b/spec/models/concerns/atomic_internal_id_spec.rb
@@ -9,6 +9,32 @@ describe AtomicInternalId do
let(:scope_attrs) { { project: milestone.project } }
let(:usage) { :milestones }
+ describe '#save!' do
+ context 'when IID is provided' do
+ before do
+ milestone.iid = external_iid
+ end
+
+ it 'tracks the value' do
+ expect(milestone).to receive(:track_project_iid!)
+
+ milestone.save!
+ end
+
+ context 'when importing' do
+ before do
+ milestone.importing = true
+ end
+
+ it 'does not track the value' do
+ expect(milestone).not_to receive(:track_project_iid!)
+
+ milestone.save!
+ end
+ end
+ end
+ end
+
describe '#track_project_iid!' do
subject { milestone.track_project_iid! }
diff --git a/spec/models/concerns/cache_markdown_field_spec.rb b/spec/models/concerns/cache_markdown_field_spec.rb
index 9a12c3d6965..06d12c14793 100644
--- a/spec/models/concerns/cache_markdown_field_spec.rb
+++ b/spec/models/concerns/cache_markdown_field_spec.rb
@@ -92,6 +92,7 @@ describe CacheMarkdownField, :clean_gitlab_redis_cache do
describe '#latest_cached_markdown_version' do
let(:thing) { klass.new }
+
subject { thing.latest_cached_markdown_version }
it 'returns default version' do
@@ -151,6 +152,7 @@ describe CacheMarkdownField, :clean_gitlab_redis_cache do
describe '#banzai_render_context' do
let(:thing) { klass.new(title: markdown, title_html: html, cached_markdown_version: cache_version) }
+
subject(:context) { thing.banzai_render_context(:title) }
it 'sets project to nil if the object lacks a project' do
diff --git a/spec/models/concerns/each_batch_spec.rb b/spec/models/concerns/each_batch_spec.rb
index c4cf8e80f7a..294fde4f8e6 100644
--- a/spec/models/concerns/each_batch_spec.rb
+++ b/spec/models/concerns/each_batch_spec.rb
@@ -13,7 +13,7 @@ describe EachBatch do
end
before do
- 5.times { create(:user, updated_at: 1.day.ago) }
+ create_list(:user, 5, updated_at: 1.day.ago)
end
shared_examples 'each_batch handling' do |kwargs|
diff --git a/spec/models/concerns/has_status_spec.rb b/spec/models/concerns/has_status_spec.rb
index 21e4dda6dab..99d09af80d0 100644
--- a/spec/models/concerns/has_status_spec.rb
+++ b/spec/models/concerns/has_status_spec.rb
@@ -39,6 +39,22 @@ describe HasStatus do
it { is_expected.to eq 'running' }
end
+ context 'all waiting for resource' do
+ let!(:statuses) do
+ [create(type, status: :waiting_for_resource), create(type, status: :waiting_for_resource)]
+ end
+
+ it { is_expected.to eq 'waiting_for_resource' }
+ end
+
+ context 'at least one waiting for resource' do
+ let!(:statuses) do
+ [create(type, status: :success), create(type, status: :waiting_for_resource)]
+ end
+
+ it { is_expected.to eq 'waiting_for_resource' }
+ end
+
context 'all preparing' do
let!(:statuses) do
[create(type, status: :preparing), create(type, status: :preparing)]
@@ -219,7 +235,7 @@ describe HasStatus do
end
end
- %i[created preparing running pending success
+ %i[created waiting_for_resource preparing running pending success
failed canceled skipped].each do |status|
it_behaves_like 'having a job', status
end
@@ -265,7 +281,7 @@ describe HasStatus do
describe '.alive' do
subject { CommitStatus.alive }
- %i[running pending preparing created].each do |status|
+ %i[running pending waiting_for_resource preparing created].each do |status|
it_behaves_like 'containing the job', status
end
@@ -277,7 +293,7 @@ describe HasStatus do
describe '.alive_or_scheduled' do
subject { CommitStatus.alive_or_scheduled }
- %i[running pending preparing created scheduled].each do |status|
+ %i[running pending waiting_for_resource preparing created scheduled].each do |status|
it_behaves_like 'containing the job', status
end
@@ -313,7 +329,7 @@ describe HasStatus do
describe '.cancelable' do
subject { CommitStatus.cancelable }
- %i[running pending preparing created scheduled].each do |status|
+ %i[running pending waiting_for_resource preparing created scheduled].each do |status|
it_behaves_like 'containing the job', status
end
diff --git a/spec/models/concerns/ignorable_columns_spec.rb b/spec/models/concerns/ignorable_columns_spec.rb
index 55efa1b5fda..018b1296c62 100644
--- a/spec/models/concerns/ignorable_columns_spec.rb
+++ b/spec/models/concerns/ignorable_columns_spec.rb
@@ -49,11 +49,13 @@ describe IgnorableColumns do
context 'with single column' do
let(:columns) { :name }
+
it_behaves_like 'storing removal information'
end
context 'with array column' do
let(:columns) { %i[name created_at] }
+
it_behaves_like 'storing removal information'
end
diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb
index 76a3a825978..3e5c16c2491 100644
--- a/spec/models/concerns/issuable_spec.rb
+++ b/spec/models/concerns/issuable_spec.rb
@@ -53,43 +53,6 @@ describe Issuable do
it_behaves_like 'validates description length with custom validation'
it_behaves_like 'truncates the description to its allowed maximum length on import'
end
-
- describe 'milestone' do
- let(:project) { create(:project) }
- let(:milestone_id) { create(:milestone, project: project).id }
- let(:params) do
- {
- title: 'something',
- project: project,
- author: build(:user),
- milestone_id: milestone_id
- }
- end
-
- subject { issuable_class.new(params) }
-
- context 'with correct params' do
- it { is_expected.to be_valid }
- end
-
- context 'with empty string milestone' do
- let(:milestone_id) { '' }
-
- it { is_expected.to be_valid }
- end
-
- context 'with nil milestone id' do
- let(:milestone_id) { nil }
-
- it { is_expected.to be_valid }
- end
-
- context 'with a milestone id from another project' do
- let(:milestone_id) { create(:milestone).id }
-
- it { is_expected.to be_invalid }
- end
- end
end
describe "Scope" do
@@ -141,48 +104,6 @@ describe Issuable do
end
end
- describe '#milestone_available?' do
- let(:group) { create(:group) }
- let(:project) { create(:project, group: group) }
- let(:issue) { create(:issue, project: project) }
-
- def build_issuable(milestone_id)
- issuable_class.new(project: project, milestone_id: milestone_id)
- end
-
- it 'returns true with a milestone from the issue project' do
- milestone = create(:milestone, project: project)
-
- expect(build_issuable(milestone.id).milestone_available?).to be_truthy
- end
-
- it 'returns true with a milestone from the issue project group' do
- milestone = create(:milestone, group: group)
-
- expect(build_issuable(milestone.id).milestone_available?).to be_truthy
- end
-
- it 'returns true with a milestone from the the parent of the issue project group' do
- parent = create(:group)
- group.update(parent: parent)
- milestone = create(:milestone, group: parent)
-
- expect(build_issuable(milestone.id).milestone_available?).to be_truthy
- end
-
- it 'returns false with a milestone from another project' do
- milestone = create(:milestone)
-
- expect(build_issuable(milestone.id).milestone_available?).to be_falsey
- end
-
- it 'returns false with a milestone from another group' do
- milestone = create(:milestone, group: create(:group))
-
- expect(build_issuable(milestone.id).milestone_available?).to be_falsey
- end
- end
-
describe ".search" do
let!(:searchable_issue) { create(:issue, title: "Searchable awesome issue") }
let!(:searchable_issue2) { create(:issue, title: 'Aw') }
@@ -405,7 +326,7 @@ describe Issuable do
context 'when all of the results are level on the sort key' do
let!(:issues) do
- 10.times { create(:issue, project: project) }
+ create_list(:issue, 10, project: project)
end
it 'has no duplicates across pages' do
@@ -809,27 +730,6 @@ describe Issuable do
end
end
- describe '#supports_milestone?' do
- let(:group) { create(:group) }
- let(:project) { create(:project, group: group) }
-
- context "for issues" do
- let(:issue) { build(:issue, project: project) }
-
- it 'returns true' do
- expect(issue.supports_milestone?).to be_truthy
- end
- end
-
- context "for merge requests" do
- let(:merge_request) { build(:merge_request, target_project: project, source_project: project) }
-
- it 'returns true' do
- expect(merge_request.supports_milestone?).to be_truthy
- end
- end
- end
-
describe '#matches_cross_reference_regex?' do
context "issue description with long path string" do
let(:mentionable) { build(:issue, description: "/a" * 50000) }
@@ -854,91 +754,4 @@ describe Issuable do
it_behaves_like 'matches_cross_reference_regex? fails fast'
end
end
-
- describe 'release scopes' do
- let_it_be(:project) { create(:project) }
- let(:forked_project) { fork_project(project) }
-
- let_it_be(:release_1) { create(:release, tag: 'v1.0', project: project) }
- let_it_be(:release_2) { create(:release, tag: 'v2.0', project: project) }
- let_it_be(:release_3) { create(:release, tag: 'v3.0', project: project) }
- let_it_be(:release_4) { create(:release, tag: 'v4.0', project: project) }
-
- let_it_be(:milestone_1) { create(:milestone, releases: [release_1], title: 'm1', project: project) }
- let_it_be(:milestone_2) { create(:milestone, releases: [release_1, release_2], title: 'm2', project: project) }
- let_it_be(:milestone_3) { create(:milestone, releases: [release_2, release_4], title: 'm3', project: project) }
- let_it_be(:milestone_4) { create(:milestone, releases: [release_3], title: 'm4', project: project) }
- let_it_be(:milestone_5) { create(:milestone, releases: [release_3], title: 'm5', project: project) }
- let_it_be(:milestone_6) { create(:milestone, title: 'm6', project: project) }
-
- let_it_be(:issue_1) { create(:issue, milestone: milestone_1, project: project) }
- let_it_be(:issue_2) { create(:issue, milestone: milestone_1, project: project) }
- let_it_be(:issue_3) { create(:issue, milestone: milestone_2, project: project) }
- let_it_be(:issue_4) { create(:issue, milestone: milestone_5, project: project) }
- let_it_be(:issue_5) { create(:issue, milestone: milestone_6, project: project) }
- let_it_be(:issue_6) { create(:issue, project: project) }
-
- let(:mr_1) { create(:merge_request, milestone: milestone_1, target_project: project, source_project: project) }
- let(:mr_2) { create(:merge_request, milestone: milestone_3, target_project: project, source_project: forked_project) }
- let(:mr_3) { create(:merge_request, source_project: project) }
-
- let_it_be(:issue_items) { Issue.all }
- let(:mr_items) { MergeRequest.all }
-
- describe '#without_release' do
- it 'returns the issues or mrs not tied to any milestone and the ones tied to milestone with no release' do
- expect(issue_items.without_release).to contain_exactly(issue_5, issue_6)
- expect(mr_items.without_release).to contain_exactly(mr_3)
- end
- end
-
- describe '#any_release' do
- it 'returns all issues or all mrs tied to a release' do
- expect(issue_items.any_release).to contain_exactly(issue_1, issue_2, issue_3, issue_4)
- expect(mr_items.any_release).to contain_exactly(mr_1, mr_2)
- end
- end
-
- describe '#with_release' do
- it 'returns the issues tied to a specfic release' do
- expect(issue_items.with_release('v1.0', project.id)).to contain_exactly(issue_1, issue_2, issue_3)
- end
-
- it 'returns the mrs tied to a specific release' do
- expect(mr_items.with_release('v1.0', project.id)).to contain_exactly(mr_1)
- end
-
- context 'when a release has a milestone with one issue and another one with no issue' do
- it 'returns that one issue' do
- expect(issue_items.with_release('v2.0', project.id)).to contain_exactly(issue_3)
- end
-
- context 'when the milestone with no issue is added as a filter' do
- it 'returns an empty list' do
- expect(issue_items.with_release('v2.0', project.id).with_milestone('m3')).to be_empty
- end
- end
-
- context 'when the milestone with the issue is added as a filter' do
- it 'returns this issue' do
- expect(issue_items.with_release('v2.0', project.id).with_milestone('m2')).to contain_exactly(issue_3)
- end
- end
- end
-
- context 'when there is no issue or mr under a specific release' do
- it 'returns no issue or no mr' do
- expect(issue_items.with_release('v4.0', project.id)).to be_empty
- expect(mr_items.with_release('v4.0', project.id)).to be_empty
- end
- end
-
- context 'when a non-existent release tag is passed in' do
- it 'returns no issue or no mr' do
- expect(issue_items.with_release('v999.0', project.id)).to be_empty
- expect(mr_items.with_release('v999.0', project.id)).to be_empty
- end
- end
- end
- end
end
diff --git a/spec/models/concerns/loaded_in_group_list_spec.rb b/spec/models/concerns/loaded_in_group_list_spec.rb
index 7c97b580779..509811822e0 100644
--- a/spec/models/concerns/loaded_in_group_list_spec.rb
+++ b/spec/models/concerns/loaded_in_group_list_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
describe LoadedInGroupList do
let(:parent) { create(:group) }
+
subject(:found_group) { Group.with_selects_for_list.find_by(id: parent.id) }
describe '.with_selects_for_list' do
diff --git a/spec/models/concerns/milestoneable_spec.rb b/spec/models/concerns/milestoneable_spec.rb
new file mode 100644
index 00000000000..186bf2c6290
--- /dev/null
+++ b/spec/models/concerns/milestoneable_spec.rb
@@ -0,0 +1,243 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Milestoneable do
+ let(:user) { create(:user) }
+ let(:milestone) { create(:milestone, project: project) }
+
+ shared_examples_for 'an object that can be assigned a milestone' do
+ describe 'Validation' do
+ describe 'milestone' do
+ let(:project) { create(:project, :repository) }
+ let(:milestone_id) { milestone.id }
+
+ subject { milestoneable_class.new(params) }
+
+ context 'with correct params' do
+ it { is_expected.to be_valid }
+ end
+
+ context 'with empty string milestone' do
+ let(:milestone_id) { '' }
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'with nil milestone id' do
+ let(:milestone_id) { nil }
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'with a milestone id from another project' do
+ let(:milestone_id) { create(:milestone).id }
+
+ it { is_expected.to be_invalid }
+ end
+
+ context 'when valid and saving' do
+ it 'copies the value to the new milestones relationship' do
+ subject.save!
+
+ expect(subject.milestones).to match_array([milestone])
+ end
+
+ context 'with old values in milestones relationship' do
+ let(:old_milestone) { create(:milestone, project: project) }
+
+ before do
+ subject.milestone = old_milestone
+ subject.save!
+ end
+
+ it 'replaces old values' do
+ expect(subject.milestones).to match_array([old_milestone])
+
+ subject.milestone = milestone
+ subject.save!
+
+ expect(subject.milestones).to match_array([milestone])
+ end
+
+ it 'can nullify the milestone' do
+ expect(subject.milestones).to match_array([old_milestone])
+
+ subject.milestone = nil
+ subject.save!
+
+ expect(subject.milestones).to match_array([])
+ end
+ end
+ end
+ end
+ end
+
+ describe '#milestone_available?' do
+ let(:group) { create(:group) }
+ let(:project) { create(:project, group: group) }
+ let(:issue) { create(:issue, project: project) }
+
+ def build_milestoneable(milestone_id)
+ milestoneable_class.new(project: project, milestone_id: milestone_id)
+ end
+
+ it 'returns true with a milestone from the issue project' do
+ milestone = create(:milestone, project: project)
+
+ expect(build_milestoneable(milestone.id).milestone_available?).to be_truthy
+ end
+
+ it 'returns true with a milestone from the issue project group' do
+ milestone = create(:milestone, group: group)
+
+ expect(build_milestoneable(milestone.id).milestone_available?).to be_truthy
+ end
+
+ it 'returns true with a milestone from the parent of the issue project group' do
+ parent = create(:group)
+ group.update(parent: parent)
+ milestone = create(:milestone, group: parent)
+
+ expect(build_milestoneable(milestone.id).milestone_available?).to be_truthy
+ end
+
+ it 'returns false with a milestone from another project' do
+ milestone = create(:milestone)
+
+ expect(build_milestoneable(milestone.id).milestone_available?).to be_falsey
+ end
+
+ it 'returns false with a milestone from another group' do
+ milestone = create(:milestone, group: create(:group))
+
+ expect(build_milestoneable(milestone.id).milestone_available?).to be_falsey
+ end
+ end
+ end
+
+ describe '#supports_milestone?' do
+ let(:group) { create(:group) }
+ let(:project) { create(:project, group: group) }
+
+ context "for issues" do
+ let(:issue) { build(:issue, project: project) }
+
+ it 'returns true' do
+ expect(issue.supports_milestone?).to be_truthy
+ end
+ end
+
+ context "for merge requests" do
+ let(:merge_request) { build(:merge_request, target_project: project, source_project: project) }
+
+ it 'returns true' do
+ expect(merge_request.supports_milestone?).to be_truthy
+ end
+ end
+ end
+
+ describe 'release scopes' do
+ let_it_be(:project) { create(:project) }
+
+ let_it_be(:release_1) { create(:release, tag: 'v1.0', project: project) }
+ let_it_be(:release_2) { create(:release, tag: 'v2.0', project: project) }
+ let_it_be(:release_3) { create(:release, tag: 'v3.0', project: project) }
+ let_it_be(:release_4) { create(:release, tag: 'v4.0', project: project) }
+
+ let_it_be(:milestone_1) { create(:milestone, releases: [release_1], title: 'm1', project: project) }
+ let_it_be(:milestone_2) { create(:milestone, releases: [release_1, release_2], title: 'm2', project: project) }
+ let_it_be(:milestone_3) { create(:milestone, releases: [release_2, release_4], title: 'm3', project: project) }
+ let_it_be(:milestone_4) { create(:milestone, releases: [release_3], title: 'm4', project: project) }
+ let_it_be(:milestone_5) { create(:milestone, releases: [release_3], title: 'm5', project: project) }
+ let_it_be(:milestone_6) { create(:milestone, title: 'm6', project: project) }
+
+ let_it_be(:issue_1) { create(:issue, milestone: milestone_1, project: project) }
+ let_it_be(:issue_2) { create(:issue, milestone: milestone_1, project: project) }
+ let_it_be(:issue_3) { create(:issue, milestone: milestone_2, project: project) }
+ let_it_be(:issue_4) { create(:issue, milestone: milestone_5, project: project) }
+ let_it_be(:issue_5) { create(:issue, milestone: milestone_6, project: project) }
+ let_it_be(:issue_6) { create(:issue, project: project) }
+
+ let_it_be(:items) { Issue.all }
+
+ describe '#without_release' do
+ it 'returns the issues not tied to any milestone and the ones tied to a milestone with no release' do
+ expect(items.without_release).to contain_exactly(issue_5, issue_6)
+ end
+ end
+
+ describe '#any_release' do
+ it 'returns all issues tied to a release' do
+ expect(items.any_release).to contain_exactly(issue_1, issue_2, issue_3, issue_4)
+ end
+ end
+
+ describe '#with_release' do
+ it 'returns the issues tied to a specific release' do
+ expect(items.with_release('v1.0', project.id)).to contain_exactly(issue_1, issue_2, issue_3)
+ end
+
+ context 'when a release has a milestone with one issue and another one with no issue' do
+ it 'returns that one issue' do
+ expect(items.with_release('v2.0', project.id)).to contain_exactly(issue_3)
+ end
+
+ context 'when the milestone with no issue is added as a filter' do
+ it 'returns an empty list' do
+ expect(items.with_release('v2.0', project.id).with_milestone('m3')).to be_empty
+ end
+ end
+
+ context 'when the milestone with the issue is added as a filter' do
+ it 'returns this issue' do
+ expect(items.with_release('v2.0', project.id).with_milestone('m2')).to contain_exactly(issue_3)
+ end
+ end
+ end
+
+ context 'when there is no issue under a specific release' do
+ it 'returns no issue' do
+ expect(items.with_release('v4.0', project.id)).to be_empty
+ end
+ end
+
+ context 'when a non-existent release tag is passed in' do
+ it 'returns no issue' do
+ expect(items.with_release('v999.0', project.id)).to be_empty
+ end
+ end
+ end
+ end
+
+ context 'Issues' do
+ let(:milestoneable_class) { Issue }
+ let(:params) do
+ {
+ title: 'something',
+ project: project,
+ author: user,
+ milestone_id: milestone_id
+ }
+ end
+
+ it_behaves_like 'an object that can be assigned a milestone'
+ end
+
+ context 'MergeRequests' do
+ let(:milestoneable_class) { MergeRequest }
+ let(:params) do
+ {
+ title: 'something',
+ source_project: project,
+ target_project: project,
+ source_branch: 'feature',
+ target_branch: 'master',
+ author: user,
+ milestone_id: milestone_id
+ }
+ end
+
+ it_behaves_like 'an object that can be assigned a milestone'
+ end
+end
diff --git a/spec/models/concerns/prometheus_adapter_spec.rb b/spec/models/concerns/prometheus_adapter_spec.rb
index 3d26ba95192..3ac96b308ed 100644
--- a/spec/models/concerns/prometheus_adapter_spec.rb
+++ b/spec/models/concerns/prometheus_adapter_spec.rb
@@ -103,6 +103,7 @@ describe PrometheusAdapter, :use_clean_rails_memory_store_caching do
describe '#calculate_reactive_cache' do
let(:environment) { create(:environment, slug: 'env-slug') }
+
before do
service.manual_configuration = true
service.active = true
diff --git a/spec/models/concerns/resolvable_note_spec.rb b/spec/models/concerns/resolvable_note_spec.rb
index 4f46252a044..12e50ac807e 100644
--- a/spec/models/concerns/resolvable_note_spec.rb
+++ b/spec/models/concerns/resolvable_note_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
describe Note, ResolvableNote do
let(:project) { create(:project, :repository) }
let(:merge_request) { create(:merge_request, source_project: project) }
+
subject { create(:discussion_note_on_merge_request, noteable: merge_request, project: project) }
context 'resolvability scopes' do
diff --git a/spec/models/concerns/safe_url_spec.rb b/spec/models/concerns/safe_url_spec.rb
index 0ad26660a60..e523e6a15e4 100644
--- a/spec/models/concerns/safe_url_spec.rb
+++ b/spec/models/concerns/safe_url_spec.rb
@@ -4,17 +4,19 @@ require 'spec_helper'
describe SafeUrl do
describe '#safe_url' do
- class SafeUrlTestClass
- include SafeUrl
+ let(:safe_url_test_class) do
+ Class.new do
+ include SafeUrl
- attr_reader :url
+ attr_reader :url
- def initialize(url)
- @url = url
+ def initialize(url)
+ @url = url
+ end
end
end
- let(:test_class) { SafeUrlTestClass.new(url) }
+ let(:test_class) { safe_url_test_class.new(url) }
let(:url) { 'http://example.com' }
subject { test_class.safe_url }
diff --git a/spec/models/concerns/schedulable_spec.rb b/spec/models/concerns/schedulable_spec.rb
new file mode 100644
index 00000000000..38ae2112e01
--- /dev/null
+++ b/spec/models/concerns/schedulable_spec.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Schedulable do
+ shared_examples 'before_save callback' do
+ it 'updates next_run_at' do
+ expect { object.save! }.to change { object.next_run_at }
+ end
+ end
+
+ shared_examples '.runnable_schedules' do
+ it 'returns the runnable schedules' do
+ results = object.class.runnable_schedules
+
+ expect(results).to include(object)
+ expect(results).not_to include(non_runnable_object)
+ end
+ end
+
+ shared_examples '#schedule_next_run!' do
+ it 'saves the object and sets next_run_at' do
+ expect { object.schedule_next_run! }.to change { object.next_run_at }
+ end
+
+ it 'sets next_run_at to nil on error' do
+ expect(object).to receive(:save!).and_raise(ActiveRecord::RecordInvalid)
+
+ object.schedule_next_run!
+
+ expect(object.next_run_at).to be_nil
+ end
+ end
+
+ context 'for a pipeline_schedule' do
+ # let is used so next_run_at is reset before each spec
+ let(:object) do
+ Timecop.freeze(1.day.ago) do
+ create(:ci_pipeline_schedule, :hourly)
+ end
+ end
+
+ let(:non_runnable_object) { create(:ci_pipeline_schedule, :hourly) }
+
+ it_behaves_like '#schedule_next_run!'
+ it_behaves_like 'before_save callback'
+ it_behaves_like '.runnable_schedules'
+ end
+
+ context 'for a container_expiration_policy' do
+ # let is used to reset the next_run_at value before each spec
+ let(:object) { create(:container_expiration_policy, :runnable) }
+ let(:non_runnable_object) { create(:container_expiration_policy) }
+
+ it_behaves_like '#schedule_next_run!'
+ it_behaves_like 'before_save callback'
+ it_behaves_like '.runnable_schedules'
+ end
+
+ describe '#set_next_run_at' do
+ let(:schedulable_instance) do
+ Class.new(ActiveRecord::Base) do
+ include Schedulable
+
+ # we need a table for the dummy class to operate on
+ self.table_name = 'users'
+ end.new
+ end
+
+ it 'raises NotImplementedError' do
+ expect { schedulable_instance.set_next_run_at }.to raise_error(NotImplementedError)
+ end
+ end
+end
diff --git a/spec/models/concerns/token_authenticatable_spec.rb b/spec/models/concerns/token_authenticatable_spec.rb
index 43b894b5957..36eb8fdaba4 100644
--- a/spec/models/concerns/token_authenticatable_spec.rb
+++ b/spec/models/concerns/token_authenticatable_spec.rb
@@ -13,6 +13,7 @@ end
describe User, 'TokenAuthenticatable' do
let(:token_field) { :feed_token }
+
it_behaves_like 'TokenAuthenticatable'
describe 'ensures authentication token' do
diff --git a/spec/models/container_expiration_policy_spec.rb b/spec/models/container_expiration_policy_spec.rb
index 1ce76490448..1bce4c3b20a 100644
--- a/spec/models/container_expiration_policy_spec.rb
+++ b/spec/models/container_expiration_policy_spec.rb
@@ -38,4 +38,38 @@ RSpec.describe ContainerExpirationPolicy, type: :model do
it { is_expected.not_to allow_value('foo').for(:keep_n) }
end
end
+
+ describe '.preloaded' do
+ subject { described_class.preloaded }
+
+ before do
+ create_list(:container_expiration_policy, 3)
+ end
+
+ it 'preloads the associations' do
+ subject
+
+ query = ActiveRecord::QueryRecorder.new { subject.each(&:project) }
+
+ expect(query.count).to eq(2)
+ end
+ end
+
+ describe '.runnable_schedules' do
+ subject { described_class.runnable_schedules }
+
+ let!(:policy) { create(:container_expiration_policy, :runnable) }
+
+ it 'returns the runnable schedule' do
+ is_expected.to eq([policy])
+ end
+
+ context 'when there are no runnable schedules' do
+ let!(:policy) { }
+
+ it 'returns an empty array' do
+ is_expected.to be_empty
+ end
+ end
+ end
end
diff --git a/spec/models/cycle_analytics/code_spec.rb b/spec/models/cycle_analytics/code_spec.rb
index 808659552ff..441f8265629 100644
--- a/spec/models/cycle_analytics/code_spec.rb
+++ b/spec/models/cycle_analytics/code_spec.rb
@@ -5,11 +5,12 @@ require 'spec_helper'
describe 'CycleAnalytics#code' do
extend CycleAnalyticsHelpers::TestGeneration
- let(:project) { create(:project, :repository) }
- let(:from_date) { 10.days.ago }
- let(:user) { create(:user, :admin) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:from_date) { 10.days.ago }
+ let_it_be(:user) { create(:user, :admin) }
+ let_it_be(:project_level) { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date }) }
- subject { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date }) }
+ subject { project_level }
context 'with deployment' do
generate_cycle_analytics_spec(
@@ -24,8 +25,6 @@ describe 'CycleAnalytics#code' do
context.create_merge_request_closing_issue(context.user, context.project, data[:issue])
end]],
post_fn: -> (context, data) do
- context.merge_merge_requests_closing_issue(context.user, context.project, data[:issue])
- context.deploy_master(context.user, context.project)
end)
context "when a regular merge request (that doesn't close the issue) is created" do
@@ -56,7 +55,6 @@ describe 'CycleAnalytics#code' do
context.create_merge_request_closing_issue(context.user, context.project, data[:issue])
end]],
post_fn: -> (context, data) do
- context.merge_merge_requests_closing_issue(context.user, context.project, data[:issue])
end)
context "when a regular merge request (that doesn't close the issue) is created" do
diff --git a/spec/models/cycle_analytics/group_level_spec.rb b/spec/models/cycle_analytics/group_level_spec.rb
index 0d2c14c29dd..03fe8c3b50b 100644
--- a/spec/models/cycle_analytics/group_level_spec.rb
+++ b/spec/models/cycle_analytics/group_level_spec.rb
@@ -3,12 +3,12 @@
require 'spec_helper'
describe CycleAnalytics::GroupLevel do
- let(:group) { create(:group)}
- let(:project) { create(:project, :repository, namespace: group) }
- let(:from_date) { 10.days.ago }
- let(:user) { create(:user, :admin) }
+ let_it_be(:group) { create(:group)}
+ let_it_be(:project) { create(:project, :repository, namespace: group) }
+ let_it_be(:from_date) { 10.days.ago }
+ let_it_be(:user) { create(:user, :admin) }
let(:issue) { create(:issue, project: project, created_at: 2.days.ago) }
- let(:milestone) { create(:milestone, project: project) }
+ let_it_be(:milestone) { create(:milestone, project: project) }
let(:mr) { create_merge_request_closing_issue(user, project, issue, commit_message: "References #{issue.to_reference}") }
let(:pipeline) { create(:ci_empty_pipeline, status: 'created', project: project, ref: mr.source_branch, sha: mr.source_branch_sha, head_pipeline_of: mr) }
diff --git a/spec/models/cycle_analytics/issue_spec.rb b/spec/models/cycle_analytics/issue_spec.rb
index 8cdf83b1292..726f2f8b018 100644
--- a/spec/models/cycle_analytics/issue_spec.rb
+++ b/spec/models/cycle_analytics/issue_spec.rb
@@ -5,11 +5,12 @@ require 'spec_helper'
describe 'CycleAnalytics#issue' do
extend CycleAnalyticsHelpers::TestGeneration
- let(:project) { create(:project, :repository) }
- let(:from_date) { 10.days.ago }
- let(:user) { create(:user, :admin) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:from_date) { 10.days.ago }
+ let_it_be(:user) { create(:user, :admin) }
+ let_it_be(:project_level) { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date }) }
- subject { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date }) }
+ subject { project_level }
generate_cycle_analytics_spec(
phase: :issue,
@@ -28,10 +29,6 @@ describe 'CycleAnalytics#issue' do
end
end]],
post_fn: -> (context, data) do
- if data[:issue].persisted?
- context.create_merge_request_closing_issue(context.user, context.project, data[:issue].reload)
- context.merge_merge_requests_closing_issue(context.user, context.project, data[:issue])
- end
end)
context "when a regular label (instead of a list label) is added to the issue" do
diff --git a/spec/models/cycle_analytics/plan_spec.rb b/spec/models/cycle_analytics/plan_spec.rb
index 28ad9bd194d..3bd9f317ca7 100644
--- a/spec/models/cycle_analytics/plan_spec.rb
+++ b/spec/models/cycle_analytics/plan_spec.rb
@@ -5,17 +5,18 @@ require 'spec_helper'
describe 'CycleAnalytics#plan' do
extend CycleAnalyticsHelpers::TestGeneration
- let(:project) { create(:project, :repository) }
- let(:from_date) { 10.days.ago }
- let(:user) { create(:user, :admin) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:from_date) { 10.days.ago }
+ let_it_be(:user) { create(:user, :admin) }
+ let_it_be(:project_level) { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date }) }
- subject { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date }) }
+ subject { project_level }
generate_cycle_analytics_spec(
phase: :plan,
data_fn: -> (context) do
{
- issue: context.create(:issue, project: context.project),
+ issue: context.build(:issue, project: context.project),
branch_name: context.generate(:branch)
}
end,
@@ -32,8 +33,6 @@ describe 'CycleAnalytics#plan' do
context.create_commit_referencing_issue(data[:issue], branch_name: data[:branch_name])
end]],
post_fn: -> (context, data) do
- context.create_merge_request_closing_issue(context.user, context.project, data[:issue], source_branch: data[:branch_name])
- context.merge_merge_requests_closing_issue(context.user, context.project, data[:issue])
end)
context "when a regular label (instead of a list label) is added to the issue" do
diff --git a/spec/models/cycle_analytics/production_spec.rb b/spec/models/cycle_analytics/production_spec.rb
index 613c1786540..01d88bbeec9 100644
--- a/spec/models/cycle_analytics/production_spec.rb
+++ b/spec/models/cycle_analytics/production_spec.rb
@@ -5,11 +5,12 @@ require 'spec_helper'
describe 'CycleAnalytics#production' do
extend CycleAnalyticsHelpers::TestGeneration
- let(:project) { create(:project, :repository) }
- let(:from_date) { 10.days.ago }
- let(:user) { create(:user, :admin) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:from_date) { 10.days.ago }
+ let_it_be(:user) { create(:user, :admin) }
+ let_it_be(:project_level) { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date }) }
- subject { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date }) }
+ subject { project_level }
generate_cycle_analytics_spec(
phase: :production,
@@ -24,13 +25,7 @@ describe 'CycleAnalytics#production' do
["production deploy happens after merge request is merged (along with other changes)",
lambda do |context, data|
# Make other changes on master
- sha = context.project.repository.create_file(
- context.user,
- context.generate(:branch),
- 'content',
- message: 'commit message',
- branch_name: 'master')
- context.project.repository.commit(sha)
+ context.project.repository.commit("sha_that_does_not_matter")
context.deploy_master(context.user, context.project)
end]])
@@ -47,7 +42,7 @@ describe 'CycleAnalytics#production' do
context "when the deployment happens to a non-production environment" do
it "returns nil" do
- issue = create(:issue, project: project)
+ issue = build(:issue, project: project)
merge_request = create_merge_request_closing_issue(user, project, issue)
MergeRequests::MergeService.new(project, user).execute(merge_request)
deploy_master(user, project, environment: 'staging')
diff --git a/spec/models/cycle_analytics/project_level_spec.rb b/spec/models/cycle_analytics/project_level_spec.rb
index 351eb139416..2fc81777746 100644
--- a/spec/models/cycle_analytics/project_level_spec.rb
+++ b/spec/models/cycle_analytics/project_level_spec.rb
@@ -3,11 +3,11 @@
require 'spec_helper'
describe CycleAnalytics::ProjectLevel do
- let(:project) { create(:project, :repository) }
- let(:from_date) { 10.days.ago }
- let(:user) { create(:user, :admin) }
- let(:issue) { create(:issue, project: project, created_at: 2.days.ago) }
- let(:milestone) { create(:milestone, project: project) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:from_date) { 10.days.ago }
+ let_it_be(:user) { create(:user, :admin) }
+ let_it_be(:issue) { create(:issue, project: project, created_at: 2.days.ago) }
+ let_it_be(:milestone) { create(:milestone, project: project) }
let(:mr) { create_merge_request_closing_issue(user, project, issue, commit_message: "References #{issue.to_reference}") }
let(:pipeline) { create(:ci_empty_pipeline, status: 'created', project: project, ref: mr.source_branch, sha: mr.source_branch_sha, head_pipeline_of: mr) }
diff --git a/spec/models/cycle_analytics/review_spec.rb b/spec/models/cycle_analytics/review_spec.rb
index ef88fd86340..50670188e85 100644
--- a/spec/models/cycle_analytics/review_spec.rb
+++ b/spec/models/cycle_analytics/review_spec.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
describe 'CycleAnalytics#review' do
extend CycleAnalyticsHelpers::TestGeneration
- let(:project) { create(:project, :repository) }
- let(:from_date) { 10.days.ago }
- let(:user) { create(:user, :admin) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:from_date) { 10.days.ago }
+ let_it_be(:user) { create(:user, :admin) }
subject { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date }) }
diff --git a/spec/models/cycle_analytics/staging_spec.rb b/spec/models/cycle_analytics/staging_spec.rb
index 571792559d8..cf0695f175a 100644
--- a/spec/models/cycle_analytics/staging_spec.rb
+++ b/spec/models/cycle_analytics/staging_spec.rb
@@ -5,11 +5,12 @@ require 'spec_helper'
describe 'CycleAnalytics#staging' do
extend CycleAnalyticsHelpers::TestGeneration
- let(:project) { create(:project, :repository) }
- let(:from_date) { 10.days.ago }
- let(:user) { create(:user, :admin) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:from_date) { 10.days.ago }
+ let_it_be(:user) { create(:user, :admin) }
+ let_it_be(:project_level) { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date }) }
- subject { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date }) }
+ subject { project_level }
generate_cycle_analytics_spec(
phase: :staging,
@@ -28,14 +29,7 @@ describe 'CycleAnalytics#staging' do
["production deploy happens after merge request is merged (along with other changes)",
lambda do |context, data|
# Make other changes on master
- sha = context.project.repository.create_file(
- context.user,
- context.generate(:branch),
- 'content',
- message: 'commit message',
- branch_name: 'master')
- context.project.repository.commit(sha)
-
+ context.project.repository.commit("this_sha_apparently_does_not_matter")
context.deploy_master(context.user, context.project)
end]])
diff --git a/spec/models/cycle_analytics/test_spec.rb b/spec/models/cycle_analytics/test_spec.rb
index 7b3001d2bd8..24800aafca7 100644
--- a/spec/models/cycle_analytics/test_spec.rb
+++ b/spec/models/cycle_analytics/test_spec.rb
@@ -5,16 +5,19 @@ require 'spec_helper'
describe 'CycleAnalytics#test' do
extend CycleAnalyticsHelpers::TestGeneration
- let(:project) { create(:project, :repository) }
- let(:from_date) { 10.days.ago }
- let(:user) { create(:user, :admin) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:from_date) { 10.days.ago }
+ let_it_be(:user) { create(:user, :admin) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:project_level) { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date }) }
+ let!(:merge_request) { create_merge_request_closing_issue(user, project, issue) }
- subject { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date }) }
+ subject { project_level }
generate_cycle_analytics_spec(
phase: :test,
data_fn: lambda do |context|
- issue = context.create(:issue, project: context.project)
+ issue = context.issue
merge_request = context.create_merge_request_closing_issue(context.user, context.project, issue)
pipeline = context.create(:ci_pipeline, ref: merge_request.source_branch, sha: merge_request.diff_head_sha, project: context.project, head_pipeline_of: merge_request)
{ pipeline: pipeline, issue: issue }
@@ -22,20 +25,15 @@ describe 'CycleAnalytics#test' do
start_time_conditions: [["pipeline is started", -> (context, data) { data[:pipeline].run! }]],
end_time_conditions: [["pipeline is finished", -> (context, data) { data[:pipeline].succeed! }]],
post_fn: -> (context, data) do
- context.merge_merge_requests_closing_issue(context.user, context.project, data[:issue])
end)
context "when the pipeline is for a regular merge request (that doesn't close an issue)" do
it "returns nil" do
- issue = create(:issue, project: project)
- merge_request = create_merge_request_closing_issue(user, project, issue)
pipeline = create(:ci_pipeline, ref: "refs/heads/#{merge_request.source_branch}", sha: merge_request.diff_head_sha)
pipeline.run!
pipeline.succeed!
- merge_merge_requests_closing_issue(user, project, issue)
-
expect(subject[:test].project_median).to be_nil
end
end
@@ -53,30 +51,22 @@ describe 'CycleAnalytics#test' do
context "when the pipeline is dropped (failed)" do
it "returns nil" do
- issue = create(:issue, project: project)
- merge_request = create_merge_request_closing_issue(user, project, issue)
pipeline = create(:ci_pipeline, ref: "refs/heads/#{merge_request.source_branch}", sha: merge_request.diff_head_sha)
pipeline.run!
pipeline.drop!
- merge_merge_requests_closing_issue(user, project, issue)
-
expect(subject[:test].project_median).to be_nil
end
end
context "when the pipeline is cancelled" do
it "returns nil" do
- issue = create(:issue, project: project)
- merge_request = create_merge_request_closing_issue(user, project, issue)
pipeline = create(:ci_pipeline, ref: "refs/heads/#{merge_request.source_branch}", sha: merge_request.diff_head_sha)
pipeline.run!
pipeline.cancel!
- merge_merge_requests_closing_issue(user, project, issue)
-
expect(subject[:test].project_median).to be_nil
end
end
diff --git a/spec/models/deployment_metrics_spec.rb b/spec/models/deployment_metrics_spec.rb
index 7c574a8b6c8..32c04e15b73 100644
--- a/spec/models/deployment_metrics_spec.rb
+++ b/spec/models/deployment_metrics_spec.rb
@@ -20,7 +20,7 @@ describe DeploymentMetrics do
end
context 'with a Prometheus Service' do
- let(:prometheus_service) { instance_double(PrometheusService, can_query?: true) }
+ let(:prometheus_service) { instance_double(PrometheusService, can_query?: true, configured?: true) }
before do
allow(deployment.project).to receive(:find_or_initialize_service).with('prometheus').and_return prometheus_service
@@ -30,7 +30,17 @@ describe DeploymentMetrics do
end
context 'with a Prometheus Service that cannot query' do
- let(:prometheus_service) { instance_double(PrometheusService, can_query?: false) }
+ let(:prometheus_service) { instance_double(PrometheusService, configured?: true, can_query?: false) }
+
+ before do
+ allow(deployment.project).to receive(:find_or_initialize_service).with('prometheus').and_return prometheus_service
+ end
+
+ it { is_expected.to be_falsy }
+ end
+
+ context 'with a Prometheus Service that is not configured' do
+ let(:prometheus_service) { instance_double(PrometheusService, configured?: false, can_query?: false) }
before do
allow(deployment.project).to receive(:find_or_initialize_service).with('prometheus').and_return prometheus_service
@@ -44,7 +54,7 @@ describe DeploymentMetrics do
let!(:prometheus) { create(:clusters_applications_prometheus, :installed, cluster: deployment.cluster) }
before do
- expect(deployment.cluster.application_prometheus).to receive(:can_query?).and_return(true)
+ expect(deployment.cluster.application_prometheus).to receive(:configured?).and_return(true)
end
it { is_expected.to be_truthy }
@@ -54,7 +64,7 @@ describe DeploymentMetrics do
describe '#metrics' do
let(:deployment) { create(:deployment, :success) }
- let(:prometheus_adapter) { instance_double(PrometheusService, can_query?: true) }
+ let(:prometheus_adapter) { instance_double(PrometheusService, can_query?: true, configured?: true) }
let(:deployment_metrics) { described_class.new(deployment.project, deployment) }
subject { deployment_metrics.metrics }
@@ -101,7 +111,7 @@ describe DeploymentMetrics do
}
end
- let(:prometheus_adapter) { instance_double('prometheus_adapter', can_query?: true) }
+ let(:prometheus_adapter) { instance_double('prometheus_adapter', can_query?: true, configured?: true) }
before do
allow(deployment_metrics).to receive(:prometheus_adapter).and_return(prometheus_adapter)
diff --git a/spec/models/deployment_spec.rb b/spec/models/deployment_spec.rb
index 33e4cd34aa5..0c1b259d6bf 100644
--- a/spec/models/deployment_spec.rb
+++ b/spec/models/deployment_spec.rb
@@ -399,44 +399,64 @@ describe Deployment do
expect(deploy.merge_requests).to include(mr1, mr2)
end
+
+ it 'ignores already linked merge requests' do
+ deploy = create(:deployment)
+ mr1 = create(
+ :merge_request,
+ :merged,
+ target_project: deploy.project,
+ source_project: deploy.project
+ )
+
+ deploy.link_merge_requests(deploy.project.merge_requests)
+
+ mr2 = create(
+ :merge_request,
+ :merged,
+ target_project: deploy.project,
+ source_project: deploy.project
+ )
+
+ deploy.link_merge_requests(deploy.project.merge_requests)
+
+ expect(deploy.merge_requests).to include(mr1, mr2)
+ end
end
describe '#previous_environment_deployment' do
it 'returns the previous deployment of the same environment' do
- deploy1 = create(:deployment, :success, ref: 'v1.0.0')
+ deploy1 = create(:deployment, :success)
deploy2 = create(
:deployment,
:success,
project: deploy1.project,
- environment: deploy1.environment,
- ref: 'v1.0.1'
+ environment: deploy1.environment
)
expect(deploy2.previous_environment_deployment).to eq(deploy1)
end
it 'ignores deployments that were not successful' do
- deploy1 = create(:deployment, :failed, ref: 'v1.0.0')
+ deploy1 = create(:deployment, :failed)
deploy2 = create(
:deployment,
:success,
project: deploy1.project,
- environment: deploy1.environment,
- ref: 'v1.0.1'
+ environment: deploy1.environment
)
expect(deploy2.previous_environment_deployment).to be_nil
end
it 'ignores deployments for different environments' do
- deploy1 = create(:deployment, :success, ref: 'v1.0.0')
+ deploy1 = create(:deployment, :success)
preprod = create(:environment, project: deploy1.project, name: 'preprod')
deploy2 = create(
:deployment,
:success,
project: deploy1.project,
- environment: preprod,
- ref: 'v1.0.1'
+ environment: preprod
)
expect(deploy2.previous_environment_deployment).to be_nil
@@ -499,4 +519,36 @@ describe Deployment do
end
end
end
+
+ describe '#valid_sha' do
+ it 'does not add errors for a valid SHA' do
+ project = create(:project, :repository)
+ deploy = build(:deployment, project: project)
+
+ expect(deploy).to be_valid
+ end
+
+ it 'adds an error for an invalid SHA' do
+ deploy = build(:deployment, sha: 'foo')
+
+ expect(deploy).not_to be_valid
+ expect(deploy.errors[:sha]).not_to be_empty
+ end
+ end
+
+ describe '#valid_ref' do
+ it 'does not add errors for a valid ref' do
+ project = create(:project, :repository)
+ deploy = build(:deployment, project: project)
+
+ expect(deploy).to be_valid
+ end
+
+ it 'adds an error for an invalid ref' do
+ deploy = build(:deployment, ref: 'does-not-exist')
+
+ expect(deploy).not_to be_valid
+ expect(deploy.errors[:ref]).not_to be_empty
+ end
+ end
end
diff --git a/spec/models/diff_note_spec.rb b/spec/models/diff_note_spec.rb
index 601dac21e6a..b802c8ba506 100644
--- a/spec/models/diff_note_spec.rb
+++ b/spec/models/diff_note_spec.rb
@@ -5,11 +5,11 @@ require 'spec_helper'
describe DiffNote do
include RepoHelpers
- let!(:merge_request) { create(:merge_request) }
- let(:project) { merge_request.project }
- let(:commit) { project.commit(sample_commit.id) }
+ let_it_be(:merge_request) { create(:merge_request) }
+ let_it_be(:project) { merge_request.project }
+ let_it_be(:commit) { project.commit(sample_commit.id) }
- let(:path) { "files/ruby/popen.rb" }
+ let_it_be(:path) { "files/ruby/popen.rb" }
let(:diff_refs) { merge_request.diff_refs }
let!(:position) do
@@ -91,18 +91,124 @@ describe DiffNote do
end
describe '#create_diff_file callback' do
- let(:noteable) { create(:merge_request) }
- let(:project) { noteable.project }
-
context 'merge request' do
- let!(:diff_note) { create(:diff_note_on_merge_request, project: project, noteable: noteable) }
+ let(:position) do
+ Gitlab::Diff::Position.new(old_path: "files/ruby/popen.rb",
+ new_path: "files/ruby/popen.rb",
+ old_line: nil,
+ new_line: 9,
+ diff_refs: merge_request.diff_refs)
+ end
- it 'creates a diff note file' do
- expect(diff_note.reload.note_diff_file).to be_present
+ subject { build(:diff_note_on_merge_request, project: project, position: position, noteable: merge_request) }
+
+ let(:diff_file_from_repository) do
+ position.diff_file(project.repository)
+ end
+
+ let(:diff_file) do
+ diffs = merge_request.diffs
+ raw_diff = diffs.diffable.raw_diffs(diffs.diff_options.merge(paths: ['files/ruby/popen.rb'])).first
+ Gitlab::Diff::File.new(raw_diff,
+ repository: diffs.project.repository,
+ diff_refs: diffs.diff_refs,
+ fallback_diff_refs: diffs.fallback_diff_refs)
+ end
+
+ let(:diff_line) { diff_file.diff_lines.first }
+
+ let(:line_code) { '2f6fcd96b88b36ce98c38da085c795a27d92a3dd_15_14' }
+
+ before do
+ allow(subject.position).to receive(:line_code).and_return(line_code)
+ end
+
+ context 'when diffs are already created' do
+ before do
+ allow(subject).to receive(:created_at_diff?).and_return(true)
+ end
+
+ context 'when diff_file is found in persisted diffs' do
+ before do
+ allow(merge_request).to receive_message_chain(:diffs, :diff_files, :first).and_return(diff_file)
+ end
+
+ context 'when importing' do
+ before do
+ subject.importing = true
+ subject.line_code = line_code
+ end
+
+ context 'when diff_line is found in persisted diff_file' do
+ before do
+ allow(diff_file).to receive(:line_for_position).with(position).and_return(diff_line)
+ end
+
+ it 'creates a diff note file' do
+ subject.save
+ expect(subject.note_diff_file).to be_present
+ end
+ end
+
+ context 'when diff_line is not found in persisted diff_file' do
+ before do
+ allow(diff_file).to receive(:line_for_position).and_return(nil)
+ end
+
+ it_behaves_like 'a valid diff note with after commit callback'
+ end
+ end
+
+ context 'when not importing' do
+ context 'when diff_line is not found' do
+ before do
+ allow(diff_file).to receive(:line_for_position).with(position).and_return(nil)
+ end
+
+ it 'raises an error' do
+ expect { subject.save }.to raise_error(::DiffNote::NoteDiffFileCreationError,
+ "Failed to find diff line for: #{diff_file.file_path}, "\
+ "old_line: #{position.old_line}"\
+ ", new_line: #{position.new_line}")
+ end
+ end
+
+ context 'when diff_line is found' do
+ before do
+ allow(diff_file).to receive(:line_for_position).with(position).and_return(diff_line)
+ end
+
+ it 'creates a diff note file' do
+ subject.save
+ expect(subject.reload.note_diff_file).to be_present
+ end
+ end
+ end
+ end
+
+ context 'when diff file is not found in persisted diffs' do
+ before do
+ allow_next_instance_of(Gitlab::Diff::FileCollection::MergeRequestDiff) do |merge_request_diff|
+ allow(merge_request_diff).to receive(:diff_files).and_return([])
+ end
+ end
+
+ it_behaves_like 'a valid diff note with after commit callback'
+ end
+ end
+
+ context 'when diffs are not already created' do
+ before do
+ allow(subject).to receive(:created_at_diff?).and_return(false)
+ end
+
+ it_behaves_like 'a valid diff note with after commit callback'
end
it 'does not create diff note file if it is a reply' do
- expect { create(:diff_note_on_merge_request, noteable: noteable, in_reply_to: diff_note) }
+ diff_note = create(:diff_note_on_merge_request, project: project, noteable: merge_request)
+
+ expect { create(:diff_note_on_merge_request, noteable: merge_request, in_reply_to: diff_note) }
.not_to change(NoteDiffFile, :count)
end
end
diff --git a/spec/models/diff_viewer/base_spec.rb b/spec/models/diff_viewer/base_spec.rb
index 019597993cc..0a1c4c5560e 100644
--- a/spec/models/diff_viewer/base_spec.rb
+++ b/spec/models/diff_viewer/base_spec.rb
@@ -43,34 +43,6 @@ describe DiffViewer::Base do
end
end
- context 'when the file type is supported' do
- let(:commit) { project.commit('1a0b36b3cdad1d2ee32457c102a8c0b7056fa863') }
- let(:diff_file) { commit.diffs.diff_file_with_new_path('LICENSE') }
-
- before do
- viewer_class.file_types = %i(license)
- viewer_class.binary = false
- end
-
- context 'when the binaryness matches' do
- it 'returns true' do
- expect(viewer_class.can_render?(diff_file)).to be_truthy
- end
- end
-
- context 'when the binaryness does not match' do
- before do
- allow_next_instance_of(Blob) do |instance|
- allow(instance).to receive(:binary_in_repo?).and_return(true)
- end
- end
-
- it 'returns false' do
- expect(viewer_class.can_render?(diff_file)).to be_falsey
- end
- end
- end
-
context 'when the extension and file type are not supported' do
it 'returns false' do
expect(viewer_class.can_render?(diff_file)).to be_falsey
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index 0537220fcd2..af7ab24d7d6 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -6,8 +6,10 @@ describe Environment, :use_clean_rails_memory_store_caching do
include ReactiveCachingHelpers
using RSpec::Parameterized::TableSyntax
include RepoHelpers
+ include StubENV
+
+ let(:project) { create(:project, :repository) }
- let(:project) { create(:project, :stubbed_repository) }
subject(:environment) { create(:environment, project: project) }
it { is_expected.to be_kind_of(ReactiveCaching) }
@@ -28,7 +30,6 @@ describe Environment, :use_clean_rails_memory_store_caching do
it { is_expected.to validate_length_of(:external_url).is_at_most(255) }
describe '.order_by_last_deployed_at' do
- let(:project) { create(:project, :repository) }
let!(:environment1) { create(:environment, project: project) }
let!(:environment2) { create(:environment, project: project) }
let!(:environment3) { create(:environment, project: project) }
@@ -36,9 +37,13 @@ describe Environment, :use_clean_rails_memory_store_caching do
let!(:deployment2) { create(:deployment, environment: environment2) }
let!(:deployment3) { create(:deployment, environment: environment1) }
- it 'returns the environments in order of having been last deployed' do
+ it 'returns the environments in ascending order of having been last deployed' do
expect(project.environments.order_by_last_deployed_at.to_a).to eq([environment3, environment2, environment1])
end
+
+ it 'returns the environments in descending order of having been last deployed' do
+ expect(project.environments.order_by_last_deployed_at_desc.to_a).to eq([environment1, environment2, environment3])
+ end
end
describe 'state machine' do
@@ -134,8 +139,8 @@ describe Environment, :use_clean_rails_memory_store_caching do
describe '.with_deployment' do
subject { described_class.with_deployment(sha) }
- let(:environment) { create(:environment) }
- let(:sha) { RepoHelpers.sample_commit.id }
+ let(:environment) { create(:environment, project: project) }
+ let(:sha) { 'b83d6e391c22777fca1ed3012fce84f633d7fed0' }
context 'when deployment has the specified sha' do
let!(:deployment) { create(:deployment, environment: environment, sha: sha) }
@@ -144,7 +149,7 @@ describe Environment, :use_clean_rails_memory_store_caching do
end
context 'when deployment does not have the specified sha' do
- let!(:deployment) { create(:deployment, environment: environment, sha: 'abc') }
+ let!(:deployment) { create(:deployment, environment: environment, sha: 'ddd0f15ae83993f5cb66a927a28673882e99100b') }
it { is_expected.to be_empty }
end
@@ -153,7 +158,7 @@ describe Environment, :use_clean_rails_memory_store_caching do
describe '#folder_name' do
context 'when it is inside a folder' do
subject(:environment) do
- create(:environment, name: 'staging/review-1')
+ create(:environment, name: 'staging/review-1', project: project)
end
it 'returns a top-level folder name' do
@@ -667,11 +672,11 @@ describe Environment, :use_clean_rails_memory_store_caching do
context 'when the environment is available' do
context 'with a deployment service' do
context 'when user configured kubernetes from CI/CD > Clusters' do
- let!(:cluster) { create(:cluster, :project, :provided_by_gcp) }
- let(:project) { cluster.project }
+ let!(:cluster) { create(:cluster, :project, :provided_by_gcp, projects: [project]) }
context 'with deployment' do
let!(:deployment) { create(:deployment, :success, environment: environment) }
+
it { is_expected.to be_truthy }
end
@@ -788,10 +793,9 @@ describe Environment, :use_clean_rails_memory_store_caching do
end
describe '#calculate_reactive_cache' do
- let(:cluster) { create(:cluster, :project, :provided_by_user) }
- let(:project) { cluster.project }
- let(:environment) { create(:environment, project: project) }
- let!(:deployment) { create(:deployment, :success, environment: environment) }
+ let!(:cluster) { create(:cluster, :project, :provided_by_user, projects: [project]) }
+ let!(:environment) { create(:environment, project: project) }
+ let!(:deployment) { create(:deployment, :success, environment: environment, project: project) }
subject { environment.calculate_reactive_cache }
@@ -824,10 +828,11 @@ describe Environment, :use_clean_rails_memory_store_caching do
context 'when the environment is available' do
context 'with a deployment service' do
- let(:project) { create(:prometheus_project) }
+ let(:project) { create(:prometheus_project, :repository) }
context 'and a deployment' do
let!(:deployment) { create(:deployment, environment: environment) }
+
it { is_expected.to be_truthy }
end
@@ -847,6 +852,52 @@ describe Environment, :use_clean_rails_memory_store_caching do
context 'without a monitoring service' do
it { is_expected.to be_falsy }
end
+
+ context 'when sample metrics are enabled' do
+ before do
+ stub_env('USE_SAMPLE_METRICS', 'true')
+ end
+
+ context 'with no prometheus adapter configured' do
+ before do
+ allow(environment.prometheus_adapter).to receive(:configured?).and_return(false)
+ end
+
+ it { is_expected.to be_truthy }
+ end
+ end
+ end
+
+ describe '#has_sample_metrics?' do
+ subject { environment.has_sample_metrics? }
+
+ let(:project) { create(:project) }
+
+ context 'when sample metrics are enabled' do
+ before do
+ stub_env('USE_SAMPLE_METRICS', 'true')
+ end
+
+ context 'with no prometheus adapter configured' do
+ before do
+ allow(environment.prometheus_adapter).to receive(:configured?).and_return(false)
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'with the environment stopped' do
+ before do
+ environment.stop
+ end
+
+ it { is_expected.to be_falsy }
+ end
+ end
+
+ context 'when sample metrics are not enabled' do
+ it { is_expected.to be_falsy }
+ end
end
context 'when the environment is unavailable' do
@@ -862,6 +913,7 @@ describe Environment, :use_clean_rails_memory_store_caching do
describe '#metrics' do
let(:project) { create(:prometheus_project) }
+
subject { environment.metrics }
context 'when the environment has metrics' do
@@ -943,6 +995,7 @@ describe Environment, :use_clean_rails_memory_store_caching do
describe '#additional_metrics' do
let(:project) { create(:prometheus_project) }
let(:metric_params) { [] }
+
subject { environment.additional_metrics(*metric_params) }
context 'when the environment has additional metrics' do
@@ -1059,7 +1112,7 @@ describe Environment, :use_clean_rails_memory_store_caching do
describe '#prometheus_adapter' do
it 'calls prometheus adapter service' do
- expect_next_instance_of(Prometheus::AdapterService) do |instance|
+ expect_next_instance_of(Gitlab::Prometheus::Adapter) do |instance|
expect(instance).to receive(:prometheus_adapter)
end
diff --git a/spec/models/error_tracking/project_error_tracking_setting_spec.rb b/spec/models/error_tracking/project_error_tracking_setting_spec.rb
index ef426661066..5b402e572c3 100644
--- a/spec/models/error_tracking/project_error_tracking_setting_spec.rb
+++ b/spec/models/error_tracking/project_error_tracking_setting_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
describe ErrorTracking::ProjectErrorTrackingSetting do
include ReactiveCachingHelpers
+ include Gitlab::Routing
let_it_be(:project) { create(:project) }
@@ -63,6 +64,22 @@ describe ErrorTracking::ProjectErrorTrackingSetting do
end
end
+ describe '.extract_sentry_external_url' do
+ subject { described_class.extract_sentry_external_url(sentry_url) }
+
+ describe 'when passing a URL' do
+ let(:sentry_url) { 'https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project' }
+
+ it { is_expected.to eq('https://sentrytest.gitlab.com/sentry-org/sentry-project') }
+ end
+
+ describe 'when passing nil' do
+ let(:sentry_url) { nil }
+
+ it { is_expected.to be_nil }
+ end
+ end
+
describe '#sentry_external_url' do
let(:sentry_url) { 'https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project' }
@@ -138,8 +155,6 @@ describe ErrorTracking::ProjectErrorTrackingSetting do
error: 'error message',
error_type: ErrorTracking::ProjectErrorTrackingSetting::SENTRY_API_ERROR_TYPE_NON_20X_RESPONSE
)
- expect(subject).to have_received(:sentry_client)
- expect(sentry_client).to have_received(:list_issues)
end
end
@@ -159,8 +174,6 @@ describe ErrorTracking::ProjectErrorTrackingSetting do
error: 'Sentry API response is missing keys. key not found: "id"',
error_type: ErrorTracking::ProjectErrorTrackingSetting::SENTRY_API_ERROR_TYPE_MISSING_KEYS
)
- expect(subject).to have_received(:sentry_client)
- expect(sentry_client).to have_received(:list_issues)
end
end
@@ -181,8 +194,21 @@ describe ErrorTracking::ProjectErrorTrackingSetting do
error: error_msg,
error_type: ErrorTracking::ProjectErrorTrackingSetting::SENTRY_API_ERROR_INVALID_SIZE
)
- expect(subject).to have_received(:sentry_client)
- expect(sentry_client).to have_received(:list_issues)
+ end
+ end
+
+ context 'when sentry client raises StandardError' do
+ let(:sentry_client) { spy(:sentry_client) }
+
+ before do
+ synchronous_reactive_cache(subject)
+
+ allow(subject).to receive(:sentry_client).and_return(sentry_client)
+ allow(sentry_client).to receive(:list_issues).with(opts).and_raise(StandardError)
+ end
+
+ it 'returns error' do
+ expect(result).to eq(error: 'Unexpected Error')
end
end
end
@@ -201,6 +227,90 @@ describe ErrorTracking::ProjectErrorTrackingSetting do
end
end
+ describe '#issue_details' do
+ let(:issue) { build(:detailed_error_tracking_error) }
+ let(:sentry_client) { double('sentry_client', issue_details: issue) }
+ let(:commit_id) { issue.first_release_version }
+
+ let(:result) do
+ subject.issue_details
+ end
+
+ context 'when cached' do
+ before do
+ stub_reactive_cache(subject, issue, {})
+ synchronous_reactive_cache(subject)
+
+ expect(subject).to receive(:sentry_client).and_return(sentry_client)
+ end
+
+ it { expect(result).to eq(issue: issue) }
+ it { expect(result[:issue].first_release_version).to eq(commit_id) }
+ it { expect(result[:issue].gitlab_commit).to eq(nil) }
+ it { expect(result[:issue].gitlab_commit_path).to eq(nil) }
+
+ context 'when release version is nil' do
+ before do
+ issue.first_release_version = nil
+ end
+
+ it { expect(result[:issue].gitlab_commit).to eq(nil) }
+ it { expect(result[:issue].gitlab_commit_path).to eq(nil) }
+ end
+
+ context 'when repo commit matches first release version' do
+ let(:commit) { double('commit', id: commit_id) }
+ let(:repository) { double('repository', commit: commit) }
+
+ before do
+ expect(project).to receive(:repository).and_return(repository)
+ end
+
+ it { expect(result[:issue].gitlab_commit).to eq(commit_id) }
+ it { expect(result[:issue].gitlab_commit_path).to eq("/#{project.namespace.path}/#{project.path}/commit/#{commit_id}") }
+ end
+ end
+
+ context 'when not cached' do
+ it { expect(subject).not_to receive(:sentry_client) }
+ it { expect(result).to be_nil }
+ end
+ end
+
+ describe '#update_issue' do
+ let(:opts) do
+ { status: 'resolved' }
+ end
+
+ let(:result) do
+ subject.update_issue(**opts)
+ end
+
+ let(:sentry_client) { spy(:sentry_client) }
+
+ context 'successful call to sentry' do
+ before do
+ allow(subject).to receive(:sentry_client).and_return(sentry_client)
+ allow(sentry_client).to receive(:update_issue).with(opts).and_return(true)
+ end
+
+ it 'returns the successful response' do
+ expect(result).to eq(updated: true)
+ end
+ end
+
+ context 'sentry raises an error' do
+ before do
+ allow(subject).to receive(:sentry_client).and_return(sentry_client)
+ allow(sentry_client).to receive(:update_issue).with(opts).and_raise(StandardError)
+ end
+
+ it 'returns the error response' do
+ expect(result).to eq(error: 'Unexpected Error')
+ end
+ end
+ end
+
context 'slugs' do
shared_examples_for 'slug from api_url' do |method, slug|
context 'when api_url is correct' do
diff --git a/spec/models/external_issue_spec.rb b/spec/models/external_issue_spec.rb
index 9d064d458f0..b8d85d49b07 100644
--- a/spec/models/external_issue_spec.rb
+++ b/spec/models/external_issue_spec.rb
@@ -33,6 +33,7 @@ describe ExternalIssue do
context 'if issue id is a number' do
let(:issue) { described_class.new('1234', project) }
+
it 'returns the issue ID prefixed by #' do
expect(issue.reference_link_text).to eq '#1234'
end
diff --git a/spec/models/global_milestone_spec.rb b/spec/models/global_milestone_spec.rb
index 9d901d01a52..34dbdfec60d 100644
--- a/spec/models/global_milestone_spec.rb
+++ b/spec/models/global_milestone_spec.rb
@@ -162,6 +162,7 @@ describe GlobalMilestone do
describe '#initialize' do
let(:milestone1_project1) { create(:milestone, title: "Milestone v1.2", project: project1) }
+
subject(:global_milestone) { described_class.new(milestone1_project1) }
it 'has exactly one group milestone' do
diff --git a/spec/models/group_group_link_spec.rb b/spec/models/group_group_link_spec.rb
index e4ad5703a10..a877cc803dd 100644
--- a/spec/models/group_group_link_spec.rb
+++ b/spec/models/group_group_link_spec.rb
@@ -33,4 +33,12 @@ describe GroupGroupLink do
validate_inclusion_of(:group_access).in_array(Gitlab::Access.values))
end
end
+
+ describe '#human_access' do
+ it 'delegates to Gitlab::Access' do
+ expect(Gitlab::Access).to receive(:human_access).with(group_group_link.group_access)
+
+ group_group_link.human_access
+ end
+ end
end
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 3fa9d71cc7d..3531c695236 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -28,6 +28,7 @@ describe Group do
describe '#members & #requesters' do
let(:requester) { create(:user) }
let(:developer) { create(:user) }
+
before do
group.request_access(requester)
group.add_developer(developer)
@@ -1002,6 +1003,57 @@ describe Group do
end
end
+ describe '#related_group_ids' do
+ let(:nested_group) { create(:group, parent: group) }
+ let(:shared_with_group) { create(:group, parent: group) }
+
+ before do
+ create(:group_group_link, shared_group: nested_group,
+ shared_with_group: shared_with_group)
+ end
+
+ subject(:related_group_ids) { nested_group.related_group_ids }
+
+ it 'returns id' do
+ expect(related_group_ids).to include(nested_group.id)
+ end
+
+ it 'returns ancestor id' do
+ expect(related_group_ids).to include(group.id)
+ end
+
+ it 'returns shared with group id' do
+ expect(related_group_ids).to include(shared_with_group.id)
+ end
+
+ context 'with more than one ancestor group' do
+ let(:ancestor_group) { create(:group) }
+
+ before do
+ group.update(parent: ancestor_group)
+ end
+
+ it 'returns all ancestor group ids' do
+ expect(related_group_ids).to(
+ include(group.id, ancestor_group.id))
+ end
+ end
+
+ context 'with more than one shared with group' do
+ let(:another_shared_with_group) { create(:group, parent: group) }
+
+ before do
+ create(:group_group_link, shared_group: nested_group,
+ shared_with_group: another_shared_with_group)
+ end
+
+ it 'returns all shared with group ids' do
+ expect(related_group_ids).to(
+ include(shared_with_group.id, another_shared_with_group.id))
+ end
+ end
+ end
+
context 'with uploads' do
it_behaves_like 'model with uploads', true do
let(:model_object) { create(:group, :with_avatar) }
diff --git a/spec/models/hooks/web_hook_log_spec.rb b/spec/models/hooks/web_hook_log_spec.rb
index 22aad2fab0a..3520720d9a4 100644
--- a/spec/models/hooks/web_hook_log_spec.rb
+++ b/spec/models/hooks/web_hook_log_spec.rb
@@ -53,16 +53,19 @@ describe WebHookLog do
describe '2xx' do
let(:status) { '200' }
+
it { expect(web_hook_log.success?).to be_truthy }
end
describe 'not 2xx' do
let(:status) { '500' }
+
it { expect(web_hook_log.success?).to be_falsey }
end
describe 'internal erorr' do
let(:status) { 'internal error' }
+
it { expect(web_hook_log.success?).to be_falsey }
end
end
diff --git a/spec/models/import_failure_spec.rb b/spec/models/import_failure_spec.rb
new file mode 100644
index 00000000000..d6574791a65
--- /dev/null
+++ b/spec/models/import_failure_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ImportFailure do
+ describe "Associations" do
+ it { is_expected.to belong_to(:project) }
+ it { is_expected.to belong_to(:group) }
+ end
+
+ describe 'Validations' do
+ context 'has no group' do
+ before do
+ allow(subject).to receive(:group).and_return(nil)
+ end
+
+ it { is_expected.to validate_presence_of(:project) }
+ end
+
+ context 'has no project' do
+ before do
+ allow(subject).to receive(:project).and_return(nil)
+ end
+
+ it { is_expected.to validate_presence_of(:group) }
+ end
+ end
+end
diff --git a/spec/models/instance_configuration_spec.rb b/spec/models/instance_configuration_spec.rb
index 43954511858..3e0181b8846 100644
--- a/spec/models/instance_configuration_spec.rb
+++ b/spec/models/instance_configuration_spec.rb
@@ -48,6 +48,7 @@ describe InstanceConfiguration do
describe '#gitlab_pages' do
let(:gitlab_pages) { subject.settings[:gitlab_pages] }
+
it 'returns Settings.pages' do
gitlab_pages.delete(:ip_address)
@@ -73,6 +74,7 @@ describe InstanceConfiguration do
describe '#gitlab_ci' do
let(:gitlab_ci) { subject.settings[:gitlab_ci] }
+
it 'returns Settings.gitalb_ci' do
gitlab_ci.delete(:artifacts_max_size)
diff --git a/spec/models/internal_id_spec.rb b/spec/models/internal_id_spec.rb
index c73ade3f896..33d03bfc0f5 100644
--- a/spec/models/internal_id_spec.rb
+++ b/spec/models/internal_id_spec.rb
@@ -170,6 +170,7 @@ describe InternalId do
describe '.track_greatest' do
let(:value) { 9001 }
+
subject { described_class.track_greatest(issue, scope, usage, value, init) }
context 'in the absence of a record' do
@@ -210,6 +211,7 @@ describe InternalId do
describe '#increment_and_save!' do
let(:id) { create(:internal_id) }
+
subject { id.increment_and_save! }
it 'returns incremented iid' do
@@ -236,6 +238,7 @@ describe InternalId do
describe '#track_greatest_and_save!' do
let(:id) { create(:internal_id) }
let(:new_last_value) { 9001 }
+
subject { id.track_greatest_and_save!(new_last_value) }
it 'returns new last value' do
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index d1ed06dd04d..5c3f7c09e22 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -259,6 +259,7 @@ describe Issue do
describe '#can_move?' do
let(:user) { create(:user) }
let(:issue) { create(:issue) }
+
subject { issue.can_move?(user) }
context 'user is not a member of project issue belongs to' do
@@ -277,6 +278,7 @@ describe Issue do
context 'issue not persisted' do
let(:issue) { build(:issue, project: project) }
+
it { is_expected.to eq false }
end
@@ -306,6 +308,7 @@ describe Issue do
describe '#moved?' do
let(:issue) { create(:issue) }
+
subject { issue.moved? }
context 'issue not moved' do
@@ -322,6 +325,7 @@ describe Issue do
describe '#duplicated?' do
let(:issue) { create(:issue) }
+
subject { issue.duplicated? }
context 'issue not duplicated' do
@@ -380,6 +384,7 @@ describe Issue do
describe '#has_related_branch?' do
let(:issue) { create(:issue, title: "Blue Bell Knoll") }
+
subject { issue.has_related_branch? }
context 'branch found' do
@@ -442,6 +447,7 @@ describe Issue do
describe '#can_be_worked_on?' do
let(:project) { build(:project) }
+
subject { build(:issue, :opened, project: project) }
context 'is closed' do
diff --git a/spec/models/key_spec.rb b/spec/models/key_spec.rb
index 2dd9583087f..1ae90cae4b1 100644
--- a/spec/models/key_spec.rb
+++ b/spec/models/key_spec.rb
@@ -25,6 +25,7 @@ describe Key, :mailer do
describe "Methods" do
let(:user) { create(:user) }
+
it { is_expected.to respond_to :projects }
it { is_expected.to respond_to :publishable_key }
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index bf6fa20dc17..c6894c04385 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -110,6 +110,7 @@ describe MergeRequest do
describe '#squash?' do
let(:merge_request) { build(:merge_request, squash: squash) }
+
subject { merge_request.squash? }
context 'disabled in database' do
@@ -383,7 +384,7 @@ describe MergeRequest do
end
it 'returns target branches sort by updated at desc' do
- expect(described_class.recent_target_branches).to match_array(['feature', 'merge-test', 'fix'])
+ expect(described_class.recent_target_branches).to match_array(%w[feature merge-test fix])
end
end
@@ -851,6 +852,7 @@ describe MergeRequest do
describe '#modified_paths' do
let(:paths) { double(:paths) }
+
subject(:merge_request) { build(:merge_request) }
before do
@@ -879,6 +881,7 @@ describe MergeRequest do
context 'when no arguments provided' do
let(:diff) { merge_request.merge_request_diff }
+
subject(:merge_request) { create(:merge_request, source_branch: 'feature', target_branch: 'master') }
it 'returns affected file paths for merge_request_diff' do
@@ -960,6 +963,15 @@ describe MergeRequest do
expect(subject.closes_issues).to be_empty
end
+
+ it 'ignores referenced issues when auto-close is disabled' do
+ subject.project.update!(autoclose_referenced_issues: false)
+
+ allow(subject.project).to receive(:default_branch)
+ .and_return(subject.target_branch)
+
+ expect(subject.closes_issues).to be_empty
+ end
end
describe '#issues_mentioned_but_not_closing' do
@@ -1554,6 +1566,7 @@ describe MergeRequest do
describe '#calculate_reactive_cache' do
let(:project) { create(:project, :repository) }
let(:merge_request) { create(:merge_request, source_project: project) }
+
subject { merge_request.calculate_reactive_cache(service_class_name) }
context 'when given an unknown service class name' do
@@ -2009,7 +2022,7 @@ describe MergeRequest do
it 'atomically enqueues a RebaseWorker job and updates rebase_jid' do
expect(RebaseWorker)
.to receive(:perform_async)
- .with(merge_request.id, user_id)
+ .with(merge_request.id, user_id, false)
.and_return(rebase_jid)
expect(merge_request).to receive(:expire_etag_cache)
@@ -2201,6 +2214,16 @@ describe MergeRequest do
end
end
+ describe "#actual_head_pipeline_active? " do
+ it do
+ is_expected
+ .to delegate_method(:active?)
+ .to(:actual_head_pipeline)
+ .with_prefix
+ .with_arguments(allow_nil: true)
+ end
+ end
+
describe '#mergeable_ci_state?' do
let(:project) { create(:project, only_allow_merge_if_pipeline_succeeds: true) }
let(:pipeline) { create(:ci_empty_pipeline) }
@@ -2322,6 +2345,10 @@ describe MergeRequest do
let(:project) { create(:project, :repository) }
let(:user) { project.creator }
let(:merge_request) { create(:merge_request, source_project: project) }
+ let(:source_branch) { merge_request.source_branch }
+ let(:target_branch) { merge_request.target_branch }
+ let(:source_oid) { project.commit(source_branch).id }
+ let(:target_oid) { project.commit(target_branch).id }
before do
merge_request.source_project.add_maintainer(user)
@@ -2332,13 +2359,21 @@ describe MergeRequest do
let(:environments) { create_list(:environment, 3, project: project) }
before do
- create(:deployment, :success, environment: environments.first, ref: 'master', sha: project.commit('master').id)
- create(:deployment, :success, environment: environments.second, ref: 'feature', sha: project.commit('feature').id)
+ create(:deployment, :success, environment: environments.first, ref: source_branch, sha: source_oid)
+ create(:deployment, :success, environment: environments.second, ref: target_branch, sha: target_oid)
end
it 'selects deployed environments' do
expect(merge_request.environments_for(user)).to contain_exactly(environments.first)
end
+
+ it 'selects the latest deployed environment' do
+ latest_environment = create(:environment, project: project)
+ create(:deployment, :success, environment: latest_environment, ref: source_branch, sha: source_oid)
+
+ expect(merge_request.environments_for(user)).to eq([environments.first, latest_environment])
+ expect(merge_request.environments_for(user, latest: true)).to contain_exactly(latest_environment)
+ end
end
context 'with environments on source project' do
@@ -3032,6 +3067,7 @@ describe MergeRequest do
describe 'transition to cannot_be_merged' do
let(:notification_service) { double(:notification_service) }
let(:todo_service) { double(:todo_service) }
+
subject { create(:merge_request, state, merge_status: :unchecked) }
before do
@@ -3241,6 +3277,7 @@ describe MergeRequest do
describe 'when merge_when_pipeline_succeeds? is true' do
describe 'when merge user is author' do
let(:user) { create(:user) }
+
subject do
create(:merge_request,
merge_when_pipeline_succeeds: true,
@@ -3255,6 +3292,7 @@ describe MergeRequest do
describe 'when merge user and author are different users' do
let(:merge_user) { create(:user) }
+
subject do
create(:merge_request,
merge_when_pipeline_succeeds: true,
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 2ba0d97792b..740385bbd54 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -26,6 +26,7 @@ describe Namespace do
it { is_expected.to validate_presence_of(:path) }
it { is_expected.to validate_length_of(:path).is_at_most(255) }
it { is_expected.to validate_presence_of(:owner) }
+ it { is_expected.to validate_numericality_of(:max_artifacts_size).only_integer.is_greater_than(0) }
it 'does not allow too deep nesting' do
ancestors = (1..21).to_a
@@ -922,6 +923,12 @@ describe Namespace do
expect(group.emails_disabled?).to be_truthy
end
+
+ it 'does not query the db when there is no parent group' do
+ group = create(:group, emails_disabled: true)
+
+ expect { group.emails_disabled? }.not_to exceed_query_limit(0)
+ end
end
context 'when a subgroup' do
diff --git a/spec/models/project_deploy_token_spec.rb b/spec/models/project_deploy_token_spec.rb
index 8c8924762bd..0543bbdf2a8 100644
--- a/spec/models/project_deploy_token_spec.rb
+++ b/spec/models/project_deploy_token_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe ProjectDeployToken, type: :model do
let(:project) { create(:project) }
let(:deploy_token) { create(:deploy_token) }
+
subject(:project_deploy_token) { create(:project_deploy_token, project: project, deploy_token: deploy_token) }
it { is_expected.to belong_to :project }
diff --git a/spec/models/project_feature_spec.rb b/spec/models/project_feature_spec.rb
index 9ce1b8fd895..6a333898955 100644
--- a/spec/models/project_feature_spec.rb
+++ b/spec/models/project_feature_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
describe ProjectFeature do
+ using RSpec::Parameterized::TableSyntax
+
let(:project) { create(:project) }
let(:user) { create(:user) }
@@ -121,13 +123,14 @@ describe ProjectFeature do
end
context 'public features' do
- it "does not allow public for other than pages" do
- features = %w(issues wiki builds merge_requests snippets repository)
- project_feature = project.project_feature
+ features = %w(issues wiki builds merge_requests snippets repository)
- features.each do |feature|
+ features.each do |feature|
+ it "does not allow public access level for #{feature}" do
+ project_feature = project.project_feature
field = "#{feature}_access_level".to_sym
project_feature.update_attribute(field, ProjectFeature::PUBLIC)
+
expect(project_feature.valid?).to be_falsy
end
end
@@ -158,12 +161,13 @@ describe ProjectFeature do
end
describe 'default pages access level' do
- subject { project.project_feature.pages_access_level }
+ subject { project_feature.pages_access_level }
- before do
+ let(:project_feature) do
# project factory overrides all values in project_feature after creation
project.project_feature.destroy!
project.build_project_feature.save!
+ project.project_feature
end
context 'when new project is private' do
@@ -182,6 +186,14 @@ describe ProjectFeature do
let(:project) { create(:project, :public) }
it { is_expected.to eq(ProjectFeature::ENABLED) }
+
+ context 'when access control is forced on the admin level' do
+ before do
+ allow(::Gitlab::Pages).to receive(:access_control_is_forced?).and_return(true)
+ end
+
+ it { is_expected.to eq(ProjectFeature::PRIVATE) }
+ end
end
end
@@ -189,53 +201,59 @@ describe ProjectFeature do
it 'returns true if Pages access controll is not enabled' do
stub_config(pages: { access_control: false })
- project_feature = described_class.new
+ project_feature = described_class.new(pages_access_level: described_class::PRIVATE)
expect(project_feature.public_pages?).to eq(true)
end
- context 'Pages access control is enabled' do
+ context 'when Pages access control is enabled' do
before do
stub_config(pages: { access_control: true })
end
- it 'returns true if Pages access level is public' do
- project_feature = described_class.new(pages_access_level: described_class::PUBLIC)
-
- expect(project_feature.public_pages?).to eq(true)
+ where(:project_visibility, :pages_access_level, :result) do
+ :private | ProjectFeature::PUBLIC | true
+ :internal | ProjectFeature::PUBLIC | true
+ :internal | ProjectFeature::ENABLED | false
+ :public | ProjectFeature::ENABLED | true
+ :private | ProjectFeature::PRIVATE | false
+ :public | ProjectFeature::PRIVATE | false
end
- it 'returns true if Pages access level is enabled and the project is public' do
- project = build(:project, :public)
-
- project_feature = described_class.new(project: project, pages_access_level: described_class::ENABLED)
-
- expect(project_feature.public_pages?).to eq(true)
- end
+ with_them do
+ let(:project_feature) do
+ project = build(:project, project_visibility)
+ project_feature = project.project_feature
+ project_feature.update!(pages_access_level: pages_access_level)
+ project_feature
+ end
- it 'returns false if pages or the project are not public' do
- project = build(:project, :private)
+ it 'properly handles project and Pages visibility settings' do
+ expect(project_feature.public_pages?).to eq(result)
+ end
- project_feature = described_class.new(project: project, pages_access_level: described_class::ENABLED)
+ it 'returns false if access_control is forced on the admin level' do
+ stub_application_setting(force_pages_access_control: true)
- expect(project_feature.public_pages?).to eq(false)
+ expect(project_feature.public_pages?).to eq(false)
+ end
end
end
+ end
- describe '#private_pages?' do
- subject(:project_feature) { described_class.new }
+ describe '#private_pages?' do
+ subject(:project_feature) { described_class.new }
- it 'returns false if public_pages? is true' do
- expect(project_feature).to receive(:public_pages?).and_return(true)
+ it 'returns false if public_pages? is true' do
+ expect(project_feature).to receive(:public_pages?).and_return(true)
- expect(project_feature.private_pages?).to eq(false)
- end
+ expect(project_feature.private_pages?).to eq(false)
+ end
- it 'returns true if public_pages? is false' do
- expect(project_feature).to receive(:public_pages?).and_return(false)
+ it 'returns true if public_pages? is false' do
+ expect(project_feature).to receive(:public_pages?).and_return(false)
- expect(project_feature.private_pages?).to eq(true)
- end
+ expect(project_feature.private_pages?).to eq(true)
end
end
diff --git a/spec/models/project_services/chat_message/base_message_spec.rb b/spec/models/project_services/chat_message/base_message_spec.rb
new file mode 100644
index 00000000000..8f80cf0b074
--- /dev/null
+++ b/spec/models/project_services/chat_message/base_message_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ChatMessage::BaseMessage do
+ let(:base_message) { described_class.new(args) }
+ let(:args) { { project_url: 'https://gitlab-domain.com' } }
+
+ describe '#fallback' do
+ subject { base_message.fallback }
+
+ before do
+ allow(base_message).to receive(:message).and_return(message)
+ end
+
+ context 'without relative links' do
+ let(:message) { 'Just another *markdown* message' }
+
+ it { is_expected.to eq(message) }
+ end
+
+ context 'with relative links' do
+ let(:message) { 'Check this out ![Screenshot1](/uploads/Screenshot1.png)' }
+
+ it { is_expected.to eq('Check this out https://gitlab-domain.com/uploads/Screenshot1.png') }
+ end
+
+ context 'with multiple relative links' do
+ let(:message) { 'Check this out ![Screenshot1](/uploads/Screenshot1.png). And this ![Screenshot2](/uploads/Screenshot2.png)' }
+
+ it { is_expected.to eq('Check this out https://gitlab-domain.com/uploads/Screenshot1.png. And this https://gitlab-domain.com/uploads/Screenshot2.png') }
+ end
+ end
+end
diff --git a/spec/models/project_services/chat_message/wiki_page_message_spec.rb b/spec/models/project_services/chat_message/wiki_page_message_spec.rb
index c3db516f253..1346a43335e 100644
--- a/spec/models/project_services/chat_message/wiki_page_message_spec.rb
+++ b/spec/models/project_services/chat_message/wiki_page_message_spec.rb
@@ -17,7 +17,8 @@ describe ChatMessage::WikiPageMessage do
object_attributes: {
title: 'Wiki page title',
url: 'http://url.com',
- content: 'Wiki page description'
+ content: 'Wiki page content',
+ message: 'Wiki page commit message'
}
}
end
@@ -57,10 +58,10 @@ describe ChatMessage::WikiPageMessage do
args[:object_attributes][:action] = 'create'
end
- it 'returns the attachment for a new wiki page' do
+ it 'returns the commit message for a new wiki page' do
expect(subject.attachments).to eq([
{
- text: "Wiki page description",
+ text: "Wiki page commit message",
color: color
}
])
@@ -72,10 +73,10 @@ describe ChatMessage::WikiPageMessage do
args[:object_attributes][:action] = 'update'
end
- it 'returns the attachment for an updated wiki page' do
+ it 'returns the commit message for an updated wiki page' do
expect(subject.attachments).to eq([
{
- text: "Wiki page description",
+ text: "Wiki page commit message",
color: color
}
])
@@ -119,8 +120,8 @@ describe ChatMessage::WikiPageMessage do
args[:object_attributes][:action] = 'create'
end
- it 'returns the attachment for a new wiki page' do
- expect(subject.attachments).to eq('Wiki page description')
+ it 'returns the commit message for a new wiki page' do
+ expect(subject.attachments).to eq('Wiki page commit message')
end
end
@@ -129,8 +130,8 @@ describe ChatMessage::WikiPageMessage do
args[:object_attributes][:action] = 'update'
end
- it 'returns the attachment for an updated wiki page' do
- expect(subject.attachments).to eq('Wiki page description')
+ it 'returns the commit message for an updated wiki page' do
+ expect(subject.attachments).to eq('Wiki page commit message')
end
end
end
diff --git a/spec/models/project_services/emails_on_push_service_spec.rb b/spec/models/project_services/emails_on_push_service_spec.rb
index ffe241aa880..56f094ecb48 100644
--- a/spec/models/project_services/emails_on_push_service_spec.rb
+++ b/spec/models/project_services/emails_on_push_service_spec.rb
@@ -25,19 +25,75 @@ describe EmailsOnPushService do
let(:push_data) { { object_kind: 'push' } }
let(:project) { create(:project, :repository) }
let(:service) { create(:emails_on_push_service, project: project) }
+ let(:recipients) { 'test@gitlab.com' }
- it 'does not send emails when disabled' do
- expect(project).to receive(:emails_disabled?).and_return(true)
- expect(EmailsOnPushWorker).not_to receive(:perform_async)
+ before do
+ subject.recipients = recipients
+ end
+
+ shared_examples 'sending email' do |branches_to_be_notified:, branch_being_pushed_to:|
+ let(:push_data) { { object_kind: 'push', object_attributes: { ref: branch_being_pushed_to } } }
- service.execute(push_data)
+ before do
+ subject.branches_to_be_notified = branches_to_be_notified
+ end
+
+ it 'sends email' do
+ expect(EmailsOnPushWorker).to receive(:perform_async)
+
+ service.execute(push_data)
+ end
end
- it 'does send emails when enabled' do
- expect(project).to receive(:emails_disabled?).and_return(false)
- expect(EmailsOnPushWorker).to receive(:perform_async)
+ shared_examples 'not sending email' do |branches_to_be_notified:, branch_being_pushed_to:|
+ let(:push_data) { { object_kind: 'push', object_attributes: { ref: branch_being_pushed_to } } }
- service.execute(push_data)
+ before do
+ subject.branches_to_be_notified = branches_to_be_notified
+ end
+
+ it 'does not send email' do
+ expect(EmailsOnPushWorker).not_to receive(:perform_async)
+
+ service.execute(push_data)
+ end
+ end
+
+ context 'when emails are disabled on the project' do
+ it 'does not send emails' do
+ expect(project).to receive(:emails_disabled?).and_return(true)
+ expect(EmailsOnPushWorker).not_to receive(:perform_async)
+
+ service.execute(push_data)
+ end
+ end
+
+ context 'when emails are enabled on the project' do
+ before do
+ create(:protected_branch, project: project, name: 'a-protected-branch')
+ expect(project).to receive(:emails_disabled?).and_return(false)
+ end
+
+ using RSpec::Parameterized::TableSyntax
+
+ where(:case_name, :branches_to_be_notified, :branch_being_pushed_to, :expected_action) do
+ 'pushing to a random branch and notification configured for all branches' | 'all' | 'random' | 'sending email'
+ 'pushing to the default branch and notification configured for all branches' | 'all' | 'master' | 'sending email'
+ 'pushing to a protected branch and notification configured for all branches' | 'all' | 'a-protected-branch' | 'sending email'
+ 'pushing to a random branch and notification configured for default branch only' | 'default' | 'random' | 'not sending email'
+ 'pushing to the default branch and notification configured for default branch only' | 'default' | 'master' | 'sending email'
+ 'pushing to a protected branch and notification configured for default branch only' | 'default' | 'a-protected-branch' | 'not sending email'
+ 'pushing to a random branch and notification configured for protected branches only' | 'protected' | 'random' | 'not sending email'
+ 'pushing to the default branch and notification configured for protected branches only' | 'protected' | 'master' | 'not sending email'
+ 'pushing to a protected branch and notification configured for protected branches only' | 'protected' | 'a-protected-branch' | 'sending email'
+ 'pushing to a random branch and notification configured for default and protected branches only' | 'default_and_protected' | 'random' | 'not sending email'
+ 'pushing to the default branch and notification configured for default and protected branches only' | 'default_and_protected' | 'master' | 'sending email'
+ 'pushing to a protected branch and notification configured for default and protected branches only' | 'default_and_protected' | 'a-protected-branch' | 'sending email'
+ end
+
+ with_them do
+ include_examples params[:expected_action], branches_to_be_notified: params[:branches_to_be_notified], branch_being_pushed_to: params[:branch_being_pushed_to]
+ end
end
end
end
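The rewrite above combines RSpec::Parameterized::TableSyntax (from the rspec-parameterized gem, which GitLab's spec_helper already loads, as the ProjectFeature hunk earlier in this diff shows) with shared examples dispatched per row through params. The following is only an illustrative sketch of that pattern, not part of the changed specs; the describe target, column values, and shared example bodies are made up:

# frozen_string_literal: true

require 'spec_helper'

describe 'table-driven shared examples (illustrative sketch)' do
  using RSpec::Parameterized::TableSyntax

  shared_examples 'sending' do |value|
    it { expect(value).to be > 0 }
  end

  shared_examples 'not sending' do |value|
    it { expect(value).to be <= 0 }
  end

  # Each where row becomes its own example group. Inside with_them,
  # params exposes the row values at group-definition time, which is why
  # the hunk above picks the shared example name out of params.
  where(:case_name, :value, :expected_action) do
    'positive value' | 1 | 'sending'
    'zero value'     | 0 | 'not sending'
  end

  with_them do
    include_examples params[:expected_action], params[:value]
  end
end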
diff --git a/spec/models/project_services/external_wiki_service_spec.rb b/spec/models/project_services/external_wiki_service_spec.rb
index bdd8605436f..f8d88a944a5 100644
--- a/spec/models/project_services/external_wiki_service_spec.rb
+++ b/spec/models/project_services/external_wiki_service_spec.rb
@@ -26,4 +26,34 @@ describe ExternalWikiService do
it { is_expected.not_to validate_presence_of(:external_wiki_url) }
end
end
+
+ describe 'test' do
+ before do
+ subject.properties['external_wiki_url'] = url
+ end
+
+ let(:url) { 'http://foo' }
+ let(:data) { nil }
+ let(:result) { subject.test(data) }
+
+ context 'when the URL is not reachable' do
+ before do
+ WebMock.stub_request(:get, url).to_return(status: 404, body: 'not a page')
+ end
+
+ it 'is not successful' do
+ expect(result[:success]).to be_falsey
+ end
+ end
+
+ context 'when the URL is reachable' do
+ before do
+ WebMock.stub_request(:get, url).to_return(status: 200, body: 'foo')
+ end
+
+ it 'is successful' do
+ expect(result[:success]).to be_truthy
+ end
+ end
+ end
end
diff --git a/spec/models/project_services/microsoft_teams_service_spec.rb b/spec/models/project_services/microsoft_teams_service_spec.rb
index 275244fa5fd..83d3c8b3a70 100644
--- a/spec/models/project_services/microsoft_teams_service_spec.rb
+++ b/spec/models/project_services/microsoft_teams_service_spec.rb
@@ -38,6 +38,7 @@ describe MicrosoftTeamsService do
describe "#execute" do
let(:user) { create(:user) }
+
set(:project) { create(:project, :repository, :wiki_repo) }
before do
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index d55530bf820..c57f47b5738 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -165,6 +165,7 @@ describe Project do
let(:project) { create(:project, :public) }
let(:requester) { create(:user) }
let(:developer) { create(:user) }
+
before do
project.request_access(requester)
project.add_developer(developer)
@@ -210,6 +211,7 @@ describe Project do
it { is_expected.to validate_presence_of(:creator) }
it { is_expected.to validate_presence_of(:namespace) }
it { is_expected.to validate_presence_of(:repository_storage) }
+ it { is_expected.to validate_numericality_of(:max_artifacts_size).only_integer.is_greater_than(0) }
it 'validates build timeout constraints' do
is_expected.to validate_numericality_of(:build_timeout)
@@ -472,6 +474,32 @@ describe Project do
end
end
+ describe '#autoclose_referenced_issues' do
+ context 'when DB entry is nil' do
+ let(:project) { create(:project, autoclose_referenced_issues: nil) }
+
+ it 'returns true' do
+ expect(project.autoclose_referenced_issues).to be_truthy
+ end
+ end
+
+ context 'when DB entry is true' do
+ let(:project) { create(:project, autoclose_referenced_issues: true) }
+
+ it 'returns true' do
+ expect(project.autoclose_referenced_issues).to be_truthy
+ end
+ end
+
+ context 'when DB entry is false' do
+ let(:project) { create(:project, autoclose_referenced_issues: false) }
+
+ it 'returns false' do
+ expect(project.autoclose_referenced_issues).to be_falsey
+ end
+ end
+ end
+
describe 'project token' do
it 'sets a random token if none provided' do
project = FactoryBot.create(:project, runners_token: '')
@@ -815,6 +843,7 @@ describe Project do
context 'with external issues tracker' do
let!(:internal_issue) { create(:issue, project: project) }
+
before do
allow(project).to receive(:external_issue_tracker).and_return(true)
end
@@ -1319,9 +1348,7 @@ describe Project do
let(:project2) { create(:project, :public, group: group) }
before do
- 2.times do
- create(:note_on_commit, project: project1)
- end
+ create_list(:note_on_commit, 2, project: project1)
create(:note_on_commit, project: project2)
@@ -1335,9 +1362,7 @@ describe Project do
end
it 'does not take system notes into account' do
- 10.times do
- create(:note_on_commit, project: project2, system: true)
- end
+ create_list(:note_on_commit, 10, project: project2, system: true)
expect(described_class.trending.to_a).to eq([project1, project2])
end
@@ -2334,6 +2359,7 @@ describe Project do
describe '#has_remote_mirror?' do
let(:project) { create(:project, :remote_mirror, :import_started) }
+
subject { project.has_remote_mirror? }
before do
@@ -2353,6 +2379,7 @@ describe Project do
describe '#update_remote_mirrors' do
let(:project) { create(:project, :remote_mirror, :import_started) }
+
delegate :update_remote_mirrors, to: :project
before do
@@ -3460,6 +3487,7 @@ describe Project do
describe '#pipeline_status' do
let(:project) { create(:project, :repository) }
+
it 'builds a pipeline status' do
expect(project.pipeline_status).to be_a(Gitlab::Cache::Ci::ProjectPipelineStatus)
end
@@ -4638,6 +4666,7 @@ describe Project do
describe '#execute_hooks' do
let(:data) { { ref: 'refs/heads/master', data: 'data' } }
+
it 'executes active projects hooks with the specified scope' do
hook = create(:project_hook, merge_requests_events: false, push_events: true)
expect(ProjectHook).to receive(:select_active)
@@ -4716,7 +4745,7 @@ describe Project do
end
it 'returns true when a plugin exists' do
- expect(Gitlab::Plugin).to receive(:any?).twice.and_return(true)
+ expect(Gitlab::FileHook).to receive(:any?).twice.and_return(true)
expect(project.has_active_hooks?(:merge_request_events)).to be_truthy
expect(project.has_active_hooks?).to be_truthy
@@ -4975,6 +5004,7 @@ describe Project do
context 'when there is a gitlab deploy token associated but it has been revoked' do
let!(:deploy_token) { create(:deploy_token, :gitlab_deploy_token, :revoked, projects: [project]) }
+
it { is_expected.to be_nil }
end
@@ -5018,6 +5048,7 @@ describe Project do
context '#members_among' do
let(:users) { create_list(:user, 3) }
+
set(:group) { create(:group) }
set(:project) { create(:project, namespace: group) }
@@ -5105,7 +5136,7 @@ describe Project do
describe '.deployments' do
subject { project.deployments }
- let(:project) { create(:project) }
+ let(:project) { create(:project, :repository) }
before do
allow_any_instance_of(Deployment).to receive(:create_ref)
diff --git a/spec/models/readme_blob_spec.rb b/spec/models/readme_blob_spec.rb
index f07713bd908..34182fa413f 100644
--- a/spec/models/readme_blob_spec.rb
+++ b/spec/models/readme_blob_spec.rb
@@ -7,6 +7,7 @@ describe ReadmeBlob do
describe 'policy' do
let(:project) { build(:project, :repository) }
+
subject { described_class.new(fake_blob(path: 'README.md'), project.repository) }
it 'works with policy' do
diff --git a/spec/models/release_spec.rb b/spec/models/release_spec.rb
index cadb8793e15..2f84b92b806 100644
--- a/spec/models/release_spec.rb
+++ b/spec/models/release_spec.rb
@@ -181,4 +181,10 @@ RSpec.describe Release do
it { is_expected.to eq(release.evidence.summary) }
end
end
+
+ describe '#milestone_titles' do
+ let(:release) { create(:release, :with_milestones) }
+
+ it { expect(release.milestone_titles).to eq(release.milestones.map { |m| m.title }.sort.join(", ")) }
+ end
end
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index c0245dfdf1a..38f3777c902 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -717,6 +717,7 @@ describe Repository do
describe "search_files_by_content" do
let(:results) { repository.search_files_by_content('feature', 'master') }
+
subject { results }
it { is_expected.to be_an Array }
@@ -1330,6 +1331,13 @@ describe Repository do
repository.root_ref
end
+ it 'returns nil if the repository does not exist' do
+ repository = create(:project).repository
+
+ expect(repository).not_to be_exists
+ expect(repository.root_ref).to be_nil
+ end
+
it_behaves_like 'asymmetric cached method', :root_ref
end
diff --git a/spec/models/resource_weight_event_spec.rb b/spec/models/resource_weight_event_spec.rb
new file mode 100644
index 00000000000..2f00204512e
--- /dev/null
+++ b/spec/models/resource_weight_event_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ResourceWeightEvent, type: :model do
+ let_it_be(:user1) { create(:user) }
+ let_it_be(:user2) { create(:user) }
+
+ let_it_be(:issue1) { create(:issue, author: user1) }
+ let_it_be(:issue2) { create(:issue, author: user1) }
+ let_it_be(:issue3) { create(:issue, author: user2) }
+
+ describe 'validations' do
+ it { is_expected.not_to allow_value(nil).for(:user) }
+ it { is_expected.not_to allow_value(nil).for(:issue) }
+ it { is_expected.to allow_value(nil).for(:weight) }
+ end
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:user) }
+ it { is_expected.to belong_to(:issue) }
+ end
+
+ describe '.by_issue' do
+ let_it_be(:event1) { create(:resource_weight_event, issue: issue1) }
+ let_it_be(:event2) { create(:resource_weight_event, issue: issue2) }
+ let_it_be(:event3) { create(:resource_weight_event, issue: issue1) }
+
+ it 'returns the expected records for an issue with events' do
+ events = ResourceWeightEvent.by_issue(issue1)
+
+ expect(events).to contain_exactly(event1, event3)
+ end
+
+ it 'returns the expected records for an issue with no events' do
+ events = ResourceWeightEvent.by_issue(issue3)
+
+ expect(events).to be_empty
+ end
+ end
+
+ describe '.created_after' do
+ let!(:created_at1) { 1.day.ago }
+ let!(:created_at2) { 2.days.ago }
+ let!(:created_at3) { 3.days.ago }
+
+ let!(:event1) { create(:resource_weight_event, issue: issue1, created_at: created_at1) }
+ let!(:event2) { create(:resource_weight_event, issue: issue2, created_at: created_at2) }
+ let!(:event3) { create(:resource_weight_event, issue: issue2, created_at: created_at3) }
+
+ it 'returns the expected events' do
+ events = ResourceWeightEvent.created_after(created_at3)
+
+ expect(events).to contain_exactly(event1, event2)
+ end
+
+ it 'returns no events if time is after last record time' do
+ events = ResourceWeightEvent.created_after(1.minute.ago)
+
+ expect(events).to be_empty
+ end
+ end
+
+ describe '#discussion_id' do
+ let_it_be(:event) { create(:resource_weight_event, issue: issue1, created_at: Time.utc(2019, 12, 30)) }
+
+ it 'returns the expected id' do
+ allow(Digest::SHA1).to receive(:hexdigest)
+ .with("ResourceWeightEvent-2019-12-30 00:00:00 UTC-#{user1.id}")
+ .and_return('73d167c478')
+
+ expect(event.discussion_id).to eq('73d167c478')
+ end
+ end
+end
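The new resource_weight_event_spec.rb relies on let_it_be, which comes from the test-prof gem (TestProf::LetItBe) that GitLab's spec_helper enables. Unlike let!, the record is inserted once per example group and reused by every example, which is why the users and issues above can be shared safely as long as no example mutates them. A minimal sketch, assuming that same test-prof integration plus the :issue and :note factories that appear elsewhere in this diff:

# frozen_string_literal: true

require 'spec_helper'

describe 'let_it_be vs let! (illustrative sketch)' do
  # Inserted once for the whole group by test-prof, then reused by each example.
  let_it_be(:issue) { create(:issue) }

  # Inserted again before every single example.
  let!(:note) { create(:note) }

  it 'exposes the shared record like a regular let' do
    expect(issue).to be_persisted
  end

  it 'sees the same shared row in later examples, as long as nothing mutates it' do
    expect(issue.reload).to be_persisted
  end
end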
diff --git a/spec/models/sent_notification_spec.rb b/spec/models/sent_notification_spec.rb
index 09be90b82ed..7539bf1e957 100644
--- a/spec/models/sent_notification_spec.rb
+++ b/spec/models/sent_notification_spec.rb
@@ -18,6 +18,7 @@ describe SentNotification do
context "when the project doesn't match the discussion project" do
let(:discussion_id) { create(:note).discussion_id }
+
subject { build(:sent_notification, in_reply_to_discussion_id: discussion_id) }
it "is invalid" do
@@ -29,6 +30,7 @@ describe SentNotification do
let(:project) { create(:project, :repository) }
let(:issue) { create(:issue, project: project) }
let(:discussion_id) { create(:note, project: project, noteable: issue).discussion_id }
+
subject { build(:sent_notification, project: project, noteable: issue, in_reply_to_discussion_id: discussion_id) }
it "is valid" do
@@ -196,6 +198,7 @@ describe SentNotification do
describe '#create_reply' do
context 'for issue' do
let(:issue) { create(:issue) }
+
subject { described_class.record(issue, issue.author.id) }
it 'creates a comment on the issue' do
@@ -206,6 +209,7 @@ describe SentNotification do
context 'for issue comment' do
let(:note) { create(:note_on_issue) }
+
subject { described_class.record_note(note, note.author.id) }
it 'creates a comment on the issue' do
@@ -217,6 +221,7 @@ describe SentNotification do
context 'for issue discussion' do
let(:note) { create(:discussion_note_on_issue) }
+
subject { described_class.record_note(note, note.author.id) }
it 'creates a reply on the discussion' do
@@ -228,6 +233,7 @@ describe SentNotification do
context 'for merge request' do
let(:merge_request) { create(:merge_request) }
+
subject { described_class.record(merge_request, merge_request.author.id) }
it 'creates a comment on the merge_request' do
@@ -238,6 +244,7 @@ describe SentNotification do
context 'for merge request comment' do
let(:note) { create(:note_on_merge_request) }
+
subject { described_class.record_note(note, note.author.id) }
it 'creates a comment on the merge request' do
@@ -249,6 +256,7 @@ describe SentNotification do
context 'for merge request diff discussion' do
let(:note) { create(:diff_note_on_merge_request) }
+
subject { described_class.record_note(note, note.author.id) }
it 'creates a reply on the discussion' do
@@ -260,6 +268,7 @@ describe SentNotification do
context 'for merge request non-diff discussion' do
let(:note) { create(:discussion_note_on_merge_request) }
+
subject { described_class.record_note(note, note.author.id) }
it 'creates a reply on the discussion' do
@@ -272,6 +281,7 @@ describe SentNotification do
context 'for commit' do
let(:project) { create(:project, :repository) }
let(:commit) { project.commit }
+
subject { described_class.record(commit, project.creator.id) }
it 'creates a comment on the commit' do
@@ -282,6 +292,7 @@ describe SentNotification do
context 'for commit comment' do
let(:note) { create(:note_on_commit) }
+
subject { described_class.record_note(note, note.author.id) }
it 'creates a comment on the commit' do
@@ -293,6 +304,7 @@ describe SentNotification do
context 'for commit diff discussion' do
let(:note) { create(:diff_note_on_commit) }
+
subject { described_class.record_note(note, note.author.id) }
it 'creates a reply on the discussion' do
@@ -304,6 +316,7 @@ describe SentNotification do
context 'for commit non-diff discussion' do
let(:note) { create(:discussion_note_on_commit) }
+
subject { described_class.record_note(note, note.author.id) }
it 'creates a reply on the discussion' do
diff --git a/spec/models/sentry_issue_spec.rb b/spec/models/sentry_issue_spec.rb
index 48f9adf64af..7dc1cea4617 100644
--- a/spec/models/sentry_issue_spec.rb
+++ b/spec/models/sentry_issue_spec.rb
@@ -13,6 +13,16 @@ describe SentryIssue do
it { is_expected.to validate_presence_of(:issue) }
it { is_expected.to validate_uniqueness_of(:issue) }
it { is_expected.to validate_presence_of(:sentry_issue_identifier) }
- it { is_expected.to validate_uniqueness_of(:sentry_issue_identifier).with_message("has already been taken") }
+ end
+
+ describe '.for_project_and_identifier' do
+ let!(:sentry_issue) { create(:sentry_issue) }
+ let(:project) { sentry_issue.issue.project }
+ let(:identifier) { sentry_issue.sentry_issue_identifier }
+ let!(:second_sentry_issue) { create(:sentry_issue) }
+
+ subject { described_class.for_project_and_identifier(project, identifier) }
+
+ it { is_expected.to eq(sentry_issue) }
end
end
diff --git a/spec/models/snippet_spec.rb b/spec/models/snippet_spec.rb
index 9c549a6d56d..ae43c0d585a 100644
--- a/spec/models/snippet_spec.rb
+++ b/spec/models/snippet_spec.rb
@@ -141,6 +141,7 @@ describe Snippet do
describe "#content_html_invalidated?" do
let(:snippet) { create(:snippet, content: "md", content_html: "html", file_name: "foo.md") }
+
it "invalidates the HTML cache of content when the filename changes" do
expect { snippet.file_name = "foo.rb" }.to change { snippet.content_html_invalidated? }.from(false).to(true)
end
diff --git a/spec/models/trending_project_spec.rb b/spec/models/trending_project_spec.rb
index 619fc8e7d38..4a248b71574 100644
--- a/spec/models/trending_project_spec.rb
+++ b/spec/models/trending_project_spec.rb
@@ -11,13 +11,9 @@ describe TrendingProject do
let(:internal_project) { create(:project, :internal) }
before do
- 3.times do
- create(:note_on_commit, project: public_project1)
- end
+ create_list(:note_on_commit, 3, project: public_project1)
- 2.times do
- create(:note_on_commit, project: public_project2)
- end
+ create_list(:note_on_commit, 2, project: public_project2)
create(:note_on_commit, project: public_project3, created_at: 5.weeks.ago)
create(:note_on_commit, project: private_project)
diff --git a/spec/models/uploads/fog_spec.rb b/spec/models/uploads/fog_spec.rb
index b93d9449da9..72a169280af 100644
--- a/spec/models/uploads/fog_spec.rb
+++ b/spec/models/uploads/fog_spec.rb
@@ -31,6 +31,7 @@ describe Uploads::Fog do
describe '#keys' do
let!(:uploads) { create_list(:upload, 2, :object_storage, uploader: FileUploader, model: project) }
+
subject { data_store.keys(relation) }
it 'returns keys' do
@@ -41,6 +42,7 @@ describe Uploads::Fog do
describe '#delete_keys' do
let(:keys) { data_store.keys(relation) }
let!(:uploads) { create_list(:upload, 2, :with_file, :issuable_upload, model: project) }
+
subject { data_store.delete_keys(keys) }
before do
diff --git a/spec/models/uploads/local_spec.rb b/spec/models/uploads/local_spec.rb
index 3468399f370..374c3019edc 100644
--- a/spec/models/uploads/local_spec.rb
+++ b/spec/models/uploads/local_spec.rb
@@ -15,6 +15,7 @@ describe Uploads::Local do
describe '#keys' do
let!(:uploads) { create_list(:upload, 2, uploader: FileUploader, model: project) }
+
subject { data_store.keys(relation) }
it 'returns keys' do
@@ -25,6 +26,7 @@ describe Uploads::Local do
describe '#delete_keys' do
let(:keys) { data_store.keys(relation) }
let!(:uploads) { create_list(:upload, 2, :with_file, :issuable_upload, model: project) }
+
subject { data_store.delete_keys(keys) }
it 'deletes multiple data' do
diff --git a/spec/models/user_interacted_project_spec.rb b/spec/models/user_interacted_project_spec.rb
index b96ff08e22d..e2c485343ae 100644
--- a/spec/models/user_interacted_project_spec.rb
+++ b/spec/models/user_interacted_project_spec.rb
@@ -11,6 +11,7 @@ describe UserInteractedProject do
Event::ACTIONS.each do |action|
context "for all actions (event types)" do
let(:event) { build(:event, action: action) }
+
it 'creates a record' do
expect { subject }.to change { described_class.count }.from(0).to(1)
end
diff --git a/spec/models/user_preference_spec.rb b/spec/models/user_preference_spec.rb
index e09c91e874a..bb88983e140 100644
--- a/spec/models/user_preference_spec.rb
+++ b/spec/models/user_preference_spec.rb
@@ -5,6 +5,12 @@ require 'spec_helper'
describe UserPreference do
let(:user_preference) { create(:user_preference) }
+ describe 'notes filters global keys' do
+ it 'contains expected values' do
+ expect(UserPreference::NOTES_FILTERS.keys).to match_array([:all_notes, :only_comments, :only_activity])
+ end
+ end
+
describe '#set_notes_filter' do
let(:issuable) { build_stubbed(:issue) }
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 58aa945bff0..5620f211d9c 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -147,15 +147,15 @@ describe User, :do_not_mock_admin_mode do
describe 'name' do
it { is_expected.to validate_presence_of(:name) }
- it { is_expected.to validate_length_of(:name).is_at_most(128) }
+ it { is_expected.to validate_length_of(:name).is_at_most(255) }
end
describe 'first name' do
- it { is_expected.to validate_length_of(:first_name).is_at_most(255) }
+ it { is_expected.to validate_length_of(:first_name).is_at_most(127) }
end
describe 'last name' do
- it { is_expected.to validate_length_of(:last_name).is_at_most(255) }
+ it { is_expected.to validate_length_of(:last_name).is_at_most(127) }
end
describe 'username' do
@@ -633,6 +633,27 @@ describe User, :do_not_mock_admin_mode do
end
end
end
+
+ describe '.active_without_ghosts' do
+ let_it_be(:user1) { create(:user, :external) }
+ let_it_be(:user2) { create(:user, state: 'blocked') }
+ let_it_be(:user3) { create(:user, ghost: true) }
+ let_it_be(:user4) { create(:user) }
+
+ it 'returns all active users but ghost users' do
+ expect(described_class.active_without_ghosts).to match_array([user1, user4])
+ end
+ end
+
+ describe '.without_ghosts' do
+ let_it_be(:user1) { create(:user, :external) }
+ let_it_be(:user2) { create(:user, state: 'blocked') }
+ let_it_be(:user3) { create(:user, ghost: true) }
+
+ it 'returns users without ghost users' do
+ expect(described_class.without_ghosts).to match_array([user1, user2])
+ end
+ end
end
describe "Respond to" do
@@ -1252,7 +1273,7 @@ describe User, :do_not_mock_admin_mode do
let(:user) { double }
it 'filters by active users by default' do
- expect(described_class).to receive(:active).and_return([user])
+ expect(described_class).to receive(:active_without_ghosts).and_return([user])
expect(described_class.filter_items(nil)).to include user
end
@@ -1991,6 +2012,19 @@ describe User, :do_not_mock_admin_mode do
expect(user.blocked?).to be_truthy
expect(user.ldap_blocked?).to be_truthy
end
+
+ context 'on a read-only instance' do
+ before do
+ allow(Gitlab::Database).to receive(:read_only?).and_return(true)
+ end
+
+ it 'does not block user' do
+ user.ldap_block
+
+ expect(user.blocked?).to be_falsey
+ expect(user.ldap_blocked?).to be_falsey
+ end
+ end
end
end
@@ -2390,6 +2424,7 @@ describe User, :do_not_mock_admin_mode do
describe '#authorizations_for_projects' do
let!(:user) { create(:user) }
+
subject { Project.where("EXISTS (?)", user.authorizations_for_projects) }
it 'includes projects that belong to a user, but no other projects' do
@@ -3700,6 +3735,7 @@ describe User, :do_not_mock_admin_mode do
describe '#required_terms_not_accepted?' do
let(:user) { build(:user) }
+
subject { user.required_terms_not_accepted? }
context "when terms are not enforced" do
diff --git a/spec/policies/ci/trigger_policy_spec.rb b/spec/policies/ci/trigger_policy_spec.rb
index e936277a391..28e5a2b2cd6 100644
--- a/spec/policies/ci/trigger_policy_spec.rb
+++ b/spec/policies/ci/trigger_policy_spec.rb
@@ -10,60 +10,6 @@ describe Ci::TriggerPolicy do
subject { described_class.new(user, trigger) }
describe '#rules' do
- context 'when owner is undefined' do
- before do
- stub_feature_flags(use_legacy_pipeline_triggers: false)
- trigger.update_attribute(:owner, nil)
- end
-
- context 'when user is maintainer of the project' do
- before do
- project.add_maintainer(user)
- end
-
- it { is_expected.to be_allowed(:manage_trigger) }
- it { is_expected.not_to be_allowed(:admin_trigger) }
- end
-
- context 'when user is developer of the project' do
- before do
- project.add_developer(user)
- end
-
- it { is_expected.not_to be_allowed(:manage_trigger) }
- it { is_expected.not_to be_allowed(:admin_trigger) }
- end
-
- context 'when :use_legacy_pipeline_triggers feature flag is enabled' do
- before do
- stub_feature_flags(use_legacy_pipeline_triggers: true)
- end
-
- context 'when user is maintainer of the project' do
- before do
- project.add_maintainer(user)
- end
-
- it { is_expected.to be_allowed(:manage_trigger) }
- it { is_expected.to be_allowed(:admin_trigger) }
- end
-
- context 'when user is developer of the project' do
- before do
- project.add_developer(user)
- end
-
- it { is_expected.not_to be_allowed(:manage_trigger) }
- it { is_expected.not_to be_allowed(:admin_trigger) }
- end
-
- context 'when user is not member of the project' do
- it { is_expected.not_to be_allowed(:manage_trigger) }
- it { is_expected.not_to be_allowed(:admin_trigger) }
- end
- end
- end
-
context 'when owner is an user' do
before do
trigger.update!(owner: user)
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index 188eafadfc1..e47204c774b 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -508,6 +508,34 @@ describe ProjectPolicy do
end
end
+ context 'forking a project' do
+ subject { described_class.new(current_user, project) }
+
+ context 'anonymous user' do
+ let(:current_user) { nil }
+
+ it { is_expected.to be_disallowed(:fork_project) }
+ end
+
+ context 'project member' do
+ let_it_be(:project) { create(:project, :private) }
+
+ context 'guest' do
+ let(:current_user) { guest }
+
+ it { is_expected.to be_disallowed(:fork_project) }
+ end
+
+ %w(reporter developer maintainer).each do |role|
+ context role do
+ let(:current_user) { send(role) }
+
+ it { is_expected.to be_allowed(:fork_project) }
+ end
+ end
+ end
+ end
+
describe 'update_max_artifacts_size' do
subject { described_class.new(current_user, project) }
diff --git a/spec/presenters/ci/build_runner_presenter_spec.rb b/spec/presenters/ci/build_runner_presenter_spec.rb
index 017e94d04f1..0635c318942 100644
--- a/spec/presenters/ci/build_runner_presenter_spec.rb
+++ b/spec/presenters/ci/build_runner_presenter_spec.rb
@@ -183,29 +183,81 @@ describe Ci::BuildRunnerPresenter do
let(:pipeline) { merge_request.all_pipelines.first }
let(:build) { create(:ci_build, ref: pipeline.ref, pipeline: pipeline) }
- it 'returns the correct refspecs' do
- is_expected
- .to contain_exactly('+refs/merge-requests/1/head:refs/merge-requests/1/head')
- end
-
- context 'when GIT_DEPTH is zero' do
+ context 'when depend_on_persistent_pipeline_ref feature flag is enabled' do
before do
- create(:ci_pipeline_variable, key: 'GIT_DEPTH', value: 0, pipeline: build.pipeline)
+ stub_feature_flags(ci_force_exposing_merge_request_refs: false)
+ pipeline.persistent_ref.create
end
it 'returns the correct refspecs' do
is_expected
- .to contain_exactly('+refs/merge-requests/1/head:refs/merge-requests/1/head',
- '+refs/heads/*:refs/remotes/origin/*',
- '+refs/tags/*:refs/tags/*')
+ .to contain_exactly("+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}")
+ end
+
+ context 'when ci_force_exposing_merge_request_refs feature flag is enabled' do
+ before do
+ stub_feature_flags(ci_force_exposing_merge_request_refs: true)
+ end
+
+ it 'returns the correct refspecs' do
+ is_expected
+ .to contain_exactly("+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
+ '+refs/merge-requests/1/head:refs/merge-requests/1/head')
+ end
+ end
+
+ context 'when GIT_DEPTH is zero' do
+ before do
+ create(:ci_pipeline_variable, key: 'GIT_DEPTH', value: 0, pipeline: build.pipeline)
+ end
+
+ it 'returns the correct refspecs' do
+ is_expected
+ .to contain_exactly("+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
+ '+refs/heads/*:refs/remotes/origin/*',
+ '+refs/tags/*:refs/tags/*')
+ end
+ end
+
+ context 'when pipeline is legacy detached merge request pipeline' do
+ let(:merge_request) { create(:merge_request, :with_legacy_detached_merge_request_pipeline) }
+
+ it 'returns the correct refspecs' do
+ is_expected.to contain_exactly("+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
+ "+refs/heads/#{build.ref}:refs/remotes/origin/#{build.ref}")
+ end
end
end
- context 'when pipeline is legacy detached merge request pipeline' do
- let(:merge_request) { create(:merge_request, :with_legacy_detached_merge_request_pipeline) }
+ context 'when depend_on_persistent_pipeline_ref feature flag is disabled' do
+ before do
+ stub_feature_flags(depend_on_persistent_pipeline_ref: false)
+ end
it 'returns the correct refspecs' do
- is_expected.to contain_exactly("+refs/heads/#{build.ref}:refs/remotes/origin/#{build.ref}")
+ is_expected
+ .to contain_exactly('+refs/merge-requests/1/head:refs/merge-requests/1/head')
+ end
+
+ context 'when GIT_DEPTH is zero' do
+ before do
+ create(:ci_pipeline_variable, key: 'GIT_DEPTH', value: 0, pipeline: build.pipeline)
+ end
+
+ it 'returns the correct refspecs' do
+ is_expected
+ .to contain_exactly('+refs/merge-requests/1/head:refs/merge-requests/1/head',
+ '+refs/heads/*:refs/remotes/origin/*',
+ '+refs/tags/*:refs/tags/*')
+ end
+ end
+
+ context 'when pipeline is legacy detached merge request pipeline' do
+ let(:merge_request) { create(:merge_request, :with_legacy_detached_merge_request_pipeline) }
+
+ it 'returns the correct refspecs' do
+ is_expected.to contain_exactly("+refs/heads/#{build.ref}:refs/remotes/origin/#{build.ref}")
+ end
end
end
end
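The refspec contexts above toggle behaviour with stub_feature_flags, the GitLab spec helper used throughout this diff. As a short sketch of the helper on its own, reusing the flag name from the hunk above; the assertions only demonstrate that the stub takes effect and are not part of the presenter spec:

# frozen_string_literal: true

require 'spec_helper'

describe 'stubbing feature flags (illustrative sketch)' do
  context 'when the flag is force-disabled' do
    before do
      stub_feature_flags(depend_on_persistent_pipeline_ref: false)
    end

    it 'reports the flag as off' do
      expect(Feature.enabled?(:depend_on_persistent_pipeline_ref)).to be(false)
    end
  end

  context 'when the flag is force-enabled' do
    before do
      stub_feature_flags(depend_on_persistent_pipeline_ref: true)
    end

    it 'reports the flag as on' do
      expect(Feature.enabled?(:depend_on_persistent_pipeline_ref)).to be(true)
    end
  end
end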
diff --git a/spec/presenters/project_presenter_spec.rb b/spec/presenters/project_presenter_spec.rb
index 318024bacd6..620ef3ff21a 100644
--- a/spec/presenters/project_presenter_spec.rb
+++ b/spec/presenters/project_presenter_spec.rb
@@ -4,11 +4,10 @@ require 'spec_helper'
describe ProjectPresenter do
let(:user) { create(:user) }
+ let(:project) { create(:project) }
+ let(:presenter) { described_class.new(project, current_user: user) }
describe '#license_short_name' do
- let(:project) { create(:project) }
- let(:presenter) { described_class.new(project, current_user: user) }
-
context 'when project.repository has a license_key' do
it 'returns the nickname of the license if present' do
allow(project.repository).to receive(:license_key).and_return('agpl-3.0')
@@ -33,13 +32,11 @@ describe ProjectPresenter do
end
describe '#default_view' do
- let(:presenter) { described_class.new(project, current_user: user) }
-
context 'user not signed in' do
- let(:user) { nil }
+ let_it_be(:user) { nil }
context 'when repository is empty' do
- let(:project) { create(:project_empty_repo, :public) }
+ let_it_be(:project) { create(:project_empty_repo, :public) }
it 'returns activity if user has repository access' do
allow(presenter).to receive(:can?).with(nil, :download_code, project).and_return(true)
@@ -55,7 +52,8 @@ describe ProjectPresenter do
end
context 'when repository is not empty' do
- let(:project) { create(:project, :public, :repository) }
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let(:release) { create(:release, project: project, author: user) }
it 'returns files and readme if user has repository access' do
allow(presenter).to receive(:can?).with(nil, :download_code, project).and_return(true)
@@ -68,6 +66,15 @@ describe ProjectPresenter do
expect(presenter.default_view).to eq('activity')
end
+
+ it 'returns releases anchor' do
+ expect(release).to be_truthy
+ expect(presenter.releases_anchor_data).to have_attributes(
+ is_link: true,
+ label: a_string_including("#{project.releases.count}"),
+ link: presenter.project_releases_path(project)
+ )
+ end
end
end
@@ -124,11 +131,8 @@ describe ProjectPresenter do
end
describe '#can_current_user_push_code?' do
- let(:project) { create(:project, :repository) }
- let(:presenter) { described_class.new(project, current_user: user) }
-
context 'empty repo' do
- let(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
it 'returns true if user can push_code' do
project.add_developer(user)
@@ -154,6 +158,7 @@ describe ProjectPresenter do
it 'returns false if default branch is protected' do
project.add_developer(user)
+
create(:protected_branch, project: project, name: project.default_branch)
expect(presenter.can_current_user_push_code?).to be(false)
@@ -162,75 +167,125 @@ describe ProjectPresenter do
end
context 'statistics anchors (empty repo)' do
- let(:project) { create(:project, :empty_repo) }
- let(:presenter) { described_class.new(project, current_user: user) }
+ let_it_be(:project) { create(:project, :empty_repo) }
describe '#files_anchor_data' do
it 'returns files data' do
- expect(presenter.files_anchor_data).to have_attributes(is_link: true,
- label: a_string_including('0 Bytes'),
- link: nil)
+ expect(presenter.files_anchor_data).to have_attributes(
+ is_link: true,
+ label: a_string_including('0 Bytes'),
+ link: nil
+ )
+ end
+ end
+
+ describe '#releases_anchor_data' do
+ it 'does not return release count' do
+ expect(presenter.releases_anchor_data).to be_nil
end
end
describe '#commits_anchor_data' do
it 'returns commits data' do
- expect(presenter.commits_anchor_data).to have_attributes(is_link: true,
- label: a_string_including('0'),
- link: nil)
+ expect(presenter.commits_anchor_data).to have_attributes(
+ is_link: true,
+ label: a_string_including('0'),
+ link: nil
+ )
end
end
describe '#branches_anchor_data' do
it 'returns branches data' do
- expect(presenter.branches_anchor_data).to have_attributes(is_link: true,
- label: a_string_including('0'),
- link: nil)
+ expect(presenter.branches_anchor_data).to have_attributes(
+ is_link: true,
+ label: a_string_including('0'),
+ link: nil
+ )
end
end
describe '#tags_anchor_data' do
it 'returns tags data' do
- expect(presenter.tags_anchor_data).to have_attributes(is_link: true,
- label: a_string_including('0'),
- link: nil)
+ expect(presenter.tags_anchor_data).to have_attributes(
+ is_link: true,
+ label: a_string_including('0'),
+ link: nil
+ )
end
end
end
context 'statistics anchors' do
- let(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:release) { create(:release, project: project, author: user) }
let(:presenter) { described_class.new(project, current_user: user) }
describe '#files_anchor_data' do
it 'returns files data' do
- expect(presenter.files_anchor_data).to have_attributes(is_link: true,
- label: a_string_including('0 Bytes'),
- link: presenter.project_tree_path(project))
+ expect(presenter.files_anchor_data).to have_attributes(
+ is_link: true,
+ label: a_string_including('0 Bytes'),
+ link: presenter.project_tree_path(project)
+ )
+ end
+ end
+
+ describe '#releases_anchor_data' do
+ it 'returns release count if user can read release' do
+ project.add_maintainer(user)
+
+ expect(release).to be_truthy
+ expect(presenter.releases_anchor_data).to have_attributes(
+ is_link: true,
+ label: a_string_including("#{project.releases.count}"),
+ link: presenter.project_releases_path(project)
+ )
+ end
+
+ it 'returns nil if user cannot read release' do
+ expect(release).to be_truthy
+ expect(presenter.releases_anchor_data).to be_nil
+ end
+
+ context 'user not signed in' do
+ let_it_be(:user) { nil }
+
+ it 'returns nil if user is signed out' do
+ expect(release).to be_truthy
+ expect(presenter.releases_anchor_data).to be_nil
+ end
end
end
describe '#commits_anchor_data' do
it 'returns commits data' do
- expect(presenter.commits_anchor_data).to have_attributes(is_link: true,
- label: a_string_including('0'),
- link: presenter.project_commits_path(project, project.repository.root_ref))
+ expect(presenter.commits_anchor_data).to have_attributes(
+ is_link: true,
+ label: a_string_including('0'),
+ link: presenter.project_commits_path(project, project.repository.root_ref)
+ )
end
end
describe '#branches_anchor_data' do
it 'returns branches data' do
- expect(presenter.branches_anchor_data).to have_attributes(is_link: true,
- label: a_string_including("#{project.repository.branches.size}"),
- link: presenter.project_branches_path(project))
+ expect(presenter.branches_anchor_data).to have_attributes(
+ is_link: true,
+ label: a_string_including("#{project.repository.branches.size}"),
+ link: presenter.project_branches_path(project)
+ )
end
end
describe '#tags_anchor_data' do
it 'returns tags data' do
- expect(presenter.tags_anchor_data).to have_attributes(is_link: true,
- label: a_string_including("#{project.repository.tags.size}"),
- link: presenter.project_tags_path(project))
+ expect(presenter.tags_anchor_data).to have_attributes(
+ is_link: true,
+ label: a_string_including("#{project.repository.tags.size}"),
+ link: presenter.project_tags_path(project)
+ )
end
end
@@ -238,10 +293,12 @@ describe ProjectPresenter do
it 'returns new file data if user can push' do
project.add_developer(user)
- expect(presenter.new_file_anchor_data).to have_attributes(is_link: false,
- label: a_string_including("New file"),
- link: presenter.project_new_blob_path(project, 'master'),
- class_modifier: 'success')
+ expect(presenter.new_file_anchor_data).to have_attributes(
+ is_link: false,
+ label: a_string_including("New file"),
+ link: presenter.project_new_blob_path(project, 'master'),
+ class_modifier: 'success'
+ )
end
it 'returns nil if user cannot push' do
@@ -249,7 +306,7 @@ describe ProjectPresenter do
end
context 'when the project is empty' do
- let(:project) { create(:project, :empty_repo) }
+ let_it_be(:project) { create(:project, :empty_repo) }
# Since we protect the default branch for empty repos
it 'is empty for a developer' do
@@ -264,11 +321,14 @@ describe ProjectPresenter do
context 'when user can push and README does not exists' do
it 'returns anchor data' do
project.add_developer(user)
+
allow(project.repository).to receive(:readme).and_return(nil)
- expect(presenter.readme_anchor_data).to have_attributes(is_link: false,
- label: a_string_including('Add README'),
- link: presenter.add_readme_path)
+ expect(presenter.readme_anchor_data).to have_attributes(
+ is_link: false,
+ label: a_string_including('Add README'),
+ link: presenter.add_readme_path
+ )
end
end
@@ -276,9 +336,11 @@ describe ProjectPresenter do
it 'returns anchor data' do
allow(project.repository).to receive(:readme).and_return(double(name: 'readme'))
- expect(presenter.readme_anchor_data).to have_attributes(is_link: false,
- label: a_string_including('README'),
- link: presenter.readme_path)
+ expect(presenter.readme_anchor_data).to have_attributes(
+ is_link: false,
+ label: a_string_including('README'),
+ link: presenter.readme_path
+ )
end
end
end
@@ -287,11 +349,14 @@ describe ProjectPresenter do
context 'when user can push and CHANGELOG does not exist' do
it 'returns anchor data' do
project.add_developer(user)
+
allow(project.repository).to receive(:changelog).and_return(nil)
- expect(presenter.changelog_anchor_data).to have_attributes(is_link: false,
- label: a_string_including('Add CHANGELOG'),
- link: presenter.add_changelog_path)
+ expect(presenter.changelog_anchor_data).to have_attributes(
+ is_link: false,
+ label: a_string_including('Add CHANGELOG'),
+ link: presenter.add_changelog_path
+ )
end
end
@@ -299,9 +364,11 @@ describe ProjectPresenter do
it 'returns anchor data' do
allow(project.repository).to receive(:changelog).and_return(double(name: 'foo'))
- expect(presenter.changelog_anchor_data).to have_attributes(is_link: false,
- label: a_string_including('CHANGELOG'),
- link: presenter.changelog_path)
+ expect(presenter.changelog_anchor_data).to have_attributes(
+ is_link: false,
+ label: a_string_including('CHANGELOG'),
+ link: presenter.changelog_path
+ )
end
end
end
@@ -310,11 +377,14 @@ describe ProjectPresenter do
context 'when user can push and LICENSE does not exist' do
it 'returns anchor data' do
project.add_developer(user)
+
allow(project.repository).to receive(:license_blob).and_return(nil)
- expect(presenter.license_anchor_data).to have_attributes(is_link: false,
- label: a_string_including('Add LICENSE'),
- link: presenter.add_license_path)
+ expect(presenter.license_anchor_data).to have_attributes(
+ is_link: false,
+ label: a_string_including('Add LICENSE'),
+ link: presenter.add_license_path
+ )
end
end
@@ -322,9 +392,11 @@ describe ProjectPresenter do
it 'returns anchor data' do
allow(project.repository).to receive(:license_blob).and_return(double(name: 'foo'))
- expect(presenter.license_anchor_data).to have_attributes(is_link: false,
- label: a_string_including(presenter.license_short_name),
- link: presenter.license_path)
+ expect(presenter.license_anchor_data).to have_attributes(
+ is_link: false,
+ label: a_string_including(presenter.license_short_name),
+ link: presenter.license_path
+ )
end
end
end
@@ -333,11 +405,14 @@ describe ProjectPresenter do
context 'when user can push and CONTRIBUTING does not exist' do
it 'returns anchor data' do
project.add_developer(user)
+
allow(project.repository).to receive(:contribution_guide).and_return(nil)
- expect(presenter.contribution_guide_anchor_data).to have_attributes(is_link: false,
- label: a_string_including('Add CONTRIBUTING'),
- link: presenter.add_contribution_guide_path)
+ expect(presenter.contribution_guide_anchor_data).to have_attributes(
+ is_link: false,
+ label: a_string_including('Add CONTRIBUTING'),
+ link: presenter.add_contribution_guide_path
+ )
end
end
@@ -345,9 +420,11 @@ describe ProjectPresenter do
it 'returns anchor data' do
allow(project.repository).to receive(:contribution_guide).and_return(double(name: 'foo'))
- expect(presenter.contribution_guide_anchor_data).to have_attributes(is_link: false,
- label: a_string_including('CONTRIBUTING'),
- link: presenter.contribution_guide_path)
+ expect(presenter.contribution_guide_anchor_data).to have_attributes(
+ is_link: false,
+ label: a_string_including('CONTRIBUTING'),
+ link: presenter.contribution_guide_path
+ )
end
end
end
@@ -357,21 +434,26 @@ describe ProjectPresenter do
it 'returns anchor data' do
allow(project).to receive(:auto_devops_enabled?).and_return(true)
- expect(presenter.autodevops_anchor_data).to have_attributes(is_link: false,
- label: a_string_including('Auto DevOps enabled'),
- link: nil)
+ expect(presenter.autodevops_anchor_data).to have_attributes(
+ is_link: false,
+ label: a_string_including('Auto DevOps enabled'),
+ link: nil
+ )
end
end
context 'when user can admin pipeline and CI yml does not exist' do
it 'returns anchor data' do
project.add_maintainer(user)
+
allow(project).to receive(:auto_devops_enabled?).and_return(false)
allow(project.repository).to receive(:gitlab_ci_yml).and_return(nil)
- expect(presenter.autodevops_anchor_data).to have_attributes(is_link: false,
- label: a_string_including('Enable Auto DevOps'),
- link: presenter.project_settings_ci_cd_path(project, anchor: 'autodevops-settings'))
+ expect(presenter.autodevops_anchor_data).to have_attributes(
+ is_link: false,
+ label: a_string_including('Enable Auto DevOps'),
+ link: presenter.project_settings_ci_cd_path(project, anchor: 'autodevops-settings')
+ )
end
end
end
@@ -380,29 +462,37 @@ describe ProjectPresenter do
context 'when user can create Kubernetes cluster' do
it 'returns link to cluster if only one exists' do
project.add_maintainer(user)
+
cluster = create(:cluster, projects: [project])
- expect(presenter.kubernetes_cluster_anchor_data).to have_attributes(is_link: false,
- label: a_string_including('Kubernetes configured'),
- link: presenter.project_cluster_path(project, cluster))
+ expect(presenter.kubernetes_cluster_anchor_data).to have_attributes(
+ is_link: false,
+ label: a_string_including('Kubernetes configured'),
+ link: presenter.project_cluster_path(project, cluster)
+ )
end
it 'returns link to clusters page if more than one exists' do
project.add_maintainer(user)
+
create(:cluster, :production_environment, projects: [project])
create(:cluster, projects: [project])
- expect(presenter.kubernetes_cluster_anchor_data).to have_attributes(is_link: false,
- label: a_string_including('Kubernetes configured'),
- link: presenter.project_clusters_path(project))
+ expect(presenter.kubernetes_cluster_anchor_data).to have_attributes(
+ is_link: false,
+ label: a_string_including('Kubernetes configured'),
+ link: presenter.project_clusters_path(project)
+ )
end
it 'returns link to create a cluster if no cluster exists' do
project.add_maintainer(user)
- expect(presenter.kubernetes_cluster_anchor_data).to have_attributes(is_link: false,
- label: a_string_including('Add Kubernetes cluster'),
- link: presenter.new_project_cluster_path(project))
+ expect(presenter.kubernetes_cluster_anchor_data).to have_attributes(
+ is_link: false,
+ label: a_string_including('Add Kubernetes cluster'),
+ link: presenter.new_project_cluster_path(project)
+ )
end
end
@@ -416,7 +506,6 @@ describe ProjectPresenter do
describe '#statistics_buttons' do
let(:project) { build(:project) }
- let(:presenter) { described_class.new(project, current_user: user) }
it 'orders the items correctly' do
allow(project.repository).to receive(:readme).and_return(double(name: 'readme'))
@@ -435,8 +524,6 @@ describe ProjectPresenter do
end
describe '#repo_statistics_buttons' do
- let(:presenter) { described_class.new(project, current_user: user) }
-
subject(:empty_repo_statistics_buttons) { presenter.empty_repo_statistics_buttons }
before do
@@ -473,7 +560,7 @@ describe ProjectPresenter do
end
context 'initialized repo' do
- let(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
it 'orders the items correctly' do
expect(empty_repo_statistics_buttons.map(&:label)).to start_with(
@@ -485,4 +572,73 @@ describe ProjectPresenter do
end
end
end
+
+ describe '#can_setup_review_app?' do
+ subject { presenter.can_setup_review_app? }
+
+ context 'when the ci/cd file is missing' do
+ before do
+ allow(presenter).to receive(:cicd_missing?).and_return(true)
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when the ci/cd file is not missing' do
+ before do
+ allow(presenter).to receive(:cicd_missing?).and_return(false)
+ end
+
+ context 'and the user can create a cluster' do
+ before do
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?).with(user, :create_cluster, project).and_return(true)
+ end
+
+ context 'and there is no cluster associated to this project' do
+ let(:project) { create(:project, clusters: []) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'and there is already a cluster associated to this project' do
+ let(:project) { create(:project, clusters: [build(:cluster, :providing_by_gcp)]) }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when a group cluster is instantiated' do
+ let_it_be(:cluster) { create(:cluster, :group) }
+ let_it_be(:group) { cluster.group }
+
+ context 'and the project belongs to this group' do
+ let!(:project) { create(:project, group: group) }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'and the project does not belong to this group' do
+ it { is_expected.to be_truthy }
+ end
+ end
+
+ context 'and there is already an instance cluster' do
+ it 'is false' do
+ create(:cluster, :instance)
+
+ is_expected.to be_falsey
+ end
+ end
+ end
+
+ context 'and the user cannot create a cluster' do
+ before do
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?).with(user, :create_cluster, project).and_return(false)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+ end
end
diff --git a/spec/requests/api/appearance_spec.rb b/spec/requests/api/appearance_spec.rb
new file mode 100644
index 00000000000..40fd216f32d
--- /dev/null
+++ b/spec/requests/api/appearance_spec.rb
@@ -0,0 +1,142 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe API::Appearance, 'Appearance' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:admin) { create(:admin) }
+
+ describe "GET /application/appearance" do
+ context 'as a non-admin user' do
+ it "returns 403" do
+ get api("/application/appearance", user)
+
+ expect(response).to have_gitlab_http_status(403)
+ end
+ end
+
+ context 'as an admin user' do
+ it "returns appearance" do
+ get api("/application/appearance", admin)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to be_a Hash
+ expect(json_response['description']).to eq('')
+ expect(json_response['email_header_and_footer_enabled']).to be(false)
+ expect(json_response['favicon']).to be_nil
+ expect(json_response['footer_message']).to eq('')
+ expect(json_response['header_logo']).to be_nil
+ expect(json_response['header_message']).to eq('')
+ expect(json_response['logo']).to be_nil
+ expect(json_response['message_background_color']).to eq('#E75E40')
+ expect(json_response['message_font_color']).to eq('#FFFFFF')
+ expect(json_response['new_project_guidelines']).to eq('')
+ expect(json_response['title']).to eq('')
+ end
+ end
+ end
+
+ describe "PUT /application/appearance" do
+ context 'as a non-admin user' do
+ it "returns 403" do
+ put api("/application/appearance", user), params: { title: "Test" }
+
+ expect(response).to have_gitlab_http_status(403)
+ end
+ end
+
+ context 'as an admin user' do
+ context "instance basics" do
+ it "allows updating the settings" do
+ put api("/application/appearance", admin), params: {
+ title: "GitLab Test Instance",
+ description: "gitlab-test.example.com",
+ new_project_guidelines: "Please read the FAQs for help."
+ }
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to be_a Hash
+ expect(json_response['description']).to eq('gitlab-test.example.com')
+ expect(json_response['email_header_and_footer_enabled']).to be(false)
+ expect(json_response['favicon']).to be_nil
+ expect(json_response['footer_message']).to eq('')
+ expect(json_response['header_logo']).to be_nil
+ expect(json_response['header_message']).to eq('')
+ expect(json_response['logo']).to be_nil
+ expect(json_response['message_background_color']).to eq('#E75E40')
+ expect(json_response['message_font_color']).to eq('#FFFFFF')
+ expect(json_response['new_project_guidelines']).to eq('Please read the FAQs for help.')
+ expect(json_response['title']).to eq('GitLab Test Instance')
+ end
+ end
+
+ context "system header and footer" do
+ it "allows updating the settings" do
+ settings = {
+ footer_message: "This is a Footer",
+ header_message: "This is a Header",
+ message_font_color: "#ffffff",
+ message_background_color: "#009999",
+ email_header_and_footer_enabled: true
+ }
+
+ put api("/application/appearance", admin), params: settings
+
+ expect(response).to have_gitlab_http_status(200)
+ settings.each do |attribute, value|
+ expect(Appearance.current.public_send(attribute)).to eq(value)
+ end
+ end
+
+ context "fails on invalid color values" do
+ it "with message_font_color" do
+ put api("/application/appearance", admin), params: { message_font_color: "No Color" }
+
+ expect(response).to have_gitlab_http_status(400)
+ expect(json_response['message']['message_font_color']).to contain_exactly('must be a valid color code')
+ end
+
+ it "with message_background_color" do
+ put api("/application/appearance", admin), params: { message_background_color: "#1" }
+
+ expect(response).to have_gitlab_http_status(400)
+ expect(json_response['message']['message_background_color']).to contain_exactly('must be a valid color code')
+ end
+ end
+ end
+
+ context "instance logos" do
+ let_it_be(:appearance) { create(:appearance) }
+
+ it "allows updating the image files" do
+ put api("/application/appearance", admin), params: {
+ logo: fixture_file_upload("spec/fixtures/dk.png", "image/png"),
+ header_logo: fixture_file_upload("spec/fixtures/dk.png", "image/png"),
+ favicon: fixture_file_upload("spec/fixtures/dk.png", "image/png")
+ }
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response['logo']).to eq("/uploads/-/system/appearance/logo/#{appearance.id}/dk.png")
+ expect(json_response['header_logo']).to eq("/uploads/-/system/appearance/header_logo/#{appearance.id}/dk.png")
+ expect(json_response['favicon']).to eq("/uploads/-/system/appearance/favicon/#{appearance.id}/dk.png")
+ end
+
+ context "fails on invalid color images" do
+ it "with string instead of file" do
+ put api("/application/appearance", admin), params: { logo: 'not-a-file.png' }
+
+ expect(response).to have_gitlab_http_status(400)
+ expect(json_response['error']).to eq("logo is invalid")
+ end
+
+ it "with .svg file instead of .png" do
+ put api("/application/appearance", admin), params: { favicon: fixture_file_upload("spec/fixtures/logo_sample.svg", "image/svg") }
+
+ expect(response).to have_gitlab_http_status(400)
+ expect(json_response['message']['favicon']).to contain_exactly("You are not allowed to upload \"svg\" files, allowed types: png, ico")
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/deployments_spec.rb b/spec/requests/api/deployments_spec.rb
index 3dc8e5749d4..d8fc234cbae 100644
--- a/spec/requests/api/deployments_spec.rb
+++ b/spec/requests/api/deployments_spec.rb
@@ -11,10 +11,10 @@ describe API::Deployments do
end
describe 'GET /projects/:id/deployments' do
- let(:project) { create(:project) }
+ let(:project) { create(:project, :repository) }
let!(:deployment_1) { create(:deployment, :success, project: project, iid: 11, ref: 'master', created_at: Time.now, updated_at: Time.now) }
- let!(:deployment_2) { create(:deployment, :success, project: project, iid: 12, ref: 'feature', created_at: 1.day.ago, updated_at: 2.hours.ago) }
- let!(:deployment_3) { create(:deployment, :success, project: project, iid: 8, ref: 'patch', created_at: 2.days.ago, updated_at: 1.hour.ago) }
+ let!(:deployment_2) { create(:deployment, :success, project: project, iid: 12, ref: 'master', created_at: 1.day.ago, updated_at: 2.hours.ago) }
+ let!(:deployment_3) { create(:deployment, :success, project: project, iid: 8, ref: 'master', created_at: 2.days.ago, updated_at: 1.hour.ago) }
context 'as member of the project' do
it 'returns projects deployments sorted by id asc' do
@@ -40,6 +40,18 @@ describe API::Deployments do
end
end
+ context 'with the environment filter specified' do
+ it 'returns deployments for the environment' do
+ get(
+ api("/projects/#{project.id}/deployments", user),
+ params: { environment: deployment_1.environment.name }
+ )
+
+ expect(json_response.size).to eq(1)
+ expect(json_response.first['iid']).to eq(deployment_1.iid)
+ end
+ end
+
describe 'ordering' do
let(:order_by) { 'iid' }
let(:sort) { 'desc' }
@@ -343,38 +355,70 @@ describe API::Deployments do
end
end
- context 'prevent N + 1 queries' do
- context 'when the endpoint returns multiple records' do
- let(:project) { create(:project) }
+ describe 'GET /projects/:id/deployments/:deployment_id/merge_requests' do
+ let(:project) { create(:project, :repository) }
+ let!(:deployment) { create(:deployment, :success, project: project) }
- def create_record
- create(:deployment, :success, project: project)
- end
+ subject { get api("/projects/#{project.id}/deployments/#{deployment.id}/merge_requests", user) }
+
+ context 'when a user is not a member of the deployment project' do
+ let(:user) { build(:user) }
+
+ it 'returns a 404 status code' do
+ subject
- def request_with_query_count
- ActiveRecord::QueryRecorder.new { trigger_request }.count
+ expect(response).to have_gitlab_http_status(404)
end
+ end
+
+ context 'when a user is a member of the deployment project' do
+ let_it_be(:project2) { create(:project) }
+ let!(:merge_request1) { create(:merge_request, source_project: project, target_project: project) }
+ let!(:merge_request2) { create(:merge_request, source_project: project, target_project: project, state: 'closed') }
+ let!(:merge_request3) { create(:merge_request, source_project: project2, target_project: project2) }
+
+ it 'returns the relevant merge requests linked to a deployment for a project' do
+ deployment.merge_requests << [merge_request1, merge_request2]
- def trigger_request
- get api("/projects/#{project.id}/deployments?order_by=updated_at&sort=asc", user)
+ subject
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response.map { |d| d['id'] }).to contain_exactly(merge_request1.id, merge_request2.id)
end
- before do
- create_record
+ context 'when a deployment is not associated with any existing merge requests' do
+ it 'returns an empty array' do
+ subject
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to eq([])
+ end
end
+ end
+ end
- it 'succeeds' do
- trigger_request
+ context 'prevent N + 1 queries' do
+ context 'when the endpoint returns multiple records' do
+ let(:project) { create(:project, :repository) }
+ let!(:deployment) { create(:deployment, :success, project: project) }
- expect(response).to have_gitlab_http_status(200)
+ subject { get api("/projects/#{project.id}/deployments?order_by=updated_at&sort=asc", user) }
+
+ it 'succeeds', :aggregate_failures do
+ subject
+ expect(response).to have_gitlab_http_status(200)
expect(json_response.size).to eq(1)
end
- it 'does not increase the query count' do
- expect { create_record }.not_to change { request_with_query_count }
+ context 'with 10 more records' do
+ it 'does not increase the query count', :aggregate_failures do
+ create_list(:deployment, 10, :success, project: project)
+
+ expect { subject }.not_to be_n_plus_1_query
- expect(json_response.size).to eq(2)
+ expect(json_response.size).to eq(11)
+ end
end
end
end
diff --git a/spec/requests/api/discussions_spec.rb b/spec/requests/api/discussions_spec.rb
index 68f7d407b54..f37a02e7135 100644
--- a/spec/requests/api/discussions_spec.rb
+++ b/spec/requests/api/discussions_spec.rb
@@ -49,6 +49,18 @@ describe API::Discussions do
it_behaves_like 'discussions API', 'projects', 'merge_requests', 'iid', can_reply_to_individual_notes: true
it_behaves_like 'diff discussions API', 'projects', 'merge_requests', 'iid'
it_behaves_like 'resolvable discussions API', 'projects', 'merge_requests', 'iid'
+
+ context "when position is for a previous commit on the merge request" do
+ it "returns a 400 bad request error because the line_code is old" do
+ # SHA taken from an earlier commit listed in spec/factories/merge_requests.rb
+ position = diff_note.position.to_h.merge(new_line: 'c1acaa58bbcbc3eafe538cb8274ba387047b69f8')
+
+ post api("/projects/#{project.id}/merge_requests/#{noteable['iid']}/discussions", user),
+ params: { body: 'hi!', position: position }
+
+ expect(response).to have_gitlab_http_status(400)
+ end
+ end
end
context 'when noteable is a Commit' do
diff --git a/spec/requests/api/environments_spec.rb b/spec/requests/api/environments_spec.rb
index aa273e97209..bdb0ef44038 100644
--- a/spec/requests/api/environments_spec.rb
+++ b/spec/requests/api/environments_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe API::Environments do
let(:user) { create(:user) }
let(:non_member) { create(:user) }
- let(:project) { create(:project, :private, namespace: user.namespace) }
+ let(:project) { create(:project, :private, :repository, namespace: user.namespace) }
let!(:environment) { create(:environment, project: project) }
before do
diff --git a/spec/requests/api/error_tracking_spec.rb b/spec/requests/api/error_tracking_spec.rb
new file mode 100644
index 00000000000..48ddc7f5a75
--- /dev/null
+++ b/spec/requests/api/error_tracking_spec.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe API::ErrorTracking do
+ describe "GET /projects/:id/error_tracking/settings" do
+ let(:user) { create(:user) }
+ let(:setting) { create(:project_error_tracking_setting) }
+ let(:project) { setting.project }
+
+ def make_request
+ get api("/projects/#{project.id}/error_tracking/settings", user)
+ end
+
+ context 'when authenticated as maintainer' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ it 'returns project settings' do
+ make_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq(
+ 'active' => setting.enabled,
+ 'project_name' => setting.project_name,
+ 'sentry_external_url' => setting.sentry_external_url,
+ 'api_url' => setting.api_url
+ )
+ end
+ end
+
+ context 'without a project setting' do
+ let(:project) { create(:project) }
+
+ before do
+ project.add_maintainer(user)
+ end
+
+ it 'returns 404' do
+ make_request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response['message'])
+ .to eq('404 Error Tracking Setting Not Found')
+ end
+ end
+
+ context 'when authenticated as reporter' do
+ before do
+ project.add_reporter(user)
+ end
+
+ it 'returns 403' do
+ make_request
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when authenticated as non-member' do
+ it 'returns 404' do
+ make_request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when unauthenticated' do
+ let(:user) { nil }
+
+ it 'returns 401' do
+ make_request
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/events_spec.rb b/spec/requests/api/events_spec.rb
index 9f8d254a00c..240f9a02877 100644
--- a/spec/requests/api/events_spec.rb
+++ b/spec/requests/api/events_spec.rb
@@ -8,6 +8,8 @@ describe API::Events do
let(:private_project) { create(:project, :private, creator_id: user.id, namespace: user.namespace) }
let(:closed_issue) { create(:closed_issue, project: private_project, author: user) }
let!(:closed_issue_event) { create(:event, project: private_project, author: user, target: closed_issue, action: Event::CLOSED, created_at: Date.new(2016, 12, 30)) }
+ let(:closed_issue2) { create(:closed_issue, project: private_project, author: non_member) }
+ let!(:closed_issue_event2) { create(:event, project: private_project, author: non_member, target: closed_issue2, action: Event::CLOSED, created_at: Date.new(2016, 12, 30)) }
describe 'GET /events' do
context 'when unauthenticated' do
@@ -27,6 +29,19 @@ describe API::Events do
expect(json_response).to be_an Array
expect(json_response.size).to eq(1)
end
+
+ context 'when scope is passed' do
+ it 'returns all events across projects' do
+ private_project.add_developer(non_member)
+
+ get api('/events?action=closed&target_type=issue&after=2016-12-1&before=2016-12-31&scope=all', user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.size).to eq(2)
+ end
+ end
end
context 'when the requesting token has "read_user" scope' do
diff --git a/spec/requests/api/graphql/mutations/snippets/mark_as_spam_spec.rb b/spec/requests/api/graphql/mutations/snippets/mark_as_spam_spec.rb
index 0e8fe4987b9..f80a3401134 100644
--- a/spec/requests/api/graphql/mutations/snippets/mark_as_spam_spec.rb
+++ b/spec/requests/api/graphql/mutations/snippets/mark_as_spam_spec.rb
@@ -52,8 +52,8 @@ describe 'Mark snippet as spam' do
end
it 'marks snippet as spam' do
- expect_next_instance_of(SpamService) do |instance|
- expect(instance).to receive(:mark_as_spam!)
+ expect_next_instance_of(Spam::MarkAsSpamService) do |instance|
+ expect(instance).to receive(:execute)
end
post_graphql_mutation(mutation, current_user: current_user)
diff --git a/spec/requests/api/graphql/project/error_tracking/sentry_detailed_error_request_spec.rb b/spec/requests/api/graphql/project/error_tracking/sentry_detailed_error_request_spec.rb
index d10380dab3a..664206dec29 100644
--- a/spec/requests/api/graphql/project/error_tracking/sentry_detailed_error_request_spec.rb
+++ b/spec/requests/api/graphql/project/error_tracking/sentry_detailed_error_request_spec.rb
@@ -56,6 +56,7 @@ describe 'getting a detailed sentry error' do
expect(error_data['status']).to eql sentry_detailed_error.status.upcase
expect(error_data['firstSeen']).to eql sentry_detailed_error.first_seen
expect(error_data['lastSeen']).to eql sentry_detailed_error.last_seen
+ expect(error_data['gitlabCommit']).to be nil
end
it 'is expected to return the frequency correctly' do
diff --git a/spec/requests/api/graphql/project/grafana_integration_spec.rb b/spec/requests/api/graphql/project/grafana_integration_spec.rb
new file mode 100644
index 00000000000..6075efb0cbd
--- /dev/null
+++ b/spec/requests/api/graphql/project/grafana_integration_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+describe 'Getting Grafana Integration' do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:current_user) { project.owner }
+ let_it_be(:grafana_integration) { create(:grafana_integration, project: project) }
+
+ let(:fields) do
+ <<~QUERY
+ #{all_graphql_fields_for('GrafanaIntegration'.classify)}
+ QUERY
+ end
+
+ let(:query) do
+ graphql_query_for(
+ 'project',
+ { 'fullPath' => project.full_path },
+ query_graphql_field('grafanaIntegration', {}, fields)
+ )
+ end
+
+ context 'with grafana integration data' do
+ let(:integration_data) { graphql_data['project']['grafanaIntegration'] }
+
+ context 'without project admin permissions' do
+ let(:user) { create(:user) }
+
+ before do
+ project.add_developer(user)
+ post_graphql(query, current_user: user)
+ end
+
+ it_behaves_like 'a working graphql query'
+
+ it { expect(integration_data).to be nil }
+ end
+
+ context 'with project admin permissions' do
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+
+ it_behaves_like 'a working graphql query'
+
+ it { expect(integration_data['token']).to eql grafana_integration.token }
+ it { expect(integration_data['grafanaUrl']).to eql grafana_integration.grafana_url }
+
+ it do
+ expect(
+ integration_data['createdAt']
+ ).to eql grafana_integration.created_at.strftime('%Y-%m-%dT%H:%M:%SZ')
+ end
+
+ it do
+ expect(
+ integration_data['updatedAt']
+ ).to eql grafana_integration.updated_at.strftime('%Y-%m-%dT%H:%M:%SZ')
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/groups_spec.rb b/spec/requests/api/groups_spec.rb
index a4f68df928f..35b77832c73 100644
--- a/spec/requests/api/groups_spec.rb
+++ b/spec/requests/api/groups_spec.rb
@@ -358,6 +358,7 @@ describe API::Groups do
expect(json_response['two_factor_grace_period']).to eq(group1.two_factor_grace_period)
expect(json_response['auto_devops_enabled']).to eq(group1.auto_devops_enabled)
expect(json_response['emails_disabled']).to eq(group1.emails_disabled)
+ expect(json_response['mentions_disabled']).to eq(group1.mentions_disabled)
expect(json_response['project_creation_level']).to eq('maintainer')
expect(json_response['subgroup_creation_level']).to eq('maintainer')
expect(json_response['web_url']).to eq(group1.web_url)
@@ -556,6 +557,7 @@ describe API::Groups do
expect(json_response['two_factor_grace_period']).to eq(48)
expect(json_response['auto_devops_enabled']).to eq(nil)
expect(json_response['emails_disabled']).to eq(nil)
+ expect(json_response['mentions_disabled']).to eq(nil)
expect(json_response['project_creation_level']).to eq("noone")
expect(json_response['subgroup_creation_level']).to eq("maintainer")
expect(json_response['request_access_enabled']).to eq(true)
diff --git a/spec/requests/api/internal/base_spec.rb b/spec/requests/api/internal/base_spec.rb
index ecbb81294a0..12e6e7c7a09 100644
--- a/spec/requests/api/internal/base_spec.rb
+++ b/spec/requests/api/internal/base_spec.rb
@@ -326,7 +326,7 @@ describe API::Internal::Base do
expect(json_response["gitaly"]["repository"]["relative_path"]).to eq(project.repository.gitaly_repository.relative_path)
expect(json_response["gitaly"]["address"]).to eq(Gitlab::GitalyClient.address(project.repository_storage))
expect(json_response["gitaly"]["token"]).to eq(Gitlab::GitalyClient.token(project.repository_storage))
- expect(json_response["gitaly"]["features"]).to eq('gitaly-feature-inforef-uploadpack-cache' => 'true', 'gitaly-feature-get-tag-messages-go' => 'true', 'gitaly-feature-filter-shas-with-signatures-go' => 'true')
+ expect(json_response["gitaly"]["features"]).to eq('gitaly-feature-inforef-uploadpack-cache' => 'true', 'gitaly-feature-get-tag-messages-go' => 'true', 'gitaly-feature-filter-shas-with-signatures-go' => 'true', 'gitaly-feature-cache-invalidator' => 'true')
expect(user.reload.last_activity_on).to eql(Date.today)
end
end
@@ -346,7 +346,7 @@ describe API::Internal::Base do
expect(json_response["gitaly"]["repository"]["relative_path"]).to eq(project.repository.gitaly_repository.relative_path)
expect(json_response["gitaly"]["address"]).to eq(Gitlab::GitalyClient.address(project.repository_storage))
expect(json_response["gitaly"]["token"]).to eq(Gitlab::GitalyClient.token(project.repository_storage))
- expect(json_response["gitaly"]["features"]).to eq('gitaly-feature-inforef-uploadpack-cache' => 'true', 'gitaly-feature-get-tag-messages-go' => 'true', 'gitaly-feature-filter-shas-with-signatures-go' => 'true')
+ expect(json_response["gitaly"]["features"]).to eq('gitaly-feature-inforef-uploadpack-cache' => 'true', 'gitaly-feature-get-tag-messages-go' => 'true', 'gitaly-feature-filter-shas-with-signatures-go' => 'true', 'gitaly-feature-cache-invalidator' => 'true')
expect(user.reload.last_activity_on).to be_nil
end
end
@@ -389,6 +389,12 @@ describe API::Internal::Base do
end
end
end
+
+ it_behaves_like 'storing arguments in the application context' do
+ let(:expected_params) { { user: key.user.username, project: project.full_path } }
+
+ subject { push(key, project) }
+ end
end
context "access denied" do
@@ -588,7 +594,7 @@ describe API::Internal::Base do
expect(json_response["gitaly"]["repository"]["relative_path"]).to eq(project.repository.gitaly_repository.relative_path)
expect(json_response["gitaly"]["address"]).to eq(Gitlab::GitalyClient.address(project.repository_storage))
expect(json_response["gitaly"]["token"]).to eq(Gitlab::GitalyClient.token(project.repository_storage))
- expect(json_response["gitaly"]["features"]).to eq('gitaly-feature-inforef-uploadpack-cache' => 'true', 'gitaly-feature-get-tag-messages-go' => 'true', 'gitaly-feature-filter-shas-with-signatures-go' => 'true')
+ expect(json_response["gitaly"]["features"]).to eq('gitaly-feature-inforef-uploadpack-cache' => 'true', 'gitaly-feature-get-tag-messages-go' => 'true', 'gitaly-feature-filter-shas-with-signatures-go' => 'true', 'gitaly-feature-cache-invalidator' => 'true')
end
end
@@ -885,6 +891,12 @@ describe API::Internal::Base do
post api('/internal/post_receive'), params: valid_params
end
+ it_behaves_like 'storing arguments in the application context' do
+ let(:expected_params) { { user: user.username, project: project.full_path } }
+
+ subject { post api('/internal/post_receive'), params: valid_params }
+ end
+
context 'when there are merge_request push options' do
before do
valid_params[:push_options] = ['merge_request.create']
@@ -1000,6 +1012,22 @@ describe API::Internal::Base do
it 'does not try to notify that project moved' do
allow_any_instance_of(Gitlab::Identifier).to receive(:identify).and_return(nil)
+ expect(Gitlab::Checks::ProjectMoved).not_to receive(:fetch_message)
+
+ post api('/internal/post_receive'), params: valid_params
+
+ expect(response).to have_gitlab_http_status(200)
+ end
+ end
+
+ context 'when project is nil' do
+ let(:gl_repository) { 'project-foo' }
+
+ it 'does not try to notify that project moved' do
+ allow(Gitlab::GlRepository).to receive(:parse).and_return([nil, Gitlab::GlRepository::PROJECT])
+
+ expect(Gitlab::Checks::ProjectMoved).not_to receive(:fetch_message)
+
post api('/internal/post_receive'), params: valid_params
expect(response).to have_gitlab_http_status(200)
diff --git a/spec/requests/api/issues/get_group_issues_spec.rb b/spec/requests/api/issues/get_group_issues_spec.rb
index 3ee08758f99..ef63902ffd7 100644
--- a/spec/requests/api/issues/get_group_issues_spec.rb
+++ b/spec/requests/api/issues/get_group_issues_spec.rb
@@ -688,5 +688,32 @@ describe API::Issues do
end
end
end
+
+ context "#to_reference" do
+ it 'exposes reference path in context of group' do
+ get api(base_url, user)
+
+ expect(json_response.first['references']['short']).to eq("##{group_closed_issue.iid}")
+ expect(json_response.first['references']['relative']).to eq("#{group_closed_issue.project.path}##{group_closed_issue.iid}")
+ expect(json_response.first['references']['full']).to eq("#{group_closed_issue.project.full_path}##{group_closed_issue.iid}")
+ end
+
+ context 'referencing from parent group' do
+ let(:parent_group) { create(:group) }
+
+ before do
+ group.update(parent_id: parent_group.id)
+ group_closed_issue.reload
+ end
+
+ it 'exposes reference path in context of parent group' do
+ get api("/groups/#{parent_group.id}/issues")
+
+ expect(json_response.first['references']['short']).to eq("##{group_closed_issue.iid}")
+ expect(json_response.first['references']['relative']).to eq("#{group_closed_issue.project.full_path}##{group_closed_issue.iid}")
+ expect(json_response.first['references']['full']).to eq("#{group_closed_issue.project.full_path}##{group_closed_issue.iid}")
+ end
+ end
+ end
end
end
diff --git a/spec/requests/api/issues/get_project_issues_spec.rb b/spec/requests/api/issues/get_project_issues_spec.rb
index 59aeb91edd2..e031cc9b0c6 100644
--- a/spec/requests/api/issues/get_project_issues_spec.rb
+++ b/spec/requests/api/issues/get_project_issues_spec.rb
@@ -299,6 +299,26 @@ describe API::Issues do
it_behaves_like 'labeled issues with labels and label_name params'
end
+ context 'with_labels_details' do
+ let(:label_b) { create(:label, title: 'foo', project: project) }
+ let(:label_c) { create(:label, title: 'bar', project: project) }
+
+ it 'avoids N+1 queries' do
+ control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ get api("/projects/#{project.id}/issues?with_labels_details=true", user)
+ end.count
+
+ new_issue = create(:issue, project: project)
+ create(:label_link, label: label, target: new_issue)
+ create(:label_link, label: label_b, target: new_issue)
+ create(:label_link, label: label_c, target: new_issue)
+
+ expect do
+ get api("/projects/#{project.id}/issues?with_labels_details=true", user)
+ end.not_to exceed_all_query_limit(control_count)
+ end
+ end
+
it 'returns issues matching given search string for title' do
get api("#{base_url}/issues?search=#{issue.title}", user)
diff --git a/spec/requests/api/issues/issues_spec.rb b/spec/requests/api/issues/issues_spec.rb
index 50a0a80b542..a3538aa98b1 100644
--- a/spec/requests/api/issues/issues_spec.rb
+++ b/spec/requests/api/issues/issues_spec.rb
@@ -805,6 +805,17 @@ describe API::Issues do
end
end
+ describe 'GET /projects/:id/issues/:issue_iid' do
+ it 'exposes full reference path' do
+ get api("/projects/#{project.id}/issues/#{issue.iid}", user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response['references']['short']).to eq("##{issue.iid}")
+ expect(json_response['references']['relative']).to eq("##{issue.iid}")
+ expect(json_response['references']['full']).to eq("#{project.parent.path}/#{project.path}##{issue.iid}")
+ end
+ end
+
describe 'DELETE /projects/:id/issues/:issue_iid' do
it 'rejects a non member from deleting an issue' do
delete api("/projects/#{project.id}/issues/#{issue.iid}", non_member)
diff --git a/spec/requests/api/issues/post_projects_issues_spec.rb b/spec/requests/api/issues/post_projects_issues_spec.rb
index e9f678d164e..67404cf10df 100644
--- a/spec/requests/api/issues/post_projects_issues_spec.rb
+++ b/spec/requests/api/issues/post_projects_issues_spec.rb
@@ -160,6 +160,16 @@ describe API::Issues do
expect(json_response['iid']).not_to eq 9001
end
end
+
+ context 'when an issue with the same IID already exists in the database' do
+ it 'returns 409' do
+ post api("/projects/#{project.id}/issues", admin),
+ params: { title: 'new issue', iid: issue.iid }
+
+ expect(response).to have_gitlab_http_status(409)
+ expect(json_response['message']).to eq 'Duplicated issue'
+ end
+ end
end
it 'creates a new project issue' do
diff --git a/spec/requests/api/jobs_spec.rb b/spec/requests/api/jobs_spec.rb
index 82bf607b911..1e1099ebcb6 100644
--- a/spec/requests/api/jobs_spec.rb
+++ b/spec/requests/api/jobs_spec.rb
@@ -244,7 +244,7 @@ describe API::Jobs do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), params: query
end.count
- 3.times { create(:ci_build, :trace_artifact, :artifacts, :test_reports, pipeline: pipeline) }
+ create_list(:ci_build, 3, :trace_artifact, :artifacts, :test_reports, pipeline: pipeline)
expect do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), params: query
diff --git a/spec/requests/api/keys_spec.rb b/spec/requests/api/keys_spec.rb
index f7da1abcfdf..c743cb3f633 100644
--- a/spec/requests/api/keys_spec.rb
+++ b/spec/requests/api/keys_spec.rb
@@ -106,6 +106,36 @@ describe API::Keys do
expect(json_response['user']['is_admin']).to be_nil
end
+
+ context 'when searching a DeployKey' do
+ let(:project) { create(:project, :repository) }
+ let(:project_push) { create(:project, :repository) }
+ let(:deploy_key) { create(:deploy_key) }
+
+ let!(:deploy_keys_project) do
+ create(:deploy_keys_project, project: project, deploy_key: deploy_key)
+ end
+
+ let!(:deploy_keys_project_push) do
+ create(:deploy_keys_project, project: project_push, deploy_key: deploy_key, can_push: true)
+ end
+
+ it 'returns the user and projects if an SSH SHA256 fingerprint for a DeployKey is found' do
+ user.keys << deploy_key
+
+ get api("/keys?fingerprint=#{URI.encode_www_form_component("SHA256:" + deploy_key.fingerprint_sha256)}", admin)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response['title']).to eq(deploy_key.title)
+ expect(json_response['user']['id']).to eq(user.id)
+
+ expect(json_response['deploy_keys_projects'].count).to eq(2)
+ expect(json_response['deploy_keys_projects'][0]['project_id']).to eq(deploy_keys_project.project.id)
+ expect(json_response['deploy_keys_projects'][0]['can_push']).to eq(deploy_keys_project.can_push)
+ expect(json_response['deploy_keys_projects'][1]['project_id']).to eq(deploy_keys_project_push.project.id)
+ expect(json_response['deploy_keys_projects'][1]['can_push']).to eq(deploy_keys_project_push.can_push)
+ end
+ end
end
end
end
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index e5ad1a6378e..ae0596bea98 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -88,6 +88,34 @@ describe API::MergeRequests do
expect(json_response.first['merge_commit_sha']).not_to be_nil
expect(json_response.first['merge_commit_sha']).to eq(merge_request_merged.merge_commit_sha)
end
+
+ context 'with labels_details' do
+ it 'returns labels with details' do
+ path = endpoint_path + "?with_labels_details=true"
+
+ get api(path, user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response.last['labels'].pluck('name')).to eq([label2.title, label.title])
+ expect(json_response.last['labels'].first).to match_schema('/public_api/v4/label_basic')
+ end
+
+ it 'avoids N+1 queries' do
+ path = endpoint_path + "?with_labels_details=true"
+
+ control = ActiveRecord::QueryRecorder.new do
+ get api(path, user)
+ end.count
+
+ mr = create(:merge_request)
+ create(:label_link, label: label, target: mr)
+ create(:label_link, label: label2, target: mr)
+
+ expect do
+ get api(path, user)
+ end.not_to exceed_query_limit(control)
+ end
+ end
end
it 'returns an array of all merge_requests using simple mode' do
@@ -736,6 +764,33 @@ describe API::MergeRequests do
it_behaves_like 'merge requests list'
end
+
+ context "#to_reference" do
+ it 'exposes reference path in context of group' do
+ get api("/groups/#{group.id}/merge_requests", user)
+
+ expect(json_response.first['references']['short']).to eq("!#{merge_request_merged.iid}")
+ expect(json_response.first['references']['relative']).to eq("#{merge_request_merged.target_project.path}!#{merge_request_merged.iid}")
+ expect(json_response.first['references']['full']).to eq("#{merge_request_merged.target_project.full_path}!#{merge_request_merged.iid}")
+ end
+
+ context 'referencing from parent group' do
+ let(:parent_group) { create(:group) }
+
+ before do
+ group.update(parent_id: parent_group.id)
+ merge_request_merged.reload
+ end
+
+ it 'exposes reference path in context of parent group' do
+ get api("/groups/#{parent_group.id}/merge_requests")
+
+ expect(json_response.first['references']['short']).to eq("!#{merge_request_merged.iid}")
+ expect(json_response.first['references']['relative']).to eq("#{merge_request_merged.target_project.full_path}!#{merge_request_merged.iid}")
+ expect(json_response.first['references']['full']).to eq("#{merge_request_merged.target_project.full_path}!#{merge_request_merged.iid}")
+ end
+ end
+ end
end
describe "GET /projects/:id/merge_requests/:merge_request_iid" do
@@ -783,6 +838,9 @@ describe API::MergeRequests do
expect(json_response).not_to include('rebase_in_progress')
expect(json_response['has_conflicts']).to be_falsy
expect(json_response['blocking_discussions_resolved']).to be_truthy
+ expect(json_response['references']['short']).to eq("!#{merge_request.iid}")
+ expect(json_response['references']['relative']).to eq("!#{merge_request.iid}")
+ expect(json_response['references']['full']).to eq("#{merge_request.target_project.full_path}!#{merge_request.iid}")
end
it 'exposes description and title html when render_html is true' do
@@ -1491,7 +1549,7 @@ describe API::MergeRequests do
end
end
- describe "PUT /projects/:id/merge_requests/:merge_request_iid/merge" do
+ describe "PUT /projects/:id/merge_requests/:merge_request_iid/merge", :clean_gitlab_redis_cache do
let(:pipeline) { create(:ci_pipeline) }
it "returns merge_request in case of success" do
@@ -1579,6 +1637,15 @@ describe API::MergeRequests do
expect(merge_request.reload.state).to eq('opened')
end
+ it 'merges if the head pipeline already succeeded and `merge_when_pipeline_succeeds` is passed' do
+ create(:ci_pipeline, :success, sha: merge_request.diff_head_sha, merge_requests_as_head_pipeline: [merge_request])
+
+ put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user), params: { merge_when_pipeline_succeeds: true }
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response['state']).to eq('merged')
+ end
+
it "enables merge when pipeline succeeds if the pipeline is active" do
allow_any_instance_of(MergeRequest).to receive_messages(head_pipeline: pipeline, actual_head_pipeline: pipeline)
allow(pipeline).to receive(:active?).and_return(true)
@@ -2155,16 +2222,34 @@ describe API::MergeRequests do
end
describe 'PUT :id/merge_requests/:merge_request_iid/rebase' do
- it 'enqueues a rebase of the merge request against the target branch' do
- Sidekiq::Testing.fake! do
- put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/rebase", user)
+ context 'when rebase can be performed' do
+ it 'enqueues a rebase of the merge request against the target branch' do
+ Sidekiq::Testing.fake! do
+ expect do
+ put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/rebase", user)
+ end.to change { RebaseWorker.jobs.size }.by(1)
+ end
+
+ expect(response).to have_gitlab_http_status(202)
+ expect(merge_request.reload).to be_rebase_in_progress
+ expect(json_response['rebase_in_progress']).to be(true)
end
- expect(response).to have_gitlab_http_status(202)
- expect(RebaseWorker.jobs.size).to eq(1)
+ context 'when skip_ci parameter is set' do
+ it 'enqueues a rebase of the merge request with skip_ci flag set' do
+ expect(RebaseWorker).to receive(:perform_async).with(merge_request.id, user.id, true).and_call_original
- expect(merge_request.reload).to be_rebase_in_progress
- expect(json_response['rebase_in_progress']).to be(true)
+ Sidekiq::Testing.fake! do
+ expect do
+ put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/rebase", user), params: { skip_ci: true }
+ end.to change { RebaseWorker.jobs.size }.by(1)
+ end
+
+ expect(response).to have_gitlab_http_status(202)
+ expect(merge_request.reload).to be_rebase_in_progress
+ expect(json_response['rebase_in_progress']).to be(true)
+ end
+ end
end
it 'returns 403 if the user cannot push to the branch' do
@@ -2193,7 +2278,7 @@ describe API::MergeRequests do
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/rebase", user)
expect(response).to have_gitlab_http_status(409)
- expect(json_response['message']).to eq(MergeRequest::REBASE_LOCK_MESSAGE)
+ expect(json_response['message']).to eq('Failed to enqueue the rebase operation, possibly due to a long-lived transaction. Try again later.')
end
end
diff --git a/spec/requests/api/notes_spec.rb b/spec/requests/api/notes_spec.rb
index cc2038a7245..b4416344ecf 100644
--- a/spec/requests/api/notes_spec.rb
+++ b/spec/requests/api/notes_spec.rb
@@ -101,6 +101,75 @@ describe API::Notes do
expect(json_response.first['body']).to eq(cross_reference_note.note)
end
end
+
+ context "activity filters" do
+ let!(:user_reference_note) do
+ create :note,
+ noteable: ext_issue, project: ext_proj,
+ note: "Hello there general!",
+ system: false
+ end
+
+ let(:test_url) {"/projects/#{ext_proj.id}/issues/#{ext_issue.iid}/notes"}
+
+ shared_examples 'a notes request' do
+ it 'is a note array response' do
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ end
+ end
+
+ context "when not provided" do
+ let(:count) { 2 }
+
+ before do
+ get api(test_url, private_user)
+ end
+
+ it_behaves_like 'a notes request'
+
+ it 'returns all the notes' do
+ expect(json_response.count).to eq(count)
+ end
+ end
+
+ context "when all_notes provided" do
+ let(:count) { 2 }
+
+ before do
+ get api(test_url + "?activity_filter=all_notes", private_user)
+ end
+
+ it_behaves_like 'a notes request'
+
+ it 'returns all the notes' do
+ expect(json_response.count).to eq(count)
+ end
+ end
+
+ context "when provided" do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:filter, :count, :system_notable) do
+ "only_comments" | 1 | false
+ "only_activity" | 1 | true
+ end
+
+ with_them do
+ before do
+ get api(test_url + "?activity_filter=#{filter}", private_user)
+ end
+
+ it_behaves_like 'a notes request'
+
+ it "properly filters the returned notables" do
+ expect(json_response.count).to eq(count)
+ expect(json_response.first["system"]).to be system_notable
+ end
+ end
+ end
+ end
end
describe "GET /projects/:id/noteable/:noteable_id/notes/:note_id" do
diff --git a/spec/requests/api/pipelines_spec.rb b/spec/requests/api/pipelines_spec.rb
index a9d570b5696..75e3013d362 100644
--- a/spec/requests/api/pipelines_spec.rb
+++ b/spec/requests/api/pipelines_spec.rb
@@ -254,9 +254,7 @@ describe API::Pipelines do
context 'when order_by and sort are specified' do
context 'when order_by user_id' do
before do
- 3.times do
- create(:ci_pipeline, project: project, user: create(:user))
- end
+ create_list(:ci_pipeline, 3, project: project, user: create(:user))
end
context 'when sort parameter is valid' do
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index 9af4f484f99..fce49d0248c 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -570,6 +570,102 @@ describe API::Projects do
let(:projects) { Project.all }
end
end
+
+ context 'with keyset pagination' do
+ let(:current_user) { user }
+ let(:projects) { [public_project, project, project2, project3] }
+
+ context 'headers and records' do
+ let(:params) { { pagination: 'keyset', order_by: :id, sort: :asc, per_page: 1 } }
+
+ it 'includes a pagination header with link to the next page' do
+ get api('/projects', current_user), params: params
+
+ expect(response.header).to include('Links')
+ expect(response.header['Links']).to include('pagination=keyset')
+ expect(response.header['Links']).to include("id_after=#{public_project.id}")
+ end
+
+ it 'contains only the first project with per_page = 1' do
+ get api('/projects', current_user), params: params
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to be_an Array
+ expect(json_response.map { |p| p['id'] }).to contain_exactly(public_project.id)
+ end
+
+ it 'still includes a link if the end has been reached and there is no more data after this page' do
+ get api('/projects', current_user), params: params.merge(id_after: project2.id)
+
+ expect(response.header).to include('Links')
+ expect(response.header['Links']).to include('pagination=keyset')
+ expect(response.header['Links']).to include("id_after=#{project3.id}")
+ end
+
+ it 'does not include a next link when the page does not have any records' do
+ get api('/projects', current_user), params: params.merge(id_after: Project.maximum(:id))
+
+ expect(response.header).not_to include('Links')
+ end
+
+ it 'returns an empty array when the page does not have any records' do
+ get api('/projects', current_user), params: params.merge(id_after: Project.maximum(:id))
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to eq([])
+ end
+
+ it 'responds with 405 if order_by is different from id' do
+ get api('/projects', current_user), params: params.merge(order_by: :created_at)
+
+ expect(response).to have_gitlab_http_status(405)
+ end
+ end
+
+ context 'with descending sorting' do
+ let(:params) { { pagination: 'keyset', order_by: :id, sort: :desc, per_page: 1 } }
+
+ it 'includes a pagination header with link to the next page' do
+ get api('/projects', current_user), params: params
+
+ expect(response.header).to include('Links')
+ expect(response.header['Links']).to include('pagination=keyset')
+ expect(response.header['Links']).to include("id_before=#{project3.id}")
+ end
+
+ it 'contains only the last project with per_page = 1' do
+ get api('/projects', current_user), params: params
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to be_an Array
+ expect(json_response.map { |p| p['id'] }).to contain_exactly(project3.id)
+ end
+ end
+
+ context 'retrieving the full relation' do
+ let(:params) { { pagination: 'keyset', order_by: :id, sort: :desc, per_page: 2 } }
+
+ it 'returns all projects' do
+ url = '/projects'
+ requests = 0
+ ids = []
+
+ while url && requests <= 5 # circuit breaker
+ requests += 1
+ get api(url, current_user), params: params
+
+ links = response.header['Links']
+ url = links&.match(/<[^>]+(\/projects\?[^>]+)>; rel="next"/) do |match|
+ match[1]
+ end
+
+ ids += JSON.parse(response.body).map { |p| p['id'] }
+ end
+
+ expect(ids).to contain_exactly(*projects.map(&:id))
+ end
+ end
+ end
end
describe 'POST /projects' do
@@ -635,6 +731,7 @@ describe API::Projects do
wiki_enabled: false,
resolve_outdated_diff_discussions: false,
remove_source_branch_after_merge: true,
+ autoclose_referenced_issues: true,
only_allow_merge_if_pipeline_succeeds: false,
request_access_enabled: true,
only_allow_merge_if_all_discussions_are_resolved: false,
@@ -807,6 +904,22 @@ describe API::Projects do
expect(json_response['only_allow_merge_if_all_discussions_are_resolved']).to be_truthy
end
+ it 'sets a project to auto-close referenced issues' do
+ project = attributes_for(:project, autoclose_referenced_issues: true)
+
+ post api('/projects', user), params: project
+
+ expect(json_response['autoclose_referenced_issues']).to be_truthy
+ end
+
+ it 'sets a project not to auto-close referenced issues' do
+ project = attributes_for(:project, autoclose_referenced_issues: false)
+
+ post api('/projects', user), params: project
+
+ expect(json_response['autoclose_referenced_issues']).to be_falsey
+ end
+
it 'sets the merge method of a project to rebase merge' do
project = attributes_for(:project, merge_method: 'rebase_merge')
@@ -1626,6 +1739,14 @@ describe API::Projects do
end
end
end
+
+ it_behaves_like 'storing arguments in the application context' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public) }
+ let(:expected_params) { { user: user.username, project: project.full_path } }
+
+ subject { get api("/projects/#{project.id}", user) }
+ end
end
describe 'GET /projects/:id/users' do
@@ -2226,6 +2347,22 @@ describe API::Projects do
put api("/projects/#{project3.id}", user4), params: project_param
expect(response).to have_gitlab_http_status(403)
end
+
+ it 'updates container_expiration_policy' do
+ project_param = {
+ container_expiration_policy_attributes: {
+ cadence: '1month',
+ keep_n: 1
+ }
+ }
+
+ put api("/projects/#{project3.id}", user4), params: project_param
+
+ expect(response).to have_gitlab_http_status(200)
+
+ expect(json_response['container_expiration_policy']['cadence']).to eq('1month')
+ expect(json_response['container_expiration_policy']['keep_n']).to eq(1)
+ end
end
context 'when authenticated as project developer' do
@@ -2721,6 +2858,20 @@ describe API::Projects do
expect(json_response['message']).to eq('401 Unauthorized')
end
end
+
+ context 'forking disabled' do
+ before do
+ project.project_feature.update_attribute(
+ :forking_access_level, ProjectFeature::DISABLED)
+ end
+
+ it 'does not allow the project to be forked' do
+ post api("/projects/#{project.id}/fork", admin)
+
+ expect(response).to have_gitlab_http_status(409)
+ expect(json_response['message']['forked_from_project_id']).to eq(['is forbidden'])
+ end
+ end
end
describe 'POST /projects/:id/housekeeping' do
diff --git a/spec/requests/api/remote_mirrors_spec.rb b/spec/requests/api/remote_mirrors_spec.rb
index c5ba9bd223e..065d9c7ca5b 100644
--- a/spec/requests/api/remote_mirrors_spec.rb
+++ b/spec/requests/api/remote_mirrors_spec.rb
@@ -5,14 +5,13 @@ require 'spec_helper'
describe API::RemoteMirrors do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository, :remote_mirror) }
+ let_it_be(:developer) { create(:user) { |u| project.add_developer(u) } }
describe 'GET /projects/:id/remote_mirrors' do
let(:route) { "/projects/#{project.id}/remote_mirrors" }
it 'requires `admin_remote_mirror` permission' do
- project.add_developer(user)
-
- get api(route, user)
+ get api(route, developer)
expect(response).to have_gitlab_http_status(:unauthorized)
end
@@ -26,6 +25,7 @@ describe API::RemoteMirrors do
expect(response).to match_response_schema('remote_mirrors')
end
+ # TODO: Remove flag: https://gitlab.com/gitlab-org/gitlab/issues/38121
context 'with the `remote_mirrors_api` feature disabled' do
before do
stub_feature_flags(remote_mirrors_api: false)
@@ -38,4 +38,41 @@ describe API::RemoteMirrors do
end
end
end
+
+ describe 'PUT /projects/:id/remote_mirrors/:mirror_id' do
+ let(:route) { ->(id) { "/projects/#{project.id}/remote_mirrors/#{id}" } }
+ let(:mirror) { project.remote_mirrors.first }
+
+ it 'requires `admin_remote_mirror` permission' do
+ put api(route[mirror.id], developer)
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+
+ it 'updates a remote mirror' do
+ project.add_maintainer(user)
+
+ put api(route[mirror.id], user), params: {
+ enabled: '0',
+ only_protected_branches: 'true'
+ }
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(json_response['enabled']).to eq(false)
+ expect(json_response['only_protected_branches']).to eq(true)
+ end
+
+ # TODO: Remove flag: https://gitlab.com/gitlab-org/gitlab/issues/38121
+ context 'with the `remote_mirrors_api` feature disabled' do
+ before do
+ stub_feature_flags(remote_mirrors_api: false)
+ end
+
+ it 'responds with `not_found`' do
+ put api(route[mirror.id], user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
end
diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb
index cc6cadb190a..a313f75e3ec 100644
--- a/spec/requests/api/runner_spec.rb
+++ b/spec/requests/api/runner_spec.rb
@@ -1154,6 +1154,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
expect(job.reload.trace.raw).to eq 'BUILD TRACE appended'
expect(response.header).to have_key 'Range'
expect(response.header).to have_key 'Job-Status'
+ expect(response.header).to have_key 'X-GitLab-Trace-Update-Interval'
end
context 'when job has been updated recently' do
@@ -1291,6 +1292,41 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
expect(response.header['Job-Status']).to eq 'canceled'
end
end
+
+ context 'when build trace is being watched' do
+ before do
+ job.trace.being_watched!
+ end
+
+ it 'returns X-GitLab-Trace-Update-Interval as 3' do
+ patch_the_trace
+
+ expect(response.status).to eq 202
+ expect(response.header['X-GitLab-Trace-Update-Interval']).to eq('3')
+ end
+ end
+
+ context 'when build trace is not being watched' do
+ it 'returns X-GitLab-Trace-Update-Interval as 30' do
+ patch_the_trace
+
+ expect(response.status).to eq 202
+ expect(response.header['X-GitLab-Trace-Update-Interval']).to eq('30')
+ end
+ end
+
+ context 'when feature flag runner_job_trace_update_interval_header is disabled' do
+ before do
+ stub_feature_flags(runner_job_trace_update_interval_header: { enabled: false })
+ end
+
+ it 'does not return X-GitLab-Trace-Update-Interval header' do
+ patch_the_trace
+
+ expect(response.status).to eq 202
+ expect(response.header).not_to have_key 'X-GitLab-Trace-Update-Interval'
+ end
+ end
end
context 'when Runner makes a force-patch' do
@@ -1792,6 +1828,58 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
end
end
end
+
+ context 'when artifact_type is metrics_referee' do
+ context 'when artifact_format is gzip' do
+ let(:file_upload) { fixture_file_upload('spec/fixtures/referees/metrics_referee.json.gz') }
+ let(:params) { { artifact_type: :metrics_referee, artifact_format: :gzip } }
+
+ it 'stores metrics_referee data' do
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(201)
+ expect(job.reload.job_artifacts_metrics_referee).not_to be_nil
+ end
+ end
+
+ context 'when artifact_format is raw' do
+ let(:file_upload) { fixture_file_upload('spec/fixtures/referees/metrics_referee.json.gz') }
+ let(:params) { { artifact_type: :metrics_referee, artifact_format: :raw } }
+
+ it 'returns an error' do
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(400)
+ expect(job.reload.job_artifacts_metrics_referee).to be_nil
+ end
+ end
+ end
+
+ context 'when artifact_type is network_referee' do
+ context 'when artifact_format is gzip' do
+ let(:file_upload) { fixture_file_upload('spec/fixtures/referees/network_referee.json.gz') }
+ let(:params) { { artifact_type: :network_referee, artifact_format: :gzip } }
+
+ it 'stores network_referee data' do
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(201)
+ expect(job.reload.job_artifacts_network_referee).not_to be_nil
+ end
+ end
+
+ context 'when artifact_format is raw' do
+ let(:file_upload) { fixture_file_upload('spec/fixtures/referees/network_referee.json.gz') }
+ let(:params) { { artifact_type: :network_referee, artifact_format: :raw } }
+
+ it 'returns an error' do
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(400)
+ expect(job.reload.job_artifacts_network_referee).to be_nil
+ end
+ end
+ end
end
context 'when artifacts are being stored outside of tmp path' do
diff --git a/spec/requests/api/services_spec.rb b/spec/requests/api/services_spec.rb
index 7c7620389b4..08f58387bf8 100644
--- a/spec/requests/api/services_spec.rb
+++ b/spec/requests/api/services_spec.rb
@@ -10,6 +10,38 @@ describe API::Services do
create(:project, creator_id: user.id, namespace: user.namespace)
end
+ describe "GET /projects/:id/services" do
+ it 'returns authentication error when unauthenticated' do
+ get api("/projects/#{project.id}/services")
+
+ expect(response).to have_gitlab_http_status(401)
+ end
+
+ it "returns error when authenticated but user is not a project owner" do
+ project.add_developer(user2)
+ get api("/projects/#{project.id}/services", user2)
+
+ expect(response).to have_gitlab_http_status(403)
+ end
+
+ context 'project with services' do
+ let!(:active_service) { create(:emails_on_push_service, project: project, active: true) }
+ let!(:service) { create(:custom_issue_tracker_service, project: project, active: false) }
+
+ it "returns a list of all active services" do
+ get api("/projects/#{project.id}/services", user)
+
+ aggregate_failures 'expect successful response with all active services' do
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to be_an Array
+ expect(json_response.count).to eq(1)
+ expect(json_response.first['slug']).to eq('emails-on-push')
+ expect(response).to match_response_schema('public_api/v4/services')
+ end
+ end
+ end
+ end
+
Service.available_services_names.each do |service|
describe "PUT /projects/:id/services/#{service.dasherize}" do
include_context service
@@ -30,6 +62,7 @@ describe API::Services do
put api("/projects/#{project.id}/services/#{dashed_service}?#{query_strings}", user), params: service_attrs
expect(response).to have_gitlab_http_status(200)
+ expect(json_response['slug']).to eq(dashed_service)
events.each do |event|
next if event == "foo"
diff --git a/spec/requests/api/triggers_spec.rb b/spec/requests/api/triggers_spec.rb
index fd1104fa978..d54d112cd9f 100644
--- a/spec/requests/api/triggers_spec.rb
+++ b/spec/requests/api/triggers_spec.rb
@@ -87,22 +87,6 @@ describe API::Triggers do
expect(pipeline.variables.map { |v| { v.key => v.value } }.last).to eq(variables)
end
end
-
- context 'when legacy trigger' do
- before do
- trigger.update(owner: nil)
- end
-
- it 'creates pipeline' do
- post api("/projects/#{project.id}/trigger/pipeline"), params: options.merge(ref: 'master')
-
- expect(response).to have_gitlab_http_status(201)
- expect(json_response).to include('id' => pipeline.id)
- pipeline.builds.reload
- expect(pipeline.builds.pending.size).to eq(2)
- expect(pipeline.builds.size).to eq(5)
- end
- end
end
context 'when triggering a pipeline from a trigger token' do
diff --git a/spec/requests/api/wikis_spec.rb b/spec/requests/api/wikis_spec.rb
index 310caa92eb9..2e0b7a30480 100644
--- a/spec/requests/api/wikis_spec.rb
+++ b/spec/requests/api/wikis_spec.rb
@@ -115,7 +115,7 @@ describe API::Wikis do
end
[:title, :content, :format].each do |part|
- it "it updates with wiki with missing #{part}" do
+ it "updates with wiki with missing #{part}" do
payload.delete(part)
put(api(url, user), params: payload)
diff --git a/spec/requests/self_monitoring_project_spec.rb b/spec/requests/self_monitoring_project_spec.rb
new file mode 100644
index 00000000000..d562a34aec4
--- /dev/null
+++ b/spec/requests/self_monitoring_project_spec.rb
@@ -0,0 +1,224 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Self-Monitoring project requests' do
+ let(:admin) { create(:admin) }
+
+ describe 'POST #create_self_monitoring_project' do
+ let(:worker_class) { SelfMonitoringProjectCreateWorker }
+
+ subject { post create_self_monitoring_project_admin_application_settings_path }
+
+ it_behaves_like 'not accessible to non-admin users'
+
+ context 'with admin user' do
+ before do
+ login_as(admin)
+ end
+
+ context 'with feature flag disabled' do
+ it_behaves_like 'not accessible if feature flag is disabled'
+ end
+
+ context 'with feature flag enabled' do
+ let(:status_api) { status_create_self_monitoring_project_admin_application_settings_path }
+
+ it_behaves_like 'triggers async worker, returns sidekiq job_id with response accepted'
+ end
+ end
+ end
+
+ describe 'GET #status_create_self_monitoring_project' do
+ let(:worker_class) { SelfMonitoringProjectCreateWorker }
+ let(:job_id) { 'job_id' }
+
+ subject do
+ get status_create_self_monitoring_project_admin_application_settings_path,
+ params: { job_id: job_id }
+ end
+
+ it_behaves_like 'not accessible to non-admin users'
+
+ context 'with admin user' do
+ before do
+ login_as(admin)
+ end
+
+ context 'with feature flag disabled' do
+ it_behaves_like 'not accessible if feature flag is disabled'
+ end
+
+ context 'with feature flag enabled' do
+ it_behaves_like 'handles invalid job_id'
+
+ context 'when job is in progress' do
+ before do
+ allow(worker_class).to receive(:in_progress?)
+ .with(job_id)
+ .and_return(true)
+ end
+
+ it_behaves_like 'sets polling header and returns accepted' do
+ let(:in_progress_message) { 'Job to create self-monitoring project is in progress' }
+ end
+ end
+
+ context 'when self-monitoring project and job do not exist' do
+ let(:job_id) { nil }
+
+ it 'returns bad_request' do
+ subject
+
+ aggregate_failures do
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response).to eq(
+ 'message' => 'Self-monitoring project does not exist. Please check logs ' \
+ 'for any error messages'
+ )
+ end
+ end
+ end
+
+ context 'when self-monitoring project exists' do
+ let(:project) { build(:project) }
+
+ before do
+ stub_application_setting(instance_administration_project_id: 1)
+ stub_application_setting(instance_administration_project: project)
+ end
+
+ it 'does not need job_id' do
+ get status_create_self_monitoring_project_admin_application_settings_path
+
+ aggregate_failures do
+ expect(response).to have_gitlab_http_status(:success)
+ expect(json_response).to eq(
+ 'project_id' => 1,
+ 'project_full_path' => project.full_path
+ )
+ end
+ end
+
+ it 'returns success with job_id' do
+ subject
+
+ aggregate_failures do
+ expect(response).to have_gitlab_http_status(:success)
+ expect(json_response).to eq(
+ 'project_id' => 1,
+ 'project_full_path' => project.full_path
+ )
+ end
+ end
+ end
+ end
+ end
+ end
+
+ describe 'DELETE #delete_self_monitoring_project' do
+ let(:worker_class) { SelfMonitoringProjectDeleteWorker }
+
+ subject { delete delete_self_monitoring_project_admin_application_settings_path }
+
+ it_behaves_like 'not accessible to non-admin users'
+
+ context 'with admin user' do
+ before do
+ login_as(admin)
+ end
+
+ context 'with feature flag disabled' do
+ it_behaves_like 'not accessible if feature flag is disabled'
+ end
+
+ context 'with feature flag enabled' do
+ let(:status_api) { status_delete_self_monitoring_project_admin_application_settings_path }
+
+ it_behaves_like 'triggers async worker, returns sidekiq job_id with response accepted'
+ end
+ end
+ end
+
+ describe 'GET #status_delete_self_monitoring_project' do
+ let(:worker_class) { SelfMonitoringProjectDeleteWorker }
+ let(:job_id) { 'job_id' }
+
+ subject do
+ get status_delete_self_monitoring_project_admin_application_settings_path,
+ params: { job_id: job_id }
+ end
+
+ it_behaves_like 'not accessible to non-admin users'
+
+ context 'with admin user' do
+ before do
+ login_as(admin)
+ end
+
+ context 'with feature flag disabled' do
+ it_behaves_like 'not accessible if feature flag is disabled'
+ end
+
+ context 'with feature flag enabled' do
+ it_behaves_like 'handles invalid job_id'
+
+ context 'when job is in progress' do
+ before do
+ allow(worker_class).to receive(:in_progress?)
+ .with(job_id)
+ .and_return(true)
+
+ stub_application_setting(instance_administration_project_id: 1)
+ end
+
+ it_behaves_like 'sets polling header and returns accepted' do
+ let(:in_progress_message) { 'Job to delete self-monitoring project is in progress' }
+ end
+ end
+
+ context 'when self-monitoring project exists and job does not exist' do
+ before do
+ stub_application_setting(instance_administration_project_id: 1)
+ end
+
+ it 'returns bad_request' do
+ subject
+
+ aggregate_failures do
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response).to eq(
+ 'message' => 'Self-monitoring project was not deleted. Please check logs ' \
+ 'for any error messages'
+ )
+ end
+ end
+ end
+
+ context 'when self-monitoring project does not exist' do
+ it 'does not need job_id' do
+ get status_delete_self_monitoring_project_admin_application_settings_path
+
+ aggregate_failures do
+ expect(response).to have_gitlab_http_status(:success)
+ expect(json_response).to eq(
+ 'message' => 'Self-monitoring project has been successfully deleted'
+ )
+ end
+ end
+
+ it 'returns success with job_id' do
+ subject
+
+ aggregate_failures do
+ expect(response).to have_gitlab_http_status(:success)
+ expect(json_response).to eq(
+ 'message' => 'Self-monitoring project has been successfully deleted'
+ )
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/routing/admin_routing_spec.rb b/spec/routing/admin_routing_spec.rb
index a82bdfe3ce8..93b2c19c74a 100644
--- a/spec/routing/admin_routing_spec.rb
+++ b/spec/routing/admin_routing_spec.rb
@@ -161,3 +161,17 @@ describe Admin::GroupsController, "routing" do
expect(get("/admin/groups/#{name}/edit")).to route_to('admin/groups#edit', id: name)
end
end
+
+describe Admin::SessionsController, "routing" do
+ it "to #new" do
+ expect(get("/admin/session/new")).to route_to('admin/sessions#new')
+ end
+
+ it "to #create" do
+ expect(post("/admin/session")).to route_to('admin/sessions#create')
+ end
+
+ it "to #destroy" do
+ expect(post("/admin/session/destroy")).to route_to('admin/sessions#destroy')
+ end
+end
diff --git a/spec/routing/project_routing_spec.rb b/spec/routing/project_routing_spec.rb
index 287db20448a..efd7d3f3742 100644
--- a/spec/routing/project_routing_spec.rb
+++ b/spec/routing/project_routing_spec.rb
@@ -314,6 +314,12 @@ describe 'project routing' do
expect(get('/gitlab/gitlabhq/merge_requests/1/pipelines')).to route_to('projects/merge_requests#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1', tab: 'pipelines')
end
+ it 'to #show from scoped route' do
+ expect(get('/gitlab/gitlabhq/-/merge_requests/1.diff')).to route_to('projects/merge_requests#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1', format: 'diff')
+ expect(get('/gitlab/gitlabhq/-/merge_requests/1.patch')).to route_to('projects/merge_requests#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1', format: 'patch')
+ expect(get('/gitlab/gitlabhq/-/merge_requests/1/diffs')).to route_to('projects/merge_requests#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1', tab: 'diffs')
+ end
+
it_behaves_like 'RESTful project resources' do
let(:controller) { 'merge_requests' }
let(:actions) { [:index, :edit, :show, :update] }
@@ -573,6 +579,10 @@ describe 'project routing' do
namespace_id: 'gitlab', project_id: 'gitlabhq',
id: "blob/master/blob/#{newline_file}" })
end
+
+ it 'to #show from scoped route' do
+ expect(get('/gitlab/gitlabhq/-/blob/master/app/models/project.rb')).to route_to('projects/blob#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/app/models/project.rb')
+ end
end
# project_tree GET /:project_id/tree/:id(.:format) tree#show {id: /[^\0]+/, project_id: /[^\/]+/}
@@ -590,6 +600,10 @@ describe 'project routing' do
namespace_id: 'gitlab', project_id: 'gitlabhq',
id: "master/#{newline_file}" })
end
+
+ it 'to #show from scoped route' do
+ expect(get('/gitlab/gitlabhq/-/tree/master/app/models/project.rb')).to route_to('projects/tree#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/app/models/project.rb')
+ end
end
# project_find_file GET /:namespace_id/:project_id/find_file/*id(.:format) projects/find_file#show {:id=>/[^\0]+/, :namespace_id=>/[a-zA-Z.0-9_\-]+/, :project_id=>/[a-zA-Z.0-9_\-]+(?<!\.atom)/, :format=>/html/}
diff --git a/spec/routing/routing_spec.rb b/spec/routing/routing_spec.rb
index 6f67cdb1222..ff002469e3c 100644
--- a/spec/routing/routing_spec.rb
+++ b/spec/routing/routing_spec.rb
@@ -256,10 +256,8 @@ describe "Authentication", "routing" do
expect(post("/users/sign_in")).to route_to('sessions#create')
end
- # sign_out with GET instead of DELETE facilitates ad-hoc single-sign-out processes
- # (https://gitlab.com/gitlab-org/gitlab-foss/issues/39708)
- it "GET /users/sign_out" do
- expect(get("/users/sign_out")).to route_to('sessions#destroy')
+ it "POST /users/sign_out" do
+ expect(post("/users/sign_out")).to route_to('sessions#destroy')
end
it "POST /users/password" do
diff --git a/spec/routing/uploads_routing_spec.rb b/spec/routing/uploads_routing_spec.rb
index 42e84774088..f94ae81eeb5 100644
--- a/spec/routing/uploads_routing_spec.rb
+++ b/spec/routing/uploads_routing_spec.rb
@@ -28,4 +28,12 @@ describe 'Uploads', 'routing' do
expect(post("/uploads/#{model}?id=1")).not_to be_routable
end
end
+
+ describe 'legacy paths' do
+ include RSpec::Rails::RequestExampleGroup
+
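+ # RSpec::Rails::RequestExampleGroup is included so these examples behave
+ # like request specs, which the `redirect_to` expectation below relies on.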
+ it 'redirects project uploads to canonical path under project namespace' do
+ expect(get('/uploads/namespace/project/12345/test.png')).to redirect_to('/namespace/project/uploads/12345/test.png')
+ end
+ end
end
diff --git a/spec/rubocop/cop/migration/add_column_with_default_spec.rb b/spec/rubocop/cop/migration/add_column_with_default_spec.rb
new file mode 100644
index 00000000000..f3518f2f058
--- /dev/null
+++ b/spec/rubocop/cop/migration/add_column_with_default_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require 'rubocop'
+require 'rubocop/rspec/support'
+
+require_relative '../../../../rubocop/cop/migration/add_column_with_default'
+
+describe RuboCop::Cop::Migration::AddColumnWithDefault do
+ include CopHelper
+
+ let(:cop) { described_class.new }
+
+ context 'outside of a migration' do
+ it 'does not register any offenses' do
+ expect_no_offenses(<<~RUBY)
+ def up
+ add_column_with_default(:ci_build_needs, :artifacts, :boolean, default: true, allow_null: false)
+ end
+ RUBY
+ end
+ end
+
+ context 'in a migration' do
+ before do
+ allow(cop).to receive(:in_migration?).and_return(true)
+ end
+
+ let(:offense) { '`add_column_with_default` without `allow_null: true` may cause prolonged lock situations and downtime, see https://gitlab.com/gitlab-org/gitlab/issues/38060' }
+
+ it 'registers an offense when specifying allow_null: false' do
+ expect_offense(<<~RUBY)
+ def up
+ add_column_with_default(:ci_build_needs, :artifacts, :boolean, default: true, allow_null: false)
+ ^^^^^^^^^^^^^^^^^^^^^^^ #{offense}
+ end
+ RUBY
+ end
+
+ it 'registers no offense when specifying allow_null: true' do
+ expect_no_offenses(<<~RUBY)
+ def up
+ add_column_with_default(:ci_build_needs, :artifacts, :boolean, default: true, allow_null: true)
+ end
+ RUBY
+ end
+
+ it 'registers an offense when allow_null is not specified' do
+ expect_offense(<<~RUBY)
+ def up
+ add_column_with_default(:ci_build_needs, :artifacts, :boolean, default: true)
+ ^^^^^^^^^^^^^^^^^^^^^^^ #{offense}
+ end
+ RUBY
+ end
+
+ it 'registers no offense for application_settings (whitelisted table)' do
+ expect_no_offenses(<<~RUBY)
+ def up
+ add_column_with_default(:application_settings, :another_column, :boolean, default: true, allow_null: false)
+ end
+ RUBY
+ end
+ end
+end
diff --git a/spec/rubocop/cop/rspec/have_gitlab_http_status_spec.rb b/spec/rubocop/cop/rspec/have_gitlab_http_status_spec.rb
new file mode 100644
index 00000000000..12bdacdee3c
--- /dev/null
+++ b/spec/rubocop/cop/rspec/have_gitlab_http_status_spec.rb
@@ -0,0 +1,102 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+require 'rspec-parameterized'
+require 'rubocop'
+require 'rubocop/rspec/support'
+
+require_relative '../../../../rubocop/cop/rspec/have_gitlab_http_status'
+
+describe RuboCop::Cop::RSpec::HaveGitlabHttpStatus do
+ include CopHelper
+
+ using RSpec::Parameterized::TableSyntax
+
+ let(:source_file) { 'spec/foo_spec.rb' }
+
+ subject(:cop) { described_class.new }
+
+ shared_examples 'offense' do |code|
+ it 'registers an offense' do
+ inspect_source(code, source_file)
+
+ expect(cop.offenses.size).to eq(1)
+ expect(cop.offenses.map(&:line)).to eq([1])
+ expect(cop.highlights).to eq([code])
+ end
+ end
+
+ shared_examples 'no offense' do |code|
+ it 'does not register an offense' do
+ inspect_source(code)
+
+ expect(cop.offenses).to be_empty
+ end
+ end
+
+ shared_examples 'autocorrect' do |bad, good|
+ it 'autocorrects' do
+ autocorrected = autocorrect_source(bad, source_file)
+
+ expect(autocorrected).to eql(good)
+ end
+ end
+
+ shared_examples 'no autocorrect' do |code|
+ it 'does not autocorrect' do
+ autocorrected = autocorrect_source(code, source_file)
+
+ expect(autocorrected).to eql(code)
+ end
+ end
+
+ describe 'offenses and autocorrections' do
+ where(:bad, :good) do
+ 'have_http_status(:ok)' | 'have_gitlab_http_status(:ok)'
+ 'have_http_status(204)' | 'have_gitlab_http_status(:no_content)'
+ 'have_gitlab_http_status(201)' | 'have_gitlab_http_status(:created)'
+ 'have_http_status(var)' | 'have_gitlab_http_status(var)'
+ 'have_http_status(:success)' | 'have_gitlab_http_status(:success)'
+ 'have_http_status(:invalid)' | 'have_gitlab_http_status(:invalid)'
+ end
+
+ with_them do
+ include_examples 'offense', params[:bad]
+ include_examples 'no offense', params[:good]
+ include_examples 'autocorrect', params[:bad], params[:good]
+ include_examples 'no autocorrect', params[:good]
+ end
+ end
+
+ describe 'partially autocorrects invalid numeric status' do
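+ # -1 is not a valid HTTP status code, so autocorrection only swaps the
+ # matcher name; the corrected call still registers an offense and is not
+ # corrected any further.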
+ where(:bad, :good) do
+ 'have_http_status(-1)' | 'have_gitlab_http_status(-1)'
+ end
+
+ with_them do
+ include_examples 'offense', params[:bad]
+ include_examples 'offense', params[:good]
+ include_examples 'autocorrect', params[:bad], params[:good]
+ include_examples 'no autocorrect', params[:good]
+ end
+ end
+
+ describe 'ignore' do
+ where(:code) do
+ [
+ 'have_http_status',
+ 'have_http_status { }',
+ 'have_http_status(200, arg)',
+ 'have_gitlab_http_status',
+ 'have_gitlab_http_status { }',
+ 'have_gitlab_http_status(200, arg)'
+ ]
+ end
+
+ with_them do
+ include_examples 'no offense', params[:code]
+ include_examples 'no autocorrect', params[:code]
+ end
+ end
+end
diff --git a/spec/serializers/deploy_key_entity_spec.rb b/spec/serializers/deploy_key_entity_spec.rb
index 607adfc2488..0dbbf0de59b 100644
--- a/spec/serializers/deploy_key_entity_spec.rb
+++ b/spec/serializers/deploy_key_entity_spec.rb
@@ -24,6 +24,7 @@ describe DeployKeyEntity do
user_id: deploy_key.user_id,
title: deploy_key.title,
fingerprint: deploy_key.fingerprint,
+ fingerprint_sha256: deploy_key.fingerprint_sha256,
destroyed_when_orphaned: true,
almost_orphaned: false,
created_at: deploy_key.created_at,
diff --git a/spec/serializers/deployment_entity_spec.rb b/spec/serializers/deployment_entity_spec.rb
index 2a57ea51b39..7abe74fae8f 100644
--- a/spec/serializers/deployment_entity_spec.rb
+++ b/spec/serializers/deployment_entity_spec.rb
@@ -6,7 +6,7 @@ describe DeploymentEntity do
let(:user) { developer }
let(:developer) { create(:user) }
let(:reporter) { create(:user) }
- let(:project) { create(:project) }
+ let(:project) { create(:project, :repository) }
let(:request) { double('request') }
let(:deployment) { create(:deployment, deployable: build, project: project) }
let(:build) { create(:ci_build, :manual, pipeline: pipeline) }
diff --git a/spec/serializers/environment_status_entity_spec.rb b/spec/serializers/environment_status_entity_spec.rb
index 6d98f91cfde..11455c57677 100644
--- a/spec/serializers/environment_status_entity_spec.rb
+++ b/spec/serializers/environment_status_entity_spec.rb
@@ -45,7 +45,7 @@ describe EnvironmentStatusEntity do
end
context 'when deployment has metrics' do
- let(:prometheus_adapter) { double('prometheus_adapter', can_query?: true) }
+ let(:prometheus_adapter) { double('prometheus_adapter', can_query?: true, configured?: true) }
let(:simple_metrics) do
{
diff --git a/spec/serializers/issue_board_entity_spec.rb b/spec/serializers/issue_board_entity_spec.rb
index f6fa2a794f6..d013b27369b 100644
--- a/spec/serializers/issue_board_entity_spec.rb
+++ b/spec/serializers/issue_board_entity_spec.rb
@@ -3,12 +3,12 @@
require 'spec_helper'
describe IssueBoardEntity do
- let(:project) { create(:project) }
- let(:resource) { create(:issue, project: project) }
- let(:user) { create(:user) }
- let(:milestone) { create(:milestone, project: project) }
- let(:label) { create(:label, project: project, title: 'Test Label') }
- let(:request) { double('request', current_user: user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:resource) { create(:issue, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:milestone) { create(:milestone, project: project) }
+ let_it_be(:label) { create(:label, project: project, title: 'Test Label') }
+ let(:request) { double('request', current_user: user) }
subject { described_class.new(resource, request: request).as_json }
diff --git a/spec/serializers/pipeline_entity_spec.rb b/spec/serializers/pipeline_entity_spec.rb
index d95aaf3d104..75f3bdfcc9e 100644
--- a/spec/serializers/pipeline_entity_spec.rb
+++ b/spec/serializers/pipeline_entity_spec.rb
@@ -123,6 +123,26 @@ describe PipelineEntity do
end
end
+ context 'delete path' do
+ context 'user has ability to delete pipeline' do
+ let(:project) { create(:project, namespace: user.namespace) }
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+
+ it 'contains delete path' do
+ expect(subject[:delete_path]).to be_present
+ end
+ end
+
+ context 'user does not have ability to delete pipeline' do
+ let(:project) { create(:project) }
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+
+ it 'does not contain delete path' do
+ expect(subject).not_to have_key(:delete_path)
+ end
+ end
+ end
+
context 'when pipeline ref is empty' do
let(:pipeline) { create(:ci_empty_pipeline) }
diff --git a/spec/serializers/review_app_setup_entity_spec.rb b/spec/serializers/review_app_setup_entity_spec.rb
new file mode 100644
index 00000000000..19949fa9282
--- /dev/null
+++ b/spec/serializers/review_app_setup_entity_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ReviewAppSetupEntity do
+ let_it_be(:user) { create(:admin) }
+ let(:project) { create(:project) }
+ let(:presenter) { ProjectPresenter.new(project, current_user: user) }
+ let(:entity) { described_class.new(presenter) }
+ let(:request) { double('request') }
+
+ before do
+ allow(request).to receive(:current_user).and_return(user)
+ allow(request).to receive(:project).and_return(project)
+ end
+
+ subject { entity.as_json }
+
+ describe '#as_json' do
+ it 'contains can_setup_review_app' do
+ expect(subject).to include(:can_setup_review_app)
+ end
+
+ context 'when the user can setup a review app' do
+ before do
+ allow(presenter).to receive(:can_setup_review_app?).and_return(true)
+ end
+
+ it 'contains relevant fields' do
+ expect(subject.keys).to include(:all_clusters_empty, :review_snippet)
+ end
+
+ it 'exposes the relevant review snippet' do
+ review_app_snippet = YAML.safe_load(File.read(Rails.root.join('lib', 'gitlab', 'ci', 'snippets', 'review_app_default.yml'))).to_s
+
+ expect(subject[:review_snippet]).to eq(review_app_snippet)
+ end
+
+ it 'exposes whether the project has associated clusters' do
+ expect(subject[:all_clusters_empty]).to be_truthy
+ end
+ end
+
+ context 'when the user cannot setup a review app' do
+ before do
+ allow(presenter).to receive(:can_setup_review_app?).and_return(false)
+ end
+
+ it 'does not expose certain fields' do
+ expect(subject.keys).not_to include(:all_clusters_empty, :review_snippet)
+ end
+ end
+ end
+end
diff --git a/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb b/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb
index f2cda999932..e03d87e9d49 100644
--- a/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb
+++ b/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb
@@ -34,7 +34,7 @@ describe AutoMerge::MergeWhenPipelineSucceedsService do
it { is_expected.to be_truthy }
- context 'when the head piipeline succeeded' do
+ context 'when the head pipeline succeeded' do
let(:pipeline_status) { :success }
it { is_expected.to be_falsy }
diff --git a/spec/services/auto_merge_service_spec.rb b/spec/services/auto_merge_service_spec.rb
index 50dfc49a59c..221cf695331 100644
--- a/spec/services/auto_merge_service_spec.rb
+++ b/spec/services/auto_merge_service_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe AutoMergeService do
- set(:project) { create(:project) }
- set(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
let(:service) { described_class.new(project, user) }
describe '.all_strategies' do
diff --git a/spec/services/award_emojis/add_service_spec.rb b/spec/services/award_emojis/add_service_spec.rb
index 8364e662735..4bcb5fa039f 100644
--- a/spec/services/award_emojis/add_service_spec.rb
+++ b/spec/services/award_emojis/add_service_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe AwardEmojis::AddService do
- set(:user) { create(:user) }
- set(:project) { create(:project) }
- set(:awardable) { create(:note, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:awardable) { create(:note, project: project) }
let(:name) { 'thumbsup' }
subject(:service) { described_class.new(awardable, name, user) }
diff --git a/spec/services/award_emojis/destroy_service_spec.rb b/spec/services/award_emojis/destroy_service_spec.rb
index 6d54c037464..f411345560e 100644
--- a/spec/services/award_emojis/destroy_service_spec.rb
+++ b/spec/services/award_emojis/destroy_service_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe AwardEmojis::DestroyService do
- set(:user) { create(:user) }
- set(:awardable) { create(:note) }
- set(:project) { awardable.project }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:awardable) { create(:note) }
+ let_it_be(:project) { awardable.project }
let(:name) { 'thumbsup' }
let!(:award_from_other_user) do
create(:award_emoji, name: name, awardable: awardable, user: create(:user))
diff --git a/spec/services/award_emojis/toggle_service_spec.rb b/spec/services/award_emojis/toggle_service_spec.rb
index a8d110d04f7..069bdfcb99f 100644
--- a/spec/services/award_emojis/toggle_service_spec.rb
+++ b/spec/services/award_emojis/toggle_service_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe AwardEmojis::ToggleService do
- set(:user) { create(:user) }
- set(:project) { create(:project, :public) }
- set(:awardable) { create(:note, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:awardable) { create(:note, project: project) }
let(:name) { 'thumbsup' }
subject(:service) { described_class.new(awardable, name, user) }
diff --git a/spec/services/boards/issues/move_service_spec.rb b/spec/services/boards/issues/move_service_spec.rb
index cf84ec8fd4c..b9ebbc30c1a 100644
--- a/spec/services/boards/issues/move_service_spec.rb
+++ b/spec/services/boards/issues/move_service_spec.rb
@@ -54,14 +54,14 @@ describe Boards::Issues::MoveService do
end
describe '#execute_multiple' do
- set(:group) { create(:group) }
- set(:user) { create(:user) }
- set(:project) { create(:project, namespace: group) }
- set(:board1) { create(:board, group: group) }
- set(:development) { create(:group_label, group: group, name: 'Development') }
- set(:testing) { create(:group_label, group: group, name: 'Testing') }
- set(:list1) { create(:list, board: board1, label: development, position: 0) }
- set(:list2) { create(:list, board: board1, label: testing, position: 1) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, namespace: group) }
+ let_it_be(:board1) { create(:board, group: group) }
+ let_it_be(:development) { create(:group_label, group: group, name: 'Development') }
+ let_it_be(:testing) { create(:group_label, group: group, name: 'Testing') }
+ let_it_be(:list1) { create(:list, board: board1, label: development, position: 0) }
+ let_it_be(:list2) { create(:list, board: board1, label: testing, position: 1) }
let(:params) { { board_id: board1.id, from_list_id: list1.id, to_list_id: list2.id } }
before do
diff --git a/spec/services/boards/list_service_spec.rb b/spec/services/boards/list_service_spec.rb
index c9d372ea166..4eb023907fa 100644
--- a/spec/services/boards/list_service_spec.rb
+++ b/spec/services/boards/list_service_spec.rb
@@ -10,6 +10,7 @@ describe Boards::ListService do
subject(:service) { described_class.new(parent, double) }
it_behaves_like 'boards list service'
+ it_behaves_like 'multiple boards list service'
end
context 'when board parent is a group' do
diff --git a/spec/services/ci/create_pipeline_service/custom_config_content_spec.rb b/spec/services/ci/create_pipeline_service/custom_config_content_spec.rb
new file mode 100644
index 00000000000..33cd6e164b0
--- /dev/null
+++ b/spec/services/ci/create_pipeline_service/custom_config_content_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+describe Ci::CreatePipelineService do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:admin) }
+ let(:ref) { 'refs/heads/master' }
+ let(:service) { described_class.new(project, user, { ref: ref }) }
+
+ context 'custom config content' do
+ let(:bridge) do
+ double(:bridge, yaml_for_downstream: <<~YML
+ rspec:
+ script: rspec
+ custom:
+ script: custom
+ YML
+ )
+ end
+
+ subject { service.execute(:push, bridge: bridge) }
+
+ it 'creates a pipeline using the content passed in as param' do
+ expect(subject).to be_persisted
+ expect(subject.builds.map(&:name)).to eq %w[rspec custom]
+ expect(subject.config_source).to eq 'bridge_source'
+ end
+ end
+end
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index 04e57b1a2d4..d6cc233088d 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe Ci::CreatePipelineService do
include ProjectForksHelper
- set(:project) { create(:project, :repository) }
+ let_it_be(:project, reload: true) { create(:project, :repository) }
let(:user) { create(:admin) }
let(:ref_name) { 'refs/heads/master' }
@@ -362,11 +362,11 @@ describe Ci::CreatePipelineService do
context 'when build that is not marked as interruptible is running' do
it 'cancels running outdated pipelines', :sidekiq_might_not_need_inline do
- pipeline_on_previous_commit
- .builds
- .find_by_name('build_2_1')
- .tap(&:enqueue!)
- .run!
+ build_2_1 = pipeline_on_previous_commit
+ .builds.find_by_name('build_2_1')
+
+ build_2_1.enqueue!
+ build_2_1.reset.run!
pipeline
@@ -377,12 +377,12 @@ describe Ci::CreatePipelineService do
end
context 'when an uninterruptible build is running' do
- it 'does not cancel running outdated pipelines', :sidekiq_might_not_need_inline do
- pipeline_on_previous_commit
- .builds
- .find_by_name('build_3_1')
- .tap(&:enqueue!)
- .run!
+ it 'does not cancel running outdated pipelines', :sidekiq_inline do
+ build_3_1 = pipeline_on_previous_commit
+ .builds.find_by_name('build_3_1')
+
+ build_3_1.enqueue!
+ build_3_1.reset.run!
pipeline
@@ -493,12 +493,13 @@ describe Ci::CreatePipelineService do
before do
stub_ci_pipeline_yaml_file(nil)
allow_any_instance_of(Project).to receive(:auto_devops_enabled?).and_return(true)
+ create(:project_auto_devops, project: project)
end
it 'pull it from Auto-DevOps' do
pipeline = execute_service
expect(pipeline).to be_auto_devops_source
- expect(pipeline.builds.map(&:name)).to eq %w[test code_quality build]
+ expect(pipeline.builds.map(&:name)).to match_array(%w[test code_quality build])
end
end
@@ -914,6 +915,44 @@ describe Ci::CreatePipelineService do
end
end
+ context 'with resource group' do
+ context 'when resource group is defined' do
+ before do
+ config = YAML.dump(
+ test: { stage: 'test', script: 'ls', resource_group: resource_group_key }
+ )
+
+ stub_ci_pipeline_yaml_file(config)
+ end
+
+ let(:resource_group_key) { 'iOS' }
+
+ it 'persists the association correctly' do
+ result = execute_service
+ deploy_job = result.builds.find_by_name!(:test)
+ resource_group = project.resource_groups.find_by_key!(resource_group_key)
+
+ expect(result).to be_persisted
+ expect(deploy_job.resource_group.key).to eq(resource_group_key)
+ expect(project.resource_groups.count).to eq(1)
+ expect(resource_group.builds.count).to eq(1)
+ expect(resource_group.resources.count).to eq(1)
+ expect(resource_group.resources.first.build).to eq(nil)
+ end
+
+ context 'when resource group key includes predefined variables' do
+ let(:resource_group_key) { '$CI_COMMIT_REF_NAME-$CI_JOB_NAME' }
+
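+ # The pipeline ref is master and the job is named 'test', hence the
+ # expectation that the key expands to 'master-test'.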
+ it 'interpolates the variables into the key correctly' do
+ result = execute_service
+
+ expect(result).to be_persisted
+ expect(project.resource_groups.exists?(key: 'master-test')).to eq(true)
+ end
+ end
+ end
+ end
+
context 'with timeout' do
context 'when builds with custom timeouts are configured' do
before do
@@ -930,6 +969,70 @@ describe Ci::CreatePipelineService do
end
end
+ context 'with release' do
+ shared_examples_for 'a successful release pipeline' do
+ before do
+ stub_feature_flags(ci_release_generation: true)
+ stub_ci_pipeline_yaml_file(YAML.dump(config))
+ end
+
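+ # The release job's configuration is expected to be stored verbatim on the
+ # build's options, minus the job-level :stage and :only keys.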
+ it 'is valid config' do
+ pipeline = execute_service
+ build = pipeline.builds.first
+ expect(pipeline).to be_kind_of(Ci::Pipeline)
+ expect(pipeline).to be_valid
+ expect(pipeline.yaml_errors).not_to be_present
+ expect(pipeline).to be_persisted
+ expect(build).to be_kind_of(Ci::Build)
+ expect(build.options).to eq(config[:release].except(:stage, :only).with_indifferent_access)
+ end
+ end
+
+ context 'simple example' do
+ it_behaves_like 'a successful release pipeline' do
+ let(:config) do
+ {
+ release: {
+ script: ["make changelog | tee release_changelog.txt"],
+ release: {
+ tag_name: "v0.06",
+ description: "./release_changelog.txt"
+ }
+ }
+ }
+ end
+ end
+ end
+
+ context 'example with all release metadata' do
+ it_behaves_like 'a successful release pipeline' do
+ let(:config) do
+ {
+ release: {
+ script: ["make changelog | tee release_changelog.txt"],
+ release: {
+ name: "Release $CI_TAG_NAME",
+ tag_name: "v0.06",
+ description: "./release_changelog.txt",
+ assets: {
+ links: [
+ {
+ name: "cool-app.zip",
+ url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.zip"
+ },
+ {
+ url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.exe"
+ }
+ ]
+ }
+ }
+ }
+ }
+ end
+ end
+ end
+ end
+
shared_examples 'when ref is protected' do
let(:user) { create(:user) }
@@ -1020,21 +1123,6 @@ describe Ci::CreatePipelineService do
it_behaves_like 'when ref is protected'
end
- context 'when ref is not protected' do
- context 'when trigger belongs to no one' do
- let(:user) {}
- let(:trigger) { create(:ci_trigger, owner: nil) }
- let(:trigger_request) { create(:ci_trigger_request, trigger: trigger) }
- let(:pipeline) { execute_service(trigger_request: trigger_request) }
-
- it 'creates an unprotected pipeline' do
- expect(pipeline).to be_persisted
- expect(pipeline).not_to be_protected
- expect(Ci::Pipeline.count).to eq(1)
- end
- end
- end
-
context 'when pipeline is running for a tag' do
before do
config = YAML.dump(test: { script: 'test', only: ['branches'] },
diff --git a/spec/services/ci/ensure_stage_service_spec.rb b/spec/services/ci/ensure_stage_service_spec.rb
index 43bbd2130a4..de07a1ae238 100644
--- a/spec/services/ci/ensure_stage_service_spec.rb
+++ b/spec/services/ci/ensure_stage_service_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe Ci::EnsureStageService, '#execute' do
- set(:project) { create(:project) }
- set(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
let(:stage) { create(:ci_stage_entity) }
let(:job) { build(:ci_build) }
diff --git a/spec/services/ci/expire_pipeline_cache_service_spec.rb b/spec/services/ci/expire_pipeline_cache_service_spec.rb
index ff2d286465a..c0226654fd9 100644
--- a/spec/services/ci/expire_pipeline_cache_service_spec.rb
+++ b/spec/services/ci/expire_pipeline_cache_service_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe Ci::ExpirePipelineCacheService do
- set(:user) { create(:user) }
- set(:project) { create(:project) }
- set(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
subject { described_class.new }
describe '#execute' do
diff --git a/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb b/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb
new file mode 100644
index 00000000000..c29c56c2b04
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::PipelineProcessing::AtomicProcessingService::StatusCollection do
+ using RSpec::Parameterized::TableSyntax
+
+ set(:pipeline) { create(:ci_pipeline) }
+ set(:build_a) { create(:ci_build, :success, name: 'build-a', stage: 'build', stage_idx: 0, pipeline: pipeline) }
+ set(:build_b) { create(:ci_build, :failed, name: 'build-b', stage: 'build', stage_idx: 0, pipeline: pipeline) }
+ set(:test_a) { create(:ci_build, :running, name: 'test-a', stage: 'test', stage_idx: 1, pipeline: pipeline) }
+ set(:test_b) { create(:ci_build, :pending, name: 'test-b', stage: 'test', stage_idx: 1, pipeline: pipeline) }
+ set(:deploy) { create(:ci_build, :created, name: 'deploy', stage: 'deploy', stage_idx: 2, pipeline: pipeline) }
+
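+ # The composite statuses asserted below follow from these fixtures:
+ # build stage (success, failed) => 'failed', test stage (running, pending) => 'running',
+ # deploy stage (created) => 'created'; the collection as a whole is 'running'.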
+ let(:collection) { described_class.new(pipeline) }
+
+ describe '#set_processable_status' do
+ it 'does update existing status of processable' do
+ collection.set_processable_status(test_a.id, 'success', 100)
+
+ expect(collection.status_for_names(['test-a'])).to eq('success')
+ end
+
+ it 'ignores a missing processable' do
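+ # No explicit expectation: the example only verifies that this call does not raise.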
+ collection.set_processable_status(-1, 'failed', 100)
+ end
+ end
+
+ describe '#status_of_all' do
+ it 'returns composite status of the collection' do
+ expect(collection.status_of_all).to eq('running')
+ end
+ end
+
+ describe '#status_for_names' do
+ where(:names, :status) do
+ %w[build-a] | 'success'
+ %w[build-a build-b] | 'failed'
+ %w[build-a test-a] | 'running'
+ end
+
+ with_them do
+ it 'returns composite status of given names' do
+ expect(collection.status_for_names(names)).to eq(status)
+ end
+ end
+ end
+
+ describe '#status_for_prior_stage_position' do
+ where(:stage, :status) do
+ 0 | 'success'
+ 1 | 'failed'
+ 2 | 'running'
+ end
+
+ with_them do
+ it 'returns composite status for processables in prior stages' do
+ expect(collection.status_for_prior_stage_position(stage)).to eq(status)
+ end
+ end
+ end
+
+ describe '#status_for_stage_position' do
+ where(:stage, :status) do
+ 0 | 'failed'
+ 1 | 'running'
+ 2 | 'created'
+ end
+
+ with_them do
+ it 'returns composite status for processables at a given stage position' do
+ expect(collection.status_for_stage_position(stage)).to eq(status)
+ end
+ end
+ end
+
+ describe '#created_processable_ids_for_stage_position' do
+ it 'returns IDs of processables at a given stage position' do
+ expect(collection.created_processable_ids_for_stage_position(0)).to be_empty
+ expect(collection.created_processable_ids_for_stage_position(1)).to be_empty
+ expect(collection.created_processable_ids_for_stage_position(2)).to contain_exactly(deploy.id)
+ end
+ end
+
+ describe '#processing_processables' do
+ it 'returns processables marked as processing' do
+ expect(collection.processing_processables.map { |processable| processable[:id] })
+ .to contain_exactly(build_a.id, build_b.id, test_a.id, test_b.id, deploy.id)
+ end
+ end
+end
diff --git a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
new file mode 100644
index 00000000000..38686b41a22
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_relative 'shared_processing_service.rb'
+
+describe Ci::PipelineProcessing::AtomicProcessingService do
+ before do
+ stub_feature_flags(ci_atomic_processing: true)
+ end
+
+ it_behaves_like 'Pipeline Processing Service'
+end
diff --git a/spec/services/ci/pipeline_processing/legacy_processing_service_spec.rb b/spec/services/ci/pipeline_processing/legacy_processing_service_spec.rb
new file mode 100644
index 00000000000..2da1eb19818
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/legacy_processing_service_spec.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_relative 'shared_processing_service.rb'
+
+describe Ci::PipelineProcessing::LegacyProcessingService do
+ before do
+ stub_feature_flags(ci_atomic_processing: false)
+ end
+
+ it_behaves_like 'Pipeline Processing Service'
+end
diff --git a/spec/services/ci/pipeline_processing/shared_processing_service.rb b/spec/services/ci/pipeline_processing/shared_processing_service.rb
new file mode 100644
index 00000000000..cae5ae3f09d
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/shared_processing_service.rb
@@ -0,0 +1,940 @@
+# frozen_string_literal: true
+
+shared_examples 'Pipeline Processing Service' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project) }
+
+ let(:pipeline) do
+ create(:ci_empty_pipeline, ref: 'master', project: project)
+ end
+
+ before do
+ stub_ci_pipeline_to_return_yaml_file
+
+ stub_not_protect_default_branch
+
+ project.add_developer(user)
+ end
+
+ context 'when simple pipeline is defined' do
+ before do
+ create_build('linux', stage_idx: 0)
+ create_build('mac', stage_idx: 0)
+ create_build('rspec', stage_idx: 1)
+ create_build('rubocop', stage_idx: 1)
+ create_build('deploy', stage_idx: 2)
+ end
+
+ it 'processes a pipeline', :sidekiq_inline do
+ expect(process_pipeline).to be_truthy
+
+ succeed_pending
+
+ expect(builds.success.count).to eq(2)
+
+ succeed_pending
+
+ expect(builds.success.count).to eq(4)
+
+ succeed_pending
+
+ expect(builds.success.count).to eq(5)
+ end
+
+ it 'does not process pipeline if existing stage is running' do
+ expect(process_pipeline).to be_truthy
+ expect(builds.pending.count).to eq(2)
+
+ expect(process_pipeline).to be_falsey
+ expect(builds.pending.count).to eq(2)
+ end
+ end
+
+ context 'custom stage with first job allowed to fail' do
+ before do
+ create_build('clean_job', stage_idx: 0, allow_failure: true)
+ create_build('test_job', stage_idx: 1, allow_failure: true)
+ end
+
+ it 'automatically triggers a next stage when build finishes', :sidekiq_inline do
+ expect(process_pipeline).to be_truthy
+ expect(builds_statuses).to eq ['pending']
+
+ fail_running_or_pending
+
+ expect(builds_statuses).to eq %w(failed pending)
+
+ fail_running_or_pending
+
+ expect(pipeline.reload).to be_success
+ end
+ end
+
+ context 'when optional manual actions are defined', :sidekiq_inline do
+ before do
+ create_build('build', stage_idx: 0)
+ create_build('test', stage_idx: 1)
+ create_build('test_failure', stage_idx: 2, when: 'on_failure')
+ create_build('deploy', stage_idx: 3)
+ create_build('production', stage_idx: 3, when: 'manual', allow_failure: true)
+ create_build('cleanup', stage_idx: 4, when: 'always')
+ create_build('clear:cache', stage_idx: 4, when: 'manual', allow_failure: true)
+ end
+
+ context 'when builds are successful' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names).to eq ['build']
+ expect(builds_statuses).to eq ['pending']
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test)
+ expect(builds_statuses).to eq %w(success pending)
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test deploy production)
+ expect(builds_statuses).to eq %w(success success pending manual)
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test deploy production cleanup clear:cache)
+ expect(builds_statuses).to eq %w(success success success manual pending manual)
+
+ succeed_running_or_pending
+
+ expect(builds_statuses).to eq %w(success success success manual success manual)
+ expect(pipeline.reload.status).to eq 'success'
+ end
+ end
+
+ context 'when test job fails' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names).to eq ['build']
+ expect(builds_statuses).to eq ['pending']
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test)
+ expect(builds_statuses).to eq %w(success pending)
+
+ fail_running_or_pending
+
+ expect(builds_names).to eq %w(build test test_failure)
+ expect(builds_statuses).to eq %w(success failed pending)
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test test_failure cleanup)
+ expect(builds_statuses).to eq %w(success failed success pending)
+
+ succeed_running_or_pending
+
+ expect(builds_statuses).to eq %w(success failed success success)
+ expect(pipeline.reload.status).to eq 'failed'
+ end
+ end
+
+ context 'when test and test_failure jobs fail' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names).to eq ['build']
+ expect(builds_statuses).to eq ['pending']
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test)
+ expect(builds_statuses).to eq %w(success pending)
+
+ fail_running_or_pending
+
+ expect(builds_names).to eq %w(build test test_failure)
+ expect(builds_statuses).to eq %w(success failed pending)
+
+ fail_running_or_pending
+
+ expect(builds_names).to eq %w(build test test_failure cleanup)
+ expect(builds_statuses).to eq %w(success failed failed pending)
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test test_failure cleanup)
+ expect(builds_statuses).to eq %w(success failed failed success)
+ expect(pipeline.reload.status).to eq('failed')
+ end
+ end
+
+ context 'when deploy job fails' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names).to eq ['build']
+ expect(builds_statuses).to eq ['pending']
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test)
+ expect(builds_statuses).to eq %w(success pending)
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test deploy production)
+ expect(builds_statuses).to eq %w(success success pending manual)
+
+ fail_running_or_pending
+
+ expect(builds_names).to eq %w(build test deploy production cleanup)
+ expect(builds_statuses).to eq %w(success success failed manual pending)
+
+ succeed_running_or_pending
+
+ expect(builds_statuses).to eq %w(success success failed manual success)
+ expect(pipeline.reload).to be_failed
+ end
+ end
+
+ context 'when build is canceled in the second stage' do
+ it 'does not schedule builds after build has been canceled' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names).to eq ['build']
+ expect(builds_statuses).to eq ['pending']
+
+ succeed_running_or_pending
+
+ expect(builds.running_or_pending).not_to be_empty
+ expect(builds_names).to eq %w(build test)
+ expect(builds_statuses).to eq %w(success pending)
+
+ cancel_running_or_pending
+
+ expect(builds.running_or_pending).to be_empty
+ expect(builds_names).to eq %w[build test]
+ expect(builds_statuses).to eq %w[success canceled]
+ expect(pipeline.reload).to be_canceled
+ end
+ end
+
+ context 'when listing optional manual actions' do
+ it 'returns only for skipped builds' do
+ # currently all builds are created
+ expect(process_pipeline).to be_truthy
+ expect(manual_actions).to be_empty
+
+ # succeed stage build
+ succeed_running_or_pending
+
+ expect(manual_actions).to be_empty
+
+ # succeed stage test
+ succeed_running_or_pending
+
+ expect(manual_actions).to be_one # production
+
+ # succeed stage deploy
+ succeed_running_or_pending
+
+ expect(manual_actions).to be_many # production and clear cache
+ end
+ end
+ end
+
+ context 'when delayed jobs are defined', :sidekiq_inline do
+ context 'when the scenario is a timed incremental rollout' do
+ before do
+ create_build('build', stage_idx: 0)
+ create_build('rollout10%', **delayed_options, stage_idx: 1)
+ create_build('rollout100%', **delayed_options, stage_idx: 2)
+ create_build('cleanup', stage_idx: 3)
+
+ allow(Ci::BuildScheduleWorker).to receive(:perform_at)
+ end
+
+ context 'when builds are successful' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
+
+ succeed_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
+
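+ # Advance time before enqueueing the scheduled rollout10% job.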
+ Timecop.travel 2.minutes.from_now do
+ enqueue_scheduled('rollout10%')
+ end
+ succeed_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'scheduled' })
+
+ Timecop.travel 2.minutes.from_now do
+ enqueue_scheduled('rollout100%')
+ end
+ succeed_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'success', 'cleanup': 'pending' })
+
+ succeed_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'success', 'cleanup': 'success' })
+ expect(pipeline.reload.status).to eq 'success'
+ end
+ end
+
+ context 'when build job fails' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
+
+ fail_running_or_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'failed' })
+ expect(pipeline.reload.status).to eq 'failed'
+ end
+ end
+
+ context 'when rollout 10% is unscheduled' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
+
+ succeed_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
+
+ unschedule
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'manual' })
+ expect(pipeline.reload.status).to eq 'manual'
+ end
+
+ context 'when user plays rollout 10%' do
+ it 'schedules rollout100%' do
+ process_pipeline
+ succeed_pending
+ unschedule
+ play_manual_action('rollout10%')
+ succeed_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'scheduled' })
+ expect(pipeline.reload.status).to eq 'scheduled'
+ end
+ end
+ end
+
+ context 'when rollout 10% fails' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
+
+ succeed_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
+
+ Timecop.travel 2.minutes.from_now do
+ enqueue_scheduled('rollout10%')
+ end
+ fail_running_or_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'failed' })
+ expect(pipeline.reload.status).to eq 'failed'
+ end
+
+ context 'when user retries rollout 10%' do
+ it 'does not schedule rollout10% again' do
+ process_pipeline
+ succeed_pending
+ enqueue_scheduled('rollout10%')
+ fail_running_or_pending
+ retry_build('rollout10%')
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'pending' })
+ expect(pipeline.reload.status).to eq 'running'
+ end
+ end
+ end
+
+ context 'when rollout 10% is played immediately' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
+
+ succeed_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
+
+ play_manual_action('rollout10%')
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'pending' })
+ expect(pipeline.reload.status).to eq 'running'
+ end
+ end
+ end
+
+ context 'when only one scheduled job exists in a pipeline' do
+ before do
+ create_build('delayed', **delayed_options, stage_idx: 0)
+
+ allow(Ci::BuildScheduleWorker).to receive(:perform_at)
+ end
+
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names_and_statuses).to eq({ 'delayed': 'scheduled' })
+
+ expect(pipeline.reload.status).to eq 'scheduled'
+ end
+ end
+
+ context 'when there are two delayed jobs in a stage' do
+ before do
+ create_build('delayed1', **delayed_options, stage_idx: 0)
+ create_build('delayed2', **delayed_options, stage_idx: 0)
+ create_build('job', stage_idx: 1)
+
+ allow(Ci::BuildScheduleWorker).to receive(:perform_at)
+ end
+
+ it 'blocks the stage until all scheduled jobs have finished' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names_and_statuses).to eq({ 'delayed1': 'scheduled', 'delayed2': 'scheduled' })
+
+ Timecop.travel 2.minutes.from_now do
+ enqueue_scheduled('delayed1')
+ end
+
+ expect(builds_names_and_statuses).to eq({ 'delayed1': 'pending', 'delayed2': 'scheduled' })
+ expect(pipeline.reload.status).to eq 'running'
+ end
+ end
+
+ context 'when a delayed job is allowed to fail' do
+ before do
+ create_build('delayed', **delayed_options, allow_failure: true, stage_idx: 0)
+ create_build('job', stage_idx: 1)
+
+ allow(Ci::BuildScheduleWorker).to receive(:perform_at)
+ end
+
+ it 'blocks the stage and continues after it failed' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names_and_statuses).to eq({ 'delayed': 'scheduled' })
+
+ Timecop.travel 2.minutes.from_now do
+ enqueue_scheduled('delayed')
+ end
+ fail_running_or_pending
+
+ expect(builds_names_and_statuses).to eq({ 'delayed': 'failed', 'job': 'pending' })
+ expect(pipeline.reload.status).to eq 'pending'
+ end
+ end
+ end
+
+ context 'when an exception is raised during a persistent ref creation' do
+ before do
+ successful_build('test', stage_idx: 0)
+
+ allow_next_instance_of(Ci::PersistentRef) do |instance|
+ allow(instance).to receive(:delete_refs) { raise ArgumentError }
+ end
+ end
+
+ it 'processes the pipeline' do
+ expect { process_pipeline }.not_to raise_error
+ end
+ end
+
+ context 'when there are manual actions in earlier stages' do
+ context 'when first stage has only optional manual actions' do
+ before do
+ create_build('build', stage_idx: 0, when: 'manual', allow_failure: true)
+ create_build('check', stage_idx: 1)
+ create_build('test', stage_idx: 2)
+
+ process_pipeline
+ end
+
+ it 'starts from the second stage' do
+ expect(all_builds_statuses).to eq %w[manual pending created]
+ end
+ end
+
+ context 'when second stage has only optional manual actions' do
+ before do
+ create_build('check', stage_idx: 0)
+ create_build('build', stage_idx: 1, when: 'manual', allow_failure: true)
+ create_build('test', stage_idx: 2)
+
+ process_pipeline
+ end
+
+ it 'skips second stage and continues on third stage', :sidekiq_inline do
+ expect(all_builds_statuses).to eq(%w[pending created created])
+
+ builds.first.success
+
+ expect(all_builds_statuses).to eq(%w[success manual pending])
+ end
+ end
+ end
+
+ context 'when there are only manual actions in stages' do
+ before do
+ create_build('image', stage_idx: 0, when: 'manual', allow_failure: true)
+ create_build('build', stage_idx: 1, when: 'manual', allow_failure: true)
+ create_build('deploy', stage_idx: 2, when: 'manual')
+ create_build('check', stage_idx: 3)
+
+ process_pipeline
+ end
+
+ it 'processes all jobs until blocking actions encountered' do
+ expect(all_builds_statuses).to eq(%w[manual manual manual created])
+ expect(all_builds_names).to eq(%w[image build deploy check])
+
+ expect(pipeline.reload).to be_blocked
+ end
+ end
+
+ context 'when there is only one manual action' do
+ before do
+ create_build('deploy', stage_idx: 0, when: 'manual', allow_failure: true)
+
+ process_pipeline
+ end
+
+ it 'skips the pipeline' do
+ expect(pipeline.reload).to be_skipped
+ end
+
+ context 'when the action was played' do
+ before do
+ play_manual_action('deploy')
+ end
+
+ it 'queues the action and pipeline', :sidekiq_inline do
+ expect(all_builds_statuses).to eq(%w[pending])
+
+ expect(pipeline.reload).to be_pending
+ end
+ end
+ end
+
+ context 'when blocking manual actions are defined', :sidekiq_inline do
+ before do
+ create_build('code:test', stage_idx: 0)
+ create_build('staging:deploy', stage_idx: 1, when: 'manual')
+ create_build('staging:test', stage_idx: 2, when: 'on_success')
+ create_build('production:deploy', stage_idx: 3, when: 'manual')
+ create_build('production:test', stage_idx: 4, when: 'always')
+ end
+
+ context 'when first stage succeeds' do
+ it 'blocks pipeline on stage with first manual action' do
+ process_pipeline
+
+ expect(builds_names).to eq %w[code:test]
+ expect(builds_statuses).to eq %w[pending]
+ expect(pipeline.reload.status).to eq 'pending'
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w[code:test staging:deploy]
+ expect(builds_statuses).to eq %w[success manual]
+ expect(pipeline.reload).to be_manual
+ end
+ end
+
+ context 'when first stage fails' do
+ it 'does not take blocking action into account' do
+ process_pipeline
+
+ expect(builds_names).to eq %w[code:test]
+ expect(builds_statuses).to eq %w[pending]
+ expect(pipeline.reload.status).to eq 'pending'
+
+ fail_running_or_pending
+
+ expect(builds_names).to eq %w[code:test production:test]
+ expect(builds_statuses).to eq %w[failed pending]
+
+ succeed_running_or_pending
+
+ expect(builds_statuses).to eq %w[failed success]
+ expect(pipeline.reload).to be_failed
+ end
+ end
+
+ context 'when pipeline is promoted sequentially up to the end' do
+ before do
+ # Users need ability to merge into a branch in order to trigger
+ # protected manual actions.
+ #
+ create(:protected_branch, :developers_can_merge,
+ name: 'master', project: project)
+ end
+
+ it 'properly processes entire pipeline' do
+ process_pipeline
+
+ expect(builds_names).to eq %w[code:test]
+ expect(builds_statuses).to eq %w[pending]
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w[code:test staging:deploy]
+ expect(builds_statuses).to eq %w[success manual]
+ expect(pipeline.reload).to be_manual
+
+ play_manual_action('staging:deploy')
+
+ expect(builds_statuses).to eq %w[success pending]
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w[code:test staging:deploy staging:test]
+ expect(builds_statuses).to eq %w[success success pending]
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w[code:test staging:deploy staging:test
+ production:deploy]
+ expect(builds_statuses).to eq %w[success success success manual]
+
+ expect(pipeline.reload).to be_manual
+ expect(pipeline.reload).to be_blocked
+ expect(pipeline.reload).not_to be_active
+ expect(pipeline.reload).not_to be_complete
+
+ play_manual_action('production:deploy')
+
+ expect(builds_statuses).to eq %w[success success success pending]
+ expect(pipeline.reload).to be_running
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w[code:test staging:deploy staging:test
+ production:deploy production:test]
+ expect(builds_statuses).to eq %w[success success success success pending]
+ expect(pipeline.reload).to be_running
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w[code:test staging:deploy staging:test
+ production:deploy production:test]
+ expect(builds_statuses).to eq %w[success success success success success]
+ expect(pipeline.reload).to be_success
+ end
+ end
+ end
+
+ context 'when second stage has only on_failure jobs', :sidekiq_inline do
+ before do
+ create_build('check', stage_idx: 0)
+ create_build('build', stage_idx: 1, when: 'on_failure')
+ create_build('test', stage_idx: 2)
+
+ process_pipeline
+ end
+
+ it 'skips second stage and continues on third stage' do
+ expect(all_builds_statuses).to eq(%w[pending created created])
+
+ builds.first.success
+
+ expect(all_builds_statuses).to eq(%w[success skipped pending])
+ end
+ end
+
+ context 'when failed build in the middle stage is retried', :sidekiq_inline do
+ context 'when failed build is the only unsuccessful build in the stage' do
+ before do
+ create_build('build:1', stage_idx: 0)
+ create_build('build:2', stage_idx: 0)
+ create_build('test:1', stage_idx: 1)
+ create_build('test:2', stage_idx: 1)
+ create_build('deploy:1', stage_idx: 2)
+ create_build('deploy:2', stage_idx: 2)
+ end
+
+ it 'does trigger builds in the next stage' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names).to eq ['build:1', 'build:2']
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq ['build:1', 'build:2', 'test:1', 'test:2']
+
+ pipeline.builds.find_by(name: 'test:1').success!
+ pipeline.builds.find_by(name: 'test:2').drop!
+
+ expect(builds_names).to eq ['build:1', 'build:2', 'test:1', 'test:2']
+
+ Ci::Build.retry(pipeline.builds.find_by(name: 'test:2'), user).reset.success!
+
+ expect(builds_names).to eq ['build:1', 'build:2', 'test:1', 'test:2',
+ 'test:2', 'deploy:1', 'deploy:2']
+ end
+ end
+ end
+
+ context 'when builds with auto-retries are configured', :sidekiq_inline do
+ before do
+ create_build('build:1', stage_idx: 0, user: user, options: { script: 'aa', retry: 2 })
+ create_build('test:1', stage_idx: 1, user: user, when: :on_failure)
+ create_build('test:2', stage_idx: 1, user: user, options: { script: 'aa', retry: 1 })
+ end
+
+ it 'automatically retries builds in a valid order' do
+ expect(process_pipeline).to be_truthy
+
+ fail_running_or_pending
+
+ expect(builds_names).to eq %w[build:1 build:1]
+ expect(builds_statuses).to eq %w[failed pending]
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w[build:1 build:1 test:2]
+ expect(builds_statuses).to eq %w[failed success pending]
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w[build:1 build:1 test:2]
+ expect(builds_statuses).to eq %w[failed success success]
+
+ expect(pipeline.reload).to be_success
+ end
+ end
+
+ context 'when pipeline with needs is created', :sidekiq_inline do
+ let!(:linux_build) { create_build('linux:build', stage: 'build', stage_idx: 0) }
+ let!(:mac_build) { create_build('mac:build', stage: 'build', stage_idx: 0) }
+ let!(:linux_rspec) { create_build('linux:rspec', stage: 'test', stage_idx: 1) }
+ let!(:linux_rubocop) { create_build('linux:rubocop', stage: 'test', stage_idx: 1) }
+ let!(:mac_rspec) { create_build('mac:rspec', stage: 'test', stage_idx: 1) }
+ let!(:mac_rubocop) { create_build('mac:rubocop', stage: 'test', stage_idx: 1) }
+ let!(:deploy) { create_build('deploy', stage: 'deploy', stage_idx: 2) }
+
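+ # Only the test-stage jobs declare needs on their corresponding build job;
+ # the DAG examples below rely on that to start them as soon as the needed
+ # build finishes, ahead of normal stage ordering.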
+ let!(:linux_rspec_on_build) { create(:ci_build_need, build: linux_rspec, name: 'linux:build') }
+ let!(:linux_rubocop_on_build) { create(:ci_build_need, build: linux_rubocop, name: 'linux:build') }
+
+ let!(:mac_rspec_on_build) { create(:ci_build_need, build: mac_rspec, name: 'mac:build') }
+ let!(:mac_rubocop_on_build) { create(:ci_build_need, build: mac_rubocop, name: 'mac:build') }
+
+ it 'runs the dependent jobs out of order when linux:* finishes first' do
+ expect(process_pipeline).to be_truthy
+
+ expect(stages).to eq(%w(pending created created))
+ expect(builds.pending).to contain_exactly(linux_build, mac_build)
+
+ # we follow the single path of linux
+ linux_build.reset.success!
+
+ expect(stages).to eq(%w(running pending created))
+ expect(builds.success).to contain_exactly(linux_build)
+ expect(builds.pending).to contain_exactly(mac_build, linux_rspec, linux_rubocop)
+
+ linux_rspec.reset.success!
+
+ expect(stages).to eq(%w(running running created))
+ expect(builds.success).to contain_exactly(linux_build, linux_rspec)
+ expect(builds.pending).to contain_exactly(mac_build, linux_rubocop)
+
+ linux_rubocop.reset.success!
+
+ expect(stages).to eq(%w(running running created))
+ expect(builds.success).to contain_exactly(linux_build, linux_rspec, linux_rubocop)
+ expect(builds.pending).to contain_exactly(mac_build)
+
+ mac_build.reset.success!
+ mac_rspec.reset.success!
+ mac_rubocop.reset.success!
+
+ expect(stages).to eq(%w(success success pending))
+ expect(builds.success).to contain_exactly(
+ linux_build, linux_rspec, linux_rubocop, mac_build, mac_rspec, mac_rubocop)
+ expect(builds.pending).to contain_exactly(deploy)
+ end
+
+ context 'when feature ci_dag_support is disabled' do
+ before do
+ stub_feature_flags(ci_dag_support: false)
+ end
+
+ it 'when linux:build finishes first it follows stages' do
+ expect(process_pipeline).to be_truthy
+
+ expect(stages).to eq(%w(pending created created))
+ expect(builds.pending).to contain_exactly(linux_build, mac_build)
+
+ # we follow the single path of linux
+ linux_build.reset.success!
+
+ expect(stages).to eq(%w(running created created))
+ expect(builds.success).to contain_exactly(linux_build)
+ expect(builds.pending).to contain_exactly(mac_build)
+
+ mac_build.reset.success!
+
+ expect(stages).to eq(%w(success pending created))
+ expect(builds.success).to contain_exactly(linux_build, mac_build)
+ expect(builds.pending).to contain_exactly(
+ linux_rspec, linux_rubocop, mac_rspec, mac_rubocop)
+
+ linux_rspec.reset.success!
+ linux_rubocop.reset.success!
+ mac_rspec.reset.success!
+ mac_rubocop.reset.success!
+
+ expect(stages).to eq(%w(success success pending))
+ expect(builds.success).to contain_exactly(
+ linux_build, linux_rspec, linux_rubocop, mac_build, mac_rspec, mac_rubocop)
+ expect(builds.pending).to contain_exactly(deploy)
+ end
+ end
+
+ context 'when one of the jobs is run on a failure' do
+ let!(:linux_notify) { create_build('linux:notify', stage: 'deploy', stage_idx: 2, when: 'on_failure') }
+
+ let!(:linux_notify_on_build) { create(:ci_build_need, build: linux_notify, name: 'linux:build') }
+
+ context 'when another job in build phase fails first' do
+ context 'when ci_dag_support is enabled' do
+ it 'does skip linux:notify' do
+ expect(process_pipeline).to be_truthy
+
+ mac_build.reset.drop!
+ linux_build.reset.success!
+
+ expect(linux_notify.reset).to be_skipped
+ end
+ end
+
+ context 'when ci_dag_support is disabled' do
+ before do
+ stub_feature_flags(ci_dag_support: false)
+ end
+
+ it 'does run linux:notify' do
+ expect(process_pipeline).to be_truthy
+
+ mac_build.reset.drop!
+ linux_build.reset.success!
+
+ expect(linux_notify.reset).to be_pending
+ end
+ end
+ end
+
+ context 'when linux:build job fails first' do
+ it 'does run linux:notify' do
+ expect(process_pipeline).to be_truthy
+
+ linux_build.reset.drop!
+
+ expect(linux_notify.reset).to be_pending
+ end
+ end
+ end
+ end
+
+ def process_pipeline
+ described_class.new(pipeline).execute
+ end
+
+ def all_builds
+ pipeline.builds.order(:stage_idx, :id)
+ end
+
+ def builds
+ all_builds.where.not(status: [:created, :skipped])
+ end
+
+ def stages
+ pipeline.reset.stages.map(&:status)
+ end
+
+ def builds_names
+ builds.pluck(:name)
+ end
+
+ def builds_names_and_statuses
+ builds.each_with_object({}) do |b, h|
+ h[b.name.to_sym] = b.status
+ h
+ end
+ end
+
+ def all_builds_names
+ all_builds.pluck(:name)
+ end
+
+ def builds_statuses
+ builds.pluck(:status)
+ end
+
+ def all_builds_statuses
+ all_builds.pluck(:status)
+ end
+
+ def succeed_pending
+ builds.pending.each do |build|
+ build.reset.success
+ end
+ end
+
+ def succeed_running_or_pending
+ pipeline.builds.running_or_pending.each do |build|
+ build.reset.success
+ end
+ end
+
+ def fail_running_or_pending
+ pipeline.builds.running_or_pending.each do |build|
+ build.reset.drop
+ end
+ end
+
+ def cancel_running_or_pending
+ pipeline.builds.running_or_pending.each do |build|
+ build.reset.cancel
+ end
+ end
+
+ def play_manual_action(name)
+ builds.find_by(name: name).play(user)
+ end
+
+ def enqueue_scheduled(name)
+ builds.scheduled.find_by(name: name).enqueue_scheduled
+ end
+
+ def retry_build(name)
+ Ci::Build.retry(builds.find_by(name: name), user)
+ end
+
+ def manual_actions
+ pipeline.manual_actions.reload
+ end
+
+ def create_build(name, **opts)
+ create(:ci_build, :created, pipeline: pipeline, name: name, **with_stage_opts(opts))
+ end
+
+ def successful_build(name, **opts)
+ create(:ci_build, :success, pipeline: pipeline, name: name, **with_stage_opts(opts))
+ end
+
+ def with_stage_opts(opts)
+ { stage: "stage-#{opts[:stage_idx].to_i}" }.merge(opts)
+ end
+
+ def delayed_options
+ { when: 'delayed', options: { script: %w(echo), start_in: '1 minute' } }
+ end
+
+ def unschedule
+ pipeline.builds.scheduled.map(&:unschedule)
+ end
+end
diff --git a/spec/services/ci/prepare_build_service_spec.rb b/spec/services/ci/prepare_build_service_spec.rb
index 3c3d8b90bb0..02928b58ff8 100644
--- a/spec/services/ci/prepare_build_service_spec.rb
+++ b/spec/services/ci/prepare_build_service_spec.rb
@@ -14,7 +14,7 @@ describe Ci::PrepareBuildService do
shared_examples 'build enqueueing' do
it 'enqueues the build' do
- expect(build).to receive(:enqueue).once
+ expect(build).to receive(:enqueue_preparing).once
subject
end
@@ -34,7 +34,7 @@ describe Ci::PrepareBuildService do
context 'prerequisites fail to complete' do
before do
- allow(build).to receive(:enqueue).and_return(false)
+ allow(build).to receive(:enqueue_preparing).and_return(false)
end
it 'drops the build' do
diff --git a/spec/services/ci/process_pipeline_service_spec.rb b/spec/services/ci/process_pipeline_service_spec.rb
index ba5891c8694..40ae1c4029b 100644
--- a/spec/services/ci/process_pipeline_service_spec.rb
+++ b/spec/services/ci/process_pipeline_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Ci::ProcessPipelineService, '#execute' do
+describe Ci::ProcessPipelineService do
let(:user) { create(:user) }
let(:project) { create(:project) }
@@ -18,658 +18,6 @@ describe Ci::ProcessPipelineService, '#execute' do
project.add_developer(user)
end
- context 'when simple pipeline is defined' do
- before do
- create_build('linux', stage_idx: 0)
- create_build('mac', stage_idx: 0)
- create_build('rspec', stage_idx: 1)
- create_build('rubocop', stage_idx: 1)
- create_build('deploy', stage_idx: 2)
- end
-
- it 'processes a pipeline', :sidekiq_might_not_need_inline do
- expect(process_pipeline).to be_truthy
-
- succeed_pending
-
- expect(builds.success.count).to eq(2)
-
- succeed_pending
-
- expect(builds.success.count).to eq(4)
-
- succeed_pending
-
- expect(builds.success.count).to eq(5)
- end
-
- it 'does not process pipeline if existing stage is running' do
- expect(process_pipeline).to be_truthy
- expect(builds.pending.count).to eq(2)
-
- expect(process_pipeline).to be_falsey
- expect(builds.pending.count).to eq(2)
- end
- end
-
- context 'custom stage with first job allowed to fail' do
- before do
- create_build('clean_job', stage_idx: 0, allow_failure: true)
- create_build('test_job', stage_idx: 1, allow_failure: true)
- end
-
- it 'automatically triggers a next stage when build finishes', :sidekiq_might_not_need_inline do
- expect(process_pipeline).to be_truthy
- expect(builds_statuses).to eq ['pending']
-
- fail_running_or_pending
-
- expect(builds_statuses).to eq %w(failed pending)
-
- fail_running_or_pending
-
- expect(pipeline.reload).to be_success
- end
- end
-
- context 'when optional manual actions are defined', :sidekiq_might_not_need_inline do
- before do
- create_build('build', stage_idx: 0)
- create_build('test', stage_idx: 1)
- create_build('test_failure', stage_idx: 2, when: 'on_failure')
- create_build('deploy', stage_idx: 3)
- create_build('production', stage_idx: 3, when: 'manual', allow_failure: true)
- create_build('cleanup', stage_idx: 4, when: 'always')
- create_build('clear:cache', stage_idx: 4, when: 'manual', allow_failure: true)
- end
-
- context 'when builds are successful' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names).to eq ['build']
- expect(builds_statuses).to eq ['pending']
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test)
- expect(builds_statuses).to eq %w(success pending)
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test deploy production)
- expect(builds_statuses).to eq %w(success success pending manual)
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test deploy production cleanup clear:cache)
- expect(builds_statuses).to eq %w(success success success manual pending manual)
-
- succeed_running_or_pending
-
- expect(builds_statuses).to eq %w(success success success manual success manual)
- expect(pipeline.reload.status).to eq 'success'
- end
- end
-
- context 'when test job fails' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names).to eq ['build']
- expect(builds_statuses).to eq ['pending']
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test)
- expect(builds_statuses).to eq %w(success pending)
-
- fail_running_or_pending
-
- expect(builds_names).to eq %w(build test test_failure)
- expect(builds_statuses).to eq %w(success failed pending)
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test test_failure cleanup)
- expect(builds_statuses).to eq %w(success failed success pending)
-
- succeed_running_or_pending
-
- expect(builds_statuses).to eq %w(success failed success success)
- expect(pipeline.reload.status).to eq 'failed'
- end
- end
-
- context 'when test and test_failure jobs fail' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names).to eq ['build']
- expect(builds_statuses).to eq ['pending']
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test)
- expect(builds_statuses).to eq %w(success pending)
-
- fail_running_or_pending
-
- expect(builds_names).to eq %w(build test test_failure)
- expect(builds_statuses).to eq %w(success failed pending)
-
- fail_running_or_pending
-
- expect(builds_names).to eq %w(build test test_failure cleanup)
- expect(builds_statuses).to eq %w(success failed failed pending)
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test test_failure cleanup)
- expect(builds_statuses).to eq %w(success failed failed success)
- expect(pipeline.reload.status).to eq('failed')
- end
- end
-
- context 'when deploy job fails' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names).to eq ['build']
- expect(builds_statuses).to eq ['pending']
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test)
- expect(builds_statuses).to eq %w(success pending)
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test deploy production)
- expect(builds_statuses).to eq %w(success success pending manual)
-
- fail_running_or_pending
-
- expect(builds_names).to eq %w(build test deploy production cleanup)
- expect(builds_statuses).to eq %w(success success failed manual pending)
-
- succeed_running_or_pending
-
- expect(builds_statuses).to eq %w(success success failed manual success)
- expect(pipeline.reload).to be_failed
- end
- end
-
- context 'when build is canceled in the second stage' do
- it 'does not schedule builds after build has been canceled' do
- expect(process_pipeline).to be_truthy
- expect(builds_names).to eq ['build']
- expect(builds_statuses).to eq ['pending']
-
- succeed_running_or_pending
-
- expect(builds.running_or_pending).not_to be_empty
- expect(builds_names).to eq %w(build test)
- expect(builds_statuses).to eq %w(success pending)
-
- cancel_running_or_pending
-
- expect(builds.running_or_pending).to be_empty
- expect(builds_names).to eq %w[build test]
- expect(builds_statuses).to eq %w[success canceled]
- expect(pipeline.reload).to be_canceled
- end
- end
-
- context 'when listing optional manual actions' do
- it 'returns only for skipped builds' do
- # currently all builds are created
- expect(process_pipeline).to be_truthy
- expect(manual_actions).to be_empty
-
- # succeed stage build
- succeed_running_or_pending
-
- expect(manual_actions).to be_empty
-
- # succeed stage test
- succeed_running_or_pending
-
- expect(manual_actions).to be_one # production
-
- # succeed stage deploy
- succeed_running_or_pending
-
- expect(manual_actions).to be_many # production and clear cache
- end
- end
- end
-
- context 'when delayed jobs are defined', :sidekiq_might_not_need_inline do
- context 'when the scene is timed incremental rollout' do
- before do
- create_build('build', stage_idx: 0)
- create_build('rollout10%', **delayed_options, stage_idx: 1)
- create_build('rollout100%', **delayed_options, stage_idx: 2)
- create_build('cleanup', stage_idx: 3)
-
- allow(Ci::BuildScheduleWorker).to receive(:perform_at)
- end
-
- context 'when builds are successful' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
-
- succeed_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
-
- enqueue_scheduled('rollout10%')
- succeed_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'scheduled' })
-
- enqueue_scheduled('rollout100%')
- succeed_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'success', 'cleanup': 'pending' })
-
- succeed_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'success', 'cleanup': 'success' })
- expect(pipeline.reload.status).to eq 'success'
- end
- end
-
- context 'when build job fails' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
-
- fail_running_or_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'failed' })
- expect(pipeline.reload.status).to eq 'failed'
- end
- end
-
- context 'when rollout 10% is unscheduled' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
-
- succeed_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
-
- unschedule
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'manual' })
- expect(pipeline.reload.status).to eq 'manual'
- end
-
- context 'when user plays rollout 10%' do
- it 'schedules rollout100%' do
- process_pipeline
- succeed_pending
- unschedule
- play_manual_action('rollout10%')
- succeed_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'scheduled' })
- expect(pipeline.reload.status).to eq 'scheduled'
- end
- end
- end
-
- context 'when rollout 10% fails' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
-
- succeed_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
-
- enqueue_scheduled('rollout10%')
- fail_running_or_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'failed' })
- expect(pipeline.reload.status).to eq 'failed'
- end
-
- context 'when user retries rollout 10%' do
- it 'does not schedule rollout10% again' do
- process_pipeline
- succeed_pending
- enqueue_scheduled('rollout10%')
- fail_running_or_pending
- retry_build('rollout10%')
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'pending' })
- expect(pipeline.reload.status).to eq 'running'
- end
- end
- end
-
- context 'when rollout 10% is played immediately' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
-
- succeed_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
-
- play_manual_action('rollout10%')
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'pending' })
- expect(pipeline.reload.status).to eq 'running'
- end
- end
- end
-
- context 'when only one scheduled job exists in a pipeline' do
- before do
- create_build('delayed', **delayed_options, stage_idx: 0)
-
- allow(Ci::BuildScheduleWorker).to receive(:perform_at)
- end
-
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names_and_statuses).to eq({ 'delayed': 'scheduled' })
-
- expect(pipeline.reload.status).to eq 'scheduled'
- end
- end
-
- context 'when there are two delayed jobs in a stage' do
- before do
- create_build('delayed1', **delayed_options, stage_idx: 0)
- create_build('delayed2', **delayed_options, stage_idx: 0)
- create_build('job', stage_idx: 1)
-
- allow(Ci::BuildScheduleWorker).to receive(:perform_at)
- end
-
- it 'blocks the stage until all scheduled jobs finished' do
- expect(process_pipeline).to be_truthy
- expect(builds_names_and_statuses).to eq({ 'delayed1': 'scheduled', 'delayed2': 'scheduled' })
-
- enqueue_scheduled('delayed1')
-
- expect(builds_names_and_statuses).to eq({ 'delayed1': 'pending', 'delayed2': 'scheduled' })
- expect(pipeline.reload.status).to eq 'running'
- end
- end
-
- context 'when a delayed job is allowed to fail' do
- before do
- create_build('delayed', **delayed_options, allow_failure: true, stage_idx: 0)
- create_build('job', stage_idx: 1)
-
- allow(Ci::BuildScheduleWorker).to receive(:perform_at)
- end
-
- it 'blocks the stage and continues after it failed' do
- expect(process_pipeline).to be_truthy
- expect(builds_names_and_statuses).to eq({ 'delayed': 'scheduled' })
-
- enqueue_scheduled('delayed')
- fail_running_or_pending
-
- expect(builds_names_and_statuses).to eq({ 'delayed': 'failed', 'job': 'pending' })
- expect(pipeline.reload.status).to eq 'pending'
- end
- end
- end
-
- context 'when an exception is raised during a persistent ref creation' do
- before do
- successful_build('test', stage_idx: 0)
-
- allow_next_instance_of(Ci::PersistentRef) do |instance|
- allow(instance).to receive(:delete_refs) { raise ArgumentError }
- end
- end
-
- it 'process the pipeline' do
- expect { process_pipeline }.not_to raise_error
- end
- end
-
- context 'when there are manual action in earlier stages' do
- context 'when first stage has only optional manual actions' do
- before do
- create_build('build', stage_idx: 0, when: 'manual', allow_failure: true)
- create_build('check', stage_idx: 1)
- create_build('test', stage_idx: 2)
-
- process_pipeline
- end
-
- it 'starts from the second stage' do
- expect(all_builds_statuses).to eq %w[manual pending created]
- end
- end
-
- context 'when second stage has only optional manual actions' do
- before do
- create_build('check', stage_idx: 0)
- create_build('build', stage_idx: 1, when: 'manual', allow_failure: true)
- create_build('test', stage_idx: 2)
-
- process_pipeline
- end
-
- it 'skips second stage and continues on third stage', :sidekiq_might_not_need_inline do
- expect(all_builds_statuses).to eq(%w[pending created created])
-
- builds.first.success
-
- expect(all_builds_statuses).to eq(%w[success manual pending])
- end
- end
- end
-
- context 'when there are only manual actions in stages' do
- before do
- create_build('image', stage_idx: 0, when: 'manual', allow_failure: true)
- create_build('build', stage_idx: 1, when: 'manual', allow_failure: true)
- create_build('deploy', stage_idx: 2, when: 'manual')
- create_build('check', stage_idx: 3)
-
- process_pipeline
- end
-
- it 'processes all jobs until blocking actions encountered' do
- expect(all_builds_statuses).to eq(%w[manual manual manual created])
- expect(all_builds_names).to eq(%w[image build deploy check])
-
- expect(pipeline.reload).to be_blocked
- end
- end
-
- context 'when there is only one manual action' do
- before do
- create_build('deploy', stage_idx: 0, when: 'manual', allow_failure: true)
-
- process_pipeline
- end
-
- it 'skips the pipeline' do
- expect(pipeline.reload).to be_skipped
- end
-
- context 'when the action was played' do
- before do
- play_manual_action('deploy')
- end
-
- it 'queues the action and pipeline', :sidekiq_might_not_need_inline do
- expect(all_builds_statuses).to eq(%w[pending])
-
- expect(pipeline.reload).to be_pending
- end
- end
- end
-
- context 'when blocking manual actions are defined', :sidekiq_might_not_need_inline do
- before do
- create_build('code:test', stage_idx: 0)
- create_build('staging:deploy', stage_idx: 1, when: 'manual')
- create_build('staging:test', stage_idx: 2, when: 'on_success')
- create_build('production:deploy', stage_idx: 3, when: 'manual')
- create_build('production:test', stage_idx: 4, when: 'always')
- end
-
- context 'when first stage succeeds' do
- it 'blocks pipeline on stage with first manual action' do
- process_pipeline
-
- expect(builds_names).to eq %w[code:test]
- expect(builds_statuses).to eq %w[pending]
- expect(pipeline.reload.status).to eq 'pending'
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w[code:test staging:deploy]
- expect(builds_statuses).to eq %w[success manual]
- expect(pipeline.reload).to be_manual
- end
- end
-
- context 'when first stage fails' do
- it 'does not take blocking action into account' do
- process_pipeline
-
- expect(builds_names).to eq %w[code:test]
- expect(builds_statuses).to eq %w[pending]
- expect(pipeline.reload.status).to eq 'pending'
-
- fail_running_or_pending
-
- expect(builds_names).to eq %w[code:test production:test]
- expect(builds_statuses).to eq %w[failed pending]
-
- succeed_running_or_pending
-
- expect(builds_statuses).to eq %w[failed success]
- expect(pipeline.reload).to be_failed
- end
- end
-
- context 'when pipeline is promoted sequentially up to the end' do
- before do
- # Users need ability to merge into a branch in order to trigger
- # protected manual actions.
- #
- create(:protected_branch, :developers_can_merge,
- name: 'master', project: project)
- end
-
- it 'properly processes entire pipeline' do
- process_pipeline
-
- expect(builds_names).to eq %w[code:test]
- expect(builds_statuses).to eq %w[pending]
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w[code:test staging:deploy]
- expect(builds_statuses).to eq %w[success manual]
- expect(pipeline.reload).to be_manual
-
- play_manual_action('staging:deploy')
-
- expect(builds_statuses).to eq %w[success pending]
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w[code:test staging:deploy staging:test]
- expect(builds_statuses).to eq %w[success success pending]
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w[code:test staging:deploy staging:test
- production:deploy]
- expect(builds_statuses).to eq %w[success success success manual]
-
- expect(pipeline.reload).to be_manual
- expect(pipeline.reload).to be_blocked
- expect(pipeline.reload).not_to be_active
- expect(pipeline.reload).not_to be_complete
-
- play_manual_action('production:deploy')
-
- expect(builds_statuses).to eq %w[success success success pending]
- expect(pipeline.reload).to be_running
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w[code:test staging:deploy staging:test
- production:deploy production:test]
- expect(builds_statuses).to eq %w[success success success success pending]
- expect(pipeline.reload).to be_running
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w[code:test staging:deploy staging:test
- production:deploy production:test]
- expect(builds_statuses).to eq %w[success success success success success]
- expect(pipeline.reload).to be_success
- end
- end
- end
-
- context 'when second stage has only on_failure jobs', :sidekiq_might_not_need_inline do
- before do
- create_build('check', stage_idx: 0)
- create_build('build', stage_idx: 1, when: 'on_failure')
- create_build('test', stage_idx: 2)
-
- process_pipeline
- end
-
- it 'skips second stage and continues on third stage' do
- expect(all_builds_statuses).to eq(%w[pending created created])
-
- builds.first.success
-
- expect(all_builds_statuses).to eq(%w[success skipped pending])
- end
- end
-
- context 'when failed build in the middle stage is retried', :sidekiq_might_not_need_inline do
- context 'when failed build is the only unsuccessful build in the stage' do
- before do
- create_build('build:1', stage_idx: 0)
- create_build('build:2', stage_idx: 0)
- create_build('test:1', stage_idx: 1)
- create_build('test:2', stage_idx: 1)
- create_build('deploy:1', stage_idx: 2)
- create_build('deploy:2', stage_idx: 2)
- end
-
- it 'does trigger builds in the next stage' do
- expect(process_pipeline).to be_truthy
- expect(builds_names).to eq ['build:1', 'build:2']
-
- succeed_running_or_pending
-
- expect(builds_names).to eq ['build:1', 'build:2', 'test:1', 'test:2']
-
- pipeline.builds.find_by(name: 'test:1').success
- pipeline.builds.find_by(name: 'test:2').drop
-
- expect(builds_names).to eq ['build:1', 'build:2', 'test:1', 'test:2']
-
- Ci::Build.retry(pipeline.builds.find_by(name: 'test:2'), user).success
-
- expect(builds_names).to eq ['build:1', 'build:2', 'test:1', 'test:2',
- 'test:2', 'deploy:1', 'deploy:2']
- end
- end
- end
-
context 'updates a list of retried builds' do
subject { described_class.retried.order(:id) }
@@ -685,251 +33,15 @@ describe Ci::ProcessPipelineService, '#execute' do
end
end
- context 'when builds with auto-retries are configured', :sidekiq_might_not_need_inline do
- before do
- create_build('build:1', stage_idx: 0, user: user, options: { script: 'aa', retry: 2 })
- create_build('test:1', stage_idx: 1, user: user, when: :on_failure)
- create_build('test:2', stage_idx: 1, user: user, options: { script: 'aa', retry: 1 })
- end
-
- it 'automatically retries builds in a valid order' do
- expect(process_pipeline).to be_truthy
-
- fail_running_or_pending
-
- expect(builds_names).to eq %w[build:1 build:1]
- expect(builds_statuses).to eq %w[failed pending]
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w[build:1 build:1 test:2]
- expect(builds_statuses).to eq %w[failed success pending]
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w[build:1 build:1 test:2]
- expect(builds_statuses).to eq %w[failed success success]
-
- expect(pipeline.reload).to be_success
- end
- end
-
- context 'when pipeline with needs is created', :sidekiq_might_not_need_inline do
- let!(:linux_build) { create_build('linux:build', stage: 'build', stage_idx: 0) }
- let!(:mac_build) { create_build('mac:build', stage: 'build', stage_idx: 0) }
- let!(:linux_rspec) { create_build('linux:rspec', stage: 'test', stage_idx: 1) }
- let!(:linux_rubocop) { create_build('linux:rubocop', stage: 'test', stage_idx: 1) }
- let!(:mac_rspec) { create_build('mac:rspec', stage: 'test', stage_idx: 1) }
- let!(:mac_rubocop) { create_build('mac:rubocop', stage: 'test', stage_idx: 1) }
- let!(:deploy) { create_build('deploy', stage: 'deploy', stage_idx: 2) }
-
- let!(:linux_rspec_on_build) { create(:ci_build_need, build: linux_rspec, name: 'linux:build') }
- let!(:linux_rubocop_on_build) { create(:ci_build_need, build: linux_rubocop, name: 'linux:build') }
-
- let!(:mac_rspec_on_build) { create(:ci_build_need, build: mac_rspec, name: 'mac:build') }
- let!(:mac_rubocop_on_build) { create(:ci_build_need, build: mac_rubocop, name: 'mac:build') }
-
- it 'when linux:* finishes first it runs it out of order' do
- expect(process_pipeline).to be_truthy
-
- expect(stages).to eq(%w(pending created created))
- expect(builds.pending).to contain_exactly(linux_build, mac_build)
-
- # we follow the single path of linux
- linux_build.reset.success!
-
- expect(stages).to eq(%w(running pending created))
- expect(builds.success).to contain_exactly(linux_build)
- expect(builds.pending).to contain_exactly(mac_build, linux_rspec, linux_rubocop)
-
- linux_rspec.reset.success!
-
- expect(stages).to eq(%w(running running created))
- expect(builds.success).to contain_exactly(linux_build, linux_rspec)
- expect(builds.pending).to contain_exactly(mac_build, linux_rubocop)
-
- linux_rubocop.reset.success!
-
- expect(stages).to eq(%w(running running created))
- expect(builds.success).to contain_exactly(linux_build, linux_rspec, linux_rubocop)
- expect(builds.pending).to contain_exactly(mac_build)
-
- mac_build.reset.success!
- mac_rspec.reset.success!
- mac_rubocop.reset.success!
-
- expect(stages).to eq(%w(success success pending))
- expect(builds.success).to contain_exactly(
- linux_build, linux_rspec, linux_rubocop, mac_build, mac_rspec, mac_rubocop)
- expect(builds.pending).to contain_exactly(deploy)
- end
-
- context 'when feature ci_dag_support is disabled' do
- before do
- stub_feature_flags(ci_dag_support: false)
- end
-
- it 'when linux:build finishes first it follows stages' do
- expect(process_pipeline).to be_truthy
-
- expect(stages).to eq(%w(pending created created))
- expect(builds.pending).to contain_exactly(linux_build, mac_build)
-
- # we follow the single path of linux
- linux_build.reset.success!
-
- expect(stages).to eq(%w(running created created))
- expect(builds.success).to contain_exactly(linux_build)
- expect(builds.pending).to contain_exactly(mac_build)
-
- mac_build.reset.success!
-
- expect(stages).to eq(%w(success pending created))
- expect(builds.success).to contain_exactly(linux_build, mac_build)
- expect(builds.pending).to contain_exactly(
- linux_rspec, linux_rubocop, mac_rspec, mac_rubocop)
-
- linux_rspec.reset.success!
- linux_rubocop.reset.success!
- mac_rspec.reset.success!
- mac_rubocop.reset.success!
-
- expect(stages).to eq(%w(success success pending))
- expect(builds.success).to contain_exactly(
- linux_build, linux_rspec, linux_rubocop, mac_build, mac_rspec, mac_rubocop)
- expect(builds.pending).to contain_exactly(deploy)
- end
- end
-
- context 'when one of the jobs is run on a failure' do
- let!(:linux_notify) { create_build('linux:notify', stage: 'deploy', stage_idx: 2, when: 'on_failure') }
-
- let!(:linux_notify_on_build) { create(:ci_build_need, build: linux_notify, name: 'linux:build') }
-
- context 'when another job in build phase fails first' do
- context 'when ci_dag_support is enabled' do
- it 'does skip linux:notify' do
- expect(process_pipeline).to be_truthy
-
- mac_build.reset.drop!
- linux_build.reset.success!
-
- expect(linux_notify.reset).to be_skipped
- end
- end
-
- context 'when ci_dag_support is disabled' do
- before do
- stub_feature_flags(ci_dag_support: false)
- end
-
- it 'does run linux:notify' do
- expect(process_pipeline).to be_truthy
-
- mac_build.reset.drop!
- linux_build.reset.success!
-
- expect(linux_notify.reset).to be_pending
- end
- end
- end
-
- context 'when linux:build job fails first' do
- it 'does run linux:notify' do
- expect(process_pipeline).to be_truthy
-
- linux_build.reset.drop!
-
- expect(linux_notify.reset).to be_pending
- end
- end
- end
- end
-
def process_pipeline
described_class.new(pipeline).execute
end
- def all_builds
- pipeline.builds.order(:stage_idx, :id)
- end
-
- def builds
- all_builds.where.not(status: [:created, :skipped])
- end
-
- def stages
- pipeline.reset.stages.map(&:status)
- end
-
- def builds_names
- builds.pluck(:name)
- end
-
- def builds_names_and_statuses
- builds.each_with_object({}) do |b, h|
- h[b.name.to_sym] = b.status
- h
- end
- end
-
- def all_builds_names
- all_builds.pluck(:name)
- end
-
- def builds_statuses
- builds.pluck(:status)
- end
-
- def all_builds_statuses
- all_builds.pluck(:status)
- end
-
- def succeed_pending
- builds.pending.map(&:success)
- end
-
- def succeed_running_or_pending
- pipeline.builds.running_or_pending.each(&:success)
- end
-
- def fail_running_or_pending
- pipeline.builds.running_or_pending.each(&:drop)
- end
-
- def cancel_running_or_pending
- pipeline.builds.running_or_pending.each(&:cancel)
- end
-
- def play_manual_action(name)
- builds.find_by(name: name).play(user)
- end
-
- def enqueue_scheduled(name)
- builds.scheduled.find_by(name: name).enqueue
- end
-
- def retry_build(name)
- Ci::Build.retry(builds.find_by(name: name), user)
- end
-
- def manual_actions
- pipeline.manual_actions.reload
- end
-
def create_build(name, **opts)
create(:ci_build, :created, pipeline: pipeline, name: name, **opts)
end
- def successful_build(name, **opts)
- create(:ci_build, :success, pipeline: pipeline, name: name, **opts)
- end
-
- def delayed_options
- { when: 'delayed', options: { script: %w(echo), start_in: '1 minute' } }
- end
-
- def unschedule
- pipeline.builds.scheduled.map(&:unschedule)
+ def all_builds
+ pipeline.builds.order(:stage_idx, :id)
end
end
diff --git a/spec/services/ci/register_job_service_spec.rb b/spec/services/ci/register_job_service_spec.rb
index 0339c6cc2d6..0f2d994efd4 100644
--- a/spec/services/ci/register_job_service_spec.rb
+++ b/spec/services/ci/register_job_service_spec.rb
@@ -4,9 +4,9 @@ require 'spec_helper'
module Ci
describe RegisterJobService do
- set(:group) { create(:group) }
- set(:project) { create(:project, group: group, shared_runners_enabled: false, group_runners_enabled: false) }
- set(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project, reload: true) { create(:project, group: group, shared_runners_enabled: false, group_runners_enabled: false) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
let!(:shared_runner) { create(:ci_runner, :instance) }
let!(:specific_runner) { create(:ci_runner, :project, projects: [project]) }
let!(:group_runner) { create(:ci_runner, :group, groups: [group]) }
diff --git a/spec/services/ci/resource_groups/assign_resource_from_resource_group_service_spec.rb b/spec/services/ci/resource_groups/assign_resource_from_resource_group_service_spec.rb
new file mode 100644
index 00000000000..50d312647ae
--- /dev/null
+++ b/spec/services/ci/resource_groups/assign_resource_from_resource_group_service_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::ResourceGroups::AssignResourceFromResourceGroupService do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let(:service) { described_class.new(project, user) }
+
+ describe '#execute' do
+ subject { service.execute(resource_group) }
+
+ let(:resource_group) { create(:ci_resource_group, project: project) }
+ let!(:build) { create(:ci_build, :waiting_for_resource, project: project, user: user, resource_group: resource_group) }
+
+ context 'when there is an available resource' do
+ it 'requests resource' do
+ subject
+
+ expect(build.reload).to be_pending
+ expect(build.resource).to be_present
+ end
+
+ context 'when the resource request fails' do
+ before do
+ allow_next_instance_of(Ci::Build) do |build|
+ allow(build).to receive(:enqueue_waiting_for_resource) { false }
+ end
+ end
+
+ it 'has a build waiting for resource' do
+ subject
+
+ expect(build).to be_waiting_for_resource
+ end
+ end
+
+ context 'when the build has already retained a resource' do
+ before do
+ resource_group.assign_resource_to(build)
+ build.update_column(:status, :pending)
+ end
+
+ it 'has a pending build' do
+ subject
+
+ expect(build).to be_pending
+ end
+ end
+ end
+
+ context 'when there are no available resources' do
+ before do
+ resource_group.assign_resource_to(create(:ci_build))
+ end
+
+ it 'does not request resource' do
+ expect_any_instance_of(Ci::Build).not_to receive(:enqueue_waiting_for_resource)
+
+ subject
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index b1368f7776b..b3189974440 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -3,9 +3,12 @@
require 'spec_helper'
describe Ci::RetryBuildService do
- set(:user) { create(:user) }
- set(:project) { create(:project) }
- set(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:pipeline) do
+ create(:ci_pipeline, project: project,
+ sha: 'b83d6e391c22777fca1ed3012fce84f633d7fed0')
+ end
let(:stage) do
create(:ci_stage_entity, project: project,
@@ -29,9 +32,11 @@ describe Ci::RetryBuildService do
job_artifacts_metadata job_artifacts_trace job_artifacts_junit
job_artifacts_sast job_artifacts_dependency_scanning
job_artifacts_container_scanning job_artifacts_dast
- job_artifacts_license_management job_artifacts_performance
+ job_artifacts_license_management job_artifacts_license_scanning
+ job_artifacts_performance
job_artifacts_codequality job_artifacts_metrics scheduled_at
- job_variables].freeze
+ job_variables waiting_for_resource_at job_artifacts_metrics_referee
+ job_artifacts_network_referee].freeze
IGNORE_ACCESSORS =
%i[type lock_version target_url base_tags trace_sections
@@ -40,14 +45,15 @@ describe Ci::RetryBuildService do
user_id auto_canceled_by_id retried failure_reason
sourced_pipelines artifacts_file_store artifacts_metadata_store
metadata runner_session trace_chunks upstream_pipeline_id
- artifacts_file artifacts_metadata artifacts_size commands].freeze
+ artifacts_file artifacts_metadata artifacts_size commands
+ resource resource_group_id processed].freeze
shared_examples 'build duplication' do
let(:another_pipeline) { create(:ci_empty_pipeline, project: project) }
let(:build) do
create(:ci_build, :failed, :expired, :erased, :queued, :coverage, :tags,
- :allowed_to_fail, :on_tag, :triggered, :teardown_environment,
+ :allowed_to_fail, :on_tag, :triggered, :teardown_environment, :resource_group,
description: 'my-job', stage: 'test', stage_id: stage.id,
pipeline: pipeline, auto_canceled_by: another_pipeline,
scheduled_at: 10.seconds.since)
@@ -197,17 +203,19 @@ describe Ci::RetryBuildService do
it 'does not enqueue the new build' do
expect(new_build).to be_created
+ expect(new_build).not_to be_processed
end
- it 'does mark old build as retried in the database and on the instance' do
+ it 'does mark old build as retried' do
expect(new_build).to be_latest
expect(build).to be_retried
- expect(build.reload).to be_retried
+ expect(build).to be_processed
end
context 'when build with deployment is retried' do
let!(:build) do
- create(:ci_build, :with_deployment, :deploy_to_production, pipeline: pipeline, stage_id: stage.id)
+ create(:ci_build, :with_deployment, :deploy_to_production,
+ pipeline: pipeline, stage_id: stage.id, project: project)
end
it 'creates a new deployment' do
diff --git a/spec/services/ci/retry_pipeline_service_spec.rb b/spec/services/ci/retry_pipeline_service_spec.rb
index 4b949761b8f..e7a241ed335 100644
--- a/spec/services/ci/retry_pipeline_service_spec.rb
+++ b/spec/services/ci/retry_pipeline_service_spec.rb
@@ -330,7 +330,7 @@ describe Ci::RetryPipelineService, '#execute' do
stage: "stage_#{stage_num}",
stage_idx: stage_num,
pipeline: pipeline, **opts) do |build|
- pipeline.update_status
+ pipeline.update_legacy_status
end
end
end
diff --git a/spec/services/ci/run_scheduled_build_service_spec.rb b/spec/services/ci/run_scheduled_build_service_spec.rb
index ab8d9f4ba2e..43d110cbc8f 100644
--- a/spec/services/ci/run_scheduled_build_service_spec.rb
+++ b/spec/services/ci/run_scheduled_build_service_spec.rb
@@ -26,6 +26,18 @@ describe Ci::RunScheduledBuildService do
expect(build).to be_pending
end
+
+ context 'when build requires resource' do
+ let(:resource_group) { create(:ci_resource_group, project: project) }
+
+ before do
+ build.update!(resource_group: resource_group)
+ end
+
+ it 'transitions to waiting_for_resource status' do
+ expect { subject }.to change { build.status }.from('scheduled').to('waiting_for_resource')
+ end
+ end
end
context 'when scheduled_at is not expired' do
diff --git a/spec/services/clusters/applications/check_installation_progress_service_spec.rb b/spec/services/clusters/applications/check_installation_progress_service_spec.rb
index 7b37eb97800..2f224d40920 100644
--- a/spec/services/clusters/applications/check_installation_progress_service_spec.rb
+++ b/spec/services/clusters/applications/check_installation_progress_service_spec.rb
@@ -160,6 +160,12 @@ describe Clusters::Applications::CheckInstallationProgressService, '#execute' do
expect(application).to be_installed
expect(application.status_reason).to be_nil
end
+
+ it 'tracks application install' do
+ expect(Gitlab::Tracking).to receive(:event).with('cluster:applications', "cluster_application_helm_installed")
+
+ service.execute
+ end
end
context 'when installation POD failed' do
diff --git a/spec/services/clusters/applications/create_service_spec.rb b/spec/services/clusters/applications/create_service_spec.rb
index bdacb9ce071..f62af86f1bf 100644
--- a/spec/services/clusters/applications/create_service_spec.rb
+++ b/spec/services/clusters/applications/create_service_spec.rb
@@ -47,6 +47,33 @@ describe Clusters::Applications::CreateService do
create(:clusters_applications_helm, :installed, cluster: cluster)
end
+ context 'ingress application' do
+ let(:params) do
+ {
+ application: 'ingress',
+ modsecurity_enabled: true
+ }
+ end
+
+ before do
+ expect_any_instance_of(Clusters::Applications::Ingress)
+ .to receive(:make_scheduled!)
+ .and_call_original
+ end
+
+ it 'creates the application' do
+ expect do
+ subject
+
+ cluster.reload
+ end.to change(cluster, :application_ingress)
+ end
+
+ it 'sets modsecurity_enabled' do
+ expect(subject.modsecurity_enabled).to eq(true)
+ end
+ end
+
context 'cert manager application' do
let(:params) do
{
@@ -136,8 +163,7 @@ describe Clusters::Applications::CreateService do
context 'elastic stack application' do
let(:params) do
{
- application: 'elastic_stack',
- kibana_hostname: 'example.com'
+ application: 'elastic_stack'
}
end
@@ -155,10 +181,6 @@ describe Clusters::Applications::CreateService do
cluster.reload
end.to change(cluster, :application_elastic_stack)
end
-
- it 'sets the kibana_hostname' do
- expect(subject.kibana_hostname).to eq('example.com')
- end
end
end
diff --git a/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb b/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb
index bd1a90996a8..3982d2310d8 100644
--- a/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb
+++ b/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb
@@ -57,11 +57,21 @@ describe Clusters::Kubernetes::CreateOrUpdateNamespaceService, '#execute' do
end.to change(Clusters::KubernetesNamespace, :count).by(1)
end
- it 'creates project service account' do
- expect_next_instance_of(Clusters::Kubernetes::CreateOrUpdateServiceAccountService) do |instance|
- expect(instance).to receive(:execute).once
- end
-
+ it 'creates project service account and namespace' do
+ account_service = double(Clusters::Kubernetes::CreateOrUpdateServiceAccountService)
+ expect(Clusters::Kubernetes::CreateOrUpdateServiceAccountService).to(
+ receive(:namespace_creator).with(
+ cluster.platform.kubeclient,
+ service_account_name: "#{namespace}-service-account",
+ service_account_namespace: namespace,
+ service_account_namespace_labels: {
+ 'app.gitlab.com/app' => project.full_path_slug,
+ 'app.gitlab.com/env' => environment.slug
+ },
+ rbac: true
+ ).and_return(account_service)
+ )
+ expect(account_service).to receive(:execute).once
subject
end
@@ -73,6 +83,29 @@ describe Clusters::Kubernetes::CreateOrUpdateNamespaceService, '#execute' do
expect(kubernetes_namespace.service_account_name).to eq("#{namespace}-service-account")
expect(kubernetes_namespace.encrypted_service_account_token).to be_present
end
+
+ context 'without environment' do
+ before do
+ kubernetes_namespace.environment = nil
+ end
+
+ it 'creates project service account and namespace' do
+ account_service = double(Clusters::Kubernetes::CreateOrUpdateServiceAccountService)
+ expect(Clusters::Kubernetes::CreateOrUpdateServiceAccountService).to(
+ receive(:namespace_creator).with(
+ cluster.platform.kubeclient,
+ service_account_name: "#{namespace}-service-account",
+ service_account_namespace: namespace,
+ service_account_namespace_labels: {
+ 'app.gitlab.com/app' => project.full_path_slug
+ },
+ rbac: true
+ ).and_return(account_service)
+ )
+ expect(account_service).to receive(:execute).once
+ subject
+ end
+ end
end
context 'group clusters' do
diff --git a/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb b/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb
index 4df73fcc2ae..8fa22422074 100644
--- a/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb
+++ b/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb
@@ -116,6 +116,7 @@ describe Clusters::Kubernetes::CreateOrUpdateServiceAccountService do
describe '.namespace_creator' do
let(:namespace) { "#{project.path}-#{project.id}" }
+ let(:namespace_labels) { { app: project.full_path_slug, env: "staging" } }
let(:service_account_name) { "#{namespace}-service-account" }
let(:token_name) { "#{namespace}-token" }
@@ -124,6 +125,7 @@ describe Clusters::Kubernetes::CreateOrUpdateServiceAccountService do
kubeclient,
service_account_name: service_account_name,
service_account_namespace: namespace,
+ service_account_namespace_labels: namespace_labels,
rbac: rbac
).execute
end
@@ -149,6 +151,16 @@ describe Clusters::Kubernetes::CreateOrUpdateServiceAccountService do
stub_kubeclient_put_role_binding(api_url, Clusters::Kubernetes::GITLAB_CROSSPLANE_DATABASE_ROLE_BINDING_NAME, namespace: namespace)
end
+ it 'creates a namespace object' do
+ kubernetes_namespace = double(Gitlab::Kubernetes::Namespace)
+ expect(Gitlab::Kubernetes::Namespace).to(
+ receive(:new).with(namespace, kubeclient, labels: namespace_labels).and_return(kubernetes_namespace)
+ )
+ expect(kubernetes_namespace).to receive(:ensure_exists!)
+
+ subject
+ end
+
it_behaves_like 'creates service account and token'
it 'creates a namespaced role binding with edit access' do
diff --git a/spec/services/container_expiration_policy_service_spec.rb b/spec/services/container_expiration_policy_service_spec.rb
new file mode 100644
index 00000000000..1e4899c627f
--- /dev/null
+++ b/spec/services/container_expiration_policy_service_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ContainerExpirationPolicyService do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:container_expiration_policy) { create(:container_expiration_policy, :runnable) }
+ let(:project) { container_expiration_policy.project }
+ let(:container_repository) { create(:container_repository, project: project) }
+
+ before do
+ project.add_maintainer(user)
+ end
+
+ describe '#execute' do
+ subject { described_class.new(project, user).execute(container_expiration_policy) }
+
+ it 'kicks off a cleanup worker for the container repository' do
+ expect(CleanupContainerRepositoryWorker).to receive(:perform_async)
+ .with(user.id, container_repository.id, anything)
+
+ subject
+ end
+
+ it 'sets next_run_at on the container_expiration_policy' do
+ subject
+
+ expect(container_expiration_policy.next_run_at).to be > Time.zone.now
+ end
+ end
+end
diff --git a/spec/services/create_snippet_service_spec.rb b/spec/services/create_snippet_service_spec.rb
deleted file mode 100644
index 1751029a78c..00000000000
--- a/spec/services/create_snippet_service_spec.rb
+++ /dev/null
@@ -1,117 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe CreateSnippetService do
- let(:user) { create(:user) }
- let(:admin) { create(:user, :admin) }
- let(:opts) { base_opts.merge(extra_opts) }
- let(:base_opts) do
- {
- title: 'Test snippet',
- file_name: 'snippet.rb',
- content: 'puts "hello world"',
- visibility_level: Gitlab::VisibilityLevel::PRIVATE
- }
- end
- let(:extra_opts) { {} }
-
- context 'When public visibility is restricted' do
- let(:extra_opts) { { visibility_level: Gitlab::VisibilityLevel::PUBLIC } }
-
- before do
- stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC])
- end
-
- it 'non-admins are not able to create a public snippet' do
- snippet = create_snippet(nil, user, opts)
- expect(snippet.errors.messages).to have_key(:visibility_level)
- expect(snippet.errors.messages[:visibility_level].first).to(
- match('has been restricted')
- )
- end
-
- it 'admins are able to create a public snippet' do
- snippet = create_snippet(nil, admin, opts)
- expect(snippet.errors.any?).to be_falsey
- expect(snippet.visibility_level).to eq(Gitlab::VisibilityLevel::PUBLIC)
- end
-
- describe "when visibility level is passed as a string" do
- let(:extra_opts) { { visibility: 'internal' } }
-
- before do
- base_opts.delete(:visibility_level)
- end
-
- it "assigns the correct visibility level" do
- snippet = create_snippet(nil, user, opts)
- expect(snippet.errors.any?).to be_falsey
- expect(snippet.visibility_level).to eq(Gitlab::VisibilityLevel::INTERNAL)
- end
- end
- end
-
- context 'checking spam' do
- shared_examples 'marked as spam' do
- let(:snippet) { create_snippet(nil, admin, opts) }
-
- it 'marks a snippet as a spam ' do
- expect(snippet).to be_spam
- end
-
- it 'invalidates the snippet' do
- expect(snippet).to be_invalid
- end
-
- it 'creates a new spam_log' do
- expect { snippet }
- .to log_spam(title: snippet.title, noteable_type: 'PersonalSnippet')
- end
-
- it 'assigns a spam_log to an issue' do
- expect(snippet.spam_log).to eq(SpamLog.last)
- end
- end
-
- let(:extra_opts) do
- { visibility_level: Gitlab::VisibilityLevel::PUBLIC, request: double(:request, env: {}) }
- end
-
- before do
- expect_next_instance_of(AkismetService) do |akismet_service|
- expect(akismet_service).to receive_messages(spam?: true)
- end
- end
-
- [true, false, nil].each do |allow_possible_spam|
- context "when recaptcha_disabled flag is #{allow_possible_spam.inspect}" do
- before do
- stub_feature_flags(allow_possible_spam: allow_possible_spam) unless allow_possible_spam.nil?
- end
-
- it_behaves_like 'marked as spam'
- end
- end
- end
-
- describe 'usage counter' do
- let(:counter) { Gitlab::UsageDataCounters::SnippetCounter }
-
- it 'increments count' do
- expect do
- create_snippet(nil, admin, opts)
- end.to change { counter.read(:create) }.by 1
- end
-
- it 'does not increment count if create fails' do
- expect do
- create_snippet(nil, admin, {})
- end.not_to change { counter.read(:create) }
- end
- end
-
- def create_snippet(project, user, opts)
- CreateSnippetService.new(project, user, opts).execute
- end
-end
diff --git a/spec/services/deployments/after_create_service_spec.rb b/spec/services/deployments/after_create_service_spec.rb
index 4ca96658db0..51c6de2c0b9 100644
--- a/spec/services/deployments/after_create_service_spec.rb
+++ b/spec/services/deployments/after_create_service_spec.rb
@@ -6,10 +6,18 @@ describe Deployments::AfterCreateService do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
let(:options) { { name: 'production' } }
+ let(:pipeline) do
+ create(
+ :ci_pipeline,
+ sha: 'b83d6e391c22777fca1ed3012fce84f633d7fed0',
+ project: project
+ )
+ end
let(:job) do
create(:ci_build,
:with_deployment,
+ pipeline: pipeline,
ref: 'master',
tag: false,
environment: 'production',
@@ -53,14 +61,6 @@ describe Deployments::AfterCreateService do
service.execute
end
- it 'links merge requests to deployment' do
- expect_next_instance_of(Deployments::LinkMergeRequestsService, deployment) do |link_mr_service|
- expect(link_mr_service).to receive(:execute)
- end
-
- service.execute
- end
-
it 'returns the deployment' do
expect(subject.execute).to eq(deployment)
end
@@ -139,6 +139,7 @@ describe Deployments::AfterCreateService do
let(:job) do
create(:ci_build,
:with_deployment,
+ pipeline: pipeline,
ref: 'master',
environment: 'production',
project: project,
@@ -152,6 +153,7 @@ describe Deployments::AfterCreateService do
let(:job) do
create(:ci_build,
:with_deployment,
+ pipeline: pipeline,
ref: 'master',
environment: 'prod-slug',
project: project,
@@ -165,6 +167,7 @@ describe Deployments::AfterCreateService do
let(:job) do
create(:ci_build,
:with_deployment,
+ pipeline: pipeline,
yaml_variables: [{ key: :APP_HOST, value: 'host' }],
environment: 'production',
project: project,
@@ -175,7 +178,7 @@ describe Deployments::AfterCreateService do
end
context 'when yaml environment does not have url' do
- let(:job) { create(:ci_build, :with_deployment, environment: 'staging', project: project) }
+ let(:job) { create(:ci_build, :with_deployment, pipeline: pipeline, environment: 'staging', project: project) }
it 'returns the external_url from persisted environment' do
is_expected.to be_nil
@@ -202,6 +205,7 @@ describe Deployments::AfterCreateService do
let(:job) do
create(:ci_build,
:with_deployment,
+ pipeline: pipeline,
ref: 'master',
tag: false,
environment: 'staging',
@@ -260,30 +264,4 @@ describe Deployments::AfterCreateService do
end
end
end
-
- describe '#update_environment' do
- it 'links the merge requests' do
- double = instance_double(Deployments::LinkMergeRequestsService)
-
- allow(Deployments::LinkMergeRequestsService)
- .to receive(:new)
- .with(deployment)
- .and_return(double)
-
- expect(double).to receive(:execute)
-
- service.update_environment(deployment)
- end
-
- context 'when the tracking of merge requests is disabled' do
- it 'does nothing' do
- stub_feature_flags(deployment_merge_requests: false)
-
- expect(Deployments::LinkMergeRequestsService)
- .not_to receive(:new)
-
- service.update_environment(deployment)
- end
- end
- end
end
diff --git a/spec/services/deployments/link_merge_requests_service_spec.rb b/spec/services/deployments/link_merge_requests_service_spec.rb
index ba069658dfd..307fe22a192 100644
--- a/spec/services/deployments/link_merge_requests_service_spec.rb
+++ b/spec/services/deployments/link_merge_requests_service_spec.rb
@@ -3,10 +3,15 @@
require 'spec_helper'
describe Deployments::LinkMergeRequestsService do
+ let(:project) { create(:project, :repository) }
+
describe '#execute' do
- context 'when the deployment did not succeed' do
+ context 'when the deployment is for a review environment' do
it 'does nothing' do
- deploy = create(:deployment, :failed)
+ environment =
+ create(:environment, environment_type: 'review', name: 'review/foo')
+
+ deploy = create(:deployment, :success, environment: environment)
expect(deploy).not_to receive(:link_merge_requests)
@@ -16,20 +21,29 @@ describe Deployments::LinkMergeRequestsService do
context 'when there is a previous deployment' do
it 'links all merge requests merged since the previous deployment' do
- deploy1 = create(:deployment, :success, sha: 'foo')
+ deploy1 = create(
+ :deployment,
+ :success,
+ project: project,
+ sha: '7975be0116940bf2ad4321f79d02a55c5f7779aa'
+ )
+
deploy2 = create(
:deployment,
:success,
- sha: 'bar',
project: deploy1.project,
- environment: deploy1.environment
+ environment: deploy1.environment,
+ sha: 'ddd0f15ae83993f5cb66a927a28673882e99100b'
)
service = described_class.new(deploy2)
expect(service)
.to receive(:link_merge_requests_for_range)
- .with('foo', 'bar')
+ .with(
+ '7975be0116940bf2ad4321f79d02a55c5f7779aa',
+ 'ddd0f15ae83993f5cb66a927a28673882e99100b'
+ )
service.execute
end
@@ -37,7 +51,7 @@ describe Deployments::LinkMergeRequestsService do
context 'when there are no previous deployments' do
it 'links all merged merge requests' do
- deploy = create(:deployment, :success)
+ deploy = create(:deployment, :success, project: project)
service = described_class.new(deploy)
expect(service).to receive(:link_all_merged_merge_requests)
@@ -49,7 +63,6 @@ describe Deployments::LinkMergeRequestsService do
describe '#link_merge_requests_for_range' do
it 'links merge requests' do
- project = create(:project, :repository)
environment = create(:environment, project: project)
deploy =
create(:deployment, :success, project: project, environment: environment)
@@ -81,7 +94,6 @@ describe Deployments::LinkMergeRequestsService do
describe '#link_all_merged_merge_requests' do
it 'links all merged merge requests targeting the deployed branch' do
- project = create(:project, :repository)
environment = create(:environment, project: project)
deploy =
create(:deployment, :success, project: project, environment: environment)
diff --git a/spec/services/error_tracking/list_issues_service_spec.rb b/spec/services/error_tracking/list_issues_service_spec.rb
index e0e280591cd..ecb6bcc541b 100644
--- a/spec/services/error_tracking/list_issues_service_spec.rb
+++ b/spec/services/error_tracking/list_issues_service_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe ErrorTracking::ListIssuesService do
- set(:user) { create(:user) }
- set(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
let(:params) { { search_term: 'something', sort: 'last_seen', cursor: 'some-cursor' } }
let(:list_sentry_issues_args) do
{
diff --git a/spec/services/error_tracking/list_projects_service_spec.rb b/spec/services/error_tracking/list_projects_service_spec.rb
index cd4b835e097..ddd369d45f2 100644
--- a/spec/services/error_tracking/list_projects_service_spec.rb
+++ b/spec/services/error_tracking/list_projects_service_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe ErrorTracking::ListProjectsService do
- set(:user) { create(:user) }
- set(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project, reload: true) { create(:project) }
let(:sentry_url) { 'https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project' }
let(:token) { 'test-token' }
diff --git a/spec/services/external_pull_requests/create_pipeline_service_spec.rb b/spec/services/external_pull_requests/create_pipeline_service_spec.rb
index a4da5b38b97..d1893960960 100644
--- a/spec/services/external_pull_requests/create_pipeline_service_spec.rb
+++ b/spec/services/external_pull_requests/create_pipeline_service_spec.rb
@@ -4,8 +4,8 @@ require 'spec_helper'
describe ExternalPullRequests::CreatePipelineService do
describe '#execute' do
- set(:project) { create(:project, :repository) }
- set(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :auto_devops, :repository) }
+ let_it_be(:user) { create(:user) }
let(:pull_request) { create(:external_pull_request, project: project) }
before do
diff --git a/spec/services/git/branch_push_service_spec.rb b/spec/services/git/branch_push_service_spec.rb
index 19d7b84a3ce..4d7ec7ac1d8 100644
--- a/spec/services/git/branch_push_service_spec.rb
+++ b/spec/services/git/branch_push_service_spec.rb
@@ -5,8 +5,8 @@ require 'spec_helper'
describe Git::BranchPushService, services: true do
include RepoHelpers
- set(:user) { create(:user) }
- set(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project, reload: true) { create(:project, :repository) }
let(:blankrev) { Gitlab::Git::BLANK_SHA }
let(:oldrev) { sample_commit.parent_id }
let(:newrev) { sample_commit.id }
@@ -108,7 +108,7 @@ describe Git::BranchPushService, services: true do
end
it 'reports an error' do
- allow(Sidekiq).to receive(:server?).and_return(true)
+ allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true)
expect(Sidekiq.logger).to receive(:warn)
expect { subject }.not_to change { Ci::Pipeline.count }
diff --git a/spec/services/groups/auto_devops_service_spec.rb b/spec/services/groups/auto_devops_service_spec.rb
index 7591b2f6f12..63fbdc70c1b 100644
--- a/spec/services/groups/auto_devops_service_spec.rb
+++ b/spec/services/groups/auto_devops_service_spec.rb
@@ -2,8 +2,8 @@
require 'spec_helper'
describe Groups::AutoDevopsService, '#execute' do
- set(:group) { create(:group) }
- set(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:user) { create(:user) }
let(:group_params) { { auto_devops_enabled: '0' } }
let(:service) { described_class.new(group, user, group_params) }
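Many hunks in this diff replace the deprecated `set` helper with `let_it_be`. A minimal sketch of the pattern, assuming GitLab's spec_helper, FactoryBot factories, and the test-prof gem that provides `let_it_be`:

# frozen_string_literal: true

# Sketch of the set -> let_it_be migration applied throughout this diff.
# Assumes GitLab's spec_helper and FactoryBot factories are available.
require 'spec_helper'

describe 'let_it_be migration example' do
  # Old style (removed in this diff):
  #   set(:project) { create(:project) }
  #
  # New style; like `set`, the record is created once per example group rather
  # than once per example. `reload: true` re-reads the record from the database
  # before each example so changes made by one example do not leak into the next.
  let_it_be(:project, reload: true) { create(:project) }
  let_it_be(:user) { create(:user) }

  it 'reuses the records created once for the whole group' do
    expect(project).to be_persisted
    expect(user).to be_persisted
  end
end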
diff --git a/spec/services/issues/referenced_merge_requests_service_spec.rb b/spec/services/issues/referenced_merge_requests_service_spec.rb
index 61d1612829f..2c5af11d2e6 100644
--- a/spec/services/issues/referenced_merge_requests_service_spec.rb
+++ b/spec/services/issues/referenced_merge_requests_service_spec.rb
@@ -15,16 +15,16 @@ describe Issues::ReferencedMergeRequestsService do
end
end
- set(:user) { create(:user) }
- set(:project) { create(:project, :public, :repository) }
- set(:other_project) { create(:project, :public, :repository) }
- set(:issue) { create(:issue, author: user, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be(:other_project) { create(:project, :public, :repository) }
+ let_it_be(:issue) { create(:issue, author: user, project: project) }
- set(:closing_mr) { create_closing_mr(source_project: project) }
- set(:closing_mr_other_project) { create_closing_mr(source_project: other_project) }
+ let_it_be(:closing_mr) { create_closing_mr(source_project: project) }
+ let_it_be(:closing_mr_other_project) { create_closing_mr(source_project: other_project) }
- set(:referencing_mr) { create_referencing_mr(source_project: project, source_branch: 'csv') }
- set(:referencing_mr_other_project) { create_referencing_mr(source_project: other_project, source_branch: 'csv') }
+ let_it_be(:referencing_mr) { create_referencing_mr(source_project: project, source_branch: 'csv') }
+ let_it_be(:referencing_mr_other_project) { create_referencing_mr(source_project: other_project, source_branch: 'csv') }
let(:service) { described_class.new(project, user) }
diff --git a/spec/services/issues/reorder_service_spec.rb b/spec/services/issues/reorder_service_spec.rb
index b147cdf4e64..6d72d698b1d 100644
--- a/spec/services/issues/reorder_service_spec.rb
+++ b/spec/services/issues/reorder_service_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe Issues::ReorderService do
- set(:user) { create(:user) }
- set(:project) { create(:project) }
- set(:group) { create(:group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:group) { create(:group) }
shared_examples 'issues reorder service' do
context 'when reordering issues' do
diff --git a/spec/services/issues/zoom_link_service_spec.rb b/spec/services/issues/zoom_link_service_spec.rb
index f34d2a18552..3fb1eae361a 100644
--- a/spec/services/issues/zoom_link_service_spec.rb
+++ b/spec/services/issues/zoom_link_service_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe Issues::ZoomLinkService do
- set(:user) { create(:user) }
- set(:issue) { create(:issue) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:issue) { create(:issue) }
let(:project) { issue.project }
let(:service) { described_class.new(issue, user) }
diff --git a/spec/services/merge_requests/conflicts/list_service_spec.rb b/spec/services/merge_requests/conflicts/list_service_spec.rb
index 68a9c0a8b86..13d69307084 100644
--- a/spec/services/merge_requests/conflicts/list_service_spec.rb
+++ b/spec/services/merge_requests/conflicts/list_service_spec.rb
@@ -74,7 +74,9 @@ describe MergeRequests::Conflicts::ListService do
it 'returns a falsey value when the MR has a missing ref after a force push' do
merge_request = create_merge_request('conflict-resolvable')
service = conflicts_service(merge_request)
- allow_any_instance_of(Gitlab::GitalyClient::ConflictsService).to receive(:list_conflict_files).and_raise(GRPC::Unknown)
+ allow_next_instance_of(Gitlab::GitalyClient::ConflictsService) do |instance|
+ allow(instance).to receive(:list_conflict_files).and_raise(GRPC::Unknown)
+ end
expect(service.can_be_resolved_in_ui?).to be_falsey
end
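The hunk above is one of many in this diff that replace `allow_any_instance_of`/`expect_any_instance_of` with the scoped `*_next_instance_of` helpers. A minimal sketch, assuming GitLab's NextInstanceOf spec helper is loaded via spec_helper; the service class is taken from the hunk above purely for illustration:

# frozen_string_literal: true

require 'spec_helper'

describe 'next_instance_of example' do
  it 'stubs only the next instance that is instantiated' do
    # Old style (removed in this diff):
    #   allow_any_instance_of(Gitlab::GitalyClient::ConflictsService)
    #     .to receive(:list_conflict_files).and_raise(GRPC::Unknown)
    #
    # New style: the block receives the next instance created with .new, so
    # the stub is scoped to that one object instead of every instance.
    allow_next_instance_of(Gitlab::GitalyClient::ConflictsService) do |instance|
      allow(instance).to receive(:list_conflict_files).and_raise(GRPC::Unknown)
    end
  end
end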
diff --git a/spec/services/merge_requests/create_from_issue_service_spec.rb b/spec/services/merge_requests/create_from_issue_service_spec.rb
index 7145cfe7897..3d58ecdd8cd 100644
--- a/spec/services/merge_requests/create_from_issue_service_spec.rb
+++ b/spec/services/merge_requests/create_from_issue_service_spec.rb
@@ -55,7 +55,9 @@ describe MergeRequests::CreateFromIssueService do
end
it 'creates the new_issue_branch system note when the branch could be created but the merge_request cannot be created', :sidekiq_might_not_need_inline do
- expect_any_instance_of(MergeRequest).to receive(:valid?).at_least(:once).and_return(false)
+ expect_next_instance_of(MergeRequest) do |instance|
+ expect(instance).to receive(:valid?).at_least(:once).and_return(false)
+ end
expect(SystemNoteService).to receive(:new_issue_branch).with(issue, project, user, issue.to_branch_name, branch_project: target_project)
diff --git a/spec/services/merge_requests/create_pipeline_service_spec.rb b/spec/services/merge_requests/create_pipeline_service_spec.rb
index 25f5c54a413..9eb28759061 100644
--- a/spec/services/merge_requests/create_pipeline_service_spec.rb
+++ b/spec/services/merge_requests/create_pipeline_service_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe MergeRequests::CreatePipelineService do
- set(:project) { create(:project, :repository) }
- set(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
let(:service) { described_class.new(project, user, params) }
let(:params) { {} }
diff --git a/spec/services/merge_requests/get_urls_service_spec.rb b/spec/services/merge_requests/get_urls_service_spec.rb
index dcb8c8080a1..bb8a1873dac 100644
--- a/spec/services/merge_requests/get_urls_service_spec.rb
+++ b/spec/services/merge_requests/get_urls_service_spec.rb
@@ -45,6 +45,13 @@ describe MergeRequests::GetUrlsService do
end
end
+ context 'when project is nil' do
+ let(:project) { nil }
+ let(:changes) { default_branch_changes }
+
+ it_behaves_like 'no_merge_request_url'
+ end
+
context 'pushing to default branch' do
let(:changes) { default_branch_changes }
diff --git a/spec/services/merge_requests/merge_service_spec.rb b/spec/services/merge_requests/merge_service_spec.rb
index 61c8103353c..fa1a8f60256 100644
--- a/spec/services/merge_requests/merge_service_spec.rb
+++ b/spec/services/merge_requests/merge_service_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe MergeRequests::MergeService do
- set(:user) { create(:user) }
- set(:user2) { create(:user) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:user2) { create(:user) }
let(:merge_request) { create(:merge_request, :simple, author: user2, assignees: [user2]) }
let(:project) { merge_request.project }
diff --git a/spec/services/merge_requests/merge_to_ref_service_spec.rb b/spec/services/merge_requests/merge_to_ref_service_spec.rb
index 77e38f1eb4c..5c26e32bb22 100644
--- a/spec/services/merge_requests/merge_to_ref_service_spec.rb
+++ b/spec/services/merge_requests/merge_to_ref_service_spec.rb
@@ -67,7 +67,7 @@ describe MergeRequests::MergeToRefService do
end
end
- set(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
let(:merge_request) { create(:merge_request, :simple) }
let(:project) { merge_request.project }
@@ -214,7 +214,7 @@ describe MergeRequests::MergeToRefService do
end
describe 'cascading merge refs' do
- set(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
let(:params) { { commit_message: 'Cascading merge', first_parent_ref: first_parent_ref, target_ref: target_ref, sha: merge_request.diff_head_sha } }
context 'when first merge happens' do
diff --git a/spec/services/merge_requests/push_options_handler_service_spec.rb b/spec/services/merge_requests/push_options_handler_service_spec.rb
index 7f9c47d8670..420c8513c72 100644
--- a/spec/services/merge_requests/push_options_handler_service_spec.rb
+++ b/spec/services/merge_requests/push_options_handler_service_spec.rb
@@ -714,9 +714,9 @@ describe MergeRequests::PushOptionsHandlerService do
let(:exception) { StandardError.new('My standard error') }
def run_service_with_exception
- allow_any_instance_of(
- MergeRequests::BuildService
- ).to receive(:execute).and_raise(exception)
+ allow_next_instance_of(MergeRequests::BuildService) do |instance|
+ allow(instance).to receive(:execute).and_raise(exception)
+ end
service.execute
end
@@ -766,9 +766,9 @@ describe MergeRequests::PushOptionsHandlerService do
invalid_merge_request = MergeRequest.new
invalid_merge_request.errors.add(:base, 'my error')
- expect_any_instance_of(
- MergeRequests::CreateService
- ).to receive(:execute).and_return(invalid_merge_request)
+ expect_next_instance_of(MergeRequests::CreateService) do |instance|
+ expect(instance).to receive(:execute).and_return(invalid_merge_request)
+ end
service.execute
diff --git a/spec/services/merge_requests/rebase_service_spec.rb b/spec/services/merge_requests/rebase_service_spec.rb
index 9c535664c26..184f3f37339 100644
--- a/spec/services/merge_requests/rebase_service_spec.rb
+++ b/spec/services/merge_requests/rebase_service_spec.rb
@@ -15,6 +15,7 @@ describe MergeRequests::RebaseService do
end
let(:project) { merge_request.project }
let(:repository) { project.repository.raw }
+ let(:skip_ci) { false }
subject(:service) { described_class.new(project, user, {}) }
@@ -115,7 +116,7 @@ describe MergeRequests::RebaseService do
context 'valid params' do
shared_examples_for 'a service that can execute a successful rebase' do
before do
- service.execute(merge_request)
+ service.execute(merge_request, skip_ci: skip_ci)
end
it 'rebases source branch' do
@@ -155,6 +156,12 @@ describe MergeRequests::RebaseService do
it_behaves_like 'a service that can execute a successful rebase'
end
+ context 'when skip_ci flag is set' do
+ let(:skip_ci) { true }
+
+ it_behaves_like 'a service that can execute a successful rebase'
+ end
+
context 'fork' do
describe 'successful fork rebase' do
let(:forked_project) do
diff --git a/spec/services/metrics/dashboard/clone_dashboard_service_spec.rb b/spec/services/metrics/dashboard/clone_dashboard_service_spec.rb
new file mode 100644
index 00000000000..274d594fd68
--- /dev/null
+++ b/spec/services/metrics/dashboard/clone_dashboard_service_spec.rb
@@ -0,0 +1,197 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Metrics::Dashboard::CloneDashboardService, :use_clean_rails_memory_store_caching do
+ include MetricsDashboardHelpers
+
+ set(:user) { create(:user) }
+ set(:project) { create(:project, :repository) }
+ set(:environment) { create(:environment, project: project) }
+
+ describe '#execute' do
+ subject(:service_call) { described_class.new(project, user, params).execute }
+
+ let(:commit_message) { 'test' }
+ let(:branch) { "dashboard_new_branch" }
+ let(:dashboard) { 'config/prometheus/common_metrics.yml' }
+ let(:file_name) { 'custom_dashboard.yml' }
+ let(:params) do
+ {
+ dashboard: dashboard,
+ file_name: file_name,
+ commit_message: commit_message,
+ branch: branch
+ }
+ end
+
+ let(:dashboard_attrs) do
+ {
+ commit_message: commit_message,
+ branch_name: branch,
+ start_branch: project.default_branch,
+ encoding: 'text',
+ file_path: ".gitlab/dashboards/#{file_name}",
+ file_content: File.read(dashboard)
+ }
+ end
+
+ context 'user does not have push right to repository' do
+ it_behaves_like 'misconfigured dashboard service response', :forbidden, %q(You can't commit to this project)
+ end
+
+ context 'with rights to push to the repository' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ context 'wrong target file extension' do
+ let(:file_name) { 'custom_dashboard.txt' }
+
+ it_behaves_like 'misconfigured dashboard service response', :bad_request, 'The file name should have a .yml extension'
+ end
+
+ context 'wrong source dashboard file' do
+ let(:dashboard) { 'config/prometheus/common_metrics_123.yml' }
+
+ it_behaves_like 'misconfigured dashboard service response', :not_found, 'Not found.'
+ end
+
+ context 'path traversal attack attempt' do
+ let(:dashboard) { 'config/prometheus/../database.yml' }
+
+ it_behaves_like 'misconfigured dashboard service response', :not_found, 'Not found.'
+ end
+
+ context 'path traversal attack attempt on target file' do
+ let(:file_name) { '../../custom_dashboard.yml' }
+ let(:dashboard_attrs) do
+ {
+ commit_message: commit_message,
+ branch_name: branch,
+ start_branch: project.default_branch,
+ encoding: 'text',
+ file_path: ".gitlab/dashboards/custom_dashboard.yml",
+ file_content: File.read(dashboard)
+ }
+ end
+
+ it 'strips target file name to safe value', :aggregate_failures do
+ service_instance = instance_double(::Files::CreateService)
+ expect(::Files::CreateService).to receive(:new).with(project, user, dashboard_attrs).and_return(service_instance)
+ expect(service_instance).to receive(:execute).and_return(status: :success)
+
+ service_call
+ end
+ end
+
+ context 'valid parameters' do
+ it 'delegates commit creation to Files::CreateService', :aggregate_failures do
+ service_instance = instance_double(::Files::CreateService)
+ expect(::Files::CreateService).to receive(:new).with(project, user, dashboard_attrs).and_return(service_instance)
+ expect(service_instance).to receive(:execute).and_return(status: :success)
+
+ service_call
+ end
+
+ context 'selected branch already exists' do
+ let(:branch) { 'existing_branch' }
+
+ before do
+ project.repository.add_branch(user, branch, 'master')
+ end
+
+ it_behaves_like 'misconfigured dashboard service response', :bad_request, "There was an error creating the dashboard, branch named: existing_branch already exists."
+
+      # Temporarily unavailable in the first iteration; follow-up issue
+      # https://gitlab.com/gitlab-org/gitlab/issues/196237 tracks the work
+      # required to enable this feature.
+      # it 'passes correct params to Files::CreateService', :aggregate_failures do
+ # project.repository.add_branch(user, branch, 'master')
+ #
+ # service_instance = instance_double(::Files::CreateService)
+ # expect(::Files::CreateService).to receive(:new).with(project, user, dashboard_attrs).and_return(service_instance)
+ # expect(service_instance).to receive(:execute).and_return(status: :success)
+ #
+ # service_call
+ # end
+ end
+
+ context 'blank branch name' do
+ let(:branch) { '' }
+
+ it_behaves_like 'misconfigured dashboard service response', :bad_request, 'There was an error creating the dashboard, branch name is invalid.'
+ end
+
+ context 'dashboard file already exists' do
+ let(:branch) { 'custom_dashboard' }
+
+ before do
+ Files::CreateService.new(
+ project,
+ user,
+ commit_message: 'Create custom dashboard custom_dashboard.yml',
+ branch_name: 'master',
+ start_branch: 'master',
+ file_path: ".gitlab/dashboards/custom_dashboard.yml",
+ file_content: File.read('config/prometheus/common_metrics.yml')
+ ).execute
+ end
+
+ it_behaves_like 'misconfigured dashboard service response', :bad_request, "A file with 'custom_dashboard.yml' already exists in custom_dashboard branch"
+ end
+
+ it 'extends dashboard template path to absolute url' do
+ allow(::Files::CreateService).to receive(:new).and_return(double(execute: { status: :success }))
+
+ expect(File).to receive(:read).with(Rails.root.join('config/prometheus/common_metrics.yml')).and_return('')
+
+ service_call
+ end
+
+ context 'Files::CreateService success' do
+ before do
+ allow(::Files::CreateService).to receive(:new).and_return(double(execute: { status: :success }))
+ end
+
+ it 'clears dashboards cache' do
+ expect(project.repository).to receive(:refresh_method_caches).with([:metrics_dashboard])
+
+ service_call
+ end
+
+ it 'returns success', :aggregate_failures do
+ result = service_call
+ dashboard_details = {
+ path: '.gitlab/dashboards/custom_dashboard.yml',
+ display_name: 'custom_dashboard.yml',
+ default: false,
+ system_dashboard: false
+ }
+
+ expect(result[:status]).to be :success
+ expect(result[:http_status]).to be :created
+ expect(result[:dashboard]).to match dashboard_details
+ end
+ end
+
+ context 'Files::CreateService fails' do
+ before do
+ allow(::Files::CreateService).to receive(:new).and_return(double(execute: { status: :error }))
+ end
+
+ it 'does NOT clear dashboards cache' do
+ expect(project.repository).not_to receive(:refresh_method_caches)
+
+ service_call
+ end
+
+ it 'returns error' do
+ result = service_call
+ expect(result[:status]).to be :error
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/metrics/dashboard/custom_metric_embed_service_spec.rb b/spec/services/metrics/dashboard/custom_metric_embed_service_spec.rb
index 53b7497ae21..744693dad15 100644
--- a/spec/services/metrics/dashboard/custom_metric_embed_service_spec.rb
+++ b/spec/services/metrics/dashboard/custom_metric_embed_service_spec.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
describe Metrics::Dashboard::CustomMetricEmbedService do
include MetricsDashboardHelpers
- set(:project) { build(:project) }
- set(:user) { create(:user) }
- set(:environment) { create(:environment, project: project) }
+ let_it_be(:project, reload: true) { build(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:environment) { create(:environment, project: project) }
before do
project.add_maintainer(user)
diff --git a/spec/services/metrics/dashboard/default_embed_service_spec.rb b/spec/services/metrics/dashboard/default_embed_service_spec.rb
index 803b9a93be7..741a9644905 100644
--- a/spec/services/metrics/dashboard/default_embed_service_spec.rb
+++ b/spec/services/metrics/dashboard/default_embed_service_spec.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
describe Metrics::Dashboard::DefaultEmbedService, :use_clean_rails_memory_store_caching do
include MetricsDashboardHelpers
- set(:project) { build(:project) }
- set(:user) { create(:user) }
- set(:environment) { create(:environment, project: project) }
+ let_it_be(:project) { build(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:environment) { create(:environment, project: project) }
before do
project.add_maintainer(user)
diff --git a/spec/services/metrics/dashboard/dynamic_embed_service_spec.rb b/spec/services/metrics/dashboard/dynamic_embed_service_spec.rb
index a0f7315f750..c1ce9818f21 100644
--- a/spec/services/metrics/dashboard/dynamic_embed_service_spec.rb
+++ b/spec/services/metrics/dashboard/dynamic_embed_service_spec.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
describe Metrics::Dashboard::DynamicEmbedService, :use_clean_rails_memory_store_caching do
include MetricsDashboardHelpers
- set(:project) { build(:project) }
- set(:user) { create(:user) }
- set(:environment) { create(:environment, project: project) }
+ let_it_be(:project) { build(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:environment) { create(:environment, project: project) }
before do
project.add_maintainer(user)
diff --git a/spec/services/metrics/dashboard/project_dashboard_service_spec.rb b/spec/services/metrics/dashboard/project_dashboard_service_spec.rb
index ab7a7b97861..cba8ef2ec98 100644
--- a/spec/services/metrics/dashboard/project_dashboard_service_spec.rb
+++ b/spec/services/metrics/dashboard/project_dashboard_service_spec.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
describe Metrics::Dashboard::ProjectDashboardService, :use_clean_rails_memory_store_caching do
include MetricsDashboardHelpers
- set(:user) { create(:user) }
- set(:project) { create(:project) }
- set(:environment) { create(:environment, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:environment) { create(:environment, project: project) }
before do
project.add_maintainer(user)
diff --git a/spec/services/metrics/dashboard/system_dashboard_service_spec.rb b/spec/services/metrics/dashboard/system_dashboard_service_spec.rb
index 95c5a1479a4..cc9f711c611 100644
--- a/spec/services/metrics/dashboard/system_dashboard_service_spec.rb
+++ b/spec/services/metrics/dashboard/system_dashboard_service_spec.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
describe Metrics::Dashboard::SystemDashboardService, :use_clean_rails_memory_store_caching do
include MetricsDashboardHelpers
- set(:user) { create(:user) }
- set(:project) { create(:project) }
- set(:environment) { create(:environment, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:environment) { create(:environment, project: project) }
before do
project.add_maintainer(user)
diff --git a/spec/services/metrics/sample_metrics_service_spec.rb b/spec/services/metrics/sample_metrics_service_spec.rb
index 8574674ebc4..3b4f7cb8062 100644
--- a/spec/services/metrics/sample_metrics_service_spec.rb
+++ b/spec/services/metrics/sample_metrics_service_spec.rb
@@ -4,7 +4,10 @@ require 'spec_helper'
describe Metrics::SampleMetricsService do
describe 'query' do
- subject { described_class.new(identifier).query }
+ let(:range_start) { '2019-12-02T23:31:45.000Z' }
+ let(:range_end) { '2019-12-03T00:01:45.000Z' }
+
+ subject { described_class.new(identifier, range_start: range_start, range_end: range_end).query }
context 'when the file is not found' do
let(:identifier) { nil }
@@ -26,10 +29,10 @@ describe Metrics::SampleMetricsService do
FileUtils.rm(destination)
end
- subject { described_class.new(identifier).query }
+ subject { described_class.new(identifier, range_start: range_start, range_end: range_end).query }
it 'loads data from the sample file correctly' do
- expect(subject).to eq(YAML.load_file(source))
+ expect(subject).to eq(YAML.load_file(source)[30])
end
end
diff --git a/spec/services/milestones/promote_service_spec.rb b/spec/services/milestones/promote_service_spec.rb
index 22c7e9dde30..fa893b86cdb 100644
--- a/spec/services/milestones/promote_service_spec.rb
+++ b/spec/services/milestones/promote_service_spec.rb
@@ -31,7 +31,9 @@ describe Milestones::PromoteService do
it 'does not promote milestone and update issuables if promoted milestone is not valid' do
issue = create(:issue, milestone: milestone, project: project)
merge_request = create(:merge_request, milestone: milestone, source_project: project)
- allow_any_instance_of(Milestone).to receive(:valid?).and_return(false)
+ allow_next_instance_of(Milestone) do |instance|
+ allow(instance).to receive(:valid?).and_return(false)
+ end
expect { service.execute(milestone) }.to raise_error(described_class::PromoteMilestoneError)
diff --git a/spec/services/milestones/transfer_service_spec.rb b/spec/services/milestones/transfer_service_spec.rb
index b3d41eb0763..711969ce504 100644
--- a/spec/services/milestones/transfer_service_spec.rb
+++ b/spec/services/milestones/transfer_service_spec.rb
@@ -71,7 +71,9 @@ describe Milestones::TransferService do
context 'when find_or_create_milestone returns nil' do
before do
- allow_any_instance_of(Milestones::FindOrCreateService).to receive(:execute).and_return(nil)
+ allow_next_instance_of(Milestones::FindOrCreateService) do |instance|
+ allow(instance).to receive(:execute).and_return(nil)
+ end
end
it 'removes issues group milestone' do
diff --git a/spec/services/namespaces/statistics_refresher_service_spec.rb b/spec/services/namespaces/statistics_refresher_service_spec.rb
index 9d42e917efe..1fa0a794edd 100644
--- a/spec/services/namespaces/statistics_refresher_service_spec.rb
+++ b/spec/services/namespaces/statistics_refresher_service_spec.rb
@@ -17,7 +17,9 @@ describe Namespaces::StatisticsRefresherService, '#execute' do
end
it 'recalculate the namespace statistics' do
- expect_any_instance_of(Namespace::RootStorageStatistics).to receive(:recalculate!).once
+ expect_next_instance_of(Namespace::RootStorageStatistics) do |instance|
+ expect(instance).to receive(:recalculate!).once
+ end
service.execute(group)
end
@@ -45,8 +47,9 @@ describe Namespaces::StatisticsRefresherService, '#execute' do
context 'when something goes wrong' do
before do
- allow_any_instance_of(Namespace::RootStorageStatistics)
- .to receive(:recalculate!).and_raise(ActiveRecord::ActiveRecordError)
+ allow_next_instance_of(Namespace::RootStorageStatistics) do |instance|
+ allow(instance).to receive(:recalculate!).and_raise(ActiveRecord::ActiveRecordError)
+ end
end
it 'raises RefreshError' do
diff --git a/spec/services/notes/create_service_spec.rb b/spec/services/notes/create_service_spec.rb
index 8fd03428eb2..c5e2fe8de12 100644
--- a/spec/services/notes/create_service_spec.rb
+++ b/spec/services/notes/create_service_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe Notes::CreateService do
- set(:project) { create(:project, :repository) }
- set(:issue) { create(:issue, project: project) }
- set(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:user) { create(:user) }
let(:opts) do
{ note: 'Awesome comment', noteable_type: 'Issue', noteable_id: issue.id }
end
@@ -216,12 +216,12 @@ describe Notes::CreateService do
context 'note with commands' do
context 'all quick actions' do
- set(:milestone) { create(:milestone, project: project, title: "sprint") }
- set(:bug_label) { create(:label, project: project, title: 'bug') }
- set(:to_be_copied_label) { create(:label, project: project, title: 'to be copied') }
- set(:feature_label) { create(:label, project: project, title: 'feature') }
- set(:issue) { create(:issue, project: project, labels: [bug_label], due_date: '2019-01-01') }
- set(:issue_2) { create(:issue, project: project, labels: [bug_label, to_be_copied_label]) }
+ let_it_be(:milestone) { create(:milestone, project: project, title: "sprint") }
+ let_it_be(:bug_label) { create(:label, project: project, title: 'bug') }
+ let_it_be(:to_be_copied_label) { create(:label, project: project, title: 'to be copied') }
+ let_it_be(:feature_label) { create(:label, project: project, title: 'feature') }
+ let_it_be(:issue, reload: true) { create(:issue, project: project, labels: [bug_label], due_date: '2019-01-01') }
+ let_it_be(:issue_2) { create(:issue, project: project, labels: [bug_label, to_be_copied_label]) }
context 'for issues' do
let(:issuable) { issue }
@@ -272,7 +272,7 @@ describe Notes::CreateService do
end
context 'for merge requests' do
- set(:merge_request) { create(:merge_request, source_project: project, labels: [bug_label]) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project, labels: [bug_label]) }
let(:issuable) { merge_request }
let(:note_params) { opts.merge(noteable_type: 'MergeRequest', noteable_id: merge_request.id) }
let(:merge_request_quick_actions) do
diff --git a/spec/services/notes/destroy_service_spec.rb b/spec/services/notes/destroy_service_spec.rb
index 9faf1299ef2..258e5c68265 100644
--- a/spec/services/notes/destroy_service_spec.rb
+++ b/spec/services/notes/destroy_service_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe Notes::DestroyService do
- set(:project) { create(:project, :public) }
- set(:issue) { create(:issue, project: project) }
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:issue) { create(:issue, project: project) }
let(:user) { issue.author }
describe '#execute' do
diff --git a/spec/services/notes/resolve_service_spec.rb b/spec/services/notes/resolve_service_spec.rb
index 3f82e1dbdc0..c98384c226e 100644
--- a/spec/services/notes/resolve_service_spec.rb
+++ b/spec/services/notes/resolve_service_spec.rb
@@ -17,7 +17,9 @@ describe Notes::ResolveService do
end
it "sends notifications if all discussions are resolved" do
- expect_any_instance_of(MergeRequests::ResolvedDiscussionNotificationService).to receive(:execute).with(merge_request)
+ expect_next_instance_of(MergeRequests::ResolvedDiscussionNotificationService) do |instance|
+ expect(instance).to receive(:execute).with(merge_request)
+ end
described_class.new(merge_request.project, user).execute(note)
end
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index b80f75c70e6..80b8d36aa07 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -155,7 +155,7 @@ describe NotificationService, :mailer do
describe '#async' do
let(:async) { notification.async }
- set(:key) { create(:personal_key) }
+ let_it_be(:key) { create(:personal_key) }
it 'returns an Async object with the correct parent' do
expect(async).to be_a(described_class::Async)
@@ -2508,14 +2508,14 @@ describe NotificationService, :mailer do
end
describe 'Pages domains' do
- set(:project) { create(:project) }
- set(:domain) { create(:pages_domain, project: project) }
- set(:u_blocked) { create(:user, :blocked) }
- set(:u_silence) { create_user_with_notification(:disabled, 'silent', project) }
- set(:u_owner) { project.owner }
- set(:u_maintainer1) { create(:user) }
- set(:u_maintainer2) { create(:user) }
- set(:u_developer) { create(:user) }
+ let_it_be(:project, reload: true) { create(:project) }
+ let_it_be(:domain, reload: true) { create(:pages_domain, project: project) }
+ let_it_be(:u_blocked) { create(:user, :blocked) }
+ let_it_be(:u_silence) { create_user_with_notification(:disabled, 'silent', project) }
+ let_it_be(:u_owner) { project.owner }
+ let_it_be(:u_maintainer1) { create(:user) }
+ let_it_be(:u_maintainer2) { create(:user) }
+ let_it_be(:u_developer) { create(:user) }
before do
project.add_maintainer(u_blocked)
@@ -2707,7 +2707,7 @@ describe NotificationService, :mailer do
# User to be participant by default
# This user does not contain any record in notification settings table
# It should be treated with a :participating notification_level
- @u_lazy_participant = create(:user, username: 'lazy-participant')
+ @u_lazy_participant = create(:user, username: 'lazy-participant')
@u_guest_watcher = create_user_with_notification(:watch, 'guest_watching')
@u_guest_custom = create_user_with_notification(:custom, 'guest_custom')
diff --git a/spec/services/pages_domains/create_acme_order_service_spec.rb b/spec/services/pages_domains/create_acme_order_service_spec.rb
index d59aa9b979e..154b3fd5600 100644
--- a/spec/services/pages_domains/create_acme_order_service_spec.rb
+++ b/spec/services/pages_domains/create_acme_order_service_spec.rb
@@ -45,12 +45,34 @@ describe PagesDomains::CreateAcmeOrderService do
expect { OpenSSL::PKey::RSA.new(saved_order.private_key) }.not_to raise_error
end
- it 'properly saves order attributes' do
+ it 'properly saves order url' do
service.execute
saved_order = PagesDomainAcmeOrder.last
expect(saved_order.url).to eq(order_double.url)
- expect(saved_order.expires_at).to be_like_time(order_double.expires)
+ end
+
+ context 'when order expires in 2 days' do
+    it 'sets the expiration time to 2 hours from now' do
+ Timecop.freeze do
+ service.execute
+
+ saved_order = PagesDomainAcmeOrder.last
+ expect(saved_order.expires_at).to be_like_time(2.hours.from_now)
+ end
+ end
+ end
+
+ context 'when order expires in an hour' do
+    it 'sets the expiration time according to the order' do
+ Timecop.freeze do
+ allow(order_double).to receive(:expires).and_return(1.hour.from_now)
+ service.execute
+
+ saved_order = PagesDomainAcmeOrder.last
+ expect(saved_order.expires_at).to be_like_time(1.hour.from_now)
+ end
+ end
end
it 'properly saves challenge attributes' do
diff --git a/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb b/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
index af79a42b611..9832ba95524 100644
--- a/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
+++ b/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
@@ -32,9 +32,9 @@ describe PagesDomains::ObtainLetsEncryptCertificateService do
def stub_lets_encrypt_order(url, status)
order = ::Gitlab::LetsEncrypt::Order.new(acme_order_double(status: status))
- allow_any_instance_of(::Gitlab::LetsEncrypt::Client).to(
- receive(:load_order).with(url).and_return(order)
- )
+ allow_next_instance_of(::Gitlab::LetsEncrypt::Client) do |instance|
+ allow(instance).to receive(:load_order).with(url).and_return(order)
+ end
order
end
diff --git a/spec/services/projects/container_repository/cleanup_tags_service_spec.rb b/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
index 14772d172e8..78b969c8a0e 100644
--- a/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe Projects::ContainerRepository::CleanupTagsService do
- set(:user) { create(:user) }
- set(:project) { create(:project, :private) }
- set(:repository) { create(:container_repository, :root, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :private) }
+ let_it_be(:repository) { create(:container_repository, :root, project: project) }
let(:service) { described_class.new(project, user, params) }
diff --git a/spec/services/projects/container_repository/delete_tags_service_spec.rb b/spec/services/projects/container_repository/delete_tags_service_spec.rb
index 7ceb02c9cf8..decbbb7597f 100644
--- a/spec/services/projects/container_repository/delete_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/delete_tags_service_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe Projects::ContainerRepository::DeleteTagsService do
- set(:user) { create(:user) }
- set(:project) { create(:project, :private) }
- set(:repository) { create(:container_repository, :root, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :private) }
+ let_it_be(:repository) { create(:container_repository, :root, project: project) }
let(:params) { { tags: tags } }
let(:service) { described_class.new(project, user, params) }
diff --git a/spec/services/projects/container_repository/destroy_service_spec.rb b/spec/services/projects/container_repository/destroy_service_spec.rb
index affcc66d2bb..cc8fd2716e1 100644
--- a/spec/services/projects/container_repository/destroy_service_spec.rb
+++ b/spec/services/projects/container_repository/destroy_service_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe Projects::ContainerRepository::DestroyService do
- set(:user) { create(:user) }
- set(:project) { create(:project, :private) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :private) }
subject { described_class.new(project, user) }
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index 814bf912c8c..bce3f72a287 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -247,7 +247,9 @@ describe Projects::CreateService, '#execute' do
context 'repository creation' do
it 'synchronously creates the repository' do
- expect_any_instance_of(Project).to receive(:create_repository)
+ expect_next_instance_of(Project) do |instance|
+ expect(instance).to receive(:create_repository)
+ end
project = create_project(user, opts)
expect(project).to be_valid
diff --git a/spec/services/projects/detect_repository_languages_service_spec.rb b/spec/services/projects/detect_repository_languages_service_spec.rb
index df5eed18ac0..76600b0e77c 100644
--- a/spec/services/projects/detect_repository_languages_service_spec.rb
+++ b/spec/services/projects/detect_repository_languages_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Projects::DetectRepositoryLanguagesService, :clean_gitlab_redis_shared_state do
- set(:project) { create(:project, :repository) }
+ let_it_be(:project, reload: true) { create(:project, :repository) }
subject { described_class.new(project) }
@@ -51,7 +51,7 @@ describe Projects::DetectRepositoryLanguagesService, :clean_gitlab_redis_shared_
end
context 'when no repository exists' do
- set(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
it 'has no languages' do
expect(subject.execute).to be_empty
diff --git a/spec/services/projects/fork_service_spec.rb b/spec/services/projects/fork_service_spec.rb
index fc01c93b5cf..e7b904fcd60 100644
--- a/spec/services/projects/fork_service_spec.rb
+++ b/spec/services/projects/fork_service_spec.rb
@@ -224,6 +224,19 @@ describe Projects::ForkService do
end
end
end
+
+ context 'when forking is disabled' do
+ before do
+ @from_project.project_feature.update_attribute(
+ :forking_access_level, ProjectFeature::DISABLED)
+ end
+
+ it 'fails' do
+ to_project = fork_project(@from_project, @to_user, namespace: @to_user.namespace)
+
+ expect(to_project.errors[:forked_from_project_id]).to eq(['is forbidden'])
+ end
+ end
end
describe 'fork to namespace' do
diff --git a/spec/services/projects/gitlab_projects_import_service_spec.rb b/spec/services/projects/gitlab_projects_import_service_spec.rb
index 78580bfa604..1662d4577aa 100644
--- a/spec/services/projects/gitlab_projects_import_service_spec.rb
+++ b/spec/services/projects/gitlab_projects_import_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Projects::GitlabProjectsImportService do
- set(:namespace) { create(:namespace) }
+ let_it_be(:namespace) { create(:namespace) }
let(:path) { 'test-path' }
let(:file) { fixture_file_upload('spec/fixtures/project_export.tar.gz') }
let(:overwrite) { false }
diff --git a/spec/services/projects/housekeeping_service_spec.rb b/spec/services/projects/housekeeping_service_spec.rb
index c99054d9fd5..60804a8dba6 100644
--- a/spec/services/projects/housekeeping_service_spec.rb
+++ b/spec/services/projects/housekeeping_service_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe Projects::HousekeepingService do
subject { described_class.new(project) }
- set(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
before do
project.reset_pushes_since_gc
diff --git a/spec/services/projects/import_export/export_service_spec.rb b/spec/services/projects/import_export/export_service_spec.rb
index a557e61da78..c7ac07fc524 100644
--- a/spec/services/projects/import_export/export_service_spec.rb
+++ b/spec/services/projects/import_export/export_service_spec.rb
@@ -94,7 +94,9 @@ describe Projects::ImportExport::ExportService do
end
it 'notifies the user' do
- expect_any_instance_of(NotificationService).to receive(:project_not_exported)
+ expect_next_instance_of(NotificationService) do |instance|
+ expect(instance).to receive(:project_not_exported)
+ end
end
it 'notifies logger' do
@@ -122,7 +124,9 @@ describe Projects::ImportExport::ExportService do
end
it 'notifies the user' do
- expect_any_instance_of(NotificationService).to receive(:project_not_exported)
+ expect_next_instance_of(NotificationService) do |instance|
+ expect(instance).to receive(:project_not_exported)
+ end
end
it 'notifies logger' do
diff --git a/spec/services/projects/lfs_pointers/lfs_import_service_spec.rb b/spec/services/projects/lfs_pointers/lfs_import_service_spec.rb
index 7ca20a6d751..016028a96bf 100644
--- a/spec/services/projects/lfs_pointers/lfs_import_service_spec.rb
+++ b/spec/services/projects/lfs_pointers/lfs_import_service_spec.rb
@@ -16,7 +16,9 @@ describe Projects::LfsPointers::LfsImportService do
it 'downloads lfs objects' do
service = double
- expect_any_instance_of(Projects::LfsPointers::LfsObjectDownloadListService).to receive(:execute).and_return(oid_download_links)
+ expect_next_instance_of(Projects::LfsPointers::LfsObjectDownloadListService) do |instance|
+ expect(instance).to receive(:execute).and_return(oid_download_links)
+ end
expect(Projects::LfsPointers::LfsDownloadService).to receive(:new).and_return(service).twice
expect(service).to receive(:execute).twice
@@ -27,7 +29,9 @@ describe Projects::LfsPointers::LfsImportService do
context 'when no downloadable lfs object links' do
it 'does not call LfsDownloadService' do
- expect_any_instance_of(Projects::LfsPointers::LfsObjectDownloadListService).to receive(:execute).and_return({})
+ expect_next_instance_of(Projects::LfsPointers::LfsObjectDownloadListService) do |instance|
+ expect(instance).to receive(:execute).and_return({})
+ end
expect(Projects::LfsPointers::LfsDownloadService).not_to receive(:new)
result = subject.execute
@@ -39,7 +43,9 @@ describe Projects::LfsPointers::LfsImportService do
context 'when an exception is raised' do
it 'returns error' do
error_message = "error message"
- expect_any_instance_of(Projects::LfsPointers::LfsObjectDownloadListService).to receive(:execute).and_raise(StandardError, error_message)
+ expect_next_instance_of(Projects::LfsPointers::LfsObjectDownloadListService) do |instance|
+ expect(instance).to receive(:execute).and_raise(StandardError, error_message)
+ end
result = subject.execute
diff --git a/spec/services/projects/open_merge_requests_count_service_spec.rb b/spec/services/projects/open_merge_requests_count_service_spec.rb
index f9fff4cbd4c..7d848f9f2c3 100644
--- a/spec/services/projects/open_merge_requests_count_service_spec.rb
+++ b/spec/services/projects/open_merge_requests_count_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Projects::OpenMergeRequestsCountService, :use_clean_rails_memory_store_caching do
- set(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
subject { described_class.new(project) }
diff --git a/spec/services/projects/operations/update_service_spec.rb b/spec/services/projects/operations/update_service_spec.rb
index 81d59a98b9b..93cd5c43e86 100644
--- a/spec/services/projects/operations/update_service_spec.rb
+++ b/spec/services/projects/operations/update_service_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe Projects::Operations::UpdateService do
- set(:user) { create(:user) }
- set(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project, reload: true) { create(:project) }
let(:result) { subject.execute }
diff --git a/spec/services/projects/participants_service_spec.rb b/spec/services/projects/participants_service_spec.rb
index 239d28557ee..6eaf7a71b23 100644
--- a/spec/services/projects/participants_service_spec.rb
+++ b/spec/services/projects/participants_service_spec.rb
@@ -4,8 +4,8 @@ require 'spec_helper'
describe Projects::ParticipantsService do
describe '#groups' do
- set(:user) { create(:user) }
- set(:project) { create(:project, :public) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public) }
let(:service) { described_class.new(project, user) }
it 'avoids N+1 queries' do
@@ -62,10 +62,10 @@ describe Projects::ParticipantsService do
subject(:usernames) { service.project_members.map { |member| member[:username] } }
context 'when there is a project in group namespace' do
- set(:public_group) { create(:group, :public) }
- set(:public_project) { create(:project, :public, namespace: public_group)}
+ let_it_be(:public_group) { create(:group, :public) }
+ let_it_be(:public_project) { create(:project, :public, namespace: public_group)}
- set(:public_group_owner) { create(:user) }
+ let_it_be(:public_group_owner) { create(:user) }
let(:service) { described_class.new(public_project, create(:user)) }
@@ -79,18 +79,18 @@ describe Projects::ParticipantsService do
end
context 'when there is a private group and a public project' do
- set(:public_group) { create(:group, :public) }
- set(:private_group) { create(:group, :private, :nested) }
- set(:public_project) { create(:project, :public, namespace: public_group)}
+ let_it_be(:public_group) { create(:group, :public) }
+ let_it_be(:private_group) { create(:group, :private, :nested) }
+ let_it_be(:public_project) { create(:project, :public, namespace: public_group)}
- set(:project_issue) { create(:issue, project: public_project)}
+ let_it_be(:project_issue) { create(:issue, project: public_project)}
- set(:public_group_owner) { create(:user) }
- set(:private_group_member) { create(:user) }
- set(:public_project_maintainer) { create(:user) }
- set(:private_group_owner) { create(:user) }
+ let_it_be(:public_group_owner) { create(:user) }
+ let_it_be(:private_group_member) { create(:user) }
+ let_it_be(:public_project_maintainer) { create(:user) }
+ let_it_be(:private_group_owner) { create(:user) }
- set(:group_ancestor_owner) { create(:user) }
+ let_it_be(:group_ancestor_owner) { create(:user) }
before(:context) do
public_group.add_owner public_group_owner
diff --git a/spec/services/projects/update_pages_service_spec.rb b/spec/services/projects/update_pages_service_spec.rb
index fe92b53cd91..714256d9b08 100644
--- a/spec/services/projects/update_pages_service_spec.rb
+++ b/spec/services/projects/update_pages_service_spec.rb
@@ -3,9 +3,9 @@
require "spec_helper"
describe Projects::UpdatePagesService do
- set(:project) { create(:project, :repository) }
- set(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha) }
- set(:build) { create(:ci_build, pipeline: pipeline, ref: 'HEAD') }
+ let_it_be(:project, refind: true) { create(:project, :repository) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha) }
+ let_it_be(:build) { create(:ci_build, pipeline: pipeline, ref: 'HEAD') }
let(:invalid_file) { fixture_file_upload('spec/fixtures/dk.png') }
let(:file) { fixture_file_upload("spec/fixtures/pages.zip") }
@@ -110,8 +110,9 @@ describe Projects::UpdatePagesService do
context 'when timeout happens by DNS error' do
before do
- allow_any_instance_of(described_class)
- .to receive(:extract_zip_archive!).and_raise(SocketError)
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:extract_zip_archive!).and_raise(SocketError)
+ end
end
it 'raises an error' do
@@ -125,9 +126,10 @@ describe Projects::UpdatePagesService do
context 'when failed to extract zip artifacts' do
before do
- expect_any_instance_of(described_class)
- .to receive(:extract_zip_archive!)
- .and_raise(Projects::UpdatePagesService::FailedToExtractError)
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).to receive(:extract_zip_archive!)
+ .and_raise(Projects::UpdatePagesService::FailedToExtractError)
+ end
end
it 'raises an error' do
@@ -185,60 +187,20 @@ describe Projects::UpdatePagesService do
.and_return(metadata)
end
- shared_examples 'pages size limit exceeded' do
- it 'limits the maximum size of gitlab pages' do
- subject.execute
-
- expect(deploy_status.description)
- .to match(/artifacts for pages are too large/)
- expect(deploy_status).to be_script_failure
- expect(project.pages_metadatum).not_to be_deployed
- end
- end
-
context 'when maximum pages size is set to zero' do
before do
stub_application_setting(max_pages_size: 0)
end
- context 'when page size does not exceed internal maximum' do
- before do
- allow(metadata).to receive(:total_size).and_return(200.megabytes)
- end
-
- it 'updates pages correctly' do
- subject.execute
-
- expect(deploy_status.description).not_to be_present
- expect(project.pages_metadatum).to be_deployed
- end
- end
-
- context 'when pages size does exceed internal maximum' do
- before do
- allow(metadata).to receive(:total_size).and_return(2.terabytes)
- end
-
- it_behaves_like 'pages size limit exceeded'
- end
- end
-
- context 'when pages size is greater than max size setting' do
- before do
- stub_application_setting(max_pages_size: 200)
- allow(metadata).to receive(:total_size).and_return(201.megabytes)
- end
-
- it_behaves_like 'pages size limit exceeded'
+ it_behaves_like 'pages size limit is', ::Gitlab::Pages::MAX_SIZE
end
- context 'when max size setting is greater than internal max size' do
+ context 'when size is limited on the instance level' do
before do
- stub_application_setting(max_pages_size: 3.terabytes / 1.megabyte)
- allow(metadata).to receive(:total_size).and_return(2.terabytes)
+ stub_application_setting(max_pages_size: 100)
end
- it_behaves_like 'pages size limit exceeded'
+ it_behaves_like 'pages size limit is', 100.megabytes
end
end
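The new assertions above delegate to a parameterized shared example. The real 'pages size limit is' shared example lives in the suite's support files; the sketch below only illustrates the RSpec mechanism of passing an argument through `it_behaves_like`, using hypothetical names:

# frozen_string_literal: true

require 'spec_helper'

# Illustrative, hypothetical shared example; the actual 'pages size limit is'
# definition is part of GitLab's spec support files, not shown in this diff.
RSpec.shared_examples 'maximum size is' do |size|
  it "uses #{size} bytes as the limit" do
    expect(max_size).to eq(size)
  end
end

describe 'parameterized shared examples' do
  let(:max_size) { 100.megabytes }

  # The positional argument after the shared example name becomes the block
  # parameter inside the shared example definition.
  it_behaves_like 'maximum size is', 100.megabytes
end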
diff --git a/spec/services/prometheus/proxy_service_spec.rb b/spec/services/prometheus/proxy_service_spec.rb
index 03bda94e9c6..5a036194d01 100644
--- a/spec/services/prometheus/proxy_service_spec.rb
+++ b/spec/services/prometheus/proxy_service_spec.rb
@@ -5,8 +5,14 @@ require 'spec_helper'
describe Prometheus::ProxyService do
include ReactiveCachingHelpers
- set(:project) { create(:project) }
- set(:environment) { create(:environment, project: project) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:environment) { create(:environment, project: project) }
+
+ describe 'configuration' do
+    it 'does not need a ReactiveCaching refresh' do
+ expect(described_class.reactive_cache_refresh_interval).to be > described_class.reactive_cache_lifetime
+ end
+ end
describe '#initialize' do
let(:params) { ActionController::Parameters.new(query: '1').permit! }
diff --git a/spec/services/prometheus/proxy_variable_substitution_service_spec.rb b/spec/services/prometheus/proxy_variable_substitution_service_spec.rb
index b1cdb8fd3ae..9978c631366 100644
--- a/spec/services/prometheus/proxy_variable_substitution_service_spec.rb
+++ b/spec/services/prometheus/proxy_variable_substitution_service_spec.rb
@@ -39,8 +39,12 @@ describe Prometheus::ProxyVariableSubstitutionService do
end
context 'with predefined variables' do
+ let(:params_keys) { { query: 'up{%{environment_filter}}' } }
+
it_behaves_like 'success' do
- let(:expected_query) { %Q[up{environment="#{environment.slug}"}] }
+ let(:expected_query) do
+ %Q[up{container_name!="POD",environment="#{environment.slug}"}]
+ end
end
context 'with nil query' do
@@ -50,6 +54,133 @@ describe Prometheus::ProxyVariableSubstitutionService do
let(:expected_query) { nil }
end
end
+
+ context 'with liquid format' do
+ let(:params_keys) do
+ { query: 'up{environment="{{ci_environment_slug}}"}' }
+ end
+
+ it_behaves_like 'success' do
+ let(:expected_query) { %Q[up{environment="#{environment.slug}"}] }
+ end
+ end
+
+ context 'with ruby and liquid formats' do
+ let(:params_keys) do
+ { query: 'up{%{environment_filter},env2="{{ci_environment_slug}}"}' }
+ end
+
+ it_behaves_like 'success' do
+ let(:expected_query) do
+ %Q[up{container_name!="POD",environment="#{environment.slug}",env2="#{environment.slug}"}]
+ end
+ end
+ end
+ end
+
+ context 'with custom variables' do
+ let(:pod_name) { "pod1" }
+
+ let(:params_keys) do
+ {
+ query: 'up{pod_name="{{pod_name}}"}',
+ variables: ['pod_name', pod_name]
+ }
+ end
+
+ it_behaves_like 'success' do
+ let(:expected_query) { %q[up{pod_name="pod1"}] }
+ end
+
+ context 'with ruby variable interpolation format' do
+ let(:params_keys) do
+ {
+ query: 'up{pod_name="%{pod_name}"}',
+ variables: ['pod_name', pod_name]
+ }
+ end
+
+ it_behaves_like 'success' do
+ # Custom variables cannot be used with the Ruby interpolation format.
+ let(:expected_query) { "up{pod_name=\"%{pod_name}\"}" }
+ end
+ end
+
+ context 'with predefined variables in variables parameter' do
+ let(:params_keys) do
+ {
+ query: 'up{pod_name="{{pod_name}}",env="{{ci_environment_slug}}"}',
+ variables: ['pod_name', pod_name, 'ci_environment_slug', 'custom_value']
+ }
+ end
+
+ it_behaves_like 'success' do
+ # Predefined variable values should not be overwritten by custom variable
+ # values.
+ let(:expected_query) { "up{pod_name=\"#{pod_name}\",env=\"#{environment.slug}\"}" }
+ end
+ end
+
+ context 'with invalid variables parameter' do
+ let(:params_keys) do
+ {
+ query: 'up{pod_name="{{pod_name}}"}',
+ variables: ['a']
+ }
+ end
+
+ it_behaves_like 'error', 'Optional parameter "variables" must be an ' \
+ 'array of keys and values. Ex: [key1, value1, key2, value2]'
+ end
+
+ context 'with nil variables' do
+ let(:params_keys) do
+ {
+ query: 'up{pod_name="{{pod_name}}"}',
+ variables: nil
+ }
+ end
+
+ it_behaves_like 'success' do
+ let(:expected_query) { 'up{pod_name=""}' }
+ end
+ end
+
+ context 'with ruby and liquid variables' do
+ let(:params_keys) do
+ {
+ query: 'up{env1="%{ruby_variable}",env2="{{ liquid_variable }}"}',
+ variables: %w(ruby_variable value liquid_variable env_slug)
+ }
+ end
+
+ it_behaves_like 'success' do
+ # It should replace only liquid variables with their values
+ let(:expected_query) { %q[up{env1="%{ruby_variable}",env2="env_slug"}] }
+ end
+ end
+ end
+
+ context 'with liquid tags and ruby format variables' do
+ let(:params_keys) do
+ {
+ query: 'up{ {% if true %}env1="%{ci_environment_slug}",' \
+ 'env2="{{ci_environment_slug}}"{% endif %} }'
+ }
+ end
+
+ # The following spec will fail and should be changed to a 'success' spec
+ # once we remove support for the Ruby interpolation format.
+ # https://gitlab.com/gitlab-org/gitlab/issues/37990
+ #
+ # Liquid tags `{% %}` cannot be used currently because the Ruby `%`
+ # operator raises an error when it encounters a Liquid `{% %}` tag in the
+ # string.
+ #
+ # Once we remove support for the Ruby format, users can start using
+ # Liquid tags.
+
+ it_behaves_like 'error', 'Malformed string'
end
context 'ruby template rendering' do
@@ -139,5 +270,18 @@ describe Prometheus::ProxyVariableSubstitutionService do
end
end
end
+
+ context 'when liquid template rendering raises error' do
+ before do
+ liquid_service = instance_double(TemplateEngines::LiquidService)
+
+ allow(TemplateEngines::LiquidService).to receive(:new).and_return(liquid_service)
+ allow(liquid_service).to receive(:render).and_raise(
+ TemplateEngines::LiquidService::RenderError, 'error message'
+ )
+ end
+
+ it_behaves_like 'error', 'error message'
+ end
end
end
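The hunks above mix Ruby `%{...}` format interpolation with Liquid `{{ ... }}` templating. A standalone sketch of the two substitution styles, assuming only the public liquid gem (no GitLab code); the query and variable names are illustrative:

# frozen_string_literal: true

require 'liquid'

ruby_query   = 'up{environment="%{ci_environment_slug}"}'
liquid_query = 'up{environment="{{ci_environment_slug}}"}'

# Ruby format interpolation with a named reference (String#%).
puts ruby_query % { ci_environment_slug: 'production' }
# => up{environment="production"}

# Liquid rendering of the equivalent template.
template = Liquid::Template.parse(liquid_query)
puts template.render('ci_environment_slug' => 'production')
# => up{environment="production"}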
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index b105e1e40ce..b2576cae575 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -856,9 +856,10 @@ describe QuickActions::InterpretService do
end
context 'only group milestones available' do
- let(:group) { create(:group) }
+ let(:ancestor_group) { create(:group) }
+ let(:group) { create(:group, parent: ancestor_group) }
let(:project) { create(:project, :public, namespace: group) }
- let(:milestone) { create(:milestone, group: group, title: '10.0') }
+ let(:milestone) { create(:milestone, group: ancestor_group, title: '10.0') }
it_behaves_like 'milestone command' do
let(:content) { "/milestone %#{milestone.title}" }
diff --git a/spec/services/releases/update_service_spec.rb b/spec/services/releases/update_service_spec.rb
index 178bac3574f..f6c70873540 100644
--- a/spec/services/releases/update_service_spec.rb
+++ b/spec/services/releases/update_service_spec.rb
@@ -21,6 +21,7 @@ describe Releases::UpdateService do
it 'raises an error' do
result = service.execute
expect(result[:status]).to eq(:error)
+ expect(result[:milestones_updated]).to be_falsy
end
end
@@ -50,21 +51,33 @@ describe Releases::UpdateService do
end
context 'when a milestone is passed in' do
- let(:new_title) { 'v2.0' }
let(:milestone) { create(:milestone, project: project, title: 'v1.0') }
- let(:new_milestone) { create(:milestone, project: project, title: new_title) }
let(:params_with_milestone) { params.merge!({ milestones: [new_title] }) }
+ let(:new_milestone) { create(:milestone, project: project, title: new_title) }
let(:service) { described_class.new(new_milestone.project, user, params_with_milestone) }
before do
release.milestones << milestone
+ end
- service.execute
- release.reload
+ context 'a different milestone' do
+ let(:new_title) { 'v2.0' }
+
+ it 'updates the related milestone accordingly' do
+ result = service.execute
+ release.reload
+
+ expect(release.milestones.first.title).to eq(new_title)
+ expect(result[:milestones_updated]).to be_truthy
+ end
end
- it 'updates the related milestone accordingly' do
- expect(release.milestones.first.title).to eq(new_title)
+ context 'an identical milestone' do
+ let(:new_title) { 'v1.0' }
+
+ it "raises an error" do
+ expect { service.execute }.to raise_error(ActiveRecord::RecordInvalid)
+ end
end
end
@@ -76,12 +89,14 @@ describe Releases::UpdateService do
release.milestones << milestone
service.params = params_with_empty_milestone
- service.execute
- release.reload
end
it 'removes the old milestone and does not associate any new milestone' do
+ result = service.execute
+ release.reload
+
expect(release.milestones).not_to be_present
+ expect(result[:milestones_updated]).to be_truthy
end
end
@@ -96,14 +111,15 @@ describe Releases::UpdateService do
create(:milestone, project: project, title: new_title_1)
create(:milestone, project: project, title: new_title_2)
release.milestones << milestone
-
- service.execute
- release.reload
end
it 'removes the old milestone and updates the release with the new ones' do
+ result = service.execute
+ release.reload
+
milestone_titles = release.milestones.map(&:title)
expect(milestone_titles).to match_array([new_title_1, new_title_2])
+ expect(result[:milestones_updated]).to be_truthy
end
end
end
diff --git a/spec/services/resource_events/change_labels_service_spec.rb b/spec/services/resource_events/change_labels_service_spec.rb
index 070964eb1ec..2b987b7fec9 100644
--- a/spec/services/resource_events/change_labels_service_spec.rb
+++ b/spec/services/resource_events/change_labels_service_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe ResourceEvents::ChangeLabelsService do
- set(:project) { create(:project) }
- set(:author) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:author) { create(:user) }
let(:resource) { create(:issue, project: project) }
describe '.change_labels' do
diff --git a/spec/services/resource_events/merge_into_notes_service_spec.rb b/spec/services/resource_events/merge_into_notes_service_spec.rb
index 72467091791..6bad1b86fca 100644
--- a/spec/services/resource_events/merge_into_notes_service_spec.rb
+++ b/spec/services/resource_events/merge_into_notes_service_spec.rb
@@ -16,11 +16,11 @@ describe ResourceEvents::MergeIntoNotesService do
create(:note_on_issue, opts.merge(params))
end
- set(:project) { create(:project) }
- set(:user) { create(:user) }
- set(:resource) { create(:issue, project: project) }
- set(:label) { create(:label, project: project) }
- set(:label2) { create(:label, project: project) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:resource) { create(:issue, project: project) }
+ let_it_be(:label) { create(:label, project: project) }
+ let_it_be(:label2) { create(:label, project: project) }
let(:time) { Time.now }
describe '#execute' do
diff --git a/spec/services/resource_events/synthetic_label_notes_builder_service_spec.rb b/spec/services/resource_events/synthetic_label_notes_builder_service_spec.rb
new file mode 100644
index 00000000000..41902bc1da1
--- /dev/null
+++ b/spec/services/resource_events/synthetic_label_notes_builder_service_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ResourceEvents::SyntheticLabelNotesBuilderService do
+ describe '#execute' do
+ let!(:user) { create(:user) }
+
+ let!(:issue) { create(:issue, author: user) }
+
+ let!(:event1) { create(:resource_label_event, issue: issue) }
+ let!(:event2) { create(:resource_label_event, issue: issue) }
+ let!(:event3) { create(:resource_label_event, issue: issue) }
+
+ it 'returns the expected synthetic notes' do
+ notes = ResourceEvents::SyntheticLabelNotesBuilderService.new(issue, user).execute
+
+ expect(notes.size).to eq(3)
+ end
+ end
+end
diff --git a/spec/services/snippets/create_service_spec.rb b/spec/services/snippets/create_service_spec.rb
new file mode 100644
index 00000000000..6f7ce7959ff
--- /dev/null
+++ b/spec/services/snippets/create_service_spec.rb
@@ -0,0 +1,170 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Snippets::CreateService do
+ describe '#execute' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:admin) { create(:user, :admin) }
+ let(:opts) { base_opts.merge(extra_opts) }
+ let(:base_opts) do
+ {
+ title: 'Test snippet',
+ file_name: 'snippet.rb',
+ content: 'puts "hello world"',
+ visibility_level: Gitlab::VisibilityLevel::PRIVATE
+ }
+ end
+ let(:extra_opts) { {} }
+ let(:creator) { admin }
+
+ subject { Snippets::CreateService.new(project, creator, opts).execute }
+
+ let(:snippet) { subject.payload[:snippet] }
+
+ shared_examples 'a service that creates a snippet' do
+ it 'creates a snippet with the provided attributes' do
+ expect(snippet.title).to eq(opts[:title])
+ expect(snippet.file_name).to eq(opts[:file_name])
+ expect(snippet.content).to eq(opts[:content])
+ expect(snippet.visibility_level).to eq(opts[:visibility_level])
+ end
+ end
+
+ shared_examples 'public visibility level restrictions apply' do
+ let(:extra_opts) { { visibility_level: Gitlab::VisibilityLevel::PUBLIC } }
+
+ before do
+ stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC])
+ end
+
+ context 'when user is not an admin' do
+ let(:creator) { user }
+
+ it 'responds with an error' do
+ expect(subject).to be_error
+ end
+
+ it 'does not create a public snippet' do
+ expect(subject.message).to match('has been restricted')
+ end
+ end
+
+ context 'when user is an admin' do
+ it 'responds with success' do
+ expect(subject).to be_success
+ end
+
+ it 'creates a public snippet' do
+ expect(snippet.visibility_level).to eq(Gitlab::VisibilityLevel::PUBLIC)
+ end
+ end
+
+ context 'when visibility level is passed as a string' do
+ let(:extra_opts) { { visibility: 'internal' } }
+
+ before do
+ base_opts.delete(:visibility_level)
+ end
+
+ it 'assigns the correct visibility level' do
+ expect(subject).to be_success
+ expect(snippet.visibility_level).to eq(Gitlab::VisibilityLevel::INTERNAL)
+ end
+ end
+ end
+
+ shared_examples 'spam check is performed' do
+ shared_examples 'marked as spam' do
+ it 'marks a snippet as spam' do
+ expect(snippet).to be_spam
+ end
+
+ it 'invalidates the snippet' do
+ expect(snippet).to be_invalid
+ end
+
+ it 'creates a new spam_log' do
+ expect { snippet }
+ .to log_spam(title: snippet.title, noteable_type: snippet.class.name)
+ end
+
+ it 'assigns a spam_log to the snippet' do
+ expect(snippet.spam_log).to eq(SpamLog.last)
+ end
+ end
+
+ let(:extra_opts) do
+ { visibility_level: Gitlab::VisibilityLevel::PUBLIC, request: double(:request, env: {}) }
+ end
+
+ before do
+ expect_next_instance_of(AkismetService) do |akismet_service|
+ expect(akismet_service).to receive_messages(spam?: true)
+ end
+ end
+
+ [true, false, nil].each do |allow_possible_spam|
+ context "when recaptcha_disabled flag is #{allow_possible_spam.inspect}" do
+ before do
+ stub_feature_flags(allow_possible_spam: allow_possible_spam) unless allow_possible_spam.nil?
+ end
+
+ it_behaves_like 'marked as spam'
+ end
+ end
+ end
+
+ shared_examples 'snippet create data is tracked' do
+ let(:counter) { Gitlab::UsageDataCounters::SnippetCounter }
+
+ it 'increments count when create succeeds' do
+ expect { subject }.to change { counter.read(:create) }.by 1
+ end
+
+ context 'when create fails' do
+ let(:opts) { {} }
+
+ it 'does not increment count' do
+ expect { subject }.not_to change { counter.read(:create) }
+ end
+ end
+ end
+
+ shared_examples 'an error service response when save fails' do
+ let(:extra_opts) { { content: nil } }
+
+ it 'responds with an error' do
+ expect(subject).to be_error
+ end
+
+ it 'does not create the snippet' do
+ expect { subject }.not_to change { Snippet.count }
+ end
+ end
+
+ context 'when Project Snippet' do
+ let_it_be(:project) { create(:project) }
+
+ before do
+ project.add_developer(user)
+ end
+
+ it_behaves_like 'a service that creates a snippet'
+ it_behaves_like 'public visibility level restrictions apply'
+ it_behaves_like 'spam check is performed'
+ it_behaves_like 'snippet create data is tracked'
+ it_behaves_like 'an error service response when save fails'
+ end
+
+ context 'when PersonalSnippet' do
+ let(:project) { nil }
+
+ it_behaves_like 'a service that creates a snippet'
+ it_behaves_like 'public visibility level restrictions apply'
+ it_behaves_like 'spam check is performed'
+ it_behaves_like 'snippet create data is tracked'
+ it_behaves_like 'an error service response when save fails'
+ end
+ end
+end
diff --git a/spec/services/snippets/destroy_service_spec.rb b/spec/services/snippets/destroy_service_spec.rb
new file mode 100644
index 00000000000..bb035d275ab
--- /dev/null
+++ b/spec/services/snippets/destroy_service_spec.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Snippets::DestroyService do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:other_user) { create(:user) }
+
+ describe '#execute' do
+ subject { Snippets::DestroyService.new(user, snippet).execute }
+
+ context 'when snippet is nil' do
+ let(:snippet) { nil }
+
+ it 'returns a ServiceResponse error' do
+ expect(subject).to be_error
+ end
+ end
+
+ shared_examples 'a successful destroy' do
+ it 'deletes the snippet' do
+ expect { subject }.to change { Snippet.count }.by(-1)
+ end
+
+ it 'returns ServiceResponse success' do
+ expect(subject).to be_success
+ end
+ end
+
+ shared_examples 'an unsuccessful destroy' do
+ it 'does not delete the snippet' do
+ expect { subject }.to change { Snippet.count }.by(0)
+ end
+
+ it 'returns ServiceResponse error' do
+ expect(subject).to be_error
+ end
+ end
+
+ context 'when ProjectSnippet' do
+ let!(:snippet) { create(:project_snippet, project: project, author: author) }
+
+ context 'when user is able to admin_project_snippet' do
+ let(:author) { user }
+
+ before do
+ project.add_developer(user)
+ end
+
+ it_behaves_like 'a successful destroy'
+ end
+
+ context 'when user is not able to admin_project_snippet' do
+ let(:author) { other_user }
+
+ it_behaves_like 'an unsuccessful destroy'
+ end
+ end
+
+ context 'when PersonalSnippet' do
+ let!(:snippet) { create(:personal_snippet, author: author) }
+
+ context 'when user is able to admin_personal_snippet' do
+ let(:author) { user }
+
+ it_behaves_like 'a successful destroy'
+ end
+
+ context 'when user is not able to admin_personal_snippet' do
+ let(:author) { other_user }
+
+ it_behaves_like 'an unsuccessful destroy'
+ end
+ end
+ end
+end
diff --git a/spec/services/snippets/update_service_spec.rb b/spec/services/snippets/update_service_spec.rb
new file mode 100644
index 00000000000..b8215f9779d
--- /dev/null
+++ b/spec/services/snippets/update_service_spec.rb
@@ -0,0 +1,123 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Snippets::UpdateService do
+ describe '#execute' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:admin) { create :user, admin: true }
+ let(:visibility_level) { Gitlab::VisibilityLevel::PRIVATE }
+ let(:options) do
+ {
+ title: 'Test snippet',
+ file_name: 'snippet.rb',
+ content: 'puts "hello world"',
+ visibility_level: visibility_level
+ }
+ end
+ let(:updater) { user }
+
+ subject do
+ Snippets::UpdateService.new(
+ project,
+ updater,
+ options
+ ).execute(snippet)
+ end
+
+ shared_examples 'a service that updates a snippet' do
+ it 'updates a snippet with the provided attributes' do
+ expect { subject }.to change { snippet.title }.from(snippet.title).to(options[:title])
+ .and change { snippet.file_name }.from(snippet.file_name).to(options[:file_name])
+ .and change { snippet.content }.from(snippet.content).to(options[:content])
+ end
+ end
+
+ shared_examples 'public visibility level restrictions apply' do
+ let(:visibility_level) { Gitlab::VisibilityLevel::PUBLIC }
+
+ before do
+ stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC])
+ end
+
+ context 'when user is not an admin' do
+ it 'responds with an error' do
+ expect(subject).to be_error
+ end
+
+ it 'does not update snippet to public visibility' do
+ original_visibility = snippet.visibility_level
+
+ expect(subject.message).to match('has been restricted')
+ expect(snippet.visibility_level).to eq(original_visibility)
+ end
+ end
+
+ context 'when user is an admin' do
+ let(:updater) { admin }
+
+ it 'responds with success' do
+ expect(subject).to be_success
+ end
+
+ it 'updates the snippet to public visibility' do
+ old_visibility = snippet.visibility_level
+
+ expect(subject.payload[:snippet]).not_to be_nil
+ expect(snippet.visibility_level).not_to eq(old_visibility)
+ expect(snippet.visibility_level).to eq(Gitlab::VisibilityLevel::PUBLIC)
+ end
+ end
+
+ context 'when visibility level is passed as a string' do
+ before do
+ options[:visibility] = 'internal'
+ options.delete(:visibility_level)
+ end
+
+ it 'assigns the correct visibility level' do
+ expect(subject).to be_success
+ expect(snippet.visibility_level).to eq(Gitlab::VisibilityLevel::INTERNAL)
+ end
+ end
+ end
+
+ shared_examples 'snippet update data is tracked' do
+ let(:counter) { Gitlab::UsageDataCounters::SnippetCounter }
+
+ it 'increments count when update succeeds' do
+ expect { subject }.to change { counter.read(:update) }.by 1
+ end
+
+ context 'when update fails' do
+ let(:options) { { title: '' } }
+
+ it 'does not increment count' do
+ expect { subject }.not_to change { counter.read(:update) }
+ end
+ end
+ end
+
+ context 'when Project Snippet' do
+ let_it_be(:project) { create(:project) }
+ let!(:snippet) { create(:project_snippet, author: user, project: project) }
+
+ before do
+ project.add_developer(user)
+ end
+
+ it_behaves_like 'a service that updates a snippet'
+ it_behaves_like 'public visibility level restrictions apply'
+ it_behaves_like 'snippet update data is tracked'
+ end
+
+ context 'when PersonalSnippet' do
+ let(:project) { nil }
+ let!(:snippet) { create(:personal_snippet, author: user) }
+
+ it_behaves_like 'a service that updates a snippet'
+ it_behaves_like 'public visibility level restrictions apply'
+ it_behaves_like 'snippet update data is tracked'
+ end
+ end
+end
diff --git a/spec/services/spam/mark_as_spam_service_spec.rb b/spec/services/spam/mark_as_spam_service_spec.rb
new file mode 100644
index 00000000000..cba9d6a39cb
--- /dev/null
+++ b/spec/services/spam/mark_as_spam_service_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Spam::MarkAsSpamService do
+ let(:user_agent_detail) { build(:user_agent_detail) }
+ let(:spammable) { build(:issue, user_agent_detail: user_agent_detail) }
+ let(:fake_akismet_service) { double(:akismet_service, submit_spam: true) }
+
+ subject { described_class.new(spammable: spammable) }
+
+ describe '#execute' do
+ before do
+ allow(subject).to receive(:akismet).and_return(fake_akismet_service)
+ end
+
+ context 'when the spammable object is not submittable' do
+ before do
+ allow(spammable).to receive(:submittable_as_spam?).and_return false
+ end
+
+ it 'does not submit as spam' do
+ expect(subject.execute).to be_falsey
+ end
+ end
+
+ context 'spam is submitted successfully' do
+ before do
+ allow(spammable).to receive(:submittable_as_spam?).and_return true
+ allow(fake_akismet_service).to receive(:submit_spam).and_return true
+ end
+
+ it 'submits as spam' do
+ expect(subject.execute).to be_truthy
+ end
+
+ it "updates the spammable object's user agent detail as being submitted as spam" do
+ expect(user_agent_detail).to receive(:update_attribute)
+
+ subject.execute
+ end
+
+ context 'when Akismet does not consider it spam' do
+ it 'does not update the spammable object as spam' do
+ allow(fake_akismet_service).to receive(:submit_spam).and_return false
+
+ expect(subject.execute).to be_falsey
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/spam_service_spec.rb b/spec/services/spam_service_spec.rb
index 76f77583612..c8ebe87e4d2 100644
--- a/spec/services/spam_service_spec.rb
+++ b/spec/services/spam_service_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe SpamService do
describe '#when_recaptcha_verified' do
def check_spam(issue, request, recaptcha_verified)
- described_class.new(issue, request).when_recaptcha_verified(recaptcha_verified) do
+ described_class.new(spammable: issue, request: request).when_recaptcha_verified(recaptcha_verified) do
'yielded'
end
end
@@ -45,7 +45,7 @@ describe SpamService do
context 'when indicated as spam by akismet' do
shared_examples 'akismet spam' do
- it 'doesnt check as spam when request is missing' do
+ it "doesn't check as spam when request is missing" do
check_spam(issue, nil, false)
expect(issue).not_to be_spam
diff --git a/spec/services/suggestions/apply_service_spec.rb b/spec/services/suggestions/apply_service_spec.rb
index bdbcb0fdb07..84529af7187 100644
--- a/spec/services/suggestions/apply_service_spec.rb
+++ b/spec/services/suggestions/apply_service_spec.rb
@@ -48,10 +48,34 @@ describe Suggestions::ApplyService do
expect(commit.committer_email).to eq(user.commit_email)
expect(commit.author_name).to eq(user.name)
end
+
+ context 'when a custom suggestion commit message' do
+ before do
+ project.update!(suggestion_commit_message: message)
+
+ apply(suggestion)
+ end
+
+ context 'is not specified' do
+ let(:message) { nil }
+
+ it 'sets default commit message' do
+ expect(project.repository.commit.message).to eq("Apply suggestion to files/ruby/popen.rb")
+ end
+ end
+
+ context 'is specified' do
+ let(:message) { 'refactor: %{project_path} %{project_name} %{file_path} %{branch_name} %{username} %{user_full_name}' }
+
+ it 'sets custom commit message' do
+ expect(project.repository.commit.message).to eq("refactor: project-1 Project_1 files/ruby/popen.rb master test.user Test User")
+ end
+ end
+ end
end
- let(:project) { create(:project, :repository) }
- let(:user) { create(:user, :commit_email) }
+ let(:project) { create(:project, :repository, path: 'project-1', name: 'Project_1') }
+ let(:user) { create(:user, :commit_email, name: 'Test User', username: 'test.user') }
let(:position) { build_position }
@@ -113,7 +137,8 @@ describe Suggestions::ApplyService do
context 'non-fork project' do
let(:merge_request) do
create(:merge_request, source_project: project,
- target_project: project)
+ target_project: project,
+ source_branch: 'master')
end
before do
diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb
index a952e26e338..4ba22af85f0 100644
--- a/spec/services/system_note_service_spec.rb
+++ b/spec/services/system_note_service_spec.rb
@@ -7,9 +7,9 @@ describe SystemNoteService do
include RepoHelpers
include AssetsHelpers
- set(:group) { create(:group) }
- set(:project) { create(:project, :repository, group: group) }
- set(:author) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :repository, group: group) }
+ let_it_be(:author) { create(:user) }
let(:noteable) { create(:issue, project: project) }
let(:issue) { noteable }
@@ -76,28 +76,14 @@ describe SystemNoteService do
end
describe '.change_due_date' do
- subject { described_class.change_due_date(noteable, project, author, due_date) }
+ let(:due_date) { double }
- let(:due_date) { Date.today }
-
- it_behaves_like 'a note with overridable created_at'
-
- it_behaves_like 'a system note' do
- let(:action) { 'due_date' }
- end
-
- context 'when due date added' do
- it 'sets the note text' do
- expect(subject.note).to eq "changed due date to #{Date.today.to_s(:long)}"
+ it 'calls TimeTrackingService' do
+ expect_next_instance_of(::SystemNotes::TimeTrackingService) do |service|
+ expect(service).to receive(:change_due_date).with(due_date)
end
- end
- context 'when due date removed' do
- let(:due_date) { nil }
-
- it 'sets the note text' do
- expect(subject.note).to eq 'removed due date'
- end
+ described_class.change_due_date(noteable, project, author, due_date)
end
end
@@ -488,36 +474,12 @@ describe SystemNoteService do
end
describe '.change_time_estimate' do
- subject { described_class.change_time_estimate(noteable, project, author) }
-
- it_behaves_like 'a system note' do
- let(:action) { 'time_tracking' }
- end
-
- context 'with a time estimate' do
- it 'sets the note text' do
- noteable.update_attribute(:time_estimate, 277200)
-
- expect(subject.note).to eq "changed time estimate to 1w 4d 5h"
- end
-
- context 'when time_tracking_limit_to_hours setting is true' do
- before do
- stub_application_setting(time_tracking_limit_to_hours: true)
- end
-
- it 'sets the note text' do
- noteable.update_attribute(:time_estimate, 277200)
-
- expect(subject.note).to eq "changed time estimate to 77h"
- end
+ it 'calls TimeTrackingService' do
+ expect_next_instance_of(::SystemNotes::TimeTrackingService) do |service|
+ expect(service).to receive(:change_time_estimate)
end
- end
- context 'without a time estimate' do
- it 'sets the note text' do
- expect(subject.note).to eq "removed time estimate"
- end
+ described_class.change_time_estimate(noteable, project, author)
end
end
@@ -548,61 +510,12 @@ describe SystemNoteService do
end
describe '.change_time_spent' do
- # We need a custom noteable in order to the shared examples to be green.
- let(:noteable) do
- mr = create(:merge_request, source_project: project)
- mr.spend_time(duration: 360000, user_id: author.id)
- mr.save!
- mr
- end
-
- subject do
- described_class.change_time_spent(noteable, project, author)
- end
-
- it_behaves_like 'a system note' do
- let(:action) { 'time_tracking' }
- end
-
- context 'when time was added' do
- it 'sets the note text' do
- spend_time!(277200)
-
- expect(subject.note).to eq "added 1w 4d 5h of time spent"
+ it 'calls TimeTrackingService' do
+ expect_next_instance_of(::SystemNotes::TimeTrackingService) do |service|
+ expect(service).to receive(:change_time_spent)
end
- end
-
- context 'when time was subtracted' do
- it 'sets the note text' do
- spend_time!(-277200)
-
- expect(subject.note).to eq "subtracted 1w 4d 5h of time spent"
- end
- end
-
- context 'when time was removed' do
- it 'sets the note text' do
- spend_time!(:reset)
- expect(subject.note).to eq "removed time spent"
- end
- end
-
- context 'when time_tracking_limit_to_hours setting is true' do
- before do
- stub_application_setting(time_tracking_limit_to_hours: true)
- end
-
- it 'sets the note text' do
- spend_time!(277200)
-
- expect(subject.note).to eq "added 77h of time spent"
- end
- end
-
- def spend_time!(seconds)
- noteable.spend_time(duration: seconds, user_id: author.id)
- noteable.save!
+ described_class.change_time_spent(noteable, project, author)
end
end
diff --git a/spec/services/system_notes/commit_service_spec.rb b/spec/services/system_notes/commit_service_spec.rb
index 4d4403be59a..5839a17e4a0 100644
--- a/spec/services/system_notes/commit_service_spec.rb
+++ b/spec/services/system_notes/commit_service_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe SystemNotes::CommitService do
- set(:group) { create(:group) }
- set(:project) { create(:project, :repository, group: group) }
- set(:author) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :repository, group: group) }
+ let_it_be(:author) { create(:user) }
let(:commit_service) { described_class.new(noteable: noteable, project: project, author: author) }
diff --git a/spec/services/system_notes/issuables_service_spec.rb b/spec/services/system_notes/issuables_service_spec.rb
index c2f627c681b..56ef0039b63 100644
--- a/spec/services/system_notes/issuables_service_spec.rb
+++ b/spec/services/system_notes/issuables_service_spec.rb
@@ -265,7 +265,9 @@ describe ::SystemNotes::IssuablesService do
context 'when cross-reference disallowed' do
before do
- expect_any_instance_of(described_class).to receive(:cross_reference_disallowed?).and_return(true)
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).to receive(:cross_reference_disallowed?).and_return(true)
+ end
end
it 'returns nil' do
@@ -279,7 +281,9 @@ describe ::SystemNotes::IssuablesService do
context 'when cross-reference allowed' do
before do
- expect_any_instance_of(described_class).to receive(:cross_reference_disallowed?).and_return(false)
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).to receive(:cross_reference_disallowed?).and_return(false)
+ end
end
it_behaves_like 'a system note' do
diff --git a/spec/services/system_notes/time_tracking_service_spec.rb b/spec/services/system_notes/time_tracking_service_spec.rb
new file mode 100644
index 00000000000..7e3e6a75cdf
--- /dev/null
+++ b/spec/services/system_notes/time_tracking_service_spec.rb
@@ -0,0 +1,129 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ::SystemNotes::TimeTrackingService do
+ let_it_be(:author) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+
+ let(:noteable) { create(:issue, project: project) }
+
+ describe '#change_due_date' do
+ subject { described_class.new(noteable: noteable, project: project, author: author).change_due_date(due_date) }
+
+ let(:due_date) { Date.today }
+
+ it_behaves_like 'a note with overridable created_at'
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'due_date' }
+ end
+
+ context 'when due date added' do
+ it 'sets the note text' do
+ expect(subject.note).to eq "changed due date to #{due_date.to_s(:long)}"
+ end
+ end
+
+ context 'when due date removed' do
+ let(:due_date) { nil }
+
+ it 'sets the note text' do
+ expect(subject.note).to eq 'removed due date'
+ end
+ end
+ end
+
+ describe '#change_time_estimate' do
+ subject { described_class.new(noteable: noteable, project: project, author: author).change_time_estimate }
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'time_tracking' }
+ end
+
+ context 'with a time estimate' do
+ it 'sets the note text' do
+ noteable.update_attribute(:time_estimate, 277200)
+
+ expect(subject.note).to eq "changed time estimate to 1w 4d 5h"
+ end
+
+ context 'when time_tracking_limit_to_hours setting is true' do
+ before do
+ stub_application_setting(time_tracking_limit_to_hours: true)
+ end
+
+ it 'sets the note text' do
+ noteable.update_attribute(:time_estimate, 277200)
+
+ expect(subject.note).to eq "changed time estimate to 77h"
+ end
+ end
+ end
+
+ context 'without a time estimate' do
+ it 'sets the note text' do
+ expect(subject.note).to eq "removed time estimate"
+ end
+ end
+ end
+
+ describe '#change_time_spent' do
+ # We need a custom noteable in order for the shared examples to pass.
+ let(:noteable) do
+ mr = create(:merge_request, source_project: project)
+ mr.spend_time(duration: 360000, user_id: author.id)
+ mr.save!
+ mr
+ end
+
+ subject do
+ described_class.new(noteable: noteable, project: project, author: author).change_time_spent
+ end
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'time_tracking' }
+ end
+
+ context 'when time was added' do
+ it 'sets the note text' do
+ spend_time!(277200)
+
+ expect(subject.note).to eq "added 1w 4d 5h of time spent"
+ end
+ end
+
+ context 'when time was subtracted' do
+ it 'sets the note text' do
+ spend_time!(-277200)
+
+ expect(subject.note).to eq "subtracted 1w 4d 5h of time spent"
+ end
+ end
+
+ context 'when time was removed' do
+ it 'sets the note text' do
+ spend_time!(:reset)
+
+ expect(subject.note).to eq "removed time spent"
+ end
+ end
+
+ context 'when time_tracking_limit_to_hours setting is true' do
+ before do
+ stub_application_setting(time_tracking_limit_to_hours: true)
+ end
+
+ it 'sets the note text' do
+ spend_time!(277200)
+
+ expect(subject.note).to eq "added 77h of time spent"
+ end
+ end
+
+ def spend_time!(seconds)
+ noteable.spend_time(duration: seconds, user_id: author.id)
+ noteable.save!
+ end
+ end
+end
diff --git a/spec/services/task_list_toggle_service_spec.rb b/spec/services/task_list_toggle_service_spec.rb
index a309951bbcb..82a5446dcb8 100644
--- a/spec/services/task_list_toggle_service_spec.rb
+++ b/spec/services/task_list_toggle_service_spec.rb
@@ -121,7 +121,7 @@ describe TaskListToggleService do
> * [x] Task 2
EOT
- markdown_html = Banzai::Pipeline::FullPipeline.call(markdown, project: nil)[:output].to_html
+ markdown_html = parse_markdown(markdown)
toggler = described_class.new(markdown, markdown_html,
toggle_as_checked: true,
line_source: '> > * [ ] Task 1', line_number: 1)
@@ -142,7 +142,7 @@ describe TaskListToggleService do
* [x] Task 2
EOT
- markdown_html = Banzai::Pipeline::FullPipeline.call(markdown, project: nil)[:output].to_html
+ markdown_html = parse_markdown(markdown)
toggler = described_class.new(markdown, markdown_html,
toggle_as_checked: true,
line_source: '* [ ] Task 1', line_number: 5)
@@ -151,4 +151,44 @@ describe TaskListToggleService do
expect(toggler.updated_markdown.lines[4]).to eq "* [x] Task 1\n"
expect(toggler.updated_markdown_html).to include('disabled checked> Task 1')
end
+
+ context 'when clicking an embedded subtask' do
+ it 'properly handles it inside an unordered list' do
+ markdown =
+ <<-EOT.strip_heredoc
+ - - [ ] Task 1
+ - [x] Task 2
+ EOT
+
+ markdown_html = parse_markdown(markdown)
+ toggler = described_class.new(markdown, markdown_html,
+ toggle_as_checked: true,
+ line_source: '- - [ ] Task 1', line_number: 1)
+
+ expect(toggler.execute).to be_truthy
+ expect(toggler.updated_markdown.lines[0]).to eq "- - [x] Task 1\n"
+ expect(toggler.updated_markdown_html).to include('disabled checked> Task 1')
+ end
+
+ it 'properly handles it inside an ordered list' do
+ markdown =
+ <<-EOT.strip_heredoc
+ 1. - [ ] Task 1
+ - [x] Task 2
+ EOT
+
+ markdown_html = parse_markdown(markdown)
+ toggler = described_class.new(markdown, markdown_html,
+ toggle_as_checked: true,
+ line_source: '1. - [ ] Task 1', line_number: 1)
+
+ expect(toggler.execute).to be_truthy
+ expect(toggler.updated_markdown.lines[0]).to eq "1. - [x] Task 1\n"
+ expect(toggler.updated_markdown_html).to include('disabled checked> Task 1')
+ end
+ end
+
+ def parse_markdown(markdown)
+ Banzai::Pipeline::FullPipeline.call(markdown, project: nil)[:output].to_html
+ end
end
diff --git a/spec/services/template_engines/liquid_service_spec.rb b/spec/services/template_engines/liquid_service_spec.rb
new file mode 100644
index 00000000000..7c5262bc264
--- /dev/null
+++ b/spec/services/template_engines/liquid_service_spec.rb
@@ -0,0 +1,126 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe TemplateEngines::LiquidService do
+ describe '#render' do
+ let(:template) { 'up{env={{ci_environment_slug}}}' }
+ let(:result) { subject }
+
+ let_it_be(:slug) { 'env_slug' }
+
+ let_it_be(:context) do
+ {
+ ci_environment_slug: slug,
+ environment_filter: "container_name!=\"POD\",environment=\"#{slug}\""
+ }
+ end
+
+ subject { described_class.new(template).render(context) }
+
+ it 'substitutes variables when the context has symbol keys' do
+ expect(result).to include("up{env=#{slug}")
+ end
+
+ context 'with multiple occurrences of variable in template' do
+ let(:template) do
+ 'up{env1={{ci_environment_slug}},env2={{ci_environment_slug}}}'
+ end
+
+ it 'substitutes variables' do
+ expect(result).to eq("up{env1=#{slug},env2=#{slug}}")
+ end
+ end
+
+ context 'with multiple variables in template' do
+ let(:template) do
+ 'up{env={{ci_environment_slug}},' \
+ '{{environment_filter}}}'
+ end
+
+ it 'substitutes all variables' do
+ expect(result).to eq(
+ "up{env=#{slug}," \
+ "container_name!=\"POD\",environment=\"#{slug}\"}"
+ )
+ end
+ end
+
+ context 'with unknown variables in template' do
+ let(:template) { 'up{env={{env_slug}}}' }
+
+ it 'does not substitute unknown variables' do
+ expect(result).to eq("up{env=}")
+ end
+ end
+
+ context 'with extra variables in context' do
+ let(:template) { 'up{env={{ci_environment_slug}}}' }
+
+ it 'substitutes variables' do
+ # If context has only 1 key, there is no need for this spec.
+ expect(context.count).to be > 1
+ expect(result).to eq("up{env=#{slug}}")
+ end
+ end
+
+ context 'with unknown and known variables in template' do
+ let(:template) { 'up{env={{ci_environment_slug}},other_env={{env_slug}}}' }
+
+ it 'substitutes known variables' do
+ expect(result).to eq("up{env=#{slug},other_env=}")
+ end
+ end
+
+ context 'Liquid errors' do
+ shared_examples 'raises RenderError' do |message|
+ it do
+ expect { result }.to raise_error(described_class::RenderError, message)
+ end
+ end
+
+ context 'when liquid raises error' do
+ let(:template) { 'up{env={{ci_environment_slug}}' }
+ let(:liquid_template) { Liquid::Template.new }
+
+ before do
+ allow(Liquid::Template).to receive(:parse).with(template).and_return(liquid_template)
+ allow(liquid_template).to receive(:render!).and_raise(exception, message)
+ end
+
+ context 'raises Liquid::MemoryError' do
+ let(:exception) { Liquid::MemoryError }
+ let(:message) { 'Liquid error: Memory limits exceeded' }
+
+ it_behaves_like 'raises RenderError', 'Memory limit exceeded while rendering template'
+ end
+
+ context 'raises Liquid::Error' do
+ let(:exception) { Liquid::Error }
+ let(:message) { 'Liquid error: Generic error message' }
+
+ it_behaves_like 'raises RenderError', 'Error rendering query'
+ end
+ end
+
+ context 'with template that is expensive to render' do
+ let(:template) do
+ '{% assign loop_count = 1000 %}'\
+ '{% assign padStr = "0" %}'\
+ '{% assign number_to_pad = "1" %}'\
+ '{% assign strLength = number_to_pad | size %}'\
+ '{% assign padLength = loop_count | minus: strLength %}'\
+ '{% if padLength > 0 %}'\
+ ' {% assign padded = number_to_pad %}'\
+ ' {% for position in (1..padLength) %}'\
+ ' {% assign padded = padded | prepend: padStr %}'\
+ ' {% endfor %}'\
+ ' {{ padded }}'\
+ '{% endif %}'
+ end
+
+ it_behaves_like 'raises RenderError', 'Memory limit exceeded while rendering template'
+ end
+ end
+ end
+end
diff --git a/spec/services/update_snippet_service_spec.rb b/spec/services/update_snippet_service_spec.rb
deleted file mode 100644
index 19b35dca6a7..00000000000
--- a/spec/services/update_snippet_service_spec.rb
+++ /dev/null
@@ -1,80 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe UpdateSnippetService do
- before do
- @user = create :user
- @admin = create :user, admin: true
- @opts = {
- title: 'Test snippet',
- file_name: 'snippet.rb',
- content: 'puts "hello world"',
- visibility_level: Gitlab::VisibilityLevel::PRIVATE
- }
- end
-
- context 'When public visibility is restricted' do
- before do
- stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC])
-
- @snippet = create_snippet(@project, @user, @opts)
- @opts.merge!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
- end
-
- it 'non-admins should not be able to update to public visibility' do
- old_visibility = @snippet.visibility_level
- update_snippet(@project, @user, @snippet, @opts)
- expect(@snippet.errors.messages).to have_key(:visibility_level)
- expect(@snippet.errors.messages[:visibility_level].first).to(
- match('has been restricted')
- )
- expect(@snippet.visibility_level).to eq(old_visibility)
- end
-
- it 'admins should be able to update to public visibility' do
- old_visibility = @snippet.visibility_level
- update_snippet(@project, @admin, @snippet, @opts)
- expect(@snippet.visibility_level).not_to eq(old_visibility)
- expect(@snippet.visibility_level).to eq(Gitlab::VisibilityLevel::PUBLIC)
- end
-
- describe "when visibility level is passed as a string" do
- before do
- @opts[:visibility] = 'internal'
- @opts.delete(:visibility_level)
- end
-
- it "assigns the correct visibility level" do
- update_snippet(@project, @user, @snippet, @opts)
- expect(@snippet.errors.any?).to be_falsey
- expect(@snippet.visibility_level).to eq(Gitlab::VisibilityLevel::INTERNAL)
- end
- end
- end
-
- describe 'usage counter' do
- let(:counter) { Gitlab::UsageDataCounters::SnippetCounter }
- let(:snippet) { create_snippet(nil, @user, @opts) }
-
- it 'increments count' do
- expect do
- update_snippet(nil, @admin, snippet, @opts)
- end.to change { counter.read(:update) }.by 1
- end
-
- it 'does not increment count if create fails' do
- expect do
- update_snippet(nil, @admin, snippet, { title: '' })
- end.not_to change { counter.read(:update) }
- end
- end
-
- def create_snippet(project, user, opts)
- CreateSnippetService.new(project, user, opts).execute
- end
-
- def update_snippet(project, user, snippet, opts)
- UpdateSnippetService.new(project, user, snippet, opts).execute
- end
-end
diff --git a/spec/services/users/activity_service_spec.rb b/spec/services/users/activity_service_spec.rb
index d8d2be87fd3..f477eee1dd6 100644
--- a/spec/services/users/activity_service_spec.rb
+++ b/spec/services/users/activity_service_spec.rb
@@ -7,7 +7,7 @@ describe Users::ActivityService do
let(:user) { create(:user, last_activity_on: last_activity_on) }
- subject { described_class.new(user, 'type') }
+ subject { described_class.new(user) }
describe '#execute', :clean_gitlab_redis_shared_state do
context 'when last activity is nil' do
@@ -40,7 +40,7 @@ describe Users::ActivityService do
let(:fake_object) { double(username: 'hello') }
it 'does not record activity' do
- service = described_class.new(fake_object, 'pull')
+ service = described_class.new(fake_object)
expect(service).not_to receive(:record_activity)
diff --git a/spec/services/users/destroy_service_spec.rb b/spec/services/users/destroy_service_spec.rb
index 23a0c71175e..d9335cef5cc 100644
--- a/spec/services/users/destroy_service_spec.rb
+++ b/spec/services/users/destroy_service_spec.rb
@@ -20,6 +20,22 @@ describe Users::DestroyService do
expect { Namespace.find(namespace.id) }.to raise_error(ActiveRecord::RecordNotFound)
end
+ it 'deletes user associations in batches' do
+ expect(user).to receive(:destroy_dependent_associations_in_batches)
+
+ service.execute(user)
+ end
+
+ context 'when :destroy_user_associations_in_batches flag is disabled' do
+ it 'does not delete user associations in batches' do
+ stub_feature_flags(destroy_user_associations_in_batches: false)
+
+ expect(user).not_to receive(:destroy_dependent_associations_in_batches)
+
+ service.execute(user)
+ end
+ end
+
it 'will delete the project' do
expect_next_instance_of(Projects::DestroyService) do |destroy_service|
expect(destroy_service).to receive(:execute).once.and_return(true)
diff --git a/spec/services/users/update_service_spec.rb b/spec/services/users/update_service_spec.rb
index 9384287f98a..50bbb16e368 100644
--- a/spec/services/users/update_service_spec.rb
+++ b/spec/services/users/update_service_spec.rb
@@ -6,13 +6,6 @@ describe Users::UpdateService do
let(:user) { create(:user) }
describe '#execute' do
- it 'updates the name' do
- result = update_user(user, name: 'New Name')
-
- expect(result).to eq(status: :success)
- expect(user.name).to eq('New Name')
- end
-
it 'updates time preferences' do
result = update_user(user, timezone: 'Europe/Warsaw', time_display_relative: true, time_format_in_24h: false)
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index 1f0119108a8..6393e482904 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -245,6 +245,12 @@ RSpec.configure do |config|
Rails.cache = caching_store
end
+ config.around do |example|
+ # Wrap each example in its own context to make sure the contexts don't
+ # leak
+ Labkit::Context.with_context { example.run }
+ end
+
config.around(:each, :clean_gitlab_redis_cache) do |example|
redis_cache_cleanup!
diff --git a/spec/support/cycle_analytics_helpers/test_generation.rb b/spec/support/cycle_analytics_helpers/test_generation.rb
index 2096ec90c5b..34018263339 100644
--- a/spec/support/cycle_analytics_helpers/test_generation.rb
+++ b/spec/support/cycle_analytics_helpers/test_generation.rb
@@ -27,6 +27,8 @@ module CycleAnalyticsHelpers
scenarios = combinations_of_start_time_conditions.product(combinations_of_end_time_conditions)
scenarios.each do |start_time_conditions, end_time_conditions|
+ let_it_be(:other_project) { create(:project, :repository) }
+
context "start condition: #{start_time_conditions.map(&:first).to_sentence}" do
context "end condition: #{end_time_conditions.map(&:first).to_sentence}" do
it "finds the median of available durations between the two conditions", :sidekiq_might_not_need_inline do
@@ -56,8 +58,6 @@ module CycleAnalyticsHelpers
end
context "when the data belongs to another project" do
- let(:other_project) { create(:project, :repository) }
-
it "returns nil" do
# Use a stub to "trick" the data/condition functions
# into using another project. This saves us from having to
@@ -117,7 +117,7 @@ module CycleAnalyticsHelpers
data = data_fn[self]
end_time = rand(1..10).days.from_now
- end_time_conditions.each_with_index do |(condition_name, condition_fn), index|
+ end_time_conditions.each_with_index do |(_condition_name, condition_fn), index|
Timecop.freeze(end_time + index.days) { condition_fn[self, data] }
end
diff --git a/spec/support/features/discussion_comments_shared_example.rb b/spec/support/features/discussion_comments_shared_example.rb
index f070243f111..ea13e91860a 100644
--- a/spec/support/features/discussion_comments_shared_example.rb
+++ b/spec/support/features/discussion_comments_shared_example.rb
@@ -297,11 +297,11 @@ shared_examples 'thread comments' do |resource_name|
find("#{form_selector} .note-textarea").send_keys('a')
end
- it "should show a 'Comment & reopen #{resource_name}' button" do
+ it "shows a 'Comment & reopen #{resource_name}' button" do
expect(find("#{form_selector} .js-note-target-reopen")).to have_content "Comment & reopen #{resource_name}"
end
- it "should show a 'Start thread & reopen #{resource_name}' button when 'Start thread' is selected" do
+ it "shows a 'Start thread & reopen #{resource_name}' button when 'Start thread' is selected" do
find(toggle_selector).click
find("#{menu_selector} li", match: :first)
diff --git a/spec/support/helpers/filter_spec_helper.rb b/spec/support/helpers/filter_spec_helper.rb
index 95c24d76dcd..45d49696e06 100644
--- a/spec/support/helpers/filter_spec_helper.rb
+++ b/spec/support/helpers/filter_spec_helper.rb
@@ -28,6 +28,17 @@ module FilterSpecHelper
described_class.call(html, context)
end
+ # Get an instance of the Filter class
+ #
+ # Use this for testing instance methods, but remember to test the result of
+ # the full pipeline by calling #call using the other methods in this helper.
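+ #
+ # A usage sketch (illustrative only; assumes the including spec defines
+ # `input_text` and `project`, and relies on HTML::Pipeline::Filter#context):
+ #
+ #   it 'builds the filter with the expected context' do
+ #     expect(filter_instance.context[:project]).to eq(project)
+ #   end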
+ def filter_instance
+ render_context = Banzai::RenderContext.new(project, current_user)
+ context = { project: project, current_user: current_user, render_context: render_context }
+
+ described_class.new(input_text, context)
+ end
+
# Run text through HTML::Pipeline with the current filter and return the
# result Hash
#
diff --git a/spec/support/helpers/filtered_search_helpers.rb b/spec/support/helpers/filtered_search_helpers.rb
index 5dc87c36931..c8b7a9251a9 100644
--- a/spec/support/helpers/filtered_search_helpers.rb
+++ b/spec/support/helpers/filtered_search_helpers.rb
@@ -26,7 +26,7 @@ module FilteredSearchHelpers
# Select a label clicking in the search dropdown instead
# of entering label names on the input.
def select_label_on_dropdown(label_title)
- input_filtered_search("label:", submit: false)
+ input_filtered_search("label=", submit: false)
within('#js-dropdown-label') do
wait_for_requests
@@ -37,6 +37,10 @@ module FilteredSearchHelpers
filtered_search.send_keys(:enter)
end
+ def expect_filtered_search_dropdown_results(filter_dropdown, count)
+ expect(filter_dropdown).to have_selector('.filter-dropdown .filter-dropdown-item', count: count)
+ end
+
def expect_issues_list_count(open_count, closed_count = 0)
all_count = open_count + closed_count
@@ -67,7 +71,7 @@ module FilteredSearchHelpers
end
def init_label_search
- filtered_search.set('label:')
+ filtered_search.set('label=')
# This ensures the dropdown is shown
expect(find('#js-dropdown-label')).not_to have_css('.filter-dropdown-loading')
end
@@ -86,6 +90,7 @@ module FilteredSearchHelpers
el = token_elements[index]
expect(el.find('.name')).to have_content(token[:name])
+ expect(el.find('.operator')).to have_content(token[:operator]) if token[:operator].present?
expect(el.find('.value')).to have_content(token[:value]) if token[:value].present?
# gl-emoji content is blank when the emoji unicode is not supported
@@ -97,8 +102,8 @@ module FilteredSearchHelpers
end
end
- def create_token(token_name, token_value = nil, symbol = nil)
- { name: token_name, value: "#{symbol}#{token_value}" }
+ def create_token(token_name, token_value = nil, symbol = nil, token_operator = '=')
+ { name: token_name, operator: token_operator, value: "#{symbol}#{token_value}" }
end
def author_token(author_name = nil)
@@ -109,9 +114,9 @@ module FilteredSearchHelpers
create_token('Assignee', assignee_name)
end
- def milestone_token(milestone_name = nil, has_symbol = true)
+ def milestone_token(milestone_name = nil, has_symbol = true, operator = '=')
symbol = has_symbol ? '%' : nil
- create_token('Milestone', milestone_name, symbol)
+ create_token('Milestone', milestone_name, symbol, operator)
end
def release_token(release_tag = nil)
diff --git a/spec/support/helpers/graphql_helpers.rb b/spec/support/helpers/graphql_helpers.rb
index e21b3aea3da..6d9c27d0255 100644
--- a/spec/support/helpers/graphql_helpers.rb
+++ b/spec/support/helpers/graphql_helpers.rb
@@ -105,12 +105,15 @@ module GraphqlHelpers
end
def query_graphql_field(name, attributes = {}, fields = nil)
- fields ||= all_graphql_fields_for(name.classify)
- attributes = attributes_to_graphql(attributes)
- attributes = "(#{attributes})" if attributes.present?
+ field_params = if attributes.present?
+ "(#{attributes_to_graphql(attributes)})"
+ else
+ ''
+ end
+
<<~QUERY
- #{name}#{attributes}
- #{wrap_fields(fields)}
+ #{GraphqlHelpers.fieldnamerize(name.to_s)}#{field_params}
+ #{wrap_fields(fields || all_graphql_fields_for(name.to_s.classify))}
QUERY
end
@@ -301,6 +304,17 @@ module GraphqlHelpers
def global_id_of(model)
model.to_global_id.to_s
end
+
+ def missing_required_argument(path, argument)
+ a_hash_including(
+ 'path' => ['query'].concat(path),
+ 'extensions' => a_hash_including('code' => 'missingRequiredArguments', 'arguments' => argument.to_s)
+ )
+ end
+
+ def custom_graphql_error(path, msg)
+ a_hash_including('path' => path, 'message' => msg)
+ end
end
# This warms our schema, doing this as part of loading the helpers to avoid
diff --git a/spec/support/helpers/kubernetes_helpers.rb b/spec/support/helpers/kubernetes_helpers.rb
index 9435a0e1487..89360b55de2 100644
--- a/spec/support/helpers/kubernetes_helpers.rb
+++ b/spec/support/helpers/kubernetes_helpers.rb
@@ -33,6 +33,14 @@ module KubernetesHelpers
.to_return(kube_response(kube_v1_rbac_authorization_discovery_body))
end
+ def stub_kubeclient_discover_istio(api_url)
+ stub_kubeclient_discover_base(api_url)
+
+ WebMock
+ .stub_request(:get, api_url + '/apis/networking.istio.io/v1alpha3')
+ .to_return(kube_response(kube_istio_discovery_body))
+ end
+
def stub_kubeclient_discover(api_url)
stub_kubeclient_discover_base(api_url)
@@ -229,6 +237,16 @@ module KubernetesHelpers
.to_return(kube_response({}))
end
+ def stub_kubeclient_get_gateway(api_url, name, namespace: 'default')
+ WebMock.stub_request(:get, api_url + "/apis/networking.istio.io/v1alpha3/namespaces/#{namespace}/gateways/#{name}")
+ .to_return(kube_response(kube_istio_gateway_body(name, namespace)))
+ end
+
+ def stub_kubeclient_put_gateway(api_url, name, namespace: 'default')
+ WebMock.stub_request(:put, api_url + "/apis/networking.istio.io/v1alpha3/namespaces/#{namespace}/gateways/#{name}")
+ .to_return(kube_response({}))
+ end
+
def kube_v1_secret_body(**options)
{
"kind" => "SecretList",
@@ -282,6 +300,115 @@ module KubernetesHelpers
}
end
+ def kube_istio_discovery_body
+ {
+ "kind" => "APIResourceList",
+ "apiVersion" => "v1",
+ "groupVersion" => "networking.istio.io/v1alpha3",
+ "resources" => [
+ {
+ "name" => "gateways",
+ "singularName" => "gateway",
+ "namespaced" => true,
+ "kind" => "Gateway",
+ "verbs" => %w[delete deletecollection get list patch create update watch],
+ "shortNames" => %w[gw],
+ "categories" => %w[istio-io networking-istio-io]
+ },
+ {
+ "name" => "serviceentries",
+ "singularName" => "serviceentry",
+ "namespaced" => true,
+ "kind" => "ServiceEntry",
+ "verbs" => %w[delete deletecollection get list patch create update watch],
+ "shortNames" => %w[se],
+ "categories" => %w[istio-io networking-istio-io]
+ },
+ {
+ "name" => "destinationrules",
+ "singularName" => "destinationrule",
+ "namespaced" => true,
+ "kind" => "DestinationRule",
+ "verbs" => %w[delete deletecollection get list patch create update watch],
+ "shortNames" => %w[dr],
+ "categories" => %w[istio-io networking-istio-io]
+ },
+ {
+ "name" => "envoyfilters",
+ "singularName" => "envoyfilter",
+ "namespaced" => true,
+ "kind" => "EnvoyFilter",
+ "verbs" => %w[delete deletecollection get list patch create update watch],
+ "categories" => %w[istio-io networking-istio-io]
+ },
+ {
+ "name" => "sidecars",
+ "singularName" => "sidecar",
+ "namespaced" => true,
+ "kind" => "Sidecar",
+ "verbs" => %w[delete deletecollection get list patch create update watch],
+ "categories" => %w[istio-io networking-istio-io]
+ },
+ {
+ "name" => "virtualservices",
+ "singularName" => "virtualservice",
+ "namespaced" => true,
+ "kind" => "VirtualService",
+ "verbs" => %w[delete deletecollection get list patch create update watch],
+ "shortNames" => %w[vs],
+ "categories" => %w[istio-io networking-istio-io]
+ }
+ ]
+ }
+ end
+
+ def kube_istio_gateway_body(name, namespace)
+ {
+ "apiVersion" => "networking.istio.io/v1alpha3",
+ "kind" => "Gateway",
+ "metadata" => {
+ "generation" => 1,
+ "labels" => {
+ "networking.knative.dev/ingress-provider" => "istio",
+ "serving.knative.dev/release" => "v0.7.0"
+ },
+ "name" => name,
+ "namespace" => namespace,
+ "selfLink" => "/apis/networking.istio.io/v1alpha3/namespaces/#{namespace}/gateways/#{name}"
+ },
+ "spec" => {
+ "selector" => {
+ "istio" => "ingressgateway"
+ },
+ "servers" => [
+ {
+ "hosts" => [
+ "*"
+ ],
+ "port" => {
+ "name" => "http",
+ "number" => 80,
+ "protocol" => "HTTP"
+ }
+ },
+ {
+ "hosts" => [
+ "*"
+ ],
+ "port" => {
+ "name" => "https",
+ "number" => 443,
+ "protocol" => "HTTPS"
+ },
+ "tls" => {
+ "mode" => "PASSTHROUGH"
+ }
+ }
+ ]
+ }
+ }
+ end
+
def kube_v1alpha1_serving_knative_discovery_body
{
"kind" => "APIResourceList",
diff --git a/spec/support/helpers/metrics_dashboard_helpers.rb b/spec/support/helpers/metrics_dashboard_helpers.rb
index 5b425d0964d..908a3e1fb09 100644
--- a/spec/support/helpers/metrics_dashboard_helpers.rb
+++ b/spec/support/helpers/metrics_dashboard_helpers.rb
@@ -29,54 +29,4 @@ module MetricsDashboardHelpers
def business_metric_title
PrometheusMetricEnums.group_details[:business][:group_title]
end
-
- shared_examples_for 'misconfigured dashboard service response' do |status_code|
- it 'returns an appropriate message and status code' do
- result = service_call
-
- expect(result.keys).to contain_exactly(:message, :http_status, :status)
- expect(result[:status]).to eq(:error)
- expect(result[:http_status]).to eq(status_code)
- end
- end
-
- shared_examples_for 'valid dashboard service response for schema' do
- it 'returns a json representation of the dashboard' do
- result = service_call
-
- expect(result.keys).to contain_exactly(:dashboard, :status)
- expect(result[:status]).to eq(:success)
-
- expect(JSON::Validator.fully_validate(dashboard_schema, result[:dashboard])).to be_empty
- end
- end
-
- shared_examples_for 'valid dashboard service response' do
- let(:dashboard_schema) { JSON.parse(fixture_file('lib/gitlab/metrics/dashboard/schemas/dashboard.json')) }
-
- it_behaves_like 'valid dashboard service response for schema'
- end
-
- shared_examples_for 'caches the unprocessed dashboard for subsequent calls' do
- it do
- expect(YAML).to receive(:safe_load).once.and_call_original
-
- described_class.new(*service_params).get_dashboard
- described_class.new(*service_params).get_dashboard
- end
- end
-
- shared_examples_for 'valid embedded dashboard service response' do
- let(:dashboard_schema) { JSON.parse(fixture_file('lib/gitlab/metrics/dashboard/schemas/embedded_dashboard.json')) }
-
- it_behaves_like 'valid dashboard service response for schema'
- end
-
- shared_examples_for 'raises error for users with insufficient permissions' do
- context 'when the user does not have sufficient access' do
- let(:user) { build(:user) }
-
- it_behaves_like 'misconfigured dashboard service response', :unauthorized
- end
- end
end
diff --git a/spec/support/helpers/query_recorder.rb b/spec/support/helpers/query_recorder.rb
index 9d47a0c23df..1d04014c9a6 100644
--- a/spec/support/helpers/query_recorder.rb
+++ b/spec/support/helpers/query_recorder.rb
@@ -16,7 +16,7 @@ module ActiveRecord
def show_backtrace(values)
Rails.logger.debug("QueryRecorder SQL: #{values[:sql]}")
- Gitlab::Profiler.clean_backtrace(caller).each { |line| Rails.logger.debug(" --> #{line}") }
+ Gitlab::BacktraceCleaner.clean_backtrace(caller).each { |line| Rails.logger.debug(" --> #{line}") }
end
def callback(name, start, finish, message_id, values)
diff --git a/spec/support/helpers/sentry_client_helpers.rb b/spec/support/helpers/sentry_client_helpers.rb
index 7476b5fb249..d473fe89fee 100644
--- a/spec/support/helpers/sentry_client_helpers.rb
+++ b/spec/support/helpers/sentry_client_helpers.rb
@@ -3,8 +3,8 @@
module SentryClientHelpers
private
- def stub_sentry_request(url, body: {}, status: 200, headers: {})
- stub_request(:get, url)
+ def stub_sentry_request(url, http_method = :get, body: {}, status: 200, headers: {})
+ stub_request(http_method, url)
.to_return(
status: status,
headers: { 'Content-Type' => 'application/json' }.merge(headers),
diff --git a/spec/support/helpers/test_env.rb b/spec/support/helpers/test_env.rb
index 6a23875f103..bd945fe6409 100644
--- a/spec/support/helpers/test_env.rb
+++ b/spec/support/helpers/test_env.rb
@@ -154,7 +154,6 @@ module TestEnv
install_dir: gitaly_dir,
version: Gitlab::GitalyClient.expected_server_version,
task: "gitlab:gitaly:install[#{install_gitaly_args}]") do
-
Gitlab::SetupHelper.create_gitaly_configuration(gitaly_dir, { 'default' => repos_path }, force: true)
start_gitaly(gitaly_dir)
end
@@ -246,8 +245,8 @@ module TestEnv
end
end
- def copy_repo(project, bare_repo:, refs:)
- target_repo_path = File.expand_path(repos_path + "/#{project.disk_path}.git")
+ def copy_repo(subject, bare_repo:, refs:)
+ target_repo_path = File.expand_path(repos_path + "/#{subject.disk_path}.git")
FileUtils.mkdir_p(target_repo_path)
FileUtils.cp_r("#{File.expand_path(bare_repo)}/.", target_repo_path)
diff --git a/spec/support/import_export/common_util.rb b/spec/support/import_export/common_util.rb
index 4e149c9fa54..72baec7bfcb 100644
--- a/spec/support/import_export/common_util.rb
+++ b/spec/support/import_export/common_util.rb
@@ -3,7 +3,9 @@
module ImportExport
module CommonUtil
def setup_symlink(tmpdir, symlink_name)
- allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(tmpdir)
+ allow_next_instance_of(Gitlab::ImportExport) do |instance|
+ allow(instance).to receive(:storage_path).and_return(tmpdir)
+ end
File.open("#{tmpdir}/test", 'w') { |file| file.write("test") }
FileUtils.ln_s("#{tmpdir}/test", "#{tmpdir}/#{symlink_name}")
diff --git a/spec/support/import_export/configuration_helper.rb b/spec/support/import_export/configuration_helper.rb
index 2e5a99bb8b2..27819b5201a 100644
--- a/spec/support/import_export/configuration_helper.rb
+++ b/spec/support/import_export/configuration_helper.rb
@@ -36,8 +36,8 @@ module ConfigurationHelper
end
def relation_class_for_name(relation_name)
- relation_name = Gitlab::ImportExport::RelationFactory.overrides[relation_name.to_sym] || relation_name
- Gitlab::ImportExport::RelationFactory.relation_class(relation_name)
+ relation_name = Gitlab::ImportExport::ProjectRelationFactory.overrides[relation_name.to_sym] || relation_name
+ Gitlab::ImportExport::ProjectRelationFactory.relation_class(relation_name)
end
def parsed_attributes(relation_name, attributes, config: Gitlab::ImportExport.config_file)
diff --git a/spec/support/matchers/eq_uri.rb b/spec/support/matchers/eq_uri.rb
new file mode 100644
index 00000000000..47b657b3fe1
--- /dev/null
+++ b/spec/support/matchers/eq_uri.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+# Assert the result matches a URI object initialized with the expectation variable.
+#
+# Success:
+# ```
+# expect(URI('www.fish.com')).to eq_uri('www.fish.com')
+# ```
+#
+# Failure:
+# ```
+# expect(URI('www.fish.com')).to eq_uri('www.dog.com')
+# ```
+#
+RSpec::Matchers.define :eq_uri do |expected|
+ match do |actual|
+ actual == URI(expected)
+ end
+end
diff --git a/spec/support/matchers/graphql_matchers.rb b/spec/support/matchers/graphql_matchers.rb
index dbf457a9200..e151a934591 100644
--- a/spec/support/matchers/graphql_matchers.rb
+++ b/spec/support/matchers/graphql_matchers.rb
@@ -8,11 +8,25 @@ end
RSpec::Matchers.define :have_graphql_fields do |*expected|
def expected_field_names
- expected.map { |name| GraphqlHelpers.fieldnamerize(name) }
+ Array.wrap(expected).map { |name| GraphqlHelpers.fieldnamerize(name) }
+ end
+
+ @allow_extra = false
+
+ chain :only do
+ @allow_extra = false
+ end
+
+ chain :at_least do
+ @allow_extra = true
end
match do |kls|
- expect(kls.fields.keys).to contain_exactly(*expected_field_names)
+ if @allow_extra
+ expect(kls.fields.keys).to include(*expected_field_names)
+ else
+ expect(kls.fields.keys).to contain_exactly(*expected_field_names)
+ end
end
failure_message do |kls|
@@ -22,7 +36,7 @@ RSpec::Matchers.define :have_graphql_fields do |*expected|
message = []
message << "is missing fields: <#{missing.inspect}>" if missing.any?
- message << "contained unexpected fields: <#{extra.inspect}>" if extra.any?
+ message << "contained unexpected fields: <#{extra.inspect}>" if extra.any? && !@allow_extra
message.join("\n")
end
diff --git a/spec/support/matchers/markdown_matchers.rb b/spec/support/matchers/markdown_matchers.rb
index 35b2993443f..103019d8dd8 100644
--- a/spec/support/matchers/markdown_matchers.rb
+++ b/spec/support/matchers/markdown_matchers.rb
@@ -10,8 +10,21 @@ module MarkdownMatchers
extend RSpec::Matchers::DSL
include Capybara::Node::Matchers
- # RelativeLinkFilter
- matcher :parse_relative_links do
+ # UploadLinkFilter
+ matcher :parse_upload_links do
+ set_default_markdown_messages
+
+ match do |actual|
+ link = actual.at_css('a:contains("Relative Upload Link")')
+ image = actual.at_css('img[alt="Relative Upload Image"]')
+
+ expect(link['href']).to eq("/#{project.full_path}/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg")
+ expect(image['data-src']).to eq("/#{project.full_path}/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg")
+ end
+ end
+
+ # RepositoryLinkFilter
+ matcher :parse_repository_links do
set_default_markdown_messages
match do |actual|
diff --git a/spec/support/migrations_helpers/prometheus_service_helpers.rb b/spec/support/migrations_helpers/prometheus_service_helpers.rb
new file mode 100644
index 00000000000..88f2f71ee1e
--- /dev/null
+++ b/spec/support/migrations_helpers/prometheus_service_helpers.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+module MigrationHelpers
+ module PrometheusServiceHelpers
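+ # Default attribute hash for a PrometheusService row; any overrides supplied
+ # in `params` are merged on top.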
+ def service_params_for(project_id, params = {})
+ {
+ project_id: project_id,
+ active: false,
+ properties: '{}',
+ type: 'PrometheusService',
+ template: false,
+ push_events: true,
+ issues_events: true,
+ merge_requests_events: true,
+ tag_push_events: true,
+ note_events: true,
+ category: 'monitoring',
+ default: false,
+ wiki_page_events: true,
+ pipeline_events: true,
+ confidential_issues_events: true,
+ commit_events: true,
+ job_events: true,
+ confidential_note_events: true,
+ deployment_events: false
+ }.merge(params)
+ end
+
+ def row_attributes(entity)
+ entity.attributes.with_indifferent_access.tap do |hash|
+ hash.merge!(hash.slice(:created_at, :updated_at).transform_values { |v| v.to_s(:db) })
+ end
+ end
+ end
+end
diff --git a/spec/support/prometheus/additional_metrics_shared_examples.rb b/spec/support/prometheus/additional_metrics_shared_examples.rb
index 4e006edb7da..3a5909cd908 100644
--- a/spec/support/prometheus/additional_metrics_shared_examples.rb
+++ b/spec/support/prometheus/additional_metrics_shared_examples.rb
@@ -14,7 +14,7 @@ RSpec.shared_examples 'additional metrics query' do
let(:client) { double('prometheus_client') }
let(:query_result) { described_class.new(client).query(*query_params) }
- let(:project) { create(:project) }
+ let(:project) { create(:project, :repository) }
let(:environment) { create(:environment, slug: 'environment-slug', project: project) }
before do
@@ -47,8 +47,7 @@ RSpec.shared_examples 'additional metrics query' do
describe 'project has Kubernetes service' do
context 'when user configured kubernetes from CI/CD > Clusters' do
- let!(:cluster) { create(:cluster, :project, :provided_by_gcp) }
- let(:project) { cluster.project }
+ let!(:cluster) { create(:cluster, :project, :provided_by_gcp, projects: [project]) }
let(:environment) { create(:environment, slug: 'environment-slug', project: project) }
let(:kube_namespace) { environment.deployment_namespace }
diff --git a/spec/support/redis/redis_shared_examples.rb b/spec/support/redis/redis_shared_examples.rb
index 97a23f02b3e..1e2d11a66cb 100644
--- a/spec/support/redis/redis_shared_examples.rb
+++ b/spec/support/redis/redis_shared_examples.rb
@@ -116,9 +116,9 @@ RSpec.shared_examples "redis_shared_examples" do
clear_pool
end
- context 'when running not on sidekiq workers' do
+ context 'when running on single-threaded runtime' do
before do
- allow(Sidekiq).to receive(:server?).and_return(false)
+ allow(Gitlab::Runtime).to receive(:multi_threaded?).and_return(false)
end
it 'instantiates a connection pool with size 5' do
@@ -128,10 +128,10 @@ RSpec.shared_examples "redis_shared_examples" do
end
end
- context 'when running on sidekiq workers' do
+ context 'when running on multi-threaded runtime' do
before do
- allow(Sidekiq).to receive(:server?).and_return(true)
- allow(Sidekiq).to receive(:options).and_return({ concurrency: 18 })
+ allow(Gitlab::Runtime).to receive(:multi_threaded?).and_return(true)
+ allow(Gitlab::Runtime).to receive(:max_threads).and_return(18)
end
it 'instantiates a connection pool with a size based on the concurrency of the worker' do
diff --git a/spec/support/shared_contexts/upload_type_check_shared_context.rb b/spec/support/shared_contexts/upload_type_check_shared_context.rb
new file mode 100644
index 00000000000..04c97500dd6
--- /dev/null
+++ b/spec/support/shared_contexts/upload_type_check_shared_context.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+# Construct an `uploader` variable that is configured to `check_upload_type`
+# with `mime_types` and `extensions`.
+shared_context 'uploader with type check' do
+ let(:uploader_class) do
+ Class.new(GitlabUploader) do
+ include UploadTypeCheck::Concern
+ storage :file
+ end
+ end
+
+ let(:mime_types) { nil }
+ let(:extensions) { nil }
+ let(:uploader) do
+ uploader_class.class_exec(mime_types, extensions) do |mime_types, extensions|
+ check_upload_type mime_types: mime_types, extensions: extensions
+ end
+ uploader_class.new(build_stubbed(:user))
+ end
+end
+
+shared_context 'stubbed MimeMagic mime type detection' do
+ let(:mime_type) { '' }
+ let(:magic_mime) { mime_type }
+ let(:ext_mime) { mime_type }
+ before do
+ magic_mime_obj = MimeMagic.new(magic_mime)
+ ext_mime_obj = MimeMagic.new(ext_mime)
+ allow(MimeMagic).to receive(:by_magic).with(anything).and_return(magic_mime_obj)
+ allow(MimeMagic).to receive(:by_path).with(anything).and_return(ext_mime_obj)
+ end
+end
diff --git a/spec/support/shared_examples/controllers/error_tracking_shared_examples.rb b/spec/support/shared_examples/controllers/error_tracking_shared_examples.rb
new file mode 100644
index 00000000000..71251f6ab51
--- /dev/null
+++ b/spec/support/shared_examples/controllers/error_tracking_shared_examples.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
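+# Expects the including spec to have issued a request so that `response` is
+# available; asserts the Gitlab::PollingInterval header is set to '1000'.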
+shared_examples 'sets the polling header' do
+ subject { response.headers[Gitlab::PollingInterval::HEADER_NAME] }
+
+ it { is_expected.to eq '1000' }
+end
diff --git a/spec/support/shared_examples/email_shared_examples.rb b/spec/support/shared_examples/email_shared_examples.rb
new file mode 100644
index 00000000000..634a2504766
--- /dev/null
+++ b/spec/support/shared_examples/email_shared_examples.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
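+# Expects `receiver` and `handler` to be defined by the including spec.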
+shared_examples_for 'correctly finds the mail key' do
+ specify do
+ expect(Gitlab::Email::Handler).to receive(:for).with(an_instance_of(Mail::Message), 'gitlabhq/gitlabhq+auth_token').and_return(handler)
+
+ receiver.execute
+ end
+end
diff --git a/spec/support/shared_examples/features/issuables_user_dropdown_behaviors_shared_examples.rb b/spec/support/shared_examples/features/issuables_user_dropdown_behaviors_shared_examples.rb
index 63ed37cde03..3da80541072 100644
--- a/spec/support/shared_examples/features/issuables_user_dropdown_behaviors_shared_examples.rb
+++ b/spec/support/shared_examples/features/issuables_user_dropdown_behaviors_shared_examples.rb
@@ -13,7 +13,7 @@ shared_examples 'issuable user dropdown behaviors' do
it 'only includes members of the project/group' do
visit issuables_path
- filtered_search.set("#{dropdown}:")
+ filtered_search.set("#{dropdown}=")
expect(find("#js-dropdown-#{dropdown} .filter-dropdown")).to have_content(user_in_dropdown.name)
expect(find("#js-dropdown-#{dropdown} .filter-dropdown")).not_to have_content(user_not_in_dropdown.name)
diff --git a/spec/support/shared_examples/features/master_manages_access_requests_shared_example.rb b/spec/support/shared_examples/features/master_manages_access_requests_shared_example.rb
index 8e1d24c4be2..98010150e65 100644
--- a/spec/support/shared_examples/features/master_manages_access_requests_shared_example.rb
+++ b/spec/support/shared_examples/features/master_manages_access_requests_shared_example.rb
@@ -25,7 +25,7 @@ RSpec.shared_examples 'Maintainer manages access requests' do
expect_no_visible_access_request(entity, user)
- page.within('.members-list') do
+ page.within('[data-qa-selector="members_list"]') do
expect(page).to have_content user.name
end
end
diff --git a/spec/support/shared_examples/graphql/connection_paged_nodes.rb b/spec/support/shared_examples/graphql/connection_paged_nodes.rb
index 830d2d2d4b1..93de7f619f7 100644
--- a/spec/support/shared_examples/graphql/connection_paged_nodes.rb
+++ b/spec/support/shared_examples/graphql/connection_paged_nodes.rb
@@ -2,7 +2,7 @@
RSpec.shared_examples 'connection with paged nodes' do
it 'returns the collection limited to max page size' do
- expect(paged_nodes.size).to eq(3)
+ expect(paged_nodes.size).to eq(paged_nodes_size)
end
it 'is a loaded memoized array' do
@@ -22,7 +22,7 @@ RSpec.shared_examples 'connection with paged nodes' do
let(:arguments) { { last: 2 } }
it 'returns only the last elements' do
- expect(paged_nodes).to contain_exactly(all_nodes[3], all_nodes[4])
+ expect(paged_nodes).to contain_exactly(*all_nodes.last(2))
end
end
end
diff --git a/spec/support/shared_examples/graphql/failure_to_find_anything.rb b/spec/support/shared_examples/graphql/failure_to_find_anything.rb
new file mode 100644
index 00000000000..b2533c992c1
--- /dev/null
+++ b/spec/support/shared_examples/graphql/failure_to_find_anything.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+# Shared example for valid queries that are expected to return nil.
+# Requires the following let bindings to be defined:
+# - post_query: action to send the query
+# - path: array of keys from query root to the result
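+#
+# A hypothetical usage sketch (the names below are illustrative only):
+#
+#   it_behaves_like 'a failure to find anything' do
+#     let(:path) { %i[project issue] }
+#     let(:post_query) { post_graphql(query, current_user: current_user) }
+#   end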
+shared_examples 'a failure to find anything' do
+ it 'finds nothing' do
+ post_query
+
+ data = graphql_data.dig(*path)
+
+ expect(data).to be_nil
+ end
+end
diff --git a/spec/support/shared_examples/lib/gitlab/import_export/import_failure_service_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/import_export/import_failure_service_shared_examples.rb
new file mode 100644
index 00000000000..691564120cc
--- /dev/null
+++ b/spec/support/shared_examples/lib/gitlab/import_export/import_failure_service_shared_examples.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
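+# Expects `subject` (the import failure service), `importable`, `relation_key`,
+# `relation_index`, `retry_count`, `exception`, `standard_error_message`,
+# `correlation_id` and `log_import_failure` to be defined by the including spec.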
+shared_examples 'log import failure' do |importable_column|
+ it 'tracks error' do
+ extra = {
+ relation_key: relation_key,
+ relation_index: relation_index,
+ retry_count: retry_count
+ }
+ extra[importable_column] = importable.id
+
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(exception, extra)
+
+ subject.log_import_failure(relation_key, relation_index, exception, retry_count)
+ end
+
+ it 'saves data to ImportFailure' do
+ log_import_failure
+
+ import_failure = ImportFailure.last
+
+ aggregate_failures do
+ expect(import_failure[importable_column]).to eq(importable.id)
+ expect(import_failure.relation_key).to eq(relation_key)
+ expect(import_failure.relation_index).to eq(relation_index)
+ expect(import_failure.exception_class).to eq('StandardError')
+ expect(import_failure.exception_message).to eq(standard_error_message)
+ expect(import_failure.correlation_id_value).to eq(correlation_id)
+ expect(import_failure.retry_count).to eq(retry_count)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/lib/sentry/client_shared_examples.rb b/spec/support/shared_examples/lib/sentry/client_shared_examples.rb
index 76b71ebd3c5..4221708b55c 100644
--- a/spec/support/shared_examples/lib/sentry/client_shared_examples.rb
+++ b/spec/support/shared_examples/lib/sentry/client_shared_examples.rb
@@ -10,7 +10,7 @@ RSpec.shared_examples 'calls sentry api' do
end
# Requires sentry_api_url and subject to be defined
-RSpec.shared_examples 'no Sentry redirects' do
+RSpec.shared_examples 'no Sentry redirects' do |http_method|
let(:redirect_to) { 'https://redirected.example.com' }
let(:other_url) { 'https://other.example.org' }
@@ -19,6 +19,7 @@ RSpec.shared_examples 'no Sentry redirects' do
let!(:redirect_req_stub) do
stub_sentry_request(
sentry_api_url,
+ http_method || :get,
status: 302,
headers: { location: redirect_to }
)
@@ -31,7 +32,7 @@ RSpec.shared_examples 'no Sentry redirects' do
end
end
-RSpec.shared_examples 'maps Sentry exceptions' do
+RSpec.shared_examples 'maps Sentry exceptions' do |http_method|
exceptions = {
Gitlab::HTTP::Error => 'Error when connecting to Sentry',
Net::OpenTimeout => 'Connection to Sentry timed out',
@@ -44,7 +45,10 @@ RSpec.shared_examples 'maps Sentry exceptions' do
exceptions.each do |exception, message|
context "#{exception}" do
before do
- stub_request(:get, sentry_request_url).to_raise(exception)
+ stub_request(
+ http_method || :get,
+ sentry_request_url
+ ).to_raise(exception)
end
it do
diff --git a/spec/support/shared_examples/logging_application_context_shared_examples.rb b/spec/support/shared_examples/logging_application_context_shared_examples.rb
new file mode 100644
index 00000000000..038ede884c8
--- /dev/null
+++ b/spec/support/shared_examples/logging_application_context_shared_examples.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
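+# Expects `subject` and `expected_params` to be defined by the including spec.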
+RSpec.shared_examples 'storing arguments in the application context' do
+ around do |example|
+ Labkit::Context.with_context { example.run }
+ end
+
+ it 'places the expected params in the application context' do
+ # Stub the clearing of the context so we can validate it later.
+ # The `around` block above makes sure it still gets cleaned up afterwards.
+ allow(Labkit::Context).to receive(:pop)
+
+ subject
+
+ Labkit::Context.with_context do |context|
+ expect(context.to_h)
+ .to include(log_hash(expected_params))
+ end
+ end
+
+ def log_hash(hash)
+ hash.transform_keys! { |key| "meta.#{key}" }
+ end
+end
diff --git a/spec/support/shared_examples/merge_requests_rendering_a_single_diff_version.rb b/spec/support/shared_examples/merge_requests_rendering_a_single_diff_version.rb
deleted file mode 100644
index 18d025a4b07..00000000000
--- a/spec/support/shared_examples/merge_requests_rendering_a_single_diff_version.rb
+++ /dev/null
@@ -1,21 +0,0 @@
-# frozen_string_literal: true
-
-# This pending test can be removed when `single_mr_diff_view` is enabled by default
-# disabling the feature flag above is then not needed anymore.
-RSpec.shared_examples 'rendering a single diff version' do |attribute|
- before do
- stub_feature_flags(diffs_batch_load: false)
- end
-
- pending 'allows editing diff settings single_mr_diff_view is enabled' do
- project = create(:project, :repository)
- user = project.creator
- merge_request = create(:merge_request, source_project: project)
- stub_feature_flags(single_mr_diff_view: true)
- sign_in(user)
-
- visit(diffs_project_merge_request_path(project, merge_request))
-
- expect(page).to have_selector('.js-show-diff-settings')
- end
-end
diff --git a/spec/support/shared_examples/migration_helpers_examples.rb b/spec/support/shared_examples/migration_helpers_examples.rb
new file mode 100644
index 00000000000..3587297a2d7
--- /dev/null
+++ b/spec/support/shared_examples/migration_helpers_examples.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
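+# Both examples below expect `model`, `args` and `options` to be defined by the
+# including spec; they exercise add_concurrent_foreign_key with the given option.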
+shared_examples 'skips validation' do |validation_option|
+ it 'skips validation' do
+ expect(model).not_to receive(:disable_statement_timeout)
+ expect(model).to receive(:execute).with(/ADD CONSTRAINT/)
+ expect(model).not_to receive(:execute).with(/VALIDATE CONSTRAINT/)
+
+ model.add_concurrent_foreign_key(*args, **options.merge(validation_option))
+ end
+end
+
+shared_examples 'performs validation' do |validation_option|
+ it 'performs validation' do
+ expect(model).to receive(:disable_statement_timeout).and_call_original
+ expect(model).to receive(:execute).with(/statement_timeout/)
+ expect(model).to receive(:execute).ordered.with(/NOT VALID/)
+ expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
+ expect(model).to receive(:execute).with(/RESET ALL/)
+
+ model.add_concurrent_foreign_key(*args, **options.merge(validation_option))
+ end
+end
diff --git a/spec/support/shared_examples/models/cluster_application_initial_status.rb b/spec/support/shared_examples/models/cluster_application_initial_status.rb
index 9775d87953c..030974c9aa0 100644
--- a/spec/support/shared_examples/models/cluster_application_initial_status.rb
+++ b/spec/support/shared_examples/models/cluster_application_initial_status.rb
@@ -6,8 +6,30 @@ shared_examples 'cluster application initial status specs' do
subject { described_class.new(cluster: cluster) }
+ context 'local tiller feature flag is disabled' do
+ before do
+ stub_feature_flags(managed_apps_local_tiller: false)
+ end
+
+ it 'sets a default status' do
+ expect(subject.status_name).to be(:not_installable)
+ end
+ end
+
+ context 'local tiller feature flag is enabled' do
+ before do
+ stub_feature_flags(managed_apps_local_tiller: true)
+ end
+
+ it 'sets a default status' do
+ expect(subject.status_name).to be(:installable)
+ end
+ end
+
context 'when application helm is scheduled' do
before do
+ stub_feature_flags(managed_apps_local_tiller: false)
+
create(:clusters_applications_helm, :scheduled, cluster: cluster)
end
@@ -16,7 +38,7 @@ shared_examples 'cluster application initial status specs' do
end
end
- context 'when application is scheduled' do
+ context 'when application helm is installed' do
before do
create(:clusters_applications_helm, :installed, cluster: cluster)
end
diff --git a/spec/support/shared_examples/models/cluster_application_status_shared_examples.rb b/spec/support/shared_examples/models/cluster_application_status_shared_examples.rb
index a6653f89377..4bca37a4cd0 100644
--- a/spec/support/shared_examples/models/cluster_application_status_shared_examples.rb
+++ b/spec/support/shared_examples/models/cluster_application_status_shared_examples.rb
@@ -1,16 +1,6 @@
# frozen_string_literal: true
shared_examples 'cluster application status specs' do |application_name|
- describe '#status' do
- let(:cluster) { create(:cluster, :provided_by_gcp) }
-
- subject { described_class.new(cluster: cluster) }
-
- it 'sets a default status' do
- expect(subject.status_name).to be(:not_installable)
- end
- end
-
describe '#status_states' do
let(:cluster) { create(:cluster, :provided_by_gcp) }
diff --git a/spec/support/shared_examples/models/diff_note_after_commit_shared_examples.rb b/spec/support/shared_examples/models/diff_note_after_commit_shared_examples.rb
new file mode 100644
index 00000000000..835d2dfe757
--- /dev/null
+++ b/spec/support/shared_examples/models/diff_note_after_commit_shared_examples.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+shared_examples 'a valid diff note with after commit callback' do
+ context 'when diff file is fetched from repository' do
+ before do
+ allow_any_instance_of(::Gitlab::Diff::Position).to receive(:diff_file).with(project.repository).and_return(diff_file_from_repository)
+ end
+
+ context 'when diff_line is not found' do
+ it 'raises an error' do
+ allow(diff_file_from_repository).to receive(:line_for_position).with(position).and_return(nil)
+
+ expect { subject.save }.to raise_error(::DiffNote::NoteDiffFileCreationError,
+ "Failed to find diff line for: #{diff_file_from_repository.file_path}, "\
+ "old_line: #{position.old_line}"\
+ ", new_line: #{position.new_line}")
+ end
+ end
+
+ context 'when diff_line is found' do
+ before do
+ allow(diff_file_from_repository).to receive(:line_for_position).with(position).and_return(diff_line)
+ end
+
+ it 'falls back to fetching the file from the repository' do
+ expect_any_instance_of(::Gitlab::Diff::Position).to receive(:diff_file).with(project.repository)
+
+ subject.save
+ end
+
+ it 'creates a diff note file' do
+ subject.save
+
+ expect(subject.reload.note_diff_file).to be_present
+ end
+ end
+ end
+
+ context 'when diff file is not found in repository' do
+ it 'raises an error' do
+ allow_any_instance_of(::Gitlab::Diff::Position).to receive(:diff_file).with(project.repository).and_return(nil)
+
+ expect { subject.save }.to raise_error(::DiffNote::NoteDiffFileCreationError, 'Failed to find diff file')
+ end
+ end
+end
diff --git a/spec/support/shared_examples/pages_size_limit_shared_examples.rb b/spec/support/shared_examples/pages_size_limit_shared_examples.rb
new file mode 100644
index 00000000000..c1e27194738
--- /dev/null
+++ b/spec/support/shared_examples/pages_size_limit_shared_examples.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
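+# Expects `metadata`, `deploy_status`, `project` and a `subject` (the update
+# service under test) to be defined by the including spec.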
+shared_examples 'pages size limit is' do |size_limit|
+ context "when size is below the limit" do
+ before do
+ allow(metadata).to receive(:total_size).and_return(size_limit - 1.megabyte)
+ end
+
+ it 'updates pages correctly' do
+ subject.execute
+
+ expect(deploy_status.description).not_to be_present
+ expect(project.pages_metadatum).to be_deployed
+ end
+ end
+
+ context "when size is above the limit" do
+ before do
+ allow(metadata).to receive(:total_size).and_return(size_limit + 1.megabyte)
+ end
+
+ it 'limits the maximum size of gitlab pages' do
+ subject.execute
+
+ expect(deploy_status.description)
+ .to match(/artifacts for pages are too large/)
+ expect(deploy_status).to be_script_failure
+ end
+ end
+end
diff --git a/spec/support/shared_examples/requests/api/diff_discussions.rb b/spec/support/shared_examples/requests/api/diff_discussions.rb
index 76c6c93964a..a7774d17d3c 100644
--- a/spec/support/shared_examples/requests/api/diff_discussions.rb
+++ b/spec/support/shared_examples/requests/api/diff_discussions.rb
@@ -38,13 +38,24 @@ shared_examples 'diff discussions API' do |parent_type, noteable_type, id_name|
expect(json_response['notes'].first['position']).to eq(position.stringify_keys)
end
- it "returns a 400 bad request error when position is invalid" do
- position = diff_note.position.to_h.merge(new_line: '100000')
+ context "when position is invalid" do
+ it "returns a 400 bad request error when position is not plausible" do
+ position = diff_note.position.to_h.merge(new_line: '100000')
- post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/discussions", user),
- params: { body: 'hi!', position: position }
+ post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/discussions", user),
+ params: { body: 'hi!', position: position }
+
+ expect(response).to have_gitlab_http_status(400)
+ end
+
+ it "returns a 400 bad request error when the position is not valid for this discussion" do
+ position = diff_note.position.to_h.merge(new_line: '588440f66559714280628a4f9799f0c4eb880a4a')
+
+ post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/discussions", user),
+ params: { body: 'hi!', position: position }
- expect(response).to have_gitlab_http_status(400)
+ expect(response).to have_gitlab_http_status(400)
+ end
end
end
diff --git a/spec/support/shared_examples/requests/api/status_shared_examples.rb b/spec/support/shared_examples/requests/api/status_shared_examples.rb
index eebed7e42c1..ed9964fa108 100644
--- a/spec/support/shared_examples/requests/api/status_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/status_shared_examples.rb
@@ -59,8 +59,9 @@ shared_examples_for '412 response' do
delete request, params: params, headers: { 'HTTP_IF_UNMODIFIED_SINCE' => '1990-01-12T00:00:48-0600' }
end
- it 'returns 412' do
+ it 'returns 412 with a JSON error' do
expect(response).to have_gitlab_http_status(412)
+ expect(json_response).to eq('message' => '412 Precondition Failed')
end
end
@@ -69,8 +70,9 @@ shared_examples_for '412 response' do
delete request, params: params, headers: { 'HTTP_IF_UNMODIFIED_SINCE' => Time.now }
end
- it 'returns accepted' do
+ it 'returns 204 with an empty body' do
expect(response).to have_gitlab_http_status(success_status)
+ expect(response.body).to eq('') if success_status == 204
end
end
end
diff --git a/spec/support/shared_examples/requests/self_monitoring_shared_examples.rb b/spec/support/shared_examples/requests/self_monitoring_shared_examples.rb
new file mode 100644
index 00000000000..949aa079435
--- /dev/null
+++ b/spec/support/shared_examples/requests/self_monitoring_shared_examples.rb
@@ -0,0 +1,132 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'not accessible if feature flag is disabled' do
+ before do
+ stub_feature_flags(self_monitoring_project: false)
+ end
+
+ it 'returns not_implemented' do
+ subject
+
+ aggregate_failures do
+ expect(response).to have_gitlab_http_status(:not_implemented)
+ expect(json_response).to eq(
+ 'message' => _('Self-monitoring is not enabled on this GitLab server, contact your administrator.'),
+ 'documentation_url' => help_page_path('administration/monitoring/gitlab_instance_administration_project/index')
+ )
+ end
+ end
+end
+
+RSpec.shared_examples 'not accessible to non-admin users' do
+ context 'with unauthenticated user' do
+ it 'redirects to signin page' do
+ subject
+
+ expect(response).to redirect_to(new_user_session_path)
+ end
+ end
+
+ context 'with authenticated non-admin user' do
+ before do
+ login_as(create(:user))
+ end
+
+ it 'returns status not_found' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+end
+
+# Requires subject, worker_class and status_api to be defined
+# let(:worker_class) { SelfMonitoringProjectCreateWorker }
+# let(:status_api) { status_create_self_monitoring_project_admin_application_settings_path }
+# subject { post create_self_monitoring_project_admin_application_settings_path }
+RSpec.shared_examples 'triggers async worker, returns sidekiq job_id with response accepted' do
+ it 'returns sidekiq job_id of expected length' do
+ subject
+
+ job_id = json_response['job_id']
+
+ aggregate_failures do
+ expect(job_id).to be_present
+ expect(job_id.length).to be <= Admin::ApplicationSettingsController::PARAM_JOB_ID_MAX_SIZE
+ end
+ end
+
+ it 'triggers async worker' do
+ expect(worker_class).to receive(:perform_async)
+
+ subject
+ end
+
+ it 'returns accepted response' do
+ subject
+
+ aggregate_failures do
+ expect(response).to have_gitlab_http_status(:accepted)
+ expect(json_response.keys).to contain_exactly('job_id', 'monitor_status')
+ expect(json_response).to include(
+ 'monitor_status' => status_api
+ )
+ end
+ end
+
+ it 'returns job_id' do
+ fake_job_id = 'b5b28910d97563e58c2fe55f'
+ allow(worker_class).to receive(:perform_async).and_return(fake_job_id)
+
+ subject
+
+ expect(json_response).to include('job_id' => fake_job_id)
+ end
+end
+
+# Requires job_id and subject to be defined
+# let(:job_id) { 'job_id' }
+# subject do
+# get status_create_self_monitoring_project_admin_application_settings_path,
+# params: { job_id: job_id }
+# end
+RSpec.shared_examples 'handles invalid job_id' do
+ context 'with invalid job_id' do
+ let(:job_id) { 'a' * 51 }
+
+ it 'returns bad_request if job_id is too long' do
+ subject
+
+ aggregate_failures do
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response).to eq('message' => 'Parameter "job_id" cannot ' \
+ "exceed length of #{Admin::ApplicationSettingsController::PARAM_JOB_ID_MAX_SIZE}")
+ end
+ end
+ end
+end
+
+# Requires in_progress_message and subject to be defined
+# let(:in_progress_message) { 'Job to create self-monitoring project is in progress' }
+# subject do
+# get status_create_self_monitoring_project_admin_application_settings_path,
+# params: { job_id: job_id }
+# end
+RSpec.shared_examples 'sets polling header and returns accepted' do
+ it 'sets polling header' do
+ expect(::Gitlab::PollingInterval).to receive(:set_header)
+
+ subject
+ end
+
+ it 'returns accepted' do
+ subject
+
+ aggregate_failures do
+ expect(response).to have_gitlab_http_status(:accepted)
+ expect(json_response).to eq(
+ 'message' => in_progress_message
+ )
+ end
+ end
+end
diff --git a/spec/support/shared_examples/services/boards/boards_list_service.rb b/spec/support/shared_examples/services/boards/boards_list_service.rb
index 25dc2e04942..18d45ee324a 100644
--- a/spec/support/shared_examples/services/boards/boards_list_service.rb
+++ b/spec/support/shared_examples/services/boards/boards_list_service.rb
@@ -29,3 +29,20 @@ shared_examples 'boards list service' do
expect(service.execute).to eq [board]
end
end
+
+shared_examples 'multiple boards list service' do
+ let(:service) { described_class.new(parent, double) }
+ let!(:board_B) { create(:board, resource_parent: parent, name: 'B-board') }
+ let!(:board_c) { create(:board, resource_parent: parent, name: 'c-board') }
+ let!(:board_a) { create(:board, resource_parent: parent, name: 'a-board') }
+
+ describe '#execute' do
+ it 'returns all issue boards' do
+ expect(service.execute.size).to eq(3)
+ end
+
+ it 'returns boards ordered by name' do
+ expect(service.execute).to eq [board_a, board_B, board_c]
+ end
+ end
+end
diff --git a/spec/support/shared_examples/services/metrics/dashboard_shared_examples.rb b/spec/support/shared_examples/services/metrics/dashboard_shared_examples.rb
new file mode 100644
index 00000000000..30d91346df3
--- /dev/null
+++ b/spec/support/shared_examples/services/metrics/dashboard_shared_examples.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
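+# Most examples below expect `service_call` to be defined by the including spec;
+# 'caches the unprocessed dashboard for subsequent calls' expects `service_params` instead.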
+shared_examples_for 'misconfigured dashboard service response' do |status_code, message = nil|
+ it 'returns an appropriate message and status code', :aggregate_failures do
+ result = service_call
+
+ expect(result.keys).to contain_exactly(:message, :http_status, :status)
+ expect(result[:status]).to eq(:error)
+ expect(result[:http_status]).to eq(status_code)
+ expect(result[:message]).to eq(message) if message
+ end
+end
+
+shared_examples_for 'valid dashboard service response for schema' do
+ it 'returns a json representation of the dashboard' do
+ result = service_call
+
+ expect(result.keys).to contain_exactly(:dashboard, :status)
+ expect(result[:status]).to eq(:success)
+
+ expect(JSON::Validator.fully_validate(dashboard_schema, result[:dashboard])).to be_empty
+ end
+end
+
+shared_examples_for 'valid dashboard service response' do
+ let(:dashboard_schema) { JSON.parse(fixture_file('lib/gitlab/metrics/dashboard/schemas/dashboard.json')) }
+
+ it_behaves_like 'valid dashboard service response for schema'
+end
+
+shared_examples_for 'caches the unprocessed dashboard for subsequent calls' do
+ it do
+ expect(YAML).to receive(:safe_load).once.and_call_original
+
+ described_class.new(*service_params).get_dashboard
+ described_class.new(*service_params).get_dashboard
+ end
+end
+
+shared_examples_for 'valid embedded dashboard service response' do
+ let(:dashboard_schema) { JSON.parse(fixture_file('lib/gitlab/metrics/dashboard/schemas/embedded_dashboard.json')) }
+
+ it_behaves_like 'valid dashboard service response for schema'
+end
+
+shared_examples_for 'raises error for users with insufficient permissions' do
+ context 'when the user does not have sufficient access' do
+ let(:user) { build(:user) }
+
+ it_behaves_like 'misconfigured dashboard service response', :unauthorized
+ end
+end
diff --git a/spec/support/shared_examples/unique_ip_check_shared_examples.rb b/spec/support/shared_examples/unique_ip_check_shared_examples.rb
index 65d86ddee9e..9bdfa762fc8 100644
--- a/spec/support/shared_examples/unique_ip_check_shared_examples.rb
+++ b/spec/support/shared_examples/unique_ip_check_shared_examples.rb
@@ -2,6 +2,8 @@
shared_context 'unique ips sign in limit' do
include StubENV
+ let(:request_context) { Gitlab::RequestContext.instance }
+
before do
Gitlab::Redis::Cache.with(&:flushall)
Gitlab::Redis::Queues.with(&:flushall)
@@ -15,10 +17,13 @@ shared_context 'unique ips sign in limit' do
unique_ips_limit_enabled: true,
unique_ips_limit_time_window: 10000
)
+
+ # Make sure we're working with the same request context everywhere
+ allow(Gitlab::RequestContext).to receive(:instance).and_return(request_context)
end
def change_ip(ip)
- allow(Gitlab::RequestContext).to receive(:client_ip).and_return(ip)
+ allow(request_context).to receive(:client_ip).and_return(ip)
end
def request_from_ip(ip)
diff --git a/spec/support/shared_examples/uploaders/upload_type_shared_examples.rb b/spec/support/shared_examples/uploaders/upload_type_shared_examples.rb
new file mode 100644
index 00000000000..91d2526cde2
--- /dev/null
+++ b/spec/support/shared_examples/uploaders/upload_type_shared_examples.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
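+# Calls the uploader's content-type check with a stubbed file so the shared
+# examples below can exercise it directly. Expects `uploader` to be defined by
+# the including spec.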
+def check_content_matches_extension!(file = double(read: nil, path: ''))
+ magic_file = UploadTypeCheck::MagicFile.new(file)
+ uploader.check_content_matches_extension!(magic_file)
+end
+
+shared_examples 'upload passes content type check' do
+ it 'does not raise error' do
+ expect { check_content_matches_extension! }.not_to raise_error
+ end
+end
+
+shared_examples 'upload fails content type check' do
+ it 'raises error' do
+ expect { check_content_matches_extension! }.to raise_error(CarrierWave::IntegrityError)
+ end
+end
+
+def upload_type_checked_filenames(filenames)
+ Array(filenames).each do |filename|
+ # Feed the uploader "some" content.
+ path = File.join('spec', 'fixtures', 'dk.png')
+ file = File.new(path, 'r')
+ # Rename the file to the filename under test.
+ allow(file).to receive(:path).and_return(filename)
+
+ # Force the content type to match the extension type.
+ mime_type = MimeMagic.by_path(filename)
+ allow(MimeMagic).to receive(:by_magic).and_return(mime_type)
+
+ uploaded_file = Rack::Test::UploadedFile.new(file, original_filename: filename)
+ uploader.cache!(uploaded_file)
+ end
+end
+
+def upload_type_checked_fixtures(upload_fixtures)
+ upload_fixtures = Array(upload_fixtures)
+ upload_fixtures.each do |upload_fixture|
+ path = File.join('spec', 'fixtures', upload_fixture)
+ uploader.cache!(fixture_file_upload(path))
+ end
+end
+
+shared_examples 'type checked uploads' do |upload_fixtures = nil, filenames: nil|
+ it 'checks the upload type' do
+ upload_fixtures = Array(upload_fixtures)
+ filenames = Array(filenames)
+
+ times = upload_fixtures.length + filenames.length
+ expect(uploader).to receive(:check_content_matches_extension!).exactly(times).times
+
+ upload_type_checked_fixtures(upload_fixtures) unless upload_fixtures.empty?
+ upload_type_checked_filenames(filenames) unless filenames.empty?
+ end
+end
+
+shared_examples 'skipped type checked uploads' do |upload_fixtures = nil, filenames: nil|
+ it 'skips the type check' do
+ expect(uploader).not_to receive(:check_content_matches_extension!)
+
+ upload_type_checked_fixtures(upload_fixtures) if upload_fixtures
+ upload_type_checked_filenames(filenames) if filenames
+ end
+end
diff --git a/spec/support/shared_examples/workers/concerns/reenqueuer_shared_examples.rb b/spec/support/shared_examples/workers/concerns/reenqueuer_shared_examples.rb
new file mode 100644
index 00000000000..7dffbb04fdc
--- /dev/null
+++ b/spec/support/shared_examples/workers/concerns/reenqueuer_shared_examples.rb
@@ -0,0 +1,116 @@
+# frozen_string_literal: true
+
+# Expects `worker_class` to be defined
+shared_examples_for 'reenqueuer' do
+ subject(:job) { worker_class.new }
+
+ before do
+ allow(job).to receive(:sleep) # faster tests
+ end
+
+ it 'implements lease_timeout' do
+ expect(job.lease_timeout).to be_a(ActiveSupport::Duration)
+ end
+
+ describe '#perform' do
+ it 'tries to obtain a lease' do
+ expect_to_obtain_exclusive_lease(job.lease_key)
+
+ job.perform
+ end
+ end
+end
+
+# Example usage:
+#
+# it_behaves_like 'it is rate limited to 1 call per', 5.seconds do
+# subject { described_class.new }
+# let(:rate_limited_method) { subject.perform }
+# end
+#
+shared_examples_for 'it is rate limited to 1 call per' do |minimum_duration|
+ before do
+ # Allow Timecop freeze and travel without the block form
+ Timecop.safe_mode = false
+ Timecop.freeze
+
+ time_travel_during_rate_limited_method(actual_duration)
+ end
+
+ after do
+ Timecop.return
+ Timecop.safe_mode = true
+ end
+
+ context 'when the work finishes in 0 seconds' do
+ let(:actual_duration) { 0 }
+
+ it 'sleeps exactly the minimum duration' do
+ expect(subject).to receive(:sleep).with(a_value_within(0.01).of(minimum_duration))
+
+ rate_limited_method
+ end
+ end
+
+ context 'when the work finishes in 10% of minimum duration' do
+ let(:actual_duration) { 0.1 * minimum_duration }
+
+ it 'sleeps 90% of minimum duration' do
+ expect(subject).to receive(:sleep).with(a_value_within(0.01).of(0.9 * minimum_duration))
+
+ rate_limited_method
+ end
+ end
+
+ context 'when the work finishes in 90% of minimum duration' do
+ let(:actual_duration) { 0.9 * minimum_duration }
+
+ it 'sleeps 10% of minimum duration' do
+ expect(subject).to receive(:sleep).with(a_value_within(0.01).of(0.1 * minimum_duration))
+
+ rate_limited_method
+ end
+ end
+
+ context 'when the work finishes exactly at minimum duration' do
+ let(:actual_duration) { minimum_duration }
+
+ it 'does not sleep' do
+ expect(subject).not_to receive(:sleep)
+
+ rate_limited_method
+ end
+ end
+
+ context 'when the work takes 10% longer than minimum duration' do
+ let(:actual_duration) { 1.1 * minimum_duration }
+
+ it 'does not sleep' do
+ expect(subject).not_to receive(:sleep)
+
+ rate_limited_method
+ end
+ end
+
+ context 'when the work takes twice as long as minimum duration' do
+ let(:actual_duration) { 2 * minimum_duration }
+
+ it 'does not sleep' do
+ expect(subject).not_to receive(:sleep)
+
+ rate_limited_method
+ end
+ end
+
+ def time_travel_during_rate_limited_method(actual_duration)
+ # Save the original implementation of ensure_minimum_duration
+ original_ensure_minimum_duration = subject.method(:ensure_minimum_duration)
+
+ allow(subject).to receive(:ensure_minimum_duration) do |minimum_duration, &block|
+ original_ensure_minimum_duration.call(minimum_duration) do
+ # Time travel inside the block passed to ensure_minimum_duration
+ Timecop.travel(actual_duration) if actual_duration && actual_duration > 0
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/workers/self_monitoring_shared_examples.rb b/spec/support/shared_examples/workers/self_monitoring_shared_examples.rb
new file mode 100644
index 00000000000..89c0841fbd6
--- /dev/null
+++ b/spec/support/shared_examples/workers/self_monitoring_shared_examples.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+# This shared example requires the following variables:
+# let(:service_class) { Gitlab::DatabaseImporters::SelfMonitoring::Project::DeleteService }
+# let(:service) { instance_double(service_class) }
+RSpec.shared_examples 'executes service' do
+ before do
+ allow(service_class).to receive(:new) { service }
+ end
+
+ it 'runs the service' do
+ expect(service).to receive(:execute)
+
+ subject.perform
+ end
+end
+
+RSpec.shared_examples 'returns in_progress based on Sidekiq::Status' do
+ it 'returns true when job is enqueued' do
+ jid = described_class.perform_async
+
+ expect(described_class.in_progress?(jid)).to eq(true)
+ end
+
+ it 'returns false when job does not exist' do
+ expect(described_class.in_progress?('fake_jid')).to eq(false)
+ end
+end
diff --git a/spec/tasks/gitlab/generate_sample_prometheus_data_spec.rb b/spec/tasks/gitlab/generate_sample_prometheus_data_spec.rb
index 72e61f5c524..7620047624a 100644
--- a/spec/tasks/gitlab/generate_sample_prometheus_data_spec.rb
+++ b/spec/tasks/gitlab/generate_sample_prometheus_data_spec.rb
@@ -17,7 +17,7 @@ describe 'gitlab:generate_sample_prometheus_data rake task' do
it 'creates the file correctly' do
Rake.application.rake_require 'tasks/gitlab/generate_sample_prometheus_data'
allow(Environment).to receive(:find).and_return(environment)
- allow(environment).to receive_message_chain(:prometheus_adapter, :prometheus_client, :query_range) { sample_query_result }
+ allow(environment).to receive_message_chain(:prometheus_adapter, :prometheus_client, :query_range) { sample_query_result[30] }
run_rake_task('gitlab:generate_sample_prometheus_data', [environment.id])
expect(File.exist?(sample_query_file)).to be true
diff --git a/spec/tasks/gitlab/import_export/import_rake_spec.rb b/spec/tasks/gitlab/import_export/import_rake_spec.rb
index 18b89912b9f..b824ede03b2 100644
--- a/spec/tasks/gitlab/import_export/import_rake_spec.rb
+++ b/spec/tasks/gitlab/import_export/import_rake_spec.rb
@@ -76,37 +76,13 @@ describe 'gitlab:import_export:import rake task', :sidekiq do
let(:not_imported_message) { /Total number of not imported relations: 1/ }
let(:error) { /Validation failed: Notes is invalid/ }
- context 'when import_graceful_failures feature flag is enabled' do
- before do
- stub_feature_flags(import_graceful_failures: true)
- end
-
- it 'performs project import successfully' do
- expect { subject }.to output(not_imported_message).to_stdout
- expect { subject }.not_to raise_error
-
- expect(project.merge_requests).to be_empty
- expect(project.import_state.last_error).to be_nil
- expect(project.import_state.status).to eq('finished')
- end
- end
-
- context 'when import_graceful_failures feature flag is disabled' do
- before do
- stub_feature_flags(import_graceful_failures: false)
- end
-
- it 'fails project import with an error' do
- # Catch exit call, and raise exception instead
- expect_any_instance_of(GitlabProjectImport).to receive(:exit)
- .with(1).and_raise(SystemExit)
-
- expect { subject }.to raise_error(SystemExit).and output(error).to_stdout
+ it 'performs project import successfully' do
+ expect { subject }.to output(not_imported_message).to_stdout
+ expect { subject }.not_to raise_error
- expect(project.merge_requests).to be_empty
- expect(project.import_state.last_error).to match(error)
- expect(project.import_state.status).to eq('failed')
- end
+ expect(project.merge_requests).to be_empty
+ expect(project.import_state.last_error).to be_nil
+ expect(project.import_state.status).to eq('finished')
end
end
end
diff --git a/spec/uploaders/avatar_uploader_spec.rb b/spec/uploaders/avatar_uploader_spec.rb
index c0844360589..669f75b2ee8 100644
--- a/spec/uploaders/avatar_uploader_spec.rb
+++ b/spec/uploaders/avatar_uploader_spec.rb
@@ -46,4 +46,16 @@ describe AvatarUploader do
expect(uploader.absolute_path).to eq(absolute_path)
end
end
+
+ context 'upload type check' do
+ AvatarUploader::SAFE_IMAGE_EXT.each do |ext|
+ context "#{ext} extension" do
+ it_behaves_like 'type checked uploads', filenames: "image.#{ext}"
+ end
+ end
+
+ context 'skip image/svg+xml integrity check' do
+ it_behaves_like 'skipped type checked uploads', filenames: 'image.svg'
+ end
+ end
end
diff --git a/spec/uploaders/favicon_uploader_spec.rb b/spec/uploaders/favicon_uploader_spec.rb
new file mode 100644
index 00000000000..4d6c849883a
--- /dev/null
+++ b/spec/uploaders/favicon_uploader_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe FaviconUploader do
+ let_it_be(:model) { build_stubbed(:user) }
+ let_it_be(:uploader) { described_class.new(model, :favicon) }
+
+ context 'upload type check' do
+ FaviconUploader::EXTENSION_WHITELIST.each do |ext|
+ context "#{ext} extension" do
+ it_behaves_like 'type checked uploads', filenames: "image.#{ext}"
+ end
+ end
+ end
+
+ context 'upload non-whitelisted file extensions' do
+ it 'denies the upload' do
+ path = File.join('spec', 'fixtures', 'banana_sample.gif')
+ fixture_file = fixture_file_upload(path)
+ expect { uploader.cache!(fixture_file) }.to raise_exception(CarrierWave::IntegrityError)
+ end
+ end
+end
diff --git a/spec/uploaders/upload_type_check_spec.rb b/spec/uploaders/upload_type_check_spec.rb
new file mode 100644
index 00000000000..a4895f6a956
--- /dev/null
+++ b/spec/uploaders/upload_type_check_spec.rb
@@ -0,0 +1,124 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe UploadTypeCheck do
+ include_context 'uploader with type check'
+
+ def upload_fixture(filename)
+ fixture_file_upload(File.join('spec', 'fixtures', filename))
+ end
+
+ describe '#check_content_matches_extension! callback using file upload' do
+ context 'when extension matches contents' do
+ it 'does not raise an error on upload' do
+ expect { uploader.cache!(upload_fixture('banana_sample.gif')) }.not_to raise_error
+ end
+ end
+
+ context 'when extension does not match contents' do
+ it 'raises an error' do
+ expect { uploader.cache!(upload_fixture('not_a_png.png')) }.to raise_error(CarrierWave::IntegrityError)
+ end
+ end
+ end
+
+ describe '#check_content_matches_extension! callback using stubs' do
+ include_context 'stubbed MimeMagic mime type detection'
+
+ context 'when no extension and with ambiguous/text content' do
+ let(:magic_mime) { '' }
+ let(:ext_mime) { '' }
+
+ it_behaves_like 'upload passes content type check'
+ end
+
+ context 'when no extension and with non-text content' do
+ let(:magic_mime) { 'image/gif' }
+ let(:ext_mime) { '' }
+
+ it_behaves_like 'upload fails content type check'
+ end
+
+ # Most text files will exhibit this behaviour.
+ context 'when ambiguous content with text extension' do
+ let(:magic_mime) { '' }
+ let(:ext_mime) { 'text/plain' }
+
+ it_behaves_like 'upload passes content type check'
+ end
+
+ context 'when text content with text extension' do
+ let(:magic_mime) { 'text/plain' }
+ let(:ext_mime) { 'text/plain' }
+
+ it_behaves_like 'upload passes content type check'
+ end
+
+ context 'when ambiguous content with non-text extension' do
+ let(:magic_mime) { '' }
+ let(:ext_mime) { 'application/zip' }
+
+ it_behaves_like 'upload fails content type check'
+ end
+
+ # These are the types when uploading a .dmg
+ context 'when content and extension do not match' do
+ let(:magic_mime) { 'application/x-bzip' }
+ let(:ext_mime) { 'application/x-apple-diskimage' }
+
+ it_behaves_like 'upload fails content type check'
+ end
+ end
+
+ describe '#check_content_matches_extension! mime_type filtering' do
+ context 'without mime types' do
+ let(:mime_types) { nil }
+
+ it_behaves_like 'type checked uploads', %w[doc_sample.txt rails_sample.jpg]
+ end
+
+ context 'with mime types string' do
+ let(:mime_types) { 'text/plain' }
+
+ it_behaves_like 'type checked uploads', %w[doc_sample.txt]
+ it_behaves_like 'skipped type checked uploads', %w[dk.png]
+ end
+
+ context 'with mime types regex' do
+ let(:mime_types) { [/image\/(gif|png)/] }
+
+ it_behaves_like 'type checked uploads', %w[banana_sample.gif dk.png]
+ it_behaves_like 'skipped type checked uploads', %w[doc_sample.txt]
+ end
+
+ context 'with mime types array' do
+ let(:mime_types) { ['text/plain', /image\/png/] }
+
+ it_behaves_like 'type checked uploads', %w[doc_sample.txt dk.png]
+ it_behaves_like 'skipped type checked uploads', %w[audio_sample.wav]
+ end
+ end
+
+ describe '#check_content_matches_extension! extensions filtering' do
+ context 'without extensions' do
+ let(:extensions) { nil }
+
+ it_behaves_like 'type checked uploads', %w[doc_sample.txt dk.png]
+ end
+
+ context 'with extensions string' do
+ let(:extensions) { 'txt' }
+
+ it_behaves_like 'type checked uploads', %w[doc_sample.txt]
+ it_behaves_like 'skipped type checked uploads', %w[rails_sample.jpg]
+ end
+
+ context 'with extensions array of strings' do
+ let(:extensions) { %w[txt png] }
+
+ it_behaves_like 'type checked uploads', %w[doc_sample.txt dk.png]
+ it_behaves_like 'skipped type checked uploads', %w[audio_sample.wav]
+ end
+ end
+end
diff --git a/spec/validators/qualified_domain_array_validator_spec.rb b/spec/validators/qualified_domain_array_validator_spec.rb
index ab6cca4b671..664048c7544 100644
--- a/spec/validators/qualified_domain_array_validator_spec.rb
+++ b/spec/validators/qualified_domain_array_validator_spec.rb
@@ -3,18 +3,19 @@
require 'spec_helper'
describe QualifiedDomainArrayValidator do
- class QualifiedDomainArrayValidatorTestClass
- include ActiveModel::Validations
+ let(:qualified_domain_array_validator_test_class) do
+ Class.new do
+ include ActiveModel::Validations
- attr_accessor :domain_array
+ attr_accessor :domain_array
- def initialize(domain_array)
- self.domain_array = domain_array
+ def initialize(domain_array)
+ self.domain_array = domain_array
+ end
end
end
-
let!(:record) do
- QualifiedDomainArrayValidatorTestClass.new(['gitlab.com'])
+ qualified_domain_array_validator_test_class.new(['gitlab.com'])
end
subject { validator.validate(record) }
diff --git a/spec/views/profiles/preferences/show.html.haml_spec.rb b/spec/views/profiles/preferences/show.html.haml_spec.rb
index 52933c42621..e3eb822b045 100644
--- a/spec/views/profiles/preferences/show.html.haml_spec.rb
+++ b/spec/views/profiles/preferences/show.html.haml_spec.rb
@@ -12,6 +12,16 @@ describe 'profiles/preferences/show' do
allow(controller).to receive(:current_user).and_return(user)
end
+ context 'behavior' do
+ before do
+ render
+ end
+
+ it 'has option for Render whitespace characters in the Web IDE' do
+ expect(rendered).to have_unchecked_field('Render whitespace characters in the Web IDE')
+ end
+ end
+
context 'sourcegraph' do
def have_sourcegraph_field(*args)
have_field('user_sourcegraph_enabled', *args)
diff --git a/spec/views/projects/ci/lints/show.html.haml_spec.rb b/spec/views/projects/ci/lints/show.html.haml_spec.rb
index ea67478ff98..8c3cf04bae6 100644
--- a/spec/views/projects/ci/lints/show.html.haml_spec.rb
+++ b/spec/views/projects/ci/lints/show.html.haml_spec.rb
@@ -75,6 +75,7 @@ describe 'projects/ci/lints/show' do
it 'shows the correct values' do
render
+ expect(rendered).to have_content('Status: syntax is correct')
expect(rendered).to have_content('Tag list: dotnet')
expect(rendered).to have_content('Only policy: refs, test@dude/repo')
expect(rendered).to have_content('Except policy: refs, deploy')
@@ -87,14 +88,14 @@ describe 'projects/ci/lints/show' do
before do
assign(:project, project)
assign(:status, false)
- assign(:error, 'Undefined error')
+ assign(:errors, ['Undefined error'])
end
it 'shows error message' do
render
expect(rendered).to have_content('Status: syntax is incorrect')
- expect(rendered).to have_content('Error: Undefined error')
+ expect(rendered).to have_content('Undefined error')
expect(rendered).not_to have_content('Tag list:')
end
end
diff --git a/spec/views/projects/commit/branches.html.haml_spec.rb b/spec/views/projects/commit/branches.html.haml_spec.rb
index 36da489a84f..0fe7165a790 100644
--- a/spec/views/projects/commit/branches.html.haml_spec.rb
+++ b/spec/views/projects/commit/branches.html.haml_spec.rb
@@ -11,7 +11,7 @@ describe 'projects/commit/branches.html.haml' do
context 'when branches and tags are available' do
before do
- assign(:branches, ['master', 'test-branch'])
+ assign(:branches, %w[master test-branch])
assign(:branches_limit_exceeded, false)
assign(:tags, ['tag1'])
assign(:tags_limit_exceeded, false)
@@ -35,7 +35,7 @@ describe 'projects/commit/branches.html.haml' do
context 'when branches are available but no tags' do
before do
- assign(:branches, ['master', 'test-branch'])
+ assign(:branches, %w[master test-branch])
assign(:branches_limit_exceeded, false)
assign(:tags, [])
assign(:tags_limit_exceeded, true)
diff --git a/spec/views/projects/diffs/_viewer.html.haml_spec.rb b/spec/views/projects/diffs/_viewer.html.haml_spec.rb
index 1d5d6e1e78d..27f271bb178 100644
--- a/spec/views/projects/diffs/_viewer.html.haml_spec.rb
+++ b/spec/views/projects/diffs/_viewer.html.haml_spec.rb
@@ -9,15 +9,7 @@ describe 'projects/diffs/_viewer.html.haml' do
let(:commit) { project.commit('570e7b2abdd848b95f2f578043fc23bd6f6fd24d') }
let(:diff_file) { commit.diffs.diff_file_with_new_path('files/ruby/popen.rb') }
- let(:viewer_class) do
- Class.new(DiffViewer::Base) do
- include DiffViewer::Rich
-
- self.partial_name = 'text'
- end
- end
-
- let(:viewer) { viewer_class.new(diff_file) }
+ let(:viewer) { diff_file.simple_viewer }
before do
assign(:project, project)
@@ -53,7 +45,7 @@ describe 'projects/diffs/_viewer.html.haml' do
it 'renders the collapsed view' do
render_view
- expect(view).to render_template('projects/diffs/_collapsed')
+ expect(view).to render_template('projects/diffs/viewers/_collapsed')
end
end
diff --git a/spec/views/projects/edit.html.haml_spec.rb b/spec/views/projects/edit.html.haml_spec.rb
index 8005b549838..e95dec56a2d 100644
--- a/spec/views/projects/edit.html.haml_spec.rb
+++ b/spec/views/projects/edit.html.haml_spec.rb
@@ -28,6 +28,33 @@ describe 'projects/edit' do
end
end
+ context 'merge suggestions settings' do
+ it 'displays all possible variables' do
+ render
+
+ expect(rendered).to have_content('%{project_path}')
+ expect(rendered).to have_content('%{project_name}')
+ expect(rendered).to have_content('%{file_path}')
+ expect(rendered).to have_content('%{branch_name}')
+ expect(rendered).to have_content('%{username}')
+ expect(rendered).to have_content('%{user_full_name}')
+ end
+
+ it 'displays a placeholder if none is set' do
+ render
+
+ expect(rendered).to have_field('project[suggestion_commit_message]', placeholder: 'Apply suggestion to %{file_path}')
+ end
+
+ it 'displays the user entered value' do
+ project.update!(suggestion_commit_message: 'refactor: changed %{file_path}')
+
+ render
+
+ expect(rendered).to have_field('project[suggestion_commit_message]', with: 'refactor: changed %{file_path}')
+ end
+ end
+
context 'forking' do
before do
assign(:project, project)
diff --git a/spec/views/projects/issues/show.html.haml_spec.rb b/spec/views/projects/issues/show.html.haml_spec.rb
index d34b1735445..add4b44e9b6 100644
--- a/spec/views/projects/issues/show.html.haml_spec.rb
+++ b/spec/views/projects/issues/show.html.haml_spec.rb
@@ -130,4 +130,26 @@ describe 'projects/issues/show' do
expect(rendered).to have_selector('.status-box-open:not(.hidden)', text: 'Open')
end
end
+
+ context 'when the issue is related to a sentry error' do
+ it 'renders a stack trace' do
+ sentry_issue = double(:sentry_issue, sentry_issue_identifier: '1066622')
+ allow(issue).to receive(:sentry_issue).and_return(sentry_issue)
+ render
+
+ expect(rendered).to have_selector(
+ "#js-sentry-error-stack-trace"\
+ "[data-issue-stack-trace-path="\
+ "\"/#{project.full_path}/-/error_tracking/1066622/stack_trace.json\"]"
+ )
+ end
+ end
+
+ context 'when the issue is not related to a sentry error' do
+ it 'does not render a stack trace' do
+ render
+
+ expect(rendered).not_to have_selector('#js-sentry-error-stack-trace')
+ end
+ end
end
diff --git a/spec/views/search/_results.html.haml_spec.rb b/spec/views/search/_results.html.haml_spec.rb
index 628d2e10f93..3243758c650 100644
--- a/spec/views/search/_results.html.haml_spec.rb
+++ b/spec/views/search/_results.html.haml_spec.rb
@@ -6,7 +6,7 @@ describe 'search/_results' do
before do
controller.params[:action] = 'show'
- 3.times { create(:issue) }
+ create_list(:issue, 3)
@search_objects = Issue.page(1).per(2)
@scope = 'issues'
diff --git a/spec/workers/chat_notification_worker_spec.rb b/spec/workers/chat_notification_worker_spec.rb
index 91695674f5d..e4dccf2bf6b 100644
--- a/spec/workers/chat_notification_worker_spec.rb
+++ b/spec/workers/chat_notification_worker_spec.rb
@@ -8,6 +8,10 @@ describe ChatNotificationWorker do
create(:ci_build, pipeline: create(:ci_pipeline, source: :chat))
end
+ it 'instructs sidekiq not to retry on failure' do
+ expect(described_class.get_sidekiq_options['retry']).to eq(false)
+ end
+
describe '#perform' do
it 'does nothing when the build no longer exists' do
expect(worker).not_to receive(:send_response)
@@ -23,16 +27,31 @@ describe ChatNotificationWorker do
worker.perform(chat_build.id)
end
- it 'reschedules the job if the trace sections could not be found' do
- expect(worker)
- .to receive(:send_response)
- .and_raise(Gitlab::Chat::Output::MissingBuildSectionError)
+ context 'when the trace sections could not be found' do
+ it 'reschedules the job' do
+ expect(worker)
+ .to receive(:send_response)
+ .and_raise(Gitlab::Chat::Output::MissingBuildSectionError)
- expect(described_class)
- .to receive(:perform_in)
- .with(described_class::RESCHEDULE_INTERVAL, chat_build.id)
+ expect(described_class)
+ .to receive(:perform_in)
+ .with(described_class::RESCHEDULE_INTERVAL, chat_build.id, 1)
- worker.perform(chat_build.id)
+ worker.perform(chat_build.id)
+ end
+
+ it "raises an error after #{described_class::RESCHEDULE_TIMEOUT} seconds of retrying" do
+ allow(described_class).to receive(:new).and_return(worker)
+ allow(worker).to receive(:send_response).and_raise(Gitlab::Chat::Output::MissingBuildSectionError)
+
+ worker.perform(chat_build.id)
+
+ expect { described_class.drain }.to raise_error(described_class::TimeoutExceeded)
+
+ max_reschedules = described_class::RESCHEDULE_TIMEOUT / described_class::RESCHEDULE_INTERVAL
+
+ expect(worker).to have_received(:send_response).exactly(max_reschedules + 1).times
+ end
end
end
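
The rewritten examples above assert a retry counter passed as a third perform argument and a hard stop once the reschedule budget is spent. A rough sketch of the control flow they describe follows; RESCHEDULE_INTERVAL, RESCHEDULE_TIMEOUT, TimeoutExceeded and send_response appear in the spec, everything else is assumed:

# Illustrative only; not the actual ChatNotificationWorker implementation.
class ChatNotificationWorkerSketch
  RESCHEDULE_INTERVAL = 2.seconds
  RESCHEDULE_TIMEOUT = 5.minutes
  TimeoutExceeded = Class.new(StandardError)

  def perform(build_id, reschedule_count = 0)
    build = Ci::Build.find_by_id(build_id)
    return unless build

    send_response(build)
  rescue Gitlab::Chat::Output::MissingBuildSectionError
    # Trace sections are written asynchronously, so retry until the
    # RESCHEDULE_TIMEOUT budget is exhausted, then give up loudly.
    raise TimeoutExceeded if reschedule_count * RESCHEDULE_INTERVAL >= RESCHEDULE_TIMEOUT

    self.class.perform_in(RESCHEDULE_INTERVAL, build_id, reschedule_count + 1)
  end
end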
diff --git a/spec/workers/ci/archive_traces_cron_worker_spec.rb b/spec/workers/ci/archive_traces_cron_worker_spec.rb
index fc700c15b10..789e83783bb 100644
--- a/spec/workers/ci/archive_traces_cron_worker_spec.rb
+++ b/spec/workers/ci/archive_traces_cron_worker_spec.rb
@@ -35,8 +35,9 @@ describe Ci::ArchiveTracesCronWorker do
it_behaves_like 'archives trace'
it 'executes service' do
- expect_any_instance_of(Ci::ArchiveTraceService)
- .to receive(:execute).with(build, anything)
+ expect_next_instance_of(Ci::ArchiveTraceService) do |instance|
+ expect(instance).to receive(:execute).with(build, anything)
+ end
subject
end
@@ -64,7 +65,9 @@ describe Ci::ArchiveTracesCronWorker do
before do
allow(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
- allow_any_instance_of(Gitlab::Ci::Trace).to receive(:archive!).and_raise('Unexpected error')
+ allow_next_instance_of(Gitlab::Ci::Trace) do |instance|
+ allow(instance).to receive(:archive!).and_raise('Unexpected error')
+ end
end
it 'puts a log' do
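
This hunk, like many below, swaps expect_any_instance_of for the expect_next_instance_of helper from GitLab's spec support. The difference the change relies on, in brief:

# expect_any_instance_of stubs every instance of the class, so an unrelated
# instantiation elsewhere can satisfy (or pollute) the expectation:
expect_any_instance_of(Ci::ArchiveTraceService)
  .to receive(:execute).with(build, anything)

# expect_next_instance_of scopes the expectation to the next object the class
# creates and yields it, keeping the stubbing local to the code under test:
expect_next_instance_of(Ci::ArchiveTraceService) do |instance|
  expect(instance).to receive(:execute).with(build, anything)
end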
diff --git a/spec/workers/ci/resource_groups/assign_resource_from_resource_group_worker_spec.rb b/spec/workers/ci/resource_groups/assign_resource_from_resource_group_worker_spec.rb
new file mode 100644
index 00000000000..634d932121e
--- /dev/null
+++ b/spec/workers/ci/resource_groups/assign_resource_from_resource_group_worker_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::ResourceGroups::AssignResourceFromResourceGroupWorker do
+ let(:worker) { described_class.new }
+
+ describe '#perform' do
+ subject { worker.perform(resource_group_id) }
+
+ context 'when resource group exists' do
+ let(:resource_group) { create(:ci_resource_group) }
+ let(:resource_group_id) { resource_group.id }
+
+ it 'executes AssignResourceFromResourceGroupService' do
+ expect_next_instance_of(Ci::ResourceGroups::AssignResourceFromResourceGroupService, resource_group.project, nil) do |service|
+ expect(service).to receive(:execute).with(resource_group)
+ end
+
+ subject
+ end
+ end
+
+ context 'when the resource group does not exist' do
+ let(:resource_group_id) { 123 }
+
+ it 'does not execute AssignResourceFromResourceGroupService' do
+ expect(Ci::ResourceGroups::AssignResourceFromResourceGroupService).not_to receive(:new)
+
+ subject
+ end
+ end
+ end
+end
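
The two contexts above fix the worker's contract: an existing resource group is handed to the service along with its project, while a missing id never instantiates the service. A minimal #perform consistent with those expectations (illustrative; the lookup and early return are assumptions):

# Hypothetical sketch, not the shipped worker.
def perform(resource_group_id)
  resource_group = Ci::ResourceGroup.find_by_id(resource_group_id)
  return unless resource_group # missing record: never build the service

  Ci::ResourceGroups::AssignResourceFromResourceGroupService
    .new(resource_group.project, nil)
    .execute(resource_group)
end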
diff --git a/spec/workers/concerns/gitlab/github_import/rescheduling_methods_spec.rb b/spec/workers/concerns/gitlab/github_import/rescheduling_methods_spec.rb
index 294eacf09ab..c4f6ddf9aca 100644
--- a/spec/workers/concerns/gitlab/github_import/rescheduling_methods_spec.rb
+++ b/spec/workers/concerns/gitlab/github_import/rescheduling_methods_spec.rb
@@ -57,9 +57,9 @@ describe Gitlab::GithubImport::ReschedulingMethods do
expect(worker)
.not_to receive(:notify_waiter)
- expect_any_instance_of(Gitlab::GithubImport::Client)
- .to receive(:rate_limit_resets_in)
- .and_return(14)
+ expect_next_instance_of(Gitlab::GithubImport::Client) do |instance|
+ expect(instance).to receive(:rate_limit_resets_in).and_return(14)
+ end
expect(worker.class)
.to receive(:perform_in)
diff --git a/spec/workers/concerns/reenqueuer_spec.rb b/spec/workers/concerns/reenqueuer_spec.rb
new file mode 100644
index 00000000000..b28f83d211b
--- /dev/null
+++ b/spec/workers/concerns/reenqueuer_spec.rb
@@ -0,0 +1,179 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Reenqueuer do
+ include ExclusiveLeaseHelpers
+
+ let_it_be(:worker_class) do
+ Class.new do
+ def self.name
+ 'Gitlab::Foo::Bar::DummyWorker'
+ end
+
+ include ApplicationWorker
+ prepend Reenqueuer
+
+ attr_reader :performed_args
+
+ def perform(*args)
+ @performed_args = args
+
+ success? # for stubbing
+ end
+
+ def success?
+ false
+ end
+
+ def lease_timeout
+ 30.seconds
+ end
+ end
+ end
+
+ subject(:job) { worker_class.new }
+
+ before do
+ allow(job).to receive(:sleep) # faster tests
+ end
+
+ it_behaves_like 'reenqueuer'
+
+ it_behaves_like 'it is rate limited to 1 call per', 5.seconds do
+ let(:rate_limited_method) { subject.perform }
+ end
+
+ it 'disables Sidekiq retries' do
+ expect(job.sidekiq_options_hash).to include('retry' => false)
+ end
+
+ describe '#perform', :clean_gitlab_redis_shared_state do
+ let(:arbitrary_args) { [:foo, 'bar', { a: 1 }] }
+
+ context 'when the lease is available' do
+ it 'does perform' do
+ job.perform(*arbitrary_args)
+
+ expect(job.performed_args).to eq(arbitrary_args)
+ end
+ end
+
+ context 'when the lease is taken' do
+ before do
+ stub_exclusive_lease_taken(job.lease_key)
+ end
+
+ it 'does not perform' do
+ job.perform(*arbitrary_args)
+
+ expect(job.performed_args).to be_nil
+ end
+ end
+
+ context 'when #perform returns truthy' do
+ before do
+ allow(job).to receive(:success?).and_return(true)
+ end
+
+ it 'reenqueues the worker' do
+ expect(worker_class).to receive(:perform_async)
+
+ job.perform
+ end
+ end
+
+ context 'when #perform returns falsey' do
+ it 'does not reenqueue the worker' do
+ expect(worker_class).not_to receive(:perform_async)
+
+ job.perform
+ end
+ end
+ end
+end
+
+describe Reenqueuer::ReenqueuerSleeper do
+ let_it_be(:dummy_class) do
+ Class.new do
+ include Reenqueuer::ReenqueuerSleeper
+
+ def rate_limited_method
+ ensure_minimum_duration(11.seconds) do
+ # do work
+ end
+ end
+ end
+ end
+
+ subject(:dummy) { dummy_class.new }
+
+ # Test that rate_limited_method is rate limited by ensure_minimum_duration
+ it_behaves_like 'it is rate limited to 1 call per', 11.seconds do
+ let(:rate_limited_method) { dummy.rate_limited_method }
+ end
+
+ # Test ensure_minimum_duration more directly
+ describe '#ensure_minimum_duration' do
+ around do |example|
+ # Allow Timecop.travel without the block form
+ Timecop.safe_mode = false
+
+ Timecop.freeze do
+ example.run
+ end
+
+ Timecop.safe_mode = true
+ end
+
+ let(:minimum_duration) { 4.seconds }
+
+ context 'when the block completes well before the minimum duration' do
+ let(:time_left) { 3.seconds }
+
+ it 'sleeps until the minimum duration' do
+ expect(dummy).to receive(:sleep).with(a_value_within(0.01).of(time_left))
+
+ dummy.ensure_minimum_duration(minimum_duration) do
+ Timecop.travel(minimum_duration - time_left)
+ end
+ end
+ end
+
+ context 'when the block completes just before the minimum duration' do
+ let(:time_left) { 0.1.seconds }
+
+ it 'sleeps until the minimum duration' do
+ expect(dummy).to receive(:sleep).with(a_value_within(0.01).of(time_left))
+
+ dummy.ensure_minimum_duration(minimum_duration) do
+ Timecop.travel(minimum_duration - time_left)
+ end
+ end
+ end
+
+ context 'when the block completes just after the minimum duration' do
+ let(:time_over) { 0.1.seconds }
+
+ it 'does not sleep' do
+ expect(dummy).not_to receive(:sleep)
+
+ dummy.ensure_minimum_duration(minimum_duration) do
+ Timecop.travel(minimum_duration + time_over)
+ end
+ end
+ end
+
+ context 'when the block completes well after the minimum duration' do
+ let(:time_over) { 10.seconds }
+
+ it 'does not sleep' do
+ expect(dummy).not_to receive(:sleep)
+
+ dummy.ensure_minimum_duration(minimum_duration) do
+ Timecop.travel(minimum_duration + time_over)
+ end
+ end
+ end
+ end
+end
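
Taken together, these examples pin down the concern's contract: perform is skipped while the exclusive lease is held, a truthy perform re-enqueues the job, Sidekiq retries are disabled, and each run is padded to a minimum duration. A condensed sketch of that contract; only perform, lease_timeout and ensure_minimum_duration are names taken from the spec, the rest is assumed:

# Illustrative only: a stripped-down Reenqueuer-like module the dummy worker
# above could prepend. The real concern lives in app/workers/concerns.
module ReenqueuerSketch
  def perform(*args)
    lease = Gitlab::ExclusiveLease.new(lease_key, timeout: lease_timeout)
    return unless lease.try_obtain # lease taken: do not perform at all

    result = ensure_minimum_duration(5.seconds) { super }
    self.class.perform_async(*args) if result # truthy result => re-enqueue

    result
  end

  def ensure_minimum_duration(minimum)
    started_at = Time.current
    result = yield
    elapsed = Time.current - started_at
    sleep(minimum - elapsed) if elapsed < minimum # pad short runs up to the minimum

    result
  end

  private

  def lease_key
    self.class.name.underscore # assumed key; the spec only stubs job.lease_key
  end
end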
diff --git a/spec/workers/container_expiration_policy_worker_spec.rb b/spec/workers/container_expiration_policy_worker_spec.rb
new file mode 100644
index 00000000000..48ab1614633
--- /dev/null
+++ b/spec/workers/container_expiration_policy_worker_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ContainerExpirationPolicyWorker do
+ include ExclusiveLeaseHelpers
+
+ subject { described_class.new.perform }
+
+ context 'with no container expiration policies' do
+ it 'does not execute any policies' do
+ expect(ContainerExpirationPolicyService).not_to receive(:new)
+
+ subject
+ end
+ end
+
+ context 'with container expiration policies' do
+ context 'a valid policy' do
+ let!(:container_expiration_policy) { create(:container_expiration_policy, :runnable) }
+ let(:user) { container_expiration_policy.project.owner }
+
+ it 'runs the policy' do
+ service = instance_double(ContainerExpirationPolicyService, execute: true)
+
+ expect(ContainerExpirationPolicyService)
+ .to receive(:new).with(container_expiration_policy.project, user).and_return(service)
+
+ subject
+ end
+ end
+
+ context 'a disabled policy' do
+ let!(:container_expiration_policy) { create(:container_expiration_policy, :runnable, :disabled) }
+ let(:user) { container_expiration_policy.project.owner }
+
+ it 'does not run the policy' do
+ expect(ContainerExpirationPolicyService)
+ .not_to receive(:new).with(container_expiration_policy, user)
+
+ subject
+ end
+ end
+
+ context 'a policy that is not due for a run' do
+ let!(:container_expiration_policy) { create(:container_expiration_policy) }
+ let(:user) { container_expiration_policy.project.owner }
+
+ it 'does not run the policy' do
+ expect(ContainerExpirationPolicyService)
+ .not_to receive(:new).with(container_expiration_policy, user)
+
+ subject
+ end
+ end
+ end
+end
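
The three contexts above reduce to one rule: only runnable, enabled policies are executed, each with its project and the project owner. A sketch of a #perform that satisfies them (the runnable/enabled scope names and the execute argument are assumptions beyond what the spec shows):

# Hypothetical sketch only, not the shipped worker.
def perform
  ContainerExpirationPolicy.runnable.enabled.find_each do |policy|
    ContainerExpirationPolicyService
      .new(policy.project, policy.project.owner)
      .execute(policy)
  end
end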
diff --git a/spec/workers/delete_merged_branches_worker_spec.rb b/spec/workers/delete_merged_branches_worker_spec.rb
index 8c983859e36..3eaeb7e0797 100644
--- a/spec/workers/delete_merged_branches_worker_spec.rb
+++ b/spec/workers/delete_merged_branches_worker_spec.rb
@@ -9,7 +9,9 @@ describe DeleteMergedBranchesWorker do
describe "#perform" do
it "delegates to Branches::DeleteMergedService" do
- expect_any_instance_of(::Branches::DeleteMergedService).to receive(:execute).and_return(true)
+ expect_next_instance_of(::Branches::DeleteMergedService) do |instance|
+ expect(instance).to receive(:execute).and_return(true)
+ end
worker.perform(project.id, project.owner.id)
end
diff --git a/spec/workers/deployments/finished_worker_spec.rb b/spec/workers/deployments/finished_worker_spec.rb
index df62821e2cd..2961ff599c3 100644
--- a/spec/workers/deployments/finished_worker_spec.rb
+++ b/spec/workers/deployments/finished_worker_spec.rb
@@ -10,6 +10,20 @@ describe Deployments::FinishedWorker do
allow(ProjectServiceWorker).to receive(:perform_async)
end
+ it 'links merge requests to the deployment' do
+ deployment = create(:deployment)
+ service = instance_double(Deployments::LinkMergeRequestsService)
+
+ expect(Deployments::LinkMergeRequestsService)
+ .to receive(:new)
+ .with(deployment)
+ .and_return(service)
+
+ expect(service).to receive(:execute)
+
+ worker.perform(deployment.id)
+ end
+
it 'executes project services for deployment_hooks' do
deployment = create(:deployment)
project = deployment.project
diff --git a/spec/workers/expire_build_artifacts_worker_spec.rb b/spec/workers/expire_build_artifacts_worker_spec.rb
index 0a0aea838d2..06561e94fb7 100644
--- a/spec/workers/expire_build_artifacts_worker_spec.rb
+++ b/spec/workers/expire_build_artifacts_worker_spec.rb
@@ -7,7 +7,9 @@ describe ExpireBuildArtifactsWorker do
describe '#perform' do
it 'executes a service' do
- expect_any_instance_of(Ci::DestroyExpiredJobArtifactsService).to receive(:execute)
+ expect_next_instance_of(Ci::DestroyExpiredJobArtifactsService) do |instance|
+ expect(instance).to receive(:execute)
+ end
worker.perform
end
diff --git a/spec/workers/file_hook_worker_spec.rb b/spec/workers/file_hook_worker_spec.rb
new file mode 100644
index 00000000000..1a7e753fc4a
--- /dev/null
+++ b/spec/workers/file_hook_worker_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe FileHookWorker do
+ include RepoHelpers
+
+ let(:filename) { 'my_file_hook.rb' }
+ let(:data) { { 'event_name' => 'project_create' } }
+
+ subject { described_class.new }
+
+ describe '#perform' do
+ it 'executes Gitlab::FileHook with expected values' do
+ allow(Gitlab::FileHook).to receive(:execute).with(filename, data).and_return([true, ''])
+
+ expect(subject.perform(filename, data)).to be_truthy
+ end
+
+ it 'logs message in case of file_hook execution failure' do
+ allow(Gitlab::FileHook).to receive(:execute).with(filename, data).and_return([false, 'permission denied'])
+
+ expect(Gitlab::FileHookLogger).to receive(:error)
+ expect(subject.perform(filename, data)).to be_truthy
+ end
+ end
+end
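
This new file mirrors the plugin_worker_spec.rb deleted further down in this diff; the asserted behaviour is unchanged apart from the rename to "file hooks". A #perform consistent with both examples might look like this (illustrative; the log message format is assumed):

# Sketch only, not the shipped worker.
def perform(file_hook_name, data)
  success, message = Gitlab::FileHook.execute(file_hook_name, data)

  Gitlab::FileHookLogger.error("File hook error => #{file_hook_name}: #{message}") unless success

  true # the job itself always reports success; failures are only logged
end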
diff --git a/spec/workers/git_garbage_collect_worker_spec.rb b/spec/workers/git_garbage_collect_worker_spec.rb
index cc1c23bb9e7..64ad4ba7eb6 100644
--- a/spec/workers/git_garbage_collect_worker_spec.rb
+++ b/spec/workers/git_garbage_collect_worker_spec.rb
@@ -230,8 +230,8 @@ describe GitGarbageCollectWorker do
new_commit_sha = Rugged::Commit.create(
rugged,
message: "hello world #{SecureRandom.hex(6)}",
- author: Gitlab::Git.committer_hash(email: 'foo@bar', name: 'baz'),
- committer: Gitlab::Git.committer_hash(email: 'foo@bar', name: 'baz'),
+ author: { email: 'foo@bar', name: 'baz' },
+ committer: { email: 'foo@bar', name: 'baz' },
tree: old_commit.tree,
parents: [old_commit]
)
diff --git a/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb
index 6d47d73b92e..3a8fe73622a 100644
--- a/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb
@@ -21,9 +21,9 @@ describe Gitlab::GithubImport::Stage::ImportRepositoryWorker do
it 'schedules the importing of the base data' do
client = double(:client)
- expect_any_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter)
- .to receive(:execute)
- .and_return(true)
+ expect_next_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter) do |instance|
+ expect(instance).to receive(:execute).and_return(true)
+ end
expect(Gitlab::GithubImport::Stage::ImportBaseDataWorker)
.to receive(:perform_async)
@@ -37,9 +37,9 @@ describe Gitlab::GithubImport::Stage::ImportRepositoryWorker do
it 'does not schedule the importing of the base data' do
client = double(:client)
- expect_any_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter)
- .to receive(:execute)
- .and_return(false)
+ expect_next_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter) do |instance|
+ expect(instance).to receive(:execute).and_return(false)
+ end
expect(Gitlab::GithubImport::Stage::ImportBaseDataWorker)
.not_to receive(:perform_async)
diff --git a/spec/workers/gitlab_shell_worker_spec.rb b/spec/workers/gitlab_shell_worker_spec.rb
index 0758cfc4ee2..5dedf5be9fa 100644
--- a/spec/workers/gitlab_shell_worker_spec.rb
+++ b/spec/workers/gitlab_shell_worker_spec.rb
@@ -7,7 +7,9 @@ describe GitlabShellWorker do
describe '#perform with add_key' do
it 'calls add_key on Gitlab::Shell' do
- expect_any_instance_of(Gitlab::Shell).to receive(:add_key).with('foo', 'bar')
+ expect_next_instance_of(Gitlab::Shell) do |instance|
+ expect(instance).to receive(:add_key).with('foo', 'bar')
+ end
worker.perform(:add_key, 'foo', 'bar')
end
end
diff --git a/spec/workers/gitlab_usage_ping_worker_spec.rb b/spec/workers/gitlab_usage_ping_worker_spec.rb
index aff5d112cdd..198daf40493 100644
--- a/spec/workers/gitlab_usage_ping_worker_spec.rb
+++ b/spec/workers/gitlab_usage_ping_worker_spec.rb
@@ -8,7 +8,9 @@ describe GitlabUsagePingWorker do
it 'delegates to SubmitUsagePingService' do
allow(subject).to receive(:try_obtain_lease).and_return(true)
- expect_any_instance_of(SubmitUsagePingService).to receive(:execute)
+ expect_next_instance_of(SubmitUsagePingService) do |instance|
+ expect(instance).to receive(:execute)
+ end
subject.perform
end
diff --git a/spec/workers/hashed_storage/migrator_worker_spec.rb b/spec/workers/hashed_storage/migrator_worker_spec.rb
index 9180da87058..ac76a306f43 100644
--- a/spec/workers/hashed_storage/migrator_worker_spec.rb
+++ b/spec/workers/hashed_storage/migrator_worker_spec.rb
@@ -10,7 +10,9 @@ describe HashedStorage::MigratorWorker do
describe '#perform' do
it 'delegates to MigratorService' do
- expect_any_instance_of(Gitlab::HashedStorage::Migrator).to receive(:bulk_migrate).with(start: 5, finish: 10)
+ expect_next_instance_of(Gitlab::HashedStorage::Migrator) do |instance|
+ expect(instance).to receive(:bulk_migrate).with(start: 5, finish: 10)
+ end
worker.perform(5, 10)
end
diff --git a/spec/workers/hashed_storage/rollbacker_worker_spec.rb b/spec/workers/hashed_storage/rollbacker_worker_spec.rb
index 3ca2601df0f..55fc4fb0fe1 100644
--- a/spec/workers/hashed_storage/rollbacker_worker_spec.rb
+++ b/spec/workers/hashed_storage/rollbacker_worker_spec.rb
@@ -10,7 +10,9 @@ describe HashedStorage::RollbackerWorker do
describe '#perform' do
it 'delegates to MigratorService' do
- expect_any_instance_of(Gitlab::HashedStorage::Migrator).to receive(:bulk_rollback).with(start: 5, finish: 10)
+ expect_next_instance_of(Gitlab::HashedStorage::Migrator) do |instance|
+ expect(instance).to receive(:bulk_rollback).with(start: 5, finish: 10)
+ end
worker.perform(5, 10)
end
diff --git a/spec/workers/import_issues_csv_worker_spec.rb b/spec/workers/import_issues_csv_worker_spec.rb
index 89370c4890d..03944cfb05d 100644
--- a/spec/workers/import_issues_csv_worker_spec.rb
+++ b/spec/workers/import_issues_csv_worker_spec.rb
@@ -11,7 +11,9 @@ describe ImportIssuesCsvWorker do
describe '#perform' do
it 'calls #execute on Issues::ImportCsvService and destroys upload' do
- expect_any_instance_of(Issues::ImportCsvService).to receive(:execute).and_return({ success: 5, errors: [], valid_file: true })
+ expect_next_instance_of(Issues::ImportCsvService) do |instance|
+ expect(instance).to receive(:execute).and_return({ success: 5, errors: [], valid_file: true })
+ end
worker.perform(user.id, project.id, upload.id)
diff --git a/spec/workers/new_release_worker_spec.rb b/spec/workers/new_release_worker_spec.rb
index 9010c36f795..9d8c5bbf919 100644
--- a/spec/workers/new_release_worker_spec.rb
+++ b/spec/workers/new_release_worker_spec.rb
@@ -6,7 +6,9 @@ describe NewReleaseWorker do
let(:release) { create(:release) }
it 'sends a new release notification' do
- expect_any_instance_of(NotificationService).to receive(:send_new_release_notifications).with(release)
+ expect_next_instance_of(NotificationService) do |instance|
+ expect(instance).to receive(:send_new_release_notifications).with(release)
+ end
described_class.new.perform(release.id)
end
diff --git a/spec/workers/pipeline_update_worker_spec.rb b/spec/workers/pipeline_update_worker_spec.rb
index 0225e4a9601..187298034cc 100644
--- a/spec/workers/pipeline_update_worker_spec.rb
+++ b/spec/workers/pipeline_update_worker_spec.rb
@@ -8,7 +8,7 @@ describe PipelineUpdateWorker do
let(:pipeline) { create(:ci_pipeline) }
it 'updates pipeline status' do
- expect_any_instance_of(Ci::Pipeline).to receive(:update_status)
+ expect_any_instance_of(Ci::Pipeline).to receive(:set_status).with('skipped')
described_class.new.perform(pipeline.id)
end
diff --git a/spec/workers/plugin_worker_spec.rb b/spec/workers/plugin_worker_spec.rb
deleted file mode 100644
index ca6c9986131..00000000000
--- a/spec/workers/plugin_worker_spec.rb
+++ /dev/null
@@ -1,27 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe PluginWorker do
- include RepoHelpers
-
- let(:filename) { 'my_plugin.rb' }
- let(:data) { { 'event_name' => 'project_create' } }
-
- subject { described_class.new }
-
- describe '#perform' do
- it 'executes Gitlab::Plugin with expected values' do
- allow(Gitlab::Plugin).to receive(:execute).with(filename, data).and_return([true, ''])
-
- expect(subject.perform(filename, data)).to be_truthy
- end
-
- it 'logs message in case of plugin execution failure' do
- allow(Gitlab::Plugin).to receive(:execute).with(filename, data).and_return([false, 'permission denied'])
-
- expect(Gitlab::PluginLogger).to receive(:error)
- expect(subject.perform(filename, data)).to be_truthy
- end
- end
-end
diff --git a/spec/workers/repository_import_worker_spec.rb b/spec/workers/repository_import_worker_spec.rb
index b8767af8eee..507098582c9 100644
--- a/spec/workers/repository_import_worker_spec.rb
+++ b/spec/workers/repository_import_worker_spec.rb
@@ -21,8 +21,9 @@ describe RepositoryImportWorker do
allow(subject).to receive(:jid).and_return(jid)
- expect_any_instance_of(Projects::ImportService).to receive(:execute)
- .and_return({ status: :ok })
+ expect_next_instance_of(Projects::ImportService) do |instance|
+ expect(instance).to receive(:execute).and_return({ status: :ok })
+ end
# Works around https://github.com/rspec/rspec-mocks/issues/910
expect(Project).to receive(:find).with(started_project.id).and_return(started_project)
@@ -36,8 +37,9 @@ describe RepositoryImportWorker do
context 'when the import was successful' do
it 'imports a project' do
- expect_any_instance_of(Projects::ImportService).to receive(:execute)
- .and_return({ status: :ok })
+ expect_next_instance_of(Projects::ImportService) do |instance|
+ expect(instance).to receive(:execute).and_return({ status: :ok })
+ end
# Works around https://github.com/rspec/rspec-mocks/issues/910
expect(Project).to receive(:find).with(project.id).and_return(project)
@@ -54,7 +56,9 @@ describe RepositoryImportWorker do
error = %q{remote: Not Found fatal: repository 'https://user:pass@test.com/root/repoC.git/' not found }
import_state.update(jid: '123')
- expect_any_instance_of(Projects::ImportService).to receive(:execute).and_return({ status: :error, message: error })
+ expect_next_instance_of(Projects::ImportService) do |instance|
+ expect(instance).to receive(:execute).and_return({ status: :error, message: error })
+ end
expect do
subject.perform(project.id)
@@ -67,7 +71,9 @@ describe RepositoryImportWorker do
project.update(import_type: 'gitlab_project')
import_state.update(jid: '123')
- expect_any_instance_of(Projects::ImportService).to receive(:execute).and_return({ status: :error, message: error })
+ expect_next_instance_of(Projects::ImportService) do |instance|
+ expect(instance).to receive(:execute).and_return({ status: :error, message: error })
+ end
expect do
subject.perform(project.id)
@@ -93,8 +99,9 @@ describe RepositoryImportWorker do
.to receive(:async?)
.and_return(true)
- expect_any_instance_of(ProjectImportState)
- .not_to receive(:finish)
+ expect_next_instance_of(ProjectImportState) do |instance|
+ expect(instance).not_to receive(:finish)
+ end
subject.perform(project.id)
end
diff --git a/spec/workers/self_monitoring_project_create_worker_spec.rb b/spec/workers/self_monitoring_project_create_worker_spec.rb
new file mode 100644
index 00000000000..00c288bdc46
--- /dev/null
+++ b/spec/workers/self_monitoring_project_create_worker_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe SelfMonitoringProjectCreateWorker do
+ describe '#perform' do
+ let(:service_class) { Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService }
+ let(:service) { instance_double(service_class) }
+
+ it_behaves_like 'executes service'
+ end
+
+ describe '.in_progress?', :clean_gitlab_redis_shared_state do
+ it_behaves_like 'returns in_progress based on Sidekiq::Status'
+ end
+end
diff --git a/spec/workers/self_monitoring_project_delete_worker_spec.rb b/spec/workers/self_monitoring_project_delete_worker_spec.rb
new file mode 100644
index 00000000000..3685c73513e
--- /dev/null
+++ b/spec/workers/self_monitoring_project_delete_worker_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe SelfMonitoringProjectDeleteWorker do
+ let_it_be(:jid) { 'b5b28910d97563e58c2fe55f' }
+ let_it_be(:data_key) { "self_monitoring_delete_result:#{jid}" }
+
+ describe '#perform' do
+ let(:service_class) { Gitlab::DatabaseImporters::SelfMonitoring::Project::DeleteService }
+ let(:service) { instance_double(service_class) }
+
+ it_behaves_like 'executes service'
+ end
+
+ describe '.status', :clean_gitlab_redis_shared_state do
+ it_behaves_like 'returns in_progress based on Sidekiq::Status'
+ end
+end
diff --git a/spec/workers/stage_update_worker_spec.rb b/spec/workers/stage_update_worker_spec.rb
index 429d42bac29..8a57cc6bbff 100644
--- a/spec/workers/stage_update_worker_spec.rb
+++ b/spec/workers/stage_update_worker_spec.rb
@@ -8,7 +8,7 @@ describe StageUpdateWorker do
let(:stage) { create(:ci_stage_entity) }
it 'updates stage status' do
- expect_any_instance_of(Ci::Stage).to receive(:update_status)
+ expect_any_instance_of(Ci::Stage).to receive(:set_status).with('skipped')
described_class.new.perform(stage.id)
end