author    Valery Sizov <valery@gitlab.com>  2017-05-23 14:10:16 +0300
committer Valery Sizov <valery@gitlab.com>  2017-05-23 14:10:16 +0300
commit    2158beddeed688fee701a00d21752dd253866548 (patch)
tree      a615157f9ada6da50717b8da69ec45c3f06b378d /spec
parent    719e30c53d1d0690fa9e96fa488047e0a68d00a2 (diff)
parent    12e77890c29c0451ac7a1a59727df4d56bc9741b (diff)
download  gitlab-ce-2158beddeed688fee701a00d21752dd253866548.tar.gz
Merge branch 'master' of gitlab.com:gitlab-org/gitlab-ce into 17489-hide-code-from-guests
Diffstat (limited to 'spec')
-rw-r--r--spec/bin/changelog_spec.rb4
-rw-r--r--spec/controllers/admin/hooks_controller_spec.rb28
-rw-r--r--spec/controllers/groups/milestones_controller_spec.rb135
-rw-r--r--spec/controllers/groups_controller_spec.rb298
-rw-r--r--spec/controllers/projects/branches_controller_spec.rb97
-rw-r--r--spec/controllers/projects/deployments_controller_spec.rb84
-rw-r--r--spec/controllers/projects/environments_controller_spec.rb61
-rw-r--r--spec/controllers/projects/issues_controller_spec.rb26
-rw-r--r--spec/controllers/projects/labels_controller_spec.rb70
-rw-r--r--spec/controllers/projects/merge_requests_controller_spec.rb260
-rw-r--r--spec/controllers/projects/pipeline_schedules_controller_spec.rb87
-rw-r--r--spec/controllers/projects/pipelines_controller_spec.rb38
-rw-r--r--spec/controllers/projects_controller_spec.rb229
-rw-r--r--spec/controllers/snippets_controller_spec.rb34
-rw-r--r--spec/controllers/uploads_controller_spec.rb40
-rw-r--r--spec/controllers/users_controller_spec.rb257
-rw-r--r--spec/factories/ci/pipeline_schedule.rb (renamed from spec/factories/ci/trigger_schedules.rb)13
-rw-r--r--spec/factories/ci/variables.rb2
-rw-r--r--spec/factories/group_members.rb6
-rw-r--r--spec/factories/project_hooks.rb2
-rw-r--r--spec/factories/project_members.rb6
-rw-r--r--spec/factories/projects.rb12
-rw-r--r--spec/factories/services.rb2
-rw-r--r--spec/features/admin/admin_uses_repository_checks_spec.rb2
-rw-r--r--spec/features/auto_deploy_spec.rb9
-rw-r--r--spec/features/boards/issue_ordering_spec.rb2
-rw-r--r--spec/features/boards/sidebar_spec.rb7
-rw-r--r--spec/features/boards/sub_group_project_spec.rb45
-rw-r--r--spec/features/copy_as_gfm_spec.rb2
-rw-r--r--spec/features/cycle_analytics_spec.rb13
-rw-r--r--spec/features/dashboard/group_spec.rb2
-rw-r--r--spec/features/dashboard/issuables_counter_spec.rb4
-rw-r--r--spec/features/dashboard/issues_spec.rb11
-rw-r--r--spec/features/dashboard/milestone_filter_spec.rb60
-rw-r--r--spec/features/dashboard/shortcuts_spec.rb31
-rw-r--r--spec/features/dashboard/snippets_spec.rb47
-rw-r--r--spec/features/groups/members/sorting_spec.rb4
-rw-r--r--spec/features/issuables/issuable_list_spec.rb3
-rw-r--r--spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb10
-rw-r--r--spec/features/issues/filtered_search/recent_searches_spec.rb54
-rw-r--r--spec/features/issues/form_spec.rb40
-rw-r--r--spec/features/issues/note_polling_spec.rb115
-rw-r--r--spec/features/issues/notes_on_issues_spec.rb77
-rw-r--r--spec/features/issues_spec.rb22
-rw-r--r--spec/features/login_spec.rb4
-rw-r--r--spec/features/merge_requests/assign_issues_spec.rb2
-rw-r--r--spec/features/merge_requests/check_if_mergeable_with_unresolved_discussions_spec.rb10
-rw-r--r--spec/features/merge_requests/cherry_pick_spec.rb2
-rw-r--r--spec/features/merge_requests/closes_issues_spec.rb15
-rw-r--r--spec/features/merge_requests/conflicts_spec.rb2
-rw-r--r--spec/features/merge_requests/created_from_fork_spec.rb2
-rw-r--r--spec/features/merge_requests/deleted_source_branch_spec.rb4
-rw-r--r--spec/features/merge_requests/diff_notes_avatars_spec.rb2
-rw-r--r--spec/features/merge_requests/diff_notes_resolve_spec.rb2
-rw-r--r--spec/features/merge_requests/diffs_spec.rb28
-rw-r--r--spec/features/merge_requests/edit_mr_spec.rb12
-rw-r--r--spec/features/merge_requests/form_spec.rb2
-rw-r--r--spec/features/merge_requests/merge_commit_message_toggle_spec.rb19
-rw-r--r--spec/features/merge_requests/merge_immediately_with_pipeline_spec.rb18
-rw-r--r--spec/features/merge_requests/merge_when_pipeline_succeeds_spec.rb22
-rw-r--r--spec/features/merge_requests/mini_pipeline_graph_spec.rb2
-rw-r--r--spec/features/merge_requests/only_allow_merge_if_build_succeeds_spec.rb44
-rw-r--r--spec/features/merge_requests/target_branch_spec.rb11
-rw-r--r--spec/features/merge_requests/widget_deployments_spec.rb8
-rw-r--r--spec/features/merge_requests/widget_spec.rb50
-rw-r--r--spec/features/profiles/preferences_spec.rb2
-rw-r--r--spec/features/projects/blobs/blob_show_spec.rb134
-rw-r--r--spec/features/projects/branches/new_branch_ref_dropdown_spec.rb2
-rw-r--r--spec/features/projects/branches_spec.rb85
-rw-r--r--spec/features/projects/features_visibility_spec.rb2
-rw-r--r--spec/features/projects/files/browse_files_spec.rb12
-rw-r--r--spec/features/projects/gfm_autocomplete_load_spec.rb2
-rw-r--r--spec/features/projects/import_export/test_project_export.tar.gzbin679892 -> 681478 bytes
-rw-r--r--spec/features/projects/members/sorting_spec.rb4
-rw-r--r--spec/features/projects/pipeline_schedules_spec.rb146
-rw-r--r--spec/features/projects/pipelines/pipelines_spec.rb54
-rw-r--r--spec/features/projects/settings/integration_settings_spec.rb2
-rw-r--r--spec/features/projects/snippets_spec.rb24
-rw-r--r--spec/features/projects/wiki/markdown_preview_spec.rb8
-rw-r--r--spec/features/projects/wiki/user_creates_wiki_page_spec.rb4
-rw-r--r--spec/features/protected_branches_spec.rb1
-rw-r--r--spec/features/protected_tags_spec.rb1
-rw-r--r--spec/features/search_spec.rb23
-rw-r--r--spec/features/security/project/internal_access_spec.rb16
-rw-r--r--spec/features/security/project/private_access_spec.rb44
-rw-r--r--spec/features/security/project/public_access_spec.rb16
-rw-r--r--spec/features/signup_spec.rb4
-rw-r--r--spec/features/snippets/explore_spec.rb25
-rw-r--r--spec/features/snippets/internal_snippet_spec.rb23
-rw-r--r--spec/features/snippets/notes_on_personal_snippets_spec.rb4
-rw-r--r--spec/features/tags/master_creates_tag_spec.rb16
-rw-r--r--spec/features/todos/todos_spec.rb6
-rw-r--r--spec/features/triggers_spec.rb71
-rw-r--r--spec/features/uploads/user_uploads_file_to_note_spec.rb76
-rw-r--r--spec/features/user_callout_spec.rb2
-rw-r--r--spec/features/users/snippets_spec.rb46
-rw-r--r--spec/finders/groups_finder_spec.rb57
-rw-r--r--spec/finders/pipeline_schedules_finder_spec.rb41
-rw-r--r--spec/finders/snippets_finder_spec.rb125
-rw-r--r--spec/finders/users_finder_spec.rb66
-rw-r--r--spec/fixtures/api/schemas/entities/merge_request.json98
-rw-r--r--spec/fixtures/api/schemas/entities/merge_request_basic.json15
-rw-r--r--spec/helpers/application_helper_spec.rb25
-rw-r--r--spec/helpers/avatars_helper_spec.rb2
-rw-r--r--spec/helpers/blob_helper_spec.rb7
-rw-r--r--spec/helpers/diff_helper_spec.rb4
-rw-r--r--spec/helpers/merge_requests_helper_spec.rb192
-rw-r--r--spec/helpers/projects_helper_spec.rb4
-rw-r--r--spec/helpers/submodule_helper_spec.rb25
-rw-r--r--spec/javascripts/abuse_reports_spec.js4
-rw-r--r--spec/javascripts/activities_spec.js6
-rw-r--r--spec/javascripts/ajax_loading_spinner_spec.js8
-rw-r--r--spec/javascripts/api_spec.js281
-rw-r--r--spec/javascripts/awards_handler_spec.js2
-rw-r--r--spec/javascripts/behaviors/autosize_spec.js2
-rw-r--r--spec/javascripts/behaviors/bind_in_out_spec.js12
-rw-r--r--spec/javascripts/behaviors/quick_submit_spec.js2
-rw-r--r--spec/javascripts/behaviors/requires_input_spec.js2
-rw-r--r--spec/javascripts/blob/balsamiq/balsamiq_viewer_integration_spec.js51
-rw-r--r--spec/javascripts/blob/balsamiq/balsamiq_viewer_spec.js38
-rw-r--r--spec/javascripts/blob/create_branch_dropdown_spec.js7
-rw-r--r--spec/javascripts/blob/target_branch_dropdown_spec.js11
-rw-r--r--spec/javascripts/blob/viewer/index_spec.js31
-rw-r--r--spec/javascripts/boards/board_card_spec.js10
-rw-r--r--spec/javascripts/boards/board_new_issue_spec.js4
-rw-r--r--spec/javascripts/boards/issue_card_spec.js2
-rw-r--r--spec/javascripts/bootstrap_linked_tabs_spec.js8
-rw-r--r--spec/javascripts/ci_status_icon_spec.js44
-rw-r--r--spec/javascripts/commit/pipelines/mock_data.js89
-rw-r--r--spec/javascripts/commit/pipelines/pipelines_spec.js7
-rw-r--r--spec/javascripts/commits_spec.js6
-rw-r--r--spec/javascripts/datetime_utility_spec.js2
-rw-r--r--spec/javascripts/diff_comments_store_spec.js6
-rw-r--r--spec/javascripts/droplab/drop_down_spec.js53
-rw-r--r--spec/javascripts/droplab/hook_spec.js8
-rw-r--r--spec/javascripts/extensions/array_spec.js2
-rw-r--r--spec/javascripts/filtered_search/dropdown_user_spec.js8
-rw-r--r--spec/javascripts/filtered_search/dropdown_utils_spec.js8
-rw-r--r--spec/javascripts/filtered_search/filtered_search_dropdown_manager_spec.js8
-rw-r--r--spec/javascripts/filtered_search/filtered_search_manager_spec.js15
-rw-r--r--spec/javascripts/filtered_search/filtered_search_token_keys_spec.js4
-rw-r--r--spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js6
-rw-r--r--spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js4
-rw-r--r--spec/javascripts/fixtures/balsamiq.rb18
-rw-r--r--spec/javascripts/fixtures/balsamiq_viewer.html.haml1
-rw-r--r--spec/javascripts/fixtures/graph.html.haml1
-rw-r--r--spec/javascripts/fixtures/merge_requests.rb16
-rw-r--r--spec/javascripts/fixtures/pipelines.rb35
-rw-r--r--spec/javascripts/gfm_auto_complete_spec.js22
-rw-r--r--spec/javascripts/gl_dropdown_spec.js53
-rw-r--r--spec/javascripts/gl_field_errors_spec.js2
-rw-r--r--spec/javascripts/gl_form_spec.js28
-rw-r--r--spec/javascripts/header_spec.js4
-rw-r--r--spec/javascripts/helpers/class_spec_helper.js4
-rw-r--r--spec/javascripts/helpers/class_spec_helper_spec.js4
-rw-r--r--spec/javascripts/helpers/filtered_search_spec_helper.js4
-rw-r--r--spec/javascripts/issuable_spec.js4
-rw-r--r--spec/javascripts/issue_show/components/app_spec.js60
-rw-r--r--spec/javascripts/issue_show/components/description_spec.js99
-rw-r--r--spec/javascripts/issue_show/components/title_spec.js67
-rw-r--r--spec/javascripts/issue_show/issue_title_description_spec.js60
-rw-r--r--spec/javascripts/issue_show/mock_data.js6
-rw-r--r--spec/javascripts/issue_spec.js2
-rw-r--r--spec/javascripts/labels_issue_sidebar_spec.js17
-rw-r--r--spec/javascripts/lib/utils/ajax_cache_spec.js57
-rw-r--r--spec/javascripts/lib/utils/common_utils_spec.js12
-rw-r--r--spec/javascripts/lib/utils/poll_spec.js93
-rw-r--r--spec/javascripts/lib/utils/text_utility_spec.js2
-rw-r--r--spec/javascripts/line_highlighter_spec.js10
-rw-r--r--spec/javascripts/merge_request_notes_spec.js61
-rw-r--r--spec/javascripts/merge_request_spec.js2
-rw-r--r--spec/javascripts/merge_request_tabs_spec.js26
-rw-r--r--spec/javascripts/merge_request_widget_spec.js199
-rw-r--r--spec/javascripts/merged_buttons_spec.js44
-rw-r--r--spec/javascripts/new_branch_spec.js2
-rw-r--r--spec/javascripts/notes_spec.js196
-rw-r--r--spec/javascripts/pager_spec.js2
-rw-r--r--spec/javascripts/pipeline_schedules/interval_pattern_input_spec.js175
-rw-r--r--spec/javascripts/pipeline_schedules/pipeline_schedule_callout_spec.js106
-rw-r--r--spec/javascripts/pipelines/graph/action_component_spec.js40
-rw-r--r--spec/javascripts/pipelines/graph/dropdown_action_component_spec.js30
-rw-r--r--spec/javascripts/pipelines/graph/graph_component_spec.js62
-rw-r--r--spec/javascripts/pipelines/graph/job_component_spec.js117
-rw-r--r--spec/javascripts/pipelines/graph/job_name_component_spec.js27
-rw-r--r--spec/javascripts/pipelines/graph/mock_data.js232
-rw-r--r--spec/javascripts/pipelines/graph/stage_column_component_spec.js42
-rw-r--r--spec/javascripts/pipelines/mock_data.js107
-rw-r--r--spec/javascripts/pipelines/pipeline_url_spec.js2
-rw-r--r--spec/javascripts/pipelines/pipelines_spec.js9
-rw-r--r--spec/javascripts/pipelines_spec.js32
-rw-r--r--spec/javascripts/pretty_time_spec.js2
-rw-r--r--spec/javascripts/project_title_spec.js11
-rw-r--r--spec/javascripts/search_autocomplete_spec.js9
-rw-r--r--spec/javascripts/shortcuts_issuable_spec.js6
-rw-r--r--spec/javascripts/sidebar/sidebar_assignees_spec.js1
-rw-r--r--spec/javascripts/sidebar/sidebar_bundle_spec.js42
-rw-r--r--spec/javascripts/sidebar/sidebar_mediator_spec.js1
-rw-r--r--spec/javascripts/sidebar/sidebar_service_spec.js1
-rw-r--r--spec/javascripts/signin_tabs_memoizer_spec.js2
-rw-r--r--spec/javascripts/smart_interval_spec.js2
-rw-r--r--spec/javascripts/syntax_highlight_spec.js2
-rw-r--r--spec/javascripts/test_bundle.js19
-rw-r--r--spec/javascripts/todos_spec.js4
-rw-r--r--spec/javascripts/u2f/authenticate_spec.js10
-rw-r--r--spec/javascripts/u2f/mock_u2f_device.js6
-rw-r--r--spec/javascripts/u2f/register_spec.js10
-rw-r--r--spec/javascripts/version_check_image_spec.js7
-rw-r--r--spec/javascripts/visibility_select_spec.js8
-rw-r--r--spec/javascripts/vue_mr_widget/components/mr_widget_author_spec.js39
-rw-r--r--spec/javascripts/vue_mr_widget/components/mr_widget_author_time_spec.js61
-rw-r--r--spec/javascripts/vue_mr_widget/components/mr_widget_deployment_spec.js188
-rw-r--r--spec/javascripts/vue_mr_widget/components/mr_widget_header_spec.js102
-rw-r--r--spec/javascripts/vue_mr_widget/components/mr_widget_memory_usage_spec.js184
-rw-r--r--spec/javascripts/vue_mr_widget/components/mr_widget_merge_help_spec.js51
-rw-r--r--spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js131
-rw-r--r--spec/javascripts/vue_mr_widget/components/mr_widget_related_links_spec.js138
-rw-r--r--spec/javascripts/vue_mr_widget/components/states/mr_widget_archived_spec.js18
-rw-r--r--spec/javascripts/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js32
-rw-r--r--spec/javascripts/vue_mr_widget/components/states/mr_widget_checking_spec.js19
-rw-r--r--spec/javascripts/vue_mr_widget/components/states/mr_widget_closed_spec.js51
-rw-r--r--spec/javascripts/vue_mr_widget/components/states/mr_widget_conflicts_spec.js69
-rw-r--r--spec/javascripts/vue_mr_widget/components/states/mr_widget_failed_to_merge_spec.js122
-rw-r--r--spec/javascripts/vue_mr_widget/components/states/mr_widget_locked_spec.js33
-rw-r--r--spec/javascripts/vue_mr_widget/components/states/mr_widget_merge_when_pipeline_succeeds_spec.js213
-rw-r--r--spec/javascripts/vue_mr_widget/components/states/mr_widget_merged_spec.js174
-rw-r--r--spec/javascripts/vue_mr_widget/components/states/mr_widget_missing_branch_spec.js55
-rw-r--r--spec/javascripts/vue_mr_widget/components/states/mr_widget_not_allowed_spec.js17
-rw-r--r--spec/javascripts/vue_mr_widget/components/states/mr_widget_nothing_to_merge_spec.js29
-rw-r--r--spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js16
-rw-r--r--spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_failed_spec.js16
-rw-r--r--spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js389
-rw-r--r--spec/javascripts/vue_mr_widget/components/states/mr_widget_sha_mismatch_spec.js16
-rw-r--r--spec/javascripts/vue_mr_widget/components/states/mr_widget_unresolved_discussions_spec.js47
-rw-r--r--spec/javascripts/vue_mr_widget/components/states/mr_widget_wip_spec.js96
-rw-r--r--spec/javascripts/vue_mr_widget/mock_data.js214
-rw-r--r--spec/javascripts/vue_mr_widget/mr_widget_options_spec.js324
-rw-r--r--spec/javascripts/vue_mr_widget/services/mr_widget_service_spec.js46
-rw-r--r--spec/javascripts/vue_mr_widget/stores/get_state_key_spec.js65
-rw-r--r--spec/javascripts/vue_mr_widget/stores/mr_widget_store_spec.js22
-rw-r--r--spec/javascripts/vue_shared/ci_action_icons_spec.js27
-rw-r--r--spec/javascripts/vue_shared/ci_status_icon_spec.js27
-rw-r--r--spec/javascripts/vue_shared/components/ci_badge_link_spec.js89
-rw-r--r--spec/javascripts/vue_shared/components/ci_icon_spec.js139
-rw-r--r--spec/javascripts/vue_shared/components/commit_spec.js10
-rw-r--r--spec/javascripts/vue_shared/components/loading_icon_spec.js53
-rw-r--r--spec/javascripts/vue_shared/components/memory_graph_spec.js143
-rw-r--r--spec/javascripts/vue_shared/components/mock_data.js69
-rw-r--r--spec/javascripts/vue_shared/components/pipelines_table_row_spec.js94
-rw-r--r--spec/javascripts/vue_shared/components/pipelines_table_spec.js8
-rw-r--r--spec/javascripts/vue_shared/components/table_pagination_spec.js2
-rw-r--r--spec/javascripts/vue_shared/components/user_avatar_image_spec.js54
-rw-r--r--spec/javascripts/vue_shared/components/user_avatar_link_spec.js50
-rw-r--r--spec/javascripts/vue_shared/components/user_avatar_svg_spec.js29
-rw-r--r--spec/javascripts/zen_mode_spec.js2
-rw-r--r--spec/lib/banzai/filter/external_link_filter_spec.rb90
-rw-r--r--spec/lib/ci/gitlab_ci_yaml_processor_spec.rb12
-rw-r--r--spec/lib/container_registry/blob_spec.rb2
-rw-r--r--spec/lib/container_registry/client_spec.rb39
-rw-r--r--spec/lib/expand_variables_spec.rb6
-rw-r--r--spec/lib/gitlab/asciidoc_spec.rb29
-rw-r--r--spec/lib/gitlab/auth_spec.rb4
-rw-r--r--spec/lib/gitlab/backup/manager_spec.rb2
-rw-r--r--spec/lib/gitlab/checks/change_access_spec.rb87
-rw-r--r--spec/lib/gitlab/ci/config/entry/global_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/config/entry/variables_spec.rb8
-rw-r--r--spec/lib/gitlab/conflict/file_collection_spec.rb2
-rw-r--r--spec/lib/gitlab/contributions_calendar_spec.rb2
-rw-r--r--spec/lib/gitlab/cycle_analytics/events_spec.rb8
-rw-r--r--spec/lib/gitlab/data_builder/push_spec.rb1
-rw-r--r--spec/lib/gitlab/database/migration_helpers_spec.rb22
-rw-r--r--spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb9
-rw-r--r--spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb58
-rw-r--r--spec/lib/gitlab/dependency_linker/gemfile_linker_spec.rb60
-rw-r--r--spec/lib/gitlab/dependency_linker_spec.rb13
-rw-r--r--spec/lib/gitlab/diff/highlight_spec.rb4
-rw-r--r--spec/lib/gitlab/diff/inline_diff_markdown_marker_spec.rb14
-rw-r--r--spec/lib/gitlab/diff/inline_diff_marker_spec.rb18
-rw-r--r--spec/lib/gitlab/diff/position_tracer_spec.rb8
-rw-r--r--spec/lib/gitlab/etag_caching/router_spec.rb2
-rw-r--r--spec/lib/gitlab/file_finder_spec.rb21
-rw-r--r--spec/lib/gitlab/git/branch_spec.rb45
-rw-r--r--spec/lib/gitlab/git/diff_spec.rb2
-rw-r--r--spec/lib/gitlab/git/encoding_helper_spec.rb16
-rw-r--r--spec/lib/gitlab/git/repository_spec.rb54
-rw-r--r--spec/lib/gitlab/git/util_spec.rb2
-rw-r--r--spec/lib/gitlab/git_access_spec.rb2
-rw-r--r--spec/lib/gitlab/gitaly_client/commit_spec.rb65
-rw-r--r--spec/lib/gitlab/gitaly_client/ref_spec.rb30
-rw-r--r--spec/lib/gitlab/gitaly_client_spec.rb21
-rw-r--r--spec/lib/gitlab/highlight_spec.rb11
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml15
-rw-r--r--spec/lib/gitlab/import_export/project.json36
-rw-r--r--spec/lib/gitlab/import_export/relation_factory_spec.rb4
-rw-r--r--spec/lib/gitlab/import_export/safe_model_attributes.yml23
-rw-r--r--spec/lib/gitlab/other_markup_spec.rb2
-rw-r--r--spec/lib/gitlab/project_search_results_spec.rb77
-rw-r--r--spec/lib/gitlab/prometheus/queries/deployment_query_spec.rb37
-rw-r--r--spec/lib/gitlab/prometheus_client_spec.rb (renamed from spec/lib/gitlab/prometheus_spec.rb)22
-rw-r--r--spec/lib/gitlab/repo_path_spec.rb2
-rw-r--r--spec/lib/gitlab/string_range_marker_spec.rb36
-rw-r--r--spec/lib/gitlab/string_regex_marker_spec.rb18
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb3
-rw-r--r--spec/lib/gitlab/user_access_spec.rb48
-rw-r--r--spec/lib/gitlab/workhorse_spec.rb4
-rw-r--r--spec/migrations/add_head_pipeline_for_each_merge_request_spec.rb33
-rw-r--r--spec/migrations/cleanup_namespaceless_pending_delete_projects_spec.rb32
-rw-r--r--spec/migrations/fix_wrongly_renamed_routes_spec.rb73
-rw-r--r--spec/migrations/migrate_build_events_to_pipeline_events_spec.rb74
-rw-r--r--spec/migrations/migrate_user_project_view_spec.rb7
-rw-r--r--spec/migrations/rename_users_with_renamed_namespace_spec.rb22
-rw-r--r--spec/migrations/update_retried_for_ci_builds_spec.rb17
-rw-r--r--spec/models/application_setting_spec.rb64
-rw-r--r--spec/models/blob_spec.rb46
-rw-r--r--spec/models/blob_viewer/base_spec.rb129
-rw-r--r--spec/models/blob_viewer/changelog_spec.rb27
-rw-r--r--spec/models/blob_viewer/composer_json_spec.rb25
-rw-r--r--spec/models/blob_viewer/gemspec_spec.rb25
-rw-r--r--spec/models/blob_viewer/gitlab_ci_yml_spec.rb32
-rw-r--r--spec/models/blob_viewer/license_spec.rb34
-rw-r--r--spec/models/blob_viewer/package_json_spec.rb25
-rw-r--r--spec/models/blob_viewer/podspec_json_spec.rb25
-rw-r--r--spec/models/blob_viewer/podspec_spec.rb25
-rw-r--r--spec/models/blob_viewer/route_map_spec.rb38
-rw-r--r--spec/models/blob_viewer/server_side_spec.rb41
-rw-r--r--spec/models/ci/build_spec.rb4
-rw-r--r--spec/models/ci/pipeline_schedule_spec.rb112
-rw-r--r--spec/models/ci/pipeline_spec.rb47
-rw-r--r--spec/models/ci/stage_spec.rb4
-rw-r--r--spec/models/ci/trigger_schedule_spec.rb108
-rw-r--r--spec/models/ci/trigger_spec.rb1
-rw-r--r--spec/models/ci/variable_spec.rb2
-rw-r--r--spec/models/commit_spec.rb29
-rw-r--r--spec/models/commit_status_spec.rb24
-rw-r--r--spec/models/concerns/mentionable_spec.rb49
-rw-r--r--spec/models/cycle_analytics/test_spec.rb1
-rw-r--r--spec/models/deployment_spec.rb28
-rw-r--r--spec/models/environment_spec.rb4
-rw-r--r--spec/models/global_milestone_spec.rb2
-rw-r--r--spec/models/group_spec.rb16
-rw-r--r--spec/models/hooks/system_hook_spec.rb21
-rw-r--r--spec/models/issue_spec.rb40
-rw-r--r--spec/models/merge_request_spec.rb122
-rw-r--r--spec/models/project_authorization_spec.rb2
-rw-r--r--spec/models/project_services/asana_service_spec.rb2
-rw-r--r--spec/models/project_services/chat_message/issue_message_spec.rb2
-rw-r--r--spec/models/project_services/chat_message/merge_message_spec.rb2
-rw-r--r--spec/models/project_services/chat_message/note_message_spec.rb2
-rw-r--r--spec/models/project_services/chat_message/pipeline_message_spec.rb12
-rw-r--r--spec/models/project_services/chat_message/push_message_spec.rb20
-rw-r--r--spec/models/project_services/chat_message/wiki_page_message_spec.rb4
-rw-r--r--spec/models/project_services/kubernetes_service_spec.rb6
-rw-r--r--spec/models/project_services/pivotaltracker_service_spec.rb2
-rw-r--r--spec/models/project_services/prometheus_service_spec.rb33
-rw-r--r--spec/models/project_snippet_spec.rb3
-rw-r--r--spec/models/project_spec.rb13
-rw-r--r--spec/models/project_statistics_spec.rb4
-rw-r--r--spec/models/protected_branch/merge_access_level_spec.rb5
-rw-r--r--spec/models/protected_branch/push_access_level_spec.rb5
-rw-r--r--spec/models/protected_branch_spec.rb3
-rw-r--r--spec/models/repository_spec.rb103
-rw-r--r--spec/models/snippet_spec.rb40
-rw-r--r--spec/models/user_spec.rb86
-rw-r--r--spec/policies/environment_policy_spec.rb2
-rw-r--r--spec/policies/project_policy_spec.rb2
-rw-r--r--spec/policies/project_snippet_policy_spec.rb80
-rw-r--r--spec/presenters/merge_request_presenter_spec.rb356
-rw-r--r--spec/requests/api/branches_spec.rb13
-rw-r--r--spec/requests/api/commit_statuses_spec.rb4
-rw-r--r--spec/requests/api/files_spec.rb2
-rw-r--r--spec/requests/api/groups_spec.rb4
-rw-r--r--spec/requests/api/issues_spec.rb2
-rw-r--r--spec/requests/api/project_hooks_spec.rb4
-rw-r--r--spec/requests/api/projects_spec.rb2
-rw-r--r--spec/requests/api/system_hooks_spec.rb3
-rw-r--r--spec/requests/api/v3/branches_spec.rb13
-rw-r--r--spec/requests/api/v3/files_spec.rb4
-rw-r--r--spec/requests/api/v3/groups_spec.rb4
-rw-r--r--spec/requests/api/v3/project_hooks_spec.rb4
-rw-r--r--spec/requests/api/v3/projects_spec.rb4
-rw-r--r--spec/requests/api/v3/system_hooks_spec.rb3
-rw-r--r--spec/requests/ci/api/builds_spec.rb2
-rw-r--r--spec/requests/lfs_http_spec.rb6
-rw-r--r--spec/requests/openid_connect_spec.rb6
-rw-r--r--spec/requests/projects/cycle_analytics_events_spec.rb5
-rw-r--r--spec/routing/project_routing_spec.rb5
-rw-r--r--spec/routing/routing_spec.rb37
-rw-r--r--spec/serializers/analytics_issue_entity_spec.rb2
-rw-r--r--spec/serializers/analytics_issue_serializer_spec.rb2
-rw-r--r--spec/serializers/build_entity_spec.rb2
-rw-r--r--spec/serializers/build_serializer_spec.rb2
-rw-r--r--spec/serializers/deployment_entity_spec.rb2
-rw-r--r--spec/serializers/environment_serializer_spec.rb2
-rw-r--r--spec/serializers/event_entity_spec.rb13
-rw-r--r--spec/serializers/merge_request_basic_serializer_spec.rb12
-rw-r--r--spec/serializers/merge_request_entity_spec.rb145
-rw-r--r--spec/serializers/merge_request_serializer_spec.rb37
-rw-r--r--spec/serializers/pipeline_entity_spec.rb4
-rw-r--r--spec/serializers/pipeline_serializer_spec.rb6
-rw-r--r--spec/serializers/stage_entity_spec.rb2
-rw-r--r--spec/services/ci/create_pipeline_service_spec.rb50
-rw-r--r--spec/services/ci/process_pipeline_service_spec.rb37
-rw-r--r--spec/services/ci/retry_build_service_spec.rb11
-rw-r--r--spec/services/ci/retry_pipeline_service_spec.rb2
-rw-r--r--spec/services/cohorts_service_spec.rb2
-rw-r--r--spec/services/create_deployment_service_spec.rb2
-rw-r--r--spec/services/delete_merged_branches_service_spec.rb31
-rw-r--r--spec/services/git_push_service_spec.rb13
-rw-r--r--spec/services/issuable/bulk_update_service_spec.rb6
-rw-r--r--spec/services/issues/build_service_spec.rb2
-rw-r--r--spec/services/issues/close_service_spec.rb14
-rw-r--r--spec/services/issues/create_service_spec.rb16
-rw-r--r--spec/services/issues/resolve_discussions_spec.rb2
-rw-r--r--spec/services/issues/update_service_spec.rb7
-rw-r--r--spec/services/members/authorized_destroy_service_spec.rb21
-rw-r--r--spec/services/merge_requests/conflicts/list_service_spec.rb80
-rw-r--r--spec/services/merge_requests/conflicts/resolve_service_spec.rb (renamed from spec/services/merge_requests/resolve_service_spec.rb)41
-rw-r--r--spec/services/merge_requests/create_service_spec.rb30
-rw-r--r--spec/services/merge_requests/merge_when_pipeline_succeeds_service_spec.rb6
-rw-r--r--spec/services/merge_requests/refresh_service_spec.rb2
-rw-r--r--spec/services/merge_requests/update_service_spec.rb51
-rw-r--r--spec/services/notification_service_spec.rb2
-rw-r--r--spec/services/projects/participants_service_spec.rb5
-rw-r--r--spec/services/projects/transfer_service_spec.rb1
-rw-r--r--spec/services/system_note_service_spec.rb2
-rw-r--r--spec/sidekiq/cron/job_gem_dependency_spec.rb18
-rw-r--r--spec/spec_helper.rb2
-rw-r--r--spec/support/cycle_analytics_helpers.rb4
-rw-r--r--spec/support/dropzone_helper.rb40
-rw-r--r--spec/support/filtered_search_helpers.rb6
-rwxr-xr-xspec/support/generate-seed-repo-rb162
-rw-r--r--spec/support/kubernetes_helpers.rb8
-rw-r--r--spec/support/matchers/gitaly_matchers.rb6
-rw-r--r--spec/support/milestone_tabs_examples.rb2
-rw-r--r--spec/support/prometheus_helpers.rb34
-rw-r--r--spec/support/protected_branches/access_control_ce_shared_examples.rb (renamed from spec/features/protected_branches/access_control_ce_spec.rb)2
-rw-r--r--spec/support/protected_tags/access_control_ce_shared_examples.rb (renamed from spec/features/protected_tags/access_control_ce_spec.rb)2
-rw-r--r--spec/support/repo_helpers.rb4
-rw-r--r--spec/support/seed_repo.rb11
-rw-r--r--spec/support/slack_mattermost_notifications_shared_examples.rb14
-rw-r--r--spec/support/test_env.rb66
-rw-r--r--spec/support/wait_for_requests.rb5
-rw-r--r--spec/support/wait_for_vue_resource.rb14
-rw-r--r--spec/support/workhorse_helpers.rb2
-rw-r--r--spec/tasks/gitlab/backup_rake_spec.rb3
-rw-r--r--spec/tasks/gitlab/gitaly_rake_spec.rb24
-rw-r--r--spec/views/projects/_last_commit.html.haml_spec.rb22
-rw-r--r--spec/views/projects/blob/_viewer.html.haml_spec.rb18
-rw-r--r--spec/views/projects/imports/new.html.haml_spec.rb22
-rw-r--r--spec/views/projects/pipelines/_stage.html.haml_spec.rb5
-rw-r--r--spec/views/projects/pipelines/show.html.haml_spec.rb67
-rw-r--r--spec/views/projects/tree/show.html.haml_spec.rb8
-rw-r--r--spec/workers/git_garbage_collect_worker_spec.rb2
-rw-r--r--spec/workers/namespaceless_project_destroy_worker_spec.rb79
-rw-r--r--spec/workers/pipeline_metrics_worker_spec.rb2
-rw-r--r--spec/workers/pipeline_schedule_worker_spec.rb64
-rw-r--r--spec/workers/post_receive_spec.rb30
-rw-r--r--spec/workers/process_commit_worker_spec.rb8
-rw-r--r--spec/workers/project_cache_worker_spec.rb12
-rw-r--r--spec/workers/repository_check/clear_worker_spec.rb2
-rw-r--r--spec/workers/trigger_schedule_worker_spec.rb73
460 files changed, 12556 insertions, 3369 deletions
diff --git a/spec/bin/changelog_spec.rb b/spec/bin/changelog_spec.rb
index 7f4298db59f..91aff0db7cc 100644
--- a/spec/bin/changelog_spec.rb
+++ b/spec/bin/changelog_spec.rb
@@ -46,9 +46,7 @@ describe 'bin/changelog' do
it 'parses -h' do
expect do
- $stdout = StringIO.new
-
- described_class.parse(%w[foo -h bar])
+ expect { described_class.parse(%w[foo -h bar]) }.to output.to_stdout
end.to raise_error(SystemExit)
end
diff --git a/spec/controllers/admin/hooks_controller_spec.rb b/spec/controllers/admin/hooks_controller_spec.rb
new file mode 100644
index 00000000000..1d1070e90f4
--- /dev/null
+++ b/spec/controllers/admin/hooks_controller_spec.rb
@@ -0,0 +1,28 @@
+require 'spec_helper'
+
+describe Admin::HooksController do
+ let(:admin) { create(:admin) }
+
+ before do
+ sign_in(admin)
+ end
+
+ describe 'POST #create' do
+ it 'sets all parameters' do
+ hook_params = {
+ enable_ssl_verification: true,
+ push_events: true,
+ tag_push_events: true,
+ repository_update_events: true,
+ token: "TEST TOKEN",
+ url: "http://example.com"
+ }
+
+ post :create, hook: hook_params
+
+ expect(response).to have_http_status(302)
+ expect(SystemHook.all.size).to eq(1)
+ expect(SystemHook.first).to have_attributes(hook_params)
+ end
+ end
+end
diff --git a/spec/controllers/groups/milestones_controller_spec.rb b/spec/controllers/groups/milestones_controller_spec.rb
index 7cf2996ffd0..f3263bc177d 100644
--- a/spec/controllers/groups/milestones_controller_spec.rb
+++ b/spec/controllers/groups/milestones_controller_spec.rb
@@ -21,7 +21,6 @@ describe Groups::MilestonesController do
sign_in(user)
group.add_owner(user)
project.team << [user, :master]
- controller.instance_variable_set(:@group, group)
end
it_behaves_like 'milestone tabs'
@@ -29,7 +28,7 @@ describe Groups::MilestonesController do
describe "#create" do
it "creates group milestone with Chinese title" do
post :create,
- group_id: group.id,
+ group_id: group.to_param,
milestone: { project_ids: [project.id, project2.id], title: title }
expect(response).to redirect_to(group_milestone_path(group, title.to_slug.to_s, title: title))
@@ -37,9 +36,139 @@ describe Groups::MilestonesController do
end
it "redirects to new when there are no project ids" do
- post :create, group_id: group.id, milestone: { title: title, project_ids: [""] }
+ post :create, group_id: group.to_param, milestone: { title: title, project_ids: [""] }
expect(response).to render_template :new
expect(assigns(:milestone).errors).not_to be_nil
end
end
+
+ describe '#ensure_canonical_path' do
+ before do
+ sign_in(user)
+ end
+
+ context 'for a GET request' do
+ context 'when requesting the canonical path' do
+ context 'non-show path' do
+ context 'with exactly matching casing' do
+ it 'does not redirect' do
+ get :index, group_id: group.to_param
+
+ expect(response).not_to have_http_status(301)
+ end
+ end
+
+ context 'with different casing' do
+ it 'redirects to the correct casing' do
+ get :index, group_id: group.to_param.upcase
+
+ expect(response).to redirect_to(group_milestones_path(group.to_param))
+ expect(controller).not_to set_flash[:notice]
+ end
+ end
+ end
+
+ context 'show path' do
+ context 'with exactly matching casing' do
+ it 'does not redirect' do
+ get :show, group_id: group.to_param, id: title
+
+ expect(response).not_to have_http_status(301)
+ end
+ end
+
+ context 'with different casing' do
+ it 'redirects to the correct casing' do
+ get :show, group_id: group.to_param.upcase, id: title
+
+ expect(response).to redirect_to(group_milestone_path(group.to_param, title))
+ expect(controller).not_to set_flash[:notice]
+ end
+ end
+ end
+ end
+
+ context 'when requesting a redirected path' do
+ let(:redirect_route) { group.redirect_routes.create(path: 'old-path') }
+
+ it 'redirects to the canonical path' do
+ get :merge_requests, group_id: redirect_route.path, id: title
+
+ expect(response).to redirect_to(merge_requests_group_milestone_path(group.to_param, title))
+ expect(controller).to set_flash[:notice].to(group_moved_message(redirect_route, group))
+ end
+
+ context 'when the old group path is a substring of the scheme or host' do
+ let(:redirect_route) { group.redirect_routes.create(path: 'http') }
+
+ it 'does not modify the requested host' do
+ get :merge_requests, group_id: redirect_route.path, id: title
+
+ expect(response).to redirect_to(merge_requests_group_milestone_path(group.to_param, title))
+ expect(controller).to set_flash[:notice].to(group_moved_message(redirect_route, group))
+ end
+ end
+
+ context 'when the old group path is substring of groups' do
+ # I.e. /groups/oups should not become /grfoo/oups
+ let(:redirect_route) { group.redirect_routes.create(path: 'oups') }
+
+ it 'does not modify the /groups part of the path' do
+ get :merge_requests, group_id: redirect_route.path, id: title
+
+ expect(response).to redirect_to(merge_requests_group_milestone_path(group.to_param, title))
+ expect(controller).to set_flash[:notice].to(group_moved_message(redirect_route, group))
+ end
+ end
+
+ context 'when the old group path is substring of groups plus the new path' do
+ # I.e. /groups/oups/oup should not become /grfoos
+ let(:redirect_route) { group.redirect_routes.create(path: 'oups/oup') }
+
+ it 'does not modify the /groups part of the path' do
+ get :merge_requests, group_id: redirect_route.path, id: title
+
+ expect(response).to redirect_to(merge_requests_group_milestone_path(group.to_param, title))
+ expect(controller).to set_flash[:notice].to(group_moved_message(redirect_route, group))
+ end
+ end
+ end
+ end
+ end
+
+ context 'for a non-GET request' do
+ context 'when requesting the canonical path with different casing' do
+ it 'does not 404' do
+ post :create,
+ group_id: group.to_param,
+ milestone: { project_ids: [project.id, project2.id], title: title }
+
+ expect(response).not_to have_http_status(404)
+ end
+
+ it 'does not redirect to the correct casing' do
+ post :create,
+ group_id: group.to_param,
+ milestone: { project_ids: [project.id, project2.id], title: title }
+
+ expect(response).not_to have_http_status(301)
+ end
+ end
+
+ context 'when requesting a redirected path' do
+ let(:redirect_route) { group.redirect_routes.create(path: 'old-path') }
+
+ it 'returns not found' do
+ post :create,
+ group_id: redirect_route.path,
+ milestone: { project_ids: [project.id, project2.id], title: title }
+
+ expect(response).to have_http_status(404)
+ end
+ end
+ end
+
+ def group_moved_message(redirect_route, group)
+ "Group '#{redirect_route.path}' was moved to '#{group.full_path}'. Please update any links and bookmarks that may still have the old path."
+ end
end
diff --git a/spec/controllers/groups_controller_spec.rb b/spec/controllers/groups_controller_spec.rb
index 073b87a1cb4..4626f1ebc29 100644
--- a/spec/controllers/groups_controller_spec.rb
+++ b/spec/controllers/groups_controller_spec.rb
@@ -26,6 +26,41 @@ describe GroupsController do
end
end
+ describe 'GET #subgroups' do
+ let!(:public_subgroup) { create(:group, :public, parent: group) }
+ let!(:private_subgroup) { create(:group, :private, parent: group) }
+
+ context 'as a user' do
+ before do
+ sign_in(user)
+ end
+
+ it 'shows the public subgroups' do
+ get :subgroups, id: group.to_param
+
+ expect(assigns(:nested_groups)).to contain_exactly(public_subgroup)
+ end
+
+ context 'being member' do
+ it 'shows public and private subgroups the user is member of' do
+ private_subgroup.add_guest(user)
+
+ get :subgroups, id: group.to_param
+
+ expect(assigns(:nested_groups)).to contain_exactly(public_subgroup, private_subgroup)
+ end
+ end
+ end
+
+ context 'as a guest' do
+ it 'shows the public subgroups' do
+ get :subgroups, id: group.to_param
+
+ expect(assigns(:nested_groups)).to contain_exactly(public_subgroup)
+ end
+ end
+ end
+
describe 'GET #issues' do
let(:issue_1) { create(:issue, project: project) }
let(:issue_2) { create(:issue, project: project) }
@@ -33,7 +68,7 @@ describe GroupsController do
before do
create_list(:award_emoji, 3, awardable: issue_2)
create_list(:award_emoji, 2, awardable: issue_1)
- create_list(:award_emoji, 2, :downvote, awardable: issue_2,)
+ create_list(:award_emoji, 2, :downvote, awardable: issue_2)
sign_in(user)
end
@@ -49,26 +84,6 @@ describe GroupsController do
expect(assigns(:issues)).to eq [issue_2, issue_1]
end
end
-
- context 'when requesting the canonical path with different casing' do
- it 'redirects to the correct casing' do
- get :issues, id: group.to_param.upcase
-
- expect(response).to redirect_to(issues_group_path(group.to_param))
- expect(controller).not_to set_flash[:notice]
- end
- end
-
- context 'when requesting a redirected path' do
- let(:redirect_route) { group.redirect_routes.create(path: 'old-path') }
-
- it 'redirects to the canonical path' do
- get :issues, id: redirect_route.path
-
- expect(response).to redirect_to(issues_group_path(group.to_param))
- expect(controller).to set_flash[:notice].to(/moved/)
- end
- end
end
describe 'GET #merge_requests' do
@@ -94,26 +109,6 @@ describe GroupsController do
expect(assigns(:merge_requests)).to eq [merge_request_2, merge_request_1]
end
end
-
- context 'when requesting the canonical path with different casing' do
- it 'redirects to the correct casing' do
- get :merge_requests, id: group.to_param.upcase
-
- expect(response).to redirect_to(merge_requests_group_path(group.to_param))
- expect(controller).not_to set_flash[:notice]
- end
- end
-
- context 'when requesting a redirected path' do
- let(:redirect_route) { group.redirect_routes.create(path: 'old-path') }
-
- it 'redirects to the canonical path' do
- get :merge_requests, id: redirect_route.path
-
- expect(response).to redirect_to(merge_requests_group_path(group.to_param))
- expect(controller).to set_flash[:notice].to(/moved/)
- end
- end
end
describe 'DELETE #destroy' do
@@ -143,30 +138,6 @@ describe GroupsController do
expect(response).to redirect_to(root_path)
end
-
- context 'when requesting the canonical path with different casing' do
- it 'does not 404' do
- delete :destroy, id: group.to_param.upcase
-
- expect(response).not_to have_http_status(404)
- end
-
- it 'does not redirect to the correct casing' do
- delete :destroy, id: group.to_param.upcase
-
- expect(response).not_to redirect_to(group_path(group.to_param))
- end
- end
-
- context 'when requesting a redirected path' do
- let(:redirect_route) { group.redirect_routes.create(path: 'old-path') }
-
- it 'returns not found' do
- delete :destroy, id: redirect_route.path
-
- expect(response).to have_http_status(404)
- end
- end
end
end
@@ -189,29 +160,202 @@ describe GroupsController do
expect(assigns(:group).errors).not_to be_empty
expect(assigns(:group).path).not_to eq('new_path')
end
+ end
- context 'when requesting the canonical path with different casing' do
- it 'does not 404' do
- post :update, id: group.to_param.upcase, group: { path: 'new_path' }
+ describe '#ensure_canonical_path' do
+ before do
+ sign_in(user)
+ end
+
+ context 'for a GET request' do
+ context 'when requesting groups at the root path' do
+ before do
+ allow(request).to receive(:original_fullpath).and_return("/#{group_full_path}")
+ get :show, id: group_full_path
+ end
- expect(response).not_to have_http_status(404)
+ context 'when requesting the canonical path with different casing' do
+ let(:group_full_path) { group.to_param.upcase }
+
+ it 'redirects to the correct casing' do
+ expect(response).to redirect_to(group)
+ expect(controller).not_to set_flash[:notice]
+ end
+ end
+
+ context 'when requesting a redirected path' do
+ let(:redirect_route) { group.redirect_routes.create(path: 'old-path') }
+ let(:group_full_path) { redirect_route.path }
+
+ it 'redirects to the canonical path' do
+ expect(response).to redirect_to(group)
+ expect(controller).to set_flash[:notice].to(group_moved_message(redirect_route, group))
+ end
+
+ context 'when the old group path is a substring of the scheme or host' do
+ let(:redirect_route) { group.redirect_routes.create(path: 'http') }
+
+ it 'does not modify the requested host' do
+ expect(response).to redirect_to(group)
+ expect(controller).to set_flash[:notice].to(group_moved_message(redirect_route, group))
+ end
+ end
+
+ context 'when the old group path is substring of groups' do
+ # I.e. /groups/oups should not become /grfoo/oups
+ let(:redirect_route) { group.redirect_routes.create(path: 'oups') }
+
+ it 'does not modify the /groups part of the path' do
+ expect(response).to redirect_to(group)
+ expect(controller).to set_flash[:notice].to(group_moved_message(redirect_route, group))
+ end
+ end
+ end
end
- it 'does not redirect to the correct casing' do
- post :update, id: group.to_param.upcase, group: { path: 'new_path' }
+ context 'when requesting groups under the /groups path' do
+ context 'when requesting the canonical path' do
+ context 'non-show path' do
+ context 'with exactly matching casing' do
+ it 'does not redirect' do
+ get :issues, id: group.to_param
+
+ expect(response).not_to have_http_status(301)
+ end
+ end
+
+ context 'with different casing' do
+ it 'redirects to the correct casing' do
+ get :issues, id: group.to_param.upcase
+
+ expect(response).to redirect_to(issues_group_path(group.to_param))
+ expect(controller).not_to set_flash[:notice]
+ end
+ end
+ end
+
+ context 'show path' do
+ context 'with exactly matching casing' do
+ it 'does not redirect' do
+ get :show, id: group.to_param
+
+ expect(response).not_to have_http_status(301)
+ end
+ end
+
+ context 'with different casing' do
+ it 'redirects to the correct casing at the root path' do
+ get :show, id: group.to_param.upcase
+
+ expect(response).to redirect_to(group)
+ expect(controller).not_to set_flash[:notice]
+ end
+ end
+ end
+ end
+
+ context 'when requesting a redirected path' do
+ let(:redirect_route) { group.redirect_routes.create(path: 'old-path') }
+
+ it 'redirects to the canonical path' do
+ get :issues, id: redirect_route.path
+
+ expect(response).to redirect_to(issues_group_path(group.to_param))
+ expect(controller).to set_flash[:notice].to(group_moved_message(redirect_route, group))
+ end
+
+ context 'when the old group path is a substring of the scheme or host' do
+ let(:redirect_route) { group.redirect_routes.create(path: 'http') }
+
+ it 'does not modify the requested host' do
+ get :issues, id: redirect_route.path
+
+ expect(response).to redirect_to(issues_group_path(group.to_param))
+ expect(controller).to set_flash[:notice].to(group_moved_message(redirect_route, group))
+ end
+ end
+
+ context 'when the old group path is substring of groups' do
+ # I.e. /groups/oups should not become /grfoo/oups
+ let(:redirect_route) { group.redirect_routes.create(path: 'oups') }
- expect(response).not_to redirect_to(group_path(group.to_param))
+ it 'does not modify the /groups part of the path' do
+ get :issues, id: redirect_route.path
+
+ expect(response).to redirect_to(issues_group_path(group.to_param))
+ expect(controller).to set_flash[:notice].to(group_moved_message(redirect_route, group))
+ end
+ end
+
+ context 'when the old group path is substring of groups plus the new path' do
+ # I.e. /groups/oups/oup should not become /grfoos
+ let(:redirect_route) { group.redirect_routes.create(path: 'oups/oup') }
+
+ it 'does not modify the /groups part of the path' do
+ get :issues, id: redirect_route.path
+
+ expect(response).to redirect_to(issues_group_path(group.to_param))
+ expect(controller).to set_flash[:notice].to(group_moved_message(redirect_route, group))
+ end
+ end
+ end
end
end
- context 'when requesting a redirected path' do
- let(:redirect_route) { group.redirect_routes.create(path: 'old-path') }
+ context 'for a POST request' do
+ context 'when requesting the canonical path with different casing' do
+ it 'does not 404' do
+ post :update, id: group.to_param.upcase, group: { path: 'new_path' }
+
+ expect(response).not_to have_http_status(404)
+ end
- it 'returns not found' do
- post :update, id: redirect_route.path, group: { path: 'new_path' }
+ it 'does not redirect to the correct casing' do
+ post :update, id: group.to_param.upcase, group: { path: 'new_path' }
- expect(response).to have_http_status(404)
+ expect(response).not_to have_http_status(301)
+ end
+ end
+
+ context 'when requesting a redirected path' do
+ let(:redirect_route) { group.redirect_routes.create(path: 'old-path') }
+
+ it 'returns not found' do
+ post :update, id: redirect_route.path, group: { path: 'new_path' }
+
+ expect(response).to have_http_status(404)
+ end
end
end
+
+ context 'for a DELETE request' do
+ context 'when requesting the canonical path with different casing' do
+ it 'does not 404' do
+ delete :destroy, id: group.to_param.upcase
+
+ expect(response).not_to have_http_status(404)
+ end
+
+ it 'does not redirect to the correct casing' do
+ delete :destroy, id: group.to_param.upcase
+
+ expect(response).not_to have_http_status(301)
+ end
+ end
+
+ context 'when requesting a redirected path' do
+ let(:redirect_route) { group.redirect_routes.create(path: 'old-path') }
+
+ it 'returns not found' do
+ delete :destroy, id: redirect_route.path
+
+ expect(response).to have_http_status(404)
+ end
+ end
+ end
+ end
+
+ def group_moved_message(redirect_route, group)
+ "Group '#{redirect_route.path}' was moved to '#{group.full_path}'. Please update any links and bookmarks that may still have the old path."
end
end
diff --git a/spec/controllers/projects/branches_controller_spec.rb b/spec/controllers/projects/branches_controller_spec.rb
index 8f915d9d210..f285e5333d6 100644
--- a/spec/controllers/projects/branches_controller_spec.rb
+++ b/spec/controllers/projects/branches_controller_spec.rb
@@ -213,33 +213,98 @@ describe Projects::BranchesController do
sign_in(user)
post :destroy,
- format: :js,
- id: branch,
- namespace_id: project.namespace,
- project_id: project
+ format: format,
+ id: branch,
+ namespace_id: project.namespace,
+ project_id: project
end
- context "valid branch name, valid source" do
+ context 'as JS' do
let(:branch) { "feature" }
+ let(:format) { :js }
- it { expect(response).to have_http_status(200) }
- end
+ context "valid branch name, valid source" do
+ let(:branch) { "feature" }
+
+ it { expect(response).to have_http_status(200) }
+ it { expect(response.body).to be_blank }
+ end
+
+ context "valid branch name with unencoded slashes" do
+ let(:branch) { "improve/awesome" }
+
+ it { expect(response).to have_http_status(200) }
+ it { expect(response.body).to be_blank }
+ end
+
+ context "valid branch name with encoded slashes" do
+ let(:branch) { "improve%2Fawesome" }
- context "valid branch name with unencoded slashes" do
- let(:branch) { "improve/awesome" }
+ it { expect(response).to have_http_status(200) }
+ it { expect(response.body).to be_blank }
+ end
- it { expect(response).to have_http_status(200) }
+ context "invalid branch name, valid ref" do
+ let(:branch) { "no-branch" }
+
+ it { expect(response).to have_http_status(404) }
+ it { expect(response.body).to be_blank }
+ end
end
- context "valid branch name with encoded slashes" do
- let(:branch) { "improve%2Fawesome" }
+ context 'as JSON' do
+ let(:branch) { "feature" }
+ let(:format) { :json }
+
+ context 'valid branch name, valid source' do
+ let(:branch) { "feature" }
- it { expect(response).to have_http_status(200) }
+ it 'returns JSON response with message' do
+ expect(json_response).to eql("message" => 'Branch was removed')
+ end
+
+ it { expect(response).to have_http_status(200) }
+ end
+
+ context 'valid branch name with unencoded slashes' do
+ let(:branch) { "improve/awesome" }
+
+ it 'returns JSON response with message' do
+ expect(json_response).to eql('message' => 'Branch was removed')
+ end
+
+ it { expect(response).to have_http_status(200) }
+ end
+
+ context "valid branch name with encoded slashes" do
+ let(:branch) { 'improve%2Fawesome' }
+
+ it 'returns JSON response with message' do
+ expect(json_response).to eql('message' => 'Branch was removed')
+ end
+
+ it { expect(response).to have_http_status(200) }
+ end
+
+ context 'invalid branch name, valid ref' do
+ let(:branch) { 'no-branch' }
+
+ it 'returns JSON response with message' do
+ expect(json_response).to eql('message' => 'No such branch')
+ end
+
+ it { expect(response).to have_http_status(404) }
+ end
end
- context "invalid branch name, valid ref" do
- let(:branch) { "no-branch" }
- it { expect(response).to have_http_status(404) }
+ context 'as HTML' do
+ let(:branch) { "feature" }
+ let(:format) { :html }
+
+ it 'redirects to branches path' do
+ expect(response)
+ .to redirect_to(namespace_project_branches_path(project.namespace, project))
+ end
end
end
diff --git a/spec/controllers/projects/deployments_controller_spec.rb b/spec/controllers/projects/deployments_controller_spec.rb
index 89692b601b2..4c69443314d 100644
--- a/spec/controllers/projects/deployments_controller_spec.rb
+++ b/spec/controllers/projects/deployments_controller_spec.rb
@@ -8,7 +8,7 @@ describe Projects::DeploymentsController do
let(:environment) { create(:environment, name: 'production', project: project) }
before do
- project.add_master(user)
+ project.team << [user, :master]
sign_in(user)
end
@@ -19,7 +19,7 @@ describe Projects::DeploymentsController do
create(:deployment, environment: environment, created_at: 7.hours.ago)
create(:deployment, environment: environment)
- get :index, environment_params(after: 8.hours.ago)
+ get :index, deployment_params(after: 8.hours.ago)
expect(response).to be_ok
@@ -29,14 +29,88 @@ describe Projects::DeploymentsController do
it 'returns a list with deployments information' do
create(:deployment, environment: environment)
- get :index, environment_params
+ get :index, deployment_params
expect(response).to be_ok
expect(response).to match_response_schema('deployments')
end
end
- def environment_params(opts = {})
- opts.reverse_merge(namespace_id: project.namespace, project_id: project, environment_id: environment.id)
+ describe 'GET #metrics' do
+ let(:deployment) { create(:deployment, project: project, environment: environment) }
+
+ before do
+ allow(controller).to receive(:deployment).and_return(deployment)
+ end
+ context 'when metrics are disabled' do
+ before do
+ allow(deployment).to receive(:has_metrics?).and_return false
+ end
+
+ it 'responds with not found' do
+ get :metrics, deployment_params(id: deployment.id)
+
+ expect(response).to be_not_found
+ end
+ end
+
+ context 'when metrics are enabled' do
+ before do
+ allow(deployment).to receive(:has_metrics?).and_return true
+ end
+
+ context 'when environment has no metrics' do
+ before do
+ expect(deployment).to receive(:metrics).and_return(nil)
+ end
+
+ it 'returns an empty 204 response' do
+ get :metrics, deployment_params(id: deployment.id)
+ expect(response).to have_http_status(204)
+ expect(response.body).to eq('')
+ end
+ end
+
+ context 'when environment has some metrics' do
+ let(:empty_metrics) do
+ {
+ success: true,
+ metrics: {},
+ last_update: 42
+ }
+ end
+
+ before do
+ expect(deployment).to receive(:metrics).and_return(empty_metrics)
+ end
+
+ it 'returns a metrics JSON document' do
+ get :metrics, deployment_params(id: deployment.id)
+
+ expect(response).to be_ok
+ expect(json_response['success']).to be(true)
+ expect(json_response['metrics']).to eq({})
+ expect(json_response['last_update']).to eq(42)
+ end
+ end
+
+ context 'when metrics service does not implement deployment metrics' do
+ before do
+ allow(deployment).to receive(:metrics).and_raise(NotImplementedError)
+ end
+
+ it 'responds with not found' do
+ get :metrics, deployment_params(id: deployment.id)
+
+ expect(response).to be_not_found
+ end
+ end
+ end
+ end
+
+ def deployment_params(opts = {})
+ opts.reverse_merge(namespace_id: project.namespace,
+ project_id: project,
+ environment_id: environment.id)
end
end
diff --git a/spec/controllers/projects/environments_controller_spec.rb b/spec/controllers/projects/environments_controller_spec.rb
index 5c478534ff3..20f99b209eb 100644
--- a/spec/controllers/projects/environments_controller_spec.rb
+++ b/spec/controllers/projects/environments_controller_spec.rb
@@ -1,25 +1,25 @@
require 'spec_helper'
describe Projects::EnvironmentsController do
- let(:user) { create(:user) }
- let(:project) { create(:empty_project) }
+ set(:user) { create(:user) }
+ set(:project) { create(:empty_project) }
- let(:environment) do
+ set(:environment) do
create(:environment, name: 'production', project: project)
end
before do
- project.team << [user, :master]
+ project.add_master(user)
sign_in(user)
end
describe 'GET index' do
- context 'when standardrequest has been made' do
+ context 'when a request for the HTML is made' do
it 'responds with status code 200' do
get :index, environment_params
- expect(response).to be_ok
+ expect(response).to have_http_status(:ok)
end
end
@@ -84,6 +84,9 @@ describe Projects::EnvironmentsController do
create(:environment, project: project,
name: 'staging-1.0/review',
state: :available)
+ create(:environment, project: project,
+ name: 'staging-1.0/zzz',
+ state: :available)
end
context 'when using default format' do
@@ -98,7 +101,7 @@ describe Projects::EnvironmentsController do
end
context 'when using JSON format' do
- it 'responds with JSON' do
+ it 'sorts the subfolders lexicographically' do
get :folder, namespace_id: project.namespace,
project_id: project,
id: 'staging-1.0',
@@ -108,6 +111,8 @@ describe Projects::EnvironmentsController do
expect(response).not_to render_template 'folder'
expect(json_response['environments'][0])
.to include('name' => 'staging-1.0/review')
+ expect(json_response['environments'][1])
+ .to include('name' => 'staging-1.0/zzz')
end
end
end
@@ -149,6 +154,48 @@ describe Projects::EnvironmentsController do
end
end
+ describe 'PATCH #stop' do
+ context 'when env not available' do
+ it 'returns 404' do
+ allow_any_instance_of(Environment).to receive(:available?) { false }
+
+ patch :stop, environment_params(format: :json)
+
+ expect(response).to have_http_status(404)
+ end
+ end
+
+ context 'when stop action' do
+ it 'returns action url' do
+ action = create(:ci_build, :manual)
+
+ allow_any_instance_of(Environment)
+ .to receive_messages(available?: true, stop_with_action!: action)
+
+ patch :stop, environment_params(format: :json)
+
+ expect(response).to have_http_status(200)
+ expect(json_response).to eq(
+ { 'redirect_url' =>
+ "http://test.host/#{project.path_with_namespace}/builds/#{action.id}" })
+ end
+ end
+
+ context 'when no stop action' do
+ it 'returns env url' do
+ allow_any_instance_of(Environment)
+ .to receive_messages(available?: true, stop_with_action!: nil)
+
+ patch :stop, environment_params(format: :json)
+
+ expect(response).to have_http_status(200)
+ expect(json_response).to eq(
+ { 'redirect_url' =>
+ "http://test.host/#{project.path_with_namespace}/environments/#{environment.id}" })
+ end
+ end
+ end
+
describe 'GET #terminal' do
context 'with valid id' do
it 'responds with a status code 200' do
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index 1f79e72495a..04afd07c59e 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -156,6 +156,32 @@ describe Projects::IssuesController do
end
end
+ describe 'Redirect after sign in' do
+ context 'with an AJAX request' do
+ it 'does not store the visited URL' do
+ xhr :get,
+ :show,
+ format: :json,
+ namespace_id: project.namespace,
+ project_id: project,
+ id: issue.iid
+
+ expect(session['user_return_to']).to be_blank
+ end
+ end
+
+ context 'without an AJAX request' do
+ it 'stores the visited URL' do
+ get :show,
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ id: issue.iid
+
+ expect(session['user_return_to']).to eq("/#{project.namespace.to_param}/#{project.to_param}/issues/#{issue.iid}")
+ end
+ end
+ end
+
describe 'PUT #update' do
before do
sign_in(user)
diff --git a/spec/controllers/projects/labels_controller_spec.rb b/spec/controllers/projects/labels_controller_spec.rb
index 05999431d8f..130b0b744b5 100644
--- a/spec/controllers/projects/labels_controller_spec.rb
+++ b/spec/controllers/projects/labels_controller_spec.rb
@@ -157,4 +157,74 @@ describe Projects::LabelsController do
end
end
end
+
+ describe '#ensure_canonical_path' do
+ before do
+ sign_in(user)
+ end
+
+ context 'for a GET request' do
+ context 'when requesting the canonical path' do
+ context 'non-show path' do
+ context 'with exactly matching casing' do
+ it 'does not redirect' do
+ get :index, namespace_id: project.namespace, project_id: project.to_param
+
+ expect(response).not_to have_http_status(301)
+ end
+ end
+
+ context 'with different casing' do
+ it 'redirects to the correct casing' do
+ get :index, namespace_id: project.namespace, project_id: project.to_param.upcase
+
+ expect(response).to redirect_to(namespace_project_labels_path(project.namespace, project))
+ expect(controller).not_to set_flash[:notice]
+ end
+ end
+ end
+ end
+
+ context 'when requesting a redirected path' do
+ let!(:redirect_route) { project.redirect_routes.create(path: project.full_path + 'old') }
+
+ it 'redirects to the canonical path' do
+ get :index, namespace_id: project.namespace, project_id: project.to_param + 'old'
+
+ expect(response).to redirect_to(namespace_project_labels_path(project.namespace, project))
+ expect(controller).to set_flash[:notice].to(project_moved_message(redirect_route, project))
+ end
+ end
+ end
+ end
+
+ context 'for a non-GET request' do
+ context 'when requesting the canonical path with different casing' do
+ it 'does not 404' do
+ post :generate, namespace_id: project.namespace, project_id: project
+
+ expect(response).not_to have_http_status(404)
+ end
+
+ it 'does not redirect to the correct casing' do
+ post :generate, namespace_id: project.namespace, project_id: project
+
+ expect(response).not_to have_http_status(301)
+ end
+ end
+
+ context 'when requesting a redirected path' do
+ let!(:redirect_route) { project.redirect_routes.create(path: project.full_path + 'old') }
+
+ it 'returns not found' do
+ post :generate, namespace_id: project.namespace, project_id: project.to_param + 'old'
+
+ expect(response).to have_http_status(404)
+ end
+ end
+ end
+
+ def project_moved_message(redirect_route, project)
+ "Project '#{redirect_route.path}' was moved to '#{project.full_path}'. Please update any links and bookmarks that may still have the old path."
+ end
end
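The #ensure_canonical_path examples in this file (and the matching ones in projects_controller_spec.rb below) encode one rule: GET requests may follow case differences and redirect routes, redirecting with a notice for moved paths, while non-GET requests only tolerate case differences and return 404 for redirected paths. A rough sketch of a filter implementing that rule follows; the helper names are assumptions, not the actual GitLab implementation.

# Rough sketch of the canonical-path rule the specs above encode; the
# moved_message, redirected_path? and build_canonical_path helpers are
# assumed names, not part of this patch.
def ensure_canonical_path(routable, requested_path)
  return if routable.full_path == requested_path

  if request.get?
    flash[:notice] = moved_message(requested_path, routable) if redirected_path?(requested_path)
    redirect_to build_canonical_path(routable)
  elsif routable.full_path.casecmp(requested_path) != 0
    # Non-GET requests tolerate a casing mismatch but 404 on moved paths.
    render_404
  end
end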
diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb
index 0483c6b7879..457e07334b9 100644
--- a/spec/controllers/projects/merge_requests_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests_controller_spec.rb
@@ -59,6 +59,18 @@ describe Projects::MergeRequestsController do
end
end
+ describe 'GET commit_change_content' do
+ it 'renders commit_change_content template' do
+ get :commit_change_content,
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ id: merge_request.iid,
+ format: 'html'
+
+ expect(response).to render_template('_commit_change_content')
+ end
+ end
+
shared_examples "loads labels" do |action|
it "loads labels into the @labels variable" do
get action,
@@ -71,63 +83,59 @@ describe Projects::MergeRequestsController do
end
describe "GET show" do
- shared_examples "export merge as" do |format|
- it "does generally work" do
- get(:show,
- namespace_id: project.namespace.to_param,
- project_id: project,
- id: merge_request.iid,
- format: format)
+ def go(extra_params = {})
+ params = {
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ id: merge_request.iid
+ }
+
+ get :show, params.merge(extra_params)
+ end
+
+ it_behaves_like "loads labels", :show
+
+ describe 'as html' do
+ it "renders merge request page" do
+ go(format: :html)
expect(response).to be_success
end
+ end
- it_behaves_like "loads labels", :show
-
- it "generates it" do
- expect_any_instance_of(MergeRequest).to receive(:"to_#{format}")
+ describe 'as json' do
+ context 'with basic param' do
+ it 'renders basic MR entity as json' do
+ go(basic: true, format: :json)
- get(:show,
- namespace_id: project.namespace.to_param,
- project_id: project,
- id: merge_request.iid,
- format: format)
+ expect(response).to match_response_schema('entities/merge_request_basic')
+ end
end
- it "renders it" do
- get(:show,
- namespace_id: project.namespace.to_param,
- project_id: project,
- id: merge_request.iid,
- format: format)
+ context 'without basic param' do
+ it 'renders the merge request in the json format' do
+ go(format: :json)
- expect(response.body).to eq(merge_request.send(:"to_#{format}").to_s)
+ expect(response).to match_response_schema('entities/merge_request')
+ end
end
- it "does not escape Html" do
- allow_any_instance_of(MergeRequest).to receive(:"to_#{format}").
- and_return('HTML entities &<>" ')
+ context 'number of queries' do
+ it 'keeps the number of queries within the expected range' do
+ # pre-create objects
+ merge_request
- get(:show,
- namespace_id: project.namespace.to_param,
- project_id: project,
- id: merge_request.iid,
- format: format)
+ recorded = ActiveRecord::QueryRecorder.new { go(format: :json) }
- expect(response.body).not_to include('&amp;')
- expect(response.body).not_to include('&gt;')
- expect(response.body).not_to include('&lt;')
- expect(response.body).not_to include('&quot;')
+ expect(recorded.count).to be_within(5).of(50)
+ expect(recorded.cached_count).to eq(0)
+ end
end
end
describe "as diff" do
it "triggers workhorse to serve the request" do
- get(:show,
- namespace_id: project.namespace.to_param,
- project_id: project,
- id: merge_request.iid,
- format: :diff)
+ go(format: :diff)
expect(response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("git-diff:")
end
@@ -135,11 +143,7 @@ describe Projects::MergeRequestsController do
describe "as patch" do
it 'triggers workhorse to serve the request' do
- get(:show,
- namespace_id: project.namespace.to_param,
- project_id: project,
- id: merge_request.iid,
- format: :patch)
+ go(format: :patch)
expect(response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("git-format-patch:")
end
@@ -295,19 +299,18 @@ describe Projects::MergeRequestsController do
namespace_id: project.namespace,
project_id: project,
id: merge_request.iid,
- format: 'raw'
+ format: 'json'
}
end
- context 'when the user does not have access' do
+ context 'when user cannot access' do
before do
- project.team.truncate
- project.team << [user, :reporter]
- post :merge, base_params
+ project.add_reporter(user)
+ xhr :post, :merge, base_params
end
- it 'returns not found' do
- expect(response).to be_not_found
+ it 'returns 404' do
+ expect(response).to have_http_status(404)
end
end
@@ -319,7 +322,7 @@ describe Projects::MergeRequestsController do
end
it 'returns :failed' do
- expect(assigns(:status)).to eq(:failed)
+ expect(json_response).to eq('status' => 'failed')
end
end
@@ -327,7 +330,7 @@ describe Projects::MergeRequestsController do
before { post :merge, base_params.merge(sha: 'foo') }
it 'returns :sha_mismatch' do
- expect(assigns(:status)).to eq(:sha_mismatch)
+ expect(json_response).to eq('status' => 'sha_mismatch')
end
end
@@ -339,7 +342,7 @@ describe Projects::MergeRequestsController do
it 'returns :success' do
merge_with_sha
- expect(assigns(:status)).to eq(:success)
+ expect(json_response).to eq('status' => 'success')
end
it 'starts the merge immediately' do
@@ -354,13 +357,14 @@ describe Projects::MergeRequestsController do
end
before do
- create(:ci_empty_pipeline, project: project, sha: merge_request.diff_head_sha, ref: merge_request.source_branch)
+ pipeline = create(:ci_empty_pipeline, project: project, sha: merge_request.diff_head_sha, ref: merge_request.source_branch)
+ merge_request.update(head_pipeline: pipeline)
end
it 'returns :merge_when_pipeline_succeeds' do
merge_when_pipeline_succeeds
- expect(assigns(:status)).to eq(:merge_when_pipeline_succeeds)
+ expect(json_response).to eq('status' => 'merge_when_pipeline_succeeds')
end
it 'sets the MR to merge when the pipeline succeeds' do
@@ -382,7 +386,7 @@ describe Projects::MergeRequestsController do
it 'returns :merge_when_pipeline_succeeds' do
merge_when_pipeline_succeeds
- expect(assigns(:status)).to eq(:merge_when_pipeline_succeeds)
+ expect(json_response).to eq('status' => 'merge_when_pipeline_succeeds')
end
end
end
@@ -403,7 +407,7 @@ describe Projects::MergeRequestsController do
it 'returns :failed' do
merge_with_sha
- expect(assigns(:status)).to eq(:failed)
+ expect(json_response).to eq('status' => 'failed')
end
end
@@ -416,7 +420,7 @@ describe Projects::MergeRequestsController do
it 'returns :success' do
merge_with_sha
- expect(assigns(:status)).to eq(:success)
+ expect(json_response).to eq('status' => 'success')
end
end
end
@@ -434,7 +438,7 @@ describe Projects::MergeRequestsController do
it 'returns :success' do
merge_with_sha
- expect(assigns(:status)).to eq(:success)
+ expect(json_response).to eq('status' => 'success')
end
end
@@ -447,7 +451,7 @@ describe Projects::MergeRequestsController do
it 'returns :success' do
merge_with_sha
- expect(assigns(:status)).to eq(:success)
+ expect(json_response).to eq('status' => 'success')
end
end
end
@@ -831,18 +835,55 @@ describe Projects::MergeRequestsController do
end
end
- context 'POST remove_wip' do
- it 'removes the wip status' do
+ describe 'POST remove_wip' do
+ before do
merge_request.title = merge_request.wip_title
merge_request.save
- post :remove_wip,
- namespace_id: merge_request.project.namespace.to_param,
- project_id: merge_request.project,
- id: merge_request.iid
+ xhr :post, :remove_wip,
+ namespace_id: merge_request.project.namespace.to_param,
+ project_id: merge_request.project,
+ id: merge_request.iid,
+ format: :json
+ end
+ it 'removes the wip status' do
expect(merge_request.reload.title).to eq(merge_request.wipless_title)
end
+
+ it 'renders MergeRequest as JSON' do
+ expect(json_response.keys).to include('id', 'iid', 'description')
+ end
+ end
+
+ describe 'POST cancel_merge_when_pipeline_succeeds' do
+ subject do
+ xhr :post, :cancel_merge_when_pipeline_succeeds,
+ namespace_id: merge_request.project.namespace.to_param,
+ project_id: merge_request.project,
+ id: merge_request.iid,
+ format: :json
+ end
+
+ it 'calls MergeRequests::MergeWhenPipelineSucceedsService' do
+ mwps_service = double
+
+ allow(MergeRequests::MergeWhenPipelineSucceedsService)
+ .to receive(:new)
+ .and_return(mwps_service)
+
+ expect(mwps_service).to receive(:cancel).with(merge_request)
+
+ subject
+ end
+
+ it { is_expected.to have_http_status(:success) }
+
+ it 'renders MergeRequest as JSON' do
+ subject
+
+ expect(json_response.keys).to include('id', 'iid', 'description')
+ end
end
describe 'GET conflict_for_path' do
@@ -887,7 +928,9 @@ describe Projects::MergeRequestsController do
end
it 'returns the file in JSON format' do
- content = merge_request_with_conflicts.conflicts.file_for_path(path, path).content
+ content = MergeRequests::Conflicts::ListService.new(merge_request_with_conflicts).
+ file_for_path(path, path).
+ content
expect(json_response).to include('old_path' => path,
'new_path' => path,
@@ -1011,11 +1054,15 @@ describe Projects::MergeRequestsController do
context 'when a file has identical content to the conflict' do
before do
+ content = MergeRequests::Conflicts::ListService.new(merge_request_with_conflicts).
+ file_for_path('files/ruby/popen.rb', 'files/ruby/popen.rb').
+ content
+
resolved_files = [
{
'new_path' => 'files/ruby/popen.rb',
'old_path' => 'files/ruby/popen.rb',
- 'content' => merge_request_with_conflicts.conflicts.file_for_path('files/ruby/popen.rb', 'files/ruby/popen.rb').content
+ 'content' => content
}, {
'new_path' => 'files/ruby/regex.rb',
'old_path' => 'files/ruby/regex.rb',
@@ -1121,74 +1168,6 @@ describe Projects::MergeRequestsController do
end
end
- describe 'GET merge_widget_refresh' do
- let(:params) do
- {
- namespace_id: project.namespace,
- project_id: project,
- id: merge_request.iid,
- format: :raw
- }
- end
-
- before do
- project.team << [user, :developer]
- xhr :get, :merge_widget_refresh, params
- end
-
- context 'when merge in progress' do
- let(:merge_request) { create(:merge_request, source_project: project, in_progress_merge_commit_sha: 'sha') }
-
- it 'returns an OK response' do
- expect(response).to have_http_status(:ok)
- end
-
- it 'sets status to :success' do
- expect(assigns(:status)).to eq(:success)
- expect(response).to render_template('merge')
- end
- end
-
- context 'when merge request was merged already' do
- let(:merge_request) { create(:merge_request, source_project: project, state: :merged) }
-
- it 'returns an OK response' do
- expect(response).to have_http_status(:ok)
- end
-
- it 'sets status to :success' do
- expect(assigns(:status)).to eq(:success)
- expect(response).to render_template('merge')
- end
- end
-
- context 'when waiting for build' do
- let(:merge_request) { create(:merge_request, source_project: project, merge_when_pipeline_succeeds: true, merge_user: user) }
-
- it 'returns an OK response' do
- expect(response).to have_http_status(:ok)
- end
-
- it 'sets status to :merge_when_pipeline_succeeds' do
- expect(assigns(:status)).to eq(:merge_when_pipeline_succeeds)
- expect(response).to render_template('merge')
- end
- end
-
- context 'when MR does not have special state' do
- let(:merge_request) { create(:merge_request, source_project: project) }
-
- it 'returns an OK response' do
- expect(response).to have_http_status(:ok)
- end
-
- it 'sets status to success' do
- expect(assigns(:status)).to eq(:success)
- expect(response).to render_template('merge')
- end
- end
- end
-
describe 'GET pipeline_status.json' do
context 'when head_pipeline exists' do
let!(:pipeline) do
@@ -1199,7 +1178,10 @@ describe Projects::MergeRequestsController do
let(:status) { pipeline.detailed_status(double('user')) }
- before { get_pipeline_status }
+ before do
+ merge_request.update(head_pipeline: pipeline)
+ get_pipeline_status
+ end
it 'return a detailed head_pipeline status in json' do
expect(response).to have_http_status(:ok)
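The query-count example added above relies on GitLab's ActiveRecord::QueryRecorder spec support class; condensed, the pattern is just the following (go is the request helper defined earlier in this file).

# Condensed from the spec above: QueryRecorder counts the SQL queries
# executed inside the block, and cached_count tracks cached queries.
recorded = ActiveRecord::QueryRecorder.new { go(format: :json) }

expect(recorded.count).to be_within(5).of(50)
expect(recorded.cached_count).to eq(0)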
diff --git a/spec/controllers/projects/pipeline_schedules_controller_spec.rb b/spec/controllers/projects/pipeline_schedules_controller_spec.rb
new file mode 100644
index 00000000000..f8f95dd9bc8
--- /dev/null
+++ b/spec/controllers/projects/pipeline_schedules_controller_spec.rb
@@ -0,0 +1,87 @@
+require 'spec_helper'
+
+describe Projects::PipelineSchedulesController do
+ set(:project) { create(:empty_project, :public) }
+ let!(:pipeline_schedule) { create(:ci_pipeline_schedule, project: project) }
+
+ describe 'GET #index' do
+ let(:scope) { nil }
+ let!(:inactive_pipeline_schedule) do
+ create(:ci_pipeline_schedule, :inactive, project: project)
+ end
+
+ it 'renders the index view' do
+ visit_pipelines_schedules
+
+ expect(response).to have_http_status(:ok)
+ expect(response).to render_template(:index)
+ end
+
+ context 'when the scope is set to active' do
+ let(:scope) { 'active' }
+
+ before do
+ visit_pipelines_schedules
+ end
+
+ it 'only shows active pipeline schedules' do
+ expect(response).to have_http_status(:ok)
+ expect(assigns(:schedules)).to include(pipeline_schedule)
+ expect(assigns(:schedules)).not_to include(inactive_pipeline_schedule)
+ end
+ end
+
+ def visit_pipelines_schedules
+ get :index, namespace_id: project.namespace.to_param, project_id: project, scope: scope
+ end
+ end
+
+ describe 'GET edit' do
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+
+ sign_in(user)
+ end
+
+ it 'loads the pipeline schedule' do
+ get :edit, namespace_id: project.namespace.to_param, project_id: project, id: pipeline_schedule.id
+
+ expect(response).to have_http_status(:ok)
+ expect(assigns(:schedule)).to eq(pipeline_schedule)
+ end
+ end
+
+ describe 'DELETE #destroy' do
+ set(:user) { create(:user) }
+
+ context 'when a developer makes the request' do
+ before do
+ project.add_developer(user)
+ sign_in(user)
+
+ delete :destroy, namespace_id: project.namespace.to_param, project_id: project, id: pipeline_schedule.id
+ end
+
+ it 'does not delete the pipeline schedule' do
+ expect(response).not_to have_http_status(:ok)
+ end
+ end
+
+ context 'when a master makes the request' do
+ before do
+ project.add_master(user)
+ sign_in(user)
+ end
+
+ it 'destroys the pipeline schedule' do
+ expect do
+ delete :destroy, namespace_id: project.namespace.to_param, project_id: project, id: pipeline_schedule.id
+ end.to change { project.pipeline_schedules.count }.by(-1)
+
+ expect(response).to have_http_status(302)
+ end
+ end
+ end
+end
diff --git a/spec/controllers/projects/pipelines_controller_spec.rb b/spec/controllers/projects/pipelines_controller_spec.rb
index fb4a4721a58..c880da1e36a 100644
--- a/spec/controllers/projects/pipelines_controller_spec.rb
+++ b/spec/controllers/projects/pipelines_controller_spec.rb
@@ -38,7 +38,7 @@ describe Projects::PipelinesController do
end
describe 'GET show JSON' do
- let!(:pipeline) { create(:ci_pipeline_with_one_job, project: project) }
+ let(:pipeline) { create(:ci_pipeline_with_one_job, project: project) }
it 'returns the pipeline' do
get_pipeline_json
@@ -49,20 +49,48 @@ describe Projects::PipelinesController do
expect(json_response['details']).to have_key 'stages'
end
- context 'when the pipeline has multiple jobs' do
+ context 'when the pipeline has multiple stages and groups' do
+ before do
+ RequestStore.begin!
+
+ create_build('build', 0, 'build')
+ create_build('test', 1, 'rspec 0')
+ create_build('deploy', 2, 'production')
+ create_build('post deploy', 3, 'pages 0')
+ end
+
+ after do
+ RequestStore.end!
+ RequestStore.clear!
+ end
+
+ let(:project) { create(:project) }
+ let(:pipeline) do
+ create(:ci_empty_pipeline, project: project, user: user, sha: project.commit.id)
+ end
+
it 'does not perform N + 1 queries' do
control_count = ActiveRecord::QueryRecorder.new { get_pipeline_json }.count
- create(:ci_build, pipeline: pipeline)
+ create_build('test', 1, 'rspec 1')
+ create_build('test', 1, 'spinach 0')
+ create_build('test', 1, 'spinach 1')
+ create_build('test', 1, 'audit')
+ create_build('post deploy', 3, 'pages 1')
+ create_build('post deploy', 3, 'pages 2')
- # The plus 2 is needed to group and sort
- expect { get_pipeline_json }.not_to exceed_query_limit(control_count + 2)
+ new_count = ActiveRecord::QueryRecorder.new { get_pipeline_json }.count
+ expect(new_count).to be_within(12).of(control_count)
end
end
def get_pipeline_json
get :show, namespace_id: project.namespace, project_id: project, id: pipeline, format: :json
end
+
+ def create_build(stage, stage_idx, name)
+ create(:ci_build, pipeline: pipeline, stage: stage, stage_idx: stage_idx, name: name)
+ end
end
describe 'GET stages.json' do
diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb
index e46ef447df2..4f6fc6691be 100644
--- a/spec/controllers/projects_controller_spec.rb
+++ b/spec/controllers/projects_controller_spec.rb
@@ -169,27 +169,6 @@ describe ProjectsController do
end
end
- context "when requested with case sensitive namespace and project path" do
- context "when there is a match with the same casing" do
- it "loads the project" do
- get :show, namespace_id: public_project.namespace, id: public_project
-
- expect(assigns(:project)).to eq(public_project)
- expect(response).to have_http_status(200)
- end
- end
-
- context "when there is a match with different casing" do
- it "redirects to the normalized path" do
- get :show, namespace_id: public_project.namespace, id: public_project.path.upcase
-
- expect(assigns(:project)).to eq(public_project)
- expect(response).to redirect_to("/#{public_project.full_path}")
- expect(controller).not_to set_flash[:notice]
- end
- end
- end
-
context "when the url contains .atom" do
let(:public_project_with_dot_atom) { build(:empty_project, :public, name: 'my.atom', path: 'my.atom') }
@@ -219,17 +198,6 @@ describe ProjectsController do
expect(response).to redirect_to(namespace_project_path)
end
end
-
- context 'when requesting a redirected path' do
- let!(:redirect_route) { public_project.redirect_routes.create!(path: "foo/bar") }
-
- it 'redirects to the canonical path' do
- get :show, namespace_id: 'foo', id: 'bar'
-
- expect(response).to redirect_to(public_project)
- expect(controller).to set_flash[:notice].to(/moved/)
- end
- end
end
describe "#update" do
@@ -256,32 +224,48 @@ describe ProjectsController do
expect(assigns(:repository).path).to eq(project.repository.path)
expect(response).to have_http_status(302)
end
+ end
- context 'when requesting the canonical path' do
- it "is case-insensitive" do
- controller.instance_variable_set(:@project, project)
+ describe '#transfer' do
+ render_views
- put :update,
- namespace_id: 'FOo',
- id: 'baR',
- project: project_params
+ let(:project) { create(:project) }
+ let(:admin) { create(:admin) }
+ let(:new_namespace) { create(:namespace) }
- expect(project.repository.path).to include(new_path)
- expect(assigns(:repository).path).to eq(project.repository.path)
- expect(response).to have_http_status(302)
- end
+ it 'updates namespace' do
+ sign_in(admin)
+
+ put :transfer,
+ namespace_id: project.namespace.path,
+ new_namespace_id: new_namespace.id,
+ id: project.path,
+ format: :js
+
+ project.reload
+
+ expect(project.namespace).to eq(new_namespace)
+ expect(response).to have_http_status(200)
end
- context 'when requesting a redirected path' do
- let!(:redirect_route) { project.redirect_routes.create!(path: "foo/bar") }
+ context 'when new namespace is empty' do
+ it 'does not change the project namespace' do
+ controller.instance_variable_set(:@project, project)
+ sign_in(admin)
+
+ old_namespace = project.namespace
- it 'returns not found' do
- put :update,
- namespace_id: 'foo',
- id: 'bar',
- project: project_params
+ put :transfer,
+ namespace_id: old_namespace.path,
+ new_namespace_id: nil,
+ id: project.path,
+ format: :js
- expect(response).to have_http_status(404)
+ project.reload
+
+ expect(project.namespace).to eq(old_namespace)
+ expect(response).to have_http_status(200)
+ expect(flash[:alert]).to eq 'Please select a new namespace for your project.'
end
end
end
@@ -319,31 +303,6 @@ describe ProjectsController do
expect(merge_request.reload.state).to eq('closed')
end
end
-
- context 'when requesting the canonical path' do
- it "is case-insensitive" do
- controller.instance_variable_set(:@project, project)
- sign_in(admin)
-
- orig_id = project.id
- delete :destroy, namespace_id: project.namespace, id: project.path.upcase
-
- expect { Project.find(orig_id) }.to raise_error(ActiveRecord::RecordNotFound)
- expect(response).to have_http_status(302)
- expect(response).to redirect_to(dashboard_projects_path)
- end
- end
-
- context 'when requesting a redirected path' do
- let!(:redirect_route) { project.redirect_routes.create!(path: "foo/bar") }
-
- it 'returns not found' do
- sign_in(admin)
- delete :destroy, namespace_id: 'foo', id: 'bar'
-
- expect(response).to have_http_status(404)
- end
- end
end
describe 'PUT #new_issue_address' do
@@ -465,17 +424,6 @@ describe ProjectsController do
expect(parsed_body["Tags"]).to include("v1.0.0")
expect(parsed_body["Commits"]).to include("123456")
end
-
- context 'when requesting a redirected path' do
- let!(:redirect_route) { public_project.redirect_routes.create!(path: "foo/bar") }
-
- it 'redirects to the canonical path' do
- get :refs, namespace_id: 'foo', id: 'bar'
-
- expect(response).to redirect_to(refs_namespace_project_path(namespace_id: public_project.namespace, id: public_project))
- expect(controller).to set_flash[:notice].to(/moved/)
- end
- end
end
describe 'POST #preview_markdown' do
@@ -487,4 +435,111 @@ describe ProjectsController do
expect(JSON.parse(response.body).keys).to match_array(%w(body references))
end
end
+
+ describe '#ensure_canonical_path' do
+ before do
+ sign_in(user)
+ end
+
+ context 'for a GET request' do
+ context 'when requesting the canonical path' do
+ context "with exactly matching casing" do
+ it "loads the project" do
+ get :show, namespace_id: public_project.namespace, id: public_project
+
+ expect(assigns(:project)).to eq(public_project)
+ expect(response).to have_http_status(200)
+ end
+ end
+
+ context "with different casing" do
+ it "redirects to the normalized path" do
+ get :show, namespace_id: public_project.namespace, id: public_project.path.upcase
+
+ expect(assigns(:project)).to eq(public_project)
+ expect(response).to redirect_to("/#{public_project.full_path}")
+ expect(controller).not_to set_flash[:notice]
+ end
+ end
+ end
+
+ context 'when requesting a redirected path' do
+ let!(:redirect_route) { public_project.redirect_routes.create!(path: "foo/bar") }
+
+ it 'redirects to the canonical path' do
+ get :show, namespace_id: 'foo', id: 'bar'
+
+ expect(response).to redirect_to(public_project)
+ expect(controller).to set_flash[:notice].to(project_moved_message(redirect_route, public_project))
+ end
+
+ it 'redirects to the canonical path (testing non-show action)' do
+ get :refs, namespace_id: 'foo', id: 'bar'
+
+ expect(response).to redirect_to(refs_namespace_project_path(namespace_id: public_project.namespace, id: public_project))
+ expect(controller).to set_flash[:notice].to(project_moved_message(redirect_route, public_project))
+ end
+ end
+ end
+
+ context 'for a POST request' do
+ context 'when requesting the canonical path with different casing' do
+ it 'does not 404' do
+ post :toggle_star, namespace_id: public_project.namespace, id: public_project.path.upcase
+
+ expect(response).not_to have_http_status(404)
+ end
+
+ it 'does not redirect to the correct casing' do
+ post :toggle_star, namespace_id: public_project.namespace, id: public_project.path.upcase
+
+ expect(response).not_to have_http_status(301)
+ end
+ end
+
+ context 'when requesting a redirected path' do
+ let!(:redirect_route) { public_project.redirect_routes.create!(path: "foo/bar") }
+
+ it 'returns not found' do
+ post :toggle_star, namespace_id: 'foo', id: 'bar'
+
+ expect(response).to have_http_status(404)
+ end
+ end
+ end
+
+ context 'for a DELETE request' do
+ before do
+ sign_in(create(:admin))
+ end
+
+ context 'when requesting the canonical path with different casing' do
+ it 'does not 404' do
+ delete :destroy, namespace_id: project.namespace, id: project.path.upcase
+
+ expect(response).not_to have_http_status(404)
+ end
+
+ it 'does not redirect to the correct casing' do
+ delete :destroy, namespace_id: project.namespace, id: project.path.upcase
+
+ expect(response).not_to have_http_status(301)
+ end
+ end
+
+ context 'when requesting a redirected path' do
+ let!(:redirect_route) { project.redirect_routes.create!(path: "foo/bar") }
+
+ it 'returns not found' do
+ delete :destroy, namespace_id: 'foo', id: 'bar'
+
+ expect(response).to have_http_status(404)
+ end
+ end
+ end
+ end
+
+ def project_moved_message(redirect_route, project)
+ "Project '#{redirect_route.path}' was moved to '#{project.full_path}'. Please update any links and bookmarks that may still have the old path."
+ end
end
diff --git a/spec/controllers/snippets_controller_spec.rb b/spec/controllers/snippets_controller_spec.rb
index 41cd5bdcdd8..930415a4778 100644
--- a/spec/controllers/snippets_controller_spec.rb
+++ b/spec/controllers/snippets_controller_spec.rb
@@ -3,6 +3,34 @@ require 'spec_helper'
describe SnippetsController do
let(:user) { create(:user) }
+ describe 'GET #index' do
+ let(:user) { create(:user) }
+
+ context 'when username parameter is present' do
+ it 'renders the snippets of that user' do
+ get :index, username: user.username
+
+ expect(response).to render_template(:index)
+ end
+ end
+
+ context 'when username parameter is not present' do
+ it 'redirects to explore snippets page when user is not logged in' do
+ get :index
+
+ expect(response).to redirect_to(explore_snippets_path)
+ end
+
+ it 'redirects to snippets dashboard page when user is logged in' do
+ sign_in(user)
+
+ get :index
+
+ expect(response).to redirect_to(dashboard_snippets_path)
+ end
+ end
+ end
+
describe 'GET #new' do
context 'when signed in' do
before do
@@ -132,7 +160,7 @@ describe SnippetsController do
- it 'responds with status 404' do
+ it 'redirects to the sign in path' do
get :show, id: 'doesntexist'
- expect(response).to have_http_status(404)
+ expect(response).to redirect_to(new_user_session_path)
end
end
end
@@ -478,10 +506,10 @@ describe SnippetsController do
end
context 'when not signed in' do
- it 'responds with status 404' do
+ it 'redirects to the sign in path' do
get :raw, id: 'doesntexist'
- expect(response).to have_http_status(404)
+ expect(response).to redirect_to(new_user_session_path)
end
end
end
diff --git a/spec/controllers/uploads_controller_spec.rb b/spec/controllers/uploads_controller_spec.rb
index 7dedfe160a6..8000c9dec61 100644
--- a/spec/controllers/uploads_controller_spec.rb
+++ b/spec/controllers/uploads_controller_spec.rb
@@ -473,5 +473,45 @@ describe UploadsController do
end
end
end
+
+ context 'Appearance' do
+ context 'when viewing a custom header logo' do
+ let!(:appearance) { create :appearance, header_logo: fixture_file_upload(Rails.root.join('spec/fixtures/dk.png'), 'image/png') }
+
+ context 'when not signed in' do
+ it 'responds with status 200' do
+ get :show, model: 'appearance', mounted_as: 'header_logo', id: appearance.id, filename: 'dk.png'
+
+ expect(response).to have_http_status(200)
+ end
+
+ it_behaves_like 'content not cached without revalidation' do
+ subject do
+ get :show, model: 'appearance', mounted_as: 'header_logo', id: appearance.id, filename: 'dk.png'
+ response
+ end
+ end
+ end
+ end
+
+ context 'when viewing a custom logo' do
+ let!(:appearance) { create :appearance, logo: fixture_file_upload(Rails.root.join('spec/fixtures/dk.png'), 'image/png') }
+
+ context 'when not signed in' do
+ it 'responds with status 200' do
+ get :show, model: 'appearance', mounted_as: 'logo', id: appearance.id, filename: 'dk.png'
+
+ expect(response).to have_http_status(200)
+ end
+
+ it_behaves_like 'content not cached without revalidation' do
+ subject do
+ get :show, model: 'appearance', mounted_as: 'logo', id: appearance.id, filename: 'dk.png'
+ response
+ end
+ end
+ end
+ end
+ end
end
end
diff --git a/spec/controllers/users_controller_spec.rb b/spec/controllers/users_controller_spec.rb
index 74c5aa44ba9..d33e2ba1e53 100644
--- a/spec/controllers/users_controller_spec.rb
+++ b/spec/controllers/users_controller_spec.rb
@@ -53,40 +53,6 @@ describe UsersController do
end
end
- context 'when requesting the canonical path' do
- let(:user) { create(:user, username: 'CamelCaseUser') }
-
- before { sign_in(user) }
-
- context 'with exactly matching casing' do
- it 'responds with success' do
- get :show, username: user.username
-
- expect(response).to be_success
- end
- end
-
- context 'with different casing' do
- it 'redirects to the correct casing' do
- get :show, username: user.username.downcase
-
- expect(response).to redirect_to(user)
- expect(controller).not_to set_flash[:notice]
- end
- end
- end
-
- context 'when requesting a redirected path' do
- let(:redirect_route) { user.namespace.redirect_routes.create(path: 'old-username') }
-
- it 'redirects to the canonical path' do
- get :show, username: redirect_route.path
-
- expect(response).to redirect_to(user)
- expect(controller).to set_flash[:notice].to(/moved/)
- end
- end
-
context 'when a user by that username does not exist' do
context 'when logged out' do
it 'redirects to login page' do
@@ -131,40 +97,6 @@ describe UsersController do
expect(assigns(:contributions_calendar).projects.count).to eq(2)
end
end
-
- context 'when requesting the canonical path' do
- let(:user) { create(:user, username: 'CamelCaseUser') }
-
- before { sign_in(user) }
-
- context 'with exactly matching casing' do
- it 'responds with success' do
- get :calendar, username: user.username
-
- expect(response).to be_success
- end
- end
-
- context 'with different casing' do
- it 'redirects to the correct casing' do
- get :calendar, username: user.username.downcase
-
- expect(response).to redirect_to(user_calendar_path(user))
- expect(controller).not_to set_flash[:notice]
- end
- end
- end
-
- context 'when requesting a redirected path' do
- let(:redirect_route) { user.namespace.redirect_routes.create(path: 'old-username') }
-
- it 'redirects to the canonical path' do
- get :calendar, username: redirect_route.path
-
- expect(response).to redirect_to(user_calendar_path(user))
- expect(controller).to set_flash[:notice].to(/moved/)
- end
- end
end
describe 'GET #calendar_activities' do
@@ -187,38 +119,6 @@ describe UsersController do
get :calendar_activities, username: user.username
expect(response).to render_template('calendar_activities')
end
-
- context 'when requesting the canonical path' do
- let(:user) { create(:user, username: 'CamelCaseUser') }
-
- context 'with exactly matching casing' do
- it 'responds with success' do
- get :calendar_activities, username: user.username
-
- expect(response).to be_success
- end
- end
-
- context 'with different casing' do
- it 'redirects to the correct casing' do
- get :calendar_activities, username: user.username.downcase
-
- expect(response).to redirect_to(user_calendar_activities_path(user))
- expect(controller).not_to set_flash[:notice]
- end
- end
- end
-
- context 'when requesting a redirected path' do
- let(:redirect_route) { user.namespace.redirect_routes.create(path: 'old-username') }
-
- it 'redirects to the canonical path' do
- get :calendar_activities, username: redirect_route.path
-
- expect(response).to redirect_to(user_calendar_activities_path(user))
- expect(controller).to set_flash[:notice].to(/moved/)
- end
- end
end
describe 'GET #snippets' do
@@ -241,38 +141,6 @@ describe UsersController do
expect(JSON.parse(response.body)).to have_key('html')
end
end
-
- context 'when requesting the canonical path' do
- let(:user) { create(:user, username: 'CamelCaseUser') }
-
- context 'with exactly matching casing' do
- it 'responds with success' do
- get :snippets, username: user.username
-
- expect(response).to be_success
- end
- end
-
- context 'with different casing' do
- it 'redirects to the correct casing' do
- get :snippets, username: user.username.downcase
-
- expect(response).to redirect_to(user_snippets_path(user))
- expect(controller).not_to set_flash[:notice]
- end
- end
- end
-
- context 'when requesting a redirected path' do
- let(:redirect_route) { user.namespace.redirect_routes.create(path: 'old-username') }
-
- it 'redirects to the canonical path' do
- get :snippets, username: redirect_route.path
-
- expect(response).to redirect_to(user_snippets_path(user))
- expect(controller).to set_flash[:notice].to(/moved/)
- end
- end
end
describe 'GET #exists' do
@@ -320,4 +188,129 @@ describe UsersController do
end
end
end
+
+ describe '#ensure_canonical_path' do
+ before do
+ sign_in(user)
+ end
+
+ context 'for a GET request' do
+ context 'when requesting users at the root path' do
+ context 'when requesting the canonical path' do
+ let(:user) { create(:user, username: 'CamelCaseUser') }
+
+ context 'with exactly matching casing' do
+ it 'responds with success' do
+ get :show, username: user.username
+
+ expect(response).to be_success
+ end
+ end
+
+ context 'with different casing' do
+ it 'redirects to the correct casing' do
+ get :show, username: user.username.downcase
+
+ expect(response).to redirect_to(user)
+ expect(controller).not_to set_flash[:notice]
+ end
+ end
+ end
+
+ context 'when requesting a redirected path' do
+ let(:redirect_route) { user.namespace.redirect_routes.create(path: 'old-path') }
+
+ it 'redirects to the canonical path' do
+ get :show, username: redirect_route.path
+
+ expect(response).to redirect_to(user)
+ expect(controller).to set_flash[:notice].to(user_moved_message(redirect_route, user))
+ end
+
+ context 'when the old path is a substring of the scheme or host' do
+ let(:redirect_route) { user.namespace.redirect_routes.create(path: 'http') }
+
+ it 'does not modify the requested host' do
+ get :show, username: redirect_route.path
+
+ expect(response).to redirect_to(user)
+ expect(controller).to set_flash[:notice].to(user_moved_message(redirect_route, user))
+ end
+ end
+
+ context 'when the old path is a substring of users' do
+ let(:redirect_route) { user.namespace.redirect_routes.create(path: 'ser') }
+
+ it 'redirects to the canonical path' do
+ get :show, username: redirect_route.path
+
+ expect(response).to redirect_to(user)
+ expect(controller).to set_flash[:notice].to(user_moved_message(redirect_route, user))
+ end
+ end
+ end
+ end
+
+ context 'when requesting users under the /users path' do
+ context 'when requesting the canonical path' do
+ let(:user) { create(:user, username: 'CamelCaseUser') }
+
+ context 'with exactly matching casing' do
+ it 'responds with success' do
+ get :projects, username: user.username
+
+ expect(response).to be_success
+ end
+ end
+
+ context 'with different casing' do
+ it 'redirects to the correct casing' do
+ get :projects, username: user.username.downcase
+
+ expect(response).to redirect_to(user_projects_path(user))
+ expect(controller).not_to set_flash[:notice]
+ end
+ end
+ end
+
+ context 'when requesting a redirected path' do
+ let(:redirect_route) { user.namespace.redirect_routes.create(path: 'old-path') }
+
+ it 'redirects to the canonical path' do
+ get :projects, username: redirect_route.path
+
+ expect(response).to redirect_to(user_projects_path(user))
+ expect(controller).to set_flash[:notice].to(user_moved_message(redirect_route, user))
+ end
+
+ context 'when the old path is a substring of the scheme or host' do
+ let(:redirect_route) { user.namespace.redirect_routes.create(path: 'http') }
+
+ it 'does not modify the requested host' do
+ get :projects, username: redirect_route.path
+
+ expect(response).to redirect_to(user_projects_path(user))
+ expect(controller).to set_flash[:notice].to(user_moved_message(redirect_route, user))
+ end
+ end
+
+ context 'when the old path is a substring of users' do
+ let(:redirect_route) { user.namespace.redirect_routes.create(path: 'ser') }
+
+ # I.e. /users/ser should not become /ufoos/ser
+ it 'does not modify the /users part of the path' do
+ get :projects, username: redirect_route.path
+
+ expect(response).to redirect_to(user_projects_path(user))
+ expect(controller).to set_flash[:notice].to(user_moved_message(redirect_route, user))
+ end
+ end
+ end
+ end
+ end
+ end
+
+ def user_moved_message(redirect_route, user)
+ "User '#{redirect_route.path}' was moved to '#{user.full_path}'. Please update any links and bookmarks that may still have the old path."
+ end
end
diff --git a/spec/factories/ci/trigger_schedules.rb b/spec/factories/ci/pipeline_schedule.rb
index 2390706fa41..a716da46ac6 100644
--- a/spec/factories/ci/trigger_schedules.rb
+++ b/spec/factories/ci/pipeline_schedule.rb
@@ -1,14 +1,11 @@
FactoryGirl.define do
- factory :ci_trigger_schedule, class: Ci::TriggerSchedule do
- trigger factory: :ci_trigger_for_trigger_schedule
+ factory :ci_pipeline_schedule, class: Ci::PipelineSchedule do
cron '0 1 * * *'
cron_timezone Gitlab::Ci::CronParser::VALID_SYNTAX_SAMPLE_TIME_ZONE
ref 'master'
active true
-
- after(:build) do |trigger_schedule, evaluator|
- trigger_schedule.project ||= trigger_schedule.trigger.project
- end
+ description "pipeline schedule"
+ project factory: :empty_project
trait :nightly do
cron '0 1 * * *'
@@ -24,5 +21,9 @@ FactoryGirl.define do
cron '0 1 22 * *'
cron_timezone Gitlab::Ci::CronParser::VALID_SYNTAX_SAMPLE_TIME_ZONE
end
+
+ trait :inactive do
+ active false
+ end
end
end
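For context, the renamed :ci_pipeline_schedule factory above is what the new pipeline_schedules_controller_spec.rb earlier in this diff relies on. A hypothetical usage sketch (variable names are illustrative, not part of this patch):

# Hypothetical usage of the renamed factory; the traits come from the
# definition above (:nightly sets a nightly cron, :inactive flips `active`).
project  = create(:empty_project)
nightly  = create(:ci_pipeline_schedule, :nightly, project: project)
paused   = create(:ci_pipeline_schedule, :inactive, project: project)

paused.active? # => false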
diff --git a/spec/factories/ci/variables.rb b/spec/factories/ci/variables.rb
index 6653f0bb5c3..c5fba597c1c 100644
--- a/spec/factories/ci/variables.rb
+++ b/spec/factories/ci/variables.rb
@@ -2,5 +2,7 @@ FactoryGirl.define do
factory :ci_variable, class: Ci::Variable do
sequence(:key) { |n| "VARIABLE_#{n}" }
value 'VARIABLE_VALUE'
+
+ project factory: :empty_project
end
end
diff --git a/spec/factories/group_members.rb b/spec/factories/group_members.rb
index 080b2e75ea1..32cbfe28a60 100644
--- a/spec/factories/group_members.rb
+++ b/spec/factories/group_members.rb
@@ -10,5 +10,11 @@ FactoryGirl.define do
trait(:master) { access_level GroupMember::MASTER }
trait(:owner) { access_level GroupMember::OWNER }
trait(:access_request) { requested_at Time.now }
+
+ trait(:invited) do
+ user_id nil
+ invite_token 'xxx'
+ invite_email 'email@email.com'
+ end
end
end
diff --git a/spec/factories/project_hooks.rb b/spec/factories/project_hooks.rb
index 0210e871a63..cd754ea235f 100644
--- a/spec/factories/project_hooks.rb
+++ b/spec/factories/project_hooks.rb
@@ -14,7 +14,7 @@ FactoryGirl.define do
issues_events true
confidential_issues_events true
note_events true
- build_events true
+ job_events true
pipeline_events true
wiki_page_events true
end
diff --git a/spec/factories/project_members.rb b/spec/factories/project_members.rb
index d62799a5a47..fe4518caadf 100644
--- a/spec/factories/project_members.rb
+++ b/spec/factories/project_members.rb
@@ -9,5 +9,11 @@ FactoryGirl.define do
trait(:developer) { access_level ProjectMember::DEVELOPER }
trait(:master) { access_level ProjectMember::MASTER }
trait(:access_request) { requested_at Time.now }
+
+ trait(:invited) do
+ user_id nil
+ invite_token 'xxx'
+ invite_email 'email@email.com'
+ end
end
end
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index 3580752a805..7a76f5f8afc 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -60,7 +60,9 @@ FactoryGirl.define do
trait :test_repo do
after :create do |project|
- TestEnv.copy_repo(project)
+ TestEnv.copy_repo(project,
+ bare_repo: TestEnv.factory_repo_path_bare,
+ refs: TestEnv::BRANCH_SHA)
end
end
@@ -139,7 +141,9 @@ FactoryGirl.define do
end
after :create do |project, evaluator|
- TestEnv.copy_repo(project)
+ TestEnv.copy_repo(project,
+ bare_repo: TestEnv.factory_repo_path_bare,
+ refs: TestEnv::BRANCH_SHA)
if evaluator.create_template
args = evaluator.create_template
@@ -172,7 +176,9 @@ FactoryGirl.define do
path { 'forked-gitlabhq' }
after :create do |project|
- TestEnv.copy_forked_repo_with_submodules(project)
+ TestEnv.copy_repo(project,
+ bare_repo: TestEnv.forked_repo_path_bare,
+ refs: TestEnv::FORKED_BRANCH_SHA)
end
end
diff --git a/spec/factories/services.rb b/spec/factories/services.rb
index 62aa71ae8d8..28ddd0da753 100644
--- a/spec/factories/services.rb
+++ b/spec/factories/services.rb
@@ -22,7 +22,7 @@ FactoryGirl.define do
properties({
namespace: 'somepath',
api_url: 'https://kubernetes.example.com',
- token: 'a' * 40,
+ token: 'a' * 40
})
end
diff --git a/spec/features/admin/admin_uses_repository_checks_spec.rb b/spec/features/admin/admin_uses_repository_checks_spec.rb
index 855247de2ea..ab5c42365fe 100644
--- a/spec/features/admin/admin_uses_repository_checks_spec.rb
+++ b/spec/features/admin/admin_uses_repository_checks_spec.rb
@@ -23,7 +23,7 @@ feature 'Admin uses repository checks', feature: true do
project = create(:empty_project)
project.update_columns(
last_repository_check_failed: true,
- last_repository_check_at: Time.now,
+ last_repository_check_at: Time.now
)
visit_admin_project_page(project)
diff --git a/spec/features/auto_deploy_spec.rb b/spec/features/auto_deploy_spec.rb
index 67b0f006854..6c7423e4922 100644
--- a/spec/features/auto_deploy_spec.rb
+++ b/spec/features/auto_deploy_spec.rb
@@ -5,14 +5,7 @@ describe 'Auto deploy' do
let(:project) { create(:project, :repository) }
before do
- project.create_kubernetes_service(
- active: true,
- properties: {
- namespace: project.path,
- api_url: 'https://kubernetes.example.com',
- token: 'a' * 40,
- }
- )
+ create :kubernetes_service, project: project
project.team << [user, :master]
login_as user
end
diff --git a/spec/features/boards/issue_ordering_spec.rb b/spec/features/boards/issue_ordering_spec.rb
index c50155a6d14..bfa2a72a256 100644
--- a/spec/features/boards/issue_ordering_spec.rb
+++ b/spec/features/boards/issue_ordering_spec.rb
@@ -38,6 +38,8 @@ describe 'Issue Boards', :feature, :js do
it 'moves un-ordered issue to top of list' do
drag(from_index: 3, to_index: 0)
+ wait_for_vue_resource
+
page.within(first('.board')) do
expect(first('.card')).to have_content(issue4.title)
end
diff --git a/spec/features/boards/sidebar_spec.rb b/spec/features/boards/sidebar_spec.rb
index 7c53d2b47d9..4667be49fe6 100644
--- a/spec/features/boards/sidebar_spec.rb
+++ b/spec/features/boards/sidebar_spec.rb
@@ -115,7 +115,6 @@ describe 'Issue Boards', feature: true, js: true do
click_link 'Unassigned'
end
- find('.dropdown-menu-toggle').click
wait_for_vue_resource
expect(page).to have_content('No assignee')
@@ -158,13 +157,13 @@ describe 'Issue Boards', feature: true, js: true do
end
page.within(first('.board')) do
- find('.card:nth-child(2)').click
+ find('.card:nth-child(2)').trigger('click')
end
page.within('.assignee') do
click_link 'Edit'
-
- expect(page).to have_selector('.is-active')
+
+ expect(find('.dropdown-menu')).to have_selector('.is-active')
end
end
end
diff --git a/spec/features/boards/sub_group_project_spec.rb b/spec/features/boards/sub_group_project_spec.rb
new file mode 100644
index 00000000000..6cd7fddd288
--- /dev/null
+++ b/spec/features/boards/sub_group_project_spec.rb
@@ -0,0 +1,45 @@
+require 'rails_helper'
+
+describe 'Sub-group project issue boards', :feature, :js do
+ include WaitForVueResource
+
+ let(:group) { create(:group) }
+ let(:nested_group_1) { create(:group, parent: group) }
+ let(:project) { create(:empty_project, group: nested_group_1) }
+ let(:board) { create(:board, project: project) }
+ let(:label) { create(:label, project: project) }
+ let(:user) { create(:user) }
+ let!(:list1) { create(:list, board: board, label: label, position: 0) }
+ let!(:issue) { create(:labeled_issue, project: project, labels: [label]) }
+
+ before do
+ project.add_master(user)
+
+ login_as(user)
+
+ visit namespace_project_board_path(project.namespace, project, board)
+ wait_for_vue_resource
+ end
+
+ it 'creates a new label from the sidebar' do
+ find('.card').click
+
+ page.within '.labels' do
+ click_link 'Edit'
+ click_link 'Create new label'
+ end
+
+ page.within '.dropdown-new-label' do
+ fill_in 'new_label_name', with: 'test label'
+ first('.suggest-colors-dropdown a').click
+
+ click_button 'Create'
+
+ wait_for_ajax
+ end
+
+ page.within '.labels' do
+ expect(page).to have_link 'test label'
+ end
+ end
+end
diff --git a/spec/features/copy_as_gfm_spec.rb b/spec/features/copy_as_gfm_spec.rb
index f197fb44608..be615519a09 100644
--- a/spec/features/copy_as_gfm_spec.rb
+++ b/spec/features/copy_as_gfm_spec.rb
@@ -96,7 +96,7 @@ describe 'Copy as GFM', feature: true, js: true do
# issue link
"[Issue](#{namespace_project_issue_url(@project.namespace, @project, @feat.issue)})",
# issue link with note anchor
- "[Issue](#{namespace_project_issue_url(@project.namespace, @project, @feat.issue, anchor: 'note_123')})",
+ "[Issue](#{namespace_project_issue_url(@project.namespace, @project, @feat.issue, anchor: 'note_123')})"
)
verify(
diff --git a/spec/features/cycle_analytics_spec.rb b/spec/features/cycle_analytics_spec.rb
index 7c9d522273b..cbeb73d9cae 100644
--- a/spec/features/cycle_analytics_spec.rb
+++ b/spec/features/cycle_analytics_spec.rb
@@ -6,14 +6,16 @@ feature 'Cycle Analytics', feature: true, js: true do
let(:project) { create(:project, :repository) }
let(:issue) { create(:issue, project: project, created_at: 2.days.ago) }
let(:milestone) { create(:milestone, project: project) }
- let(:mr) { create_merge_request_closing_issue(issue) }
+ let(:mr) { create_merge_request_closing_issue(issue, commit_message: "References #{issue.to_reference}") }
let(:pipeline) { create(:ci_empty_pipeline, status: 'created', project: project, ref: mr.source_branch, sha: mr.source_branch_sha) }
context 'as an allowed user' do
context 'when project is new' do
before do
- project.team << [user, :master]
+ project.add_master(user)
+
login_as(user)
+
visit namespace_project_cycle_analytics_path(project.namespace, project)
wait_for_ajax
end
@@ -30,9 +32,10 @@ feature 'Cycle Analytics', feature: true, js: true do
context "when there's cycle analytics data" do
before do
- project.team << [user, :master]
-
allow_any_instance_of(Gitlab::ReferenceExtractor).to receive(:issues).and_return([issue])
+ mr.update(head_pipeline: pipeline)
+ project.add_master(user)
+
create_cycle
deploy_master
@@ -85,7 +88,7 @@ feature 'Cycle Analytics', feature: true, js: true do
context "as a guest" do
before do
- project.team << [guest, :guest]
+ project.add_guest(guest)
allow_any_instance_of(Gitlab::ReferenceExtractor).to receive(:issues).and_return([issue])
create_cycle
diff --git a/spec/features/dashboard/group_spec.rb b/spec/features/dashboard/group_spec.rb
index 1d4b86ed4b4..8e20fdec8ad 100644
--- a/spec/features/dashboard/group_spec.rb
+++ b/spec/features/dashboard/group_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe 'Dashboard Group', feature: true do
it 'creates new group', js: true do
visit dashboard_groups_path
- click_link 'New group'
+ find('.btn-new').trigger('click')
new_path = 'Samurai'
new_description = 'Tokugawa Shogunate'
diff --git a/spec/features/dashboard/issuables_counter_spec.rb b/spec/features/dashboard/issuables_counter_spec.rb
index 6f7bf0eba6e..354267dbee7 100644
--- a/spec/features/dashboard/issuables_counter_spec.rb
+++ b/spec/features/dashboard/issuables_counter_spec.rb
@@ -19,7 +19,7 @@ describe 'Navigation bar counter', feature: true, caching: true do
issue.assignees = []
- user.update_cache_counts
+ user.invalidate_cache_counts
Timecop.travel(3.minutes.from_now) do
visit issues_path
@@ -35,6 +35,8 @@ describe 'Navigation bar counter', feature: true, caching: true do
merge_request.update(assignee: nil)
+ user.invalidate_cache_counts
+
Timecop.travel(3.minutes.from_now) do
visit merge_requests_path
diff --git a/spec/features/dashboard/issues_spec.rb b/spec/features/dashboard/issues_spec.rb
index 86c7954e60c..7a132dba1e9 100644
--- a/spec/features/dashboard/issues_spec.rb
+++ b/spec/features/dashboard/issues_spec.rb
@@ -26,9 +26,20 @@ RSpec.describe 'Dashboard Issues', feature: true do
expect(page).not_to have_content(other_issue.title)
end
+ it 'shows a checkmark when Unassigned is selected as the assignee', js: true do
+ find('.js-assignee-search').click
+ find('li', text: 'Unassigned').click
+ find('.js-assignee-search').click
+
+ expect(find('li[data-user-id="0"] a.is-active')).to be_visible
+ end
+
it 'shows issues when current user is author', js: true do
find('#assignee_id', visible: false).set('')
find('.js-author-search', match: :first).click
+
+ expect(find('li[data-user-id="null"] a.is-active')).to be_visible
+
find('.dropdown-menu-author li a', match: :first, text: current_user.to_reference).click
find('.js-author-search', match: :first).click
diff --git a/spec/features/dashboard/milestone_filter_spec.rb b/spec/features/dashboard/milestone_filter_spec.rb
new file mode 100644
index 00000000000..d60a002a8d7
--- /dev/null
+++ b/spec/features/dashboard/milestone_filter_spec.rb
@@ -0,0 +1,60 @@
+require 'spec_helper'
+
+describe 'Dashboard > milestone filter', :feature, :js do
+ include WaitForAjax
+
+ let(:user) { create(:user) }
+ let(:project) { create(:project, name: 'test', namespace: user.namespace) }
+ let(:milestone) { create(:milestone, title: "v1.0", project: project) }
+ let(:milestone2) { create(:milestone, title: "v2.0", project: project) }
+ let!(:issue) { create :issue, author: user, project: project, milestone: milestone }
+ let!(:issue2) { create :issue, author: user, project: project, milestone: milestone2 }
+
+ before do
+ login_as(user)
+ visit issues_dashboard_path(author_id: user.id)
+ end
+
+ context 'default state' do
+ it 'shows issues with Any Milestone' do
+ page.all('.issue-info').each do |issue_info|
+ expect(issue_info.text).to match(/v\d\.0/)
+ end
+ end
+ end
+
+ context 'filtering by milestone' do
+ milestone_select = '.js-milestone-select'
+
+ before do
+ find(milestone_select).click
+ wait_for_ajax
+
+ page.within('.dropdown-content') do
+ click_link 'v1.0'
+ end
+
+ find(milestone_select).click
+ wait_for_ajax
+ end
+
+ it 'shows issues with Milestone v1.0' do
+ expect(find('.issues-list')).to have_selector('.issue', count: 1)
+ expect(find('.dropdown-content')).to have_selector('a.is-active', count: 1)
+ end
+
+ it 'does not change the active milestone unless clicked' do
+ expect(find('.dropdown-content')).to have_selector('a.is-active', count: 1)
+
+ # open & close dropdown
+ find('.dropdown-menu-close').click
+
+ expect(find('.milestone-filter')).not_to have_selector('.dropdown.open')
+
+ find(milestone_select).click
+
+ expect(find('.dropdown-content')).to have_selector('a.is-active', count: 1)
+ expect(find('.dropdown-content a.is-active')).to have_content('v1.0')
+ end
+ end
+end
diff --git a/spec/features/dashboard/shortcuts_spec.rb b/spec/features/dashboard/shortcuts_spec.rb
index 4c9adcabe34..349b948eaee 100644
--- a/spec/features/dashboard/shortcuts_spec.rb
+++ b/spec/features/dashboard/shortcuts_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Dashboard shortcuts', feature: true, js: true do
+feature 'Dashboard shortcuts', :feature, :js do
context 'logged in' do
before do
login_as :user
@@ -8,21 +8,21 @@ feature 'Dashboard shortcuts', feature: true, js: true do
end
scenario 'Navigate to tabs' do
- find('body').native.send_keys([:shift, 'P'])
-
- check_page_title('Projects')
-
- find('body').native.send_key([:shift, 'I'])
+ find('body').send_keys([:shift, 'I'])
check_page_title('Issues')
- find('body').native.send_key([:shift, 'M'])
+ find('body').send_keys([:shift, 'M'])
check_page_title('Merge Requests')
- find('body').native.send_keys([:shift, 'T'])
+ find('body').send_keys([:shift, 'T'])
check_page_title('Todos')
+
+ find('body').send_keys([:shift, 'P'])
+
+ check_page_title('Projects')
end
end
@@ -32,17 +32,20 @@ feature 'Dashboard shortcuts', feature: true, js: true do
end
scenario 'Navigate to tabs' do
- find('body').native.send_keys([:shift, 'P'])
-
- expect(page).to have_content('No projects found')
-
- find('body').native.send_keys([:shift, 'G'])
+ find('body').send_keys([:shift, 'G'])
+ find('.nothing-here-block')
expect(page).to have_content('No public groups')
- find('body').native.send_keys([:shift, 'S'])
+ find('body').send_keys([:shift, 'S'])
+ find('.nothing-here-block')
expect(page).to have_selector('.snippets-list-holder')
+
+ find('body').send_keys([:shift, 'P'])
+
+ find('.nothing-here-block')
+ expect(page).to have_content('No projects found')
end
end
diff --git a/spec/features/dashboard/snippets_spec.rb b/spec/features/dashboard/snippets_spec.rb
index 62937688c22..c6ba118220a 100644
--- a/spec/features/dashboard/snippets_spec.rb
+++ b/spec/features/dashboard/snippets_spec.rb
@@ -12,4 +12,51 @@ describe 'Dashboard snippets', feature: true do
it_behaves_like 'paginated snippets'
end
+
+ context 'filtering by visibility' do
+ let(:user) { create(:user) }
+ let!(:snippets) do
+ [
+ create(:personal_snippet, :public, author: user),
+ create(:personal_snippet, :internal, author: user),
+ create(:personal_snippet, :private, author: user),
+ create(:personal_snippet, :public)
+ ]
+ end
+
+ before do
+ login_as(user)
+
+ visit dashboard_snippets_path
+ end
+
+ it 'contains all snippets of the logged-in user' do
+ expect(page).to have_selector('.snippet-row', count: 3)
+
+ expect(page).to have_content(snippets[0].title)
+ expect(page).to have_content(snippets[1].title)
+ expect(page).to have_content(snippets[2].title)
+ end
+
+ it 'contains all private snippets of the logged-in user when clicking on Private' do
+ click_link('Private')
+
+ expect(page).to have_selector('.snippet-row', count: 1)
+ expect(page).to have_content(snippets[2].title)
+ end
+
+ it 'contains all internal snippets of the logged-in user when clicking on Internal' do
+ click_link('Internal')
+
+ expect(page).to have_selector('.snippet-row', count: 1)
+ expect(page).to have_content(snippets[1].title)
+ end
+
+ it 'contains all public snippets of the logged-in user when clicking on Public' do
+ click_link('Public')
+
+ expect(page).to have_selector('.snippet-row', count: 1)
+ expect(page).to have_content(snippets[0].title)
+ end
+ end
end
diff --git a/spec/features/groups/members/sorting_spec.rb b/spec/features/groups/members/sorting_spec.rb
index 608aedd3471..902d3f789ff 100644
--- a/spec/features/groups/members/sorting_spec.rb
+++ b/spec/features/groups/members/sorting_spec.rb
@@ -68,7 +68,7 @@ feature 'Groups > Members > Sorting', feature: true do
expect(page).to have_css('.member-sort-dropdown .dropdown-toggle-text', text: 'Name, descending')
end
- scenario 'sorts by recent sign in' do
+ scenario 'sorts by recent sign in', :redis do
visit_members_list(sort: :recent_sign_in)
expect(first_member).to include(owner.name)
@@ -76,7 +76,7 @@ feature 'Groups > Members > Sorting', feature: true do
expect(page).to have_css('.member-sort-dropdown .dropdown-toggle-text', text: 'Recent sign in')
end
- scenario 'sorts by oldest sign in' do
+ scenario 'sorts by oldest sign in', :redis do
visit_members_list(sort: :oldest_sign_in)
expect(first_member).to include(developer.name)
diff --git a/spec/features/issuables/issuable_list_spec.rb b/spec/features/issuables/issuable_list_spec.rb
index f3ec80bb149..414838fa22e 100644
--- a/spec/features/issuables/issuable_list_spec.rb
+++ b/spec/features/issuables/issuable_list_spec.rb
@@ -52,6 +52,9 @@ describe 'issuable list', feature: true do
create(:issue, project: project, author: user)
else
create(:merge_request, source_project: project, source_branch: generate(:branch))
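+ # Also create a merge request with a head pipeline (random status) so the list renders pipeline indicators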
+ source_branch = FFaker::Name.name
+ pipeline = create(:ci_empty_pipeline, project: project, ref: source_branch, status: %w(running failed success).sample, sha: 'any')
+ create(:merge_request, title: FFaker::Lorem.sentence, source_project: project, source_branch: source_branch, head_pipeline: pipeline)
end
2.times do
diff --git a/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb b/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb
index 58f897cba3e..24e2419b5ce 100644
--- a/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb
+++ b/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb
@@ -14,7 +14,7 @@ feature 'Resolving all open discussions in a merge request from an issue', featu
end
it 'shows a button to resolve all discussions by creating a new issue' do
- within('li#resolve-count-app') do
+ within('#resolve-count-app') do
expect(page).to have_link "Resolve all discussions in new issue", href: new_namespace_project_issue_path(project.namespace, project, merge_request_to_resolve_discussions_of: merge_request.iid)
end
end
@@ -49,7 +49,7 @@ feature 'Resolving all open discussions in a merge request from an issue', featu
end
it 'does not show a link to create a new issue' do
- expect(page).not_to have_link 'open an issue to resolve them later'
+ expect(page).not_to have_link 'Create an issue to resolve them later'
end
end
@@ -59,18 +59,18 @@ feature 'Resolving all open discussions in a merge request from an issue', featu
end
it 'shows a warning that the merge request contains unresolved discussions' do
- expect(page).to have_content 'This merge request has unresolved discussions'
+ expect(page).to have_content 'There are unresolved discussions.'
end
it 'has a link to resolve all discussions by creating an issue' do
page.within '.mr-widget-body' do
- expect(page).to have_link 'open an issue to resolve them later', href: new_namespace_project_issue_path(project.namespace, project, merge_request_to_resolve_discussions_of: merge_request.iid)
+ expect(page).to have_link 'Create an issue to resolve them later', href: new_namespace_project_issue_path(project.namespace, project, merge_request_to_resolve_discussions_of: merge_request.iid)
end
end
context 'creating an issue for discussions' do
before do
- page.click_link 'open an issue to resolve them later', href: new_namespace_project_issue_path(project.namespace, project, merge_request_to_resolve_discussions_of: merge_request.iid)
+ page.click_link 'Create an issue to resolve them later', href: new_namespace_project_issue_path(project.namespace, project, merge_request_to_resolve_discussions_of: merge_request.iid)
end
it_behaves_like 'creating an issue for a discussion'
diff --git a/spec/features/issues/filtered_search/recent_searches_spec.rb b/spec/features/issues/filtered_search/recent_searches_spec.rb
index 08fe3b4553b..09f228bcf49 100644
--- a/spec/features/issues/filtered_search/recent_searches_spec.rb
+++ b/spec/features/issues/filtered_search/recent_searches_spec.rb
@@ -3,17 +3,17 @@ require 'spec_helper'
describe 'Recent searches', js: true, feature: true do
include FilteredSearchHelpers
- let!(:group) { create(:group) }
- let!(:project) { create(:project, group: group) }
- let!(:user) { create(:user) }
+ let(:project_1) { create(:empty_project, :public) }
+ let(:project_2) { create(:empty_project, :public) }
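+ # Recent searches are persisted in localStorage under a per-project key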
+ let(:project_1_local_storage_key) { "#{project_1.full_path}-issue-recent-searches" }
before do
Capybara.ignore_hidden_elements = false
- project.add_master(user)
- group.add_developer(user)
- create(:issue, project: project)
- login_as(user)
+ create(:issue, project: project_1)
+ create(:issue, project: project_2)
+ # Visit any fast-loading page so we can clear local storage without a DOM exception
+ visit '/404'
remove_recent_searches
end
@@ -22,7 +22,7 @@ describe 'Recent searches', js: true, feature: true do
end
it 'searching adds to recent searches' do
- visit namespace_project_issues_path(project.namespace, project)
+ visit namespace_project_issues_path(project_1.namespace, project_1)
input_filtered_search('foo', submit: true)
input_filtered_search('bar', submit: true)
@@ -35,8 +35,8 @@ describe 'Recent searches', js: true, feature: true do
end
it 'visiting URL with search params adds to recent searches' do
- visit namespace_project_issues_path(project.namespace, project, label_name: 'foo', search: 'bar')
- visit namespace_project_issues_path(project.namespace, project, label_name: 'qux', search: 'garply')
+ visit namespace_project_issues_path(project_1.namespace, project_1, label_name: 'foo', search: 'bar')
+ visit namespace_project_issues_path(project_1.namespace, project_1, label_name: 'qux', search: 'garply')
items = all('.filtered-search-history-dropdown-item', visible: false)
@@ -46,9 +46,9 @@ describe 'Recent searches', js: true, feature: true do
end
it 'saved recent searches are restored last on the list' do
- set_recent_searches('["saved1", "saved2"]')
+ set_recent_searches(project_1_local_storage_key, '["saved1", "saved2"]')
- visit namespace_project_issues_path(project.namespace, project, search: 'foo')
+ visit namespace_project_issues_path(project_1.namespace, project_1, search: 'foo')
items = all('.filtered-search-history-dropdown-item', visible: false)
@@ -58,9 +58,27 @@ describe 'Recent searches', js: true, feature: true do
expect(items[2].text).to eq('saved2')
end
+ it 'searches are scoped to projects' do
+ visit namespace_project_issues_path(project_1.namespace, project_1)
+
+ input_filtered_search('foo', submit: true)
+ input_filtered_search('bar', submit: true)
+
+ visit namespace_project_issues_path(project_2.namespace, project_2)
+
+ input_filtered_search('more', submit: true)
+ input_filtered_search('things', submit: true)
+
+ items = all('.filtered-search-history-dropdown-item', visible: false)
+
+ expect(items.count).to eq(2)
+ expect(items[0].text).to eq('things')
+ expect(items[1].text).to eq('more')
+ end
+
it 'clicking item fills search input' do
- set_recent_searches('["foo", "bar"]')
- visit namespace_project_issues_path(project.namespace, project)
+ set_recent_searches(project_1_local_storage_key, '["foo", "bar"]')
+ visit namespace_project_issues_path(project_1.namespace, project_1)
all('.filtered-search-history-dropdown-item', visible: false)[0].trigger('click')
wait_for_filtered_search('foo')
@@ -69,8 +87,8 @@ describe 'Recent searches', js: true, feature: true do
end
it 'clear recent searches button, clears recent searches' do
- set_recent_searches('["foo"]')
- visit namespace_project_issues_path(project.namespace, project)
+ set_recent_searches(project_1_local_storage_key, '["foo"]')
+ visit namespace_project_issues_path(project_1.namespace, project_1)
items_before = all('.filtered-search-history-dropdown-item', visible: false)
@@ -83,8 +101,8 @@ describe 'Recent searches', js: true, feature: true do
end
it 'shows flash error when failed to parse saved history' do
- set_recent_searches('fail')
- visit namespace_project_issues_path(project.namespace, project)
+ set_recent_searches(project_1_local_storage_key, 'fail')
+ visit namespace_project_issues_path(project_1.namespace, project_1)
expect(find('.flash-alert')).to have_text('An error occured while parsing recent searches')
end
diff --git a/spec/features/issues/form_spec.rb b/spec/features/issues/form_spec.rb
index 87adce3cddd..5c0907e26df 100644
--- a/spec/features/issues/form_spec.rb
+++ b/spec/features/issues/form_spec.rb
@@ -1,8 +1,9 @@
require 'rails_helper'
-describe 'New/edit issue', feature: true, js: true do
+describe 'New/edit issue', :feature, :js do
include GitlabRoutingHelper
include ActionView::Helpers::JavaScriptHelper
+ include WaitForAjax
let!(:project) { create(:project) }
let!(:user) { create(:user)}
@@ -23,9 +24,11 @@ describe 'New/edit issue', feature: true, js: true do
visit new_namespace_project_issue_path(project.namespace, project)
end
- describe 'multiple assignees' do
+ describe 'single assignee' do
before do
click_button 'Unassigned'
+
+ wait_for_ajax
end
it 'unselects other assignees when unassigned is selected' do
@@ -33,14 +36,12 @@ describe 'New/edit issue', feature: true, js: true do
click_link user2.name
end
+ click_button user2.name
+
page.within '.dropdown-menu-user' do
click_link 'Unassigned'
end
- page.within '.js-assignee-search' do
- expect(page).to have_content 'Unassigned'
- end
-
expect(find('input[name="issue[assignee_ids][]"]', visible: false).value).to match('0')
end
@@ -51,11 +52,13 @@ describe 'New/edit issue', feature: true, js: true do
expect(find('a', text: 'Assign to me', visible: false)).not_to be_visible
- page.within '.dropdown-menu-user' do
+ click_button user.name
+
+ page.within('.dropdown-menu-user') do
click_link user.name
end
- expect(find('a', text: 'Assign to me')).to be_visible
+ expect(page.find('.dropdown-menu-user', visible: false)).not_to be_visible
end
end
@@ -65,6 +68,9 @@ describe 'New/edit issue', feature: true, js: true do
expect(find('a', text: 'Assign to me')).to be_visible
click_button 'Unassigned'
+
+ wait_for_ajax
+
page.within '.dropdown-menu-user' do
click_link user2.name
end
@@ -148,29 +154,21 @@ describe 'New/edit issue', feature: true, js: true do
it 'correctly updates the selected user when changing assignee' do
click_button 'Unassigned'
+
+ wait_for_ajax
+
page.within '.dropdown-menu-user' do
click_link user.name
end
- expect(find('input[name="issue[assignee_ids][]"]', visible: false).value).to match(user.id.to_s)
-
+ expect(find('.js-assignee-search')).to have_content(user.name)
click_button user.name
- expect(find('.dropdown-menu-user a.is-active').first(:xpath, '..')['data-user-id']).to eq(user.id.to_s)
-
- # check the ::before pseudo element to ensure checkmark icon is present
- expect(before_for_selector('.dropdown-menu-selectable a.is-active')).not_to eq('')
- expect(before_for_selector('.dropdown-menu-selectable a:not(.is-active)')).to eq('')
-
page.within '.dropdown-menu-user' do
click_link user2.name
end
- expect(find('input[name="issue[assignee_ids][]"]', visible: false).value).to match(user2.id.to_s)
-
- click_button user2.name
-
- expect(find('.dropdown-menu-user a.is-active').first(:xpath, '..')['data-user-id']).to eq(user2.id.to_s)
+ expect(find('.js-assignee-search')).to have_content(user2.name)
end
end
diff --git a/spec/features/issues/note_polling_spec.rb b/spec/features/issues/note_polling_spec.rb
index 58b3215f14c..80f57906506 100644
--- a/spec/features/issues/note_polling_spec.rb
+++ b/spec/features/issues/note_polling_spec.rb
@@ -18,58 +18,109 @@ feature 'Issue notes polling', :feature, :js do
end
describe 'updates' do
- let(:user) { create(:user) }
- let(:note_text) { "Hello World" }
- let(:updated_text) { "Bye World" }
- let!(:existing_note) { create(:note, noteable: issue, project: project, author: user, note: note_text) }
+ context 'when from own user' do
+ let(:user) { create(:user) }
+ let(:note_text) { "Hello World" }
+ let(:updated_text) { "Bye World" }
+ let!(:existing_note) { create(:note, noteable: issue, project: project, author: user, note: note_text) }
- before do
- login_as(user)
- visit namespace_project_issue_path(project.namespace, project, issue)
- end
+ before do
+ login_as(user)
+ visit namespace_project_issue_path(project.namespace, project, issue)
+ end
- it 'displays the updated content' do
- expect(page).to have_selector("#note_#{existing_note.id}", text: note_text)
+ it 'has .original-note-content to compare against' do
+ expect(page).to have_selector("#note_#{existing_note.id}", text: note_text)
+ expect(page).to have_selector("#note_#{existing_note.id} .original-note-content", count: 1, visible: false)
- update_note(existing_note, updated_text)
+ update_note(existing_note, updated_text)
- expect(page).to have_selector("#note_#{existing_note.id}", text: updated_text)
- end
+ expect(page).to have_selector("#note_#{existing_note.id}", text: updated_text)
+ expect(page).to have_selector("#note_#{existing_note.id} .original-note-content", count: 1, visible: false)
+ end
- it 'when editing but have not changed anything, and an update comes in, show the updated content in the textarea' do
- find("#note_#{existing_note.id} .js-note-edit").click
+ it 'displays the updated content' do
+ expect(page).to have_selector("#note_#{existing_note.id}", text: note_text)
- expect(page).to have_field("note[note]", with: note_text)
+ update_note(existing_note, updated_text)
- update_note(existing_note, updated_text)
+ expect(page).to have_selector("#note_#{existing_note.id}", text: updated_text)
+ end
- expect(page).to have_field("note[note]", with: updated_text)
- end
+ it 'when editing but have not changed anything, and an update comes in, show the updated content in the textarea' do
+ find("#note_#{existing_note.id} .js-note-edit").click
+
+ expect(page).to have_field("note[note]", with: note_text)
+
+ update_note(existing_note, updated_text)
+
+ expect(page).to have_field("note[note]", with: updated_text)
+ end
+
+ it 'when editing but you changed some things, and an update comes in, show a warning' do
+ find("#note_#{existing_note.id} .js-note-edit").click
- it 'when editing but you changed some things, and an update comes in, show a warning' do
- find("#note_#{existing_note.id} .js-note-edit").click
+ expect(page).to have_field("note[note]", with: note_text)
- expect(page).to have_field("note[note]", with: note_text)
+ find("#note_#{existing_note.id} .js-note-text").set('something random')
- find("#note_#{existing_note.id} .js-note-text").set('something random')
+ update_note(existing_note, updated_text)
- update_note(existing_note, updated_text)
+ expect(page).to have_selector(".alert")
+ end
- expect(page).to have_selector(".alert")
+ it 'when editing but you changed some things, an update comes in, and you press cancel, show the updated content' do
+ find("#note_#{existing_note.id} .js-note-edit").click
+
+ expect(page).to have_field("note[note]", with: note_text)
+
+ find("#note_#{existing_note.id} .js-note-text").set('something random')
+
+ update_note(existing_note, updated_text)
+
+ find("#note_#{existing_note.id} .note-edit-cancel").click
+
+ expect(page).to have_selector("#note_#{existing_note.id}", text: updated_text)
+ end
end
- it 'when editing but you changed some things, an update comes in, and you press cancel, show the updated content' do
- find("#note_#{existing_note.id} .js-note-edit").click
+ context 'when from another user' do
+ let(:user1) { create(:user) }
+ let(:user2) { create(:user) }
+ let(:note_text) { "Hello World" }
+ let(:updated_text) { "Bye World" }
+ let!(:existing_note) { create(:note, noteable: issue, project: project, author: user1, note: note_text) }
+
+ before do
+ login_as(user2)
+ visit namespace_project_issue_path(project.namespace, project, issue)
+ end
- expect(page).to have_field("note[note]", with: note_text)
+ it 'has .original-note-content to compare against' do
+ expect(page).to have_selector("#note_#{existing_note.id}", text: note_text)
+ expect(page).to have_selector("#note_#{existing_note.id} .original-note-content", count: 1, visible: false)
- find("#note_#{existing_note.id} .js-note-text").set('something random')
+ update_note(existing_note, updated_text)
+
+ expect(page).to have_selector("#note_#{existing_note.id}", text: updated_text)
+ expect(page).to have_selector("#note_#{existing_note.id} .original-note-content", count: 1, visible: false)
+ end
+ end
- update_note(existing_note, updated_text)
+ context 'system notes' do
+ let(:user) { create(:user) }
+ let(:note_text) { "Some system note" }
+ let!(:system_note) { create(:system_note, noteable: issue, project: project, author: user, note: note_text) }
- find("#note_#{existing_note.id} .note-edit-cancel").click
+ before do
+ login_as(user)
+ visit namespace_project_issue_path(project.namespace, project, issue)
+ end
- expect(page).to have_selector("#note_#{existing_note.id}", text: updated_text)
+ it 'has .original-note-content to compare against' do
+ expect(page).to have_selector("#note_#{system_note.id}", text: note_text)
+ expect(page).to have_selector("#note_#{system_note.id} .original-note-content", count: 1, visible: false)
+ end
end
end
diff --git a/spec/features/issues/notes_on_issues_spec.rb b/spec/features/issues/notes_on_issues_spec.rb
new file mode 100644
index 00000000000..a4035324d2b
--- /dev/null
+++ b/spec/features/issues/notes_on_issues_spec.rb
@@ -0,0 +1,77 @@
+require 'spec_helper'
+
+describe 'Create notes on issues', :js, :feature do
+ let(:user) { create(:user) }
+
+ shared_examples 'notes with reference' do
+ let(:issue) { create(:issue, project: project) }
+ let(:note_text) { "Check #{mention.to_reference}" }
+
+ before do
+ project.team << [user, :developer]
+ login_as(user)
+ visit namespace_project_issue_path(project.namespace, project, issue)
+
+ fill_in 'note[note]', with: note_text
+ click_button 'Comment'
+
+ wait_for_ajax
+ end
+
+ it 'creates a note with reference and cross references the issue' do
+ page.within('div#notes li.note div.note-text') do
+ expect(page).to have_content(note_text)
+ expect(page.find('a')).to have_content(mention.to_reference)
+ end
+
+ find('div#notes li.note div.note-text a').click
+
+ page.within('div#notes li.note .system-note-message') do
+ expect(page).to have_content('mentioned in issue')
+ expect(page.find('a')).to have_content(issue.to_reference)
+ end
+ end
+ end
+
+ context 'mentioning issue on a private project' do
+ it_behaves_like 'notes with reference' do
+ let(:project) { create(:project, :private) }
+ let(:mention) { create(:issue, project: project) }
+ end
+ end
+
+ context 'mentioning issue on an internal project' do
+ it_behaves_like 'notes with reference' do
+ let(:project) { create(:project, :internal) }
+ let(:mention) { create(:issue, project: project) }
+ end
+ end
+
+ context 'mentioning issue on a public project' do
+ it_behaves_like 'notes with reference' do
+ let(:project) { create(:project, :public) }
+ let(:mention) { create(:issue, project: project) }
+ end
+ end
+
+ context 'mentioning merge request on a private project' do
+ it_behaves_like 'notes with reference' do
+ let(:project) { create(:project, :private) }
+ let(:mention) { create(:merge_request, source_project: project) }
+ end
+ end
+
+ context 'mentioning merge request on an internal project' do
+ it_behaves_like 'notes with reference' do
+ let(:project) { create(:project, :internal) }
+ let(:mention) { create(:merge_request, source_project: project) }
+ end
+ end
+
+ context 'mentioning merge request on a public project' do
+ it_behaves_like 'notes with reference' do
+ let(:project) { create(:project, :public) }
+ let(:mention) { create(:merge_request, source_project: project) }
+ end
+ end
+end
diff --git a/spec/features/issues_spec.rb b/spec/features/issues_spec.rb
index 5285dda361b..06ed2dbac64 100644
--- a/spec/features/issues_spec.rb
+++ b/spec/features/issues_spec.rb
@@ -30,13 +30,6 @@ describe 'Issues', feature: true do
it 'opens new issue popup' do
expect(page).to have_content("Issue ##{issue.iid}")
end
-
- describe 'fill in' do
- before do
- fill_in 'issue_title', with: 'bug 345'
- fill_in 'issue_description', with: 'bug description'
- end
- end
end
describe 'Editing issue assignee' do
@@ -465,8 +458,6 @@ describe 'Issues', feature: true do
click_link 'Edit'
click_link @user.name
- find('.dropdown-menu-toggle').click
-
page.within '.value .author' do
expect(page).to have_content @user.name
end
@@ -474,8 +465,6 @@ describe 'Issues', feature: true do
click_link 'Edit'
click_link @user.name
- find('.dropdown-menu-toggle').click
-
page.within '.value .assign-yourself' do
expect(page).to have_content "No assignee"
end
@@ -561,18 +550,11 @@ describe 'Issues', feature: true do
expect(page).to have_content milestone.title
end
end
-
- describe 'removing assignee' do
- let(:user2) { create(:user) }
-
- before do
- issue.assignees << user2
- issue.save
- end
- end
end
describe 'new issue' do
+ let!(:issue) { create(:issue, project: project) }
+
context 'by unauthenticated user' do
before do
logout
diff --git a/spec/features/login_spec.rb b/spec/features/login_spec.rb
index 11d417c253d..c82e8c03343 100644
--- a/spec/features/login_spec.rb
+++ b/spec/features/login_spec.rb
@@ -41,7 +41,7 @@ feature 'Login', feature: true do
expect(page).to have_content('Your account has been blocked.')
end
- it 'does not update Devise trackable attributes' do
+ it 'does not update Devise trackable attributes', :redis do
user = create(:user, :blocked)
expect { login_with(user) }.not_to change { user.reload.sign_in_count }
@@ -55,7 +55,7 @@ feature 'Login', feature: true do
expect(page).to have_content('Invalid Login or password.')
end
- it 'does not update Devise trackable attributes' do
+ it 'does not update Devise trackable attributes', :redis do
expect { login_with(User.ghost) }.not_to change { User.ghost.reload.sign_in_count }
end
end
diff --git a/spec/features/merge_requests/assign_issues_spec.rb b/spec/features/merge_requests/assign_issues_spec.rb
index ec49003772b..b306e2f5f75 100644
--- a/spec/features/merge_requests/assign_issues_spec.rb
+++ b/spec/features/merge_requests/assign_issues_spec.rb
@@ -18,7 +18,7 @@ feature 'Merge request issue assignment', js: true, feature: true do
end
context 'logged in as author' do
- scenario 'updates related issues' do
+ it 'updates related issues' do
visit_merge_request
click_link "Assign yourself to these issues"
diff --git a/spec/features/merge_requests/check_if_mergeable_with_unresolved_discussions_spec.rb b/spec/features/merge_requests/check_if_mergeable_with_unresolved_discussions_spec.rb
index 77b7ba4ac7a..fa306c02a43 100644
--- a/spec/features/merge_requests/check_if_mergeable_with_unresolved_discussions_spec.rb
+++ b/spec/features/merge_requests/check_if_mergeable_with_unresolved_discussions_spec.rb
@@ -19,8 +19,8 @@ feature 'Check if mergeable with unresolved discussions', js: true, feature: tru
it 'does not allow to merge' do
visit_merge_request(merge_request)
- expect(page).not_to have_button 'Accept merge request'
- expect(page).to have_content('This merge request has unresolved discussions')
+ expect(page).not_to have_button 'Merge'
+ expect(page).to have_content('There are unresolved discussions.')
end
end
@@ -32,7 +32,7 @@ feature 'Check if mergeable with unresolved discussions', js: true, feature: tru
it 'allows MR to be merged' do
visit_merge_request(merge_request)
- expect(page).to have_button 'Accept merge request'
+ expect(page).to have_button 'Merge'
end
end
end
@@ -46,7 +46,7 @@ feature 'Check if mergeable with unresolved discussions', js: true, feature: tru
it 'does not allow to merge' do
visit_merge_request(merge_request)
- expect(page).to have_button 'Accept merge request'
+ expect(page).to have_button 'Merge'
end
end
@@ -58,7 +58,7 @@ feature 'Check if mergeable with unresolved discussions', js: true, feature: tru
it 'allows MR to be merged' do
visit_merge_request(merge_request)
- expect(page).to have_button 'Accept merge request'
+ expect(page).to have_button 'Merge'
end
end
end
diff --git a/spec/features/merge_requests/cherry_pick_spec.rb b/spec/features/merge_requests/cherry_pick_spec.rb
index dfe7c910a10..6ba681e36f7 100644
--- a/spec/features/merge_requests/cherry_pick_spec.rb
+++ b/spec/features/merge_requests/cherry_pick_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe 'Cherry-pick Merge Requests' do
+describe 'Cherry-pick Merge Requests', js: true do
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:project) { create(:project, namespace: group) }
diff --git a/spec/features/merge_requests/closes_issues_spec.rb b/spec/features/merge_requests/closes_issues_spec.rb
index eafcab6a0d7..ee0880a1e2f 100644
--- a/spec/features/merge_requests/closes_issues_spec.rb
+++ b/spec/features/merge_requests/closes_issues_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
-feature 'Merge Request closing issues message', feature: true do
+feature 'Merge Request closing issues message', feature: true, js: true do
+ include WaitForAjax
+
let(:user) { create(:user) }
let(:project) { create(:project, :public) }
let(:issue_1) { create(:issue, project: project)}
@@ -23,6 +25,7 @@ feature 'Merge Request closing issues message', feature: true do
login_as user
visit namespace_project_merge_request_path(project.namespace, project, merge_request)
+ wait_for_ajax
end
context 'not closing or mentioning any issue' do
@@ -35,7 +38,7 @@ feature 'Merge Request closing issues message', feature: true do
let(:merge_request_description) { "Description\n\nclosing #{issue_1.to_reference}, #{issue_2.to_reference}" }
it 'does not display closing issue message' do
- expect(page).to have_content("Accepting this merge request will close issues #{issue_1.to_reference} and #{issue_2.to_reference}")
+ expect(page).to have_content("Closes issues #{issue_1.to_reference} and #{issue_2.to_reference}")
end
end
@@ -51,7 +54,8 @@ feature 'Merge Request closing issues message', feature: true do
let(:merge_request_title) { "closes #{issue_1.to_reference}\n\n refers to #{issue_2.to_reference}" }
it 'does not display closing issue message' do
- expect(page).to have_content("Accepting this merge request will close issue #{issue_1.to_reference}. Issue #{issue_2.to_reference} is mentioned but will not be closed.")
+ expect(page).to have_content("Closes issue #{issue_1.to_reference}.")
+ expect(page).to have_content("Issue #{issue_2.to_reference} is mentioned but will not be closed.")
end
end
@@ -59,7 +63,7 @@ feature 'Merge Request closing issues message', feature: true do
let(:merge_request_title) { "closing #{issue_1.to_reference}, #{issue_2.to_reference}" }
it 'does not display closing issue message' do
- expect(page).to have_content("Accepting this merge request will close issues #{issue_1.to_reference} and #{issue_2.to_reference}")
+ expect(page).to have_content("Closes issues #{issue_1.to_reference} and #{issue_2.to_reference}")
end
end
@@ -75,7 +79,8 @@ feature 'Merge Request closing issues message', feature: true do
let(:merge_request_title) { "closes #{issue_1.to_reference}\n\n refers to #{issue_2.to_reference}" }
it 'does not display closing issue message' do
- expect(page).to have_content("Accepting this merge request will close issue #{issue_1.to_reference}. Issue #{issue_2.to_reference} is mentioned but will not be closed.")
+ expect(page).to have_content("Closes issue #{issue_1.to_reference}. Issue #{issue_2.to_reference} is mentioned but will not be closed.")
+ expect(page).to have_content("Issue #{issue_2.to_reference} is mentioned but will not be closed.")
end
end
end
diff --git a/spec/features/merge_requests/conflicts_spec.rb b/spec/features/merge_requests/conflicts_spec.rb
index 43977ad2fc5..04b7593ce68 100644
--- a/spec/features/merge_requests/conflicts_spec.rb
+++ b/spec/features/merge_requests/conflicts_spec.rb
@@ -151,7 +151,7 @@ feature 'Merge request conflict resolution', js: true, feature: true do
'conflict-too-large' => 'when the conflicts contain a large file',
'conflict-binary-file' => 'when the conflicts contain a binary file',
'conflict-missing-side' => 'when the conflicts contain a file edited in one branch and deleted in another',
- 'conflict-non-utf8' => 'when the conflicts contain a non-UTF-8 file',
+ 'conflict-non-utf8' => 'when the conflicts contain a non-UTF-8 file'
}.freeze
UNRESOLVABLE_CONFLICTS.each do |source_branch, description|
diff --git a/spec/features/merge_requests/created_from_fork_spec.rb b/spec/features/merge_requests/created_from_fork_spec.rb
index 18833ba7266..bf34c99b92a 100644
--- a/spec/features/merge_requests/created_from_fork_spec.rb
+++ b/spec/features/merge_requests/created_from_fork_spec.rb
@@ -31,7 +31,7 @@ feature 'Merge request created from fork' do
fork_project.destroy!
end
- scenario 'user can access merge request' do
+ scenario 'user can access merge request', js: true do
visit_merge_request(merge_request)
expect(page).to have_content 'Test merge request'
diff --git a/spec/features/merge_requests/deleted_source_branch_spec.rb b/spec/features/merge_requests/deleted_source_branch_spec.rb
index 648678e2b1a..01e5e4f3a05 100644
--- a/spec/features/merge_requests/deleted_source_branch_spec.rb
+++ b/spec/features/merge_requests/deleted_source_branch_spec.rb
@@ -20,7 +20,7 @@ describe 'Deleted source branch', feature: true, js: true do
it 'shows a message about missing source branch' do
expect(page).to have_content(
- 'Source branch this-branch-does-not-exist does not exist'
+ 'Source branch does not exist.'
)
end
@@ -35,6 +35,6 @@ describe 'Deleted source branch', feature: true, js: true do
wait_for_ajax
expect(page).to have_selector('.diffs.tab-pane .nothing-here-block')
- expect(page).to have_content('Nothing to merge from this-branch-does-not-exist into feature')
+ expect(page).to have_content('Source branch does not exist.')
end
end
diff --git a/spec/features/merge_requests/diff_notes_avatars_spec.rb b/spec/features/merge_requests/diff_notes_avatars_spec.rb
index b2e170513c4..ccf047d3efa 100644
--- a/spec/features/merge_requests/diff_notes_avatars_spec.rb
+++ b/spec/features/merge_requests/diff_notes_avatars_spec.rb
@@ -91,7 +91,7 @@ feature 'Diff note avatars', feature: true, js: true do
page.within find("[id='#{position.line_code(project.repository)}']") do
find('.diff-notes-collapse').click
- expect(first('img.js-diff-comment-avatar')["title"]).to eq("#{note.author.name}: #{note.note.truncate(17)}")
+ expect(first('img.js-diff-comment-avatar')["data-original-title"]).to eq("#{note.author.name}: #{note.note.truncate(17)}")
end
end
diff --git a/spec/features/merge_requests/diff_notes_resolve_spec.rb b/spec/features/merge_requests/diff_notes_resolve_spec.rb
index 0e23c3a8849..4d549f3bdbb 100644
--- a/spec/features/merge_requests/diff_notes_resolve_spec.rb
+++ b/spec/features/merge_requests/diff_notes_resolve_spec.rb
@@ -275,7 +275,7 @@ feature 'Diff notes resolve', feature: true, js: true do
end
page.within '.line-resolve-all-container' do
- page.find('.discussion-next-btn').click
+ page.find('.discussion-next-btn').trigger('click')
end
expect(page.evaluate_script("$('body').scrollTop()")).to be > 0
diff --git a/spec/features/merge_requests/diffs_spec.rb b/spec/features/merge_requests/diffs_spec.rb
index 7dee3b852ca..4860a2a7498 100644
--- a/spec/features/merge_requests/diffs_spec.rb
+++ b/spec/features/merge_requests/diffs_spec.rb
@@ -20,6 +20,34 @@ feature 'Diffs URL', js: true, feature: true do
end
end
+ context 'when linking to note' do
+ describe 'with unresolved note' do
+ let(:note) { create :diff_note_on_merge_request, project: project, noteable: merge_request }
+ let(:fragment) { "#note_#{note.id}" }
+
+ before do
+ visit "#{diffs_namespace_project_merge_request_path(project.namespace, project, merge_request)}#{fragment}"
+ end
+
+ it 'shows expanded note' do
+ expect(page).to have_selector(fragment, visible: true)
+ end
+ end
+
+ describe 'with resolved note' do
+ let(:note) { create :diff_note_on_merge_request, :resolved, project: project, noteable: merge_request }
+ let(:fragment) { "#note_#{note.id}" }
+
+ before do
+ visit "#{diffs_namespace_project_merge_request_path(project.namespace, project, merge_request)}#{fragment}"
+ end
+
+ it 'shows expanded note' do
+ expect(page).to have_selector(fragment, visible: true)
+ end
+ end
+ end
+
context 'when merge request has overflow' do
it 'displays warning' do
allow(Commit).to receive(:max_diff_options).and_return(max_files: 3)
diff --git a/spec/features/merge_requests/edit_mr_spec.rb b/spec/features/merge_requests/edit_mr_spec.rb
index cb3bc392903..ec87a99b3ab 100644
--- a/spec/features/merge_requests/edit_mr_spec.rb
+++ b/spec/features/merge_requests/edit_mr_spec.rb
@@ -29,18 +29,6 @@ feature 'Edit Merge Request', feature: true do
expect(page).to have_content 'Someone edited the merge request the same time you did'
end
- it 'allows to unselect "Remove source branch"' do
- merge_request.update(merge_params: { 'force_remove_source_branch' => '1' })
- expect(merge_request.merge_params['force_remove_source_branch']).to be_truthy
-
- visit edit_namespace_project_merge_request_path(project.namespace, project, merge_request)
- uncheck 'Remove source branch when merge request is accepted'
-
- click_button 'Save changes'
-
- expect(page).to have_content 'Remove source branch'
- end
-
it 'should preserve description textarea height', js: true do
long_description = %q(
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Etiam ac ornare ligula, ut tempus arcu. Etiam ultricies accumsan dolor vitae faucibus. Donec at elit lacus. Mauris orci ante, aliquam quis lorem eget, convallis faucibus arcu. Aenean at pulvinar lacus. Ut viverra quam massa, molestie ornare tortor dignissim a. Suspendisse tristique pellentesque tellus, id lacinia metus elementum id. Nam tristique, arcu rhoncus faucibus viverra, lacus ipsum sagittis ligula, vitae convallis odio lacus a nibh. Ut tincidunt est purus, ac vestibulum augue maximus in. Suspendisse vel erat et mi ultricies semper. Pellentesque volutpat pellentesque consequat.
diff --git a/spec/features/merge_requests/form_spec.rb b/spec/features/merge_requests/form_spec.rb
index f8518f450dc..00ef1ffdddc 100644
--- a/spec/features/merge_requests/form_spec.rb
+++ b/spec/features/merge_requests/form_spec.rb
@@ -90,7 +90,7 @@ describe 'New/edit merge request', feature: true, js: true do
page.within '.issuable-meta' do
merge_request = MergeRequest.find_by(source_branch: 'fix')
- expect(page).to have_text("Merge Request #{merge_request.to_reference}")
+ expect(page).to have_text("Merge request #{merge_request.to_reference}")
# compare paths because the host differ in test
expect(find_link(merge_request.to_reference)[:href])
.to end_with(merge_request_path(merge_request))
diff --git a/spec/features/merge_requests/merge_commit_message_toggle_spec.rb b/spec/features/merge_requests/merge_commit_message_toggle_spec.rb
index 1bc2a5548dd..221ddb5873c 100644
--- a/spec/features/merge_requests/merge_commit_message_toggle_spec.rb
+++ b/spec/features/merge_requests/merge_commit_message_toggle_spec.rb
@@ -14,8 +14,6 @@ feature 'Clicking toggle commit message link', feature: true, js: true do
)
end
let(:textbox) { page.find(:css, '.js-commit-message', visible: false) }
- let(:include_link) { page.find(:css, '.js-with-description-link', visible: false) }
- let(:do_not_include_link) { page.find(:css, '.js-without-description-link', visible: false) }
let(:default_message) do
[
"Merge branch 'feature' into 'master'",
@@ -40,7 +38,7 @@ feature 'Clicking toggle commit message link', feature: true, js: true do
visit namespace_project_merge_request_path(project.namespace, project, merge_request)
- expect(textbox).not_to be_visible
+ expect(page).not_to have_selector('.js-commit-message')
click_button "Modify commit message"
expect(textbox).to be_visible
end
@@ -56,19 +54,4 @@ feature 'Clicking toggle commit message link', feature: true, js: true do
expect(textbox.value).to eq(default_message)
end
-
- it "toggles link between 'Include description' and 'Don't include description'" do
- expect(include_link).to be_visible
- expect(do_not_include_link).not_to be_visible
-
- click_link "Include description in commit message"
-
- expect(include_link).not_to be_visible
- expect(do_not_include_link).to be_visible
-
- click_link "Don't include description in commit message"
-
- expect(include_link).to be_visible
- expect(do_not_include_link).not_to be_visible
- end
end
diff --git a/spec/features/merge_requests/merge_immediately_with_pipeline_spec.rb b/spec/features/merge_requests/merge_immediately_with_pipeline_spec.rb
index 497240803d4..c102722d6db 100644
--- a/spec/features/merge_requests/merge_immediately_with_pipeline_spec.rb
+++ b/spec/features/merge_requests/merge_immediately_with_pipeline_spec.rb
@@ -4,16 +4,18 @@ feature 'Merge immediately', :feature, :js do
let(:user) { create(:user) }
let(:project) { create(:project, :public) }
- let(:merge_request) do
+ let!(:merge_request) do
create(:merge_request_with_diffs, source_project: project,
author: user,
- title: 'Bug NS-04')
+ title: 'Bug NS-04',
+ head_pipeline: pipeline,
+ source_branch: pipeline.ref)
end
let(:pipeline) do
create(:ci_pipeline, project: project,
- sha: merge_request.diff_head_sha,
- ref: merge_request.source_branch)
+ ref: 'master',
+ sha: project.repository.commit('master').id)
end
before { project.team << [user, :master] }
@@ -32,11 +34,13 @@ feature 'Merge immediately', :feature, :js do
page.within '.mr-widget-body' do
find('.dropdown-toggle').click
- click_link 'Merge immediately'
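+ # Sidekiq::Testing.fake! enqueues the merge job without performing it, so the widget stays in its in-progress state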
+ Sidekiq::Testing.fake! do
+ click_link 'Merge immediately'
- expect(find('.js-merge-when-pipeline-succeeds-button')).to have_content('Merge in progress')
+ expect(find('.accept-merge-request.btn-info')).to have_content('Merge in progress')
- wait_for_ajax
+ wait_for_vue_resource
+ end
end
end
end
diff --git a/spec/features/merge_requests/merge_when_pipeline_succeeds_spec.rb b/spec/features/merge_requests/merge_when_pipeline_succeeds_spec.rb
index cd540ca113a..11b6f0c0a64 100644
--- a/spec/features/merge_requests/merge_when_pipeline_succeeds_spec.rb
+++ b/spec/features/merge_requests/merge_when_pipeline_succeeds_spec.rb
@@ -16,7 +16,10 @@ feature 'Merge When Pipeline Succeeds', :feature, :js do
ref: merge_request.source_branch)
end
- before { project.team << [user, :master] }
+ before do
+ project.add_master(user)
+ merge_request.update(head_pipeline_id: pipeline.id)
+ end
context 'when there is active pipeline for merge request' do
background do
@@ -38,8 +41,8 @@ feature 'Merge When Pipeline Succeeds', :feature, :js do
click_button "Merge when pipeline succeeds"
expect(page).to have_content "Set by #{user.name} to be merged automatically when the pipeline succeeds."
- expect(page).to have_content "The source branch will not be removed."
- expect(page).to have_link "Cancel automatic merge"
+ expect(page).to have_content "The source branch will be removed."
+ expect(page).to have_selector ".js-cancel-auto-merge"
visit_merge_request(merge_request) # Needed to refresh the page
expect(page).to have_content /enabled an automatic merge when the pipeline for \h{8} succeeds/i
end
@@ -93,12 +96,10 @@ feature 'Merge When Pipeline Succeeds', :feature, :js do
describe 'enabling Merge when pipeline succeeds via dropdown' do
it 'activates the Merge when pipeline succeeds feature' do
click_button 'Select merge moment'
- within('.js-merge-dropdown') do
- click_link 'Merge when pipeline succeeds'
- end
+ click_link 'Merge when pipeline succeeds'
expect(page).to have_content "Set by #{user.name} to be merged automatically when the pipeline succeeds."
- expect(page).to have_content "The source branch will not be removed."
+ expect(page).to have_content "The source branch will be removed."
expect(page).to have_link "Cancel automatic merge"
end
end
@@ -131,13 +132,6 @@ feature 'Merge When Pipeline Succeeds', :feature, :js do
expect(page).to have_content "canceled the automatic merge"
end
- it "allows the user to remove the source branch" do
- expect(page).to have_link "Remove source branch when merged"
-
- click_link "Remove source branch when merged"
- expect(page).to have_content "The source branch will be removed"
- end
-
context 'when pipeline succeeds' do
background { build.success }
diff --git a/spec/features/merge_requests/mini_pipeline_graph_spec.rb b/spec/features/merge_requests/mini_pipeline_graph_spec.rb
index 449a60c1d05..5b2798af32f 100644
--- a/spec/features/merge_requests/mini_pipeline_graph_spec.rb
+++ b/spec/features/merge_requests/mini_pipeline_graph_spec.rb
@@ -3,7 +3,7 @@ require 'rails_helper'
feature 'Mini Pipeline Graph', :js, :feature do
let(:user) { create(:user) }
let(:project) { create(:project, :public) }
- let(:merge_request) { create(:merge_request, source_project: project) }
+ let(:merge_request) { create(:merge_request, source_project: project, head_pipeline: pipeline) }
let(:pipeline) { create(:ci_empty_pipeline, project: project, ref: 'master', status: 'running', sha: project.commit.id) }
let(:build) { create(:ci_build, pipeline: pipeline, stage: 'test', commands: 'test') }
diff --git a/spec/features/merge_requests/only_allow_merge_if_build_succeeds_spec.rb b/spec/features/merge_requests/only_allow_merge_if_build_succeeds_spec.rb
index 4a590e3bf68..cdda0542c51 100644
--- a/spec/features/merge_requests/only_allow_merge_if_build_succeeds_spec.rb
+++ b/spec/features/merge_requests/only_allow_merge_if_build_succeeds_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
-feature 'Only allow merge requests to be merged if the pipeline succeeds', feature: true do
+feature 'Only allow merge requests to be merged if the pipeline succeeds', feature: true, js: true do
+ include WaitForVueResource
+
let(:merge_request) { create(:merge_request_with_diffs) }
let(:project) { merge_request.target_project }
@@ -10,15 +12,17 @@ feature 'Only allow merge requests to be merged if the pipeline succeeds', featu
project.team << [merge_request.author, :master]
end
- context 'project does not have CI enabled' do
+ context 'project does not have CI enabled', js: true do
it 'allows MR to be merged' do
visit_merge_request(merge_request)
- expect(page).to have_button 'Accept merge request'
+ wait_for_vue_resource
+
+ expect(page).to have_button 'Merge'
end
end
- context 'when project has CI enabled' do
+ context 'when project has CI enabled', js: true do
given!(:pipeline) do
create(:ci_empty_pipeline,
project: project,
@@ -27,6 +31,8 @@ feature 'Only allow merge requests to be merged if the pipeline succeeds', featu
status: status)
end
+ before { merge_request.update(head_pipeline: pipeline) }
+
context 'when merge requests can only be merged if the pipeline succeeds' do
before do
project.update_attribute(:only_allow_merge_if_pipeline_succeeds, true)
@@ -38,6 +44,8 @@ feature 'Only allow merge requests to be merged if the pipeline succeeds', featu
it 'does not allow to merge immediately' do
visit_merge_request(merge_request)
+ wait_for_vue_resource
+
expect(page).to have_button 'Merge when pipeline succeeds'
expect(page).not_to have_button 'Select merge moment'
end
@@ -49,7 +57,9 @@ feature 'Only allow merge requests to be merged if the pipeline succeeds', featu
it 'does not allow MR to be merged' do
visit_merge_request(merge_request)
- expect(page).not_to have_button 'Accept merge request'
+ wait_for_vue_resource
+
+ expect(page).to have_css('button[disabled="disabled"]', text: 'Merge')
expect(page).to have_content('Please retry the job or push a new commit to fix the failure.')
end
end
@@ -60,7 +70,9 @@ feature 'Only allow merge requests to be merged if the pipeline succeeds', featu
it 'does not allow MR to be merged' do
visit_merge_request(merge_request)
- expect(page).not_to have_button 'Accept merge request'
+ wait_for_vue_resource
+
+ expect(page).not_to have_button 'Merge'
expect(page).to have_content('Please retry the job or push a new commit to fix the failure.')
end
end
@@ -71,7 +83,9 @@ feature 'Only allow merge requests to be merged if the pipeline succeeds', featu
it 'allows MR to be merged' do
visit_merge_request(merge_request)
- expect(page).to have_button 'Accept merge request'
+ wait_for_vue_resource
+
+ expect(page).to have_button 'Merge'
end
end
@@ -81,7 +95,9 @@ feature 'Only allow merge requests to be merged if the pipeline succeeds', featu
it 'allows MR to be merged' do
visit_merge_request(merge_request)
- expect(page).to have_button 'Accept merge request'
+ wait_for_vue_resource
+
+ expect(page).to have_button 'Merge'
end
end
end
@@ -94,9 +110,11 @@ feature 'Only allow merge requests to be merged if the pipeline succeeds', featu
context 'when CI is running' do
given(:status) { :running }
- it 'allows MR to be merged immediately', js: true do
+ it 'allows MR to be merged immediately' do
visit_merge_request(merge_request)
+ wait_for_vue_resource
+
expect(page).to have_button 'Merge when pipeline succeeds'
click_button 'Select merge moment'
@@ -110,7 +128,9 @@ feature 'Only allow merge requests to be merged if the pipeline succeeds', featu
it 'allows MR to be merged' do
visit_merge_request(merge_request)
- expect(page).to have_button 'Accept merge request'
+ wait_for_vue_resource
+
+ expect(page).to have_button 'Merge'
end
end
@@ -120,7 +140,9 @@ feature 'Only allow merge requests to be merged if the pipeline succeeds', featu
it 'allows MR to be merged' do
visit_merge_request(merge_request)
- expect(page).to have_button 'Accept merge request'
+ wait_for_vue_resource
+
+ expect(page).to have_button 'Merge'
end
end
end
diff --git a/spec/features/merge_requests/target_branch_spec.rb b/spec/features/merge_requests/target_branch_spec.rb
index b6134540273..c154cf8ade9 100644
--- a/spec/features/merge_requests/target_branch_spec.rb
+++ b/spec/features/merge_requests/target_branch_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe 'Target branch', feature: true do
+describe 'Target branch', feature: true, js: true do
let(:user) { create(:user) }
let(:merge_request) { create(:merge_request) }
let(:project) { merge_request.project }
@@ -17,11 +17,6 @@ describe 'Target branch', feature: true do
project.team << [user, :master]
end
- it 'shows link to target branch' do
- visit path_to_merge_request
- expect(page).to have_link('feature', href: namespace_project_commits_path(project.namespace, project, merge_request.target_branch))
- end
-
context 'when branch was deleted' do
before do
DeleteBranchService.new(project, user).execute('feature')
@@ -30,12 +25,12 @@ describe 'Target branch', feature: true do
it 'shows a message about missing target branch' do
expect(page).to have_content(
- 'Target branch feature does not exist'
+ 'Target branch does not exist'
)
end
it 'does not show link to target branch' do
- expect(page).not_to have_link('feature')
+ expect(page).not_to have_selector('.mr-widget-body .js-branch-text a')
end
end
end
diff --git a/spec/features/merge_requests/widget_deployments_spec.rb b/spec/features/merge_requests/widget_deployments_spec.rb
index 00d191ddf2c..8370499f6ed 100644
--- a/spec/features/merge_requests/widget_deployments_spec.rb
+++ b/spec/features/merge_requests/widget_deployments_spec.rb
@@ -21,7 +21,7 @@ feature 'Widget Deployments Header', feature: true, js: true do
wait_for_ajax
expect(page).to have_content("Deployed to #{environment.name}")
- expect(find('.ci_widget > span > span')['data-title']).to eq(deployment.created_at.to_time.in_time_zone.to_s(:medium))
+ expect(find('.js-deploy-time')['data-title']).to eq(deployment.created_at.to_time.in_time_zone.to_s(:medium))
end
context 'with stop action' do
@@ -38,11 +38,11 @@ feature 'Widget Deployments Header', feature: true, js: true do
end
scenario 'does show stop button' do
- expect(page).to have_link('Stop environment')
+ expect(page).to have_button('Stop environment')
end
scenario 'does start build when stop button clicked' do
- click_link('Stop environment')
+ click_button('Stop environment')
expect(page).to have_content('close_app')
end
@@ -51,7 +51,7 @@ feature 'Widget Deployments Header', feature: true, js: true do
given(:role) { :reporter }
scenario 'does not show stop button' do
- expect(page).not_to have_link('Stop environment')
+ expect(page).not_to have_button('Stop environment')
end
end
end
diff --git a/spec/features/merge_requests/widget_spec.rb b/spec/features/merge_requests/widget_spec.rb
index d918181a238..ae799584c0f 100644
--- a/spec/features/merge_requests/widget_spec.rb
+++ b/spec/features/merge_requests/widget_spec.rb
@@ -30,6 +30,7 @@ describe 'Merge request', :feature, :js do
wait_for_ajax
expect(page).to have_selector('.accept-merge-request')
+ expect(find('.accept-merge-request')['disabled']).not_to be(true)
end
end
@@ -51,14 +52,15 @@ describe 'Merge request', :feature, :js do
page.within('.mr-widget-heading') do
expect(page).to have_content("Deployed to #{environment.name}")
- expect(find('.js-environment-link')[:href]).to include(environment.formatted_external_url)
+ expect(find('.js-deploy-url')[:href]).to include(environment.formatted_external_url)
end
end
it 'shows green accept merge request button' do
# Wait for the `ci_status` and `merge_check` requests
wait_for_ajax
- expect(page).to have_selector('.accept-merge-request.btn-create')
+ expect(page).to have_selector('.accept-merge-request')
+ expect(find('.accept-merge-request')['disabled']).not_to be(true)
end
end
@@ -89,6 +91,8 @@ describe 'Merge request', :feature, :js do
statuses: [commit_status])
create(:ci_build, :pending, pipeline: pipeline)
+ merge_request.update(head_pipeline: pipeline)
+
visit namespace_project_merge_request_path(project.namespace, project, merge_request)
end
@@ -101,10 +105,15 @@ describe 'Merge request', :feature, :js do
context 'when merge request is in the blocked pipeline state' do
before do
- create(:ci_pipeline, project: project,
- sha: merge_request.diff_head_sha,
- ref: merge_request.source_branch,
- status: :manual)
+ pipeline = create(
+ :ci_pipeline,
+ project: project,
+ sha: merge_request.diff_head_sha,
+ ref: merge_request.source_branch,
+ status: :manual
+ )
+
+ merge_request.update(head_pipeline: pipeline)
visit namespace_project_merge_request_path(project.namespace,
project,
@@ -129,13 +138,36 @@ describe 'Merge request', :feature, :js do
statuses: [commit_status])
create(:ci_build, :pending, pipeline: pipeline)
+ merge_request.update(head_pipeline: pipeline)
+
visit namespace_project_merge_request_path(project.namespace, project, merge_request)
end
it 'has info button when MWBS button' do
# Wait for the `ci_status` and `merge_check` requests
wait_for_ajax
- expect(page).to have_selector('.merge-when-pipeline-succeeds.btn-info')
+ expect(page).to have_selector('.accept-merge-request.btn-info')
+ end
+ end
+
+ context 'view merge request with MWPS enabled but automatically merge fails' do
+ before do
+ merge_request.update(
+ merge_when_pipeline_succeeds: true,
+ merge_user: merge_request.author,
+ merge_error: 'Something went wrong'
+ )
+
+ visit namespace_project_merge_request_path(project.namespace, project, merge_request)
+ end
+
+ it 'shows information about the merge error' do
+ # Wait for the `ci_status` and `merge_check` requests
+ wait_for_ajax
+
+ page.within('.mr-widget-body') do
+ expect(page).to have_content('Something went wrong')
+ end
end
end
@@ -164,11 +196,11 @@ describe 'Merge request', :feature, :js do
before do
allow_any_instance_of(Repository).to receive(:merge).and_return(false)
visit namespace_project_merge_request_path(project.namespace, project, merge_request)
- click_button 'Accept merge request'
- wait_for_ajax
end
it 'updates the MR widget' do
+ click_button 'Merge'
+
page.within('.mr-widget-body') do
expect(page).to have_content('Conflicts detected during merge')
end
diff --git a/spec/features/profiles/preferences_spec.rb b/spec/features/profiles/preferences_spec.rb
index 15c8677fcd3..d368bc4d753 100644
--- a/spec/features/profiles/preferences_spec.rb
+++ b/spec/features/profiles/preferences_spec.rb
@@ -44,7 +44,7 @@ describe 'Profile > Preferences', feature: true do
expect(page.current_path).to eq starred_dashboard_projects_path
end
- click_link 'Your projects'
+ find('.shortcuts-activity').trigger('click')
expect(page).not_to have_content("You don't have starred projects yet")
expect(page.current_path).to eq dashboard_projects_path
diff --git a/spec/features/projects/blobs/blob_show_spec.rb b/spec/features/projects/blobs/blob_show_spec.rb
index 5955623f565..fc242082278 100644
--- a/spec/features/projects/blobs/blob_show_spec.rb
+++ b/spec/features/projects/blobs/blob_show_spec.rb
@@ -5,13 +5,13 @@ feature 'File blob', :js, feature: true do
def visit_blob(path, fragment = nil)
visit namespace_project_blob_path(project.namespace, project, File.join('master', path), anchor: fragment)
+
+ wait_for_ajax
end
context 'Ruby file' do
before do
visit_blob('files/ruby/popen.rb')
-
- wait_for_ajax
end
it 'displays the blob' do
@@ -35,8 +35,6 @@ feature 'File blob', :js, feature: true do
context 'visiting directly' do
before do
visit_blob('files/markdown/ruby-style-guide.md')
-
- wait_for_ajax
end
it 'displays the blob using the rich viewer' do
@@ -104,8 +102,6 @@ feature 'File blob', :js, feature: true do
context 'visiting with a line number anchor' do
before do
visit_blob('files/markdown/ruby-style-guide.md', 'L1')
-
- wait_for_ajax
end
it 'displays the blob using the simple viewer' do
@@ -148,8 +144,6 @@ feature 'File blob', :js, feature: true do
project.update_attribute(:lfs_enabled, true)
visit_blob('files/lfs/file.md')
-
- wait_for_ajax
end
it 'displays an error' do
@@ -198,8 +192,6 @@ feature 'File blob', :js, feature: true do
context 'when LFS is disabled on the project' do
before do
visit_blob('files/lfs/file.md')
-
- wait_for_ajax
end
it 'displays the blob' do
@@ -235,8 +227,6 @@ feature 'File blob', :js, feature: true do
).execute
visit_blob('files/test.pdf')
-
- wait_for_ajax
end
it 'displays the blob' do
@@ -263,8 +253,6 @@ feature 'File blob', :js, feature: true do
project.update_attribute(:lfs_enabled, true)
visit_blob('files/lfs/lfs_object.iso')
-
- wait_for_ajax
end
it 'displays the blob' do
@@ -287,8 +275,6 @@ feature 'File blob', :js, feature: true do
context 'when LFS is disabled on the project' do
before do
visit_blob('files/lfs/lfs_object.iso')
-
- wait_for_ajax
end
it 'displays the blob' do
@@ -312,8 +298,6 @@ feature 'File blob', :js, feature: true do
context 'ZIP file' do
before do
visit_blob('Gemfile.zip')
-
- wait_for_ajax
end
it 'displays the blob' do
@@ -348,8 +332,6 @@ feature 'File blob', :js, feature: true do
).execute
visit_blob('files/empty.md')
-
- wait_for_ajax
end
it 'displays an error' do
@@ -369,4 +351,116 @@ feature 'File blob', :js, feature: true do
end
end
end
+
+ context '.gitlab-ci.yml' do
+ before do
+ project.add_master(project.creator)
+
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'master',
+ branch_name: 'master',
+ commit_message: "Add .gitlab-ci.yml",
+ file_path: '.gitlab-ci.yml',
+ file_content: File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml'))
+ ).execute
+
+ visit_blob('.gitlab-ci.yml')
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ # shows that configuration is valid
+ expect(page).to have_content('This GitLab CI configuration is valid.')
+
+ # shows a learn more link
+ expect(page).to have_link('Learn more')
+ end
+ end
+ end
+
+ context '.gitlab/route-map.yml' do
+ before do
+ project.add_master(project.creator)
+
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'master',
+ branch_name: 'master',
+ commit_message: "Add .gitlab/route-map.yml",
+ file_path: '.gitlab/route-map.yml',
+ file_content: <<-MAP.strip_heredoc
+ # Team data
+ - source: 'data/team.yml'
+ public: 'team/'
+ MAP
+ ).execute
+
+ visit_blob('.gitlab/route-map.yml')
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ # shows that map is valid
+ expect(page).to have_content('This Route Map is valid.')
+
+ # shows a learn more link
+ expect(page).to have_link('Learn more')
+ end
+ end
+ end
+
+ context 'LICENSE' do
+ before do
+ visit_blob('LICENSE')
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ # shows license
+ expect(page).to have_content('This project is licensed under the MIT License.')
+
+ # shows a learn more link
+ expect(page).to have_link('Learn more', href: 'http://choosealicense.com/licenses/mit/')
+ end
+ end
+ end
+
+ context '*.gemspec' do
+ before do
+ project.add_master(project.creator)
+
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'master',
+ branch_name: 'master',
+ commit_message: "Add activerecord.gemspec",
+ file_path: 'activerecord.gemspec',
+ file_content: <<-SPEC.strip_heredoc
+ Gem::Specification.new do |s|
+ s.platform = Gem::Platform::RUBY
+ s.name = "activerecord"
+ end
+ SPEC
+ ).execute
+
+ visit_blob('activerecord.gemspec')
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ # shows names of dependency manager and package
+ expect(page).to have_content('This project manages its dependencies using RubyGems and defines a gem named activerecord.')
+
+ # shows a link to the gem
+ expect(page).to have_link('activerecord', href: 'https://rubygems.org/gems/activerecord')
+
+ # shows a learn more link
+ expect(page).to have_link('Learn more', href: 'http://choosealicense.com/licenses/mit/')
+ end
+ end
+ end
end
diff --git a/spec/features/projects/branches/new_branch_ref_dropdown_spec.rb b/spec/features/projects/branches/new_branch_ref_dropdown_spec.rb
index cfc782c98ad..c5e0a0f0517 100644
--- a/spec/features/projects/branches/new_branch_ref_dropdown_spec.rb
+++ b/spec/features/projects/branches/new_branch_ref_dropdown_spec.rb
@@ -3,7 +3,7 @@ require 'spec_helper'
describe 'New Branch Ref Dropdown', :js, :feature do
let(:user) { create(:user) }
let(:project) { create(:project, :public) }
- let(:toggle) { find('.create-from .dropdown-toggle') }
+ let(:toggle) { find('.create-from .dropdown-menu-toggle') }
before do
project.add_master(user)
diff --git a/spec/features/projects/branches_spec.rb b/spec/features/projects/branches_spec.rb
index 8e0306ce83b..7668ce5f8be 100644
--- a/spec/features/projects/branches_spec.rb
+++ b/spec/features/projects/branches_spec.rb
@@ -4,7 +4,13 @@ describe 'Branches', feature: true do
let(:project) { create(:project, :public) }
let(:repository) { project.repository }
- context 'logged in' do
+ def set_protected_branch_name(branch_name)
+ find(".js-protected-branch-select").click
+ find(".dropdown-input-field").set(branch_name)
+ click_on("Create wildcard #{branch_name}")
+ end
+
+ context 'logged in as developer' do
before do
login_as :user
project.team << [@user, :developer]
@@ -38,6 +44,83 @@ describe 'Branches', feature: true do
expect(find('.all-branches')).to have_selector('li', count: 1)
end
end
+
+ describe 'Delete unprotected branch' do
+ it 'removes branch after confirmation', js: true do
+ visit namespace_project_branches_path(project.namespace, project)
+
+ fill_in 'branch-search', with: 'fix'
+
+ find('#branch-search').native.send_keys(:enter)
+
+ expect(page).to have_content('fix')
+ expect(find('.all-branches')).to have_selector('li', count: 1)
+ find('.js-branch-fix .btn-remove').trigger(:click)
+
+ expect(page).not_to have_content('fix')
+ expect(find('.all-branches')).to have_selector('li', count: 0)
+ end
+ end
+
+ describe 'Delete protected branch' do
+ before do
+ project.add_user(@user, :master)
+ visit namespace_project_protected_branches_path(project.namespace, project)
+ set_protected_branch_name('fix')
+ click_on "Protect"
+
+ within(".protected-branches-list") { expect(page).to have_content('fix') }
+ expect(ProtectedBranch.count).to eq(1)
+ project.add_user(@user, :developer)
+ end
+
+ it 'does not allow developer to remove protected branch', js: true do
+ visit namespace_project_branches_path(project.namespace, project)
+
+ fill_in 'branch-search', with: 'fix'
+ find('#branch-search').native.send_keys(:enter)
+
+ expect(page).to have_css('.btn-remove.disabled')
+ end
+ end
+ end
+
+ context 'logged in as master' do
+ before do
+ login_as :user
+ project.team << [@user, :master]
+ end
+
+ describe 'Delete protected branch' do
+ before do
+ visit namespace_project_protected_branches_path(project.namespace, project)
+ set_protected_branch_name('fix')
+ click_on "Protect"
+
+ within(".protected-branches-list") { expect(page).to have_content('fix') }
+ expect(ProtectedBranch.count).to eq(1)
+ end
+
+ it 'removes branch after modal confirmation', js: true do
+ visit namespace_project_branches_path(project.namespace, project)
+
+ fill_in 'branch-search', with: 'fix'
+ find('#branch-search').native.send_keys(:enter)
+
+ expect(page).to have_content('fix')
+ expect(find('.all-branches')).to have_selector('li', count: 1)
+ page.find('[data-target="#modal-delete-branch"]').trigger(:click)
+
+ expect(page).to have_css('.js-delete-branch[disabled]')
+ fill_in 'delete_branch_input', with: 'fix'
+ click_link 'Delete protected branch'
+
+ fill_in 'branch-search', with: 'fix'
+ find('#branch-search').native.send_keys(:enter)
+
+ expect(page).to have_content('No branches to show')
+ end
+ end
end
context 'logged out' do
diff --git a/spec/features/projects/features_visibility_spec.rb b/spec/features/projects/features_visibility_spec.rb
index e1781cf320a..4533a6fb144 100644
--- a/spec/features/projects/features_visibility_spec.rb
+++ b/spec/features/projects/features_visibility_spec.rb
@@ -74,7 +74,7 @@ describe 'Edit Project Settings', feature: true do
issues: namespace_project_issues_path(project.namespace, project),
wiki: namespace_project_wiki_path(project.namespace, project, :home),
snippets: namespace_project_snippets_path(project.namespace, project),
- merge_requests: namespace_project_merge_requests_path(project.namespace, project),
+ merge_requests: namespace_project_merge_requests_path(project.namespace, project)
}
end
diff --git a/spec/features/projects/files/browse_files_spec.rb b/spec/features/projects/files/browse_files_spec.rb
index 70e96efd557..4166aec1956 100644
--- a/spec/features/projects/files/browse_files_spec.rb
+++ b/spec/features/projects/files/browse_files_spec.rb
@@ -31,4 +31,16 @@ feature 'user browses project', feature: true, js: true do
expect(page).to have_content 'oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897'
expect(page).to have_content 'size 1575078'
end
+
+ scenario 'can see last commit for current directory' do
+ last_commit = project.repository.last_commit_for_path(project.default_branch, 'files')
+
+ click_link 'files'
+ wait_for_ajax
+
+ page.within('.blob-commit-info') do
+ expect(page).to have_content last_commit.short_id
+ expect(page).to have_content last_commit.author_name
+ end
+ end
end
diff --git a/spec/features/projects/gfm_autocomplete_load_spec.rb b/spec/features/projects/gfm_autocomplete_load_spec.rb
index dd9622f16a0..67bc9142356 100644
--- a/spec/features/projects/gfm_autocomplete_load_spec.rb
+++ b/spec/features/projects/gfm_autocomplete_load_spec.rb
@@ -10,7 +10,7 @@ describe 'GFM autocomplete loading', feature: true, js: true do
end
it 'does not load on project#show' do
- expect(evaluate_script('gl.GfmAutoComplete.dataSources')).to eq({})
+ expect(evaluate_script('gl.GfmAutoComplete')).to eq(nil)
end
it 'loads on new issue page' do
diff --git a/spec/features/projects/import_export/test_project_export.tar.gz b/spec/features/projects/import_export/test_project_export.tar.gz
index 399c1d478c5..4efd5a26a82 100644
--- a/spec/features/projects/import_export/test_project_export.tar.gz
+++ b/spec/features/projects/import_export/test_project_export.tar.gz
Binary files differ
diff --git a/spec/features/projects/members/sorting_spec.rb b/spec/features/projects/members/sorting_spec.rb
index c7a32a65e49..b7ae5f0b925 100644
--- a/spec/features/projects/members/sorting_spec.rb
+++ b/spec/features/projects/members/sorting_spec.rb
@@ -68,7 +68,7 @@ feature 'Projects > Members > Sorting', feature: true do
expect(page).to have_css('.member-sort-dropdown .dropdown-toggle-text', text: 'Name, descending')
end
- scenario 'sorts by recent sign in' do
+ scenario 'sorts by recent sign in', :redis do
visit_members_list(sort: :recent_sign_in)
expect(first_member).to include(master.name)
@@ -76,7 +76,7 @@ feature 'Projects > Members > Sorting', feature: true do
expect(page).to have_css('.member-sort-dropdown .dropdown-toggle-text', text: 'Recent sign in')
end
- scenario 'sorts by oldest sign in' do
+ scenario 'sorts by oldest sign in', :redis do
visit_members_list(sort: :oldest_sign_in)
expect(first_member).to include(developer.name)
diff --git a/spec/features/projects/pipeline_schedules_spec.rb b/spec/features/projects/pipeline_schedules_spec.rb
new file mode 100644
index 00000000000..1211b17b3d8
--- /dev/null
+++ b/spec/features/projects/pipeline_schedules_spec.rb
@@ -0,0 +1,146 @@
+require 'spec_helper'
+
+feature 'Pipeline Schedules', :feature do
+ include PipelineSchedulesHelper
+ include WaitForAjax
+
+ let!(:project) { create(:project) }
+ let!(:pipeline_schedule) { create(:ci_pipeline_schedule, project: project) }
+ let!(:pipeline) { create(:ci_pipeline, pipeline_schedule: pipeline_schedule) }
+ let(:scope) { nil }
+ let!(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+
+ login_as(user)
+ visit_page
+ end
+
+ describe 'GET /projects/pipeline_schedules' do
+ let(:visit_page) { visit_pipelines_schedules }
+
+ it 'avoids N + 1 queries' do
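+ # Capture a baseline query count with one schedule, then verify the count does not grow after adding more.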
+ control_count = ActiveRecord::QueryRecorder.new { visit_pipelines_schedules }.count
+
+ create_list(:ci_pipeline_schedule, 2, project: project)
+
+ expect { visit_pipelines_schedules }.not_to exceed_query_limit(control_count)
+ end
+
+ describe 'The view' do
+ it 'displays the required information description' do
+ page.within('.pipeline-schedule-table-row') do
+ expect(page).to have_content('pipeline schedule')
+ expect(page).to have_link('master')
+ expect(page).to have_link("##{pipeline.id}")
+ end
+ end
+
+ it 'creates a new scheduled pipeline' do
+ click_link 'New schedule'
+
+ expect(page).to have_content('Schedule a new pipeline')
+ end
+
+ it 'changes ownership of the pipeline' do
+ click_link 'Take ownership'
+ page.within('.pipeline-schedule-table-row') do
+ expect(page).not_to have_content('No owner')
+ expect(page).to have_link('John Doe')
+ end
+ end
+
+ it 'edits the pipeline' do
+ page.within('.pipeline-schedule-table-row') do
+ click_link 'Edit'
+ end
+
+ expect(page).to have_content('Edit Pipeline Schedule')
+ end
+
+ it 'deletes the pipeline' do
+ click_link 'Delete'
+
+ expect(page).not_to have_content('pipeline schedule')
+ end
+ end
+ end
+
+ describe 'POST /projects/pipeline_schedules/new', js: true do
+ let(:visit_page) { visit_new_pipeline_schedule }
+
+ it 'sets defaults for timezone and target branch' do
+ expect(page).to have_button('master')
+ expect(page).to have_button('UTC')
+ end
+
+ it 'creates a new scheduled pipeline' do
+ fill_in_schedule_form
+ save_pipeline_schedule
+
+ expect(page).to have_content('my fancy description')
+ end
+
+ it 'prevents an invalid form from being submitted' do
+ save_pipeline_schedule
+
+ expect(page).to have_content('This field is required')
+ end
+ end
+
+ describe 'PATCH /projects/pipelines_schedules/:id/edit', js: true do
+ let(:visit_page) do
+ edit_pipeline_schedule
+ end
+
+ it 'displays existing properties' do
+ description = find_field('schedule_description').value
+ expect(description).to eq('pipeline schedule')
+ expect(page).to have_button('master')
+ expect(page).to have_button('UTC')
+ end
+
+ it 'edits the scheduled pipeline' do
+ fill_in 'schedule_description', with: 'my brand new description'
+
+ save_pipeline_schedule
+
+ expect(page).to have_content('my brand new description')
+ end
+ end
+
+ def visit_new_pipeline_schedule
+ visit new_namespace_project_pipeline_schedule_path(project.namespace, project, pipeline_schedule)
+ end
+
+ def edit_pipeline_schedule
+ visit edit_namespace_project_pipeline_schedule_path(project.namespace, project, pipeline_schedule)
+ end
+
+ def visit_pipelines_schedules
+ visit namespace_project_pipeline_schedules_path(project.namespace, project, scope: scope)
+ end
+
+ def select_timezone
+ find('.js-timezone-dropdown').click
+ click_link 'American Samoa'
+ end
+
+ def select_target_branch
+ find('.js-target-branch-dropdown').click
+ click_link 'master'
+ end
+
+ def save_pipeline_schedule
+ click_button 'Save pipeline schedule'
+ end
+
+ def fill_in_schedule_form
+ fill_in 'schedule_description', with: 'my fancy description'
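+ # Cron fields are: minute hour day-of-month month day-of-week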
+ fill_in 'schedule_cron', with: '* 1 2 3 4'
+
+ select_timezone
+ select_target_branch
+ end
+end
diff --git a/spec/features/projects/pipelines/pipelines_spec.rb b/spec/features/projects/pipelines/pipelines_spec.rb
index 2272b19bc8f..5f82cf2f5e5 100644
--- a/spec/features/projects/pipelines/pipelines_spec.rb
+++ b/spec/features/projects/pipelines/pipelines_spec.rb
@@ -22,7 +22,7 @@ describe 'Pipelines', :feature, :js do
project: project,
ref: 'master',
status: 'running',
- sha: project.commit.id,
+ sha: project.commit.id
)
end
@@ -370,6 +370,58 @@ describe 'Pipelines', :feature, :js do
end
end
+ describe 'GET /:project/pipelines/show' do
+ let(:project) { create(:project) }
+
+ let(:pipeline) do
+ create(:ci_empty_pipeline,
+ project: project,
+ sha: project.commit.id,
+ user: user)
+ end
+
+ before do
+ create_build('build', 0, 'build', :success)
+ create_build('test', 1, 'rspec 0:2', :pending)
+ create_build('test', 1, 'rspec 1:2', :running)
+ create_build('test', 1, 'spinach 0:2', :created)
+ create_build('test', 1, 'spinach 1:2', :created)
+ create_build('test', 1, 'audit', :created)
+ create_build('deploy', 2, 'production', :created)
+
+ create(:generic_commit_status, pipeline: pipeline, stage: 'external', name: 'jenkins', stage_idx: 3)
+
+ visit namespace_project_pipeline_path(project.namespace, project, pipeline)
+ wait_for_vue_resource
+ end
+
+ it 'shows a graph with grouped stages' do
+ expect(page).to have_css('.js-pipeline-graph')
+
+ # header
+ expect(page).to have_text("##{pipeline.id}")
+ expect(page).to have_selector(%Q(img[alt$="#{pipeline.user.name}'s avatar"]))
+ expect(page).to have_link(pipeline.user.name, href: user_path(pipeline.user))
+
+ # stages
+ expect(page).to have_text('Build')
+ expect(page).to have_text('Test')
+ expect(page).to have_text('Deploy')
+ expect(page).to have_text('External')
+
+ # builds
+ expect(page).to have_text('rspec')
+ expect(page).to have_text('spinach')
+ expect(page).to have_text('rspec')
+ expect(page).to have_text('production')
+ expect(page).to have_text('jenkins')
+ end
+
+ def create_build(stage, stage_idx, name, status)
+ create(:ci_build, pipeline: pipeline, stage: stage, stage_idx: stage_idx, name: name, status: status)
+ end
+ end
+
describe 'POST /:project/pipelines' do
let(:project) { create(:project) }
diff --git a/spec/features/projects/settings/integration_settings_spec.rb b/spec/features/projects/settings/integration_settings_spec.rb
index 7909234556e..d3232f0cc16 100644
--- a/spec/features/projects/settings/integration_settings_spec.rb
+++ b/spec/features/projects/settings/integration_settings_spec.rb
@@ -52,6 +52,7 @@ feature 'Integration settings', feature: true do
fill_in 'hook_url', with: url
check 'Tag push events'
check 'Enable SSL verification'
+ check 'Job events'
click_button 'Add webhook'
@@ -59,6 +60,7 @@ feature 'Integration settings', feature: true do
expect(page).to have_content('SSL Verification: enabled')
expect(page).to have_content('Push Events')
expect(page).to have_content('Tag Push Events')
+ expect(page).to have_content('Job events')
end
scenario 'edit existing webhook' do
diff --git a/spec/features/projects/snippets_spec.rb b/spec/features/projects/snippets_spec.rb
index d37e8ed4699..18689c17fe9 100644
--- a/spec/features/projects/snippets_spec.rb
+++ b/spec/features/projects/snippets_spec.rb
@@ -4,11 +4,27 @@ describe 'Project snippets', feature: true do
context 'when the project has snippets' do
let(:project) { create(:empty_project, :public) }
let!(:snippets) { create_list(:project_snippet, 2, :public, author: project.owner, project: project) }
- before do
- allow(Snippet).to receive(:default_per_page).and_return(1)
- visit namespace_project_snippets_path(project.namespace, project)
+ let!(:other_snippet) { create(:project_snippet) }
+
+ context 'pagination' do
+ before do
+ allow(Snippet).to receive(:default_per_page).and_return(1)
+
+ visit namespace_project_snippets_path(project.namespace, project)
+ end
+
+ it_behaves_like 'paginated snippets'
end
- it_behaves_like 'paginated snippets'
+ context 'list content' do
+ it 'contains all project snippets' do
+ visit namespace_project_snippets_path(project.namespace, project)
+
+ expect(page).to have_selector('.snippet-row', count: 2)
+
+ expect(page).to have_content(snippets[0].title)
+ expect(page).to have_content(snippets[1].title)
+ end
+ end
end
end
diff --git a/spec/features/projects/wiki/markdown_preview_spec.rb b/spec/features/projects/wiki/markdown_preview_spec.rb
index 43d8b45669e..49d7ef09e64 100644
--- a/spec/features/projects/wiki/markdown_preview_spec.rb
+++ b/spec/features/projects/wiki/markdown_preview_spec.rb
@@ -17,14 +17,14 @@ feature 'Projects > Wiki > User previews markdown changes', feature: true, js: t
login_as(user)
visit namespace_project_path(project.namespace, project)
- click_link 'Wiki'
+ find('.shortcuts-wiki').trigger('click')
WikiPages::CreateService.new(project, user, title: 'home', content: 'Home page').execute
end
context "while creating a new wiki page" do
context "when there are no spaces or hyphens in the page name" do
it "rewrites relative links as expected" do
- click_link 'New page'
+ find('.add-new-wiki').trigger('click')
page.within '#modal-new-wiki' do
fill_in :new_wiki_path, with: 'a/b/c/d'
click_button 'Create page'
@@ -73,7 +73,7 @@ feature 'Projects > Wiki > User previews markdown changes', feature: true, js: t
fill_in :new_wiki_path, with: 'a-page/b-page/c-page/d-page'
click_button 'Create page'
end
-
+
page.within '.wiki-form' do
fill_in :wiki_content, with: wiki_content
click_on "Preview"
@@ -91,7 +91,7 @@ feature 'Projects > Wiki > User previews markdown changes', feature: true, js: t
context "while editing a wiki page" do
def create_wiki_page(path)
- click_link 'New page'
+ find('.add-new-wiki').trigger('click')
page.within '#modal-new-wiki' do
fill_in :new_wiki_path, with: path
diff --git a/spec/features/projects/wiki/user_creates_wiki_page_spec.rb b/spec/features/projects/wiki/user_creates_wiki_page_spec.rb
index 1ffac8cd542..5c502ce4fb5 100644
--- a/spec/features/projects/wiki/user_creates_wiki_page_spec.rb
+++ b/spec/features/projects/wiki/user_creates_wiki_page_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Projects > Wiki > User creates wiki page', feature: true do
+feature 'Projects > Wiki > User creates wiki page', js: true, feature: true do
let(:user) { create(:user) }
background do
@@ -8,7 +8,7 @@ feature 'Projects > Wiki > User creates wiki page', feature: true do
login_as(user)
visit namespace_project_path(project.namespace, project)
- click_link 'Wiki'
+ find('.shortcuts-wiki').trigger('click')
end
context 'in the user namespace' do
diff --git a/spec/features/protected_branches_spec.rb b/spec/features/protected_branches_spec.rb
index fc9b293c393..884d1bbb10c 100644
--- a/spec/features/protected_branches_spec.rb
+++ b/spec/features/protected_branches_spec.rb
@@ -1,5 +1,4 @@
require 'spec_helper'
-Dir["./spec/features/protected_branches/*.rb"].sort.each { |f| require f }
feature 'Projected Branches', feature: true, js: true do
let(:user) { create(:user, :admin) }
diff --git a/spec/features/protected_tags_spec.rb b/spec/features/protected_tags_spec.rb
index e68448467b0..66236dbc7fc 100644
--- a/spec/features/protected_tags_spec.rb
+++ b/spec/features/protected_tags_spec.rb
@@ -1,5 +1,4 @@
require 'spec_helper'
-Dir["./spec/features/protected_tags/*.rb"].sort.each { |f| require f }
feature 'Projected Tags', feature: true, js: true do
let(:user) { create(:user, :admin) }
diff --git a/spec/features/search_spec.rb b/spec/features/search_spec.rb
index 498a4a5cba0..2fda7758407 100644
--- a/spec/features/search_spec.rb
+++ b/spec/features/search_spec.rb
@@ -20,13 +20,14 @@ describe "Search", feature: true do
context 'search filters', js: true do
let(:group) { create(:group) }
+ let!(:group_project) { create(:empty_project, group: group) }
before do
group.add_owner(user)
end
it 'shows group name after filtering' do
- find('.js-search-group-dropdown').click
+ find('.js-search-group-dropdown').trigger('click')
wait_for_ajax
page.within '.search-holder' do
@@ -36,9 +37,27 @@ describe "Search", feature: true do
expect(find('.js-search-group-dropdown')).to have_content(group.name)
end
+ it 'filters by group projects after filtering by group' do
+ find('.js-search-group-dropdown').trigger('click')
+ wait_for_ajax
+
+ page.within '.search-holder' do
+ click_link group.name
+ end
+
+ expect(find('.js-search-group-dropdown')).to have_content(group.name)
+
+ page.within('.project-filter') do
+ find('.js-search-project-dropdown').trigger('click')
+ wait_for_ajax
+
+ expect(page).to have_link(group_project.name_with_namespace)
+ end
+ end
+
it 'shows project name after filtering' do
page.within('.project-filter') do
- find('.js-search-project-dropdown').click
+ find('.js-search-project-dropdown').trigger('click')
wait_for_ajax
click_link project.name_with_namespace
diff --git a/spec/features/security/project/internal_access_spec.rb b/spec/features/security/project/internal_access_spec.rb
index 26879a77c48..78a76d9c112 100644
--- a/spec/features/security/project/internal_access_spec.rb
+++ b/spec/features/security/project/internal_access_spec.rb
@@ -3,7 +3,7 @@ require 'spec_helper'
describe "Internal Project Access", feature: true do
include AccessMatchers
- let(:project) { create(:project, :internal) }
+ set(:project) { create(:project, :internal) }
describe "Project should be internal" do
describe '#internal?' do
@@ -437,6 +437,20 @@ describe "Internal Project Access", feature: true do
end
end
+ describe "GET /:project_path/pipeline_schedules" do
+ subject { namespace_project_pipeline_schedules_path(project.namespace, project) }
+
+ it { is_expected.to be_allowed_for(:admin) }
+ it { is_expected.to be_allowed_for(:owner).of(project) }
+ it { is_expected.to be_allowed_for(:master).of(project) }
+ it { is_expected.to be_allowed_for(:developer).of(project) }
+ it { is_expected.to be_allowed_for(:reporter).of(project) }
+ it { is_expected.to be_allowed_for(:guest).of(project) }
+ it { is_expected.to be_allowed_for(:user) }
+ it { is_expected.to be_denied_for(:external) }
+ it { is_expected.to be_denied_for(:visitor) }
+ end
+
describe "GET /:project_path/environments" do
subject { namespace_project_environments_path(project.namespace, project) }
diff --git a/spec/features/security/project/private_access_spec.rb b/spec/features/security/project/private_access_spec.rb
index 699ca4f724c..a66f6e09055 100644
--- a/spec/features/security/project/private_access_spec.rb
+++ b/spec/features/security/project/private_access_spec.rb
@@ -3,7 +3,7 @@ require 'spec_helper'
describe "Private Project Access", feature: true do
include AccessMatchers
- let(:project) { create(:project, :private, public_builds: false) }
+ set(:project) { create(:project, :private, public_builds: false) }
describe "Project should be private" do
describe '#private?' do
@@ -478,6 +478,48 @@ describe "Private Project Access", feature: true do
it { is_expected.to be_denied_for(:visitor) }
end
+ describe "GET /:project_path/pipeline_schedules" do
+ subject { namespace_project_pipeline_schedules_path(project.namespace, project) }
+
+ it { is_expected.to be_allowed_for(:admin) }
+ it { is_expected.to be_allowed_for(:owner).of(project) }
+ it { is_expected.to be_allowed_for(:master).of(project) }
+ it { is_expected.to be_allowed_for(:developer).of(project) }
+ it { is_expected.to be_allowed_for(:reporter).of(project) }
+ it { is_expected.to be_denied_for(:guest).of(project) }
+ it { is_expected.to be_denied_for(:user) }
+ it { is_expected.to be_denied_for(:external) }
+ it { is_expected.to be_denied_for(:visitor) }
+ end
+
+ describe "GET /:project_path/pipeline_schedules/new" do
+ subject { new_namespace_project_pipeline_schedule_path(project.namespace, project) }
+
+ it { is_expected.to be_allowed_for(:admin) }
+ it { is_expected.to be_allowed_for(:owner).of(project) }
+ it { is_expected.to be_allowed_for(:master).of(project) }
+ it { is_expected.to be_allowed_for(:developer).of(project) }
+ it { is_expected.to be_denied_for(:reporter).of(project) }
+ it { is_expected.to be_denied_for(:guest).of(project) }
+ it { is_expected.to be_denied_for(:user) }
+ it { is_expected.to be_denied_for(:external) }
+ it { is_expected.to be_denied_for(:visitor) }
+ end
+
+ describe "GET /:project_path/environments/new" do
+ subject { new_namespace_project_pipeline_schedule_path(project.namespace, project) }
+
+ it { is_expected.to be_allowed_for(:admin) }
+ it { is_expected.to be_allowed_for(:owner).of(project) }
+ it { is_expected.to be_allowed_for(:master).of(project) }
+ it { is_expected.to be_allowed_for(:developer).of(project) }
+ it { is_expected.to be_denied_for(:reporter).of(project) }
+ it { is_expected.to be_denied_for(:guest).of(project) }
+ it { is_expected.to be_denied_for(:user) }
+ it { is_expected.to be_denied_for(:external) }
+ it { is_expected.to be_denied_for(:visitor) }
+ end
+
describe "GET /:project_path/container_registry" do
let(:container_repository) { create(:container_repository) }
diff --git a/spec/features/security/project/public_access_spec.rb b/spec/features/security/project/public_access_spec.rb
index 624f0d0f485..5cd575500c3 100644
--- a/spec/features/security/project/public_access_spec.rb
+++ b/spec/features/security/project/public_access_spec.rb
@@ -3,7 +3,7 @@ require 'spec_helper'
describe "Public Project Access", feature: true do
include AccessMatchers
- let(:project) { create(:project, :public) }
+ set(:project) { create(:project, :public) }
describe "Project should be public" do
describe '#public?' do
@@ -257,6 +257,20 @@ describe "Public Project Access", feature: true do
end
end
+ describe "GET /:project_path/pipeline_schedules" do
+ subject { namespace_project_pipeline_schedules_path(project.namespace, project) }
+
+ it { is_expected.to be_allowed_for(:admin) }
+ it { is_expected.to be_allowed_for(:owner).of(project) }
+ it { is_expected.to be_allowed_for(:master).of(project) }
+ it { is_expected.to be_allowed_for(:developer).of(project) }
+ it { is_expected.to be_allowed_for(:reporter).of(project) }
+ it { is_expected.to be_allowed_for(:guest).of(project) }
+ it { is_expected.to be_allowed_for(:user) }
+ it { is_expected.to be_allowed_for(:external) }
+ it { is_expected.to be_allowed_for(:visitor) }
+ end
+
describe "GET /:project_path/environments" do
subject { namespace_project_environments_path(project.namespace, project) }
diff --git a/spec/features/signup_spec.rb b/spec/features/signup_spec.rb
index 9fde8d6e5cf..d7b6dda4946 100644
--- a/spec/features/signup_spec.rb
+++ b/spec/features/signup_spec.rb
@@ -3,7 +3,7 @@ require 'spec_helper'
feature 'Signup', feature: true do
describe 'signup with no errors' do
context "when sending confirmation email" do
- before { allow_any_instance_of(ApplicationSetting).to receive(:send_user_confirmation_email).and_return(true) }
+ before { stub_application_setting(send_user_confirmation_email: true) }
it 'creates the user account and sends a confirmation email' do
user = build(:user)
@@ -23,7 +23,7 @@ feature 'Signup', feature: true do
end
context "when not sending confirmation email" do
- before { allow_any_instance_of(ApplicationSetting).to receive(:send_user_confirmation_email).and_return(false) }
+ before { stub_application_setting(send_user_confirmation_email: false) }
it 'creates the user account and goes to dashboard' do
user = build(:user)
diff --git a/spec/features/snippets/explore_spec.rb b/spec/features/snippets/explore_spec.rb
index 10a4597e467..fd097fe2e74 100644
--- a/spec/features/snippets/explore_spec.rb
+++ b/spec/features/snippets/explore_spec.rb
@@ -1,11 +1,11 @@
require 'rails_helper'
feature 'Explore Snippets', feature: true do
- scenario 'User should see snippets that are not private' do
- public_snippet = create(:personal_snippet, :public)
- internal_snippet = create(:personal_snippet, :internal)
- private_snippet = create(:personal_snippet, :private)
+ let!(:public_snippet) { create(:personal_snippet, :public) }
+ let!(:internal_snippet) { create(:personal_snippet, :internal) }
+ let!(:private_snippet) { create(:personal_snippet, :private) }
+ scenario 'User should see snippets that are not private' do
login_as create(:user)
visit explore_snippets_path
@@ -13,4 +13,21 @@ feature 'Explore Snippets', feature: true do
expect(page).to have_content(internal_snippet.title)
expect(page).not_to have_content(private_snippet.title)
end
+
+ scenario 'External user should see only public snippets' do
+ login_as create(:user, :external)
+ visit explore_snippets_path
+
+ expect(page).to have_content(public_snippet.title)
+ expect(page).not_to have_content(internal_snippet.title)
+ expect(page).not_to have_content(private_snippet.title)
+ end
+
+ scenario 'Not authenticated user should see only public snippets' do
+ visit explore_snippets_path
+
+ expect(page).to have_content(public_snippet.title)
+ expect(page).not_to have_content(internal_snippet.title)
+ expect(page).not_to have_content(private_snippet.title)
+ end
end
diff --git a/spec/features/snippets/internal_snippet_spec.rb b/spec/features/snippets/internal_snippet_spec.rb
new file mode 100644
index 00000000000..93382f4c359
--- /dev/null
+++ b/spec/features/snippets/internal_snippet_spec.rb
@@ -0,0 +1,23 @@
+require 'rails_helper'
+
+feature 'Internal Snippets', feature: true, js: true do
+ let(:internal_snippet) { create(:personal_snippet, :internal) }
+
+ describe 'normal user' do
+ before do
+ login_as :user
+ end
+
+ scenario 'sees internal snippets' do
+ visit snippet_path(internal_snippet)
+
+ expect(page).to have_content(internal_snippet.content)
+ end
+
+ scenario 'sees raw internal snippets' do
+ visit raw_snippet_path(internal_snippet)
+
+ expect(page).to have_content(internal_snippet.content)
+ end
+ end
+end
diff --git a/spec/features/snippets/notes_on_personal_snippets_spec.rb b/spec/features/snippets/notes_on_personal_snippets_spec.rb
index 957baac02eb..698eb46573f 100644
--- a/spec/features/snippets/notes_on_personal_snippets_spec.rb
+++ b/spec/features/snippets/notes_on_personal_snippets_spec.rb
@@ -78,9 +78,11 @@ describe 'Comments on personal snippets', :js, feature: true do
end
page.within("#notes-list li#note_#{snippet_notes[0].id}") do
+ edited_text = find('.edited-text')
+
expect(page).to have_css('.note_edited_ago')
expect(page).to have_content('new content')
- expect(find('.note_edited_ago').text).to match(/less than a minute ago/)
+ expect(edited_text).to have_selector('.note_edited_ago')
end
end
end
diff --git a/spec/features/tags/master_creates_tag_spec.rb b/spec/features/tags/master_creates_tag_spec.rb
index ca25c696f75..af25eebed13 100644
--- a/spec/features/tags/master_creates_tag_spec.rb
+++ b/spec/features/tags/master_creates_tag_spec.rb
@@ -51,10 +51,24 @@ feature 'Master creates tag', feature: true do
end
end
+ scenario 'opens dropdown for ref', js: true do
+ click_link 'New tag'
+ ref_row = find('.form-group:nth-of-type(2) .col-sm-10')
+ page.within ref_row do
+ ref_input = find('[name="ref"]', visible: false)
+ expect(ref_input.value).to eq 'master'
+ expect(find('.dropdown-toggle-text')).to have_content 'master'
+
+ find('.js-branch-select').trigger('click')
+
+ expect(find('.dropdown-menu')).to have_content 'empty-branch'
+ end
+ end
+
def create_tag_in_form(tag:, ref:, message: nil, desc: nil)
click_link 'New tag'
fill_in 'tag_name', with: tag
- fill_in 'ref', with: ref
+ find('#ref', visible: false).set(ref)
fill_in 'message', with: message unless message.nil?
fill_in 'release_description', with: desc unless desc.nil?
click_button 'Create tag'
diff --git a/spec/features/todos/todos_spec.rb b/spec/features/todos/todos_spec.rb
index be5b3af417f..55b3e3d9424 100644
--- a/spec/features/todos/todos_spec.rb
+++ b/spec/features/todos/todos_spec.rb
@@ -251,7 +251,7 @@ describe 'Dashboard Todos', feature: true do
describe 'mark all as done', js: true do
before do
visit dashboard_todos_path
- click_link 'Mark all as done'
+ find('.js-todos-mark-all').trigger('click')
end
it 'shows "All done" message!' do
@@ -308,9 +308,9 @@ describe 'Dashboard Todos', feature: true do
end
def mark_all_and_undo
- click_link 'Mark all as done'
+ find('.js-todos-mark-all').trigger('click')
wait_for_ajax
- click_link 'Undo mark all as done'
+ find('.js-todos-undo-all').trigger('click')
wait_for_ajax
end
end
diff --git a/spec/features/triggers_spec.rb b/spec/features/triggers_spec.rb
index 783f330221c..c1ae6db00c6 100644
--- a/spec/features/triggers_spec.rb
+++ b/spec/features/triggers_spec.rb
@@ -77,77 +77,6 @@ feature 'Triggers', feature: true, js: true do
expect(page.find('.flash-notice')).to have_content 'Trigger was successfully updated.'
expect(page.find('.triggers-list')).to have_content new_trigger_title
end
-
- context 'scheduled triggers' do
- let!(:trigger) do
- create(:ci_trigger, owner: user, project: @project, description: trigger_title)
- end
-
- context 'enabling schedule' do
- before do
- visit edit_namespace_project_trigger_path(@project.namespace, @project, trigger)
- end
-
- scenario 'do fill form with valid data and save' do
- find('#trigger_trigger_schedule_attributes_active').click
- fill_in 'trigger_trigger_schedule_attributes_cron', with: '1 * * * *'
- fill_in 'trigger_trigger_schedule_attributes_cron_timezone', with: 'UTC'
- fill_in 'trigger_trigger_schedule_attributes_ref', with: 'master'
- click_button 'Save trigger'
-
- expect(page.find('.flash-notice')).to have_content 'Trigger was successfully updated.'
- end
-
- scenario 'do not fill form with valid data and save' do
- find('#trigger_trigger_schedule_attributes_active').click
- click_button 'Save trigger'
-
- expect(page).to have_content 'The form contains the following errors'
- end
-
- context 'when GitLab time_zone is ActiveSupport::TimeZone format' do
- before do
- allow(Time).to receive(:zone)
- .and_return(ActiveSupport::TimeZone['Eastern Time (US & Canada)'])
- end
-
- scenario 'do fill form with valid data and save' do
- find('#trigger_trigger_schedule_attributes_active').click
- fill_in 'trigger_trigger_schedule_attributes_cron', with: '1 * * * *'
- fill_in 'trigger_trigger_schedule_attributes_cron_timezone', with: 'UTC'
- fill_in 'trigger_trigger_schedule_attributes_ref', with: 'master'
- click_button 'Save trigger'
-
- expect(page.find('.flash-notice'))
- .to have_content 'Trigger was successfully updated.'
- end
- end
- end
-
- context 'disabling schedule' do
- before do
- trigger.create_trigger_schedule(
- project: trigger.project,
- active: true,
- ref: 'master',
- cron: '1 * * * *',
- cron_timezone: 'UTC')
-
- visit edit_namespace_project_trigger_path(@project.namespace, @project, trigger)
- end
-
- scenario 'disable and save form' do
- find('#trigger_trigger_schedule_attributes_active').click
- click_button 'Save trigger'
- expect(page.find('.flash-notice')).to have_content 'Trigger was successfully updated.'
-
- visit edit_namespace_project_trigger_path(@project.namespace, @project, trigger)
- checkbox = find_field('trigger_trigger_schedule_attributes_active')
-
- expect(checkbox).not_to be_checked
- end
- end
- end
end
describe 'trigger "Take ownership" workflow' do
diff --git a/spec/features/uploads/user_uploads_file_to_note_spec.rb b/spec/features/uploads/user_uploads_file_to_note_spec.rb
index 0c160dd74b4..8f03024ea06 100644
--- a/spec/features/uploads/user_uploads_file_to_note_spec.rb
+++ b/spec/features/uploads/user_uploads_file_to_note_spec.rb
@@ -5,18 +5,78 @@ feature 'User uploads file to note', feature: true do
let(:user) { create(:user) }
let(:project) { create(:empty_project, creator: user, namespace: user.namespace) }
+ let(:issue) { create(:issue, project: project, author: user) }
- scenario 'they see the attached file', js: true do
- issue = create(:issue, project: project, author: user)
-
+ before do
login_as(user)
visit namespace_project_issue_path(project.namespace, project, issue)
+ end
+
+ context 'before uploading' do
+ it 'shows "Attach a file" button', js: true do
+ expect(page).to have_button('Attach a file')
+ expect(page).not_to have_selector('.uploading-progress-container', visible: true)
+ end
+ end
+
+ context 'uploading is in progress' do
+ it 'shows "Cancel" button on uploading', js: true do
+ dropzone_file([Rails.root.join('spec', 'fixtures', 'dk.png')], 0, false)
+
+ expect(page).to have_button('Cancel')
+ end
+
+ it 'cancels uploading on clicking the "Cancel" button', js: true do
+ dropzone_file([Rails.root.join('spec', 'fixtures', 'dk.png')], 0, false)
+
+ click_button 'Cancel'
+
+ expect(page).to have_button('Attach a file')
+ expect(page).not_to have_button('Cancel')
+ expect(page).not_to have_selector('.uploading-progress-container', visible: true)
+ end
+
+ it 'shows "Attaching a file" message on uploading 1 file', js: true do
+ dropzone_file([Rails.root.join('spec', 'fixtures', 'dk.png')], 0, false)
+
+ expect(page).to have_selector('.attaching-file-message', visible: true, text: 'Attaching a file -')
+ end
+
+ it 'shows "Attaching 2 files" message on uploading 2 file', js: true do
+ dropzone_file([Rails.root.join('spec', 'fixtures', 'video_sample.mp4'),
+ Rails.root.join('spec', 'fixtures', 'dk.png')], 0, false)
+
+ expect(page).to have_selector('.attaching-file-message', visible: true, text: 'Attaching 2 files -')
+ end
+
+ it 'shows error message, "retry" and "attach a new file" links if file is too big', js: true do
+ dropzone_file([Rails.root.join('spec', 'fixtures', 'video_sample.mp4')], 0.01)
+
+ error_text = 'File is too big (0.06MiB). Max filesize: 0.01MiB.'
+
+ expect(page).to have_selector('.uploading-error-message', visible: true, text: error_text)
+ expect(page).to have_selector('.retry-uploading-link', visible: true, text: 'Try again')
+ expect(page).to have_selector('.attach-new-file', visible: true, text: 'attach a new file')
+ expect(page).not_to have_button('Attach a file')
+ end
+ end
+
+ context 'uploading is complete' do
+ it 'shows "Attach a file" button on uploading complete', js: true do
+ dropzone_file([Rails.root.join('spec', 'fixtures', 'dk.png')])
+ wait_for_ajax
+
+ expect(page).to have_button('Attach a file')
+ expect(page).not_to have_selector('.uploading-progress-container', visible: true)
+ end
- dropzone_file(Rails.root.join('spec', 'fixtures', 'dk.png'))
- click_button 'Comment'
- wait_for_ajax
+ scenario 'they see the attached file', js: true do
+ dropzone_file([Rails.root.join('spec', 'fixtures', 'dk.png')])
+ click_button 'Comment'
+ wait_for_ajax
- expect(find('a.no-attachment-icon img[alt="dk"]')['src'])
- .to match(%r{/#{project.full_path}/uploads/\h{32}/dk\.png$})
+ expect(find('a.no-attachment-icon img[alt="dk"]')['src'])
+ .to match(%r{/#{project.full_path}/uploads/\h{32}/dk\.png$})
+ end
end
end
diff --git a/spec/features/user_callout_spec.rb b/spec/features/user_callout_spec.rb
index 848af5e3a4d..b84f834ff1e 100644
--- a/spec/features/user_callout_spec.rb
+++ b/spec/features/user_callout_spec.rb
@@ -20,7 +20,7 @@ describe 'User Callouts', js: true do
visit dashboard_projects_path
within('.user-callout') do
- find('.close').click
+ find('.close').trigger('click')
end
visit dashboard_projects_path
diff --git a/spec/features/users/snippets_spec.rb b/spec/features/users/snippets_spec.rb
index 1546a06b80c..4efbd672322 100644
--- a/spec/features/users/snippets_spec.rb
+++ b/spec/features/users/snippets_spec.rb
@@ -3,14 +3,46 @@ require 'spec_helper'
describe 'Snippets tab on a user profile', feature: true, js: true do
context 'when the user has snippets' do
let(:user) { create(:user) }
- let!(:snippets) { create_list(:snippet, 2, :public, author: user) }
- before do
- allow(Snippet).to receive(:default_per_page).and_return(1)
- visit user_path(user)
- page.within('.user-profile-nav') { click_link 'Snippets' }
- wait_for_ajax
+
+ context 'pagination' do
+ let!(:snippets) { create_list(:snippet, 2, :public, author: user) }
+
+ before do
+ allow(Snippet).to receive(:default_per_page).and_return(1)
+ visit user_path(user)
+ page.within('.user-profile-nav') { click_link 'Snippets' }
+ wait_for_ajax
+ end
+
+ it_behaves_like 'paginated snippets', remote: true
end
- it_behaves_like 'paginated snippets', remote: true
+ context 'list content' do
+ let!(:public_snippet) { create(:snippet, :public, author: user) }
+ let!(:internal_snippet) { create(:snippet, :internal, author: user) }
+ let!(:private_snippet) { create(:snippet, :private, author: user) }
+ let!(:other_snippet) { create(:snippet, :public) }
+
+ it 'contains only internal and public snippets of a user when a user is logged in' do
+ login_as(:user)
+ visit user_path(user)
+ page.within('.user-profile-nav') { click_link 'Snippets' }
+ wait_for_ajax
+
+ expect(page).to have_selector('.snippet-row', count: 2)
+
+ expect(page).to have_content(public_snippet.title)
+ expect(page).to have_content(internal_snippet.title)
+ end
+
+ it 'contains only public snippets of a user when a user is not logged in' do
+ visit user_path(user)
+ page.within('.user-profile-nav') { click_link 'Snippets' }
+ wait_for_ajax
+
+ expect(page).to have_selector('.snippet-row', count: 1)
+ expect(page).to have_content(public_snippet.title)
+ end
+ end
end
end
diff --git a/spec/finders/groups_finder_spec.rb b/spec/finders/groups_finder_spec.rb
index d5d111e8d15..5b3591550c1 100644
--- a/spec/finders/groups_finder_spec.rb
+++ b/spec/finders/groups_finder_spec.rb
@@ -3,29 +3,64 @@ require 'spec_helper'
describe GroupsFinder do
describe '#execute' do
let(:user) { create(:user) }
- let!(:private_group) { create(:group, :private) }
- let!(:internal_group) { create(:group, :internal) }
- let!(:public_group) { create(:group, :public) }
- let(:finder) { described_class.new }
- describe 'execute' do
- describe 'without a user' do
- subject { finder.execute }
+ context 'root level groups' do
+ let!(:private_group) { create(:group, :private) }
+ let!(:internal_group) { create(:group, :internal) }
+ let!(:public_group) { create(:group, :public) }
+
+ context 'without a user' do
+ subject { described_class.new.execute }
it { is_expected.to eq([public_group]) }
end
- describe 'with a user' do
- subject { finder.execute(user) }
+ context 'with a user' do
+ subject { described_class.new(user).execute }
context 'normal user' do
- it { is_expected.to eq([public_group, internal_group]) }
+ it { is_expected.to contain_exactly(public_group, internal_group) }
end
context 'external user' do
let(:user) { create(:user, external: true) }
- it { is_expected.to eq([public_group]) }
+ it { is_expected.to contain_exactly(public_group) }
+ end
+
+ context 'user is member of the private group' do
+ before do
+ private_group.add_guest(user)
+ end
+
+ it { is_expected.to contain_exactly(public_group, internal_group, private_group) }
+ end
+ end
+ end
+
+ context 'subgroups' do
+ let!(:parent_group) { create(:group, :public) }
+ let!(:public_subgroup) { create(:group, :public, parent: parent_group) }
+ let!(:internal_subgroup) { create(:group, :internal, parent: parent_group) }
+ let!(:private_subgroup) { create(:group, :private, parent: parent_group) }
+
+ context 'without a user' do
+ it 'only returns public subgroups' do
+ expect(described_class.new(nil, parent: parent_group).execute).to contain_exactly(public_subgroup)
+ end
+ end
+
+ context 'with a user' do
+ it 'returns public and internal subgroups' do
+ expect(described_class.new(user, parent: parent_group).execute).to contain_exactly(public_subgroup, internal_subgroup)
+ end
+
+ context 'being member' do
+ it 'returns public subgroups, internal subgroups, and private subgroups user is member of' do
+ private_subgroup.add_guest(user)
+
+ expect(described_class.new(user, parent: parent_group).execute).to contain_exactly(public_subgroup, internal_subgroup, private_subgroup)
+ end
end
end
end
diff --git a/spec/finders/pipeline_schedules_finder_spec.rb b/spec/finders/pipeline_schedules_finder_spec.rb
new file mode 100644
index 00000000000..e184a87c9c7
--- /dev/null
+++ b/spec/finders/pipeline_schedules_finder_spec.rb
@@ -0,0 +1,41 @@
+require 'spec_helper'
+
+describe PipelineSchedulesFinder do
+ let(:project) { create(:empty_project) }
+
+ let!(:active_schedule) { create(:ci_pipeline_schedule, project: project) }
+ let!(:inactive_schedule) { create(:ci_pipeline_schedule, :inactive, project: project) }
+
+ subject { described_class.new(project).execute(params) }
+
+ describe "#execute" do
+ context 'when the scope is nil' do
+ let(:params) { { scope: nil } }
+
+ it 'selects all pipeline schedules' do
+ expect(subject.count).to be(2)
+ expect(subject).to include(active_schedule, inactive_schedule)
+ end
+ end
+
+ context 'when the scope is active' do
+ let(:params) { { scope: 'active' } }
+
+ it 'selects only active pipeline schedules' do
+ expect(subject.count).to be(1)
+ expect(subject).to include(active_schedule)
+ expect(subject).not_to include(inactive_schedule)
+ end
+ end
+
+ context 'when the scope is inactive' do
+ let(:params) { { scope: 'inactive' } }
+
+ it 'selects only inactive pipeline schedules' do
+ expect(subject.count).to be(1)
+ expect(subject).not_to include(active_schedule)
+ expect(subject).to include(inactive_schedule)
+ end
+ end
+ end
+end
diff --git a/spec/finders/snippets_finder_spec.rb b/spec/finders/snippets_finder_spec.rb
index cb6c80d1bd0..35f1683eef9 100644
--- a/spec/finders/snippets_finder_spec.rb
+++ b/spec/finders/snippets_finder_spec.rb
@@ -8,79 +8,145 @@ describe SnippetsFinder do
let(:project1) { create(:empty_project, :public, group: group) }
let(:project2) { create(:empty_project, :private, group: group) }
- context ':all filter' do
+ context 'all snippets visible to a user' do
let!(:snippet1) { create(:personal_snippet, :private) }
let!(:snippet2) { create(:personal_snippet, :internal) }
let!(:snippet3) { create(:personal_snippet, :public) }
+ let!(:project_snippet1) { create(:project_snippet, :private) }
+ let!(:project_snippet2) { create(:project_snippet, :internal) }
+ let!(:project_snippet3) { create(:project_snippet, :public) }
it "returns all private and internal snippets" do
- snippets = described_class.new.execute(user, filter: :all)
- expect(snippets).to include(snippet2, snippet3)
- expect(snippets).not_to include(snippet1)
+ snippets = described_class.new(user, scope: :all).execute
+ expect(snippets).to include(snippet2, snippet3, project_snippet2, project_snippet3)
+ expect(snippets).not_to include(snippet1, project_snippet1)
end
it "returns all public snippets" do
- snippets = described_class.new.execute(nil, filter: :all)
- expect(snippets).to include(snippet3)
- expect(snippets).not_to include(snippet1, snippet2)
+ snippets = described_class.new(nil, scope: :all).execute
+ expect(snippets).to include(snippet3, project_snippet3)
+ expect(snippets).not_to include(snippet1, snippet2, project_snippet1, project_snippet2)
+ end
+
+ it "returns all public and internal snippets for normal user" do
+ snippets = described_class.new(user).execute
+
+ expect(snippets).to include(snippet2, snippet3, project_snippet2, project_snippet3)
+ expect(snippets).not_to include(snippet1, project_snippet1)
+ end
+
+ it "returns all public snippets for non authorized user" do
+ snippets = described_class.new(nil).execute
+
+ expect(snippets).to include(snippet3, project_snippet3)
+ expect(snippets).not_to include(snippet1, snippet2, project_snippet1, project_snippet2)
+ end
+
+ it "returns all public and authored snippets for external user" do
+ external_user = create(:user, :external)
+ authored_snippet = create(:personal_snippet, :internal, author: external_user)
+
+ snippets = described_class.new(external_user).execute
+
+ expect(snippets).to include(snippet3, project_snippet3, authored_snippet)
+ expect(snippets).not_to include(snippet1, snippet2, project_snippet1, project_snippet2)
end
end
- context ':public filter' do
+ context 'filter by visibility' do
let!(:snippet1) { create(:personal_snippet, :private) }
let!(:snippet2) { create(:personal_snippet, :internal) }
let!(:snippet3) { create(:personal_snippet, :public) }
- it "returns public public snippets" do
- snippets = described_class.new.execute(nil, filter: :public)
+ it "returns public snippets when visibility is PUBLIC" do
+ snippets = described_class.new(nil, visibility: Snippet::PUBLIC).execute
expect(snippets).to include(snippet3)
expect(snippets).not_to include(snippet1, snippet2)
end
end
- context ':by_user filter' do
+ context 'filter by scope' do
+ let!(:snippet1) { create(:personal_snippet, :private, author: user) }
+ let!(:snippet2) { create(:personal_snippet, :internal, author: user) }
+ let!(:snippet3) { create(:personal_snippet, :public, author: user) }
+
+ it "returns all snippets for 'all' scope" do
+ snippets = described_class.new(user, scope: :all).execute
+
+ expect(snippets).to include(snippet1, snippet2, snippet3)
+ end
+
+ it "returns all snippets for 'are_private' scope" do
+ snippets = described_class.new(user, scope: :are_private).execute
+
+ expect(snippets).to include(snippet1)
+ expect(snippets).not_to include(snippet2, snippet3)
+ end
+
+ it "returns all snippets for 'are_interna;' scope" do
+ snippets = described_class.new(user, scope: :are_internal).execute
+
+ expect(snippets).to include(snippet2)
+ expect(snippets).not_to include(snippet1, snippet3)
+ end
+
+ it "returns all snippets for 'are_private' scope" do
+ snippets = described_class.new(user, scope: :are_public).execute
+
+ expect(snippets).to include(snippet3)
+ expect(snippets).not_to include(snippet1, snippet2)
+ end
+ end
+
+ context 'filter by author' do
let!(:snippet1) { create(:personal_snippet, :private, author: user) }
let!(:snippet2) { create(:personal_snippet, :internal, author: user) }
let!(:snippet3) { create(:personal_snippet, :public, author: user) }
it "returns all public and internal snippets" do
- snippets = described_class.new.execute(user1, filter: :by_user, user: user)
+ snippets = described_class.new(user1, author: user).execute
+
expect(snippets).to include(snippet2, snippet3)
expect(snippets).not_to include(snippet1)
end
it "returns internal snippets" do
- snippets = described_class.new.execute(user, filter: :by_user, user: user, scope: "are_internal")
+ snippets = described_class.new(user, author: user, visibility: Snippet::INTERNAL).execute
+
expect(snippets).to include(snippet2)
expect(snippets).not_to include(snippet1, snippet3)
end
it "returns private snippets" do
- snippets = described_class.new.execute(user, filter: :by_user, user: user, scope: "are_private")
+ snippets = described_class.new(user, author: user, visibility: Snippet::PRIVATE).execute
+
expect(snippets).to include(snippet1)
expect(snippets).not_to include(snippet2, snippet3)
end
it "returns public snippets" do
- snippets = described_class.new.execute(user, filter: :by_user, user: user, scope: "are_public")
+ snippets = described_class.new(user, author: user, visibility: Snippet::PUBLIC).execute
+
expect(snippets).to include(snippet3)
expect(snippets).not_to include(snippet1, snippet2)
end
it "returns all snippets" do
- snippets = described_class.new.execute(user, filter: :by_user, user: user)
+ snippets = described_class.new(user, author: user).execute
+
expect(snippets).to include(snippet1, snippet2, snippet3)
end
it "returns only public snippets if unauthenticated user" do
- snippets = described_class.new.execute(nil, filter: :by_user, user: user)
+ snippets = described_class.new(nil, author: user).execute
+
expect(snippets).to include(snippet3)
expect(snippets).not_to include(snippet2, snippet1)
end
end
- context 'by_project filter' do
+ context 'filter by project' do
before do
@snippet1 = create(:project_snippet, :private, project: project1)
@snippet2 = create(:project_snippet, :internal, project: project1)
@@ -88,43 +154,52 @@ describe SnippetsFinder do
end
it "returns public snippets for unauthorized user" do
- snippets = described_class.new.execute(nil, filter: :by_project, project: project1)
+ snippets = described_class.new(nil, project: project1).execute
+
expect(snippets).to include(@snippet3)
expect(snippets).not_to include(@snippet1, @snippet2)
end
it "returns public and internal snippets for non project members" do
- snippets = described_class.new.execute(user, filter: :by_project, project: project1)
+ snippets = described_class.new(user, project: project1).execute
+
expect(snippets).to include(@snippet2, @snippet3)
expect(snippets).not_to include(@snippet1)
end
it "returns public snippets for non project members" do
- snippets = described_class.new.execute(user, filter: :by_project, project: project1, scope: "are_public")
+ snippets = described_class.new(user, project: project1, visibility: Snippet::PUBLIC).execute
+
expect(snippets).to include(@snippet3)
expect(snippets).not_to include(@snippet1, @snippet2)
end
it "returns internal snippets for non project members" do
- snippets = described_class.new.execute(user, filter: :by_project, project: project1, scope: "are_internal")
+ snippets = described_class.new(user, project: project1, visibility: Snippet::INTERNAL).execute
+
expect(snippets).to include(@snippet2)
expect(snippets).not_to include(@snippet1, @snippet3)
end
it "does not return private snippets for non project members" do
- snippets = described_class.new.execute(user, filter: :by_project, project: project1, scope: "are_private")
+ snippets = described_class.new(user, project: project1, visibility: Snippet::PRIVATE).execute
+
expect(snippets).not_to include(@snippet1, @snippet2, @snippet3)
end
it "returns all snippets for project members" do
project1.team << [user, :developer]
- snippets = described_class.new.execute(user, filter: :by_project, project: project1)
+
+ snippets = described_class.new(user, project: project1).execute
+
expect(snippets).to include(@snippet1, @snippet2, @snippet3)
end
it "returns private snippets for project members" do
project1.team << [user, :developer]
- snippets = described_class.new.execute(user, filter: :by_project, project: project1, scope: "are_private")
+
+ snippets = described_class.new(user, project: project1, visibility: Snippet::PRIVATE).execute
+
expect(snippets).to include(@snippet1)
end
end
diff --git a/spec/finders/users_finder_spec.rb b/spec/finders/users_finder_spec.rb
new file mode 100644
index 00000000000..780b309b45e
--- /dev/null
+++ b/spec/finders/users_finder_spec.rb
@@ -0,0 +1,66 @@
+require 'spec_helper'
+
+describe UsersFinder do
+ describe '#execute' do
+ let!(:user1) { create(:user, username: 'johndoe') }
+ let!(:user2) { create(:user, :blocked, username: 'notsorandom') }
+ let!(:external_user) { create(:user, :external) }
+ let!(:omniauth_user) { create(:omniauth_user, provider: 'twitter', extern_uid: '123456') }
+
+ context 'with a normal user' do
+ let(:user) { create(:user) }
+
+ it 'returns all users' do
+ users = described_class.new(user).execute
+
+ expect(users).to contain_exactly(user, user1, user2, omniauth_user)
+ end
+
+ it 'filters by username' do
+ users = described_class.new(user, username: 'johndoe').execute
+
+ expect(users).to contain_exactly(user1)
+ end
+
+ it 'filters by search' do
+ users = described_class.new(user, search: 'orando').execute
+
+ expect(users).to contain_exactly(user2)
+ end
+
+ it 'filters by blocked users' do
+ users = described_class.new(user, blocked: true).execute
+
+ expect(users).to contain_exactly(user2)
+ end
+
+ it 'filters by active users' do
+ users = described_class.new(user, active: true).execute
+
+ expect(users).to contain_exactly(user, user1, omniauth_user)
+ end
+
+ it 'returns no external users' do
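+ # For a non-admin user, external: true has no effect; the result matches the unfiltered list above.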
+ users = described_class.new(user, external: true).execute
+
+ expect(users).to contain_exactly(user, user1, user2, omniauth_user)
+ end
+ end
+
+ context 'with an admin user' do
+ let(:admin) { create(:admin) }
+
+ it 'filters by external users' do
+ users = described_class.new(admin, external: true).execute
+
+ expect(users).to contain_exactly(external_user)
+ end
+
+ it 'returns all users' do
+ users = described_class.new(admin).execute
+
+ expect(users).to contain_exactly(admin, user1, user2, external_user, omniauth_user)
+ end
+ end
+ end
+end
diff --git a/spec/fixtures/api/schemas/entities/merge_request.json b/spec/fixtures/api/schemas/entities/merge_request.json
new file mode 100644
index 00000000000..4afbb87453e
--- /dev/null
+++ b/spec/fixtures/api/schemas/entities/merge_request.json
@@ -0,0 +1,98 @@
+{
+ "type": "object",
+ "properties" : {
+ "id": { "type": "integer" },
+ "iid": { "type": "integer" },
+ "author_id": { "type": "integer" },
+ "description": { "type": ["string", "null"] },
+ "lock_version": { "type": ["string", "null"] },
+ "milestone_id": { "type": ["string", "null"] },
+ "position": { "type": "integer" },
+ "state": { "type": "string" },
+ "title": { "type": "string" },
+ "updated_by_id": { "type": ["string", "null"] },
+ "created_at": { "type": "string" },
+ "updated_at": { "type": "string" },
+ "deleted_at": { "type": ["string", "null"] },
+ "time_estimate": { "type": "integer" },
+ "total_time_spent": { "type": "integer" },
+ "human_time_estimate": { "type": ["integer", "null"] },
+ "human_total_time_spent": { "type": ["integer", "null"] },
+ "in_progress_merge_commit_sha": { "type": ["string", "null"] },
+ "locked_at": { "type": ["string", "null"] },
+ "merge_error": { "type": ["string", "null"] },
+ "merge_commit_sha": { "type": ["string", "null"] },
+ "merge_params": { "type": ["object", "null"] },
+ "merge_status": { "type": "string" },
+ "merge_user_id": { "type": ["integer", "null"] },
+ "merge_when_pipeline_succeeds": { "type": "boolean" },
+ "source_branch": { "type": "string" },
+ "source_project_id": { "type": "integer" },
+ "target_branch": { "type": "string" },
+ "target_project_id": { "type": "integer" },
+ "merge_event": { "type": ["object", "null"] },
+ "closed_event": { "type": ["object", "null"] },
+ "author": { "type": ["object", "null"] },
+ "merge_user": { "type": ["object", "null"] },
+ "diff_head_sha": { "type": ["string", "null"] },
+ "diff_head_commit_short_id": { "type": ["string", "null"] },
+ "merge_commit_message": { "type": ["string", "null"] },
+ "pipeline": { "type": ["object", "null"] },
+ "work_in_progress": { "type": "boolean" },
+ "source_branch_exists": { "type": "boolean" },
+ "mergeable_discussions_state": { "type": "boolean" },
+ "conflicts_can_be_resolved_in_ui": { "type": "boolean" },
+ "branch_missing": { "type": "boolean" },
+ "has_conflicts": { "type": "boolean" },
+ "can_be_merged": { "type": "boolean" },
+ "project_archived": { "type": "boolean" },
+ "only_allow_merge_if_pipeline_succeeds": { "type": "boolean" },
+ "has_ci": { "type": "boolean" },
+ "ci_status": { "type": ["string", "null"] },
+ "issues_links": {
+ "type": "object",
+ "required": ["closing", "mentioned_but_not_closing", "assign_to_closing"],
+ "properties" : {
+ "closing": { "type": "string" },
+ "mentioned_but_not_closing": { "type": "string" },
+ "assign_to_closing": { "type": ["string", "null"] }
+ },
+ "additionalProperties": false
+ },
+ "source_branch_with_namespace_link": { "type": "string" },
+ "current_user": {
+ "type": "object",
+ "required": [
+ "can_remove_source_branch",
+ "can_revert_on_current_merge_request",
+ "can_cherry_pick_on_current_merge_request"
+ ],
+ "properties": {
+ "can_remove_source_branch": { "type": "boolean" },
+ "can_revert_on_current_merge_request": { "type": ["boolean", "null"] },
+ "can_cherry_pick_on_current_merge_request": { "type": ["boolean", "null"] }
+ },
+ "additionalProperties": false
+ },
+ "target_branch_commits_path": { "type": "string" },
+ "source_branch_path": { "type": "string" },
+ "conflict_resolution_path": { "type": ["string", "null"] },
+ "cancel_merge_when_pipeline_succeeds_path": { "type": "string" },
+ "create_issue_to_resolve_discussions_path": { "type": "string" },
+ "merge_path": { "type": "string" },
+ "cherry_pick_in_fork_path": { "type": ["string", "null"] },
+ "revert_in_fork_path": { "type": ["string", "null"] },
+ "email_patches_path": { "type": "string" },
+ "plain_diff_path": { "type": "string" },
+ "status_path": { "type": "string" },
+ "new_blob_path": { "type": "string" },
+ "merge_check_path": { "type": "string" },
+ "ci_environments_status_path": { "type": "string" },
+ "merge_commit_message_with_description": { "type": "string" },
+ "diverged_commits_count": { "type": "integer" },
+ "commit_change_content_path": { "type": "string" },
+ "remove_wip_path": { "type": "string" },
+ "commits_count": { "type": "integer" }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/entities/merge_request_basic.json b/spec/fixtures/api/schemas/entities/merge_request_basic.json
new file mode 100644
index 00000000000..6b14188582a
--- /dev/null
+++ b/spec/fixtures/api/schemas/entities/merge_request_basic.json
@@ -0,0 +1,15 @@
+{
+ "type": "object",
+ "properties" : {
+ "state": { "type": "string" },
+ "merge_status": { "type": "string" },
+ "source_branch_exists": { "type": "boolean" },
+ "time_estimate": { "type": "integer" },
+ "total_time_spent": { "type": "integer" },
+ "human_time_estimate": { "type": ["string", "null"] },
+ "human_total_time_spent": { "type": ["string", "null"] },
+ "merge_error": { "type": ["string", "null"] },
+ "assignee_id": { "type": ["integer", "null"] }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/helpers/application_helper_spec.rb b/spec/helpers/application_helper_spec.rb
index 01bdf01ad22..785fb724132 100644
--- a/spec/helpers/application_helper_spec.rb
+++ b/spec/helpers/application_helper_spec.rb
@@ -3,6 +3,8 @@ require 'spec_helper'
describe ApplicationHelper do
include UploadHelpers
+ let(:gitlab_host) { "http://#{Gitlab.config.gitlab.host}" }
+
describe 'current_controller?' do
it 'returns true when controller matches argument' do
stub_controller_name('foo')
@@ -56,8 +58,14 @@ describe ApplicationHelper do
describe 'project_icon' do
it 'returns an url for the avatar' do
project = create(:empty_project, avatar: File.open(uploaded_image_temp_path))
+ avatar_url = "/uploads/project/avatar/#{project.id}/banana_sample.gif"
+
+ expect(helper.project_icon(project.full_path).to_s).
+ to eq "<img src=\"#{avatar_url}\" alt=\"Banana sample\" />"
+
+ allow(ActionController::Base).to receive(:asset_host).and_return(gitlab_host)
+ avatar_url = "#{gitlab_host}/uploads/project/avatar/#{project.id}/banana_sample.gif"
- avatar_url = "http://#{Gitlab.config.gitlab.host}/uploads/project/avatar/#{project.id}/banana_sample.gif"
expect(helper.project_icon(project.full_path).to_s).
to eq "<img src=\"#{avatar_url}\" alt=\"Banana sample\" />"
end
@@ -67,9 +75,8 @@ describe ApplicationHelper do
allow_any_instance_of(Project).to receive(:avatar_in_git).and_return(true)
- avatar_url = "http://#{Gitlab.config.gitlab.host}#{namespace_project_avatar_path(project.namespace, project)}"
- expect(helper.project_icon(project.full_path).to_s).to match(
- image_tag(avatar_url))
+ avatar_url = "#{gitlab_host}#{namespace_project_avatar_path(project.namespace, project)}"
+ expect(helper.project_icon(project.full_path).to_s).to match(image_tag(avatar_url))
end
end
@@ -77,8 +84,14 @@ describe ApplicationHelper do
it 'returns an url for the avatar' do
user = create(:user, avatar: File.open(uploaded_image_temp_path))
- expect(helper.avatar_icon(user.email).to_s).
- to match("/uploads/user/avatar/#{user.id}/banana_sample.gif")
+ avatar_url = "/uploads/user/avatar/#{user.id}/banana_sample.gif"
+
+ expect(helper.avatar_icon(user.email).to_s).to match(avatar_url)
+
+ allow(ActionController::Base).to receive(:asset_host).and_return(gitlab_host)
+ avatar_url = "#{gitlab_host}/uploads/user/avatar/#{user.id}/banana_sample.gif"
+
+ expect(helper.avatar_icon(user.email).to_s).to match(avatar_url)
end
it 'returns an url for the avatar with relative url' do
diff --git a/spec/helpers/avatars_helper_spec.rb b/spec/helpers/avatars_helper_spec.rb
index 581726c1d0e..6157abfe339 100644
--- a/spec/helpers/avatars_helper_spec.rb
+++ b/spec/helpers/avatars_helper_spec.rb
@@ -15,7 +15,7 @@ describe AvatarsHelper do
end
it "contains the user's avatar image" do
- is_expected.to include(CGI.escapeHTML(user.avatar_url(16)))
+ is_expected.to include(CGI.escapeHTML(user.avatar_url(size: 16)))
end
end
end
diff --git a/spec/helpers/blob_helper_spec.rb b/spec/helpers/blob_helper_spec.rb
index 1b4393e6167..41b5df12522 100644
--- a/spec/helpers/blob_helper_spec.rb
+++ b/spec/helpers/blob_helper_spec.rb
@@ -116,10 +116,11 @@ describe BlobHelper do
let(:viewer_class) do
Class.new(BlobViewer::Base) do
- self.max_size = 1.megabyte
- self.absolute_max_size = 5.megabytes
+ include BlobViewer::ServerSide
+
+ self.overridable_max_size = 1.megabyte
+ self.max_size = 5.megabytes
self.type = :rich
- self.client_side = false
end
end
diff --git a/spec/helpers/diff_helper_spec.rb b/spec/helpers/diff_helper_spec.rb
index eae097126ce..dd6566d25bb 100644
--- a/spec/helpers/diff_helper_spec.rb
+++ b/spec/helpers/diff_helper_spec.rb
@@ -122,9 +122,9 @@ describe DiffHelper do
it "returns strings with marked inline diffs" do
marked_old_line, marked_new_line = mark_inline_diffs(old_line, new_line)
- expect(marked_old_line).to eq("abc <span class='idiff left right deletion'>&#39;def&#39;</span>")
+ expect(marked_old_line).to eq(%q{abc <span class="idiff left right deletion">&#39;def&#39;</span>})
expect(marked_old_line).to be_html_safe
- expect(marked_new_line).to eq("abc <span class='idiff left right addition'>&quot;def&quot;</span>")
+ expect(marked_new_line).to eq(%q{abc <span class="idiff left right addition">&quot;def&quot;</span>})
expect(marked_new_line).to be_html_safe
end
end
diff --git a/spec/helpers/merge_requests_helper_spec.rb b/spec/helpers/merge_requests_helper_spec.rb
index 10681af5f7e..f2c9d927388 100644
--- a/spec/helpers/merge_requests_helper_spec.rb
+++ b/spec/helpers/merge_requests_helper_spec.rb
@@ -21,55 +21,6 @@ describe MergeRequestsHelper do
end
end
- describe '#issues_sentence' do
- let(:project) { create :project }
-
- subject { issues_sentence(issues) }
- let(:issues) do
- [build(:issue, iid: 2, project: project),
- build(:issue, iid: 3, project: project),
- build(:issue, iid: 1, project: project)]
- end
-
- it do
- @project = project
-
- is_expected.to eq('#1, #2, and #3')
- end
-
- context 'for JIRA issues' do
- let(:project) { create(:empty_project) }
- let(:issues) do
- [
- ExternalIssue.new('JIRA-456', project),
- ExternalIssue.new('FOOBAR-7890', project),
- ExternalIssue.new('JIRA-123', project)
- ]
- end
-
- it do
- @project = project
- is_expected.to eq('FOOBAR-7890, JIRA-123, and JIRA-456')
- end
- end
-
- context 'for issues from multiple namespaces' do
- let(:project) { create(:project) }
- let(:other_project) { create(:project) }
- let(:issues) do
- [build(:issue, iid: 2, project: project),
- build(:issue, iid: 3, project: other_project),
- build(:issue, iid: 1, project: project)]
- end
-
- it do
- @project = project
-
- is_expected.to eq("#1, #2, and #{other_project.namespace.path}/#{other_project.path}#3")
- end
- end
- end
-
describe '#format_mr_branch_names' do
describe 'within the same project' do
let(:merge_request) { create(:merge_request) }
@@ -89,147 +40,4 @@ describe MergeRequestsHelper do
it { is_expected.to eq([source_title, target_title]) }
end
end
-
- describe '#mr_widget_refresh_url' do
- let(:guest) { create(:user) }
- let(:project) { create(:project, :public) }
- let(:project_fork) { Projects::ForkService.new(project, guest).execute }
- let(:merge_request) { create(:merge_request, source_project: project_fork, target_project: project) }
-
- it 'returns correct url for MR' do
- expected_url = "#{project.path_with_namespace}/merge_requests/#{merge_request.iid}/merge_widget_refresh"
-
- expect(mr_widget_refresh_url(merge_request)).to end_with(expected_url)
- end
-
- it 'returns empty string for nil' do
- expect(mr_widget_refresh_url(nil)).to eq('')
- end
- end
-
- describe '#mr_closes_issues' do
- let(:user_1) { create(:user) }
- let(:user_2) { create(:user) }
-
- let(:project_1) { create(:project, :private, creator: user_1, namespace: user_1.namespace) }
- let(:project_2) { create(:project, :private, creator: user_2, namespace: user_2.namespace) }
-
- let(:issue_1) { create(:issue, project: project_1) }
- let(:issue_2) { create(:issue, project: project_2) }
-
- let(:merge_request) { create(:merge_request, source_project: project_1, target_project: project_1,) }
-
- let(:merge_request) do
- create(:merge_request,
- source_project: project_1, target_project: project_1,
- description: "Fixes #{issue_1.to_reference} Fixes #{issue_2.to_reference(project_1)}")
- end
-
- before do
- project_1.team << [user_2, :developer]
- project_2.team << [user_2, :developer]
- allow(merge_request.project).to receive(:default_branch).and_return(merge_request.target_branch)
- @merge_request = merge_request
- end
-
- context 'user without access to another private project' do
- let(:current_user) { user_1 }
-
- it 'cannot see that project\'s issue that will be closed on acceptance' do
- expect(mr_closes_issues).to contain_exactly(issue_1)
- end
- end
-
- context 'user with access to another private project' do
- let(:current_user) { user_2 }
-
- it 'can see that project\'s issue that will be closed on acceptance' do
- expect(mr_closes_issues).to contain_exactly(issue_1, issue_2)
- end
- end
- end
-
- describe '#target_projects' do
- let(:project) { create(:empty_project) }
- let(:fork_project) { create(:empty_project, forked_from_project: project) }
-
- context 'when target project has enabled merge requests' do
- it 'returns the forked_from project' do
- expect(target_projects(fork_project)).to contain_exactly(project, fork_project)
- end
- end
-
- context 'when target project has disabled merge requests' do
- it 'returns the forked project' do
- project.project_feature.update(merge_requests_access_level: 0)
-
- expect(target_projects(fork_project)).to contain_exactly(fork_project)
- end
- end
- end
-
- describe '#new_mr_path_from_push_event' do
- subject(:url_params) { URI.decode_www_form(new_mr_path_from_push_event(event)).to_h }
- let(:user) { create(:user) }
- let(:project) { create(:empty_project, creator: user) }
- let(:fork_project) { create(:project, forked_from_project: project, creator: user) }
- let(:event) do
- push_data = Gitlab::DataBuilder::Push.build_sample(fork_project, user)
- create(:event, :pushed, project: fork_project, target: fork_project, author: user, data: push_data)
- end
-
- context 'when target project has enabled merge requests' do
- it 'returns link to create merge request on source project' do
- expect(url_params['merge_request[target_project_id]'].to_i).to eq(project.id)
- end
- end
-
- context 'when target project has disabled merge requests' do
- it 'returns link to create merge request on forked project' do
- project.project_feature.update(merge_requests_access_level: 0)
-
- expect(url_params['merge_request[target_project_id]'].to_i).to eq(fork_project.id)
- end
- end
- end
-
- describe '#mr_issues_mentioned_but_not_closing' do
- let(:user_1) { create(:user) }
- let(:user_2) { create(:user) }
-
- let(:project_1) { create(:project, :private, creator: user_1, namespace: user_1.namespace) }
- let(:project_2) { create(:project, :private, creator: user_2, namespace: user_2.namespace) }
-
- let(:issue_1) { create(:issue, project: project_1) }
- let(:issue_2) { create(:issue, project: project_2) }
-
- let(:merge_request) do
- create(:merge_request,
- source_project: project_1, target_project: project_1,
- description: "#{issue_1.to_reference} #{issue_2.to_reference(project_1)}")
- end
-
- before do
- project_1.team << [user_2, :developer]
- project_2.team << [user_2, :developer]
- allow(merge_request.project).to receive(:default_branch).and_return(merge_request.target_branch)
- @merge_request = merge_request
- end
-
- context 'user without access to another private project' do
- let(:current_user) { user_1 }
-
- it 'cannot see that project\'s issue that will be closed on acceptance' do
- expect(mr_issues_mentioned_but_not_closing).to contain_exactly(issue_1)
- end
- end
-
- context 'user with access to another private project' do
- let(:current_user) { user_2 }
-
- it 'can see that project\'s issue that will be closed on acceptance' do
- expect(mr_issues_mentioned_but_not_closing).to contain_exactly(issue_1, issue_2)
- end
- end
- end
end
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index be97973c693..54c5ba57bdf 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -66,8 +66,8 @@ describe ProjectsHelper do
describe "#project_list_cache_key", redis: true do
let(:project) { create(:project) }
- it "includes the namespace" do
- expect(helper.project_list_cache_key(project)).to include(project.namespace.cache_key)
+ it "includes the route" do
+ expect(helper.project_list_cache_key(project)).to include(project.route.cache_key)
end
it "includes the project" do
diff --git a/spec/helpers/submodule_helper_spec.rb b/spec/helpers/submodule_helper_spec.rb
index 345bc33a67b..18935be95c9 100644
--- a/spec/helpers/submodule_helper_spec.rb
+++ b/spec/helpers/submodule_helper_spec.rb
@@ -81,6 +81,19 @@ describe SubmoduleHelper do
end
end
+ context 'in-repository submodule' do
+ let(:group) { create(:group, name: "Master Project", path: "master-project") }
+ let(:project) { create(:empty_project, group: group) }
+ before do
+ self.instance_variable_set(:@project, project)
+ end
+
+ it 'in-repository' do
+ stub_url('./')
+ expect(submodule_links(submodule_item)).to eq(["/master-project/#{project.path}", "/master-project/#{project.path}/tree/hash"])
+ end
+ end
+
context 'submodule on gitlab.com' do
it 'detects ssh' do
stub_url('git@gitlab.com:gitlab-org/gitlab-ce.git')
@@ -109,6 +122,18 @@ describe SubmoduleHelper do
end
context 'submodule on unsupported' do
+ it 'sanitizes unsupported protocols' do
+ stub_url('javascript:alert("XSS");')
+
+ expect(helper.submodule_links(submodule_item)).to eq([nil, nil])
+ end
+
+ it 'sanitizes unsupported protocols disguised as a repository URL' do
+ stub_url('javascript:alert("XSS");foo/bar.git')
+
+ expect(helper.submodule_links(submodule_item)).to eq([nil, nil])
+ end
+
it 'returns original' do
stub_url('http://mygitserver.com/gitlab-org/gitlab-ce')
expect(submodule_links(submodule_item)).to eq([repo.submodule_url_for, nil])
diff --git a/spec/javascripts/abuse_reports_spec.js b/spec/javascripts/abuse_reports_spec.js
index 76b370b345b..069d857eab6 100644
--- a/spec/javascripts/abuse_reports_spec.js
+++ b/spec/javascripts/abuse_reports_spec.js
@@ -1,5 +1,5 @@
-require('~/lib/utils/text_utility');
-require('~/abuse_reports');
+import '~/lib/utils/text_utility';
+import '~/abuse_reports';
((global) => {
describe('Abuse Reports', () => {
diff --git a/spec/javascripts/activities_spec.js b/spec/javascripts/activities_spec.js
index e6a6fc36ca1..e8c5f721423 100644
--- a/spec/javascripts/activities_spec.js
+++ b/spec/javascripts/activities_spec.js
@@ -1,8 +1,8 @@
/* eslint-disable no-unused-expressions, no-prototype-builtins, no-new, no-shadow, max-len */
-require('vendor/jquery.endless-scroll.js');
-require('~/pager');
-require('~/activities');
+import 'vendor/jquery.endless-scroll';
+import '~/pager';
+import '~/activities';
(() => {
window.gon || (window.gon = {});
diff --git a/spec/javascripts/ajax_loading_spinner_spec.js b/spec/javascripts/ajax_loading_spinner_spec.js
index a68bccb16f4..1518ae68b0d 100644
--- a/spec/javascripts/ajax_loading_spinner_spec.js
+++ b/spec/javascripts/ajax_loading_spinner_spec.js
@@ -1,7 +1,7 @@
-require('~/extensions/array');
-require('jquery');
-require('jquery-ujs');
-require('~/ajax_loading_spinner');
+import '~/extensions/array';
+import 'jquery';
+import 'jquery-ujs';
+import '~/ajax_loading_spinner';
describe('Ajax Loading Spinner', () => {
const fixtureTemplate = 'static/ajax_loading_spinner.html.raw';
diff --git a/spec/javascripts/api_spec.js b/spec/javascripts/api_spec.js
new file mode 100644
index 00000000000..867322ce8ae
--- /dev/null
+++ b/spec/javascripts/api_spec.js
@@ -0,0 +1,281 @@
+import Api from '~/api';
+
+describe('Api', () => {
+ const dummyApiVersion = 'v3000';
+ const dummyUrlRoot = 'http://host.invalid';
+ const dummyGon = {
+ api_version: dummyApiVersion,
+ relative_url_root: dummyUrlRoot,
+ };
+ const dummyResponse = 'hello from outer space!';
+ const sendDummyResponse = () => {
+ const deferred = $.Deferred();
+ deferred.resolve(dummyResponse);
+ return deferred.promise();
+ };
+ let originalGon;
+
+ beforeEach(() => {
+ originalGon = window.gon;
+ window.gon = dummyGon;
+ });
+
+ afterEach(() => {
+ window.gon = originalGon;
+ });
+
+ describe('buildUrl', () => {
+ it('adds URL root and fills in API version', () => {
+ const input = '/api/:version/foo/bar';
+ const expectedOutput = `${dummyUrlRoot}/api/${dummyApiVersion}/foo/bar`;
+
+ const builtUrl = Api.buildUrl(input);
+
+ expect(builtUrl).toEqual(expectedOutput);
+ });
+ });
+
+ describe('group', () => {
+ it('fetches a group', (done) => {
+ const groupId = '123456';
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${groupId}.json`;
+ spyOn(jQuery, 'ajax').and.callFake((request) => {
+ expect(request.url).toEqual(expectedUrl);
+ expect(request.dataType).toEqual('json');
+ return sendDummyResponse();
+ });
+
+ Api.group(groupId, (response) => {
+ expect(response).toBe(dummyResponse);
+ done();
+ });
+ });
+ });
+
+ describe('groups', () => {
+ it('fetches groups', (done) => {
+ const query = 'dummy query';
+ const options = { unused: 'option' };
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups.json`;
+ const expectedData = Object.assign({
+ search: query,
+ per_page: 20,
+ }, options);
+ spyOn(jQuery, 'ajax').and.callFake((request) => {
+ expect(request.url).toEqual(expectedUrl);
+ expect(request.dataType).toEqual('json');
+ expect(request.data).toEqual(expectedData);
+ return sendDummyResponse();
+ });
+
+ Api.groups(query, options, (response) => {
+ expect(response).toBe(dummyResponse);
+ done();
+ });
+ });
+ });
+
+ describe('namespaces', () => {
+ it('fetches namespaces', (done) => {
+ const query = 'dummy query';
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/namespaces.json`;
+ const expectedData = {
+ search: query,
+ per_page: 20,
+ };
+ spyOn(jQuery, 'ajax').and.callFake((request) => {
+ expect(request.url).toEqual(expectedUrl);
+ expect(request.dataType).toEqual('json');
+ expect(request.data).toEqual(expectedData);
+ return sendDummyResponse();
+ });
+
+ Api.namespaces(query, (response) => {
+ expect(response).toBe(dummyResponse);
+ done();
+ });
+ });
+ });
+
+ describe('projects', () => {
+ it('fetches projects', (done) => {
+ const query = 'dummy query';
+ const options = { unused: 'option' };
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects.json?simple=true`;
+ const expectedData = Object.assign({
+ search: query,
+ per_page: 20,
+ membership: true,
+ }, options);
+ spyOn(jQuery, 'ajax').and.callFake((request) => {
+ expect(request.url).toEqual(expectedUrl);
+ expect(request.dataType).toEqual('json');
+ expect(request.data).toEqual(expectedData);
+ return sendDummyResponse();
+ });
+
+ Api.projects(query, options, (response) => {
+ expect(response).toBe(dummyResponse);
+ done();
+ });
+ });
+ });
+
+ describe('newLabel', () => {
+ it('creates a new label', (done) => {
+ const namespace = 'some namespace';
+ const project = 'some project';
+ const labelData = { some: 'data' };
+ const expectedUrl = `${dummyUrlRoot}/${namespace}/${project}/labels`;
+ const expectedData = {
+ label: labelData,
+ };
+ spyOn(jQuery, 'ajax').and.callFake((request) => {
+ expect(request.url).toEqual(expectedUrl);
+ expect(request.dataType).toEqual('json');
+ expect(request.type).toEqual('POST');
+ expect(request.data).toEqual(expectedData);
+ return sendDummyResponse();
+ });
+
+ Api.newLabel(namespace, project, labelData, (response) => {
+ expect(response).toBe(dummyResponse);
+ done();
+ });
+ });
+ });
+
+ describe('groupProjects', () => {
+ it('fetches group projects', (done) => {
+ const groupId = '123456';
+ const query = 'dummy query';
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${groupId}/projects.json`;
+ const expectedData = {
+ search: query,
+ per_page: 20,
+ };
+ spyOn(jQuery, 'ajax').and.callFake((request) => {
+ expect(request.url).toEqual(expectedUrl);
+ expect(request.dataType).toEqual('json');
+ expect(request.data).toEqual(expectedData);
+ return sendDummyResponse();
+ });
+
+ Api.groupProjects(groupId, query, (response) => {
+ expect(response).toBe(dummyResponse);
+ done();
+ });
+ });
+ });
+
+ describe('licenseText', () => {
+ it('fetches a license text', (done) => {
+ const licenseKey = "driver's license";
+ const data = { unused: 'option' };
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/templates/licenses/${licenseKey}`;
+ spyOn(jQuery, 'ajax').and.callFake((request) => {
+ expect(request.url).toEqual(expectedUrl);
+ expect(request.data).toEqual(data);
+ return sendDummyResponse();
+ });
+
+ Api.licenseText(licenseKey, data, (response) => {
+ expect(response).toBe(dummyResponse);
+ done();
+ });
+ });
+ });
+
+ describe('gitignoreText', () => {
+ it('fetches a gitignore text', (done) => {
+ const gitignoreKey = 'ignore git';
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/templates/gitignores/${gitignoreKey}`;
+ spyOn(jQuery, 'get').and.callFake((url, callback) => {
+ expect(url).toEqual(expectedUrl);
+ callback(dummyResponse);
+ });
+
+ Api.gitignoreText(gitignoreKey, (response) => {
+ expect(response).toBe(dummyResponse);
+ done();
+ });
+ });
+ });
+
+ describe('gitlabCiYml', () => {
+ it('fetches a .gitlab-ci.yml', (done) => {
+ const gitlabCiYmlKey = 'Y CI ML';
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/templates/gitlab_ci_ymls/${gitlabCiYmlKey}`;
+ spyOn(jQuery, 'get').and.callFake((url, callback) => {
+ expect(url).toEqual(expectedUrl);
+ callback(dummyResponse);
+ });
+
+ Api.gitlabCiYml(gitlabCiYmlKey, (response) => {
+ expect(response).toBe(dummyResponse);
+ done();
+ });
+ });
+ });
+
+ describe('dockerfileYml', () => {
+ it('fetches a Dockerfile', (done) => {
+ const dockerfileYmlKey = 'a giant whale';
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/templates/dockerfiles/${dockerfileYmlKey}`;
+ spyOn(jQuery, 'get').and.callFake((url, callback) => {
+ expect(url).toEqual(expectedUrl);
+ callback(dummyResponse);
+ });
+
+ Api.dockerfileYml(dockerfileYmlKey, (response) => {
+ expect(response).toBe(dummyResponse);
+ done();
+ });
+ });
+ });
+
+ describe('issueTemplate', () => {
+ it('fetches an issue template', (done) => {
+ const namespace = 'some namespace';
+ const project = 'some project';
+ const templateKey = 'template key';
+ const templateType = 'template type';
+ const expectedUrl = `${dummyUrlRoot}/${namespace}/${project}/templates/${templateType}/${templateKey}`;
+ spyOn(jQuery, 'ajax').and.callFake((request) => {
+ expect(request.url).toEqual(expectedUrl);
+ return sendDummyResponse();
+ });
+
+ Api.issueTemplate(namespace, project, templateKey, templateType, (error, response) => {
+ expect(error).toBe(null);
+ expect(response).toBe(dummyResponse);
+ done();
+ });
+ });
+ });
+
+ describe('users', () => {
+ it('fetches users', (done) => {
+ const query = 'dummy query';
+ const options = { unused: 'option' };
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/users.json`;
+ const expectedData = Object.assign({
+ search: query,
+ per_page: 20,
+ }, options);
+ spyOn(jQuery, 'ajax').and.callFake((request) => {
+ expect(request.url).toEqual(expectedUrl);
+ expect(request.dataType).toEqual('json');
+ expect(request.data).toEqual(expectedData);
+ return sendDummyResponse();
+ });
+
+ Api.users(query, options)
+ .then((response) => {
+ expect(response).toBe(dummyResponse);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+});
diff --git a/spec/javascripts/awards_handler_spec.js b/spec/javascripts/awards_handler_spec.js
index 68ad5f66676..3fc03324d16 100644
--- a/spec/javascripts/awards_handler_spec.js
+++ b/spec/javascripts/awards_handler_spec.js
@@ -3,7 +3,7 @@
import Cookies from 'js-cookie';
import AwardsHandler from '~/awards_handler';
-require('~/lib/utils/common_utils');
+import '~/lib/utils/common_utils';
(function() {
var awardsHandler, lazyAssert, urlRoot, openAndWaitForEmojiMenu;
diff --git a/spec/javascripts/behaviors/autosize_spec.js b/spec/javascripts/behaviors/autosize_spec.js
index 3deaf258cae..67afba19190 100644
--- a/spec/javascripts/behaviors/autosize_spec.js
+++ b/spec/javascripts/behaviors/autosize_spec.js
@@ -1,6 +1,6 @@
/* eslint-disable space-before-function-paren, no-var, comma-dangle, no-return-assign, max-len */
-require('~/behaviors/autosize');
+import '~/behaviors/autosize';
(function() {
describe('Autosize behavior', function() {
diff --git a/spec/javascripts/behaviors/bind_in_out_spec.js b/spec/javascripts/behaviors/bind_in_out_spec.js
index dd9ab33289f..5ff66167718 100644
--- a/spec/javascripts/behaviors/bind_in_out_spec.js
+++ b/spec/javascripts/behaviors/bind_in_out_spec.js
@@ -2,7 +2,7 @@ import BindInOut from '~/behaviors/bind_in_out';
import ClassSpecHelper from '../helpers/class_spec_helper';
describe('BindInOut', function () {
- describe('.constructor', function () {
+ describe('constructor', function () {
beforeEach(function () {
this.in = {};
this.out = {};
@@ -53,7 +53,7 @@ describe('BindInOut', function () {
});
});
- describe('.addEvents', function () {
+ describe('addEvents', function () {
beforeEach(function () {
this.in = jasmine.createSpyObj('in', ['addEventListener']);
@@ -79,7 +79,7 @@ describe('BindInOut', function () {
});
});
- describe('.updateOut', function () {
+ describe('updateOut', function () {
beforeEach(function () {
this.in = { value: 'the-value' };
this.out = { textContent: 'not-the-value' };
@@ -98,7 +98,7 @@ describe('BindInOut', function () {
});
});
- describe('.removeEvents', function () {
+ describe('removeEvents', function () {
beforeEach(function () {
this.in = jasmine.createSpyObj('in', ['removeEventListener']);
this.updateOut = () => {};
@@ -122,7 +122,7 @@ describe('BindInOut', function () {
});
});
- describe('.initAll', function () {
+ describe('initAll', function () {
beforeEach(function () {
this.ins = [0, 1, 2];
this.instances = [];
@@ -153,7 +153,7 @@ describe('BindInOut', function () {
});
});
- describe('.init', function () {
+ describe('init', function () {
beforeEach(function () {
spyOn(BindInOut.prototype, 'addEvents').and.callFake(function () { return this; });
spyOn(BindInOut.prototype, 'updateOut').and.callFake(function () { return this; });
diff --git a/spec/javascripts/behaviors/quick_submit_spec.js b/spec/javascripts/behaviors/quick_submit_spec.js
index 4820ce41ade..f56b99f8a16 100644
--- a/spec/javascripts/behaviors/quick_submit_spec.js
+++ b/spec/javascripts/behaviors/quick_submit_spec.js
@@ -1,6 +1,6 @@
/* eslint-disable space-before-function-paren, no-var, no-return-assign, comma-dangle, jasmine/no-spec-dupes, new-cap, max-len */
-require('~/behaviors/quick_submit');
+import '~/behaviors/quick_submit';
(function() {
describe('Quick Submit behavior', function() {
diff --git a/spec/javascripts/behaviors/requires_input_spec.js b/spec/javascripts/behaviors/requires_input_spec.js
index 3a84013a2ed..f9fa814b801 100644
--- a/spec/javascripts/behaviors/requires_input_spec.js
+++ b/spec/javascripts/behaviors/requires_input_spec.js
@@ -1,6 +1,6 @@
/* eslint-disable space-before-function-paren, no-var */
-require('~/behaviors/requires_input');
+import '~/behaviors/requires_input';
(function() {
describe('requiresInput', function() {
diff --git a/spec/javascripts/blob/balsamiq/balsamiq_viewer_integration_spec.js b/spec/javascripts/blob/balsamiq/balsamiq_viewer_integration_spec.js
new file mode 100644
index 00000000000..acd0aaf2a86
--- /dev/null
+++ b/spec/javascripts/blob/balsamiq/balsamiq_viewer_integration_spec.js
@@ -0,0 +1,51 @@
+/* eslint-disable import/no-unresolved */
+
+import BalsamiqViewer from '~/blob/balsamiq/balsamiq_viewer';
+import bmprPath from '../../fixtures/blob/balsamiq/test.bmpr';
+
+describe('Balsamiq integration spec', () => {
+ let container;
+ let endpoint;
+ let balsamiqViewer;
+
+ preloadFixtures('static/balsamiq_viewer.html.raw');
+
+ beforeEach(() => {
+ loadFixtures('static/balsamiq_viewer.html.raw');
+
+ container = document.getElementById('js-balsamiq-viewer');
+ balsamiqViewer = new BalsamiqViewer(container);
+ });
+
+ describe('successful response', () => {
+ beforeEach((done) => {
+ endpoint = bmprPath;
+
+ balsamiqViewer.loadFile(endpoint).then(done).catch(done.fail);
+ });
+
+ it('does not show loading icon', () => {
+ expect(document.querySelector('.loading')).toBeNull();
+ });
+
+ it('renders the balsamiq previews', () => {
+ expect(document.querySelectorAll('.previews .preview').length).not.toEqual(0);
+ });
+ });
+
+ describe('error getting file', () => {
+ beforeEach((done) => {
+ endpoint = 'invalid/path/to/file.bmpr';
+
+ balsamiqViewer.loadFile(endpoint).then(done.fail, null).catch(done);
+ });
+
+ it('does not show loading icon', () => {
+ expect(document.querySelector('.loading')).toBeNull();
+ });
+
+ it('does not render the balsamiq previews', () => {
+ expect(document.querySelectorAll('.previews .preview').length).toEqual(0);
+ });
+ });
+});
diff --git a/spec/javascripts/blob/balsamiq/balsamiq_viewer_spec.js b/spec/javascripts/blob/balsamiq/balsamiq_viewer_spec.js
index 85816ee1f11..aa87956109f 100644
--- a/spec/javascripts/blob/balsamiq/balsamiq_viewer_spec.js
+++ b/spec/javascripts/blob/balsamiq/balsamiq_viewer_spec.js
@@ -4,17 +4,11 @@ import ClassSpecHelper from '../../helpers/class_spec_helper';
describe('BalsamiqViewer', () => {
let balsamiqViewer;
- let endpoint;
let viewer;
describe('class constructor', () => {
beforeEach(() => {
- endpoint = 'endpoint';
- viewer = {
- dataset: {
- endpoint,
- },
- };
+ viewer = {};
balsamiqViewer = new BalsamiqViewer(viewer);
});
@@ -22,25 +16,25 @@ describe('BalsamiqViewer', () => {
it('should set .viewer', () => {
expect(balsamiqViewer.viewer).toBe(viewer);
});
+ });
+
+ describe('fileLoaded', () => {
- it('should set .endpoint', () => {
- expect(balsamiqViewer.endpoint).toBe(endpoint);
- });
});
describe('loadFile', () => {
let xhr;
+ let loadFile;
+ const endpoint = 'endpoint';
beforeEach(() => {
- endpoint = 'endpoint';
xhr = jasmine.createSpyObj('xhr', ['open', 'send']);
balsamiqViewer = jasmine.createSpyObj('balsamiqViewer', ['renderFile']);
- balsamiqViewer.endpoint = endpoint;
spyOn(window, 'XMLHttpRequest').and.returnValue(xhr);
- BalsamiqViewer.prototype.loadFile.call(balsamiqViewer);
+ loadFile = BalsamiqViewer.prototype.loadFile.call(balsamiqViewer, endpoint);
});
it('should call .open', () => {
@@ -54,6 +48,10 @@ describe('BalsamiqViewer', () => {
it('should call .send', () => {
expect(xhr.send).toHaveBeenCalled();
});
+
+ it('should return a promise', () => {
+ expect(loadFile).toEqual(jasmine.any(Promise));
+ });
});
describe('renderFile', () => {
@@ -325,18 +323,4 @@ describe('BalsamiqViewer', () => {
expect(parseTitle).toBe('name');
});
});
-
- describe('onError', () => {
- beforeEach(() => {
- spyOn(window, 'Flash');
-
- BalsamiqViewer.onError();
- });
-
- ClassSpecHelper.itShouldBeAStaticMethod(BalsamiqViewer, 'onError');
-
- it('should instantiate Flash', () => {
- expect(window.Flash).toHaveBeenCalledWith('Balsamiq file could not be loaded.');
- });
- });
});
diff --git a/spec/javascripts/blob/create_branch_dropdown_spec.js b/spec/javascripts/blob/create_branch_dropdown_spec.js
index c1179e572ae..6dbaa47c544 100644
--- a/spec/javascripts/blob/create_branch_dropdown_spec.js
+++ b/spec/javascripts/blob/create_branch_dropdown_spec.js
@@ -1,7 +1,6 @@
-require('~/gl_dropdown');
-require('~/lib/utils/type_utility');
-require('~/blob/create_branch_dropdown');
-require('~/blob/target_branch_dropdown');
+import '~/gl_dropdown';
+import '~/blob/create_branch_dropdown';
+import '~/blob/target_branch_dropdown';
describe('CreateBranchDropdown', () => {
const fixtureTemplate = 'static/target_branch_dropdown.html.raw';
diff --git a/spec/javascripts/blob/target_branch_dropdown_spec.js b/spec/javascripts/blob/target_branch_dropdown_spec.js
index 4fb79663c51..99c9537d2ec 100644
--- a/spec/javascripts/blob/target_branch_dropdown_spec.js
+++ b/spec/javascripts/blob/target_branch_dropdown_spec.js
@@ -1,7 +1,6 @@
-require('~/gl_dropdown');
-require('~/lib/utils/type_utility');
-require('~/blob/create_branch_dropdown');
-require('~/blob/target_branch_dropdown');
+import '~/gl_dropdown';
+import '~/blob/create_branch_dropdown';
+import '~/blob/target_branch_dropdown';
describe('TargetBranchDropdown', () => {
const fixtureTemplate = 'static/target_branch_dropdown.html.raw';
@@ -63,7 +62,7 @@ describe('TargetBranchDropdown', () => {
expect('change.branch').toHaveBeenTriggeredOn(dropdown.$dropdown);
});
- describe('#dropdownData', () => {
+ describe('dropdownData', () => {
it('cache the refs', () => {
const refs = dropdown.cachedRefs;
dropdown.cachedRefs = null;
@@ -88,7 +87,7 @@ describe('TargetBranchDropdown', () => {
});
});
- describe('#setNewBranch', () => {
+ describe('setNewBranch', () => {
it('adds the new branch and select it', () => {
const branchName = 'new_branch';
diff --git a/spec/javascripts/blob/viewer/index_spec.js b/spec/javascripts/blob/viewer/index_spec.js
index 13f122b68b2..af04e7c1e72 100644
--- a/spec/javascripts/blob/viewer/index_spec.js
+++ b/spec/javascripts/blob/viewer/index_spec.js
@@ -83,25 +83,48 @@ describe('Blob viewer', () => {
});
describe('copy blob button', () => {
+ let copyButton;
+
+ beforeEach(() => {
+ copyButton = document.querySelector('.js-copy-blob-source-btn');
+ });
+
it('disabled on load', () => {
expect(
- document.querySelector('.js-copy-blob-source-btn').classList.contains('disabled'),
+ copyButton.classList.contains('disabled'),
).toBeTruthy();
});
it('has tooltip when disabled', () => {
expect(
- document.querySelector('.js-copy-blob-source-btn').getAttribute('data-original-title'),
+ copyButton.getAttribute('data-original-title'),
).toBe('Switch to the source to copy it to the clipboard');
});
+ it('is blurred when clicked and disabled', () => {
+ spyOn(copyButton, 'blur');
+
+ copyButton.click();
+
+ expect(copyButton.blur).toHaveBeenCalled();
+ });
+
+ it('is not blurred when clicked and not disabled', () => {
+ spyOn(copyButton, 'blur');
+
+ copyButton.classList.remove('disabled');
+ copyButton.click();
+
+ expect(copyButton.blur).not.toHaveBeenCalled();
+ });
+
it('enables after switching to simple view', (done) => {
document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click();
setTimeout(() => {
expect($.ajax).toHaveBeenCalled();
expect(
- document.querySelector('.js-copy-blob-source-btn').classList.contains('disabled'),
+ copyButton.classList.contains('disabled'),
).toBeFalsy();
done();
@@ -115,7 +138,7 @@ describe('Blob viewer', () => {
expect($.ajax).toHaveBeenCalled();
expect(
- document.querySelector('.js-copy-blob-source-btn').getAttribute('data-original-title'),
+ copyButton.getAttribute('data-original-title'),
).toBe('Copy source to clipboard');
done();
diff --git a/spec/javascripts/boards/board_card_spec.js b/spec/javascripts/boards/board_card_spec.js
index 376e706d1db..447b244c71f 100644
--- a/spec/javascripts/boards/board_card_spec.js
+++ b/spec/javascripts/boards/board_card_spec.js
@@ -8,11 +8,11 @@
import Vue from 'vue';
import '~/boards/models/assignee';
-require('~/boards/models/list');
-require('~/boards/models/label');
-require('~/boards/stores/boards_store');
-const boardCard = require('~/boards/components/board_card').default;
-require('./mock_data');
+import '~/boards/models/list';
+import '~/boards/models/label';
+import '~/boards/stores/boards_store';
+import boardCard from '~/boards/components/board_card';
+import './mock_data';
describe('Issue card', () => {
let vm;
diff --git a/spec/javascripts/boards/board_new_issue_spec.js b/spec/javascripts/boards/board_new_issue_spec.js
index 4999933c0c1..45d12e252c4 100644
--- a/spec/javascripts/boards/board_new_issue_spec.js
+++ b/spec/javascripts/boards/board_new_issue_spec.js
@@ -6,8 +6,8 @@
import Vue from 'vue';
import boardNewIssue from '~/boards/components/board_new_issue';
-require('~/boards/models/list');
-require('./mock_data');
+import '~/boards/models/list';
+import './mock_data';
describe('Issue boards new issue form', () => {
let vm;
diff --git a/spec/javascripts/boards/issue_card_spec.js b/spec/javascripts/boards/issue_card_spec.js
index fddde799d01..bd9b4fbfdd3 100644
--- a/spec/javascripts/boards/issue_card_spec.js
+++ b/spec/javascripts/boards/issue_card_spec.js
@@ -129,7 +129,7 @@ describe('Issue card component', () => {
it('sets title', () => {
expect(
- component.$el.querySelector('.card-assignee a').getAttribute('title'),
+ component.$el.querySelector('.card-assignee img').getAttribute('data-original-title'),
).toContain(`Assigned to ${user.name}`);
});
diff --git a/spec/javascripts/bootstrap_linked_tabs_spec.js b/spec/javascripts/bootstrap_linked_tabs_spec.js
index fa9f95e16cd..a27dc48b3fd 100644
--- a/spec/javascripts/bootstrap_linked_tabs_spec.js
+++ b/spec/javascripts/bootstrap_linked_tabs_spec.js
@@ -1,4 +1,4 @@
-require('~/lib/utils/bootstrap_linked_tabs');
+import LinkedTabs from '~/lib/utils/bootstrap_linked_tabs';
(() => {
// TODO: remove this hack!
@@ -25,7 +25,7 @@ require('~/lib/utils/bootstrap_linked_tabs');
});
it('should activate the tab correspondent to the given action', () => {
- const linkedTabs = new window.gl.LinkedTabs({ // eslint-disable-line
+ const linkedTabs = new LinkedTabs({ // eslint-disable-line
action: 'tab1',
defaultAction: 'tab1',
parentEl: '.linked-tabs',
@@ -35,7 +35,7 @@ require('~/lib/utils/bootstrap_linked_tabs');
});
it('should active the default tab action when the action is show', () => {
- const linkedTabs = new window.gl.LinkedTabs({ // eslint-disable-line
+ const linkedTabs = new LinkedTabs({ // eslint-disable-line
action: 'show',
defaultAction: 'tab1',
parentEl: '.linked-tabs',
@@ -49,7 +49,7 @@ require('~/lib/utils/bootstrap_linked_tabs');
it('should change the url according to the clicked tab', () => {
const historySpy = !phantomjs && spyOn(history, 'replaceState').and.callFake(() => {});
- const linkedTabs = new window.gl.LinkedTabs({ // eslint-disable-line
+ const linkedTabs = new LinkedTabs({
action: 'show',
defaultAction: 'tab1',
parentEl: '.linked-tabs',
diff --git a/spec/javascripts/ci_status_icon_spec.js b/spec/javascripts/ci_status_icon_spec.js
deleted file mode 100644
index c83416c15ef..00000000000
--- a/spec/javascripts/ci_status_icon_spec.js
+++ /dev/null
@@ -1,44 +0,0 @@
-import * as icons from '~/ci_status_icons';
-
-describe('CI status icons', () => {
- const statuses = [
- 'canceled',
- 'created',
- 'failed',
- 'manual',
- 'pending',
- 'running',
- 'skipped',
- 'success',
- 'warning',
- ];
-
- statuses.forEach((status) => {
- it(`should export a ${status} svg`, () => {
- const key = `${status.toUpperCase()}_SVG`;
-
- expect(Object.hasOwnProperty.call(icons, key)).toBe(true);
- expect(icons[key]).toMatch(/^<svg/);
- });
- });
-
- describe('default export map', () => {
- const entityIconNames = [
- 'icon_status_canceled',
- 'icon_status_created',
- 'icon_status_failed',
- 'icon_status_manual',
- 'icon_status_pending',
- 'icon_status_running',
- 'icon_status_skipped',
- 'icon_status_success',
- 'icon_status_warning',
- ];
-
- entityIconNames.forEach((iconName) => {
- it(`should have a '${iconName}' key`, () => {
- expect(Object.hasOwnProperty.call(icons.default, iconName)).toBe(true);
- });
- });
- });
-});
diff --git a/spec/javascripts/commit/pipelines/mock_data.js b/spec/javascripts/commit/pipelines/mock_data.js
deleted file mode 100644
index 82b00b4c1ec..00000000000
--- a/spec/javascripts/commit/pipelines/mock_data.js
+++ /dev/null
@@ -1,89 +0,0 @@
-export default {
- id: 73,
- user: {
- name: 'Administrator',
- username: 'root',
- id: 1,
- state: 'active',
- avatar_url: 'http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- web_url: 'http://localhost:3000/root',
- },
- path: '/root/review-app/pipelines/73',
- details: {
- status: {
- icon: 'icon_status_failed',
- text: 'failed',
- label: 'failed',
- group: 'failed',
- has_details: true,
- details_path: '/root/review-app/pipelines/73',
- },
- duration: null,
- finished_at: '2017-01-25T00:00:17.130Z',
- stages: [{
- name: 'build',
- title: 'build: failed',
- status: {
- icon: 'icon_status_failed',
- text: 'failed',
- label: 'failed',
- group: 'failed',
- has_details: true,
- details_path: '/root/review-app/pipelines/73#build',
- },
- path: '/root/review-app/pipelines/73#build',
- dropdown_path: '/root/review-app/pipelines/73/stage.json?stage=build',
- }],
- artifacts: [],
- manual_actions: [
- {
- name: 'stop_review',
- path: '/root/review-app/builds/1463/play',
- },
- {
- name: 'name',
- path: '/root/review-app/builds/1490/play',
- },
- ],
- },
- flags: {
- latest: true,
- triggered: false,
- stuck: false,
- yaml_errors: false,
- retryable: true,
- cancelable: false,
- },
- ref:
- {
- name: 'master',
- path: '/root/review-app/tree/master',
- tag: false,
- branch: true,
- },
- commit: {
- id: 'fbd79f04fa98717641deaaeb092a4d417237c2e4',
- short_id: 'fbd79f04',
- title: 'Update .gitlab-ci.yml',
- author_name: 'Administrator',
- author_email: 'admin@example.com',
- created_at: '2017-01-16T12:13:57.000-05:00',
- committer_name: 'Administrator',
- committer_email: 'admin@example.com',
- message: 'Update .gitlab-ci.yml',
- author: {
- name: 'Administrator',
- username: 'root',
- id: 1,
- state: 'active',
- avatar_url: 'http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- web_url: 'http://localhost:3000/root',
- },
- author_gravatar_url: 'http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- commit_url: 'http://localhost:3000/root/review-app/commit/fbd79f04fa98717641deaaeb092a4d417237c2e4',
- commit_path: '/root/review-app/commit/fbd79f04fa98717641deaaeb092a4d417237c2e4',
- },
- retry_path: '/root/review-app/pipelines/73/retry',
- created_at: '2017-01-16T17:13:59.800Z',
- updated_at: '2017-01-25T00:00:17.132Z',
-};
diff --git a/spec/javascripts/commit/pipelines/pipelines_spec.js b/spec/javascripts/commit/pipelines/pipelines_spec.js
index ad31448f81c..398c593eec2 100644
--- a/spec/javascripts/commit/pipelines/pipelines_spec.js
+++ b/spec/javascripts/commit/pipelines/pipelines_spec.js
@@ -1,12 +1,17 @@
import Vue from 'vue';
import PipelinesTable from '~/commit/pipelines/pipelines_table';
-import pipeline from './mock_data';
describe('Pipelines table in Commits and Merge requests', () => {
+ const jsonFixtureName = 'pipelines/pipelines.json';
+ let pipeline;
+
preloadFixtures('static/pipelines_table.html.raw');
+ preloadFixtures(jsonFixtureName);
beforeEach(() => {
loadFixtures('static/pipelines_table.html.raw');
+ const pipelines = getJSONFixture(jsonFixtureName).pipelines;
+ pipeline = pipelines.find(p => p.id === 1);
});
describe('successful request', () => {
diff --git a/spec/javascripts/commits_spec.js b/spec/javascripts/commits_spec.js
index 05260760c43..187db7485a5 100644
--- a/spec/javascripts/commits_spec.js
+++ b/spec/javascripts/commits_spec.js
@@ -1,8 +1,8 @@
/* global CommitsList */
-require('vendor/jquery.endless-scroll');
-require('~/pager');
-require('~/commits');
+import 'vendor/jquery.endless-scroll';
+import '~/pager';
+import '~/commits';
(() => {
// TODO: remove this hack!
diff --git a/spec/javascripts/datetime_utility_spec.js b/spec/javascripts/datetime_utility_spec.js
index d5eec10be42..e347c980c78 100644
--- a/spec/javascripts/datetime_utility_spec.js
+++ b/spec/javascripts/datetime_utility_spec.js
@@ -1,4 +1,4 @@
-require('~/lib/utils/datetime_utility');
+import '~/lib/utils/datetime_utility';
(() => {
describe('Date time utils', () => {
diff --git a/spec/javascripts/diff_comments_store_spec.js b/spec/javascripts/diff_comments_store_spec.js
index 66ece7e4f41..d6fc6b56b82 100644
--- a/spec/javascripts/diff_comments_store_spec.js
+++ b/spec/javascripts/diff_comments_store_spec.js
@@ -1,9 +1,9 @@
/* eslint-disable jasmine/no-global-setup, dot-notation, jasmine/no-expect-in-setup-teardown, max-len */
/* global CommentsStore */
-require('~/diff_notes/models/discussion');
-require('~/diff_notes/models/note');
-require('~/diff_notes/stores/comments');
+import '~/diff_notes/models/discussion';
+import '~/diff_notes/models/note';
+import '~/diff_notes/stores/comments';
function createDiscussion(noteId = 1, resolved = true) {
CommentsStore.create({
diff --git a/spec/javascripts/droplab/drop_down_spec.js b/spec/javascripts/droplab/drop_down_spec.js
index e7786e8cc2c..2bbcebeeac0 100644
--- a/spec/javascripts/droplab/drop_down_spec.js
+++ b/spec/javascripts/droplab/drop_down_spec.js
@@ -1,5 +1,3 @@
-/* eslint-disable */
-
import DropDown from '~/droplab/drop_down';
import utils from '~/droplab/utils';
import { SELECTED_CLASS, IGNORE_CLASS } from '~/droplab/constants';
@@ -17,7 +15,7 @@ describe('DropDown', function () {
it('sets the .hidden property to true', function () {
expect(this.dropdown.hidden).toBe(true);
- })
+ });
it('sets the .list property', function () {
expect(this.dropdown.list).toBe(this.list);
@@ -152,7 +150,7 @@ describe('DropDown', function () {
it('should call addSelectedClass', function () {
expect(this.dropdown.addSelectedClass).toHaveBeenCalledWith(this.closestElement);
- })
+ });
it('should call .preventDefault', function () {
expect(this.event.preventDefault).toHaveBeenCalled();
@@ -293,36 +291,6 @@ describe('DropDown', function () {
});
});
- describe('toggle', function () {
- beforeEach(function () {
- this.dropdown = { hidden: true, show: () => {}, hide: () => {} };
-
- spyOn(this.dropdown, 'show');
- spyOn(this.dropdown, 'hide');
-
- DropDown.prototype.toggle.call(this.dropdown);
- });
-
- it('should call .show if hidden is true', function () {
- expect(this.dropdown.show).toHaveBeenCalled();
- });
-
- describe('if hidden is false', function () {
- beforeEach(function () {
- this.dropdown = { hidden: false, show: () => {}, hide: () => {} };
-
- spyOn(this.dropdown, 'show');
- spyOn(this.dropdown, 'hide');
-
- DropDown.prototype.toggle.call(this.dropdown);
- });
-
- it('should call .show if hidden is true', function () {
- expect(this.dropdown.hide).toHaveBeenCalled();
- });
- });
- });
-
describe('setData', function () {
beforeEach(function () {
this.dropdown = { render: () => {} };
@@ -399,7 +367,7 @@ describe('DropDown', function () {
expect(this.data.map).toHaveBeenCalledWith(jasmine.any(Function));
});
- it('should call .renderChildren for each data item', function() {
+ it('should call .renderChildren for each data item', function () {
expect(this.dropdown.renderChildren.calls.count()).toBe(this.data.length);
});
@@ -407,7 +375,7 @@ describe('DropDown', function () {
expect(this.renderableList.innerHTML).toBe('01');
});
- describe('if no data argument is passed' , function () {
+ describe('if no data argument is passed', function () {
beforeEach(function () {
this.data.map.calls.reset();
this.dropdown.renderChildren.calls.reset();
@@ -446,14 +414,14 @@ describe('DropDown', function () {
describe('renderChildren', function () {
beforeEach(function () {
this.templateString = 'templateString';
- this.dropdown = { setImagesSrc: () => {}, templateString: this.templateString };
+ this.dropdown = { templateString: this.templateString };
this.data = { droplab_hidden: true };
this.html = 'html';
this.template = { firstChild: { outerHTML: 'outerHTML', style: {} } };
spyOn(utils, 'template').and.returnValue(this.html);
spyOn(document, 'createElement').and.returnValue(this.template);
- spyOn(this.dropdown, 'setImagesSrc');
+ spyOn(DropDown, 'setImagesSrc');
this.renderChildren = DropDown.prototype.renderChildren.call(this.dropdown, this.data);
});
@@ -471,7 +439,7 @@ describe('DropDown', function () {
});
it('should call .setImagesSrc with the template', function () {
- expect(this.dropdown.setImagesSrc).toHaveBeenCalledWith(this.template);
+ expect(DropDown.setImagesSrc).toHaveBeenCalledWith(this.template);
});
it('should set the template display to none', function () {
@@ -496,12 +464,11 @@ describe('DropDown', function () {
describe('setImagesSrc', function () {
beforeEach(function () {
- this.dropdown = {};
this.template = { querySelectorAll: () => {} };
spyOn(this.template, 'querySelectorAll').and.returnValue([]);
- DropDown.prototype.setImagesSrc.call(this.dropdown, this.template);
+ DropDown.setImagesSrc(this.template);
});
it('should call .querySelectorAll', function () {
@@ -562,7 +529,7 @@ describe('DropDown', function () {
describe('toggle', function () {
beforeEach(function () {
- this.hidden = true
+ this.hidden = true;
this.dropdown = { hidden: this.hidden, show: () => {}, hide: () => {} };
spyOn(this.dropdown, 'show');
@@ -577,7 +544,7 @@ describe('DropDown', function () {
describe('if .hidden is false', function () {
beforeEach(function () {
- this.hidden = false
+ this.hidden = false;
this.dropdown = { hidden: this.hidden, show: () => {}, hide: () => {} };
spyOn(this.dropdown, 'show');
diff --git a/spec/javascripts/droplab/hook_spec.js b/spec/javascripts/droplab/hook_spec.js
index 8ebdcdd1404..75bf5f3d611 100644
--- a/spec/javascripts/droplab/hook_spec.js
+++ b/spec/javascripts/droplab/hook_spec.js
@@ -1,5 +1,3 @@
-/* eslint-disable */
-
import Hook from '~/droplab/hook';
import * as dropdownSrc from '~/droplab/drop_down';
@@ -73,10 +71,4 @@ describe('Hook', function () {
});
});
});
-
- describe('addEvents', function () {
- it('should exist', function () {
- expect(Hook.prototype.hasOwnProperty('addEvents')).toBe(true);
- });
- });
});
diff --git a/spec/javascripts/extensions/array_spec.js b/spec/javascripts/extensions/array_spec.js
index 4b871fe967d..b1b81b4efc2 100644
--- a/spec/javascripts/extensions/array_spec.js
+++ b/spec/javascripts/extensions/array_spec.js
@@ -1,6 +1,6 @@
/* eslint-disable space-before-function-paren, no-var */
-require('~/extensions/array');
+import '~/extensions/array';
(function() {
describe('Array extensions', function() {
diff --git a/spec/javascripts/filtered_search/dropdown_user_spec.js b/spec/javascripts/filtered_search/dropdown_user_spec.js
index 3f92fe4701e..0d8bdf4c8e7 100644
--- a/spec/javascripts/filtered_search/dropdown_user_spec.js
+++ b/spec/javascripts/filtered_search/dropdown_user_spec.js
@@ -1,7 +1,7 @@
-require('~/filtered_search/dropdown_utils');
-require('~/filtered_search/filtered_search_tokenizer');
-require('~/filtered_search/filtered_search_dropdown');
-require('~/filtered_search/dropdown_user');
+import '~/filtered_search/dropdown_utils';
+import '~/filtered_search/filtered_search_tokenizer';
+import '~/filtered_search/filtered_search_dropdown';
+import '~/filtered_search/dropdown_user';
describe('Dropdown User', () => {
describe('getSearchInput', () => {
diff --git a/spec/javascripts/filtered_search/dropdown_utils_spec.js b/spec/javascripts/filtered_search/dropdown_utils_spec.js
index c820c955172..a68e315e3e4 100644
--- a/spec/javascripts/filtered_search/dropdown_utils_spec.js
+++ b/spec/javascripts/filtered_search/dropdown_utils_spec.js
@@ -1,7 +1,7 @@
-require('~/extensions/array');
-require('~/filtered_search/dropdown_utils');
-require('~/filtered_search/filtered_search_tokenizer');
-require('~/filtered_search/filtered_search_dropdown_manager');
+import '~/extensions/array';
+import '~/filtered_search/dropdown_utils';
+import '~/filtered_search/filtered_search_tokenizer';
+import '~/filtered_search/filtered_search_dropdown_manager';
describe('Dropdown Utils', () => {
describe('getEscapedText', () => {
diff --git a/spec/javascripts/filtered_search/filtered_search_dropdown_manager_spec.js b/spec/javascripts/filtered_search/filtered_search_dropdown_manager_spec.js
index 17bf8932489..c92a147b937 100644
--- a/spec/javascripts/filtered_search/filtered_search_dropdown_manager_spec.js
+++ b/spec/javascripts/filtered_search/filtered_search_dropdown_manager_spec.js
@@ -1,7 +1,7 @@
-require('~/extensions/array');
-require('~/filtered_search/filtered_search_visual_tokens');
-require('~/filtered_search/filtered_search_tokenizer');
-require('~/filtered_search/filtered_search_dropdown_manager');
+import '~/extensions/array';
+import '~/filtered_search/filtered_search_visual_tokens';
+import '~/filtered_search/filtered_search_tokenizer';
+import '~/filtered_search/filtered_search_dropdown_manager';
describe('Filtered Search Dropdown Manager', () => {
describe('addWordToInput', () => {
diff --git a/spec/javascripts/filtered_search/filtered_search_manager_spec.js b/spec/javascripts/filtered_search/filtered_search_manager_spec.js
index 063d547d00c..7c7def3470d 100644
--- a/spec/javascripts/filtered_search/filtered_search_manager_spec.js
+++ b/spec/javascripts/filtered_search/filtered_search_manager_spec.js
@@ -1,14 +1,13 @@
import * as recentSearchesStoreSrc from '~/filtered_search/stores/recent_searches_store';
import RecentSearchesService from '~/filtered_search/services/recent_searches_service';
import RecentSearchesServiceError from '~/filtered_search/services/recent_searches_service_error';
-
-require('~/lib/utils/url_utility');
-require('~/lib/utils/common_utils');
-require('~/filtered_search/filtered_search_token_keys');
-require('~/filtered_search/filtered_search_tokenizer');
-require('~/filtered_search/filtered_search_dropdown_manager');
-require('~/filtered_search/filtered_search_manager');
-const FilteredSearchSpecHelper = require('../helpers/filtered_search_spec_helper');
+import '~/lib/utils/url_utility';
+import '~/lib/utils/common_utils';
+import '~/filtered_search/filtered_search_token_keys';
+import '~/filtered_search/filtered_search_tokenizer';
+import '~/filtered_search/filtered_search_dropdown_manager';
+import '~/filtered_search/filtered_search_manager';
+import FilteredSearchSpecHelper from '../helpers/filtered_search_spec_helper';
describe('Filtered Search Manager', () => {
let input;
diff --git a/spec/javascripts/filtered_search/filtered_search_token_keys_spec.js b/spec/javascripts/filtered_search/filtered_search_token_keys_spec.js
index 6f9fa434c35..1a7631994b4 100644
--- a/spec/javascripts/filtered_search/filtered_search_token_keys_spec.js
+++ b/spec/javascripts/filtered_search/filtered_search_token_keys_spec.js
@@ -1,5 +1,5 @@
-require('~/extensions/array');
-require('~/filtered_search/filtered_search_token_keys');
+import '~/extensions/array';
+import '~/filtered_search/filtered_search_token_keys';
describe('Filtered Search Token Keys', () => {
describe('get', () => {
diff --git a/spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js b/spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js
index 3e2e577f115..9561580c839 100644
--- a/spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js
+++ b/spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js
@@ -1,6 +1,6 @@
-require('~/extensions/array');
-require('~/filtered_search/filtered_search_token_keys');
-require('~/filtered_search/filtered_search_tokenizer');
+import '~/extensions/array';
+import '~/filtered_search/filtered_search_token_keys';
+import '~/filtered_search/filtered_search_tokenizer';
describe('Filtered Search Tokenizer', () => {
describe('processTokens', () => {
diff --git a/spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js b/spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js
index 8b750561eb7..c5fa2b17106 100644
--- a/spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js
+++ b/spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js
@@ -1,7 +1,7 @@
import AjaxCache from '~/lib/utils/ajax_cache';
-require('~/filtered_search/filtered_search_visual_tokens');
-const FilteredSearchSpecHelper = require('../helpers/filtered_search_spec_helper');
+import '~/filtered_search/filtered_search_visual_tokens';
+import FilteredSearchSpecHelper from '../helpers/filtered_search_spec_helper';
describe('Filtered Search Visual Tokens', () => {
let tokensContainer;
diff --git a/spec/javascripts/fixtures/balsamiq.rb b/spec/javascripts/fixtures/balsamiq.rb
new file mode 100644
index 00000000000..b5372821bf5
--- /dev/null
+++ b/spec/javascripts/fixtures/balsamiq.rb
@@ -0,0 +1,18 @@
+require 'spec_helper'
+
+describe 'Balsamiq file', '(JavaScript fixtures)', type: :controller do
+ include JavaScriptFixturesHelpers
+
+ let(:namespace) { create(:namespace, name: 'frontend-fixtures' )}
+ let(:project) { create(:project, namespace: namespace, path: 'balsamiq-project') }
+
+ before(:all) do
+ clean_frontend_fixtures('blob/balsamiq/')
+ end
+
+ it 'blob/balsamiq/test.bmpr' do |example|
+ blob = project.repository.blob_at('b89b56d79', 'files/images/balsamiq.bmpr')
+
+ store_frontend_fixture(blob.data.force_encoding('utf-8'), example.description)
+ end
+end
diff --git a/spec/javascripts/fixtures/balsamiq_viewer.html.haml b/spec/javascripts/fixtures/balsamiq_viewer.html.haml
new file mode 100644
index 00000000000..18166ba4901
--- /dev/null
+++ b/spec/javascripts/fixtures/balsamiq_viewer.html.haml
@@ -0,0 +1 @@
+.file-content.balsamiq-viewer#js-balsamiq-viewer{ data: { endpoint: '/test' } }
diff --git a/spec/javascripts/fixtures/graph.html.haml b/spec/javascripts/fixtures/graph.html.haml
new file mode 100644
index 00000000000..4fedb0f1ded
--- /dev/null
+++ b/spec/javascripts/fixtures/graph.html.haml
@@ -0,0 +1 @@
+#js-pipeline-graph-vue{ data: { endpoint: "foo" } }
diff --git a/spec/javascripts/fixtures/merge_requests.rb b/spec/javascripts/fixtures/merge_requests.rb
index 47d904b865b..a746a776548 100644
--- a/spec/javascripts/fixtures/merge_requests.rb
+++ b/spec/javascripts/fixtures/merge_requests.rb
@@ -16,6 +16,16 @@ describe Projects::MergeRequestsController, '(JavaScript fixtures)', type: :cont
sha: merge_request.diff_head_sha
)
end
+ let(:path) { "files/ruby/popen.rb" }
+ let(:position) do
+ Gitlab::Diff::Position.new(
+ old_path: path,
+ new_path: path,
+ old_line: nil,
+ new_line: 14,
+ diff_refs: merge_request.diff_refs
+ )
+ end
render_views
@@ -39,6 +49,12 @@ describe Projects::MergeRequestsController, '(JavaScript fixtures)', type: :cont
render_merge_request(example.description, merged_merge_request)
end
+ it 'merge_requests/diff_comment.html.raw' do |example|
+ create(:diff_note_on_merge_request, project: project, author: admin, position: position, noteable: merge_request)
+ create(:note_on_merge_request, author: admin, project: project, noteable: merge_request)
+ render_merge_request(example.description, merge_request)
+ end
+
private
def render_merge_request(fixture_file_name, merge_request)
diff --git a/spec/javascripts/fixtures/pipelines.rb b/spec/javascripts/fixtures/pipelines.rb
new file mode 100644
index 00000000000..daafbac86db
--- /dev/null
+++ b/spec/javascripts/fixtures/pipelines.rb
@@ -0,0 +1,35 @@
+require 'spec_helper'
+
+describe Projects::PipelinesController, '(JavaScript fixtures)', type: :controller do
+ include JavaScriptFixturesHelpers
+
+ let(:admin) { create(:admin) }
+ let(:namespace) { create(:namespace, name: 'frontend-fixtures' )}
+ let(:project) { create(:project, :repository, namespace: namespace, path: 'pipelines-project') }
+ let(:commit) { create(:commit, project: project) }
+ let(:commit_without_author) { RepoHelpers.another_sample_commit }
+ let!(:user) { create(:user, email: commit.author_email) }
+ let!(:pipeline) { create(:ci_pipeline, project: project, sha: commit.id, user: user) }
+ let!(:pipeline_without_author) { create(:ci_pipeline, project: project, sha: commit_without_author.id) }
+ let!(:pipeline_without_commit) { create(:ci_pipeline, project: project, sha: '0000') }
+
+ render_views
+
+ before(:all) do
+ clean_frontend_fixtures('pipelines/')
+ end
+
+ before(:each) do
+ sign_in(admin)
+ end
+
+ it 'pipelines/pipelines.json' do |example|
+ get :index,
+ namespace_id: namespace,
+ project_id: project,
+ format: :json
+
+ expect(response).to be_success
+ store_frontend_fixture(response, example.description)
+ end
+end
diff --git a/spec/javascripts/gfm_auto_complete_spec.js b/spec/javascripts/gfm_auto_complete_spec.js
index 5dfa4008fbd..ad0c7264616 100644
--- a/spec/javascripts/gfm_auto_complete_spec.js
+++ b/spec/javascripts/gfm_auto_complete_spec.js
@@ -1,13 +1,15 @@
/* eslint no-param-reassign: "off" */
-require('~/gfm_auto_complete');
-require('vendor/jquery.caret');
-require('vendor/jquery.atwho');
+import GfmAutoComplete from '~/gfm_auto_complete';
-const global = window.gl || (window.gl = {});
-const GfmAutoComplete = global.GfmAutoComplete;
+import 'vendor/jquery.caret';
+import 'vendor/jquery.atwho';
describe('GfmAutoComplete', function () {
+ const gfmAutoCompleteCallbacks = GfmAutoComplete.prototype.getDefaultCallbacks.call({
+ fetchData: () => {},
+ });
+
describe('DefaultOptions.sorter', function () {
describe('assets loading', function () {
beforeEach(function () {
@@ -16,7 +18,7 @@ describe('GfmAutoComplete', function () {
this.atwhoInstance = { setting: {} };
this.items = [];
- this.sorterValue = GfmAutoComplete.DefaultOptions.sorter
+ this.sorterValue = gfmAutoCompleteCallbacks.sorter
.call(this.atwhoInstance, '', this.items);
});
@@ -38,7 +40,7 @@ describe('GfmAutoComplete', function () {
it('should enable highlightFirst if alwaysHighlightFirst is set', function () {
const atwhoInstance = { setting: { alwaysHighlightFirst: true } };
- GfmAutoComplete.DefaultOptions.sorter.call(atwhoInstance);
+ gfmAutoCompleteCallbacks.sorter.call(atwhoInstance);
expect(atwhoInstance.setting.highlightFirst).toBe(true);
});
@@ -46,7 +48,7 @@ describe('GfmAutoComplete', function () {
it('should enable highlightFirst if a query is present', function () {
const atwhoInstance = { setting: {} };
- GfmAutoComplete.DefaultOptions.sorter.call(atwhoInstance, 'query');
+ gfmAutoCompleteCallbacks.sorter.call(atwhoInstance, 'query');
expect(atwhoInstance.setting.highlightFirst).toBe(true);
});
@@ -58,7 +60,7 @@ describe('GfmAutoComplete', function () {
const items = [];
const searchKey = 'searchKey';
- GfmAutoComplete.DefaultOptions.sorter.call(atwhoInstance, query, items, searchKey);
+ gfmAutoCompleteCallbacks.sorter.call(atwhoInstance, query, items, searchKey);
expect($.fn.atwho.default.callbacks.sorter).toHaveBeenCalledWith(query, items, searchKey);
});
@@ -67,7 +69,7 @@ describe('GfmAutoComplete', function () {
describe('DefaultOptions.matcher', function () {
const defaultMatcher = (context, flag, subtext) => (
- GfmAutoComplete.DefaultOptions.matcher.call(context, flag, subtext)
+ gfmAutoCompleteCallbacks.matcher.call(context, flag, subtext)
);
const flagsUseDefaultMatcher = ['@', '#', '!', '~', '%'];
diff --git a/spec/javascripts/gl_dropdown_spec.js b/spec/javascripts/gl_dropdown_spec.js
index c207fb00a47..3292590b9ed 100644
--- a/spec/javascripts/gl_dropdown_spec.js
+++ b/spec/javascripts/gl_dropdown_spec.js
@@ -1,9 +1,8 @@
/* eslint-disable comma-dangle, no-param-reassign, no-unused-expressions, max-len */
-require('~/gl_dropdown');
-require('~/lib/utils/common_utils');
-require('~/lib/utils/type_utility');
-require('~/lib/utils/url_utility');
+import '~/gl_dropdown';
+import '~/lib/utils/common_utils';
+import '~/lib/utils/url_utility';
(() => {
const NON_SELECTABLE_CLASSES = '.divider, .separator, .dropdown-header, .dropdown-menu-empty-link';
@@ -44,21 +43,18 @@ require('~/lib/utils/url_utility');
preloadFixtures('static/gl_dropdown.html.raw');
loadJSONFixtures('projects.json');
- function initDropDown(hasRemote, isFilterable) {
- this.dropdownButtonElement = $('#js-project-dropdown', this.dropdownContainerElement).glDropdown({
+ function initDropDown(hasRemote, isFilterable, extraOpts = {}) {
+ const options = Object.assign({
selectable: true,
filterable: isFilterable,
data: hasRemote ? remoteMock.bind({}, this.projectsData) : this.projectsData,
search: {
fields: ['name']
},
- text: (project) => {
- (project.name_with_namespace || project.name);
- },
- id: (project) => {
- project.id;
- }
- });
+ text: project => (project.name_with_namespace || project.name),
+ id: project => project.id,
+ }, extraOpts);
+ this.dropdownButtonElement = $('#js-project-dropdown', this.dropdownContainerElement).glDropdown(options);
}
beforeEach(() => {
@@ -80,6 +76,37 @@ require('~/lib/utils/url_utility');
expect(this.dropdownContainerElement).toHaveClass('open');
});
+ it('escapes HTML as text', () => {
+ this.projectsData[0].name_with_namespace = '<script>alert("testing");</script>';
+
+ initDropDown.call(this, false);
+
+ this.dropdownButtonElement.click();
+
+ expect(
+ $('.dropdown-content li:first-child').text(),
+ ).toBe('<script>alert("testing");</script>');
+ });
+
+ it('should output HTML when highlighting', () => {
+ this.projectsData[0].name_with_namespace = 'testing';
+ $('.dropdown-input .dropdown-input-field').val('test');
+
+ initDropDown.call(this, false, true, {
+ highlight: true,
+ });
+
+ this.dropdownButtonElement.click();
+
+ expect(
+ $('.dropdown-content li:first-child').text(),
+ ).toBe('testing');
+
+ expect(
+ $('.dropdown-content li:first-child a').html(),
+ ).toBe('<b>t</b><b>e</b><b>s</b><b>t</b>ing');
+ });
+
describe('that is open', () => {
beforeEach(() => {
initDropDown.call(this, false, false);
diff --git a/spec/javascripts/gl_field_errors_spec.js b/spec/javascripts/gl_field_errors_spec.js
index 733023481f5..fa24aa426b6 100644
--- a/spec/javascripts/gl_field_errors_spec.js
+++ b/spec/javascripts/gl_field_errors_spec.js
@@ -1,6 +1,6 @@
/* eslint-disable space-before-function-paren, arrow-body-style */
-require('~/gl_field_errors');
+import '~/gl_field_errors';
((global) => {
preloadFixtures('static/gl_field_errors.html.raw');
diff --git a/spec/javascripts/gl_form_spec.js b/spec/javascripts/gl_form_spec.js
index 71d6e2a7e22..837feacec1d 100644
--- a/spec/javascripts/gl_form_spec.js
+++ b/spec/javascripts/gl_form_spec.js
@@ -1,9 +1,9 @@
-/* global autosize */
+import autosize from 'vendor/autosize';
+import '~/gl_form';
+import '~/lib/utils/text_utility';
+import '~/lib/utils/common_utils';
-window.autosize = require('vendor/autosize');
-require('~/gl_form');
-require('~/lib/utils/text_utility');
-require('~/lib/utils/common_utils');
+window.autosize = autosize;
describe('GLForm', () => {
const global = window.gl || (window.gl = {});
@@ -27,12 +27,12 @@ describe('GLForm', () => {
$.prototype.off.calls.reset();
$.prototype.on.calls.reset();
$.prototype.css.calls.reset();
- autosize.calls.reset();
+ window.autosize.calls.reset();
done();
});
});
- describe('.setupAutosize', () => {
+ describe('setupAutosize', () => {
beforeEach((done) => {
this.glForm.setupAutosize();
setTimeout(() => {
@@ -51,7 +51,7 @@ describe('GLForm', () => {
});
it('should autosize the textarea', () => {
- expect(autosize).toHaveBeenCalledWith(jasmine.any(Object));
+ expect(window.autosize).toHaveBeenCalledWith(jasmine.any(Object));
});
it('should set the resize css property to vertical', () => {
@@ -59,7 +59,7 @@ describe('GLForm', () => {
});
});
- describe('.setHeightData', () => {
+ describe('setHeightData', () => {
beforeEach(() => {
spyOn($.prototype, 'data');
spyOn($.prototype, 'outerHeight').and.returnValue(200);
@@ -75,13 +75,13 @@ describe('GLForm', () => {
});
});
- describe('.destroyAutosize', () => {
+ describe('destroyAutosize', () => {
describe('when called', () => {
beforeEach(() => {
spyOn($.prototype, 'data');
spyOn($.prototype, 'outerHeight').and.returnValue(200);
spyOn(window, 'outerHeight').and.returnValue(400);
- spyOn(autosize, 'destroy');
+ spyOn(window.autosize, 'destroy');
this.glForm.destroyAutosize();
});
@@ -95,7 +95,7 @@ describe('GLForm', () => {
});
it('should call autosize destroy', () => {
- expect(autosize.destroy).toHaveBeenCalledWith(this.textarea);
+ expect(window.autosize.destroy).toHaveBeenCalledWith(this.textarea);
});
it('should set the data-height attribute', () => {
@@ -114,9 +114,9 @@ describe('GLForm', () => {
it('should return undefined if the data-height equals the outerHeight', () => {
spyOn($.prototype, 'outerHeight').and.returnValue(200);
spyOn($.prototype, 'data').and.returnValue(200);
- spyOn(autosize, 'destroy');
+ spyOn(window.autosize, 'destroy');
expect(this.glForm.destroyAutosize()).toBeUndefined();
- expect(autosize.destroy).not.toHaveBeenCalled();
+ expect(window.autosize.destroy).not.toHaveBeenCalled();
});
});
});
diff --git a/spec/javascripts/header_spec.js b/spec/javascripts/header_spec.js
index b5dde5525e5..0e01934d3a3 100644
--- a/spec/javascripts/header_spec.js
+++ b/spec/javascripts/header_spec.js
@@ -1,7 +1,7 @@
/* eslint-disable space-before-function-paren, no-var */
-require('~/header');
-require('~/lib/utils/text_utility');
+import '~/header';
+import '~/lib/utils/text_utility';
(function() {
describe('Header', function() {
diff --git a/spec/javascripts/helpers/class_spec_helper.js b/spec/javascripts/helpers/class_spec_helper.js
index 61db27a8fcc..7a60d33b471 100644
--- a/spec/javascripts/helpers/class_spec_helper.js
+++ b/spec/javascripts/helpers/class_spec_helper.js
@@ -1,4 +1,4 @@
-class ClassSpecHelper {
+export default class ClassSpecHelper {
static itShouldBeAStaticMethod(base, method) {
return it('should be a static method', () => {
expect(Object.prototype.hasOwnProperty.call(base, method)).toBeTruthy();
@@ -7,5 +7,3 @@ class ClassSpecHelper {
}
window.ClassSpecHelper = ClassSpecHelper;
-
-module.exports = ClassSpecHelper;
diff --git a/spec/javascripts/helpers/class_spec_helper_spec.js b/spec/javascripts/helpers/class_spec_helper_spec.js
index 0a61e561640..686b8eaed31 100644
--- a/spec/javascripts/helpers/class_spec_helper_spec.js
+++ b/spec/javascripts/helpers/class_spec_helper_spec.js
@@ -1,9 +1,9 @@
/* global ClassSpecHelper */
-require('./class_spec_helper');
+import './class_spec_helper';
describe('ClassSpecHelper', () => {
- describe('.itShouldBeAStaticMethod', function () {
+ describe('itShouldBeAStaticMethod', function () {
beforeEach(() => {
class TestClass {
instanceMethod() { this.prop = 'val'; }
diff --git a/spec/javascripts/helpers/filtered_search_spec_helper.js b/spec/javascripts/helpers/filtered_search_spec_helper.js
index b8d4a93b1ab..0d7092a2357 100644
--- a/spec/javascripts/helpers/filtered_search_spec_helper.js
+++ b/spec/javascripts/helpers/filtered_search_spec_helper.js
@@ -1,4 +1,4 @@
-class FilteredSearchSpecHelper {
+export default class FilteredSearchSpecHelper {
static createFilterVisualTokenHTML(name, value, isSelected) {
return FilteredSearchSpecHelper.createFilterVisualToken(name, value, isSelected).outerHTML;
}
@@ -53,5 +53,3 @@ class FilteredSearchSpecHelper {
`;
}
}
-
-module.exports = FilteredSearchSpecHelper;
diff --git a/spec/javascripts/issuable_spec.js b/spec/javascripts/issuable_spec.js
index 26d87cc5931..49fa2cb8367 100644
--- a/spec/javascripts/issuable_spec.js
+++ b/spec/javascripts/issuable_spec.js
@@ -1,7 +1,7 @@
/* global Issuable */
-require('~/lib/utils/url_utility');
-require('~/issuable');
+import '~/lib/utils/url_utility';
+import '~/issuable';
(() => {
const BASE_URL = '/user/project/issues?scope=all&state=closed';
diff --git a/spec/javascripts/issue_show/components/app_spec.js b/spec/javascripts/issue_show/components/app_spec.js
new file mode 100644
index 00000000000..ee456869c53
--- /dev/null
+++ b/spec/javascripts/issue_show/components/app_spec.js
@@ -0,0 +1,60 @@
+import Vue from 'vue';
+import '~/render_math';
+import '~/render_gfm';
+import issuableApp from '~/issue_show/components/app.vue';
+import issueShowData from '../mock_data';
+
+const issueShowInterceptor = data => (request, next) => {
+ next(request.respondWith(JSON.stringify(data), {
+ status: 200,
+ headers: {
+ 'POLL-INTERVAL': 1,
+ },
+ }));
+};
+
+describe('Issuable output', () => {
+ document.body.innerHTML = '<span id="task_status"></span>';
+
+ let vm;
+
+ beforeEach(() => {
+ const IssuableDescriptionComponent = Vue.extend(issuableApp);
+ Vue.http.interceptors.push(issueShowInterceptor(issueShowData.initialRequest));
+
+ vm = new IssuableDescriptionComponent({
+ propsData: {
+ canUpdate: true,
+ endpoint: '/gitlab-org/gitlab-shell/issues/9/realtime_changes',
+ issuableRef: '#1',
+ initialTitle: '',
+ initialDescriptionHtml: '',
+ initialDescriptionText: '',
+ },
+ }).$mount();
+ });
+
+ afterEach(() => {
+ Vue.http.interceptors = _.without(Vue.http.interceptors, issueShowInterceptor);
+ });
+
+ it('should render a title/description and update title/description on update', (done) => {
+ setTimeout(() => {
+ expect(document.querySelector('title').innerText).toContain('this is a title (#1)');
+ expect(vm.$el.querySelector('.title').innerHTML).toContain('<p>this is a title</p>');
+ expect(vm.$el.querySelector('.wiki').innerHTML).toContain('<p>this is a description!</p>');
+ expect(vm.$el.querySelector('.js-task-list-field').value).toContain('this is a description');
+
+ Vue.http.interceptors.push(issueShowInterceptor(issueShowData.secondRequest));
+
+ setTimeout(() => {
+ expect(document.querySelector('title').innerText).toContain('2 (#1)');
+ expect(vm.$el.querySelector('.title').innerHTML).toContain('<p>2</p>');
+ expect(vm.$el.querySelector('.wiki').innerHTML).toContain('<p>42</p>');
+ expect(vm.$el.querySelector('.js-task-list-field').value).toContain('42');
+
+ done();
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/issue_show/components/description_spec.js b/spec/javascripts/issue_show/components/description_spec.js
new file mode 100644
index 00000000000..408349cc42d
--- /dev/null
+++ b/spec/javascripts/issue_show/components/description_spec.js
@@ -0,0 +1,99 @@
+import Vue from 'vue';
+import descriptionComponent from '~/issue_show/components/description.vue';
+
+describe('Description component', () => {
+ let vm;
+
+ beforeEach(() => {
+ const Component = Vue.extend(descriptionComponent);
+
+ if (!document.querySelector('.issuable-meta')) {
+ const metaData = document.createElement('div');
+ metaData.classList.add('issuable-meta');
+ metaData.innerHTML = '<span id="task_status"></span><span id="task_status_short"></span>';
+
+ document.body.appendChild(metaData);
+ }
+
+ vm = new Component({
+ propsData: {
+ canUpdate: true,
+ descriptionHtml: 'test',
+ descriptionText: 'test',
+ updatedAt: new Date().toString(),
+ taskStatus: '',
+ },
+ }).$mount();
+ });
+
+ it('animates description changes', (done) => {
+ vm.descriptionHtml = 'changed';
+
+ Vue.nextTick(() => {
+ expect(
+ vm.$el.querySelector('.wiki').classList.contains('issue-realtime-pre-pulse'),
+ ).toBeTruthy();
+
+ setTimeout(() => {
+ expect(
+ vm.$el.querySelector('.wiki').classList.contains('issue-realtime-trigger-pulse'),
+ ).toBeTruthy();
+
+ done();
+ });
+ });
+ });
+
+ it('re-inits the TaskList when description changed', (done) => {
+ spyOn(gl, 'TaskList');
+ vm.descriptionHtml = 'changed';
+
+ setTimeout(() => {
+ expect(
+ gl.TaskList,
+ ).toHaveBeenCalled();
+
+ done();
+ });
+ });
+
+ it('does not re-init the TaskList when canUpdate is false', (done) => {
+ spyOn(gl, 'TaskList');
+ vm.canUpdate = false;
+ vm.descriptionHtml = 'changed';
+
+ setTimeout(() => {
+ expect(
+ gl.TaskList,
+ ).not.toHaveBeenCalled();
+
+ done();
+ });
+ });
+
+ describe('taskStatus', () => {
+ it('adds full taskStatus', (done) => {
+ vm.taskStatus = '1 of 1';
+
+ setTimeout(() => {
+ expect(
+ document.querySelector('.issuable-meta #task_status').textContent.trim(),
+ ).toBe('1 of 1');
+
+ done();
+ });
+ });
+
+ it('adds short taskStatus', (done) => {
+ vm.taskStatus = '1 of 1';
+
+ setTimeout(() => {
+ expect(
+ document.querySelector('.issuable-meta #task_status_short').textContent.trim(),
+ ).toBe('1/1 task');
+
+ done();
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/issue_show/components/title_spec.js b/spec/javascripts/issue_show/components/title_spec.js
new file mode 100644
index 00000000000..2f953e7e92e
--- /dev/null
+++ b/spec/javascripts/issue_show/components/title_spec.js
@@ -0,0 +1,67 @@
+import Vue from 'vue';
+import titleComponent from '~/issue_show/components/title.vue';
+
+describe('Title component', () => {
+ let vm;
+
+ beforeEach(() => {
+ const Component = Vue.extend(titleComponent);
+ vm = new Component({
+ propsData: {
+ issuableRef: '#1',
+ titleHtml: 'Testing <img />',
+ titleText: 'Testing',
+ },
+ }).$mount();
+ });
+
+ it('renders title HTML', () => {
+ expect(
+ vm.$el.innerHTML.trim(),
+ ).toBe('Testing <img>');
+ });
+
+ it('updates page title when changing titleHtml', (done) => {
+ spyOn(vm, 'setPageTitle');
+ vm.titleHtml = 'test';
+
+ Vue.nextTick(() => {
+ expect(
+ vm.setPageTitle,
+ ).toHaveBeenCalled();
+
+ done();
+ });
+ });
+
+ it('animates title changes', (done) => {
+ vm.titleHtml = 'test';
+
+ Vue.nextTick(() => {
+ expect(
+ vm.$el.classList.contains('issue-realtime-pre-pulse'),
+ ).toBeTruthy();
+
+ setTimeout(() => {
+ expect(
+ vm.$el.classList.contains('issue-realtime-trigger-pulse'),
+ ).toBeTruthy();
+
+ done();
+ });
+ });
+ });
+
+ it('updates page title after changing title', (done) => {
+ vm.titleHtml = 'changed';
+ vm.titleText = 'changed';
+
+ Vue.nextTick(() => {
+ expect(
+ document.querySelector('title').textContent.trim(),
+ ).toContain('changed');
+
+ done();
+ });
+ });
+});
diff --git a/spec/javascripts/issue_show/issue_title_description_spec.js b/spec/javascripts/issue_show/issue_title_description_spec.js
deleted file mode 100644
index 1ec4fe58b08..00000000000
--- a/spec/javascripts/issue_show/issue_title_description_spec.js
+++ /dev/null
@@ -1,60 +0,0 @@
-import Vue from 'vue';
-import $ from 'jquery';
-import '~/render_math';
-import '~/render_gfm';
-import issueTitleDescription from '~/issue_show/issue_title_description.vue';
-import issueShowData from './mock_data';
-
-window.$ = $;
-
-const issueShowInterceptor = data => (request, next) => {
- next(request.respondWith(JSON.stringify(data), {
- status: 200,
- headers: {
- 'POLL-INTERVAL': 1,
- },
- }));
-};
-
-describe('Issue Title', () => {
- document.body.innerHTML = '<span id="task_status"></span>';
-
- let IssueTitleDescriptionComponent;
-
- beforeEach(() => {
- IssueTitleDescriptionComponent = Vue.extend(issueTitleDescription);
- });
-
- afterEach(() => {
- Vue.http.interceptors = _.without(Vue.http.interceptors, issueShowInterceptor);
- });
-
- it('should render a title/description and update title/description on update', (done) => {
- Vue.http.interceptors.push(issueShowInterceptor(issueShowData.initialRequest));
-
- const issueShowComponent = new IssueTitleDescriptionComponent({
- propsData: {
- canUpdateIssue: '.css-stuff',
- endpoint: '/gitlab-org/gitlab-shell/issues/9/rendered_title',
- },
- }).$mount();
-
- setTimeout(() => {
- expect(document.querySelector('title').innerText).toContain('this is a title (#1)');
- expect(issueShowComponent.$el.querySelector('.title').innerHTML).toContain('<p>this is a title</p>');
- expect(issueShowComponent.$el.querySelector('.wiki').innerHTML).toContain('<p>this is a description!</p>');
- expect(issueShowComponent.$el.querySelector('.js-task-list-field').innerText).toContain('this is a description');
-
- Vue.http.interceptors.push(issueShowInterceptor(issueShowData.secondRequest));
-
- setTimeout(() => {
- expect(document.querySelector('title').innerText).toContain('2 (#1)');
- expect(issueShowComponent.$el.querySelector('.title').innerHTML).toContain('<p>2</p>');
- expect(issueShowComponent.$el.querySelector('.wiki').innerHTML).toContain('<p>42</p>');
- expect(issueShowComponent.$el.querySelector('.js-task-list-field').innerText).toContain('42');
-
- done();
- });
- });
- });
-});
diff --git a/spec/javascripts/issue_show/mock_data.js b/spec/javascripts/issue_show/mock_data.js
index ad5a7b63470..6683d581bc5 100644
--- a/spec/javascripts/issue_show/mock_data.js
+++ b/spec/javascripts/issue_show/mock_data.js
@@ -4,23 +4,23 @@ export default {
title_text: 'this is a title',
description: '<p>this is a description!</p>',
description_text: 'this is a description',
- issue_number: 1,
task_status: '2 of 4 completed',
+ updated_at: new Date().toString(),
},
secondRequest: {
title: '<p>2</p>',
title_text: '2',
description: '<p>42</p>',
description_text: '42',
- issue_number: 1,
task_status: '0 of 0 completed',
+ updated_at: new Date().toString(),
},
issueSpecRequest: {
title: '<p>this is a title</p>',
title_text: 'this is a title',
description: '<li class="task-list-item enabled"><input type="checkbox" class="task-list-item-checkbox">Task List Item</li>',
description_text: '- [ ] Task List Item',
- issue_number: 1,
task_status: '0 of 1 completed',
+ updated_at: new Date().toString(),
},
};
diff --git a/spec/javascripts/issue_spec.js b/spec/javascripts/issue_spec.js
index 763f5ee9e50..df97a100b0d 100644
--- a/spec/javascripts/issue_spec.js
+++ b/spec/javascripts/issue_spec.js
@@ -1,7 +1,7 @@
/* eslint-disable space-before-function-paren, one-var, one-var-declaration-per-line, no-use-before-define, comma-dangle, max-len */
import Issue from '~/issue';
-require('~/lib/utils/text_utility');
+import '~/lib/utils/text_utility';
describe('Issue', function() {
let $boxClosed, $boxOpen, $btnClose, $btnReopen;
diff --git a/spec/javascripts/labels_issue_sidebar_spec.js b/spec/javascripts/labels_issue_sidebar_spec.js
index 37e038c16da..c99f379b871 100644
--- a/spec/javascripts/labels_issue_sidebar_spec.js
+++ b/spec/javascripts/labels_issue_sidebar_spec.js
@@ -2,15 +2,14 @@
/* global IssuableContext */
/* global LabelsSelect */
-require('~/lib/utils/type_utility');
-require('~/gl_dropdown');
-require('select2');
-require('vendor/jquery.nicescroll');
-require('~/api');
-require('~/create_label');
-require('~/issuable_context');
-require('~/users_select');
-require('~/labels_select');
+import '~/gl_dropdown';
+import 'select2';
+import 'vendor/jquery.nicescroll';
+import '~/api';
+import '~/create_label';
+import '~/issuable_context';
+import '~/users_select';
+import '~/labels_select';
(() => {
let saveLabelCount = 0;
diff --git a/spec/javascripts/lib/utils/ajax_cache_spec.js b/spec/javascripts/lib/utils/ajax_cache_spec.js
index 7b466a11b92..e1747a82329 100644
--- a/spec/javascripts/lib/utils/ajax_cache_spec.js
+++ b/spec/javascripts/lib/utils/ajax_cache_spec.js
@@ -5,19 +5,13 @@ describe('AjaxCache', () => {
const dummyResponse = {
important: 'dummy data',
};
- let ajaxSpy = (url) => {
- expect(url).toBe(dummyEndpoint);
- const deferred = $.Deferred();
- deferred.resolve(dummyResponse);
- return deferred.promise();
- };
beforeEach(() => {
AjaxCache.internalStorage = { };
- spyOn(jQuery, 'ajax').and.callFake(url => ajaxSpy(url));
+ AjaxCache.pendingRequests = { };
});
- describe('#get', () => {
+ describe('get', () => {
it('returns undefined if cache is empty', () => {
const data = AjaxCache.get(dummyEndpoint);
@@ -41,7 +35,7 @@ describe('AjaxCache', () => {
});
});
- describe('#hasData', () => {
+ describe('hasData', () => {
it('returns false if cache is empty', () => {
expect(AjaxCache.hasData(dummyEndpoint)).toBe(false);
});
@@ -59,9 +53,9 @@ describe('AjaxCache', () => {
});
});
- describe('#purge', () => {
+ describe('remove', () => {
it('does nothing if cache is empty', () => {
- AjaxCache.purge(dummyEndpoint);
+ AjaxCache.remove(dummyEndpoint);
expect(AjaxCache.internalStorage).toEqual({ });
});
@@ -69,7 +63,7 @@ describe('AjaxCache', () => {
it('does nothing if cache contains no matching data', () => {
AjaxCache.internalStorage['not matching'] = dummyResponse;
- AjaxCache.purge(dummyEndpoint);
+ AjaxCache.remove(dummyEndpoint);
expect(AjaxCache.internalStorage['not matching']).toBe(dummyResponse);
});
@@ -77,14 +71,27 @@ describe('AjaxCache', () => {
it('removes matching data', () => {
AjaxCache.internalStorage[dummyEndpoint] = dummyResponse;
- AjaxCache.purge(dummyEndpoint);
+ AjaxCache.remove(dummyEndpoint);
expect(AjaxCache.internalStorage).toEqual({ });
});
});
- describe('#retrieve', () => {
+ describe('retrieve', () => {
+ let ajaxSpy;
+
+ beforeEach(() => {
+ spyOn(jQuery, 'ajax').and.callFake(url => ajaxSpy(url));
+ });
+
it('stores and returns data from Ajax call if cache is empty', (done) => {
+ ajaxSpy = (url) => {
+ expect(url).toBe(dummyEndpoint);
+ const deferred = $.Deferred();
+ deferred.resolve(dummyResponse);
+ return deferred.promise();
+ };
+
AjaxCache.retrieve(dummyEndpoint)
.then((data) => {
expect(data).toBe(dummyResponse);
@@ -94,6 +101,28 @@ describe('AjaxCache', () => {
.catch(fail);
});
+ it('makes no Ajax call if request is pending', () => {
+ const responseDeferred = $.Deferred();
+
+ ajaxSpy = (url) => {
+ expect(url).toBe(dummyEndpoint);
+ // neither reject nor resolve to keep request pending
+ return responseDeferred.promise();
+ };
+
+ const unexpectedResponse = data => fail(`Did not expect response: ${data}`);
+
+ AjaxCache.retrieve(dummyEndpoint)
+ .then(unexpectedResponse)
+ .catch(fail);
+
+ AjaxCache.retrieve(dummyEndpoint)
+ .then(unexpectedResponse)
+ .catch(fail);
+
+ expect($.ajax.calls.count()).toBe(1);
+ });
+
it('returns undefined if Ajax call fails and cache is empty', (done) => {
const dummyStatusText = 'exploded';
const dummyErrorMessage = 'server exploded';
diff --git a/spec/javascripts/lib/utils/common_utils_spec.js b/spec/javascripts/lib/utils/common_utils_spec.js
index 5eb147ed888..e9bffd74d90 100644
--- a/spec/javascripts/lib/utils/common_utils_spec.js
+++ b/spec/javascripts/lib/utils/common_utils_spec.js
@@ -1,6 +1,6 @@
/* eslint-disable promise/catch-or-return */
-require('~/lib/utils/common_utils');
+import '~/lib/utils/common_utils';
(() => {
describe('common_utils', () => {
@@ -41,6 +41,16 @@ require('~/lib/utils/common_utils');
const paramsArray = gl.utils.getUrlParamsArray();
expect(paramsArray[0][0] !== '?').toBe(true);
});
+
+ it('should decode params', () => {
+ history.pushState('', '', '?label_name%5B%5D=test');
+
+ expect(
+ gl.utils.getUrlParamsArray()[0],
+ ).toBe('label_name[]=test');
+
+ history.pushState('', '', '?');
+ });
});
describe('gl.utils.handleLocationHash', () => {
diff --git a/spec/javascripts/lib/utils/poll_spec.js b/spec/javascripts/lib/utils/poll_spec.js
index 918b6d32c43..22f30191ab9 100644
--- a/spec/javascripts/lib/utils/poll_spec.js
+++ b/spec/javascripts/lib/utils/poll_spec.js
@@ -1,9 +1,5 @@
-import Vue from 'vue';
-import VueResource from 'vue-resource';
import Poll from '~/lib/utils/poll';
-Vue.use(VueResource);
-
const waitForAllCallsToFinish = (service, waitForCount, successCallback) => {
const timer = () => {
setTimeout(() => {
@@ -12,45 +8,33 @@ const waitForAllCallsToFinish = (service, waitForCount, successCallback) => {
} else {
timer();
}
- }, 5);
+ }, 0);
};
timer();
};
-class ServiceMock {
- constructor(endpoint) {
- this.service = Vue.resource(endpoint);
- }
+function mockServiceCall(service, response, shouldFail = false) {
+ const action = shouldFail ? Promise.reject : Promise.resolve;
+ const responseObject = response;
+
+ if (!responseObject.headers) responseObject.headers = {};
- fetch() {
- return this.service.get();
- }
+ service.fetch.and.callFake(action.bind(Promise, responseObject));
}
describe('Poll', () => {
- let callbacks;
- let service;
+ const service = jasmine.createSpyObj('service', ['fetch']);
+ const callbacks = jasmine.createSpyObj('callbacks', ['success', 'error']);
- beforeEach(() => {
- callbacks = {
- success: () => {},
- error: () => {},
- };
-
- service = new ServiceMock('endpoint');
-
- spyOn(callbacks, 'success');
- spyOn(callbacks, 'error');
- spyOn(service, 'fetch').and.callThrough();
+ afterEach(() => {
+ callbacks.success.calls.reset();
+ callbacks.error.calls.reset();
+ service.fetch.calls.reset();
});
it('calls the success callback when no header for interval is provided', (done) => {
- const successInterceptor = (request, next) => {
- next(request.respondWith(JSON.stringify([]), { status: 200 }));
- };
-
- Vue.http.interceptors.push(successInterceptor);
+ mockServiceCall(service, { status: 200 });
new Poll({
resource: service,
@@ -63,18 +47,12 @@ describe('Poll', () => {
expect(callbacks.success).toHaveBeenCalled();
expect(callbacks.error).not.toHaveBeenCalled();
- Vue.http.interceptors = _.without(Vue.http.interceptors, successInterceptor);
-
done();
- }, 0);
+ });
});
it('calls the error callback whe the http request returns an error', (done) => {
- const errorInterceptor = (request, next) => {
- next(request.respondWith(JSON.stringify([]), { status: 500 }));
- };
-
- Vue.http.interceptors.push(errorInterceptor);
+ mockServiceCall(service, { status: 500 }, true);
new Poll({
resource: service,
@@ -86,42 +64,29 @@ describe('Poll', () => {
waitForAllCallsToFinish(service, 1, () => {
expect(callbacks.success).not.toHaveBeenCalled();
expect(callbacks.error).toHaveBeenCalled();
- Vue.http.interceptors = _.without(Vue.http.interceptors, errorInterceptor);
done();
});
});
it('should call the success callback when the interval header is -1', (done) => {
- const intervalInterceptor = (request, next) => {
- next(request.respondWith(JSON.stringify([]), { status: 200, headers: { 'poll-interval': -1 } }));
- };
-
- Vue.http.interceptors.push(intervalInterceptor);
+ mockServiceCall(service, { status: 200, headers: { 'poll-interval': -1 } });
new Poll({
resource: service,
method: 'fetch',
successCallback: callbacks.success,
errorCallback: callbacks.error,
- }).makeRequest();
-
- setTimeout(() => {
+ }).makeRequest().then(() => {
expect(callbacks.success).toHaveBeenCalled();
expect(callbacks.error).not.toHaveBeenCalled();
- Vue.http.interceptors = _.without(Vue.http.interceptors, intervalInterceptor);
-
done();
- }, 0);
+ }).catch(done.fail);
});
it('starts polling when http status is 200 and interval header is provided', (done) => {
- const pollInterceptor = (request, next) => {
- next(request.respondWith(JSON.stringify([]), { status: 200, headers: { 'poll-interval': 2 } }));
- };
-
- Vue.http.interceptors.push(pollInterceptor);
+ mockServiceCall(service, { status: 200, headers: { 'poll-interval': 1 } });
const Polling = new Poll({
resource: service,
@@ -141,19 +106,13 @@ describe('Poll', () => {
expect(callbacks.success).toHaveBeenCalled();
expect(callbacks.error).not.toHaveBeenCalled();
- Vue.http.interceptors = _.without(Vue.http.interceptors, pollInterceptor);
-
done();
});
});
describe('stop', () => {
it('stops polling when method is called', (done) => {
- const pollInterceptor = (request, next) => {
- next(request.respondWith(JSON.stringify([]), { status: 200, headers: { 'poll-interval': 2 } }));
- };
-
- Vue.http.interceptors.push(pollInterceptor);
+ mockServiceCall(service, { status: 200, headers: { 'poll-interval': 1 } });
const Polling = new Poll({
resource: service,
@@ -174,8 +133,6 @@ describe('Poll', () => {
expect(service.fetch).toHaveBeenCalledWith({ page: 1 });
expect(Polling.stop).toHaveBeenCalled();
- Vue.http.interceptors = _.without(Vue.http.interceptors, pollInterceptor);
-
done();
});
});
@@ -183,11 +140,7 @@ describe('Poll', () => {
describe('restart', () => {
it('should restart polling when its called', (done) => {
- const pollInterceptor = (request, next) => {
- next(request.respondWith(JSON.stringify([]), { status: 200, headers: { 'poll-interval': 2 } }));
- };
-
- Vue.http.interceptors.push(pollInterceptor);
+ mockServiceCall(service, { status: 200, headers: { 'poll-interval': 1 } });
const Polling = new Poll({
resource: service,
@@ -215,8 +168,6 @@ describe('Poll', () => {
expect(Polling.stop).toHaveBeenCalled();
expect(Polling.restart).toHaveBeenCalled();
- Vue.http.interceptors = _.without(Vue.http.interceptors, pollInterceptor);
-
done();
});
});
diff --git a/spec/javascripts/lib/utils/text_utility_spec.js b/spec/javascripts/lib/utils/text_utility_spec.js
index daef9b93fa5..ca1b1b7cc3c 100644
--- a/spec/javascripts/lib/utils/text_utility_spec.js
+++ b/spec/javascripts/lib/utils/text_utility_spec.js
@@ -1,4 +1,4 @@
-require('~/lib/utils/text_utility');
+import '~/lib/utils/text_utility';
describe('text_utility', () => {
describe('gl.text.getTextWidth', () => {
diff --git a/spec/javascripts/line_highlighter_spec.js b/spec/javascripts/line_highlighter_spec.js
index a1fd2d38968..aee274641e8 100644
--- a/spec/javascripts/line_highlighter_spec.js
+++ b/spec/javascripts/line_highlighter_spec.js
@@ -1,7 +1,7 @@
/* eslint-disable space-before-function-paren, no-var, no-param-reassign, quotes, prefer-template, no-else-return, new-cap, dot-notation, no-return-assign, comma-dangle, no-new, one-var, one-var-declaration-per-line, jasmine/no-spec-dupes, no-underscore-dangle, max-len */
/* global LineHighlighter */
-require('~/line_highlighter');
+import '~/line_highlighter';
(function() {
describe('LineHighlighter', function() {
@@ -58,7 +58,7 @@ require('~/line_highlighter');
return expect(func).not.toThrow();
});
});
- describe('#clickHandler', function() {
+ describe('clickHandler', function() {
it('handles clicking on a child icon element', function() {
var spy;
spy = spyOn(this["class"], 'setHash').and.callThrough();
@@ -176,7 +176,7 @@ require('~/line_highlighter');
});
});
});
- describe('#hashToRange', function() {
+ describe('hashToRange', function() {
beforeEach(function() {
return this.subject = this["class"].hashToRange;
});
@@ -190,7 +190,7 @@ require('~/line_highlighter');
return expect(this.subject('#foo')).toEqual([null, null]);
});
});
- describe('#highlightLine', function() {
+ describe('highlightLine', function() {
beforeEach(function() {
return this.subject = this["class"].highlightLine;
});
@@ -203,7 +203,7 @@ require('~/line_highlighter');
return expect($('#LC13')).toHaveClass(this.css);
});
});
- return describe('#setHash', function() {
+ return describe('setHash', function() {
beforeEach(function() {
return this.subject = this["class"].setHash;
});
diff --git a/spec/javascripts/merge_request_notes_spec.js b/spec/javascripts/merge_request_notes_spec.js
new file mode 100644
index 00000000000..e54acfa8e44
--- /dev/null
+++ b/spec/javascripts/merge_request_notes_spec.js
@@ -0,0 +1,61 @@
+/* global Notes */
+
+import 'vendor/autosize';
+import '~/gl_form';
+import '~/lib/utils/text_utility';
+import '~/render_gfm';
+import '~/render_math';
+import '~/notes';
+
+describe('Merge request notes', () => {
+ window.gon = window.gon || {};
+ window.gl = window.gl || {};
+ gl.utils = gl.utils || {};
+
+ const fixture = 'merge_requests/diff_comment.html.raw';
+ preloadFixtures(fixture);
+
+ beforeEach(() => {
+ loadFixtures(fixture);
+ gl.utils.disableButtonIfEmptyField = _.noop;
+ window.project_uploads_path = 'http://test.host/uploads';
+ $('body').data('page', 'projects:merge_requests:show');
+ window.gon.current_user_id = $('.note:last').data('author-id');
+
+ return new Notes('', []);
+ });
+
+ describe('up arrow', () => {
+ it('edits last comment when triggered in main form', () => {
+ const upArrowEvent = $.Event('keydown');
+ upArrowEvent.which = 38;
+
+ spyOnEvent('.note:last .js-note-edit', 'click');
+
+ $('.js-note-text').trigger(upArrowEvent);
+
+ expect('click').toHaveBeenTriggeredOn('.note:last .js-note-edit');
+ });
+
+ it('edits last comment in discussion when triggered in discussion form', (done) => {
+ const upArrowEvent = $.Event('keydown');
+ upArrowEvent.which = 38;
+
+ spyOnEvent('.note-discussion .js-note-edit', 'click');
+
+ $('.js-discussion-reply-button').click();
+
+ setTimeout(() => {
+ expect(
+ $('.note-discussion .js-note-text'),
+ ).toExist();
+
+ $('.note-discussion .js-note-text').trigger(upArrowEvent);
+
+ expect('click').toHaveBeenTriggeredOn('.note-discussion .js-note-edit');
+
+ done();
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/merge_request_spec.js b/spec/javascripts/merge_request_spec.js
index fd97dced870..1173fa40947 100644
--- a/spec/javascripts/merge_request_spec.js
+++ b/spec/javascripts/merge_request_spec.js
@@ -1,7 +1,7 @@
/* eslint-disable space-before-function-paren, no-return-assign */
/* global MergeRequest */
-require('~/merge_request');
+import '~/merge_request';
(function() {
describe('MergeRequest', function() {
diff --git a/spec/javascripts/merge_request_tabs_spec.js b/spec/javascripts/merge_request_tabs_spec.js
index e437333d522..3d1706aab68 100644
--- a/spec/javascripts/merge_request_tabs_spec.js
+++ b/spec/javascripts/merge_request_tabs_spec.js
@@ -1,13 +1,13 @@
/* eslint-disable no-var, comma-dangle, object-shorthand */
-require('~/merge_request_tabs');
-require('~/commit/pipelines/pipelines_bundle.js');
-require('~/breakpoints');
-require('~/lib/utils/common_utils');
-require('~/diff');
-require('~/single_file_diff');
-require('~/files_comment_button');
-require('vendor/jquery.scrollTo');
+import '~/merge_request_tabs';
+import '~/commit/pipelines/pipelines_bundle';
+import '~/breakpoints';
+import '~/lib/utils/common_utils';
+import '~/diff';
+import '~/single_file_diff';
+import '~/files_comment_button';
+import 'vendor/jquery.scrollTo';
(function () {
// TODO: remove this hack!
@@ -47,7 +47,7 @@ require('vendor/jquery.scrollTo');
this.class.destroyPipelinesView();
});
- describe('#activateTab', function () {
+ describe('activateTab', function () {
beforeEach(function () {
spyOn($, 'ajax').and.callFake(function () {});
loadFixtures('merge_requests/merge_request_with_task_list.html.raw');
@@ -71,7 +71,7 @@ require('vendor/jquery.scrollTo');
});
});
- describe('#opensInNewTab', function () {
+ describe('opensInNewTab', function () {
var tabUrl;
var windowTarget = '_blank';
@@ -152,7 +152,7 @@ require('vendor/jquery.scrollTo');
});
});
- describe('#setCurrentAction', function () {
+ describe('setCurrentAction', function () {
beforeEach(function () {
spyOn($, 'ajax').and.callFake(function () {});
this.subject = this.class.setCurrentAction;
@@ -221,7 +221,7 @@ require('vendor/jquery.scrollTo');
});
});
- describe('#tabShown', () => {
+ describe('tabShown', () => {
beforeEach(function () {
spyOn($, 'ajax').and.callFake(function (options) {
options.success({ html: '' });
@@ -281,7 +281,7 @@ require('vendor/jquery.scrollTo');
});
});
- describe('#loadDiff', function () {
+ describe('loadDiff', function () {
it('requires an absolute pathname', function () {
spyOn($, 'ajax').and.callFake(function (options) {
expect(options.url).toEqual('/foo/bar/merge_requests/1/diffs.json');
diff --git a/spec/javascripts/merge_request_widget_spec.js b/spec/javascripts/merge_request_widget_spec.js
deleted file mode 100644
index 88dae8c3e06..00000000000
--- a/spec/javascripts/merge_request_widget_spec.js
+++ /dev/null
@@ -1,199 +0,0 @@
-/* eslint-disable space-before-function-paren, quotes, comma-dangle, dot-notation, quote-props, no-var, max-len */
-
-require('~/merge_request_widget');
-require('~/smart_interval');
-require('~/lib/utils/datetime_utility');
-
-(function() {
- describe('MergeRequestWidget', function() {
- beforeEach(function() {
- window.notifyPermissions = function() {};
- window.notify = function() {};
- this.opts = {
- ci_status_url: "http://sampledomain.local/ci/getstatus",
- ci_environments_status_url: "http://sampledomain.local/ci/getenvironmentsstatus",
- ci_status: "",
- ci_message: {
- normal: "Build {{status}} for \"{{title}}\"",
- preparing: "{{status}} build for \"{{title}}\""
- },
- ci_title: {
- preparing: "{{status}} build",
- normal: "Build {{status}}"
- },
- gitlab_icon: "gitlab_logo.png",
- ci_pipeline: 80,
- ci_sha: "12a34bc5",
- builds_path: "http://sampledomain.local/sampleBuildsPath",
- commits_path: "http://sampledomain.local/commits",
- pipeline_path: "http://sampledomain.local/pipelines"
- };
- this["class"] = new window.gl.MergeRequestWidget(this.opts);
- });
-
- describe('getCIEnvironmentsStatus', function() {
- beforeEach(function() {
- this.ciEnvironmentsStatusData = [{
- created_at: '2016-09-12T13:38:30.636Z',
- environment_id: 1,
- environment_name: 'env1',
- external_url: 'https://test-url.com',
- external_url_formatted: 'test-url.com'
- }];
-
- spyOn(jQuery, 'getJSON').and.callFake(function(req, cb) {
- cb(this.ciEnvironmentsStatusData);
- }.bind(this));
- });
-
- it('should call renderEnvironments when the environments property is set', function() {
- const spy = spyOn(this.class, 'renderEnvironments').and.stub();
- this.class.getCIEnvironmentsStatus();
- expect(spy).toHaveBeenCalledWith(this.ciEnvironmentsStatusData);
- });
-
- it('should not call renderEnvironments when the environments property is not set', function() {
- this.ciEnvironmentsStatusData = null;
- const spy = spyOn(this.class, 'renderEnvironments').and.stub();
- this.class.getCIEnvironmentsStatus();
- expect(spy).not.toHaveBeenCalled();
- });
- });
-
- describe('renderEnvironments', function() {
- describe('should render correct timeago', function() {
- beforeEach(function() {
- this.environments = [{
- id: 'test-environment-id',
- url: 'testurl',
- deployed_at: new Date().toISOString(),
- deployed_at_formatted: true
- }];
- });
-
- function getTimeagoText(template) {
- var el = document.createElement('html');
- el.innerHTML = template;
- return el.querySelector('.js-environment-timeago').innerText.trim();
- }
-
- it('should render less than a minute ago text', function() {
- spyOn(this.class.$widgetBody, 'before').and.callFake(function(template) {
- expect(getTimeagoText(template)).toBe('less than a minute ago.');
- });
-
- this.class.renderEnvironments(this.environments);
- });
-
- it('should render about an hour ago text', function() {
- var oneHourAgo = new Date();
- oneHourAgo.setHours(oneHourAgo.getHours() - 1);
-
- this.environments[0].deployed_at = oneHourAgo.toISOString();
- spyOn(this.class.$widgetBody, 'before').and.callFake(function(template) {
- expect(getTimeagoText(template)).toBe('about an hour ago.');
- });
-
- this.class.renderEnvironments(this.environments);
- });
-
- it('should render about 2 hours ago text', function() {
- var twoHoursAgo = new Date();
- twoHoursAgo.setHours(twoHoursAgo.getHours() - 2);
-
- this.environments[0].deployed_at = twoHoursAgo.toISOString();
- spyOn(this.class.$widgetBody, 'before').and.callFake(function(template) {
- expect(getTimeagoText(template)).toBe('about 2 hours ago.');
- });
-
- this.class.renderEnvironments(this.environments);
- });
- });
- });
-
- describe('mergeInProgress', function() {
- it('should display error with h4 tag', function() {
- spyOn(this.class.$widgetBody, 'html').and.callFake(function(html) {
- expect(html).toBe('<h4>Sorry, something went wrong.</h4>');
- });
- spyOn($, 'ajax').and.callFake(function(e) {
- e.success({ merge_error: 'Sorry, something went wrong.' });
- });
- this.class.mergeInProgress(null);
- });
- });
-
- describe('getCIStatus', function() {
- beforeEach(function() {
- this.ciStatusData = {
- "title": "Sample MR title",
- "pipeline": 80,
- "sha": "12a34bc5",
- "status": "success",
- "coverage": 98
- };
-
- spyOn(jQuery, 'getJSON').and.callFake((function(_this) {
- return function(req, cb) {
- return cb(_this.ciStatusData);
- };
- })(this));
- });
- it('should call showCIStatus even if a notification should not be displayed', function() {
- var spy;
- spy = spyOn(this["class"], 'showCIStatus').and.stub();
- spyOn(gl.utils, 'setCiStatusFavicon').and.callFake(() => {});
- this["class"].getCIStatus(false);
- return expect(spy).toHaveBeenCalledWith(this.ciStatusData.status);
- });
- it('should call showCIStatus when a notification should be displayed', function() {
- var spy;
- spy = spyOn(this["class"], 'showCIStatus').and.stub();
- spyOn(gl.utils, 'setCiStatusFavicon').and.callFake(() => {});
- this["class"].getCIStatus(true);
- return expect(spy).toHaveBeenCalledWith(this.ciStatusData.status);
- });
- it('should call showCICoverage when the coverage rate is set', function() {
- var spy;
- spy = spyOn(this["class"], 'showCICoverage').and.stub();
- spyOn(gl.utils, 'setCiStatusFavicon').and.callFake(() => {});
- this["class"].getCIStatus(false);
- return expect(spy).toHaveBeenCalledWith(this.ciStatusData.coverage);
- });
- it('should not call showCICoverage when the coverage rate is not set', function() {
- var spy;
- this.ciStatusData.coverage = null;
- spy = spyOn(this["class"], 'showCICoverage').and.stub();
- spyOn(gl.utils, 'setCiStatusFavicon').and.callFake(() => {});
- this["class"].getCIStatus(false);
- return expect(spy).not.toHaveBeenCalled();
- });
- it('should not display a notification on the first check after the widget has been created', function() {
- var spy;
- spy = spyOn(window, 'notify');
- spyOn(gl.utils, 'setCiStatusFavicon').and.callFake(() => {});
- this["class"] = new window.gl.MergeRequestWidget(this.opts);
- this["class"].getCIStatus(true);
- return expect(spy).not.toHaveBeenCalled();
- });
- it('should update the pipeline URL when the pipeline changes', function() {
- var spy;
- spy = spyOn(this["class"], 'updatePipelineUrls').and.stub();
- spyOn(gl.utils, 'setCiStatusFavicon').and.callFake(() => {});
- this["class"].getCIStatus(false);
- this.ciStatusData.pipeline += 1;
- this["class"].getCIStatus(false);
- return expect(spy).toHaveBeenCalled();
- });
- it('should update the commit URL when the sha changes', function() {
- var spy;
- spy = spyOn(this["class"], 'updateCommitUrls').and.stub();
- spyOn(gl.utils, 'setCiStatusFavicon').and.callFake(() => {});
- this["class"].getCIStatus(false);
- this.ciStatusData.sha = "9b50b99a";
- this["class"].getCIStatus(false);
- return expect(spy).toHaveBeenCalled();
- });
- });
- });
-}).call(window);
diff --git a/spec/javascripts/merged_buttons_spec.js b/spec/javascripts/merged_buttons_spec.js
deleted file mode 100644
index b5c5e60dd97..00000000000
--- a/spec/javascripts/merged_buttons_spec.js
+++ /dev/null
@@ -1,44 +0,0 @@
-/* global MergedButtons */
-
-import '~/merged_buttons';
-
-describe('MergedButtons', () => {
- const fixturesPath = 'merge_requests/merged_merge_request.html.raw';
- preloadFixtures(fixturesPath);
-
- beforeEach(() => {
- loadFixtures(fixturesPath);
- this.mergedButtons = new MergedButtons();
- this.$removeBranchWidget = $('.remove_source_branch_widget:not(.failed)');
- this.$removeBranchProgress = $('.remove_source_branch_in_progress');
- this.$removeBranchFailed = $('.remove_source_branch_widget.failed');
- this.$removeBranchButton = $('.remove_source_branch');
- });
-
- describe('removeSourceBranch', () => {
- it('shows loader', () => {
- $('.remove_source_branch').trigger('click');
- expect(this.$removeBranchProgress).toBeVisible();
- expect(this.$removeBranchWidget).not.toBeVisible();
- });
- });
-
- describe('removeBranchSuccess', () => {
- it('refreshes page when branch removed', () => {
- spyOn(gl.utils, 'refreshCurrentPage').and.stub();
- const response = { status: 200 };
- this.$removeBranchButton.trigger('ajax:success', response, 'xhr');
- expect(gl.utils.refreshCurrentPage).toHaveBeenCalled();
- });
- });
-
- describe('removeBranchError', () => {
- it('shows error message', () => {
- const response = { status: 500 };
- this.$removeBranchButton.trigger('ajax:error', response, 'xhr');
- expect(this.$removeBranchFailed).toBeVisible();
- expect(this.$removeBranchProgress).not.toBeVisible();
- expect(this.$removeBranchWidget).not.toBeVisible();
- });
- });
-});
diff --git a/spec/javascripts/new_branch_spec.js b/spec/javascripts/new_branch_spec.js
index 90a429beeca..c57f44dae17 100644
--- a/spec/javascripts/new_branch_spec.js
+++ b/spec/javascripts/new_branch_spec.js
@@ -1,7 +1,7 @@
/* eslint-disable space-before-function-paren, one-var, no-var, one-var-declaration-per-line, no-return-assign, quotes, max-len */
/* global NewBranchForm */
-require('~/new_branch_form');
+import '~/new_branch_form';
(function() {
describe('Branch', function() {
diff --git a/spec/javascripts/notes_spec.js b/spec/javascripts/notes_spec.js
index cfd599f793e..025f08ee332 100644
--- a/spec/javascripts/notes_spec.js
+++ b/spec/javascripts/notes_spec.js
@@ -14,6 +14,7 @@ import '~/notes';
gl.utils = gl.utils || {};
describe('Notes', function() {
+ const FLASH_TYPE_ALERT = 'alert';
var commentsTemplate = 'issues/issue_with_comment.html.raw';
preloadFixtures(commentsTemplate);
@@ -78,6 +79,47 @@ import '~/notes';
});
});
+ describe('updateNote', () => {
+ let sampleComment;
+ let noteEntity;
+ let $form;
+ let $notesContainer;
+
+ beforeEach(() => {
+ this.notes = new Notes('', []);
+ window.gon.current_username = 'root';
+ window.gon.current_user_fullname = 'Administrator';
+ sampleComment = 'foo';
+ noteEntity = {
+ id: 1234,
+ html: `<li class="note note-row-1234 timeline-entry" id="note_1234">
+ <div class="note-text">${sampleComment}</div>
+ </li>`,
+ note: sampleComment,
+ valid: true
+ };
+ $form = $('form.js-main-target-form');
+ $notesContainer = $('ul.main-notes-list');
+ $form.find('textarea.js-note-text').val(sampleComment);
+ });
+
+ it('updates note and resets edit form', () => {
+ const deferred = $.Deferred();
+ spyOn($, 'ajax').and.returnValue(deferred.promise());
+ spyOn(this.notes, 'revertNoteEditForm');
+
+ $('.js-comment-button').click();
+ deferred.resolve(noteEntity);
+
+ const $targetNote = $notesContainer.find(`#note_${noteEntity.id}`);
+ const updatedNote = Object.assign({}, noteEntity);
+ updatedNote.note = 'bar';
+ this.notes.updateNote(updatedNote, $targetNote);
+
+ expect(this.notes.revertNoteEditForm).toHaveBeenCalledWith($targetNote);
+ });
+ });
+
describe('renderNote', () => {
let notes;
let note;
@@ -97,9 +139,8 @@ import '~/notes';
]);
notes = jasmine.createSpyObj('notes', [
+ 'setupNewNote',
'refresh',
- 'isNewNote',
- 'isUpdatedNote',
'collapseLongCommitList',
'updateNotesCount',
'putConflictEditWarningInPlace'
@@ -109,13 +150,15 @@ import '~/notes';
notes.updatedNotesTrackingMap = {};
spyOn(gl.utils, 'localTimeAgo');
+ spyOn(Notes, 'isNewNote').and.callThrough();
+ spyOn(Notes, 'isUpdatedNote').and.callThrough();
spyOn(Notes, 'animateAppendNote').and.callThrough();
spyOn(Notes, 'animateUpdateNote').and.callThrough();
});
describe('when adding note', () => {
it('should call .animateAppendNote', () => {
- notes.isNewNote.and.returnValue(true);
+ Notes.isNewNote.and.returnValue(true);
Notes.prototype.renderNote.call(notes, note, null, $notesList);
expect(Notes.animateAppendNote).toHaveBeenCalledWith(note.html, $notesList);
@@ -124,7 +167,8 @@ import '~/notes';
describe('when note was edited', () => {
it('should call .animateUpdateNote', () => {
- notes.isUpdatedNote.and.returnValue(true);
+ Notes.isNewNote.and.returnValue(false);
+ Notes.isUpdatedNote.and.returnValue(true);
const $note = $('<div>');
$notesList.find.and.returnValue($note);
Notes.prototype.renderNote.call(notes, note, null, $notesList);
@@ -134,7 +178,8 @@ import '~/notes';
describe('while editing', () => {
it('should update textarea if nothing has been touched', () => {
- notes.isUpdatedNote.and.returnValue(true);
+ Notes.isNewNote.and.returnValue(false);
+ Notes.isUpdatedNote.and.returnValue(true);
const $note = $(`<div class="is-editing">
<div class="original-note-content">initial</div>
<textarea class="js-note-text">initial</textarea>
@@ -146,7 +191,8 @@ import '~/notes';
});
it('should call .putConflictEditWarningInPlace', () => {
- notes.isUpdatedNote.and.returnValue(true);
+ Notes.isNewNote.and.returnValue(false);
+ Notes.isUpdatedNote.and.returnValue(true);
const $note = $(`<div class="is-editing">
<div class="original-note-content">initial</div>
<textarea class="js-note-text">different</textarea>
@@ -160,6 +206,47 @@ import '~/notes';
});
});
+ describe('isUpdatedNote', () => {
+ it('should consider same note text as the same', () => {
+ const result = Notes.isUpdatedNote(
+ {
+ note: 'initial'
+ },
+ $(`<div>
+ <div class="original-note-content">initial</div>
+ </div>`)
+ );
+
+ expect(result).toEqual(false);
+ });
+
+ it('should consider same note with trailing newline as the same', () => {
+ const result = Notes.isUpdatedNote(
+ {
+ note: 'initial\n'
+ },
+ $(`<div>
+ <div class="original-note-content">initial\n</div>
+ </div>`)
+ );
+
+ expect(result).toEqual(false);
+ });
+
+ it('should consider different notes as different', () => {
+ const result = Notes.isUpdatedNote(
+ {
+ note: 'foo'
+ },
+ $(`<div>
+ <div class="original-note-content">bar</div>
+ </div>`)
+ );
+
+ expect(result).toEqual(true);
+ });
+ });
+
describe('renderDiscussionNote', () => {
let discussionContainer;
let note;
@@ -179,15 +266,15 @@ import '~/notes';
row = jasmine.createSpyObj('row', ['prevAll', 'first', 'find']);
notes = jasmine.createSpyObj('notes', [
- 'isNewNote',
'isParallelView',
'updateNotesCount',
]);
notes.note_ids = [];
spyOn(gl.utils, 'localTimeAgo');
+ spyOn(Notes, 'isNewNote');
spyOn(Notes, 'animateAppendNote');
- notes.isNewNote.and.returnValue(true);
+ Notes.isNewNote.and.returnValue(true);
notes.isParallelView.and.returnValue(false);
row.prevAll.and.returnValue(row);
row.first.and.returnValue(row);
@@ -376,13 +463,20 @@ import '~/notes';
this.notes = new Notes('', []);
});
- it('should return true when comment has slash commands', () => {
- const sampleComment = '/wip /milestone %1.0 /merge /unassign Merging this';
+ it('should return true when comment begins with a slash command', () => {
+ const sampleComment = '/wip\n/milestone %1.0\n/merge\n/unassign Merging this';
const hasSlashCommands = this.notes.hasSlashCommands(sampleComment);
expect(hasSlashCommands).toBeTruthy();
});
+ it('should return false when comment does NOT begin with a slash command', () => {
+ const sampleComment = 'Hey, /unassign Merging this';
+ const hasSlashCommands = this.notes.hasSlashCommands(sampleComment);
+
+ expect(hasSlashCommands).toBeFalsy();
+ });
+
it('should return false when comment does NOT have any slash commands', () => {
const sampleComment = 'Looking good, Awesome!';
const hasSlashCommands = this.notes.hasSlashCommands(sampleComment);
@@ -392,14 +486,28 @@ import '~/notes';
});
describe('stripSlashCommands', () => {
- const REGEX_SLASH_COMMANDS = /\/\w+/g;
+ it('should strip slash commands from the comment which begins with a slash command', () => {
+ this.notes = new Notes();
+ const sampleComment = '/wip\n/milestone %1.0\n/merge\n/unassign Merging this';
+ const stripedComment = this.notes.stripSlashCommands(sampleComment);
+
+ expect(stripedComment).toBe('');
+ });
- it('should strip slash commands from the comment', () => {
+    it('should strip slash commands from the comment but leave the plain comment if it is present', () => {
this.notes = new Notes();
- const sampleComment = '/wip /milestone %1.0 /merge /unassign Merging this';
+ const sampleComment = '/wip\n/milestone %1.0\n/merge\n/unassign\nMerging this';
const stripedComment = this.notes.stripSlashCommands(sampleComment);
- expect(REGEX_SLASH_COMMANDS.test(stripedComment)).toBeFalsy();
+ expect(stripedComment).toBe('Merging this');
+ });
+
+ it('should NOT strip string that has slashes within', () => {
+ this.notes = new Notes();
+ const sampleComment = 'http://127.0.0.1:3000/root/gitlab-shell/issues/1';
+ const stripedComment = this.notes.stripSlashCommands(sampleComment);
+
+ expect(stripedComment).toBe(sampleComment);
});
});
@@ -411,6 +519,22 @@ import '~/notes';
beforeEach(() => {
this.notes = new Notes('', []);
+ spyOn(_, 'escape').and.callFake((comment) => {
+ const escapedString = comment.replace(/["&'<>]/g, (a) => {
+ const escapedToken = {
+ '&': '&amp;',
+ '<': '&lt;',
+ '>': '&gt;',
+ '"': '&quot;',
+ "'": '&#x27;',
+ '`': '&#x60;'
+ }[a];
+
+ return escapedToken;
+ });
+
+ return escapedString;
+ });
});
it('should return constructed placeholder element for regular note based on form contents', () => {
@@ -431,7 +555,21 @@ import '~/notes';
expect($tempNote.find('.timeline-content').hasClass('discussion')).toBeFalsy();
expect($tempNoteHeader.find('.hidden-xs').text().trim()).toEqual(currentUserFullname);
expect($tempNoteHeader.find('.note-headline-light').text().trim()).toEqual(`@${currentUsername}`);
- expect($tempNote.find('.note-body .note-text').text().trim()).toEqual(sampleComment);
+ expect($tempNote.find('.note-body .note-text p').text().trim()).toEqual(sampleComment);
+ });
+
+ it('should escape HTML characters from note based on form contents', () => {
+ const commentWithHtml = '<script>alert("Boom!");</script>';
+ const $tempNote = this.notes.createPlaceholderNote({
+ formContent: commentWithHtml,
+ uniqueId,
+ isDiscussionNote: false,
+ currentUsername,
+ currentUserFullname
+ });
+
+ expect(_.escape).toHaveBeenCalledWith(commentWithHtml);
+ expect($tempNote.find('.note-body .note-text p').html()).toEqual('&lt;script&gt;alert("Boom!");&lt;/script&gt;');
});
it('should return constructed placeholder element for discussion note based on form contents', () => {
@@ -447,5 +585,33 @@ import '~/notes';
expect($tempNote.find('.timeline-content').hasClass('discussion')).toBeTruthy();
});
});
+
+ describe('appendFlash', () => {
+ beforeEach(() => {
+ this.notes = new Notes();
+ });
+
+ it('shows a flash message', () => {
+ this.notes.addFlash('Error message', FLASH_TYPE_ALERT, this.notes.parentTimeline);
+
+ expect(document.querySelectorAll('.flash-alert').length).toBe(1);
+ });
+ });
+
+ describe('clearFlash', () => {
+ beforeEach(() => {
+ $(document).off('ajax:success');
+ this.notes = new Notes();
+ });
+
+ it('removes all the associated flash messages', () => {
+ this.notes.addFlash('Error message 1', FLASH_TYPE_ALERT, this.notes.parentTimeline);
+ this.notes.addFlash('Error message 2', FLASH_TYPE_ALERT, this.notes.parentTimeline);
+
+ this.notes.clearFlash();
+
+ expect(document.querySelectorAll('.flash-alert').length).toBe(0);
+ });
+ });
});
}).call(window);
diff --git a/spec/javascripts/pager_spec.js b/spec/javascripts/pager_spec.js
index d966226909b..1d3e1263371 100644
--- a/spec/javascripts/pager_spec.js
+++ b/spec/javascripts/pager_spec.js
@@ -1,6 +1,6 @@
/* global fixture */
-require('~/pager');
+import '~/pager';
describe('pager', () => {
const Pager = window.Pager;
diff --git a/spec/javascripts/pipeline_schedules/interval_pattern_input_spec.js b/spec/javascripts/pipeline_schedules/interval_pattern_input_spec.js
new file mode 100644
index 00000000000..845b371d90c
--- /dev/null
+++ b/spec/javascripts/pipeline_schedules/interval_pattern_input_spec.js
@@ -0,0 +1,175 @@
+import Vue from 'vue';
+import IntervalPatternInput from '~/pipeline_schedules/components/interval_pattern_input';
+
+const IntervalPatternInputComponent = Vue.extend(IntervalPatternInput);
+const inputNameAttribute = 'schedule[cron]';
+
+const cronIntervalPresets = {
+ everyDay: '0 4 * * *',
+ everyWeek: '0 4 * * 0',
+ everyMonth: '0 4 1 * *',
+};
+
+window.gl = window.gl || {};
+
+window.gl.pipelineScheduleFieldErrors = {
+ updateFormValidityState: () => {},
+};
+
+describe('Interval Pattern Input Component', function () {
+ describe('when prop initialCronInterval is passed (edit)', function () {
+ describe('when prop initialCronInterval is custom', function () {
+ beforeEach(function () {
+ this.initialCronInterval = '1 2 3 4 5';
+ this.intervalPatternComponent = new IntervalPatternInputComponent({
+ propsData: {
+ initialCronInterval: this.initialCronInterval,
+ },
+ }).$mount();
+ });
+
+ it('is initialized as a Vue component', function () {
+ expect(this.intervalPatternComponent).toBeDefined();
+ });
+
+ it('prop initialCronInterval is set', function () {
+ expect(this.intervalPatternComponent.initialCronInterval).toBe(this.initialCronInterval);
+ });
+
+ it('sets isEditable to true', function (done) {
+ Vue.nextTick(() => {
+ expect(this.intervalPatternComponent.isEditable).toBe(true);
+ done();
+ });
+ });
+ });
+
+ describe('when prop initialCronInterval is preset', function () {
+ beforeEach(function () {
+ this.intervalPatternComponent = new IntervalPatternInputComponent({
+ propsData: {
+ inputNameAttribute,
+ initialCronInterval: '0 4 * * *',
+ },
+ }).$mount();
+ });
+
+ it('is initialized as a Vue component', function () {
+ expect(this.intervalPatternComponent).toBeDefined();
+ });
+
+ it('sets isEditable to false', function (done) {
+ Vue.nextTick(() => {
+ expect(this.intervalPatternComponent.isEditable).toBe(false);
+ done();
+ });
+ });
+ });
+ });
+
+ describe('when prop initialCronInterval is not passed (new)', function () {
+ beforeEach(function () {
+ this.intervalPatternComponent = new IntervalPatternInputComponent({
+ propsData: {
+ inputNameAttribute,
+ },
+ }).$mount();
+ });
+
+ it('is initialized as a Vue component', function () {
+ expect(this.intervalPatternComponent).toBeDefined();
+ });
+
+ it('prop initialCronInterval is set', function () {
+ const defaultInitialCronInterval = '';
+ expect(this.intervalPatternComponent.initialCronInterval).toBe(defaultInitialCronInterval);
+ });
+
+ it('sets isEditable to true', function (done) {
+ Vue.nextTick(() => {
+ expect(this.intervalPatternComponent.isEditable).toBe(true);
+ done();
+ });
+ });
+ });
+
+ describe('User Actions', function () {
+ beforeEach(function () {
+ // For an unknown reason, Phantom.js doesn't trigger click events
+ // on radio buttons in a way Vue can register. So, we have to mount
+ // to a fixture.
+ setFixtures('<div id="my-mount"></div>');
+
+ this.initialCronInterval = '1 2 3 4 5';
+ this.intervalPatternComponent = new IntervalPatternInputComponent({
+ propsData: {
+ initialCronInterval: this.initialCronInterval,
+ },
+ }).$mount('#my-mount');
+ });
+
+ it('cronInterval is updated when everyday preset interval is selected', function (done) {
+ this.intervalPatternComponent.$el.querySelector('#every-day').click();
+
+ Vue.nextTick(() => {
+ expect(this.intervalPatternComponent.cronInterval).toBe(cronIntervalPresets.everyDay);
+ expect(this.intervalPatternComponent.$el.querySelector('.cron-interval-input').value).toBe(cronIntervalPresets.everyDay);
+ done();
+ });
+ });
+
+ it('cronInterval is updated when everyweek preset interval is selected', function (done) {
+ this.intervalPatternComponent.$el.querySelector('#every-week').click();
+
+ Vue.nextTick(() => {
+ expect(this.intervalPatternComponent.cronInterval).toBe(cronIntervalPresets.everyWeek);
+ expect(this.intervalPatternComponent.$el.querySelector('.cron-interval-input').value).toBe(cronIntervalPresets.everyWeek);
+
+ done();
+ });
+ });
+
+ it('cronInterval is updated when everymonth preset interval is selected', function (done) {
+ this.intervalPatternComponent.$el.querySelector('#every-month').click();
+
+ Vue.nextTick(() => {
+ expect(this.intervalPatternComponent.cronInterval).toBe(cronIntervalPresets.everyMonth);
+ expect(this.intervalPatternComponent.$el.querySelector('.cron-interval-input').value).toBe(cronIntervalPresets.everyMonth);
+ done();
+ });
+ });
+
+ it('only a space is added to cronInterval (trimmed later) when custom radio is selected', function (done) {
+ this.intervalPatternComponent.$el.querySelector('#every-month').click();
+ this.intervalPatternComponent.$el.querySelector('#custom').click();
+
+ Vue.nextTick(() => {
+ const intervalWithSpaceAppended = `${cronIntervalPresets.everyMonth} `;
+ expect(this.intervalPatternComponent.cronInterval).toBe(intervalWithSpaceAppended);
+ expect(this.intervalPatternComponent.$el.querySelector('.cron-interval-input').value).toBe(intervalWithSpaceAppended);
+ done();
+ });
+ });
+
+ it('text input is disabled when preset interval is selected', function (done) {
+ this.intervalPatternComponent.$el.querySelector('#every-month').click();
+
+ Vue.nextTick(() => {
+ expect(this.intervalPatternComponent.isEditable).toBe(false);
+ expect(this.intervalPatternComponent.$el.querySelector('.cron-interval-input').disabled).toBe(true);
+ done();
+ });
+ });
+
+ it('text input is enabled when custom is selected', function (done) {
+ this.intervalPatternComponent.$el.querySelector('#every-month').click();
+ this.intervalPatternComponent.$el.querySelector('#custom').click();
+
+ Vue.nextTick(() => {
+ expect(this.intervalPatternComponent.isEditable).toBe(true);
+ expect(this.intervalPatternComponent.$el.querySelector('.cron-interval-input').disabled).toBe(false);
+ done();
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/pipeline_schedules/pipeline_schedule_callout_spec.js b/spec/javascripts/pipeline_schedules/pipeline_schedule_callout_spec.js
new file mode 100644
index 00000000000..6120d224ac0
--- /dev/null
+++ b/spec/javascripts/pipeline_schedules/pipeline_schedule_callout_spec.js
@@ -0,0 +1,106 @@
+import Vue from 'vue';
+import Cookies from 'js-cookie';
+import PipelineSchedulesCallout from '~/pipeline_schedules/components/pipeline_schedules_callout';
+
+const PipelineSchedulesCalloutComponent = Vue.extend(PipelineSchedulesCallout);
+const cookieKey = 'pipeline_schedules_callout_dismissed';
+const docsUrl = 'help/ci/scheduled_pipelines';
+
+describe('Pipeline Schedule Callout', () => {
+ beforeEach(() => {
+ setFixtures(`
+ <div id='pipeline-schedules-callout' data-docs-url=${docsUrl}></div>
+ `);
+ });
+
+ describe('independent of cookies', () => {
+ beforeEach(() => {
+ this.calloutComponent = new PipelineSchedulesCalloutComponent().$mount();
+ });
+
+ it('the component can be initialized', () => {
+ expect(this.calloutComponent).toBeDefined();
+ });
+
+ it('correctly sets illustrationSvg', () => {
+ expect(this.calloutComponent.illustrationSvg).toContain('<svg');
+ });
+
+ it('correctly sets docsUrl', () => {
+ expect(this.calloutComponent.docsUrl).toContain(docsUrl);
+ });
+ });
+
+ describe(`when ${cookieKey} cookie is set`, () => {
+ beforeEach(() => {
+ Cookies.set(cookieKey, true);
+ this.calloutComponent = new PipelineSchedulesCalloutComponent().$mount();
+ });
+
+ it('correctly sets calloutDismissed to true', () => {
+ expect(this.calloutComponent.calloutDismissed).toBe(true);
+ });
+
+ it('does not render the callout', () => {
+ expect(this.calloutComponent.$el.childNodes.length).toBe(0);
+ });
+ });
+
+ describe('when cookie is not set', () => {
+ beforeEach(() => {
+ Cookies.remove(cookieKey);
+ this.calloutComponent = new PipelineSchedulesCalloutComponent().$mount();
+ });
+
+ it('correctly sets calloutDismissed to false', () => {
+ expect(this.calloutComponent.calloutDismissed).toBe(false);
+ });
+
+ it('renders the callout container', () => {
+ expect(this.calloutComponent.$el.querySelector('.bordered-box')).not.toBeNull();
+ });
+
+ it('renders the callout svg', () => {
+ expect(this.calloutComponent.$el.outerHTML).toContain('<svg');
+ });
+
+ it('renders the callout title', () => {
+ expect(this.calloutComponent.$el.outerHTML).toContain('Scheduling Pipelines');
+ });
+
+ it('renders the callout text', () => {
+ expect(this.calloutComponent.$el.outerHTML).toContain('runs pipelines in the future');
+ });
+
+ it('renders the documentation url', () => {
+ expect(this.calloutComponent.$el.outerHTML).toContain(docsUrl);
+ });
+
+ it('updates calloutDismissed when close button is clicked', (done) => {
+ this.calloutComponent.$el.querySelector('#dismiss-callout-btn').click();
+
+ Vue.nextTick(() => {
+ expect(this.calloutComponent.calloutDismissed).toBe(true);
+ done();
+ });
+ });
+
+ it('#dismissCallout updates calloutDismissed', (done) => {
+ this.calloutComponent.dismissCallout();
+
+ Vue.nextTick(() => {
+ expect(this.calloutComponent.calloutDismissed).toBe(true);
+ done();
+ });
+ });
+
+ it('is hidden when close button is clicked', (done) => {
+ this.calloutComponent.$el.querySelector('#dismiss-callout-btn').click();
+
+ Vue.nextTick(() => {
+ expect(this.calloutComponent.$el.childNodes.length).toBe(0);
+ done();
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/pipelines/graph/action_component_spec.js b/spec/javascripts/pipelines/graph/action_component_spec.js
new file mode 100644
index 00000000000..f033956c071
--- /dev/null
+++ b/spec/javascripts/pipelines/graph/action_component_spec.js
@@ -0,0 +1,40 @@
+import Vue from 'vue';
+import actionComponent from '~/pipelines/components/graph/action_component.vue';
+
+describe('pipeline graph action component', () => {
+ let component;
+
+ beforeEach(() => {
+ const ActionComponent = Vue.extend(actionComponent);
+ component = new ActionComponent({
+ propsData: {
+ tooltipText: 'bar',
+ link: 'foo',
+ actionMethod: 'post',
+ actionIcon: 'icon_action_cancel',
+ },
+ }).$mount();
+ });
+
+ it('should render a link', () => {
+ expect(component.$el.getAttribute('href')).toEqual('foo');
+ });
+
+ it('should render the provided title as a bootstrap tooltip', () => {
+ expect(component.$el.getAttribute('data-original-title')).toEqual('bar');
+ });
+
+ it('should update bootstrap tooltip when title changes', (done) => {
+ component.tooltipText = 'changed';
+
+ Vue.nextTick(() => {
+ expect(component.$el.getAttribute('data-original-title')).toBe('changed');
+ done();
+ });
+ });
+
+ it('should render an svg', () => {
+ expect(component.$el.querySelector('.ci-action-icon-wrapper')).toBeDefined();
+ expect(component.$el.querySelector('svg')).toBeDefined();
+ });
+});
diff --git a/spec/javascripts/pipelines/graph/dropdown_action_component_spec.js b/spec/javascripts/pipelines/graph/dropdown_action_component_spec.js
new file mode 100644
index 00000000000..14ff1b0d25c
--- /dev/null
+++ b/spec/javascripts/pipelines/graph/dropdown_action_component_spec.js
@@ -0,0 +1,30 @@
+import Vue from 'vue';
+import dropdownActionComponent from '~/pipelines/components/graph/dropdown_action_component.vue';
+
+describe('action component', () => {
+ let component;
+
+ beforeEach(() => {
+ const DropdownActionComponent = Vue.extend(dropdownActionComponent);
+ component = new DropdownActionComponent({
+ propsData: {
+ tooltipText: 'bar',
+ link: 'foo',
+ actionMethod: 'post',
+ actionIcon: 'icon_action_cancel',
+ },
+ }).$mount();
+ });
+
+ it('should render a link', () => {
+ expect(component.$el.getAttribute('href')).toEqual('foo');
+ });
+
+ it('should render the provided title as a bootstrap tooltip', () => {
+ expect(component.$el.getAttribute('data-original-title')).toEqual('bar');
+ });
+
+ it('should render an svg', () => {
+ expect(component.$el.querySelector('svg')).toBeDefined();
+ });
+});
diff --git a/spec/javascripts/pipelines/graph/graph_component_spec.js b/spec/javascripts/pipelines/graph/graph_component_spec.js
new file mode 100644
index 00000000000..6bd0eb86263
--- /dev/null
+++ b/spec/javascripts/pipelines/graph/graph_component_spec.js
@@ -0,0 +1,62 @@
+import Vue from 'vue';
+import graphComponent from '~/pipelines/components/graph/graph_component.vue';
+import graphJSON from './mock_data';
+
+describe('graph component', () => {
+ preloadFixtures('static/graph.html.raw');
+
+ let GraphComponent;
+
+ beforeEach(() => {
+ loadFixtures('static/graph.html.raw');
+ GraphComponent = Vue.extend(graphComponent);
+ });
+
+ describe('while is loading', () => {
+ it('should render a loading icon', () => {
+ const component = new GraphComponent().$mount('#js-pipeline-graph-vue');
+ expect(component.$el.querySelector('.loading-icon')).toBeDefined();
+ });
+ });
+
+  describe('with a successful response', () => {
+ const interceptor = (request, next) => {
+ next(request.respondWith(JSON.stringify(graphJSON), {
+ status: 200,
+ }));
+ };
+
+ beforeEach(() => {
+ Vue.http.interceptors.push(interceptor);
+ });
+
+ afterEach(() => {
+ Vue.http.interceptors = _.without(Vue.http.interceptors, interceptor);
+ });
+
+ it('should render the graph', (done) => {
+ const component = new GraphComponent().$mount('#js-pipeline-graph-vue');
+
+ setTimeout(() => {
+ expect(component.$el.classList.contains('js-pipeline-graph')).toEqual(true);
+
+ expect(
+ component.$el.querySelector('.stage-column:first-child').classList.contains('no-margin'),
+ ).toEqual(true);
+
+ expect(
+ component.$el.querySelector('.stage-column:nth-child(2)').classList.contains('left-margin'),
+ ).toEqual(true);
+
+ expect(
+ component.$el.querySelector('.stage-column:nth-child(2) .build:nth-child(1)').classList.contains('left-connector'),
+ ).toEqual(true);
+
+        expect(component.$el.querySelector('.loading-icon')).toBe(null);
+
+ expect(component.$el.querySelector('.stage-column-list')).toBeDefined();
+ done();
+ }, 0);
+ });
+ });
+});
diff --git a/spec/javascripts/pipelines/graph/job_component_spec.js b/spec/javascripts/pipelines/graph/job_component_spec.js
new file mode 100644
index 00000000000..63986b6c0db
--- /dev/null
+++ b/spec/javascripts/pipelines/graph/job_component_spec.js
@@ -0,0 +1,117 @@
+import Vue from 'vue';
+import jobComponent from '~/pipelines/components/graph/job_component.vue';
+
+describe('pipeline graph job component', () => {
+ let JobComponent;
+
+ const mockJob = {
+ id: 4256,
+ name: 'test',
+ status: {
+ icon: 'icon_status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ details_path: '/root/ci-mock/builds/4256',
+ action: {
+ icon: 'icon_action_retry',
+ title: 'Retry',
+ path: '/root/ci-mock/builds/4256/retry',
+ method: 'post',
+ },
+ },
+ };
+
+ beforeEach(() => {
+ JobComponent = Vue.extend(jobComponent);
+ });
+
+ describe('name with link', () => {
+ it('should render the job name and status with a link', () => {
+ const component = new JobComponent({
+ propsData: {
+ job: mockJob,
+ },
+ }).$mount();
+
+ const link = component.$el.querySelector('a');
+
+ expect(link.getAttribute('href')).toEqual(mockJob.status.details_path);
+
+ expect(
+ link.getAttribute('data-original-title'),
+ ).toEqual(`${mockJob.name} - ${mockJob.status.label}`);
+
+ expect(component.$el.querySelector('.js-status-icon-success')).toBeDefined();
+
+ expect(
+ component.$el.querySelector('.ci-status-text').textContent.trim(),
+ ).toEqual(mockJob.name);
+ });
+ });
+
+ describe('name without link', () => {
+    it('should render status and name', () => {
+ const component = new JobComponent({
+ propsData: {
+ job: {
+ id: 4256,
+ name: 'test',
+ status: {
+ icon: 'icon_status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ details_path: '/root/ci-mock/builds/4256',
+ },
+ },
+ },
+ }).$mount();
+
+ expect(component.$el.querySelector('.js-status-icon-success')).toBeDefined();
+
+ expect(
+ component.$el.querySelector('.ci-status-text').textContent.trim(),
+ ).toEqual(mockJob.name);
+ });
+ });
+
+ describe('action icon', () => {
+    it('should render the action icon', () => {
+ const component = new JobComponent({
+ propsData: {
+ job: mockJob,
+ },
+ }).$mount();
+
+ expect(component.$el.querySelector('a.ci-action-icon-container')).toBeDefined();
+ expect(component.$el.querySelector('i.ci-action-icon-wrapper')).toBeDefined();
+ });
+ });
+
+ describe('dropdown', () => {
+ it('should render the dropdown action icon', () => {
+ const component = new JobComponent({
+ propsData: {
+ job: mockJob,
+ isDropdown: true,
+ },
+ }).$mount();
+
+ expect(component.$el.querySelector('a.ci-action-icon-wrapper')).toBeDefined();
+ });
+ });
+
+ it('should render provided class name', () => {
+ const component = new JobComponent({
+ propsData: {
+ job: mockJob,
+ cssClassJobName: 'css-class-job-name',
+ },
+ }).$mount();
+
+ expect(
+ component.$el.querySelector('a').classList.contains('css-class-job-name'),
+ ).toBe(true);
+ });
+});
diff --git a/spec/javascripts/pipelines/graph/job_name_component_spec.js b/spec/javascripts/pipelines/graph/job_name_component_spec.js
new file mode 100644
index 00000000000..8e2071ba0b3
--- /dev/null
+++ b/spec/javascripts/pipelines/graph/job_name_component_spec.js
@@ -0,0 +1,27 @@
+import Vue from 'vue';
+import jobNameComponent from '~/pipelines/components/graph/job_name_component.vue';
+
+describe('job name component', () => {
+ let component;
+
+ beforeEach(() => {
+ const JobNameComponent = Vue.extend(jobNameComponent);
+ component = new JobNameComponent({
+ propsData: {
+ name: 'foo',
+ status: {
+ icon: 'icon_status_success',
+ },
+ },
+ }).$mount();
+ });
+
+ it('should render the provided name', () => {
+ expect(component.$el.querySelector('.ci-status-text').textContent.trim()).toEqual('foo');
+ });
+
+ it('should render an icon with the provided status', () => {
+ expect(component.$el.querySelector('.ci-status-icon-success')).toBeDefined();
+ expect(component.$el.querySelector('.ci-status-icon-success svg')).toBeDefined();
+ });
+});
diff --git a/spec/javascripts/pipelines/graph/mock_data.js b/spec/javascripts/pipelines/graph/mock_data.js
new file mode 100644
index 00000000000..56c522b7f77
--- /dev/null
+++ b/spec/javascripts/pipelines/graph/mock_data.js
@@ -0,0 +1,232 @@
+/* eslint-disable quote-props, quotes, comma-dangle */
+export default {
+ "id": 123,
+ "user": {
+ "name": "Root",
+ "username": "root",
+ "id": 1,
+ "state": "active",
+ "avatar_url": null,
+ "web_url": "http://localhost:3000/root"
+ },
+ "active": false,
+ "coverage": null,
+ "path": "/root/ci-mock/pipelines/123",
+ "details": {
+ "status": {
+ "icon": "icon_status_success",
+ "text": "passed",
+ "label": "passed",
+ "group": "success",
+ "has_details": true,
+ "details_path": "/root/ci-mock/pipelines/123",
+ "favicon": "/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico"
+ },
+ "duration": 9,
+ "finished_at": "2017-04-19T14:30:27.542Z",
+ "stages": [{
+ "name": "test",
+ "title": "test: passed",
+ "groups": [{
+ "name": "test",
+ "size": 1,
+ "status": {
+ "icon": "icon_status_success",
+ "text": "passed",
+ "label": "passed",
+ "group": "success",
+ "has_details": true,
+ "details_path": "/root/ci-mock/builds/4153",
+ "favicon": "/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico",
+ "action": {
+ "icon": "icon_action_retry",
+ "title": "Retry",
+ "path": "/root/ci-mock/builds/4153/retry",
+ "method": "post"
+ }
+ },
+ "jobs": [{
+ "id": 4153,
+ "name": "test",
+ "build_path": "/root/ci-mock/builds/4153",
+ "retry_path": "/root/ci-mock/builds/4153/retry",
+ "playable": false,
+ "created_at": "2017-04-13T09:25:18.959Z",
+ "updated_at": "2017-04-13T09:25:23.118Z",
+ "status": {
+ "icon": "icon_status_success",
+ "text": "passed",
+ "label": "passed",
+ "group": "success",
+ "has_details": true,
+ "details_path": "/root/ci-mock/builds/4153",
+ "favicon": "/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico",
+ "action": {
+ "icon": "icon_action_retry",
+ "title": "Retry",
+ "path": "/root/ci-mock/builds/4153/retry",
+ "method": "post"
+ }
+ }
+ }]
+ }],
+ "status": {
+ "icon": "icon_status_success",
+ "text": "passed",
+ "label": "passed",
+ "group": "success",
+ "has_details": true,
+ "details_path": "/root/ci-mock/pipelines/123#test",
+ "favicon": "/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico"
+ },
+ "path": "/root/ci-mock/pipelines/123#test",
+ "dropdown_path": "/root/ci-mock/pipelines/123/stage.json?stage=test"
+ }, {
+ "name": "deploy",
+ "title": "deploy: passed",
+ "groups": [{
+ "name": "deploy to production",
+ "size": 1,
+ "status": {
+ "icon": "icon_status_success",
+ "text": "passed",
+ "label": "passed",
+ "group": "success",
+ "has_details": true,
+ "details_path": "/root/ci-mock/builds/4166",
+ "favicon": "/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico",
+ "action": {
+ "icon": "icon_action_retry",
+ "title": "Retry",
+ "path": "/root/ci-mock/builds/4166/retry",
+ "method": "post"
+ }
+ },
+ "jobs": [{
+ "id": 4166,
+ "name": "deploy to production",
+ "build_path": "/root/ci-mock/builds/4166",
+ "retry_path": "/root/ci-mock/builds/4166/retry",
+ "playable": false,
+ "created_at": "2017-04-19T14:29:46.463Z",
+ "updated_at": "2017-04-19T14:30:27.498Z",
+ "status": {
+ "icon": "icon_status_success",
+ "text": "passed",
+ "label": "passed",
+ "group": "success",
+ "has_details": true,
+ "details_path": "/root/ci-mock/builds/4166",
+ "favicon": "/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico",
+ "action": {
+ "icon": "icon_action_retry",
+ "title": "Retry",
+ "path": "/root/ci-mock/builds/4166/retry",
+ "method": "post"
+ }
+ }
+ }]
+ }, {
+ "name": "deploy to staging",
+ "size": 1,
+ "status": {
+ "icon": "icon_status_success",
+ "text": "passed",
+ "label": "passed",
+ "group": "success",
+ "has_details": true,
+ "details_path": "/root/ci-mock/builds/4159",
+ "favicon": "/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico",
+ "action": {
+ "icon": "icon_action_retry",
+ "title": "Retry",
+ "path": "/root/ci-mock/builds/4159/retry",
+ "method": "post"
+ }
+ },
+ "jobs": [{
+ "id": 4159,
+ "name": "deploy to staging",
+ "build_path": "/root/ci-mock/builds/4159",
+ "retry_path": "/root/ci-mock/builds/4159/retry",
+ "playable": false,
+ "created_at": "2017-04-18T16:32:08.420Z",
+ "updated_at": "2017-04-18T16:32:12.631Z",
+ "status": {
+ "icon": "icon_status_success",
+ "text": "passed",
+ "label": "passed",
+ "group": "success",
+ "has_details": true,
+ "details_path": "/root/ci-mock/builds/4159",
+ "favicon": "/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico",
+ "action": {
+ "icon": "icon_action_retry",
+ "title": "Retry",
+ "path": "/root/ci-mock/builds/4159/retry",
+ "method": "post"
+ }
+ }
+ }]
+ }],
+ "status": {
+ "icon": "icon_status_success",
+ "text": "passed",
+ "label": "passed",
+ "group": "success",
+ "has_details": true,
+ "details_path": "/root/ci-mock/pipelines/123#deploy",
+ "favicon": "/assets/ci_favicons/dev/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.ico"
+ },
+ "path": "/root/ci-mock/pipelines/123#deploy",
+ "dropdown_path": "/root/ci-mock/pipelines/123/stage.json?stage=deploy"
+ }],
+ "artifacts": [],
+ "manual_actions": [{
+ "name": "deploy to production",
+ "path": "/root/ci-mock/builds/4166/play",
+ "playable": false
+ }]
+ },
+ "flags": {
+ "latest": true,
+ "triggered": false,
+ "stuck": false,
+ "yaml_errors": false,
+ "retryable": false,
+ "cancelable": false
+ },
+ "ref": {
+ "name": "master",
+ "path": "/root/ci-mock/tree/master",
+ "tag": false,
+ "branch": true
+ },
+ "commit": {
+ "id": "798e5f902592192afaba73f4668ae30e56eae492",
+ "short_id": "798e5f90",
+ "title": "Merge branch 'new-branch' into 'master'\r",
+ "created_at": "2017-04-13T10:25:17.000+01:00",
+ "parent_ids": ["54d483b1ed156fbbf618886ddf7ab023e24f8738", "c8e2d38a6c538822e81c57022a6e3a0cfedebbcc"],
+ "message": "Merge branch 'new-branch' into 'master'\r\n\r\nAdd new file\r\n\r\nSee merge request !1",
+ "author_name": "Root",
+ "author_email": "admin@example.com",
+ "authored_date": "2017-04-13T10:25:17.000+01:00",
+ "committer_name": "Root",
+ "committer_email": "admin@example.com",
+ "committed_date": "2017-04-13T10:25:17.000+01:00",
+ "author": {
+ "name": "Root",
+ "username": "root",
+ "id": 1,
+ "state": "active",
+ "avatar_url": null,
+ "web_url": "http://localhost:3000/root"
+ },
+ "author_gravatar_url": null,
+ "commit_url": "http://localhost:3000/root/ci-mock/commit/798e5f902592192afaba73f4668ae30e56eae492",
+ "commit_path": "/root/ci-mock/commit/798e5f902592192afaba73f4668ae30e56eae492"
+ },
+ "created_at": "2017-04-13T09:25:18.881Z",
+ "updated_at": "2017-04-19T14:30:27.561Z"
+};
diff --git a/spec/javascripts/pipelines/graph/stage_column_component_spec.js b/spec/javascripts/pipelines/graph/stage_column_component_spec.js
new file mode 100644
index 00000000000..aa4d6eedaf4
--- /dev/null
+++ b/spec/javascripts/pipelines/graph/stage_column_component_spec.js
@@ -0,0 +1,42 @@
+import Vue from 'vue';
+import stageColumnComponent from '~/pipelines/components/graph/stage_column_component.vue';
+
+describe('stage column component', () => {
+ let component;
+ const mockJob = {
+ id: 4256,
+ name: 'test',
+ status: {
+ icon: 'icon_status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ details_path: '/root/ci-mock/builds/4256',
+ action: {
+ icon: 'icon_action_retry',
+ title: 'Retry',
+ path: '/root/ci-mock/builds/4256/retry',
+ method: 'post',
+ },
+ },
+ };
+
+ beforeEach(() => {
+ const StageColumnComponent = Vue.extend(stageColumnComponent);
+
+ component = new StageColumnComponent({
+ propsData: {
+ title: 'foo',
+ jobs: [mockJob, mockJob, mockJob],
+ },
+ }).$mount();
+ });
+
+ it('should render provided title', () => {
+ expect(component.$el.querySelector('.stage-name').textContent.trim()).toEqual('foo');
+ });
+
+ it('should render the provided jobs', () => {
+ expect(component.$el.querySelectorAll('.builds-container > ul > li').length).toEqual(3);
+ });
+});
diff --git a/spec/javascripts/pipelines/mock_data.js b/spec/javascripts/pipelines/mock_data.js
deleted file mode 100644
index 2365a662b9f..00000000000
--- a/spec/javascripts/pipelines/mock_data.js
+++ /dev/null
@@ -1,107 +0,0 @@
-export default {
- pipelines: [{
- id: 115,
- user: {
- name: 'Root',
- username: 'root',
- id: 1,
- state: 'active',
- avatar_url: 'http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
- web_url: 'http://localhost:3000/root',
- },
- path: '/root/review-app/pipelines/115',
- details: {
- status: {
- icon: 'icon_status_failed',
- text: 'failed',
- label: 'failed',
- group: 'failed',
- has_details: true,
- details_path: '/root/review-app/pipelines/115',
- },
- duration: null,
- finished_at: '2017-03-17T19:00:15.996Z',
- stages: [{
- name: 'build',
- title: 'build: failed',
- status: {
- icon: 'icon_status_failed',
- text: 'failed',
- label: 'failed',
- group: 'failed',
- has_details: true,
- details_path: '/root/review-app/pipelines/115#build',
- },
- path: '/root/review-app/pipelines/115#build',
- dropdown_path: '/root/review-app/pipelines/115/stage.json?stage=build',
- },
- {
- name: 'review',
- title: 'review: skipped',
- status: {
- icon: 'icon_status_skipped',
- text: 'skipped',
- label: 'skipped',
- group: 'skipped',
- has_details: true,
- details_path: '/root/review-app/pipelines/115#review',
- },
- path: '/root/review-app/pipelines/115#review',
- dropdown_path: '/root/review-app/pipelines/115/stage.json?stage=review',
- }],
- artifacts: [],
- manual_actions: [{
- name: 'stop_review',
- path: '/root/review-app/builds/3766/play',
- }],
- },
- flags: {
- latest: true,
- triggered: false,
- stuck: false,
- yaml_errors: false,
- retryable: true,
- cancelable: false,
- },
- ref: {
- name: 'thisisabranch',
- path: '/root/review-app/tree/thisisabranch',
- tag: false,
- branch: true,
- },
- commit: {
- id: '9e87f87625b26c42c59a2ee0398f81d20cdfe600',
- short_id: '9e87f876',
- title: 'Update README.md',
- created_at: '2017-03-15T22:58:28.000+00:00',
- parent_ids: ['3744f9226e699faec2662a8b267e5d3fd0bfff0e'],
- message: 'Update README.md',
- author_name: 'Root',
- author_email: 'admin@example.com',
- authored_date: '2017-03-15T22:58:28.000+00:00',
- committer_name: 'Root',
- committer_email: 'admin@example.com',
- committed_date: '2017-03-15T22:58:28.000+00:00',
- author: {
- name: 'Root',
- username: 'root',
- id: 1,
- state: 'active',
- avatar_url: 'http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
- web_url: 'http://localhost:3000/root',
- },
- author_gravatar_url: 'http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
- commit_url: 'http://localhost:3000/root/review-app/commit/9e87f87625b26c42c59a2ee0398f81d20cdfe600',
- commit_path: '/root/review-app/commit/9e87f87625b26c42c59a2ee0398f81d20cdfe600',
- },
- retry_path: '/root/review-app/pipelines/115/retry',
- created_at: '2017-03-15T22:58:33.436Z',
- updated_at: '2017-03-17T19:00:15.997Z',
- }],
- count: {
- all: 52,
- running: 0,
- pending: 0,
- finished: 52,
- },
-};
diff --git a/spec/javascripts/pipelines/pipeline_url_spec.js b/spec/javascripts/pipelines/pipeline_url_spec.js
index 53931d67ad7..0bcc3905702 100644
--- a/spec/javascripts/pipelines/pipeline_url_spec.js
+++ b/spec/javascripts/pipelines/pipeline_url_spec.js
@@ -60,7 +60,7 @@ describe('Pipeline Url Component', () => {
expect(
component.$el.querySelector('.js-pipeline-url-user').getAttribute('href'),
).toEqual(mockData.pipeline.user.web_url);
- expect(image.getAttribute('title')).toEqual(mockData.pipeline.user.name);
+ expect(image.getAttribute('data-original-title')).toEqual(mockData.pipeline.user.name);
expect(image.getAttribute('src')).toEqual(mockData.pipeline.user.avatar_url);
});
diff --git a/spec/javascripts/pipelines/pipelines_spec.js b/spec/javascripts/pipelines/pipelines_spec.js
index e9c05f74ce6..3a56156358b 100644
--- a/spec/javascripts/pipelines/pipelines_spec.js
+++ b/spec/javascripts/pipelines/pipelines_spec.js
@@ -1,15 +1,20 @@
import Vue from 'vue';
import pipelinesComp from '~/pipelines/pipelines';
import Store from '~/pipelines/stores/pipelines_store';
-import pipelinesData from './mock_data';
describe('Pipelines', () => {
+ const jsonFixtureName = 'pipelines/pipelines.json';
+
preloadFixtures('static/pipelines.html.raw');
+ preloadFixtures(jsonFixtureName);
let PipelinesComponent;
+ let pipeline;
beforeEach(() => {
loadFixtures('static/pipelines.html.raw');
+ const pipelines = getJSONFixture(jsonFixtureName).pipelines;
+ pipeline = pipelines.find(p => p.id === 1);
PipelinesComponent = Vue.extend(pipelinesComp);
});
@@ -17,7 +22,7 @@ describe('Pipelines', () => {
describe('successfull request', () => {
describe('with pipelines', () => {
const pipelinesInterceptor = (request, next) => {
- next(request.respondWith(JSON.stringify(pipelinesData), {
+ next(request.respondWith(JSON.stringify(pipeline), {
status: 200,
}));
};
diff --git a/spec/javascripts/pipelines_spec.js b/spec/javascripts/pipelines_spec.js
index 72770a702d3..81ac589f4e6 100644
--- a/spec/javascripts/pipelines_spec.js
+++ b/spec/javascripts/pipelines_spec.js
@@ -1,30 +1,22 @@
-require('~/pipelines');
+import Pipelines from '~/pipelines';
// Fix for phantomJS
if (!Element.prototype.matches && Element.prototype.webkitMatchesSelector) {
Element.prototype.matches = Element.prototype.webkitMatchesSelector;
}
-(() => {
- describe('Pipelines', () => {
- preloadFixtures('static/pipeline_graph.html.raw');
+describe('Pipelines', () => {
+ preloadFixtures('static/pipeline_graph.html.raw');
- beforeEach(() => {
- loadFixtures('static/pipeline_graph.html.raw');
- });
-
- it('should be defined', () => {
- expect(window.gl.Pipelines).toBeDefined();
- });
-
- it('should create a `Pipelines` instance without options', () => {
- expect(() => { new window.gl.Pipelines(); }).not.toThrow(); //eslint-disable-line
- });
+ beforeEach(() => {
+ loadFixtures('static/pipeline_graph.html.raw');
+ });
- it('should create a `Pipelines` instance with options', () => {
- const pipelines = new window.gl.Pipelines({ foo: 'bar' });
+ it('should be defined', () => {
+ expect(Pipelines).toBeDefined();
+ });
- expect(pipelines.pipelineGraph).toBeDefined();
- });
+ it('should create a `Pipelines` instance without options', () => {
+ expect(() => { new Pipelines(); }).not.toThrow(); //eslint-disable-line
});
-})();
+});
diff --git a/spec/javascripts/pretty_time_spec.js b/spec/javascripts/pretty_time_spec.js
index a4662cfb557..de99e7e3894 100644
--- a/spec/javascripts/pretty_time_spec.js
+++ b/spec/javascripts/pretty_time_spec.js
@@ -1,4 +1,4 @@
-require('~/lib/utils/pretty_time');
+import '~/lib/utils/pretty_time';
(() => {
const prettyTime = gl.utils.prettyTime;
diff --git a/spec/javascripts/project_title_spec.js b/spec/javascripts/project_title_spec.js
index 3a1d4e2440f..3dba2e817ff 100644
--- a/spec/javascripts/project_title_spec.js
+++ b/spec/javascripts/project_title_spec.js
@@ -1,12 +1,11 @@
/* eslint-disable space-before-function-paren, no-unused-expressions, no-return-assign, no-param-reassign, no-var, new-cap, wrap-iife, no-unused-vars, quotes, jasmine/no-expect-in-setup-teardown, max-len */
/* global Project */
-require('select2/select2.js');
-require('~/lib/utils/type_utility');
-require('~/gl_dropdown');
-require('~/api');
-require('~/project_select');
-require('~/project');
+import 'select2/select2';
+import '~/gl_dropdown';
+import '~/api';
+import '~/project_select';
+import '~/project';
(function() {
describe('Project Title', function() {
diff --git a/spec/javascripts/search_autocomplete_spec.js b/spec/javascripts/search_autocomplete_spec.js
index aaf058bd755..a53f58b5d0d 100644
--- a/spec/javascripts/search_autocomplete_spec.js
+++ b/spec/javascripts/search_autocomplete_spec.js
@@ -1,10 +1,9 @@
/* eslint-disable space-before-function-paren, max-len, no-var, one-var, one-var-declaration-per-line, no-unused-expressions, consistent-return, no-param-reassign, default-case, no-return-assign, comma-dangle, object-shorthand, prefer-template, quotes, new-parens, vars-on-top, new-cap, max-len */
-require('~/gl_dropdown');
-require('~/search_autocomplete');
-require('~/lib/utils/common_utils');
-require('~/lib/utils/type_utility');
-require('vendor/fuzzaldrin-plus');
+import '~/gl_dropdown';
+import '~/search_autocomplete';
+import '~/lib/utils/common_utils';
+import 'vendor/fuzzaldrin-plus';
(function() {
var addBodyAttributes, assertLinks, dashboardIssuesPath, dashboardMRsPath, groupIssuesPath, groupMRsPath, groupName, mockDashboardOptions, mockGroupOptions, mockProjectOptions, projectIssuesPath, projectMRsPath, projectName, userId, widget;
diff --git a/spec/javascripts/shortcuts_issuable_spec.js b/spec/javascripts/shortcuts_issuable_spec.js
index 9e19dabd0e3..3515dfbc60b 100644
--- a/spec/javascripts/shortcuts_issuable_spec.js
+++ b/spec/javascripts/shortcuts_issuable_spec.js
@@ -1,8 +1,8 @@
/* eslint-disable space-before-function-paren, no-return-assign, no-var, quotes */
/* global ShortcutsIssuable */
-require('~/copy_as_gfm');
-require('~/shortcuts_issuable');
+import '~/copy_as_gfm';
+import '~/shortcuts_issuable';
(function() {
describe('ShortcutsIssuable', function() {
@@ -13,7 +13,7 @@ require('~/shortcuts_issuable');
document.querySelector('.js-new-note-form').classList.add('js-main-target-form');
this.shortcut = new ShortcutsIssuable();
});
- describe('#replyWithSelectedText', function() {
+ describe('replyWithSelectedText', function() {
var stubSelection;
// Stub window.gl.utils.getSelectedFragment to return a node with the provided HTML.
stubSelection = function(html) {
diff --git a/spec/javascripts/sidebar/sidebar_assignees_spec.js b/spec/javascripts/sidebar/sidebar_assignees_spec.js
index e0df0a3228f..865951b2ad7 100644
--- a/spec/javascripts/sidebar/sidebar_assignees_spec.js
+++ b/spec/javascripts/sidebar/sidebar_assignees_spec.js
@@ -24,6 +24,7 @@ describe('sidebar assignees', () => {
SidebarService.singleton = null;
SidebarStore.singleton = null;
SidebarMediator.singleton = null;
+ Vue.http.interceptors = _.without(Vue.http.interceptors, Mock.sidebarMockInterceptor);
});
it('calls the mediator when saves the assignees', () => {
diff --git a/spec/javascripts/sidebar/sidebar_bundle_spec.js b/spec/javascripts/sidebar/sidebar_bundle_spec.js
deleted file mode 100644
index 7760b34e071..00000000000
--- a/spec/javascripts/sidebar/sidebar_bundle_spec.js
+++ /dev/null
@@ -1,42 +0,0 @@
-import Vue from 'vue';
-import SidebarBundleDomContentLoaded from '~/sidebar/sidebar_bundle';
-import SidebarTimeTracking from '~/sidebar/components/time_tracking/sidebar_time_tracking';
-import SidebarMediator from '~/sidebar/sidebar_mediator';
-import SidebarService from '~/sidebar/services/sidebar_service';
-import SidebarStore from '~/sidebar/stores/sidebar_store';
-import Mock from './mock_data';
-
-describe('sidebar bundle', () => {
- gl.sidebarOptions = Mock.mediator;
-
- beforeEach(() => {
- spyOn(SidebarTimeTracking.methods, 'listenForSlashCommands').and.callFake(() => { });
- preloadFixtures('issues/open-issue.html.raw');
- Vue.http.interceptors.push(Mock.sidebarMockInterceptor);
- loadFixtures('issues/open-issue.html.raw');
- spyOn(Vue.prototype, '$mount');
- SidebarBundleDomContentLoaded();
- this.mediator = new SidebarMediator();
- });
-
- afterEach(() => {
- SidebarService.singleton = null;
- SidebarStore.singleton = null;
- SidebarMediator.singleton = null;
- });
-
- it('the mediator should be already defined with some data', () => {
- SidebarBundleDomContentLoaded();
-
- expect(this.mediator.store).toBeDefined();
- expect(this.mediator.service).toBeDefined();
- expect(this.mediator.store.currentUser).toEqual(Mock.mediator.currentUser);
- expect(this.mediator.store.rootPath).toEqual(Mock.mediator.rootPath);
- expect(this.mediator.store.endPoint).toEqual(Mock.mediator.endPoint);
- expect(this.mediator.store.editable).toEqual(Mock.mediator.editable);
- });
-
- it('the sidebar time tracking and assignees components to have been mounted', () => {
- expect(Vue.prototype.$mount).toHaveBeenCalledTimes(2);
- });
-});
diff --git a/spec/javascripts/sidebar/sidebar_mediator_spec.js b/spec/javascripts/sidebar/sidebar_mediator_spec.js
index 2b00fa17334..e246f41ee82 100644
--- a/spec/javascripts/sidebar/sidebar_mediator_spec.js
+++ b/spec/javascripts/sidebar/sidebar_mediator_spec.js
@@ -14,6 +14,7 @@ describe('Sidebar mediator', () => {
SidebarService.singleton = null;
SidebarStore.singleton = null;
SidebarMediator.singleton = null;
+ Vue.http.interceptors = _.without(Vue.http.interceptors, Mock.sidebarMockInterceptor);
});
it('assigns yourself ', () => {
diff --git a/spec/javascripts/sidebar/sidebar_service_spec.js b/spec/javascripts/sidebar/sidebar_service_spec.js
index d41162096a6..91a4dd669a7 100644
--- a/spec/javascripts/sidebar/sidebar_service_spec.js
+++ b/spec/javascripts/sidebar/sidebar_service_spec.js
@@ -10,6 +10,7 @@ describe('Sidebar service', () => {
afterEach(() => {
SidebarService.singleton = null;
+ Vue.http.interceptors = _.without(Vue.http.interceptors, Mock.sidebarMockInterceptor);
});
it('gets the data', (done) => {
diff --git a/spec/javascripts/signin_tabs_memoizer_spec.js b/spec/javascripts/signin_tabs_memoizer_spec.js
index 5b4f5933b34..0a32797c3e2 100644
--- a/spec/javascripts/signin_tabs_memoizer_spec.js
+++ b/spec/javascripts/signin_tabs_memoizer_spec.js
@@ -1,6 +1,6 @@
import AccessorUtilities from '~/lib/utils/accessor';
-require('~/signin_tabs_memoizer');
+import '~/signin_tabs_memoizer';
((global) => {
describe('SigninTabsMemoizer', () => {
diff --git a/spec/javascripts/smart_interval_spec.js b/spec/javascripts/smart_interval_spec.js
index 4366ec2a5b8..7833bf3fb04 100644
--- a/spec/javascripts/smart_interval_spec.js
+++ b/spec/javascripts/smart_interval_spec.js
@@ -1,4 +1,4 @@
-require('~/smart_interval');
+import '~/smart_interval';
(() => {
const DEFAULT_MAX_INTERVAL = 100;
diff --git a/spec/javascripts/syntax_highlight_spec.js b/spec/javascripts/syntax_highlight_spec.js
index cea223bd243..946f98379ce 100644
--- a/spec/javascripts/syntax_highlight_spec.js
+++ b/spec/javascripts/syntax_highlight_spec.js
@@ -1,6 +1,6 @@
/* eslint-disable space-before-function-paren, no-var, no-return-assign, quotes */
-require('~/syntax_highlight');
+import '~/syntax_highlight';
(function() {
describe('Syntax Highlighter', function() {
diff --git a/spec/javascripts/test_bundle.js b/spec/javascripts/test_bundle.js
index 07dc51a7815..13827a26571 100644
--- a/spec/javascripts/test_bundle.js
+++ b/spec/javascripts/test_bundle.js
@@ -1,13 +1,15 @@
-// enable test fixtures
-require('jasmine-jquery');
+import $ from 'jquery';
+import _ from 'underscore';
+import 'jasmine-jquery';
+import '~/commons';
-jasmine.getFixtures().fixturesPath = 'base/spec/javascripts/fixtures';
-jasmine.getJSONFixtures().fixturesPath = 'base/spec/javascripts/fixtures';
+// enable test fixtures
+jasmine.getFixtures().fixturesPath = '/base/spec/javascripts/fixtures';
+jasmine.getJSONFixtures().fixturesPath = '/base/spec/javascripts/fixtures';
-// include common libraries
-require('~/commons/index.js');
-window.$ = window.jQuery = require('jquery');
-window._ = require('underscore');
+// globalize common libraries
+window.$ = window.jQuery = $;
+window._ = _;
// stub expected globals
window.gl = window.gl || {};
@@ -55,7 +57,6 @@ if (process.env.BABEL_ENV === 'coverage') {
'./merge_conflicts/merge_conflicts_bundle.js',
'./merge_conflicts/components/inline_conflict_lines.js',
'./merge_conflicts/components/parallel_conflict_lines.js',
- './merge_request_widget/ci_bundle.js',
'./monitoring/monitoring_bundle.js',
'./network/network_bundle.js',
'./network/branch_graph.js',
diff --git a/spec/javascripts/todos_spec.js b/spec/javascripts/todos_spec.js
index 66e4fbd6304..cd74aba4a4e 100644
--- a/spec/javascripts/todos_spec.js
+++ b/spec/javascripts/todos_spec.js
@@ -1,5 +1,5 @@
-require('~/todos');
-require('~/lib/utils/common_utils');
+import '~/todos';
+import '~/lib/utils/common_utils';
describe('Todos', () => {
preloadFixtures('todos/todos.html.raw');
diff --git a/spec/javascripts/u2f/authenticate_spec.js b/spec/javascripts/u2f/authenticate_spec.js
index af2d02b6b29..a160c86308d 100644
--- a/spec/javascripts/u2f/authenticate_spec.js
+++ b/spec/javascripts/u2f/authenticate_spec.js
@@ -2,11 +2,11 @@
/* global MockU2FDevice */
/* global U2FAuthenticate */
-require('~/u2f/authenticate');
-require('~/u2f/util');
-require('~/u2f/error');
-require('vendor/u2f');
-require('./mock_u2f_device');
+import '~/u2f/authenticate';
+import '~/u2f/util';
+import '~/u2f/error';
+import 'vendor/u2f';
+import './mock_u2f_device';
(function() {
describe('U2FAuthenticate', function() {
diff --git a/spec/javascripts/u2f/mock_u2f_device.js b/spec/javascripts/u2f/mock_u2f_device.js
index 6677fe9c1ee..4eb8ad3d9e4 100644
--- a/spec/javascripts/u2f/mock_u2f_device.js
+++ b/spec/javascripts/u2f/mock_u2f_device.js
@@ -1,12 +1,10 @@
/* eslint-disable space-before-function-paren, no-var, prefer-rest-params, wrap-iife, no-unused-expressions, no-return-assign, no-param-reassign, max-len */
(function() {
- var bind = function(fn, me) { return function() { return fn.apply(me, arguments); }; };
-
this.MockU2FDevice = (function() {
function MockU2FDevice() {
- this.respondToAuthenticateRequest = bind(this.respondToAuthenticateRequest, this);
- this.respondToRegisterRequest = bind(this.respondToRegisterRequest, this);
+ this.respondToAuthenticateRequest = this.respondToAuthenticateRequest.bind(this);
+ this.respondToRegisterRequest = this.respondToRegisterRequest.bind(this);
window.u2f || (window.u2f = {});
window.u2f.register = (function(_this) {
return function(appId, registerRequests, signRequests, callback) {
diff --git a/spec/javascripts/u2f/register_spec.js b/spec/javascripts/u2f/register_spec.js
index 3960759f7cb..a445c80f2af 100644
--- a/spec/javascripts/u2f/register_spec.js
+++ b/spec/javascripts/u2f/register_spec.js
@@ -2,11 +2,11 @@
/* global MockU2FDevice */
/* global U2FRegister */
-require('~/u2f/register');
-require('~/u2f/util');
-require('~/u2f/error');
-require('vendor/u2f');
-require('./mock_u2f_device');
+import '~/u2f/register';
+import '~/u2f/util';
+import '~/u2f/error';
+import 'vendor/u2f';
+import './mock_u2f_device';
(function() {
describe('U2FRegister', function() {
diff --git a/spec/javascripts/version_check_image_spec.js b/spec/javascripts/version_check_image_spec.js
index 464c1fce210..9637bd0414a 100644
--- a/spec/javascripts/version_check_image_spec.js
+++ b/spec/javascripts/version_check_image_spec.js
@@ -1,9 +1,8 @@
-const ClassSpecHelper = require('./helpers/class_spec_helper');
-const VersionCheckImage = require('~/version_check_image');
-require('jquery');
+import VersionCheckImage from '~/version_check_image';
+import ClassSpecHelper from './helpers/class_spec_helper';
describe('VersionCheckImage', function () {
- describe('.bindErrorEvent', function () {
+ describe('bindErrorEvent', function () {
ClassSpecHelper.itShouldBeAStaticMethod(VersionCheckImage, 'bindErrorEvent');
beforeEach(function () {
diff --git a/spec/javascripts/visibility_select_spec.js b/spec/javascripts/visibility_select_spec.js
index 9727c03c91e..c2eaea7c2ed 100644
--- a/spec/javascripts/visibility_select_spec.js
+++ b/spec/javascripts/visibility_select_spec.js
@@ -1,4 +1,4 @@
-require('~/visibility_select');
+import '~/visibility_select';
(() => {
const VisibilitySelect = gl.VisibilitySelect;
@@ -22,7 +22,7 @@ require('~/visibility_select');
spyOn(Element.prototype, 'querySelector').and.callFake(selector => mockElements[selector]);
});
- describe('#constructor', function () {
+ describe('constructor', function () {
beforeEach(function () {
this.visibilitySelect = new VisibilitySelect(mockElements.container);
});
@@ -48,7 +48,7 @@ require('~/visibility_select');
});
});
- describe('#init', function () {
+ describe('init', function () {
describe('if there is a select', function () {
beforeEach(function () {
this.visibilitySelect = new VisibilitySelect(mockElements.container);
@@ -85,7 +85,7 @@ require('~/visibility_select');
});
});
- describe('#updateHelpText', function () {
+ describe('updateHelpText', function () {
beforeEach(function () {
this.visibilitySelect = new VisibilitySelect(mockElements.container);
this.visibilitySelect.init();
diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_author_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_author_spec.js
new file mode 100644
index 00000000000..a750bc78f36
--- /dev/null
+++ b/spec/javascripts/vue_mr_widget/components/mr_widget_author_spec.js
@@ -0,0 +1,39 @@
+import Vue from 'vue';
+import authorComponent from '~/vue_merge_request_widget/components/mr_widget_author';
+
+const author = {
+ webUrl: 'http://foo.bar',
+ avatarUrl: 'http://gravatar.com/foo',
+ name: 'fatihacet',
+};
+const createComponent = () => {
+ const Component = Vue.extend(authorComponent);
+
+ return new Component({
+ el: document.createElement('div'),
+ propsData: { author },
+ });
+};
+
+describe('MRWidgetAuthor', () => {
+ describe('props', () => {
+ it('should have props', () => {
+ const authorProp = authorComponent.props.author;
+
+ expect(authorProp).toBeDefined();
+ expect(authorProp.type instanceof Object).toBeTruthy();
+ expect(authorProp.required).toBeTruthy();
+ });
+ });
+
+ describe('template', () => {
+ it('should have correct elements', () => {
+ const el = createComponent().$el;
+
+ expect(el.tagName).toEqual('A');
+ expect(el.getAttribute('href')).toEqual(author.webUrl);
+ expect(el.querySelector('img').getAttribute('src')).toEqual(author.avatarUrl);
+ expect(el.querySelector('.author').innerText.trim()).toEqual(author.name);
+ });
+ });
+});
diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_author_time_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_author_time_spec.js
new file mode 100644
index 00000000000..515ddcbb875
--- /dev/null
+++ b/spec/javascripts/vue_mr_widget/components/mr_widget_author_time_spec.js
@@ -0,0 +1,61 @@
+import Vue from 'vue';
+import authorTimeComponent from '~/vue_merge_request_widget/components/mr_widget_author_time';
+
+const props = {
+ actionText: 'Merged by',
+ author: {
+ webUrl: 'http://foo.bar',
+ avatarUrl: 'http://gravatar.com/foo',
+ name: 'fatihacet',
+ },
+ dateTitle: '2017-03-23T23:02:00.807Z',
+ dateReadable: '12 hours ago',
+};
+const createComponent = () => {
+ const Component = Vue.extend(authorTimeComponent);
+
+ return new Component({
+ el: document.createElement('div'),
+ propsData: props,
+ });
+};
+
+describe('MRWidgetAuthorTime', () => {
+ describe('props', () => {
+ it('should have props', () => {
+ const { actionText, author, dateTitle, dateReadable } = authorTimeComponent.props;
+ const ActionTextClass = actionText.type;
+ const DateTitleClass = dateTitle.type;
+ const DateReadableClass = dateReadable.type;
+
+ expect(new ActionTextClass() instanceof String).toBeTruthy();
+ expect(actionText.required).toBeTruthy();
+
+ expect(author.type instanceof Object).toBeTruthy();
+ expect(author.required).toBeTruthy();
+
+ expect(new DateTitleClass() instanceof String).toBeTruthy();
+ expect(dateTitle.required).toBeTruthy();
+
+ expect(new DateReadableClass() instanceof String).toBeTruthy();
+ expect(dateReadable.required).toBeTruthy();
+ });
+ });
+
+ describe('components', () => {
+ it('should have components', () => {
+ expect(authorTimeComponent.components['mr-widget-author']).toBeDefined();
+ });
+ });
+
+ describe('template', () => {
+ it('should have correct elements', () => {
+ const el = createComponent().$el;
+
+ expect(el.tagName).toEqual('H4');
+ expect(el.querySelector('a').getAttribute('href')).toEqual(props.author.webUrl);
+ expect(el.querySelector('time').innerText).toContain(props.dateReadable);
+ expect(el.querySelector('time').getAttribute('title')).toEqual(props.dateTitle);
+ });
+ });
+});
diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_deployment_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_deployment_spec.js
new file mode 100644
index 00000000000..d4b200875df
--- /dev/null
+++ b/spec/javascripts/vue_mr_widget/components/mr_widget_deployment_spec.js
@@ -0,0 +1,188 @@
+import Vue from 'vue';
+import deploymentComponent from '~/vue_merge_request_widget/components/mr_widget_deployment';
+import MRWidgetService from '~/vue_merge_request_widget/services/mr_widget_service';
+import { statusIconEntityMap } from '~/vue_shared/ci_status_icons';
+
+const deploymentMockData = [
+ {
+ id: 15,
+ name: 'review/diplo',
+ url: '/root/acets-review-apps/environments/15',
+ stop_url: '/root/acets-review-apps/environments/15/stop',
+ metrics_url: '/root/acets-review-apps/environments/15/deployments/1/metrics',
+ external_url: 'http://diplo.',
+ external_url_formatted: 'diplo.',
+ deployed_at: '2017-03-22T22:44:42.258Z',
+ deployed_at_formatted: 'Mar 22, 2017 10:44pm',
+ },
+];
+const createComponent = () => {
+ const Component = Vue.extend(deploymentComponent);
+ const mr = {
+ deployments: deploymentMockData,
+ };
+ const service = {};
+
+ return new Component({
+ el: document.createElement('div'),
+ propsData: { mr, service },
+ });
+};
+
+describe('MRWidgetDeployment', () => {
+ describe('props', () => {
+ it('should have props', () => {
+ const { mr, service } = deploymentComponent.props;
+
+ expect(mr.type instanceof Object).toBeTruthy();
+ expect(mr.required).toBeTruthy();
+
+ expect(service.type instanceof Object).toBeTruthy();
+ expect(service.required).toBeTruthy();
+ });
+ });
+
+ describe('computed', () => {
+ describe('svg', () => {
+ it('should have the proper SVG icon', () => {
+ const vm = createComponent(deploymentMockData);
+ expect(vm.svg).toEqual(statusIconEntityMap.icon_status_success);
+ });
+ });
+ });
+
+ describe('methods', () => {
+ let vm = createComponent();
+ const deployment = deploymentMockData[0];
+
+ describe('formatDate', () => {
+ it('should format the given date with timeago', () => {
+ const readable = gl.utils.getTimeago().format(deployment.deployed_at);
+ expect(vm.formatDate(deployment.deployed_at)).toEqual(readable);
+ });
+ });
+
+ describe('hasExternalUrls', () => {
+ it('should return true when both external URL fields are present', () => {
+ expect(vm.hasExternalUrls(deployment)).toBeTruthy();
+ });
+
+ it('should return false when there is not enough information', () => {
+ expect(vm.hasExternalUrls()).toBeFalsy();
+ expect(vm.hasExternalUrls({ external_url: 'Diplo' })).toBeFalsy();
+ expect(vm.hasExternalUrls({ external_url_formatted: 'Diplo' })).toBeFalsy();
+ });
+ });
+
+ describe('hasDeploymentTime', () => {
+ it('should return true when both deployment time fields are present', () => {
+ expect(vm.hasDeploymentTime(deployment)).toBeTruthy();
+ });
+
+ it('should return false when there is not enough information', () => {
+ expect(vm.hasDeploymentTime()).toBeFalsy();
+ expect(vm.hasDeploymentTime({ deployed_at: 'Diplo' })).toBeFalsy();
+ expect(vm.hasDeploymentTime({ deployed_at_formatted: 'Diplo' })).toBeFalsy();
+ });
+ });
+
+ describe('hasDeploymentMeta', () => {
+ it('should return true when the deployment name and url are present', () => {
+ expect(vm.hasDeploymentMeta(deployment)).toBeTruthy();
+ });
+
+ it('should return false when there is not enough information', () => {
+ expect(vm.hasDeploymentMeta()).toBeFalsy();
+ expect(vm.hasDeploymentMeta({ url: 'Diplo' })).toBeFalsy();
+ expect(vm.hasDeploymentMeta({ name: 'Diplo' })).toBeFalsy();
+ });
+ });
+
+ describe('stopEnvironment', () => {
+ const url = '/foo/bar';
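+ // resolved response stub; json() exposes the redirect_url the component should follow after stopping the environment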
+ const returnPromise = () => new Promise((resolve) => {
+ resolve({
+ json() {
+ return {
+ redirect_url: url,
+ };
+ },
+ });
+ });
+ const mockStopEnvironment = () => {
+ vm.stopEnvironment(deploymentMockData[0]);
+ return vm;
+ };
+
+ it('should show a confirm dialog and call service.stopEnvironment when confirmed', (done) => {
+ spyOn(window, 'confirm').and.returnValue(true);
+ spyOn(MRWidgetService, 'stopEnvironment').and.returnValue(returnPromise(true));
+ spyOn(gl.utils, 'visitUrl').and.returnValue(true);
+ vm = mockStopEnvironment();
+
+ expect(window.confirm).toHaveBeenCalled();
+ expect(MRWidgetService.stopEnvironment).toHaveBeenCalledWith(deploymentMockData[0].stop_url);
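+ // allow the mocked stopEnvironment promise to resolve before asserting the redirect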
+ setTimeout(() => {
+ expect(gl.utils.visitUrl).toHaveBeenCalledWith(url);
+ done();
+ }, 333);
+ });
+
+ it('should show a confirm dialog but should not work if the dialog is rejected', () => {
+ spyOn(window, 'confirm').and.returnValue(false);
+ spyOn(MRWidgetService, 'stopEnvironment').and.returnValue(returnPromise(false));
+ vm = mockStopEnvironment();
+
+ expect(window.confirm).toHaveBeenCalled();
+ expect(MRWidgetService.stopEnvironment).not.toHaveBeenCalled();
+ });
+ });
+ });
+
+ describe('template', () => {
+ let vm;
+ let el;
+ const [deployment] = deploymentMockData;
+
+ beforeEach(() => {
+ vm = createComponent(deploymentMockData);
+ el = vm.$el;
+ });
+
+ it('should render template elements correctly', () => {
+ expect(el.classList.contains('mr-widget-heading')).toBeTruthy();
+ expect(el.querySelector('.js-icon-link')).toBeDefined();
+ expect(el.querySelector('.js-deploy-meta').getAttribute('href')).toEqual(deployment.url);
+ expect(el.querySelector('.js-deploy-meta').innerText).toContain(deployment.name);
+ expect(el.querySelector('.js-deploy-url').getAttribute('href')).toEqual(deployment.external_url);
+ expect(el.querySelector('.js-deploy-url').innerText).toContain(deployment.external_url_formatted);
+ expect(el.querySelector('.js-deploy-time').innerText).toContain(vm.formatDate(deployment.deployed_at));
+ expect(el.querySelector('.js-mr-memory-usage')).toBeDefined();
+ expect(el.querySelector('button')).toBeDefined();
+ });
+
+ it('should list multiple deployments', (done) => {
+ vm.mr.deployments.push(deployment);
+ vm.mr.deployments.push(deployment);
+
+ Vue.nextTick(() => {
+ expect(el.querySelectorAll('.ci-widget').length).toEqual(3);
+ expect(el.querySelectorAll('.js-mr-memory-usage').length).toEqual(3);
+ done();
+ });
+ });
+
+ it('should not render deployment details when the deployment data is incomplete', (done) => {
+ vm.mr.deployments = [{}];
+
+ Vue.nextTick(() => {
+ expect(el.querySelectorAll('.js-deploy-meta').length).toEqual(0);
+ expect(el.querySelectorAll('.js-deploy-url').length).toEqual(0);
+ expect(el.querySelectorAll('.js-deploy-time').length).toEqual(0);
+ expect(el.querySelectorAll('.js-mr-memory-usage').length).toEqual(0);
+ expect(el.querySelectorAll('.button').length).toEqual(0);
+ done();
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_header_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_header_spec.js
new file mode 100644
index 00000000000..7f3eea7d2e5
--- /dev/null
+++ b/spec/javascripts/vue_mr_widget/components/mr_widget_header_spec.js
@@ -0,0 +1,102 @@
+import Vue from 'vue';
+import headerComponent from '~/vue_merge_request_widget/components/mr_widget_header';
+
+const createComponent = (mr) => {
+ const Component = Vue.extend(headerComponent);
+ return new Component({
+ el: document.createElement('div'),
+ propsData: { mr },
+ });
+};
+
+describe('MRWidgetHeader', () => {
+ describe('props', () => {
+ it('should have props', () => {
+ const { mr } = headerComponent.props;
+
+ expect(mr.type instanceof Object).toBeTruthy();
+ expect(mr.required).toBeTruthy();
+ });
+ });
+
+ describe('computed', () => {
+ let vm;
+ beforeEach(() => {
+ vm = createComponent({
+ divergedCommitsCount: 12,
+ sourceBranch: 'mr-widget-refactor',
+ sourceBranchLink: '/foo/bar/mr-widget-refactor',
+ targetBranch: 'master',
+ });
+ });
+
+ it('shouldShowCommitsBehindText', () => {
+ expect(vm.shouldShowCommitsBehindText).toBeTruthy();
+
+ vm.mr.divergedCommitsCount = 0;
+ expect(vm.shouldShowCommitsBehindText).toBeFalsy();
+ });
+
+ it('commitsText', () => {
+ expect(vm.commitsText).toEqual('commits');
+
+ vm.mr.divergedCommitsCount = 1;
+ expect(vm.commitsText).toEqual('commit');
+ });
+ });
+
+ describe('template', () => {
+ let vm;
+ let el;
+ const sourceBranchPath = '/foo/bar/mr-widget-refactor';
+ const mr = {
+ divergedCommitsCount: 12,
+ sourceBranch: 'mr-widget-refactor',
+ sourceBranchLink: `<a href="${sourceBranchPath}">mr-widget-refactor</a>`,
+ targetBranchPath: 'foo/bar/commits-path',
+ targetBranch: 'master',
+ isOpen: true,
+ emailPatchesPath: '/mr/email-patches',
+ plainDiffPath: '/mr/plainDiffPath',
+ };
+
+ beforeEach(() => {
+ vm = createComponent(mr);
+ el = vm.$el;
+ });
+
+ it('should render template elements correctly', () => {
+ expect(el.classList.contains('mr-source-target')).toBeTruthy();
+ const sourceBranchLink = el.querySelectorAll('.label-branch')[0];
+ const targetBranchLink = el.querySelectorAll('.label-branch')[1];
+
+ expect(sourceBranchLink.textContent).toContain(mr.sourceBranch);
+ expect(targetBranchLink.textContent).toContain(mr.targetBranch);
+ expect(sourceBranchLink.querySelector('a').getAttribute('href')).toEqual(sourceBranchPath);
+ expect(targetBranchLink.querySelector('a').getAttribute('href')).toEqual(mr.targetBranchPath);
+ expect(el.querySelector('.diverged-commits-count').textContent).toContain('12 commits behind');
+
+ expect(el.textContent).toContain('Check out branch');
+ expect(el.querySelectorAll('.dropdown li a')[0].getAttribute('href')).toEqual(mr.emailPatchesPath);
+ expect(el.querySelectorAll('.dropdown li a')[1].getAttribute('href')).toEqual(mr.plainDiffPath);
+ });
+
+ it('should not have right action links if the MR state is not open', (done) => {
+ vm.mr.isOpen = false;
+ Vue.nextTick(() => {
+ expect(el.textContent).not.toContain('Check out branch');
+ expect(el.querySelectorAll('.dropdown li a').length).toEqual(0);
+ done();
+ });
+ });
+
+ it('should not render diverged commits count if the MR has no diverged commits', (done) => {
+ vm.mr.divergedCommitsCount = null;
+ Vue.nextTick(() => {
+ expect(el.textContent).not.toContain('commits behind');
+ expect(el.querySelectorAll('.diverged-commits-count').length).toEqual(0);
+ done();
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_memory_usage_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_memory_usage_spec.js
new file mode 100644
index 00000000000..da9dff18ada
--- /dev/null
+++ b/spec/javascripts/vue_mr_widget/components/mr_widget_memory_usage_spec.js
@@ -0,0 +1,184 @@
+import Vue from 'vue';
+import memoryUsageComponent from '~/vue_merge_request_widget/components/mr_widget_memory_usage';
+import MRWidgetService from '~/vue_merge_request_widget/services/mr_widget_service';
+
+const url = '/root/acets-review-apps/environments/15/deployments/1/metrics';
+
+const metricsMockData = {
+ success: true,
+ metrics: {
+ memory_values: [
+ {
+ metric: {},
+ values: [
+ [1493716685, '4.30859375'],
+ ],
+ },
+ ],
+ },
+ last_update: '2017-05-02T12:34:49.628Z',
+ deployment_time: 1493718485,
+};
+
+const createComponent = () => {
+ const Component = Vue.extend(memoryUsageComponent);
+
+ return new Component({
+ el: document.createElement('div'),
+ propsData: {
+ metricsUrl: url,
+ memoryMetrics: [],
+ deploymentTime: 0,
+ hasMetrics: false,
+ loadFailed: false,
+ loadingMetrics: true,
+ backOffRequestCounter: 0,
+ },
+ });
+};
+
+const messages = {
+ loadingMetrics: 'Loading deployment statistics.',
+ hasMetrics: 'Deployment memory usage:',
+ loadFailed: 'Failed to load deployment statistics.',
+ metricsUnavailable: 'Deployment statistics are not available currently.',
+};
+
+describe('MemoryUsage', () => {
+ let vm;
+ let el;
+
+ beforeEach(() => {
+ vm = createComponent();
+ el = vm.$el;
+ });
+
+ describe('props', () => {
+ it('should have props with defaults', () => {
+ const { metricsUrl } = memoryUsageComponent.props;
+ const MetricsUrlTypeClass = metricsUrl.type;
+
+ Vue.nextTick(() => {
+ expect(new MetricsUrlTypeClass() instanceof String).toBeTruthy();
+ expect(metricsUrl.required).toBeTruthy();
+ });
+ });
+ });
+
+ describe('data', () => {
+ it('should have default data', () => {
+ const data = memoryUsageComponent.data();
+
+ expect(Array.isArray(data.memoryMetrics)).toBeTruthy();
+ expect(data.memoryMetrics.length).toBe(0);
+
+ expect(typeof data.deploymentTime).toBe('number');
+ expect(data.deploymentTime).toBe(0);
+
+ expect(typeof data.hasMetrics).toBe('boolean');
+ expect(data.hasMetrics).toBeFalsy();
+
+ expect(typeof data.loadFailed).toBe('boolean');
+ expect(data.loadFailed).toBeFalsy();
+
+ expect(typeof data.loadingMetrics).toBe('boolean');
+ expect(data.loadingMetrics).toBeTruthy();
+
+ expect(typeof data.backOffRequestCounter).toBe('number');
+ expect(data.backOffRequestCounter).toBe(0);
+ });
+ });
+
+ describe('methods', () => {
+ const { metrics, deployment_time } = metricsMockData;
+
+ describe('computeGraphData', () => {
+ it('should populate sparkline graph', () => {
+ vm.computeGraphData(metrics, deployment_time);
+ const { hasMetrics, memoryMetrics, deploymentTime } = vm;
+
+ expect(hasMetrics).toBeTruthy();
+ expect(memoryMetrics.length > 0).toBeTruthy();
+ expect(deploymentTime).toEqual(deployment_time);
+ });
+ });
+
+ describe('loadMetrics', () => {
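+ // resolved response stub whose json() yields the mocked metrics payload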
+ const returnServicePromise = () => new Promise((resolve) => {
+ resolve({
+ json() {
+ return metricsMockData;
+ },
+ });
+ });
+
+ it('should load metrics data using MRWidgetService', (done) => {
+ spyOn(MRWidgetService, 'fetchMetrics').and.returnValue(returnServicePromise(true));
+ spyOn(vm, 'computeGraphData');
+
+ vm.loadMetrics();
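+ // fetchMetrics resolves asynchronously, so wait before checking that the graph data was computed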
+ setTimeout(() => {
+ expect(MRWidgetService.fetchMetrics).toHaveBeenCalledWith(url);
+ expect(vm.computeGraphData).toHaveBeenCalledWith(metrics, deployment_time);
+ done();
+ }, 333);
+ });
+ });
+ });
+
+ describe('template', () => {
+ it('should render template elements correctly', () => {
+ expect(el.classList.contains('mr-memory-usage')).toBeTruthy();
+ expect(el.querySelector('.js-usage-info')).toBeDefined();
+ });
+
+ it('should show loading metrics message while metrics are being loaded', (done) => {
+ vm.loadingMetrics = true;
+ vm.hasMetrics = false;
+ vm.loadFailed = false;
+
+ Vue.nextTick(() => {
+ expect(el.querySelector('.js-usage-info.usage-info-loading')).toBeDefined();
+ expect(el.querySelector('.js-usage-info .usage-info-load-spinner')).toBeDefined();
+ expect(el.querySelector('.js-usage-info').innerText).toContain(messages.loadingMetrics);
+ done();
+ });
+ });
+
+ it('should show deployment memory usage when metrics are loaded', (done) => {
+ vm.loadingMetrics = false;
+ vm.hasMetrics = true;
+ vm.loadFailed = false;
+
+ Vue.nextTick(() => {
+ expect(el.querySelector('.memory-graph-container')).toBeDefined();
+ expect(el.querySelector('.js-usage-info').innerText).toContain(messages.hasMetrics);
+ done();
+ });
+ });
+
+ it('should show failure message when metrics loading failed', (done) => {
+ vm.loadingMetrics = false;
+ vm.hasMetrics = false;
+ vm.loadFailed = true;
+
+ Vue.nextTick(() => {
+ expect(el.querySelector('.js-usage-info.usage-info-failed')).toBeDefined();
+ expect(el.querySelector('.js-usage-info').innerText).toContain(messages.loadFailed);
+ done();
+ });
+ });
+
+ it('should show metrics unavailable message when there are no metrics and loading did not fail', (done) => {
+ vm.loadingMetrics = false;
+ vm.hasMetrics = false;
+ vm.loadFailed = false;
+
+ Vue.nextTick(() => {
+ expect(el.querySelector('.js-usage-info.usage-info-unavailable')).toBeDefined();
+ expect(el.querySelector('.js-usage-info').innerText).toContain(messages.metricsUnavailable);
+ done();
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_merge_help_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_merge_help_spec.js
new file mode 100644
index 00000000000..4da4fc82c26
--- /dev/null
+++ b/spec/javascripts/vue_mr_widget/components/mr_widget_merge_help_spec.js
@@ -0,0 +1,51 @@
+import Vue from 'vue';
+import mergeHelpComponent from '~/vue_merge_request_widget/components/mr_widget_merge_help';
+
+const props = {
+ missingBranch: 'this-is-not-the-branch-you-are-looking-for',
+};
+const text = `If the ${props.missingBranch} branch exists in your local repository`;
+
+const createComponent = () => {
+ const Component = Vue.extend(mergeHelpComponent);
+ return new Component({
+ el: document.createElement('div'),
+ propsData: props,
+ });
+};
+
+describe('MRWidgetMergeHelp', () => {
+ describe('props', () => {
+ it('should have props', () => {
+ const { missingBranch } = mergeHelpComponent.props;
+ const MissingBranchTypeClass = missingBranch.type;
+
+ expect(new MissingBranchTypeClass() instanceof String).toBeTruthy();
+ expect(missingBranch.required).toBeFalsy();
+ expect(missingBranch.default).toEqual('');
+ });
+ });
+
+ describe('template', () => {
+ let vm;
+ let el;
+
+ beforeEach(() => {
+ vm = createComponent();
+ el = vm.$el;
+ });
+
+ it('should have the correct elements', () => {
+ expect(el.classList.contains('mr-widget-help')).toBeTruthy();
+ expect(el.textContent).toContain(text);
+ });
+
+ it('should not show the missing branch name if the missingBranch prop is not provided', (done) => {
+ vm.missingBranch = null;
+ Vue.nextTick(() => {
+ expect(el.textContent).not.toContain(text);
+ done();
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js
new file mode 100644
index 00000000000..647b59520f8
--- /dev/null
+++ b/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js
@@ -0,0 +1,131 @@
+import Vue from 'vue';
+import { statusIconEntityMap } from '~/vue_shared/ci_status_icons';
+import pipelineComponent from '~/vue_merge_request_widget/components/mr_widget_pipeline';
+import mockData from '../mock_data';
+
+const createComponent = (mr) => {
+ const Component = Vue.extend(pipelineComponent);
+ return new Component({
+ el: document.createElement('div'),
+ propsData: { mr },
+ });
+};
+
+describe('MRWidgetPipeline', () => {
+ describe('props', () => {
+ it('should have props', () => {
+ const { mr } = pipelineComponent.props;
+
+ expect(mr.type instanceof Object).toBeTruthy();
+ expect(mr.required).toBeTruthy();
+ });
+ });
+
+ describe('components', () => {
+ it('should have components added', () => {
+ expect(pipelineComponent.components['pipeline-stage']).toBeDefined();
+ expect(pipelineComponent.components.ciIcon).toBeDefined();
+ });
+ });
+
+ describe('computed', () => {
+ describe('svg', () => {
+ it('should have the proper SVG icon', () => {
+ const vm = createComponent({ pipeline: mockData.pipeline });
+
+ expect(vm.svg).toEqual(statusIconEntityMap.icon_status_failed);
+ });
+ });
+
+ describe('hasCIError', () => {
+ it('should return false when there is no CI error', () => {
+ const vm = createComponent({
+ pipeline: mockData.pipeline,
+ hasCI: true,
+ ciStatus: 'success',
+ });
+
+ expect(vm.hasCIError).toBeFalsy();
+ });
+
+ it('should return true when there is a CI error', () => {
+ const vm = createComponent({
+ pipeline: mockData.pipeline,
+ hasCI: true,
+ ciStatus: null,
+ });
+
+ expect(vm.hasCIError).toBeTruthy();
+ });
+ });
+ });
+
+ describe('template', () => {
+ let vm;
+ let el;
+ const { pipeline } = mockData;
+ const mr = {
+ hasCI: true,
+ ciStatus: 'success',
+ pipelineDetailedStatus: pipeline.details.status,
+ pipeline,
+ };
+
+ beforeEach(() => {
+ vm = createComponent(mr);
+ el = vm.$el;
+ });
+
+ it('should render template elements correctly', () => {
+ expect(el.classList.contains('mr-widget-heading')).toBeTruthy();
+ expect(el.querySelectorAll('.ci-status-icon.ci-status-icon-success').length).toEqual(1);
+ expect(el.querySelector('.pipeline-id').textContent).toContain(`#${pipeline.id}`);
+ expect(el.innerText).toContain('passed');
+ expect(el.innerText).toContain('with stages');
+ expect(el.querySelector('.pipeline-id').getAttribute('href')).toEqual(pipeline.path);
+ expect(el.querySelectorAll('.stage-container').length).toEqual(2);
+ expect(el.querySelector('.js-ci-error')).toEqual(null);
+ expect(el.querySelector('.js-commit-link').getAttribute('href')).toEqual(pipeline.commit.commit_path);
+ expect(el.querySelector('.js-commit-link').textContent).toContain(pipeline.commit.short_id);
+ expect(el.querySelector('.js-mr-coverage').textContent).toContain(`Coverage ${pipeline.coverage}%.`);
+ });
+
+ it('should list a single stage', (done) => {
+ pipeline.details.stages.splice(0, 1);
+
+ Vue.nextTick(() => {
+ expect(el.querySelectorAll('.stage-container button').length).toEqual(1);
+ expect(el.innerText).toContain('with stage');
+ done();
+ });
+ });
+
+ it('should not have stages when there is no stage', (done) => {
+ vm.mr.pipeline.details.stages = [];
+
+ Vue.nextTick(() => {
+ expect(el.querySelectorAll('.stage-container button').length).toEqual(0);
+ done();
+ });
+ });
+
+ it('should not have coverage text when pipeline has no coverage info', (done) => {
+ vm.mr.pipeline.coverage = null;
+
+ Vue.nextTick(() => {
+ expect(el.querySelector('.js-mr-coverage')).toEqual(null);
+ done();
+ });
+ });
+
+ it('should show CI error when there is a CI error', (done) => {
+ vm.mr.ciStatus = null;
+
+ Vue.nextTick(() => {
+ expect(el.querySelectorAll('.js-ci-error').length).toEqual(1);
+ expect(el.innerText).toContain('Could not connect to the CI server');
+ done();
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_related_links_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_related_links_spec.js
new file mode 100644
index 00000000000..f6e0c3dfb74
--- /dev/null
+++ b/spec/javascripts/vue_mr_widget/components/mr_widget_related_links_spec.js
@@ -0,0 +1,138 @@
+import Vue from 'vue';
+import relatedLinksComponent from '~/vue_merge_request_widget/components/mr_widget_related_links';
+
+const createComponent = (data) => {
+ const Component = Vue.extend(relatedLinksComponent);
+
+ return new Component({
+ el: document.createElement('div'),
+ propsData: data,
+ });
+};
+
+describe('MRWidgetRelatedLinks', () => {
+ describe('props', () => {
+ it('should have props', () => {
+ const { relatedLinks } = relatedLinksComponent.props;
+
+ expect(relatedLinks).toBeDefined();
+ expect(relatedLinks.type instanceof Object).toBeTruthy();
+ expect(relatedLinks.required).toBeTruthy();
+ });
+ });
+
+ describe('computed', () => {
+ describe('hasLinks', () => {
+ it('should return true while at least one link is present and false once all are cleared', () => {
+ const data = {
+ relatedLinks: {
+ closing: '/foo',
+ mentioned: '/foo',
+ assignToMe: '/foo',
+ },
+ };
+ const vm = createComponent(data);
+ expect(vm.hasLinks).toBeTruthy();
+
+ vm.relatedLinks.closing = null;
+ expect(vm.hasLinks).toBeTruthy();
+
+ vm.relatedLinks.mentioned = null;
+ expect(vm.hasLinks).toBeTruthy();
+
+ vm.relatedLinks.assignToMe = null;
+ expect(vm.hasLinks).toBeFalsy();
+ });
+ });
+ });
+
+ describe('methods', () => {
+ const data = {
+ relatedLinks: {
+ closing: '<a href="#">#23</a> and <a>#42</a>',
+ mentioned: '<a href="#">#7</a>',
+ },
+ };
+ const vm = createComponent(data);
+
+ describe('hasMultipleIssues', () => {
+ it('should return true if the given text has multiple issues', () => {
+ expect(vm.hasMultipleIssues(data.relatedLinks.closing)).toBeTruthy();
+ });
+
+ it('should return false if the given text has one issue', () => {
+ expect(vm.hasMultipleIssues(data.relatedLinks.mentioned)).toBeFalsy();
+ });
+ });
+
+ describe('issueLabel', () => {
+ it("should return 'issues' when the key refers to multiple issues", () => {
+ expect(vm.issueLabel('closing')).toEqual('issues');
+ });
+
+ it("should return 'issue' when the key refers to a single issue", () => {
+ expect(vm.issueLabel('mentioned')).toEqual('issue');
+ });
+ });
+
+ describe('verbLabel', () => {
+ it("should return 'are' when the key refers to multiple issues", () => {
+ expect(vm.verbLabel('closing')).toEqual('are');
+ });
+
+ it("should return 'is' when the key refers to a single issue", () => {
+ expect(vm.verbLabel('mentioned')).toEqual('is');
+ });
+ });
+ });
+
+ describe('template', () => {
+ it('should only have closing issues text', () => {
+ const vm = createComponent({
+ relatedLinks: {
+ closing: '<a href="#">#23</a> and <a>#42</a>',
+ },
+ });
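+ // collapse the template's newlines and indentation so the sentence can be matched as plain text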
+ const content = vm.$el.textContent.replace(/\n(\s)+/g, ' ').trim();
+
+ expect(content).toContain('Closes issues #23 and #42');
+ expect(content).not.toContain('mentioned');
+ });
+
+ it('should only have mentioned issues text', () => {
+ const vm = createComponent({
+ relatedLinks: {
+ mentioned: '<a href="#">#7</a>',
+ },
+ });
+
+ expect(vm.$el.innerText).toContain('issue #7');
+ expect(vm.$el.innerText).toContain('is mentioned but will not be closed.');
+ expect(vm.$el.innerText).not.toContain('Closes');
+ });
+
+ it('should have closing and mentioned issues at the same time', () => {
+ const vm = createComponent({
+ relatedLinks: {
+ closing: '<a href="#">#7</a>',
+ mentioned: '<a href="#">#23</a> and <a>#42</a>',
+ },
+ });
+ const content = vm.$el.textContent.replace(/\n(\s)+/g, ' ').trim();
+
+ expect(content).toContain('Closes issue #7.');
+ expect(content).toContain('issues #23 and #42');
+ expect(content).toContain('are mentioned but will not be closed.');
+ });
+
+ it('should have assign issues link', () => {
+ const vm = createComponent({
+ relatedLinks: {
+ assignToMe: '<a href="#">Assign yourself to these issues</a>',
+ },
+ });
+
+ expect(vm.$el.innerText).toContain('Assign yourself to these issues');
+ });
+ });
+});
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_archived_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_archived_spec.js
new file mode 100644
index 00000000000..cac2f561a0b
--- /dev/null
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_archived_spec.js
@@ -0,0 +1,18 @@
+import Vue from 'vue';
+import archivedComponent from '~/vue_merge_request_widget/components/states/mr_widget_archived';
+
+describe('MRWidgetArchived', () => {
+ describe('template', () => {
+ it('should have correct elements', () => {
+ const Component = Vue.extend(archivedComponent);
+ const el = new Component({
+ el: document.createElement('div'),
+ }).$el;
+
+ expect(el.classList.contains('mr-widget-body')).toBeTruthy();
+ expect(el.querySelector('button').classList.contains('btn-success')).toBeTruthy();
+ expect(el.querySelector('button').disabled).toBeTruthy();
+ expect(el.innerText).toContain('This project is archived, write access has been disabled.');
+ });
+ });
+});
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js
new file mode 100644
index 00000000000..47b4ba893e0
--- /dev/null
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js
@@ -0,0 +1,32 @@
+import Vue from 'vue';
+import autoMergeFailedComponent from '~/vue_merge_request_widget/components/states/mr_widget_auto_merge_failed';
+
+const mergeError = 'This is the merge error';
+
+describe('MRWidgetAutoMergeFailed', () => {
+ describe('props', () => {
+ it('should have props', () => {
+ const mrProp = autoMergeFailedComponent.props.mr;
+
+ expect(mrProp.type instanceof Object).toBeTruthy();
+ expect(mrProp.required).toBeTruthy();
+ });
+ });
+
+ describe('template', () => {
+ const Component = Vue.extend(autoMergeFailedComponent);
+ const vm = new Component({
+ el: document.createElement('div'),
+ propsData: {
+ mr: { mergeError },
+ },
+ });
+
+ it('should have correct elements', () => {
+ expect(vm.$el.classList.contains('mr-widget-body')).toBeTruthy();
+ expect(vm.$el.querySelector('button').getAttribute('disabled')).toBeTruthy();
+ expect(vm.$el.innerText).toContain('This merge request failed to be merged automatically.');
+ expect(vm.$el.innerText).toContain(mergeError);
+ });
+ });
+});
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_checking_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_checking_spec.js
new file mode 100644
index 00000000000..3be11d47227
--- /dev/null
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_checking_spec.js
@@ -0,0 +1,19 @@
+import Vue from 'vue';
+import checkingComponent from '~/vue_merge_request_widget/components/states/mr_widget_checking';
+
+describe('MRWidgetChecking', () => {
+ describe('template', () => {
+ it('should have correct elements', () => {
+ const Component = Vue.extend(checkingComponent);
+ const el = new Component({
+ el: document.createElement('div'),
+ }).$el;
+
+ expect(el.classList.contains('mr-widget-body')).toBeTruthy();
+ expect(el.querySelector('button').classList.contains('btn-success')).toBeTruthy();
+ expect(el.querySelector('button').disabled).toBeTruthy();
+ expect(el.innerText).toContain('Checking ability to merge automatically.');
+ expect(el.querySelector('i')).toBeDefined();
+ });
+ });
+});
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_closed_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_closed_spec.js
new file mode 100644
index 00000000000..47303d1e80f
--- /dev/null
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_closed_spec.js
@@ -0,0 +1,51 @@
+import Vue from 'vue';
+import closedComponent from '~/vue_merge_request_widget/components/states/mr_widget_closed';
+
+const mr = {
+ targetBranch: 'good-branch',
+ targetBranchPath: '/good-branch',
+ closedBy: {
+ name: 'Fatih Acet',
+ username: 'fatihacet',
+ },
+ updatedAt: '2017-03-23T20:08:08.845Z',
+ closedAt: '1 day ago',
+};
+
+const createComponent = () => {
+ const Component = Vue.extend(closedComponent);
+
+ return new Component({
+ el: document.createElement('div'),
+ propsData: { mr },
+ }).$el;
+};
+
+describe('MRWidgetClosed', () => {
+ describe('props', () => {
+ it('should have props', () => {
+ const mrProp = closedComponent.props.mr;
+
+ expect(mrProp.type instanceof Object).toBeTruthy();
+ expect(mrProp.required).toBeTruthy();
+ });
+ });
+
+ describe('components', () => {
+ it('should have components added', () => {
+ expect(closedComponent.components['mr-widget-author-and-time']).toBeDefined();
+ });
+ });
+
+ describe('template', () => {
+ it('should have correct elements', () => {
+ const el = createComponent();
+
+ expect(el.querySelector('h4').textContent).toContain('Closed by');
+ expect(el.querySelector('h4').textContent).toContain(mr.closedBy.name);
+ expect(el.textContent).toContain('The changes were not merged into');
+ expect(el.querySelector('.label-branch').getAttribute('href')).toEqual(mr.targetBranchPath);
+ expect(el.querySelector('.label-branch').textContent).toContain(mr.targetBranch);
+ });
+ });
+});
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_conflicts_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_conflicts_spec.js
new file mode 100644
index 00000000000..e7ae85caec4
--- /dev/null
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_conflicts_spec.js
@@ -0,0 +1,69 @@
+import Vue from 'vue';
+import conflictsComponent from '~/vue_merge_request_widget/components/states/mr_widget_conflicts';
+
+const path = '/conflicts';
+const createComponent = () => {
+ const Component = Vue.extend(conflictsComponent);
+
+ return new Component({
+ el: document.createElement('div'),
+ propsData: {
+ mr: {
+ canMerge: true,
+ conflictResolutionPath: path,
+ },
+ },
+ });
+};
+
+describe('MRWidgetConflicts', () => {
+ describe('props', () => {
+ it('should have props', () => {
+ const { mr } = conflictsComponent.props;
+
+ expect(mr.type instanceof Object).toBeTruthy();
+ expect(mr.required).toBeTruthy();
+ });
+ });
+
+ describe('template', () => {
+ it('should have correct elements', () => {
+ const el = createComponent().$el;
+ const resolveButton = el.querySelectorAll('.btn-group .btn')[0];
+ const mergeLocallyButton = el.querySelectorAll('.btn-group .btn')[1];
+
+ expect(el.textContent).toContain('There are merge conflicts.');
+ expect(el.textContent).not.toContain('ask someone with write access');
+ expect(el.querySelector('.btn-success').disabled).toBeTruthy();
+ expect(el.querySelectorAll('.btn-group .btn').length).toBe(2);
+ expect(resolveButton.textContent).toContain('Resolve conflicts');
+ expect(resolveButton.getAttribute('href')).toEqual(path);
+ expect(mergeLocallyButton.textContent).toContain('Merge locally');
+ });
+
+ describe('when user does not have permission to merge', () => {
+ let vm;
+
+ beforeEach(() => {
+ vm = createComponent();
+ vm.mr.canMerge = false;
+ });
+
+ it('should show proper message', (done) => {
+ Vue.nextTick(() => {
+ expect(vm.$el.textContent).toContain('ask someone with write access');
+ done();
+ });
+ });
+
+ it('should not have action buttons', (done) => {
+ Vue.nextTick(() => {
+ expect(vm.$el.querySelectorAll('.btn').length).toBe(1);
+ expect(vm.$el.querySelector('a.js-resolve-conflicts-button')).toEqual(null);
+ expect(vm.$el.querySelector('a.js-merge-locally-button')).toEqual(null);
+ done();
+ });
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_failed_to_merge_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_failed_to_merge_spec.js
new file mode 100644
index 00000000000..587b83430d9
--- /dev/null
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_failed_to_merge_spec.js
@@ -0,0 +1,122 @@
+import Vue from 'vue';
+import failedToMergeComponent from '~/vue_merge_request_widget/components/states/mr_widget_failed_to_merge';
+import eventHub from '~/vue_merge_request_widget/event_hub';
+
+const mr = {
+ mergeError: 'Merge error happened.',
+};
+const createComponent = () => {
+ const Component = Vue.extend(failedToMergeComponent);
+ return new Component({
+ el: document.createElement('div'),
+ propsData: { mr },
+ });
+};
+
+describe('MRWidgetFailedToMerge', () => {
+ describe('data', () => {
+ it('should have default data', () => {
+ const data = failedToMergeComponent.data();
+
+ expect(data.timer).toEqual(10);
+ expect(data.isRefreshing).toBeFalsy();
+ });
+ });
+
+ describe('computed', () => {
+ describe('timerText', () => {
+ it('should return correct timer text', () => {
+ const vm = createComponent();
+ expect(vm.timerText).toEqual('10 seconds');
+
+ vm.timer = 1;
+ expect(vm.timerText).toEqual('a second');
+ });
+ });
+ });
+
+ describe('created', () => {
+ it('should disable polling', () => {
+ spyOn(eventHub, '$emit');
+ createComponent();
+
+ expect(eventHub.$emit).toHaveBeenCalledWith('DisablePolling');
+ });
+ });
+
+ describe('methods', () => {
+ describe('refresh', () => {
+ it('should emit event to request component refresh', () => {
+ spyOn(eventHub, '$emit');
+ const vm = createComponent();
+
+ expect(vm.isRefreshing).toBeFalsy();
+
+ vm.refresh();
+ expect(vm.isRefreshing).toBeTruthy();
+ expect(eventHub.$emit).toHaveBeenCalledWith('MRWidgetUpdateRequested');
+ expect(eventHub.$emit).toHaveBeenCalledWith('EnablePolling');
+ });
+ });
+
+ describe('updateTimer', () => {
+ it('should update the timer and emit an event when the timer ends', () => {
+ const vm = createComponent();
+ spyOn(vm, 'refresh');
+
+ expect(vm.timer).toEqual(10);
+
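+ // step the countdown through ten ticks; once the timer reaches zero, refresh() should have been called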
+ for (let i = 0; i < 10; i++) { // eslint-disable-line
+ expect(vm.timer).toEqual(10 - i);
+ vm.updateTimer();
+ }
+
+ expect(vm.refresh).toHaveBeenCalled();
+ });
+ });
+ });
+
+ describe('template', () => {
+ let vm;
+ let el;
+
+ beforeEach(() => {
+ vm = createComponent();
+ el = vm.$el;
+ });
+
+ it('should have correct elements', (done) => {
+ expect(el.classList.contains('mr-widget-body')).toBeTruthy();
+ expect(el.innerText).toContain('Merge error happened.');
+ expect(el.innerText).toContain('Refreshing in 10 seconds');
+ expect(el.innerText).not.toContain('Merge failed.');
+ expect(el.querySelector('button').getAttribute('disabled')).toBeTruthy();
+ expect(el.querySelector('button').innerText).toContain('Merge');
+ expect(el.querySelector('.js-refresh-button').innerText).toContain('Refresh now');
+ expect(el.querySelector('.js-refresh-label')).toEqual(null);
+ expect(el.innerText).not.toContain('Refreshing now...');
+ setTimeout(() => {
+ expect(el.innerText).toContain('Refreshing in 9 seconds');
+ done();
+ }, 1010);
+ });
+
+ it('should show just the generic merge failed message if merge_error is not available', (done) => {
+ vm.mr.mergeError = null;
+
+ Vue.nextTick(() => {
+ expect(el.innerText).toContain('Merge failed.');
+ expect(el.innerText).not.toContain('Merge error happened.');
+ done();
+ });
+ });
+
+ it('should show refresh label when refresh requested', (done) => {
+ vm.refresh();
+ Vue.nextTick(() => {
+ expect(el.innerText).not.toContain('Merge failed. Refreshing');
+ expect(el.innerText).toContain('Refreshing now...');
+ done();
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_locked_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_locked_spec.js
new file mode 100644
index 00000000000..fb2ef606604
--- /dev/null
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_locked_spec.js
@@ -0,0 +1,33 @@
+import Vue from 'vue';
+import lockedComponent from '~/vue_merge_request_widget/components/states/mr_widget_locked';
+
+describe('MRWidgetLocked', () => {
+ describe('props', () => {
+ it('should have props', () => {
+ const { mr } = lockedComponent.props;
+
+ expect(mr.type instanceof Object).toBeTruthy();
+ expect(mr.required).toBeTruthy();
+ });
+ });
+
+ describe('template', () => {
+ it('should have correct elements', () => {
+ const Component = Vue.extend(lockedComponent);
+ const mr = {
+ targetBranchPath: '/branch-path',
+ targetBranch: 'branch',
+ };
+ const el = new Component({
+ el: document.createElement('div'),
+ propsData: { mr },
+ }).$el;
+
+ expect(el.classList.contains('mr-widget-body')).toBeTruthy();
+ expect(el.innerText).toContain('it is locked');
+ expect(el.innerText).toContain('changes will be merged into');
+ expect(el.querySelector('.label-branch a').getAttribute('href')).toEqual(mr.targetBranchPath);
+ expect(el.querySelector('.label-branch a').textContent).toContain(mr.targetBranch);
+ });
+ });
+});
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_merge_when_pipeline_succeeds_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_merge_when_pipeline_succeeds_spec.js
new file mode 100644
index 00000000000..8d8b90cea16
--- /dev/null
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_merge_when_pipeline_succeeds_spec.js
@@ -0,0 +1,213 @@
+import Vue from 'vue';
+import mwpsComponent from '~/vue_merge_request_widget/components/states/mr_widget_merge_when_pipeline_succeeds';
+import eventHub from '~/vue_merge_request_widget/event_hub';
+
+const targetBranchPath = '/foo/bar';
+const targetBranch = 'foo';
+const sha = '1EA2EZ34';
+
+const createComponent = () => {
+ const Component = Vue.extend(mwpsComponent);
+ const mr = {
+ shouldRemoveSourceBranch: false,
+ canRemoveSourceBranch: true,
+ canCancelAutomaticMerge: true,
+ mergeUserId: 1,
+ currentUserId: 1,
+ setToMWPSBy: {},
+ sha,
+ targetBranchPath,
+ targetBranch,
+ };
+
+ const service = {
+ cancelAutomaticMerge() {},
+ mergeResource: {
+ save() {},
+ },
+ };
+
+ return new Component({
+ el: document.createElement('div'),
+ propsData: { mr, service },
+ });
+};
+
+describe('MRWidgetMergeWhenPipelineSucceeds', () => {
+ describe('props', () => {
+ it('should have props', () => {
+ const { mr, service } = mwpsComponent.props;
+
+ expect(mr.type instanceof Object).toBeTruthy();
+ expect(mr.required).toBeTruthy();
+
+ expect(service.type instanceof Object).toBeTruthy();
+ expect(service.required).toBeTruthy();
+ });
+ });
+
+ describe('components', () => {
+ it('should have components added', () => {
+ expect(mwpsComponent.components['mr-widget-author']).toBeDefined();
+ });
+ });
+
+ describe('data', () => {
+ it('should have default data', () => {
+ const data = mwpsComponent.data();
+
+ expect(data.isCancellingAutoMerge).toBeFalsy();
+ expect(data.isRemovingSourceBranch).toBeFalsy();
+ });
+ });
+
+ describe('computed', () => {
+ describe('canRemoveSourceBranch', () => {
+ it('should return true when user is able to remove source branch', () => {
+ const vm = createComponent();
+
+ expect(vm.canRemoveSourceBranch).toBeTruthy();
+ });
+
+ it('should return false when the user id is not the same as the one who set MWPS', () => {
+ const vm = createComponent();
+
+ vm.mr.mergeUserId = 2;
+ expect(vm.canRemoveSourceBranch).toBeFalsy();
+
+ vm.mr.currentUserId = 2;
+ expect(vm.canRemoveSourceBranch).toBeTruthy();
+
+ vm.mr.currentUserId = 3;
+ expect(vm.canRemoveSourceBranch).toBeFalsy();
+ });
+
+ it('should return false when shouldRemoveSourceBranch is set to true', () => {
+ const vm = createComponent();
+
+ vm.mr.shouldRemoveSourceBranch = true;
+ expect(vm.canRemoveSourceBranch).toBeFalsy();
+ });
+
+ it('should return false if user is not able to remove the source branch', () => {
+ const vm = createComponent();
+
+ vm.mr.canRemoveSourceBranch = false;
+ expect(vm.canRemoveSourceBranch).toBeFalsy();
+ });
+ });
+ });
+
+ describe('methods', () => {
+ describe('cancelAutomaticMerge', () => {
+ it('should set flag and call service then tell main component to update the widget with data', (done) => {
+ const vm = createComponent();
+ const mrObj = {
+ is_new_mr_data: true,
+ };
+ spyOn(eventHub, '$emit');
+ spyOn(vm.service, 'cancelAutomaticMerge').and.returnValue(new Promise((resolve) => {
+ resolve({
+ json() {
+ return mrObj;
+ },
+ });
+ }));
+
+ vm.cancelAutomaticMerge();
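+ // wait for the mocked cancelAutomaticMerge promise to resolve before checking the emitted widget update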
+ setTimeout(() => {
+ expect(vm.isCancellingAutoMerge).toBeTruthy();
+ expect(eventHub.$emit).toHaveBeenCalledWith('UpdateWidgetData', mrObj);
+ done();
+ }, 333);
+ });
+ });
+
+ describe('removeSourceBranch', () => {
+ it('should set flag and call service then request main component to update the widget', (done) => {
+ const vm = createComponent();
+ spyOn(eventHub, '$emit');
+ spyOn(vm.service.mergeResource, 'save').and.returnValue(new Promise((resolve) => {
+ resolve({
+ json() {
+ return {
+ status: 'merge_when_pipeline_succeeds',
+ };
+ },
+ });
+ }));
+
+ vm.removeSourceBranch();
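+ // wait for the mocked save promise to resolve before verifying the emitted event and merge parameters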
+ setTimeout(() => {
+ expect(eventHub.$emit).toHaveBeenCalledWith('MRWidgetUpdateRequested');
+ expect(vm.service.mergeResource.save).toHaveBeenCalledWith({
+ sha,
+ merge_when_pipeline_succeeds: true,
+ should_remove_source_branch: true,
+ });
+ done();
+ }, 333);
+ });
+ });
+ });
+
+ describe('template', () => {
+ let vm;
+ let el;
+
+ beforeEach(() => {
+ vm = createComponent();
+ el = vm.$el;
+ });
+
+ it('should have correct elements', () => {
+ expect(el.classList.contains('mr-widget-body')).toBeTruthy();
+ expect(el.innerText).toContain('to be merged automatically when the pipeline succeeds.');
+ expect(el.innerText).toContain('The changes will be merged into');
+ expect(el.innerText).toContain(targetBranch);
+ expect(el.innerText).toContain('The source branch will not be removed.');
+ expect(el.querySelector('.js-cancel-auto-merge').innerText).toContain('Cancel automatic merge');
+ expect(el.querySelector('.js-cancel-auto-merge').getAttribute('disabled')).toBeFalsy();
+ expect(el.querySelector('.js-remove-source-branch').innerText).toContain('Remove source branch');
+ expect(el.querySelector('.js-remove-source-branch').getAttribute('disabled')).toBeFalsy();
+ });
+
+ it('should disable cancel auto merge button when the action is in progress', (done) => {
+ vm.isCancellingAutoMerge = true;
+
+ Vue.nextTick(() => {
+ expect(el.querySelector('.js-cancel-auto-merge').getAttribute('disabled')).toBeTruthy();
+ done();
+ });
+ });
+
+ it('should show the source branch will be removed text when the source branch is set to be removed', (done) => {
+ vm.mr.shouldRemoveSourceBranch = true;
+
+ Vue.nextTick(() => {
+ const normalizedText = el.innerText.replace(/\s+/g, ' ');
+ expect(normalizedText).toContain('The source branch will be removed.');
+ expect(normalizedText).not.toContain('The source branch will not be removed.');
+ done();
+ });
+ });
+
+ it('should not show the remove source branch button when the user is not able to remove the source branch', (done) => {
+ vm.mr.currentUserId = 4;
+
+ Vue.nextTick(() => {
+ expect(el.querySelector('.js-remove-source-branch')).toEqual(null);
+ done();
+ });
+ });
+
+ it('should disable remove source branch button when the action is in progress', (done) => {
+ vm.isRemovingSourceBranch = true;
+
+ Vue.nextTick(() => {
+ expect(el.querySelector('.js-remove-source-branch').getAttribute('disabled')).toBeTruthy();
+ done();
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_merged_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_merged_spec.js
new file mode 100644
index 00000000000..6628010112d
--- /dev/null
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_merged_spec.js
@@ -0,0 +1,174 @@
+import Vue from 'vue';
+import mergedComponent from '~/vue_merge_request_widget/components/states/mr_widget_merged';
+import eventHub from '~/vue_merge_request_widget/event_hub';
+
+const targetBranch = 'foo';
+
+const createComponent = () => {
+ const Component = Vue.extend(mergedComponent);
+ const mr = {
+ isRemovingSourceBranch: false,
+ cherryPickInForkPath: false,
+ canCherryPickInCurrentMR: true,
+ revertInForkPath: false,
+ canRevertInCurrentMR: true,
+ canRemoveSourceBranch: true,
+ sourceBranchRemoved: true,
+ mergedBy: {},
+ mergedAt: '',
+ updatedAt: '',
+ targetBranch,
+ };
+
+ const service = {
+ removeSourceBranch() {},
+ };
+
+ return new Component({
+ el: document.createElement('div'),
+ propsData: { mr, service },
+ });
+};
+
+describe('MRWidgetMerged', () => {
+ describe('props', () => {
+ it('should have props', () => {
+ const { mr, service } = mergedComponent.props;
+
+ expect(mr.type instanceof Object).toBeTruthy();
+ expect(mr.required).toBeTruthy();
+
+ expect(service.type instanceof Object).toBeTruthy();
+ expect(service.required).toBeTruthy();
+ });
+ });
+
+ describe('components', () => {
+ it('should have components added', () => {
+ expect(mergedComponent.components['mr-widget-author-and-time']).toBeDefined();
+ });
+ });
+
+ describe('data', () => {
+ it('should have default data', () => {
+ const data = mergedComponent.data();
+
+ expect(data.isMakingRequest).toBeFalsy();
+ });
+ });
+
+ describe('computed', () => {
+ describe('shouldShowRemoveSourceBranch', () => {
+ it('should return the correct value when fields change', () => {
+ const vm = createComponent();
+ vm.mr.sourceBranchRemoved = false;
+ expect(vm.shouldShowRemoveSourceBranch).toBeTruthy();
+
+ vm.mr.sourceBranchRemoved = true;
+ expect(vm.shouldShowRemoveSourceBranch).toBeFalsy();
+
+ vm.mr.sourceBranchRemoved = false;
+ vm.mr.canRemoveSourceBranch = false;
+ expect(vm.shouldShowRemoveSourceBranch).toBeFalsy();
+
+ vm.mr.canRemoveSourceBranch = true;
+ vm.isMakingRequest = true;
+ expect(vm.shouldShowRemoveSourceBranch).toBeFalsy();
+
+ vm.mr.isRemovingSourceBranch = true;
+ vm.mr.canRemoveSourceBranch = true;
+ vm.isMakingRequest = true;
+ expect(vm.shouldShowRemoveSourceBranch).toBeFalsy();
+ });
+ });
+ describe('shouldShowSourceBranchRemoving', () => {
+ it('should return the correct value when fields change', () => {
+ const vm = createComponent();
+ vm.mr.sourceBranchRemoved = false;
+ expect(vm.shouldShowSourceBranchRemoving).toBeFalsy();
+
+ vm.mr.sourceBranchRemoved = true;
+ expect(vm.shouldShowSourceBranchRemoving).toBeFalsy();
+
+ vm.mr.sourceBranchRemoved = false;
+ vm.isMakingRequest = true;
+ expect(vm.shouldShowSourceBranchRemoving).toBeTruthy();
+
+ vm.isMakingRequest = false;
+ vm.mr.isRemovingSourceBranch = true;
+ expect(vm.shouldShowSourceBranchRemoving).toBeTruthy();
+ });
+ });
+ });
+
+ describe('methods', () => {
+ describe('removeSourceBranch', () => {
+ it('should set flag and call service then request main component to update the widget', (done) => {
+ const vm = createComponent();
+ spyOn(eventHub, '$emit');
+ spyOn(vm.service, 'removeSourceBranch').and.returnValue(new Promise((resolve) => {
+ resolve({
+ json() {
+ return {
+ message: 'Branch was removed',
+ };
+ },
+ });
+ }));
+
+ vm.removeSourceBranch();
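+ // wait for the mocked removeSourceBranch promise to resolve before inspecting the emitted event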
+ setTimeout(() => {
+ const args = eventHub.$emit.calls.argsFor(0);
+ expect(vm.isMakingRequest).toBeTruthy();
+ expect(args[0]).toEqual('MRWidgetUpdateRequested');
+ expect(args[1]).not.toThrow();
+ done();
+ }, 333);
+ });
+ });
+ });
+
+ describe('template', () => {
+ let vm;
+ let el;
+
+ beforeEach(() => {
+ vm = createComponent();
+ el = vm.$el;
+ });
+
+ it('should have correct elements', () => {
+ expect(el.classList.contains('mr-widget-body')).toBeTruthy();
+ expect(el.querySelector('.js-mr-widget-author')).toBeDefined();
+ expect(el.innerText).toContain('The changes were merged into');
+ expect(el.innerText).toContain(targetBranch);
+ expect(el.innerText).toContain('The source branch has been removed.');
+ expect(el.innerText).toContain('Revert');
+ expect(el.innerText).toContain('Cherry-pick');
+ expect(el.innerText).not.toContain('You can remove source branch now.');
+ expect(el.innerText).not.toContain('The source branch is being removed.');
+ });
+
+ it('should not show source branch removed text', (done) => {
+ vm.mr.sourceBranchRemoved = false;
+
+ Vue.nextTick(() => {
+ expect(el.innerText).toContain('You can remove source branch now.');
+ expect(el.innerText).not.toContain('The source branch has been removed.');
+ done();
+ });
+ });
+
+ it('should show source branch removing text', (done) => {
+ vm.mr.isRemovingSourceBranch = true;
+ vm.mr.sourceBranchRemoved = false;
+
+ Vue.nextTick(() => {
+ expect(el.innerText).toContain('The source branch is being removed.');
+ expect(el.innerText).not.toContain('You can remove source branch now.');
+ expect(el.innerText).not.toContain('The source branch has been removed.');
+ done();
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_missing_branch_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_missing_branch_spec.js
new file mode 100644
index 00000000000..98674d12afb
--- /dev/null
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_missing_branch_spec.js
@@ -0,0 +1,55 @@
+import Vue from 'vue';
+import missingBranchComponent from '~/vue_merge_request_widget/components/states/mr_widget_missing_branch';
+
+const createComponent = () => {
+ const Component = Vue.extend(missingBranchComponent);
+ const mr = {
+ sourceBranchRemoved: true,
+ };
+
+ return new Component({
+ el: document.createElement('div'),
+ propsData: { mr },
+ });
+};
+
+describe('MRWidgetMissingBranch', () => {
+ describe('props', () => {
+ it('should have props', () => {
+ const mrProp = missingBranchComponent.props.mr;
+
+ expect(mrProp.type instanceof Object).toBeTruthy();
+ expect(mrProp.required).toBeTruthy();
+ });
+ });
+
+ describe('components', () => {
+ it('should have components added', () => {
+ expect(missingBranchComponent.components['mr-widget-merge-help']).toBeDefined();
+ });
+ });
+
+ describe('computed', () => {
+ describe('missingBranchName', () => {
+ it('should return proper branch name', () => {
+ const vm = createComponent();
+ expect(vm.missingBranchName).toEqual('source');
+
+ vm.mr.sourceBranchRemoved = false;
+ expect(vm.missingBranchName).toEqual('target');
+ });
+ });
+ });
+
+ describe('template', () => {
+ it('should have correct elements', () => {
+ const el = createComponent().$el;
+ const content = el.textContent.replace(/\n(\s)+/g, ' ').trim();
+
+ expect(el.classList.contains('mr-widget-body')).toBeTruthy();
+ expect(el.querySelector('button').getAttribute('disabled')).toBeTruthy();
+ expect(content).toContain('source branch does not exist.');
+ expect(content).toContain('Please restore the source branch or use a different source branch.');
+ });
+ });
+});
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_not_allowed_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_not_allowed_spec.js
new file mode 100644
index 00000000000..61e00f4cf79
--- /dev/null
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_not_allowed_spec.js
@@ -0,0 +1,17 @@
+import Vue from 'vue';
+import notAllowedComponent from '~/vue_merge_request_widget/components/states/mr_widget_not_allowed';
+
+describe('MRWidgetNotAllowed', () => {
+ describe('template', () => {
+ const Component = Vue.extend(notAllowedComponent);
+ const vm = new Component({
+ el: document.createElement('div'),
+ });
+ it('should have correct elements', () => {
+ expect(vm.$el.classList.contains('mr-widget-body')).toBeTruthy();
+ expect(vm.$el.querySelector('button').getAttribute('disabled')).toBeTruthy();
+ expect(vm.$el.innerText).toContain('Ready to be merged automatically.');
+ expect(vm.$el.innerText).toContain('Ask someone with write access to this repository to merge this request.');
+ });
+ });
+});
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_nothing_to_merge_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_nothing_to_merge_spec.js
new file mode 100644
index 00000000000..a8a02fa6b66
--- /dev/null
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_nothing_to_merge_spec.js
@@ -0,0 +1,29 @@
+import Vue from 'vue';
+import nothingToMergeComponent from '~/vue_merge_request_widget/components/states/mr_widget_nothing_to_merge';
+
+describe('MRWidgetNothingToMerge', () => {
+ describe('template', () => {
+ const Component = Vue.extend(nothingToMergeComponent);
+ const newBlobPath = '/foo';
+ const vm = new Component({
+ el: document.createElement('div'),
+ propsData: {
+ mr: { newBlobPath },
+ },
+ });
+
+ it('should have correct elements', () => {
+ expect(vm.$el.classList.contains('mr-widget-body')).toBeTruthy();
+ expect(vm.$el.querySelector('a').href).toContain(newBlobPath);
+ expect(vm.$el.innerText).toContain('Currently there are no changes in this merge request\'s source branch');
+ expect(vm.$el.innerText).toContain('Please push new commits or use a different branch.');
+ });
+
+ it('should not show new blob link if there is no link available', (done) => {
+ vm.mr.newBlobPath = null;
+ Vue.nextTick(() => {
+ expect(vm.$el.querySelector('a')).toEqual(null);
+ done();
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js
new file mode 100644
index 00000000000..b293d118571
--- /dev/null
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js
@@ -0,0 +1,16 @@
+import Vue from 'vue';
+import pipelineBlockedComponent from '~/vue_merge_request_widget/components/states/mr_widget_pipeline_blocked';
+
+describe('MRWidgetPipelineBlocked', () => {
+ describe('template', () => {
+ const Component = Vue.extend(pipelineBlockedComponent);
+ const vm = new Component({
+ el: document.createElement('div'),
+ });
+ it('should have correct elements', () => {
+ expect(vm.$el.classList.contains('mr-widget-body')).toBeTruthy();
+ expect(vm.$el.querySelector('button').getAttribute('disabled')).toBeTruthy();
+ expect(vm.$el.innerText).toContain('Pipeline blocked. The pipeline for this merge request requires a manual action to proceed.');
+ });
+ });
+});
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_failed_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_failed_spec.js
new file mode 100644
index 00000000000..807fba705d4
--- /dev/null
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_failed_spec.js
@@ -0,0 +1,16 @@
+import Vue from 'vue';
+import pipelineFailedComponent from '~/vue_merge_request_widget/components/states/mr_widget_pipeline_failed';
+
+describe('MRWidgetPipelineFailed', () => {
+ describe('template', () => {
+ const Component = Vue.extend(pipelineFailedComponent);
+ const vm = new Component({
+ el: document.createElement('div'),
+ });
+ it('should have correct elements', () => {
+ expect(vm.$el.classList.contains('mr-widget-body')).toBeTruthy();
+ expect(vm.$el.querySelector('button').getAttribute('disabled')).toBeTruthy();
+ expect(vm.$el.innerText).toContain('The pipeline for this merge request failed. Please retry the job or push a new commit to fix the failure.');
+ });
+ });
+});
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
new file mode 100644
index 00000000000..d043ad38b8b
--- /dev/null
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
@@ -0,0 +1,389 @@
+import Vue from 'vue';
+import readyToMergeComponent from '~/vue_merge_request_widget/components/states/mr_widget_ready_to_merge';
+import eventHub from '~/vue_merge_request_widget/event_hub';
+import * as simplePoll from '~/lib/utils/simple_poll';
+
+const commitMessage = 'This is the commit message';
+const commitMessageWithDescription = 'This is the commit message description';
+const createComponent = () => {
+ const Component = Vue.extend(readyToMergeComponent);
+ const mr = {
+ isPipelineActive: false,
+ pipeline: null,
+ isPipelineFailed: false,
+ onlyAllowMergeIfPipelineSucceeds: false,
+ hasCI: false,
+ ciStatus: null,
+ sha: '12345678',
+ commitMessage,
+ commitMessageWithDescription,
+ };
+
+ const service = {
+ merge() {},
+ poll() {},
+ };
+
+ return new Component({
+ el: document.createElement('div'),
+ propsData: { mr, service },
+ });
+};
+
+describe('MRWidgetReadyToMerge', () => {
+ let vm;
+
+ beforeEach(() => {
+ vm = createComponent();
+ });
+
+ describe('props', () => {
+ it('should have props', () => {
+ const { mr, service } = readyToMergeComponent.props;
+
+ expect(mr.type instanceof Object).toBeTruthy();
+ expect(mr.required).toBeTruthy();
+
+ expect(service.type instanceof Object).toBeTruthy();
+ expect(service.required).toBeTruthy();
+ });
+ });
+
+ describe('data', () => {
+ it('should have default data', () => {
+ expect(vm.removeSourceBranch).toBeTruthy();
+ expect(vm.mergeWhenBuildSucceeds).toBeFalsy();
+ expect(vm.useCommitMessageWithDescription).toBeFalsy();
+ expect(vm.setToMergeWhenPipelineSucceeds).toBeFalsy();
+ expect(vm.showCommitMessageEditor).toBeFalsy();
+ expect(vm.isMakingRequest).toBeFalsy();
+ expect(vm.isMergingImmediately).toBeFalsy();
+ expect(vm.commitMessage).toBe(vm.mr.commitMessage);
+ expect(vm.successSvg).toBeDefined();
+ expect(vm.warningSvg).toBeDefined();
+ });
+ });
+
+ describe('computed', () => {
+ describe('commitMessageLinkTitle', () => {
+ const withDesc = 'Include description in commit message';
+ const withoutDesc = "Don't include description in commit message";
+
+ it('should return message with description', () => {
+ expect(vm.commitMessageLinkTitle).toEqual(withDesc);
+ });
+
+ it('should return message without description', () => {
+ vm.useCommitMessageWithDescription = true;
+ expect(vm.commitMessageLinkTitle).toEqual(withoutDesc);
+ });
+ });
+
+ describe('mergeButtonClass', () => {
+ const defaultClass = 'btn btn-small btn-success accept-merge-request';
+ const failedClass = `${defaultClass} btn-danger`;
+ const inActionClass = `${defaultClass} btn-info`;
+
+ it('should return default class', () => {
+ vm.mr.pipeline = true;
+ expect(vm.mergeButtonClass).toEqual(defaultClass);
+ });
+
+ it('should return failed class when MR has CI but also has an unknown status', () => {
+ vm.mr.hasCI = true;
+ expect(vm.mergeButtonClass).toEqual(failedClass);
+ });
+
+ it('should return default class when MR has no pipeline', () => {
+ expect(vm.mergeButtonClass).toEqual(defaultClass);
+ });
+
+ it('should return in action class when pipeline is active', () => {
+ vm.mr.pipeline = {};
+ vm.mr.isPipelineActive = true;
+ expect(vm.mergeButtonClass).toEqual(inActionClass);
+ });
+
+ it('should return failed class when pipeline is failed', () => {
+ vm.mr.pipeline = {};
+ vm.mr.isPipelineFailed = true;
+ expect(vm.mergeButtonClass).toEqual(failedClass);
+ });
+ });
+
+ describe('mergeButtonText', () => {
+ it('should return Merge', () => {
+ expect(vm.mergeButtonText).toEqual('Merge');
+ });
+
+ it('should return Merge in progress', () => {
+ vm.isMergingImmediately = true;
+ expect(vm.mergeButtonText).toEqual('Merge in progress');
+ });
+
+ it('should return Merge when pipeline succeeds', () => {
+ vm.isMergingImmediately = false;
+ vm.mr.isPipelineActive = true;
+ expect(vm.mergeButtonText).toEqual('Merge when pipeline succeeds');
+ });
+ });
+
+ describe('shouldShowMergeOptionsDropdown', () => {
+ it('should return false with initial data', () => {
+ expect(vm.shouldShowMergeOptionsDropdown).toBeFalsy();
+ });
+
+ it('should return true when pipeline active', () => {
+ vm.mr.isPipelineActive = true;
+ expect(vm.shouldShowMergeOptionsDropdown).toBeTruthy();
+ });
+
+ it('should return false when pipeline active but only merge when pipeline succeeds set in project options', () => {
+ vm.mr.isPipelineActive = true;
+ vm.mr.onlyAllowMergeIfPipelineSucceeds = true;
+ expect(vm.shouldShowMergeOptionsDropdown).toBeFalsy();
+ });
+ });
+
+ describe('isMergeButtonDisabled', () => {
+ it('should return false with initial data', () => {
+ expect(vm.isMergeButtonDisabled).toBeFalsy();
+ });
+
+ it('should return true when there is no commit message', () => {
+ vm.commitMessage = '';
+ expect(vm.isMergeButtonDisabled).toBeTruthy();
+ });
+
+ it('should return true if merge is not allowed', () => {
+ vm.mr.onlyAllowMergeIfPipelineSucceeds = true;
+ vm.mr.isPipelineFailed = true;
+ expect(vm.isMergeButtonDisabled).toBeTruthy();
+ });
+
+ it('should return true when the vm instance is making a request', () => {
+ vm.isMakingRequest = true;
+ expect(vm.isMergeButtonDisabled).toBeTruthy();
+ });
+ });
+ });
+
+ describe('methods', () => {
+ describe('isMergeAllowed', () => {
+ it('should return true with initial data', () => {
+ expect(vm.isMergeAllowed()).toBeTruthy();
+ });
+
+ it('should return true when only allow merge if pipeline succeeds is set but the pipeline has not failed', () => {
+ vm.mr.onlyAllowMergeIfPipelineSucceeds = true;
+ expect(vm.isMergeAllowed()).toBeTruthy();
+ });
+
+ it('should return false when only allow merge if pipeline succeeds is set and the pipeline has failed', () => {
+ vm.mr.onlyAllowMergeIfPipelineSucceeds = true;
+ vm.mr.isPipelineFailed = true;
+ expect(vm.isMergeAllowed()).toBeFalsy();
+ });
+ });
+
+ describe('updateCommitMessage', () => {
+ it('should revert flag and change commitMessage', () => {
+ expect(vm.useCommitMessageWithDescription).toBeFalsy();
+ expect(vm.commitMessage).toEqual(commitMessage);
+ vm.updateCommitMessage();
+ expect(vm.useCommitMessageWithDescription).toBeTruthy();
+ expect(vm.commitMessage).toEqual(commitMessageWithDescription);
+ vm.updateCommitMessage();
+ expect(vm.useCommitMessageWithDescription).toBeFalsy();
+ expect(vm.commitMessage).toEqual(commitMessage);
+ });
+ });
+
+ describe('toggleCommitMessageEditor', () => {
+ it('should toggle showCommitMessageEditor flag', () => {
+ expect(vm.showCommitMessageEditor).toBeFalsy();
+ vm.toggleCommitMessageEditor();
+ expect(vm.showCommitMessageEditor).toBeTruthy();
+ });
+ });
+
+ describe('handleMergeButtonClick', () => {
+ const returnPromise = status => new Promise((resolve) => {
+ resolve({
+ json() {
+ return { status };
+ },
+ });
+ });
+
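+ // returnPromise mimics the minimal response shape the widget consumes (an object
+ // exposing `json()`); the specs below flush the resulting promise chain inside a
+ // short, arbitrary setTimeout before asserting.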
+ it('should handle merge when pipeline succeeds', (done) => {
+ spyOn(eventHub, '$emit');
+ spyOn(vm.service, 'merge').and.returnValue(returnPromise('merge_when_pipeline_succeeds'));
+ vm.removeSourceBranch = false;
+ vm.handleMergeButtonClick(true);
+
+ setTimeout(() => {
+ expect(vm.setToMergeWhenPipelineSucceeds).toBeTruthy();
+ expect(vm.isMakingRequest).toBeTruthy();
+ expect(eventHub.$emit).toHaveBeenCalledWith('MRWidgetUpdateRequested');
+
+ const params = vm.service.merge.calls.argsFor(0)[0];
+ expect(params.sha).toEqual(vm.mr.sha);
+ expect(params.commit_message).toEqual(vm.mr.commitMessage);
+ expect(params.should_remove_source_branch).toBeFalsy();
+ expect(params.merge_when_pipeline_succeeds).toBeTruthy();
+ done();
+ }, 333);
+ });
+
+ it('should handle a failed merge', (done) => {
+ spyOn(eventHub, '$emit');
+ spyOn(vm.service, 'merge').and.returnValue(returnPromise('failed'));
+ vm.handleMergeButtonClick(false, true);
+
+ setTimeout(() => {
+ expect(vm.setToMergeWhenPipelineSucceeds).toBeFalsy();
+ expect(vm.isMakingRequest).toBeTruthy();
+ expect(eventHub.$emit).toHaveBeenCalledWith('FailedToMerge', undefined);
+
+ const params = vm.service.merge.calls.argsFor(0)[0];
+ expect(params.should_remove_source_branch).toBeTruthy();
+ expect(params.merge_when_pipeline_succeeds).toBeFalsy();
+ done();
+ }, 333);
+ });
+
+ it('should handle an accepted merge', (done) => {
+ spyOn(vm.service, 'merge').and.returnValue(returnPromise('success'));
+ spyOn(vm, 'initiateMergePolling');
+ vm.handleMergeButtonClick();
+
+ setTimeout(() => {
+ expect(vm.setToMergeWhenPipelineSucceeds).toBeFalsy();
+ expect(vm.isMakingRequest).toBeTruthy();
+ expect(vm.initiateMergePolling).toHaveBeenCalled();
+
+ const params = vm.service.merge.calls.argsFor(0)[0];
+ expect(params.should_remove_source_branch).toBeTruthy();
+ expect(params.merge_when_pipeline_succeeds).toBeFalsy();
+ done();
+ }, 333);
+ });
+ });
+
+ describe('initiateMergePolling', () => {
+ it('should call simplePoll', () => {
+ spyOn(simplePoll, 'default');
+ vm.initiateMergePolling();
+ expect(simplePoll.default).toHaveBeenCalled();
+ });
+ });
+
+ describe('handleMergePolling', () => {
+ const returnPromise = state => new Promise((resolve) => {
+ resolve({
+ json() {
+ return { state, source_branch_exists: true };
+ },
+ });
+ });
+
+ it('should stop polling and update the widget when the MR has been merged', (done) => {
+ spyOn(eventHub, '$emit');
+ spyOn(vm.service, 'poll').and.returnValue(returnPromise('merged'));
+ spyOn(vm, 'initiateRemoveSourceBranchPolling');
+
+ let cpc = false; // continuePollingCalled
+ let spc = false; // stopPollingCalled
+
+ vm.handleMergePolling(() => { cpc = true; }, () => { spc = true; });
+ setTimeout(() => {
+ expect(vm.service.poll).toHaveBeenCalled();
+ expect(eventHub.$emit).toHaveBeenCalledWith('MRWidgetUpdateRequested');
+ expect(eventHub.$emit).toHaveBeenCalledWith('FetchActionsContent');
+ expect(vm.initiateRemoveSourceBranchPolling).toHaveBeenCalled();
+ expect(cpc).toBeFalsy();
+ expect(spc).toBeTruthy();
+
+ done();
+ }, 333);
+ });
+
+ it('should continue polling until MR is merged', (done) => {
+ spyOn(vm.service, 'poll').and.returnValue(returnPromise('some_other_state'));
+ spyOn(vm, 'initiateRemoveSourceBranchPolling');
+
+ let cpc = false; // continuePollingCalled
+ let spc = false; // stopPollingCalled
+
+ vm.handleMergePolling(() => { cpc = true; }, () => { spc = true; });
+ setTimeout(() => {
+ expect(cpc).toBeTruthy();
+ expect(spc).toBeFalsy();
+
+ done();
+ }, 333);
+ });
+ });
+
+ describe('initiateRemoveSourceBranchPolling', () => {
+ it('should emit event and call simplePoll', () => {
+ spyOn(eventHub, '$emit');
+ spyOn(simplePoll, 'default');
+
+ vm.initiateRemoveSourceBranchPolling();
+ expect(eventHub.$emit).toHaveBeenCalledWith('SetBranchRemoveFlag', [true]);
+ expect(simplePoll.default).toHaveBeenCalled();
+ });
+ });
+
+ describe('handleRemoveBranchPolling', () => {
+ const returnPromise = state => new Promise((resolve) => {
+ resolve({
+ json() {
+ return { source_branch_exists: state };
+ },
+ });
+ });
+
+ it('should stop polling once the source branch has been removed', (done) => {
+ spyOn(eventHub, '$emit');
+ spyOn(vm.service, 'poll').and.returnValue(returnPromise(false));
+
+ let cpc = false; // continuePollingCalled
+ let spc = false; // stopPollingCalled
+
+ vm.handleRemoveBranchPolling(() => { cpc = true; }, () => { spc = true; });
+ setTimeout(() => {
+ expect(vm.service.poll).toHaveBeenCalled();
+
+ const args = eventHub.$emit.calls.argsFor(0);
+ expect(args[0]).toEqual('MRWidgetUpdateRequested');
+ expect(args[1]).toBeDefined();
+ args[1]();
+ expect(eventHub.$emit).toHaveBeenCalledWith('SetBranchRemoveFlag', [false]);
+
+ expect(cpc).toBeFalsy();
+ expect(spc).toBeTruthy();
+
+ done();
+ }, 333);
+ });
+
+ it('should continue polling while the source branch still exists', (done) => {
+ spyOn(vm.service, 'poll').and.returnValue(returnPromise(true));
+
+ let cpc = false; // continuePollingCalled
+ let spc = false; // stopPollingCalled
+
+ vm.handleRemoveBranchPolling(() => { cpc = true; }, () => { spc = true; });
+ setTimeout(() => {
+ expect(cpc).toBeTruthy();
+ expect(spc).toBeFalsy();
+
+ done();
+ }, 333);
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_sha_mismatch_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_sha_mismatch_spec.js
new file mode 100644
index 00000000000..5fb1d69a8b3
--- /dev/null
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_sha_mismatch_spec.js
@@ -0,0 +1,16 @@
+import Vue from 'vue';
+import shaMismatchComponent from '~/vue_merge_request_widget/components/states/mr_widget_sha_mismatch';
+
+describe('MRWidgetSHAMismatch', () => {
+ describe('template', () => {
+ const Component = Vue.extend(shaMismatchComponent);
+ const vm = new Component({
+ el: document.createElement('div'),
+ });
+ it('should have correct elements', () => {
+ expect(vm.$el.classList.contains('mr-widget-body')).toBeTruthy();
+ expect(vm.$el.querySelector('button').getAttribute('disabled')).toBeTruthy();
+ expect(vm.$el.innerText).toContain('The source branch HEAD has recently changed. Please reload the page and review the changes before merging.');
+ });
+ });
+});
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_unresolved_discussions_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_unresolved_discussions_spec.js
new file mode 100644
index 00000000000..fe87f110354
--- /dev/null
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_unresolved_discussions_spec.js
@@ -0,0 +1,47 @@
+import Vue from 'vue';
+import unresolvedDiscussionsComponent from '~/vue_merge_request_widget/components/states/mr_widget_unresolved_discussions';
+
+describe('MRWidgetUnresolvedDiscussions', () => {
+ describe('props', () => {
+ it('should have props', () => {
+ const { mr } = unresolvedDiscussionsComponent.props;
+
+ expect(mr.type instanceof Object).toBeTruthy();
+ expect(mr.required).toBeTruthy();
+ });
+ });
+
+ describe('template', () => {
+ let el;
+ let vm;
+ const path = 'foo/bar';
+
+ beforeEach(() => {
+ const Component = Vue.extend(unresolvedDiscussionsComponent);
+ const mr = {
+ createIssueToResolveDiscussionsPath: path,
+ };
+ vm = new Component({
+ el: document.createElement('div'),
+ propsData: { mr },
+ });
+ el = vm.$el;
+ });
+
+ it('should have correct elements', () => {
+ expect(el.classList.contains('mr-widget-body')).toBeTruthy();
+ expect(el.innerText).toContain('There are unresolved discussions. Please resolve these discussions');
+ expect(el.innerText).toContain('Create an issue to resolve them later');
+ expect(el.querySelector('.js-create-issue').getAttribute('href')).toEqual(path);
+ });
+
+ it('should not show create issue button if user cannot create issue', (done) => {
+ vm.mr.createIssueToResolveDiscussionsPath = '';
+
+ Vue.nextTick(() => {
+ expect(el.querySelector('.js-create-issue')).toEqual(null);
+ done();
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_wip_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_wip_spec.js
new file mode 100644
index 00000000000..45bd1a69964
--- /dev/null
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_wip_spec.js
@@ -0,0 +1,96 @@
+import Vue from 'vue';
+import wipComponent from '~/vue_merge_request_widget/components/states/mr_widget_wip';
+import eventHub from '~/vue_merge_request_widget/event_hub';
+
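+// The service below stubs removeWIP(); individual specs replace it with a spy
+// that resolves with mock MR data, so no real request is made.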
+const createComponent = () => {
+ const Component = Vue.extend(wipComponent);
+ const mr = {
+ title: 'The best MR ever',
+ removeWIPPath: '/path/to/remove/wip',
+ };
+ const service = {
+ removeWIP() {},
+ };
+ return new Component({
+ el: document.createElement('div'),
+ propsData: { mr, service },
+ });
+};
+
+describe('MRWidgetWIP', () => {
+ describe('props', () => {
+ it('should have props', () => {
+ const { mr, service } = wipComponent.props;
+
+ expect(mr.type instanceof Object).toBeTruthy();
+ expect(mr.required).toBeTruthy();
+
+ expect(service.type instanceof Object).toBeTruthy();
+ expect(service.required).toBeTruthy();
+ });
+ });
+
+ describe('data', () => {
+ it('should have default data', () => {
+ const vm = createComponent();
+ expect(vm.isMakingRequest).toBeFalsy();
+ });
+ });
+
+ describe('methods', () => {
+ const mrObj = {
+ is_new_mr_data: true,
+ };
+
+ describe('removeWIP', () => {
+ it('should make a request to service and handle response', (done) => {
+ const vm = createComponent();
+
+ spyOn(window, 'Flash').and.returnValue(true);
+ spyOn(eventHub, '$emit');
+ spyOn(vm.service, 'removeWIP').and.returnValue(new Promise((resolve) => {
+ resolve({
+ json() {
+ return mrObj;
+ },
+ });
+ }));
+
+ vm.removeWIP();
+ setTimeout(() => {
+ expect(vm.isMakingRequest).toBeTruthy();
+ expect(eventHub.$emit).toHaveBeenCalledWith('UpdateWidgetData', mrObj);
+ expect(window.Flash).toHaveBeenCalledWith('The merge request can now be merged.', 'notice');
+ done();
+ }, 333);
+ });
+ });
+ });
+
+ describe('template', () => {
+ let vm;
+ let el;
+
+ beforeEach(() => {
+ vm = createComponent();
+ el = vm.$el;
+ });
+
+ it('should have correct elements', () => {
+ expect(el.classList.contains('mr-widget-body')).toBeTruthy();
+ expect(el.innerText).toContain('This merge request is currently Work In Progress and therefore unable to merge');
+ expect(el.querySelector('button').getAttribute('disabled')).toBeTruthy();
+ expect(el.querySelector('button').innerText).toContain('Merge');
+ expect(el.querySelector('.js-remove-wip').innerText).toContain('Resolve WIP status');
+ });
+
+ it('should not show removeWIP button if user cannot update MR', (done) => {
+ vm.mr.removeWIPPath = '';
+
+ Vue.nextTick(() => {
+ expect(el.querySelector('.js-remove-wip')).toEqual(null);
+ done();
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/vue_mr_widget/mock_data.js b/spec/javascripts/vue_mr_widget/mock_data.js
new file mode 100644
index 00000000000..e6f96d5588b
--- /dev/null
+++ b/spec/javascripts/vue_mr_widget/mock_data.js
@@ -0,0 +1,214 @@
+/* eslint-disable */
+
+export default {
+ "id": 132,
+ "iid": 22,
+ "assignee_id": null,
+ "author_id": 1,
+ "description": "",
+ "lock_version": null,
+ "milestone_id": null,
+ "position": 0,
+ "state": "merged",
+ "title": "Update README.md",
+ "updated_by_id": null,
+ "created_at": "2017-04-07T12:27:26.718Z",
+ "updated_at": "2017-04-07T15:39:25.852Z",
+ "deleted_at": null,
+ "time_estimate": 0,
+ "total_time_spent": 0,
+ "human_time_estimate": null,
+ "human_total_time_spent": null,
+ "in_progress_merge_commit_sha": null,
+ "locked_at": null,
+ "merge_commit_sha": "53027d060246c8f47e4a9310fb332aa52f221775",
+ "merge_error": null,
+ "merge_params": {
+ "force_remove_source_branch": null
+ },
+ "merge_status": "can_be_merged",
+ "merge_user_id": null,
+ "merge_when_pipeline_succeeds": false,
+ "source_branch": "daaaa",
+ "source_project_id": 19,
+ "target_branch": "master",
+ "target_project_id": 19,
+ "merge_event": {
+ "author": {
+ "name": "Administrator",
+ "username": "root",
+ "id": 1,
+ "state": "active",
+ "avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
+ "web_url": "http://localhost:3000/root"
+ },
+ "updated_at": "2017-04-07T15:39:25.696Z"
+ },
+ "closed_event": null,
+ "author": {
+ "name": "Administrator",
+ "username": "root",
+ "id": 1,
+ "state": "active",
+ "avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
+ "web_url": "http://localhost:3000/root"
+ },
+ "merge_user": null,
+ "diff_head_sha": "104096c51715e12e7ae41f9333e9fa35b73f385d",
+ "diff_head_commit_short_id": "104096c5",
+ "merge_commit_message": "Merge branch 'daaaa' into 'master'\n\nUpdate README.md\n\nSee merge request !22",
+ "pipeline": {
+ "id": 172,
+ "user": {
+ "name": "Administrator",
+ "username": "root",
+ "id": 1,
+ "state": "active",
+ "avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
+ "web_url": "http://localhost:3000/root"
+ },
+ "active": false,
+ "coverage": "92.16",
+ "path": "/root/acets-app/pipelines/172",
+ "details": {
+ "status": {
+ "icon": "icon_status_success",
+ "favicon": "favicon_status_success",
+ "text": "passed",
+ "label": "passed",
+ "group": "success",
+ "has_details": true,
+ "details_path": "/root/acets-app/pipelines/172"
+ },
+ "duration": null,
+ "finished_at": "2017-04-07T14:00:14.256Z",
+ "stages": [
+ {
+ "name": "build",
+ "title": "build: failed",
+ "status": {
+ "icon": "icon_status_failed",
+ "favicon": "favicon_status_failed",
+ "text": "failed",
+ "label": "failed",
+ "group": "failed",
+ "has_details": true,
+ "details_path": "/root/acets-app/pipelines/172#build"
+ },
+ "path": "/root/acets-app/pipelines/172#build",
+ "dropdown_path": "/root/acets-app/pipelines/172/stage.json?stage=build"
+ },
+ {
+ "name": "review",
+ "title": "review: skipped",
+ "status": {
+ "icon": "icon_status_skipped",
+ "favicon": "favicon_status_skipped",
+ "text": "skipped",
+ "label": "skipped",
+ "group": "skipped",
+ "has_details": true,
+ "details_path": "/root/acets-app/pipelines/172#review"
+ },
+ "path": "/root/acets-app/pipelines/172#review",
+ "dropdown_path": "/root/acets-app/pipelines/172/stage.json?stage=review"
+ }
+ ],
+ "artifacts": [
+
+ ],
+ "manual_actions": [
+ {
+ "name": "stop_review",
+ "path": "/root/acets-app/builds/1427/play",
+ "playable": false
+ }
+ ]
+ },
+ "flags": {
+ "latest": false,
+ "triggered": false,
+ "stuck": false,
+ "yaml_errors": false,
+ "retryable": true,
+ "cancelable": false
+ },
+ "ref": {
+ "name": "daaaa",
+ "path": "/root/acets-app/tree/daaaa",
+ "tag": false,
+ "branch": true
+ },
+ "commit": {
+ "id": "104096c51715e12e7ae41f9333e9fa35b73f385d",
+ "short_id": "104096c5",
+ "title": "Update README.md",
+ "created_at": "2017-04-07T15:27:18.000+03:00",
+ "parent_ids": [
+ "2396536178668d8930c29d904e53bd4d06228b32"
+ ],
+ "message": "Update README.md",
+ "author_name": "Administrator",
+ "author_email": "admin@example.com",
+ "authored_date": "2017-04-07T15:27:18.000+03:00",
+ "committer_name": "Administrator",
+ "committer_email": "admin@example.com",
+ "committed_date": "2017-04-07T15:27:18.000+03:00",
+ "author": {
+ "name": "Administrator",
+ "username": "root",
+ "id": 1,
+ "state": "active",
+ "avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
+ "web_url": "http://localhost:3000/root"
+ },
+ "author_gravatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
+ "commit_url": "http://localhost:3000/root/acets-app/commit/104096c51715e12e7ae41f9333e9fa35b73f385d",
+ "commit_path": "/root/acets-app/commit/104096c51715e12e7ae41f9333e9fa35b73f385d"
+ },
+ "retry_path": "/root/acets-app/pipelines/172/retry",
+ "created_at": "2017-04-07T12:27:19.520Z",
+ "updated_at": "2017-04-07T15:28:44.800Z"
+ },
+ "work_in_progress": false,
+ "source_branch_exists": false,
+ "mergeable_discussions_state": true,
+ "conflicts_can_be_resolved_in_ui": false,
+ "branch_missing": true,
+ "commits_count": 1,
+ "has_conflicts": false,
+ "can_be_merged": true,
+ "has_ci": true,
+ "ci_status": "success",
+ "pipeline_status_path": "/root/acets-app/merge_requests/22/pipeline_status",
+ "issues_links": {
+ "closing": "",
+ "mentioned_but_not_closing": ""
+ },
+ "current_user": {
+ "can_resolve_conflicts": true,
+ "can_remove_source_branch": false,
+ "can_revert_on_current_merge_request": true,
+ "can_cherry_pick_on_current_merge_request": true
+ },
+ "target_branch_path": "/root/acets-app/branches/master",
+ "source_branch_path": "/root/acets-app/branches/daaaa",
+ "conflict_resolution_ui_path": "/root/acets-app/merge_requests/22/conflicts",
+ "remove_wip_path": "/root/acets-app/merge_requests/22/remove_wip",
+ "cancel_merge_when_pipeline_succeeds_path": "/root/acets-app/merge_requests/22/cancel_merge_when_pipeline_succeeds",
+ "create_issue_to_resolve_discussions_path": "/root/acets-app/issues/new?merge_request_to_resolve_discussions_of=22",
+ "merge_path": "/root/acets-app/merge_requests/22/merge",
+ "cherry_pick_in_fork_path": "/root/acets-app/forks?continue%5Bnotice%5D=You%27re+not+allowed+to+make+changes+to+this+project+directly.+A+fork+of+this+project+has+been+created+that+you+can+make+changes+in%2C+so+you+can+submit+a+merge+request.+Try+to+revert+this+commit+again.&continue%5Bnotice_now%5D=You%27re+not+allowed+to+make+changes+to+this+project+directly.+A+fork+of+this+project+is+being+created+that+you+can+make+changes+in%2C+so+you+can+submit+a+merge+request.&continue%5Bto%5D=%2Froot%2Facets-app%2Fmerge_requests%2F22&namespace_key=1",
+ "revert_in_fork_path": "/root/acets-app/forks?continue%5Bnotice%5D=You%27re+not+allowed+to+make+changes+to+this+project+directly.+A+fork+of+this+project+has+been+created+that+you+can+make+changes+in%2C+so+you+can+submit+a+merge+request.+Try+to+cherry-pick+this+commit+again.&continue%5Bnotice_now%5D=You%27re+not+allowed+to+make+changes+to+this+project+directly.+A+fork+of+this+project+is+being+created+that+you+can+make+changes+in%2C+so+you+can+submit+a+merge+request.&continue%5Bto%5D=%2Froot%2Facets-app%2Fmerge_requests%2F22&namespace_key=1",
+ "email_patches_path": "/root/acets-app/merge_requests/22.patch",
+ "plain_diff_path": "/root/acets-app/merge_requests/22.diff",
+ "ci_status_path": "/root/acets-app/merge_requests/22/ci_status",
+ "status_path": "/root/acets-app/merge_requests/22.json",
+ "merge_check_path": "/root/acets-app/merge_requests/22/merge_check",
+ "ci_environments_status_url": "/root/acets-app/merge_requests/22/ci_environments_status",
+ "project_archived": false,
+ "merge_commit_message_with_description": "Merge branch 'daaaa' into 'master'\n\nUpdate README.md\n\nSee merge request !22",
+ "diverged_commits_count": 0,
+ "only_allow_merge_if_pipeline_succeeds": false,
+ "commit_change_content_path": "/root/acets-app/merge_requests/22/commit_change_content"
+}
diff --git a/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js b/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js
new file mode 100644
index 00000000000..bdc18243a15
--- /dev/null
+++ b/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js
@@ -0,0 +1,324 @@
+import Vue from 'vue';
+import MRWidgetService from '~/vue_merge_request_widget/services/mr_widget_service';
+import mrWidgetOptions from '~/vue_merge_request_widget/mr_widget_options';
+import eventHub from '~/vue_merge_request_widget/event_hub';
+import mockData from './mock_data';
+
+const createComponent = () => {
+ delete mrWidgetOptions.el; // Prevent component mounting
+ gl.mrWidgetData = mockData;
+ const Component = Vue.extend(mrWidgetOptions);
+ return new Component();
+};
+
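+// Shared response stub for the service spies below: it exposes both `json()` and
+// `body` so it can stand in for whichever form a given handler reads.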
+const returnPromise = data => new Promise((resolve) => {
+ resolve({
+ json() {
+ return data;
+ },
+ body: data,
+ });
+});
+
+describe('mrWidgetOptions', () => {
+ let vm;
+
+ beforeEach(() => {
+ vm = createComponent();
+ });
+
+ describe('data', () => {
+ it('should instantiate Store and Service', () => {
+ expect(vm.mr).toBeDefined();
+ expect(vm.service).toBeDefined();
+ });
+ });
+
+ describe('computed', () => {
+ describe('componentName', () => {
+ it('should return merged component', () => {
+ expect(vm.componentName).toEqual('mr-widget-merged');
+ });
+
+ it('should return conflicts component', () => {
+ vm.mr.state = 'conflicts';
+ expect(vm.componentName).toEqual('mr-widget-conflicts');
+ });
+ });
+
+ describe('shouldRenderMergeHelp', () => {
+ it('should return false for the initial merged state', () => {
+ expect(vm.shouldRenderMergeHelp).toBeFalsy();
+ });
+
+ it('should return true for a state which requires help widget', () => {
+ vm.mr.state = 'conflicts';
+ expect(vm.shouldRenderMergeHelp).toBeTruthy();
+ });
+ });
+
+ describe('shouldRenderPipelines', () => {
+ it('should return true for the initial data', () => {
+ expect(vm.shouldRenderPipelines).toBeTruthy();
+ });
+
+ it('should return true when pipeline is empty but MR.hasCI is set to true', () => {
+ vm.mr.pipeline = {};
+ expect(vm.shouldRenderPipelines).toBeTruthy();
+ });
+
+ it('should return true when pipeline available', () => {
+ vm.mr.hasCI = false;
+ expect(vm.shouldRenderPipelines).toBeTruthy();
+ });
+
+ it('should return false when there is no pipeline', () => {
+ vm.mr.pipeline = {};
+ vm.mr.hasCI = false;
+ expect(vm.shouldRenderPipelines).toBeFalsy();
+ });
+ });
+
+ describe('shouldRenderRelatedLinks', () => {
+ it('should return false for the initial data', () => {
+ expect(vm.shouldRenderRelatedLinks).toBeFalsy();
+ });
+
+ it('should return true if relatedLinks are present in MR', () => {
+ vm.mr.relatedLinks = {};
+ expect(vm.shouldRenderRelatedLinks).toBeTruthy();
+ });
+ });
+
+ describe('shouldRenderDeployments', () => {
+ it('should return false for the initial data', () => {
+ expect(vm.shouldRenderDeployments).toBeFalsy();
+ });
+
+ it('should return true if there are deployments', () => {
+ vm.mr.deployments.push({}, {});
+ expect(vm.shouldRenderDeployments).toBeTruthy();
+ });
+ });
+ });
+
+ describe('methods', () => {
+ describe('checkStatus', () => {
+ it('should tell service to check status', (done) => {
+ spyOn(vm.service, 'checkStatus').and.returnValue(returnPromise(mockData));
+ spyOn(vm.mr, 'setData');
+ let isCbExecuted = false;
+ const cb = () => {
+ isCbExecuted = true;
+ };
+
+ vm.checkStatus(cb);
+
+ setTimeout(() => {
+ expect(vm.service.checkStatus).toHaveBeenCalled();
+ expect(vm.mr.setData).toHaveBeenCalled();
+ expect(isCbExecuted).toBeTruthy();
+ done();
+ }, 333);
+ });
+ });
+
+ describe('initPolling', () => {
+ it('should call SmartInterval', () => {
+ spyOn(gl, 'SmartInterval').and.returnValue({
+ resume() {},
+ stopTimer() {},
+ });
+ vm.initPolling();
+
+ expect(vm.pollingInterval).toBeDefined();
+ expect(gl.SmartInterval).toHaveBeenCalled();
+ });
+ });
+
+ describe('initDeploymentsPolling', () => {
+ it('should call SmartInterval', () => {
+ spyOn(gl, 'SmartInterval');
+ vm.initDeploymentsPolling();
+
+ expect(vm.deploymentsInterval).toBeDefined();
+ expect(gl.SmartInterval).toHaveBeenCalled();
+ });
+ });
+
+ describe('fetchDeployments', () => {
+ it('should fetch deployments', (done) => {
+ spyOn(vm.service, 'fetchDeployments').and.returnValue(returnPromise([{ deployment: 1 }]));
+
+ vm.fetchDeployments();
+
+ setTimeout(() => {
+ expect(vm.service.fetchDeployments).toHaveBeenCalled();
+ expect(vm.mr.deployments.length).toEqual(1);
+ expect(vm.mr.deployments[0].deployment).toEqual(1);
+ done();
+ }, 333);
+ });
+ });
+
+ describe('fetchActionsContent', () => {
+ it('should fetch content of Cherry Pick and Revert modals', (done) => {
+ spyOn(vm.service, 'fetchMergeActionsContent').and.returnValue(returnPromise('hello world'));
+
+ vm.fetchActionsContent();
+
+ setTimeout(() => {
+ expect(vm.service.fetchMergeActionsContent).toHaveBeenCalled();
+ expect(document.body.textContent).toContain('hello world');
+ done();
+ }, 333);
+ });
+ });
+
+ describe('bindEventHubListeners', () => {
+ it('should bind eventHub listeners', () => {
+ spyOn(vm, 'checkStatus').and.returnValue(() => {});
+ spyOn(vm.service, 'checkStatus').and.returnValue(returnPromise(mockData));
+ spyOn(vm, 'fetchActionsContent');
+ spyOn(vm.mr, 'setData');
+ spyOn(vm, 'resumePolling');
+ spyOn(vm, 'stopPolling');
+ spyOn(eventHub, '$on');
+
+ vm.bindEventHubListeners();
+
+ eventHub.$emit('SetBranchRemoveFlag', ['flag']);
+ expect(vm.mr.isRemovingSourceBranch).toEqual('flag');
+
+ eventHub.$emit('FailedToMerge');
+ expect(vm.mr.state).toEqual('failedToMerge');
+
+ eventHub.$emit('UpdateWidgetData', mockData);
+ expect(vm.mr.setData).toHaveBeenCalledWith(mockData);
+
+ eventHub.$emit('EnablePolling');
+ expect(vm.resumePolling).toHaveBeenCalled();
+
+ eventHub.$emit('DisablePolling');
+ expect(vm.stopPolling).toHaveBeenCalled();
+
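+ // Capture the callbacks registered through eventHub.$on so the two handlers
+ // that hit the service can be invoked directly and asserted on below.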
+ const listenersWithServiceRequest = {
+ MRWidgetUpdateRequested: true,
+ FetchActionsContent: true,
+ };
+
+ const allArgs = eventHub.$on.calls.allArgs();
+ allArgs.forEach((params) => {
+ const eventName = params[0];
+ const callback = params[1];
+
+ if (listenersWithServiceRequest[eventName]) {
+ listenersWithServiceRequest[eventName] = callback;
+ }
+ });
+
+ listenersWithServiceRequest.MRWidgetUpdateRequested();
+ expect(vm.checkStatus).toHaveBeenCalled();
+
+ listenersWithServiceRequest.FetchActionsContent();
+ expect(vm.fetchActionsContent).toHaveBeenCalled();
+ });
+ });
+
+ describe('handleMounted', () => {
+ it('should call required methods to do the initial kick-off', () => {
+ spyOn(vm, 'initDeploymentsPolling');
+ spyOn(vm, 'setFavicon');
+
+ vm.handleMounted();
+
+ expect(vm.setFavicon).toHaveBeenCalled();
+ expect(vm.initDeploymentsPolling).toHaveBeenCalled();
+ });
+ });
+
+ describe('setFavicon', () => {
+ it('should call setFavicon method', () => {
+ spyOn(gl.utils, 'setFavicon');
+ vm.setFavicon();
+
+ expect(gl.utils.setFavicon).toHaveBeenCalledWith(vm.mr.ciStatusFaviconPath);
+ });
+
+ it('should not call setFavicon when there is no ciStatusFaviconPath', () => {
+ spyOn(gl.utils, 'setFavicon');
+ vm.mr.ciStatusFaviconPath = null;
+ vm.setFavicon();
+
+ expect(gl.utils.setFavicon).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('resumePolling', () => {
+ it('should call resume on pollingInterval', () => {
+ spyOn(vm.pollingInterval, 'resume');
+
+ vm.resumePolling();
+ expect(vm.pollingInterval.resume).toHaveBeenCalled();
+ });
+ });
+
+ describe('stopPolling', () => {
+ it('should call stopTimer on pollingInterval', () => {
+ spyOn(vm.pollingInterval, 'stopTimer');
+
+ vm.stopPolling();
+ expect(vm.pollingInterval.stopTimer).toHaveBeenCalled();
+ });
+ });
+
+ describe('createService', () => {
+ it('should instantiate a Service', () => {
+ const endpoints = {
+ mergePath: '/nice/path',
+ mergeCheckPath: '/nice/path',
+ cancelAutoMergePath: '/nice/path',
+ removeWIPPath: '/nice/path',
+ sourceBranchPath: '/nice/path',
+ ciEnvironmentsStatusPath: '/nice/path',
+ statusPath: '/nice/path',
+ mergeActionsContentPath: '/nice/path',
+ };
+
+ const serviceInstance = vm.createService(endpoints);
+ const isInstanceOfMRService = serviceInstance instanceof MRWidgetService;
+ expect(isInstanceOfMRService).toBe(true);
+ Object.keys(serviceInstance).forEach((key) => {
+ expect(serviceInstance[key]).toBeDefined();
+ });
+ });
+ });
+ });
+
+ describe('components', () => {
+ it('should register all components', () => {
+ const comps = mrWidgetOptions.components;
+ expect(comps['mr-widget-header']).toBeDefined();
+ expect(comps['mr-widget-merge-help']).toBeDefined();
+ expect(comps['mr-widget-pipeline']).toBeDefined();
+ expect(comps['mr-widget-deployment']).toBeDefined();
+ expect(comps['mr-widget-related-links']).toBeDefined();
+ expect(comps['mr-widget-merged']).toBeDefined();
+ expect(comps['mr-widget-closed']).toBeDefined();
+ expect(comps['mr-widget-locked']).toBeDefined();
+ expect(comps['mr-widget-failed-to-merge']).toBeDefined();
+ expect(comps['mr-widget-wip']).toBeDefined();
+ expect(comps['mr-widget-archived']).toBeDefined();
+ expect(comps['mr-widget-conflicts']).toBeDefined();
+ expect(comps['mr-widget-nothing-to-merge']).toBeDefined();
+ expect(comps['mr-widget-not-allowed']).toBeDefined();
+ expect(comps['mr-widget-missing-branch']).toBeDefined();
+ expect(comps['mr-widget-ready-to-merge']).toBeDefined();
+ expect(comps['mr-widget-checking']).toBeDefined();
+ expect(comps['mr-widget-unresolved-discussions']).toBeDefined();
+ expect(comps['mr-widget-pipeline-blocked']).toBeDefined();
+ expect(comps['mr-widget-pipeline-failed']).toBeDefined();
+ expect(comps['mr-widget-merge-when-pipeline-succeeds']).toBeDefined();
+ });
+ });
+});
diff --git a/spec/javascripts/vue_mr_widget/services/mr_widget_service_spec.js b/spec/javascripts/vue_mr_widget/services/mr_widget_service_spec.js
new file mode 100644
index 00000000000..b63633c03b8
--- /dev/null
+++ b/spec/javascripts/vue_mr_widget/services/mr_widget_service_spec.js
@@ -0,0 +1,46 @@
+import Vue from 'vue';
+import VueResource from 'vue-resource';
+import MRWidgetService from '~/vue_merge_request_widget/services/mr_widget_service';
+
+Vue.use(VueResource);
+
+describe('MRWidgetService', () => {
+ const mr = {
+ mergePath: './',
+ mergeCheckPath: './',
+ cancelAutoMergePath: './',
+ removeWIPPath: './',
+ sourceBranchPath: './',
+ ciEnvironmentsStatusPath: './',
+ statusPath: './',
+ mergeActionsContentPath: './',
+ isServiceStore: true,
+ };
+
+ it('should have store and resources created in constructor', () => {
+ const service = new MRWidgetService(mr);
+
+ expect(service.mergeResource).toBeDefined();
+ expect(service.mergeCheckResource).toBeDefined();
+ expect(service.cancelAutoMergeResource).toBeDefined();
+ expect(service.removeWIPResource).toBeDefined();
+ expect(service.removeSourceBranchResource).toBeDefined();
+ expect(service.deploymentsResource).toBeDefined();
+ expect(service.pollResource).toBeDefined();
+ expect(service.mergeActionsContentResource).toBeDefined();
+ });
+
+ it('should have methods defined', () => {
+ const service = new MRWidgetService(mr);
+
+ expect(service.merge()).toBeDefined();
+ expect(service.cancelAutomaticMerge()).toBeDefined();
+ expect(service.removeWIP()).toBeDefined();
+ expect(service.removeSourceBranch()).toBeDefined();
+ expect(service.fetchDeployments()).toBeDefined();
+ expect(service.poll()).toBeDefined();
+ expect(service.checkStatus()).toBeDefined();
+ expect(service.fetchMergeActionsContent()).toBeDefined();
+ expect(MRWidgetService.stopEnvironment()).toBeDefined();
+ });
+});
diff --git a/spec/javascripts/vue_mr_widget/stores/get_state_key_spec.js b/spec/javascripts/vue_mr_widget/stores/get_state_key_spec.js
new file mode 100644
index 00000000000..9a331d99865
--- /dev/null
+++ b/spec/javascripts/vue_mr_widget/stores/get_state_key_spec.js
@@ -0,0 +1,65 @@
+import getStateKey from '~/vue_merge_request_widget/stores/get_state_key';
+
+describe('getStateKey', () => {
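+ // The flags below are flipped cumulatively: getStateKey appears to evaluate
+ // states in priority order, so each newly-set condition overrides the previously
+ // expected key.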
+ it('should return proper state name', () => {
+ const context = {
+ mergeStatus: 'checked',
+ mergeWhenPipelineSucceeds: false,
+ canMerge: true,
+ onlyAllowMergeIfPipelineSucceeds: false,
+ isPipelineFailed: false,
+ hasMergeableDiscussionsState: false,
+ isPipelineBlocked: false,
+ canBeMerged: false,
+ };
+ const data = {
+ project_archived: false,
+ branch_missing: false,
+ commits_count: 2,
+ has_conflicts: false,
+ work_in_progress: false,
+ };
+ const bound = getStateKey.bind(context, data);
+ expect(bound()).toEqual(null);
+
+ context.canBeMerged = true;
+ expect(bound()).toEqual('readyToMerge');
+
+ context.hasSHAChanged = true;
+ expect(bound()).toEqual('shaMismatch');
+
+ context.isPipelineBlocked = true;
+ expect(bound()).toEqual('pipelineBlocked');
+
+ context.hasMergeableDiscussionsState = true;
+ expect(bound()).toEqual('unresolvedDiscussions');
+
+ context.onlyAllowMergeIfPipelineSucceeds = true;
+ context.isPipelineFailed = true;
+ expect(bound()).toEqual('pipelineFailed');
+
+ context.canMerge = false;
+ expect(bound()).toEqual('notAllowedToMerge');
+
+ context.mergeWhenPipelineSucceeds = true;
+ expect(bound()).toEqual('mergeWhenPipelineSucceeds');
+
+ data.work_in_progress = true;
+ expect(bound()).toEqual('workInProgress');
+
+ data.has_conflicts = true;
+ expect(bound()).toEqual('conflicts');
+
+ context.mergeStatus = 'unchecked';
+ expect(bound()).toEqual('checking');
+
+ data.commits_count = 0;
+ expect(bound()).toEqual('nothingToMerge');
+
+ data.branch_missing = true;
+ expect(bound()).toEqual('missingBranch');
+
+ data.project_archived = true;
+ expect(bound()).toEqual('archived');
+ });
+});
diff --git a/spec/javascripts/vue_mr_widget/stores/mr_widget_store_spec.js b/spec/javascripts/vue_mr_widget/stores/mr_widget_store_spec.js
new file mode 100644
index 00000000000..56dd0198ae2
--- /dev/null
+++ b/spec/javascripts/vue_mr_widget/stores/mr_widget_store_spec.js
@@ -0,0 +1,22 @@
+import MergeRequestStore from '~/vue_merge_request_widget/stores/mr_widget_store';
+import mockData from '../mock_data';
+
+describe('MergeRequestStore', () => {
+ describe('setData', () => {
+ let store;
+
+ beforeEach(() => {
+ store = new MergeRequestStore(mockData);
+ });
+
+ it('should set hasSHAChanged when the diff SHA changes', () => {
+ store.setData({ ...mockData, diff_head_sha: 'a-different-string' });
+ expect(store.hasSHAChanged).toBe(true);
+ });
+
+ it('should not set hasSHAChanged when other data changes', () => {
+ store.setData({ ...mockData, work_in_progress: !mockData.work_in_progress });
+ expect(store.hasSHAChanged).toBe(false);
+ });
+ });
+});
diff --git a/spec/javascripts/vue_shared/ci_action_icons_spec.js b/spec/javascripts/vue_shared/ci_action_icons_spec.js
new file mode 100644
index 00000000000..3d53a5ab24d
--- /dev/null
+++ b/spec/javascripts/vue_shared/ci_action_icons_spec.js
@@ -0,0 +1,27 @@
+import getActionIcon from '~/vue_shared/ci_action_icons';
+import cancelSVG from 'icons/_icon_action_cancel.svg';
+import retrySVG from 'icons/_icon_action_retry.svg';
+import playSVG from 'icons/_icon_action_play.svg';
+import stopSVG from 'icons/_icon_action_stop.svg';
+
+describe('getActionIcon', () => {
+ it('should return an empty string', () => {
+ expect(getActionIcon()).toEqual('');
+ });
+
+ it('should return cancel svg', () => {
+ expect(getActionIcon('icon_action_cancel')).toEqual(cancelSVG);
+ });
+
+ it('should return retry svg', () => {
+ expect(getActionIcon('icon_action_retry')).toEqual(retrySVG);
+ });
+
+ it('should return play svg', () => {
+ expect(getActionIcon('icon_action_play')).toEqual(playSVG);
+ });
+
+ it('should return stop svg', () => {
+ expect(getActionIcon('icon_action_stop')).toEqual(stopSVG);
+ });
+});
diff --git a/spec/javascripts/vue_shared/ci_status_icon_spec.js b/spec/javascripts/vue_shared/ci_status_icon_spec.js
new file mode 100644
index 00000000000..b6621d6054d
--- /dev/null
+++ b/spec/javascripts/vue_shared/ci_status_icon_spec.js
@@ -0,0 +1,27 @@
+import { borderlessStatusIconEntityMap, statusIconEntityMap } from '~/vue_shared/ci_status_icons';
+
+describe('CI status icons', () => {
+ const statuses = [
+ 'icon_status_canceled',
+ 'icon_status_created',
+ 'icon_status_failed',
+ 'icon_status_manual',
+ 'icon_status_pending',
+ 'icon_status_running',
+ 'icon_status_skipped',
+ 'icon_status_success',
+ 'icon_status_warning',
+ ];
+
+ it('should have a dictionary for borderless icons', () => {
+ statuses.forEach((status) => {
+ expect(borderlessStatusIconEntityMap[status]).toBeDefined();
+ });
+ });
+
+ it('should have a dictionary for icons', () => {
+ statuses.forEach((status) => {
+ expect(statusIconEntityMap[status]).toBeDefined();
+ });
+ });
+});
diff --git a/spec/javascripts/vue_shared/components/ci_badge_link_spec.js b/spec/javascripts/vue_shared/components/ci_badge_link_spec.js
new file mode 100644
index 00000000000..daed4da3e15
--- /dev/null
+++ b/spec/javascripts/vue_shared/components/ci_badge_link_spec.js
@@ -0,0 +1,89 @@
+import Vue from 'vue';
+import ciBadge from '~/vue_shared/components/ci_badge_link.vue';
+
+describe('CI Badge Link Component', () => {
+ let CIBadge;
+
+ const statuses = {
+ canceled: {
+ text: 'canceled',
+ label: 'canceled',
+ group: 'canceled',
+ icon: 'icon_status_canceled',
+ details_path: 'status/canceled',
+ },
+ created: {
+ text: 'created',
+ label: 'created',
+ group: 'created',
+ icon: 'icon_status_created',
+ details_path: 'status/created',
+ },
+ failed: {
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ icon: 'icon_status_failed',
+ details_path: 'status/failed',
+ },
+ manual: {
+ text: 'manual',
+ label: 'manual action',
+ group: 'manual',
+ icon: 'icon_status_manual',
+ details_path: 'status/manual',
+ },
+ pending: {
+ text: 'pending',
+ label: 'pending',
+ group: 'pending',
+ icon: 'icon_status_pending',
+ details_path: 'status/pending',
+ },
+ running: {
+ text: 'running',
+ label: 'running',
+ group: 'running',
+ icon: 'icon_status_running',
+ details_path: 'status/running',
+ },
+ skipped: {
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ icon: 'icon_status_skipped',
+ details_path: 'status/skipped',
+ },
+ success_warning: {
+ text: 'passed',
+ label: 'passed',
+ group: 'success_with_warnings',
+ icon: 'icon_status_warning',
+ details_path: 'status/warning',
+ },
+ success: {
+ text: 'passed',
+ label: 'passed',
+ group: 'passed',
+ icon: 'icon_status_success',
+ details_path: 'status/passed',
+ },
+ };
+
+ it('should render each status badge', () => {
+ CIBadge = Vue.extend(ciBadge);
+ Object.keys(statuses).map((status) => {
+ const vm = new CIBadge({
+ propsData: {
+ status: statuses[status],
+ },
+ }).$mount();
+
+ expect(vm.$el.getAttribute('href')).toEqual(statuses[status].details_path);
+ expect(vm.$el.textContent.trim()).toEqual(statuses[status].text);
+ expect(vm.$el.getAttribute('class')).toEqual(`ci-status ci-${statuses[status].group}`);
+ expect(vm.$el.querySelector('svg')).toBeDefined();
+ return vm;
+ });
+ });
+});
diff --git a/spec/javascripts/vue_shared/components/ci_icon_spec.js b/spec/javascripts/vue_shared/components/ci_icon_spec.js
new file mode 100644
index 00000000000..d8664408595
--- /dev/null
+++ b/spec/javascripts/vue_shared/components/ci_icon_spec.js
@@ -0,0 +1,139 @@
+import Vue from 'vue';
+import ciIcon from '~/vue_shared/components/ci_icon.vue';
+
+describe('CI Icon component', () => {
+ let CiIcon;
+ beforeEach(() => {
+ CiIcon = Vue.extend(ciIcon);
+ });
+
+ it('should render a span element with an svg', () => {
+ const component = new CiIcon({
+ propsData: {
+ status: {
+ icon: 'icon_status_success',
+ },
+ },
+ }).$mount();
+
+ expect(component.$el.tagName).toEqual('SPAN');
+ expect(component.$el.querySelector('span > svg')).toBeDefined();
+ });
+
+ it('should render a success status', () => {
+ const component = new CiIcon({
+ propsData: {
+ status: {
+ icon: 'icon_status_success',
+ group: 'success',
+ },
+ },
+ }).$mount();
+
+ expect(component.$el.classList.contains('ci-status-icon-success')).toEqual(true);
+ });
+
+ it('should render a failed status', () => {
+ const component = new CiIcon({
+ propsData: {
+ status: {
+ icon: 'icon_status_failed',
+ group: 'failed',
+ },
+ },
+ }).$mount();
+
+ expect(component.$el.classList.contains('ci-status-icon-failed')).toEqual(true);
+ });
+
+ it('should render success with warnings status', () => {
+ const component = new CiIcon({
+ propsData: {
+ status: {
+ icon: 'icon_status_warning',
+ group: 'warning',
+ },
+ },
+ }).$mount();
+
+ expect(component.$el.classList.contains('ci-status-icon-warning')).toEqual(true);
+ });
+
+ it('should render pending status', () => {
+ const component = new CiIcon({
+ propsData: {
+ status: {
+ icon: 'icon_status_pending',
+ group: 'pending',
+ },
+ },
+ }).$mount();
+
+ expect(component.$el.classList.contains('ci-status-icon-pending')).toEqual(true);
+ });
+
+ it('should render running status', () => {
+ const component = new CiIcon({
+ propsData: {
+ status: {
+ icon: 'icon_status_running',
+ group: 'running',
+ },
+ },
+ }).$mount();
+
+ expect(component.$el.classList.contains('ci-status-icon-running')).toEqual(true);
+ });
+
+ it('should render created status', () => {
+ const component = new CiIcon({
+ propsData: {
+ status: {
+ icon: 'icon_status_created',
+ group: 'created',
+ },
+ },
+ }).$mount();
+
+ expect(component.$el.classList.contains('ci-status-icon-created')).toEqual(true);
+ });
+
+ it('should render skipped status', () => {
+ const component = new CiIcon({
+ propsData: {
+ status: {
+ icon: 'icon_status_skipped',
+ group: 'skipped',
+ },
+ },
+ }).$mount();
+
+ expect(component.$el.classList.contains('ci-status-icon-skipped')).toEqual(true);
+ });
+
+ it('should render canceled status', () => {
+ const component = new CiIcon({
+ propsData: {
+ status: {
+ icon: 'icon_status_canceled',
+ group: 'canceled',
+ },
+ },
+ }).$mount();
+
+ expect(component.$el.classList.contains('ci-status-icon-canceled')).toEqual(true);
+ });
+
+ it('should render status for manual action', () => {
+ const component = new CiIcon({
+ propsData: {
+ status: {
+ icon: 'icon_status_manual',
+ group: 'manual',
+ },
+ },
+ }).$mount();
+
+ expect(component.$el.classList.contains('ci-status-icon-manual')).toEqual(true);
+ });
+});
diff --git a/spec/javascripts/vue_shared/components/commit_spec.js b/spec/javascripts/vue_shared/components/commit_spec.js
index df547299d75..0638483e7aa 100644
--- a/spec/javascripts/vue_shared/components/commit_spec.js
+++ b/spec/javascripts/vue_shared/components/commit_spec.js
@@ -61,16 +61,16 @@ describe('Commit component', () => {
});
it('should render a link to the ref url', () => {
- expect(component.$el.querySelector('.branch-name').getAttribute('href')).toEqual(props.commitRef.ref_url);
+ expect(component.$el.querySelector('.ref-name').getAttribute('href')).toEqual(props.commitRef.ref_url);
});
it('should render the ref name', () => {
- expect(component.$el.querySelector('.branch-name').textContent).toContain(props.commitRef.name);
+ expect(component.$el.querySelector('.ref-name').textContent).toContain(props.commitRef.name);
});
it('should render the commit short sha with a link to the commit url', () => {
- expect(component.$el.querySelector('.commit-id').getAttribute('href')).toEqual(props.commitUrl);
- expect(component.$el.querySelector('.commit-id').textContent).toContain(props.shortSha);
+ expect(component.$el.querySelector('.commit-sha').getAttribute('href')).toEqual(props.commitUrl);
+ expect(component.$el.querySelector('.commit-sha').textContent).toContain(props.shortSha);
});
it('should render the given commitIconSvg', () => {
@@ -86,7 +86,7 @@ describe('Commit component', () => {
it('Should render the author avatar with title and alt attributes', () => {
expect(
- component.$el.querySelector('.commit-title .avatar-image-container img').getAttribute('title'),
+ component.$el.querySelector('.commit-title .avatar-image-container img').getAttribute('data-original-title'),
).toContain(props.author.username);
expect(
component.$el.querySelector('.commit-title .avatar-image-container img').getAttribute('alt'),
diff --git a/spec/javascripts/vue_shared/components/loading_icon_spec.js b/spec/javascripts/vue_shared/components/loading_icon_spec.js
new file mode 100644
index 00000000000..1baf3537741
--- /dev/null
+++ b/spec/javascripts/vue_shared/components/loading_icon_spec.js
@@ -0,0 +1,53 @@
+import Vue from 'vue';
+import loadingIcon from '~/vue_shared/components/loading_icon.vue';
+
+describe('Loading Icon Component', () => {
+ let LoadingIconComponent;
+
+ beforeEach(() => {
+ LoadingIconComponent = Vue.extend(loadingIcon);
+ });
+
+ it('should render a Font Awesome spinner icon', () => {
+ const component = new LoadingIconComponent().$mount();
+
+ expect(
+ component.$el.querySelector('i').getAttribute('class'),
+ ).toEqual('fa fa-spin fa-spinner fa-1x');
+
+ expect(component.$el.tagName).toEqual('DIV');
+ expect(component.$el.classList.contains('text-center')).toEqual(true);
+ });
+
+ it('should render accessibility attributes', () => {
+ const component = new LoadingIconComponent().$mount();
+
+ const icon = component.$el.querySelector('i');
+ expect(icon.getAttribute('aria-hidden')).toEqual('true');
+ expect(icon.getAttribute('aria-label')).toEqual('Loading');
+ });
+
+ it('should render the provided label', () => {
+ const component = new LoadingIconComponent({
+ propsData: {
+ label: 'This is a loading icon',
+ },
+ }).$mount();
+
+ expect(
+ component.$el.querySelector('i').getAttribute('aria-label'),
+ ).toEqual('This is a loading icon');
+ });
+
+ it('should render the provided size', () => {
+ const component = new LoadingIconComponent({
+ propsData: {
+ size: '2',
+ },
+ }).$mount();
+
+ expect(
+ component.$el.querySelector('i').classList.contains('fa-2x'),
+ ).toEqual(true);
+ });
+});
diff --git a/spec/javascripts/vue_shared/components/memory_graph_spec.js b/spec/javascripts/vue_shared/components/memory_graph_spec.js
new file mode 100644
index 00000000000..d46a3f2328e
--- /dev/null
+++ b/spec/javascripts/vue_shared/components/memory_graph_spec.js
@@ -0,0 +1,143 @@
+import Vue from 'vue';
+import memoryGraphComponent from '~/vue_shared/components/memory_graph';
+import { mockMetrics, mockMedian, mockMedianIndex } from './mock_data';
+
+const defaultHeight = '25';
+const defaultWidth = '100';
+
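+// The factory mounts the graph with empty props; individual specs assign the
+// plot values (pathD, pathViewBox, dotX, dotY) before asserting the rendered SVG.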
+const createComponent = () => {
+ const Component = Vue.extend(memoryGraphComponent);
+
+ return new Component({
+ el: document.createElement('div'),
+ propsData: {
+ metrics: [],
+ deploymentTime: 0,
+ width: '',
+ height: '',
+ pathD: '',
+ pathViewBox: '',
+ dotX: '',
+ dotY: '',
+ },
+ });
+};
+
+describe('MemoryGraph', () => {
+ let vm;
+ let el;
+
+ beforeEach(() => {
+ vm = createComponent();
+ el = vm.$el;
+ });
+
+ describe('props', () => {
+ it('should have props with defaults', (done) => {
+ const { metrics, deploymentTime, width, height } = memoryGraphComponent.props;
+
+ Vue.nextTick(() => {
+ const typeClassMatcher = (propItem, expectedType) => {
+ const PropItemTypeClass = propItem.type;
+ expect(new PropItemTypeClass() instanceof expectedType).toBeTruthy();
+ expect(propItem.required).toBeTruthy();
+ };
+
+ typeClassMatcher(metrics, Array);
+ typeClassMatcher(deploymentTime, Number);
+ typeClassMatcher(width, String);
+ typeClassMatcher(height, String);
+ done();
+ });
+ });
+ });
+
+ describe('data', () => {
+ it('should have default data', () => {
+ const data = memoryGraphComponent.data();
+ const dataValidator = (dataItem, expectedType, defaultVal) => {
+ expect(typeof dataItem).toBe(expectedType);
+ expect(dataItem).toBe(defaultVal);
+ };
+
+ dataValidator(data.pathD, 'string', '');
+ dataValidator(data.pathViewBox, 'string', '');
+ dataValidator(data.dotX, 'string', '');
+ dataValidator(data.dotY, 'string', '');
+ });
+ });
+
+ describe('computed', () => {
+ describe('getFormattedMedian', () => {
+ it('should show human readable median value based on provided median timestamp', () => {
+ vm.deploymentTime = mockMedian;
+ const formattedMedian = vm.getFormattedMedian;
+ expect(formattedMedian.indexOf('Deployed') > -1).toBeTruthy();
+ expect(formattedMedian.indexOf('ago') > -1).toBeTruthy();
+ });
+ });
+ });
+
+ describe('methods', () => {
+ describe('getMedianMetricIndex', () => {
+ it('should return index of closest metric timestamp to that of median', () => {
+ const matchingIndex = vm.getMedianMetricIndex(mockMedian, mockMetrics);
+ expect(matchingIndex).toBe(mockMedianIndex);
+ });
+ });
+
+ describe('getGraphPlotValues', () => {
+ it('should return Object containing values to plot graph', () => {
+ const plotValues = vm.getGraphPlotValues(mockMedian, mockMetrics);
+ expect(plotValues.pathD).toBeDefined();
+ expect(Array.isArray(plotValues.pathD)).toBeTruthy();
+
+ expect(plotValues.pathViewBox).toBeDefined();
+ expect(typeof plotValues.pathViewBox).toBe('object');
+
+ expect(plotValues.dotX).toBeDefined();
+ expect(typeof plotValues.dotX).toBe('number');
+
+ expect(plotValues.dotY).toBeDefined();
+ expect(typeof plotValues.dotY).toBe('number');
+ });
+ });
+ });
+
+ describe('template', () => {
+ it('should render template elements correctly', () => {
+ expect(el.classList.contains('memory-graph-container')).toBeTruthy();
+ expect(el.querySelector('svg')).toBeDefined();
+ });
+
+ it('should render the graph when plot values are set', (done) => {
+ const { pathD, pathViewBox, dotX, dotY } = vm.getGraphPlotValues(mockMedian, mockMetrics);
+ vm.height = defaultHeight;
+ vm.width = defaultWidth;
+ vm.pathD = `M ${pathD}`;
+ vm.pathViewBox = `0 0 ${pathViewBox.lineWidth} ${pathViewBox.diff}`;
+ vm.dotX = dotX;
+ vm.dotY = dotY;
+
+ Vue.nextTick(() => {
+ const svgEl = el.querySelector('svg');
+ expect(svgEl).toBeDefined();
+ expect(svgEl.getAttribute('height')).toBe(defaultHeight);
+ expect(svgEl.getAttribute('width')).toBe(defaultWidth);
+
+ const pathEl = el.querySelector('path');
+ expect(pathEl).toBeDefined();
+ expect(pathEl.getAttribute('d')).toBe(`M ${pathD}`);
+ expect(pathEl.getAttribute('viewBox')).toBe(`0 0 ${pathViewBox.lineWidth} ${pathViewBox.diff}`);
+
+ const circleEl = el.querySelector('circle');
+ expect(circleEl).toBeDefined();
+ expect(circleEl.getAttribute('r')).toBe('1.5');
+ expect(circleEl.getAttribute('tranform')).toBe('translate(0 -1)');
+ expect(circleEl.getAttribute('cx')).toBe(`${dotX}`);
+ expect(circleEl.getAttribute('cy')).toBe(`${dotY}`);
+ done();
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/vue_shared/components/mock_data.js b/spec/javascripts/vue_shared/components/mock_data.js
new file mode 100644
index 00000000000..0d781bdca74
--- /dev/null
+++ b/spec/javascripts/vue_shared/components/mock_data.js
@@ -0,0 +1,69 @@
+/* eslint-disable */
+
+export const mockMetrics = [
+ [1493716685, '4.30859375'],
+ [1493716745, '4.30859375'],
+ [1493716805, '4.30859375'],
+ [1493716865, '4.30859375'],
+ [1493716925, '4.30859375'],
+ [1493716985, '4.30859375'],
+ [1493717045, '4.30859375'],
+ [1493717105, '4.30859375'],
+ [1493717165, '4.30859375'],
+ [1493717225, '4.30859375'],
+ [1493717285, '4.30859375'],
+ [1493717345, '4.30859375'],
+ [1493717405, '4.30859375'],
+ [1493717465, '4.30859375'],
+ [1493717525, '4.30859375'],
+ [1493717585, '4.30859375'],
+ [1493717645, '4.30859375'],
+ [1493717705, '4.30859375'],
+ [1493717765, '4.30859375'],
+ [1493717825, '4.30859375'],
+ [1493717885, '4.30859375'],
+ [1493717945, '4.30859375'],
+ [1493718005, '4.30859375'],
+ [1493718065, '4.30859375'],
+ [1493718125, '4.30859375'],
+ [1493718185, '4.30859375'],
+ [1493718245, '4.30859375'],
+ [1493718305, '4.234375'],
+ [1493718365, '4.234375'],
+ [1493718425, '4.234375'],
+ [1493718485, '4.234375'],
+ [1493718545, '4.243489583333333'],
+ [1493718605, '4.2109375'],
+ [1493718665, '4.2109375'],
+ [1493718725, '4.2109375'],
+ [1493718785, '4.26171875'],
+ [1493718845, '4.26171875'],
+ [1493718905, '4.26171875'],
+ [1493718965, '4.26171875'],
+ [1493719025, '4.26171875'],
+ [1493719085, '4.26171875'],
+ [1493719145, '4.26171875'],
+ [1493719205, '4.26171875'],
+ [1493719265, '4.26171875'],
+ [1493719325, '4.26171875'],
+ [1493719385, '4.26171875'],
+ [1493719445, '4.26171875'],
+ [1493719505, '4.26171875'],
+ [1493719565, '4.26171875'],
+ [1493719625, '4.26171875'],
+ [1493719685, '4.26171875'],
+ [1493719745, '4.26171875'],
+ [1493719805, '4.26171875'],
+ [1493719865, '4.26171875'],
+ [1493719925, '4.26171875'],
+ [1493719985, '4.26171875'],
+ [1493720045, '4.26171875'],
+ [1493720105, '4.26171875'],
+ [1493720165, '4.26171875'],
+ [1493720225, '4.26171875'],
+ [1493720285, '4.26171875'],
+];
+
+export const mockMedian = 1493718485;
+
+export const mockMedianIndex = 30;
diff --git a/spec/javascripts/vue_shared/components/pipelines_table_row_spec.js b/spec/javascripts/vue_shared/components/pipelines_table_row_spec.js
index 699625cdbb7..286118917e8 100644
--- a/spec/javascripts/vue_shared/components/pipelines_table_row_spec.js
+++ b/spec/javascripts/vue_shared/components/pipelines_table_row_spec.js
@@ -1,27 +1,47 @@
import Vue from 'vue';
import tableRowComp from '~/vue_shared/components/pipelines_table_row';
-import pipeline from '../../commit/pipelines/mock_data';
describe('Pipelines Table Row', () => {
- let component;
-
- beforeEach(() => {
+ const jsonFixtureName = 'pipelines/pipelines.json';
+ const buildComponent = (pipeline) => {
const PipelinesTableRowComponent = Vue.extend(tableRowComp);
-
- component = new PipelinesTableRowComponent({
+ return new PipelinesTableRowComponent({
el: document.querySelector('.test-dom-element'),
propsData: {
pipeline,
service: {},
},
}).$mount();
+ };
+
+ let component;
+ let pipeline;
+ let pipelineWithoutAuthor;
+ let pipelineWithoutCommit;
+
+ preloadFixtures(jsonFixtureName);
+
+ beforeEach(() => {
+ const pipelines = getJSONFixture(jsonFixtureName).pipelines;
+ pipeline = pipelines.find(p => p.id === 1);
+ pipelineWithoutAuthor = pipelines.find(p => p.id === 2);
+ pipelineWithoutCommit = pipelines.find(p => p.id === 3);
+ });
+
+ afterEach(() => {
+ component.$destroy();
});
it('should render a table row', () => {
+ component = buildComponent(pipeline);
expect(component.$el.tagName).toEqual('TR');
});
describe('status column', () => {
+ beforeEach(() => {
+ component = buildComponent(pipeline);
+ });
+
it('should render a pipeline link', () => {
expect(
component.$el.querySelector('td.commit-link a').getAttribute('href'),
@@ -36,6 +56,10 @@ describe('Pipelines Table Row', () => {
});
describe('information column', () => {
+ beforeEach(() => {
+ component = buildComponent(pipeline);
+ });
+
it('should render a pipeline link', () => {
expect(
component.$el.querySelector('td:nth-child(2) a').getAttribute('href'),
@@ -55,7 +79,7 @@ describe('Pipelines Table Row', () => {
).toEqual(pipeline.user.web_url);
expect(
- component.$el.querySelector('td:nth-child(2) img').getAttribute('title'),
+ component.$el.querySelector('td:nth-child(2) img').getAttribute('data-original-title'),
).toEqual(pipeline.user.name);
});
});
@@ -63,13 +87,59 @@ describe('Pipelines Table Row', () => {
describe('commit column', () => {
it('should render link to commit', () => {
- expect(
- component.$el.querySelector('td:nth-child(3) .commit-id').getAttribute('href'),
- ).toEqual(pipeline.commit.commit_path);
+ component = buildComponent(pipeline);
+
+ const commitLink = component.$el.querySelector('.branch-commit .commit-sha');
+ expect(commitLink.getAttribute('href')).toEqual(pipeline.commit.commit_path);
+ });
+
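+ // Helper: grabs the commit author avatar element from the rendered row and, when present,
+ // its link href and tooltip name.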
+ const findElements = () => {
+ const commitTitleElement = component.$el.querySelector('.branch-commit .commit-title');
+ const commitAuthorElement = commitTitleElement.querySelector('a.avatar-image-container');
+
+ if (!commitAuthorElement) {
+ return { commitAuthorElement };
+ }
+
+ const commitAuthorLink = commitAuthorElement.getAttribute('href');
+ const commitAuthorName = commitAuthorElement.querySelector('img.avatar').getAttribute('data-original-title');
+
+ return { commitAuthorElement, commitAuthorLink, commitAuthorName };
+ };
+
+ it('renders nothing without commit', () => {
+ expect(pipelineWithoutCommit.commit).toBe(null);
+ component = buildComponent(pipelineWithoutCommit);
+
+ const { commitAuthorElement } = findElements();
+
+ expect(commitAuthorElement).toBe(null);
+ });
+
+ it('renders commit author', () => {
+ component = buildComponent(pipeline);
+ const { commitAuthorLink, commitAuthorName } = findElements();
+
+ expect(commitAuthorLink).toEqual(pipeline.commit.author.web_url);
+ expect(commitAuthorName).toEqual(pipeline.commit.author.username);
+ });
+
+ it('renders commit with unregistered author', () => {
+ expect(pipelineWithoutAuthor.commit.author).toBe(null);
+ component = buildComponent(pipelineWithoutAuthor);
+
+ const { commitAuthorLink, commitAuthorName } = findElements();
+
+ expect(commitAuthorLink).toEqual(`mailto:${pipelineWithoutAuthor.commit.author_email}`);
+ expect(commitAuthorName).toEqual(pipelineWithoutAuthor.commit.author_name);
});
});
describe('stages column', () => {
+ beforeEach(() => {
+ component = buildComponent(pipeline);
+ });
+
it('should render an icon for each stage', () => {
expect(
component.$el.querySelectorAll('td:nth-child(4) .js-builds-dropdown-button').length,
@@ -78,6 +148,10 @@ describe('Pipelines Table Row', () => {
});
describe('actions column', () => {
+ beforeEach(() => {
+ component = buildComponent(pipeline);
+ });
+
it('should render the provided actions', () => {
expect(
component.$el.querySelectorAll('td:nth-child(6) ul li').length,
diff --git a/spec/javascripts/vue_shared/components/pipelines_table_spec.js b/spec/javascripts/vue_shared/components/pipelines_table_spec.js
index 4d3ced944d7..6cc178b8f1d 100644
--- a/spec/javascripts/vue_shared/components/pipelines_table_spec.js
+++ b/spec/javascripts/vue_shared/components/pipelines_table_spec.js
@@ -1,13 +1,19 @@
import Vue from 'vue';
import pipelinesTableComp from '~/vue_shared/components/pipelines_table';
import '~/lib/utils/datetime_utility';
-import pipeline from '../../commit/pipelines/mock_data';
describe('Pipelines Table', () => {
+ const jsonFixtureName = 'pipelines/pipelines.json';
+
+ let pipeline;
let PipelinesTableComponent;
+ preloadFixtures(jsonFixtureName);
+
beforeEach(() => {
PipelinesTableComponent = Vue.extend(pipelinesTableComp);
+ const pipelines = getJSONFixture(jsonFixtureName).pipelines;
+ pipeline = pipelines.find(p => p.id === 1);
});
describe('table', () => {
diff --git a/spec/javascripts/vue_shared/components/table_pagination_spec.js b/spec/javascripts/vue_shared/components/table_pagination_spec.js
index 96038718191..895e1c585b4 100644
--- a/spec/javascripts/vue_shared/components/table_pagination_spec.js
+++ b/spec/javascripts/vue_shared/components/table_pagination_spec.js
@@ -1,5 +1,5 @@
import Vue from 'vue';
-import paginationComp from '~/vue_shared/components/table_pagination';
+import paginationComp from '~/vue_shared/components/table_pagination.vue';
import '~/lib/utils/common_utils';
describe('Pagination component', () => {
diff --git a/spec/javascripts/vue_shared/components/user_avatar_image_spec.js b/spec/javascripts/vue_shared/components/user_avatar_image_spec.js
new file mode 100644
index 00000000000..8daa7610274
--- /dev/null
+++ b/spec/javascripts/vue_shared/components/user_avatar_image_spec.js
@@ -0,0 +1,54 @@
+import Vue from 'vue';
+import UserAvatarImage from '~/vue_shared/components/user_avatar/user_avatar_image.vue';
+
+const UserAvatarImageComponent = Vue.extend(UserAvatarImage);
+
+describe('User Avatar Image Component', function () {
+ describe('Initialization', function () {
+ beforeEach(function () {
+ this.propsData = {
+ size: 99,
+ imgSrc: 'myavatarurl.com',
+ imgAlt: 'mydisplayname',
+ cssClasses: 'myextraavatarclass',
+ tooltipText: 'tooltip text',
+ tooltipPlacement: 'bottom',
+ };
+
+ this.userAvatarImage = new UserAvatarImageComponent({
+ propsData: this.propsData,
+ }).$mount();
+ });
+
+ it('should return a defined Vue component', function () {
+ expect(this.userAvatarImage).toBeDefined();
+ });
+
+ it('should have <img> as a child element', function () {
+ expect(this.userAvatarImage.$el.tagName).toBe('IMG');
+ });
+
+ it('should properly compute tooltipContainer', function () {
+ expect(this.userAvatarImage.tooltipContainer).toBe('body');
+ });
+
+ it('should properly render tooltipContainer', function () {
+ expect(this.userAvatarImage.$el.getAttribute('data-container')).toBe('body');
+ });
+
+ it('should properly compute avatarSizeClass', function () {
+ expect(this.userAvatarImage.avatarSizeClass).toBe('s99');
+ });
+
+ it('should properly render img css', function () {
+ const classList = this.userAvatarImage.$el.classList;
+ const containsAvatar = classList.contains('avatar');
+ const containsSizeClass = classList.contains('s99');
+ const containsCustomClass = classList.contains('myextraavatarclass');
+
+ expect(containsAvatar).toBe(true);
+ expect(containsSizeClass).toBe(true);
+ expect(containsCustomClass).toBe(true);
+ });
+ });
+});
diff --git a/spec/javascripts/vue_shared/components/user_avatar_link_spec.js b/spec/javascripts/vue_shared/components/user_avatar_link_spec.js
new file mode 100644
index 00000000000..52e450e9ba5
--- /dev/null
+++ b/spec/javascripts/vue_shared/components/user_avatar_link_spec.js
@@ -0,0 +1,50 @@
+import Vue from 'vue';
+import UserAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link.vue';
+
+describe('User Avatar Link Component', function () {
+ beforeEach(function () {
+ this.propsData = {
+ linkHref: 'myavatarurl.com',
+ imgSize: 99,
+ imgSrc: 'myavatarurl.com',
+ imgAlt: 'mydisplayname',
+ imgCssClasses: 'myextraavatarclass',
+ tooltipText: 'tooltip text',
+ tooltipPlacement: 'bottom',
+ };
+
+ const UserAvatarLinkComponent = Vue.extend(UserAvatarLink);
+
+ this.userAvatarLink = new UserAvatarLinkComponent({
+ propsData: this.propsData,
+ }).$mount();
+
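+ // The link component renders a single user-avatar-image child; grab it for the assertions below.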
+ this.userAvatarImage = this.userAvatarLink.$children[0];
+ });
+
+ it('should return a defined Vue component', function () {
+ expect(this.userAvatarLink).toBeDefined();
+ });
+
+ it('should have user-avatar-image registered as child component', function () {
+ expect(this.userAvatarLink.$options.components.userAvatarImage).toBeDefined();
+ });
+
+ it('user-avatar-link should have user-avatar-image as child component', function () {
+ expect(this.userAvatarImage).toBeDefined();
+ });
+
+ it('should render <a> as a child element', function () {
+ expect(this.userAvatarLink.$el.tagName).toBe('A');
+ });
+
+ it('should have <img> as a child element', function () {
+ expect(this.userAvatarLink.$el.querySelector('img')).not.toBeNull();
+ });
+
+ it('should return necessary props as defined', function () {
+ _.each(this.propsData, (val, key) => {
+ expect(this.userAvatarLink[key]).toBeDefined();
+ });
+ });
+});
diff --git a/spec/javascripts/vue_shared/components/user_avatar_svg_spec.js b/spec/javascripts/vue_shared/components/user_avatar_svg_spec.js
new file mode 100644
index 00000000000..b8d639ffbec
--- /dev/null
+++ b/spec/javascripts/vue_shared/components/user_avatar_svg_spec.js
@@ -0,0 +1,29 @@
+import Vue from 'vue';
+import UserAvatarSvg from '~/vue_shared/components/user_avatar/user_avatar_svg.vue';
+import avatarSvg from 'icons/_icon_random.svg';
+
+const UserAvatarSvgComponent = Vue.extend(UserAvatarSvg);
+
+describe('User Avatar Svg Component', function () {
+ describe('Initialization', function () {
+ beforeEach(function () {
+ this.propsData = {
+ size: 99,
+ svg: avatarSvg,
+ };
+
+ this.userAvatarSvg = new UserAvatarSvgComponent({
+ propsData: this.propsData,
+ }).$mount();
+ });
+
+ it('should return a defined Vue component', function () {
+ expect(this.userAvatarSvg).toBeDefined();
+ });
+
+ it('should have <svg> as a child element', function () {
+ expect(this.userAvatarSvg.$el.tagName).toEqual('svg');
+ expect(this.userAvatarSvg.$el.innerHTML).toContain('<path');
+ });
+ });
+});
diff --git a/spec/javascripts/zen_mode_spec.js b/spec/javascripts/zen_mode_spec.js
index 99515f2e5f2..4399c8b2025 100644
--- a/spec/javascripts/zen_mode_spec.js
+++ b/spec/javascripts/zen_mode_spec.js
@@ -3,7 +3,7 @@
/* global Mousetrap */
/* global ZenMode */
-require('~/zen_mode');
+import '~/zen_mode';
(function() {
var enterZen, escapeKeydown, exitZen;
diff --git a/spec/lib/banzai/filter/external_link_filter_spec.rb b/spec/lib/banzai/filter/external_link_filter_spec.rb
index d9e4525cb28..0f8ec8de7a0 100644
--- a/spec/lib/banzai/filter/external_link_filter_spec.rb
+++ b/spec/lib/banzai/filter/external_link_filter_spec.rb
@@ -1,5 +1,22 @@
require 'spec_helper'
+shared_examples 'an external link with rel attribute' do
+ it 'adds rel="nofollow" to external links' do
+ expect(doc.at_css('a')).to have_attribute('rel')
+ expect(doc.at_css('a')['rel']).to include 'nofollow'
+ end
+
+ it 'adds rel="noreferrer" to external links' do
+ expect(doc.at_css('a')).to have_attribute('rel')
+ expect(doc.at_css('a')['rel']).to include 'noreferrer'
+ end
+
+ it 'adds rel="noopener" to external links' do
+ expect(doc.at_css('a')).to have_attribute('rel')
+ expect(doc.at_css('a')['rel']).to include 'noopener'
+ end
+end
+
describe Banzai::Filter::ExternalLinkFilter, lib: true do
include FilterSpecHelper
@@ -22,49 +39,58 @@ describe Banzai::Filter::ExternalLinkFilter, lib: true do
context 'for root links on document' do
let(:doc) { filter %q(<a href="https://google.com/">Google</a>) }
- it 'adds rel="nofollow" to external links' do
- expect(doc.at_css('a')).to have_attribute('rel')
- expect(doc.at_css('a')['rel']).to include 'nofollow'
+ it_behaves_like 'an external link with rel attribute'
+ end
+
+ context 'for nested links on document' do
+ let(:doc) { filter %q(<p><a href="https://google.com/">Google</a></p>) }
+
+ it_behaves_like 'an external link with rel attribute'
+ end
+
+ context 'for invalid urls' do
+ it 'skips broken hrefs' do
+ doc = filter %q(<p><a href="don't crash on broken urls">Google</a></p>)
+ expected = %q(<p><a href="don't%20crash%20on%20broken%20urls">Google</a></p>)
+
+ expect(doc.to_html).to eq(expected)
end
- it 'adds rel="noreferrer" to external links' do
- expect(doc.at_css('a')).to have_attribute('rel')
- expect(doc.at_css('a')['rel']).to include 'noreferrer'
+ it 'skips improperly formatted mailtos' do
+ doc = filter %q(<p><a href="mailto://jblogs@example.com">Email</a></p>)
+ expected = %q(<p><a href="mailto://jblogs@example.com">Email</a></p>)
+
+ expect(doc.to_html).to eq(expected)
end
end
- context 'for nested links on document' do
- let(:doc) { filter %q(<p><a href="https://google.com/">Google</a></p>) }
+ context 'for links with a username' do
+ context 'with a valid username' do
+ let(:doc) { filter %q(<a href="https://user@google.com/">Google</a>) }
- it 'adds rel="nofollow" to external links' do
- expect(doc.at_css('a')).to have_attribute('rel')
- expect(doc.at_css('a')['rel']).to include 'nofollow'
+ it_behaves_like 'an external link with rel attribute'
end
- it 'adds rel="noreferrer" to external links' do
- expect(doc.at_css('a')).to have_attribute('rel')
- expect(doc.at_css('a')['rel']).to include 'noreferrer'
+ context 'with an impersonated username' do
+ let(:internal) { Gitlab.config.gitlab.url }
+
+ let(:doc) { filter %Q(<a href="https://#{internal}@example.com" target="_blank">Reverse Tabnabbing</a>) }
+
+ it_behaves_like 'an external link with rel attribute'
end
end
context 'for non-lowercase scheme links' do
- let(:doc_with_http) { filter %q(<p><a href="httP://google.com/">Google</a></p>) }
- let(:doc_with_https) { filter %q(<p><a href="hTTpS://google.com/">Google</a></p>) }
-
- it 'adds rel="nofollow" to external links' do
- expect(doc_with_http.at_css('a')).to have_attribute('rel')
- expect(doc_with_https.at_css('a')).to have_attribute('rel')
+ context 'with http' do
+ let(:doc) { filter %q(<p><a href="httP://google.com/">Google</a></p>) }
- expect(doc_with_http.at_css('a')['rel']).to include 'nofollow'
- expect(doc_with_https.at_css('a')['rel']).to include 'nofollow'
+ it_behaves_like 'an external link with rel attribute'
end
- it 'adds rel="noreferrer" to external links' do
- expect(doc_with_http.at_css('a')).to have_attribute('rel')
- expect(doc_with_https.at_css('a')).to have_attribute('rel')
+ context 'with https' do
+ let(:doc) { filter %q(<p><a href="hTTpS://google.com/">Google</a></p>) }
- expect(doc_with_http.at_css('a')['rel']).to include 'noreferrer'
- expect(doc_with_https.at_css('a')['rel']).to include 'noreferrer'
+ it_behaves_like 'an external link with rel attribute'
end
it 'skips internal links' do
@@ -84,14 +110,6 @@ describe Banzai::Filter::ExternalLinkFilter, lib: true do
context 'for protocol-relative links' do
let(:doc) { filter %q(<p><a href="//google.com/">Google</a></p>) }
- it 'adds rel="nofollow" to external links' do
- expect(doc.at_css('a')).to have_attribute('rel')
- expect(doc.at_css('a')['rel']).to include 'nofollow'
- end
-
- it 'adds rel="noreferrer" to external links' do
- expect(doc.at_css('a')).to have_attribute('rel')
- expect(doc.at_css('a')['rel']).to include 'noreferrer'
- end
+ it_behaves_like 'an external link with rel attribute'
end
end
diff --git a/spec/lib/ci/gitlab_ci_yaml_processor_spec.rb b/spec/lib/ci/gitlab_ci_yaml_processor_spec.rb
index 53abc056602..fe2c00bb2ca 100644
--- a/spec/lib/ci/gitlab_ci_yaml_processor_spec.rb
+++ b/spec/lib/ci/gitlab_ci_yaml_processor_spec.rb
@@ -225,7 +225,7 @@ module Ci
before_script: ["pwd"],
rspec: { script: "rspec", type: "test", only: %w(master deploy) },
staging: { script: "deploy", type: "deploy", only: %w(master deploy) },
- production: { script: "deploy", type: "deploy", only: ["master@path", "deploy"] },
+ production: { script: "deploy", type: "deploy", only: ["master@path", "deploy"] }
})
config_processor = GitlabCiYamlProcessor.new(config, 'fork')
@@ -381,7 +381,7 @@ module Ci
before_script: ["pwd"],
rspec: { script: "rspec", type: "test", except: ["master", "deploy", "test@fork"] },
staging: { script: "deploy", type: "deploy", except: ["master"] },
- production: { script: "deploy", type: "deploy", except: ["master@fork"] },
+ production: { script: "deploy", type: "deploy", except: ["master@fork"] }
})
config_processor = GitlabCiYamlProcessor.new(config, 'fork')
@@ -716,7 +716,7 @@ module Ci
expect(config_processor.builds_for_stage_and_ref("test", "master").first[:options][:cache]).to eq(
paths: ["logs/", "binaries/"],
untracked: true,
- key: 'key',
+ key: 'key'
)
end
@@ -734,7 +734,7 @@ module Ci
expect(config_processor.builds_for_stage_and_ref("test", "master").first[:options][:cache]).to eq(
paths: ["logs/", "binaries/"],
untracked: true,
- key: 'key',
+ key: 'key'
)
end
@@ -743,7 +743,7 @@ module Ci
cache: { paths: ["logs/", "binaries/"], untracked: true, key: 'global' },
rspec: {
script: "rspec",
- cache: { paths: ["test/"], untracked: false, key: 'local' },
+ cache: { paths: ["test/"], untracked: false, key: 'local' }
}
})
@@ -753,7 +753,7 @@ module Ci
expect(config_processor.builds_for_stage_and_ref("test", "master").first[:options][:cache]).to eq(
paths: ["test/"],
untracked: false,
- key: 'local',
+ key: 'local'
)
end
end
diff --git a/spec/lib/container_registry/blob_spec.rb b/spec/lib/container_registry/blob_spec.rb
index f06e5fd54a2..ab010c6dfeb 100644
--- a/spec/lib/container_registry/blob_spec.rb
+++ b/spec/lib/container_registry/blob_spec.rb
@@ -98,7 +98,7 @@ describe ContainerRegistry::Blob do
context 'for a valid address' do
before do
stub_request(:get, location).
- with(headers: { 'Authorization' => nil }).
+ with { |request| !request.headers.include?('Authorization') }.
to_return(
status: 200,
headers: { 'Content-Type' => 'application/json' },
diff --git a/spec/lib/container_registry/client_spec.rb b/spec/lib/container_registry/client_spec.rb
new file mode 100644
index 00000000000..ec03b533383
--- /dev/null
+++ b/spec/lib/container_registry/client_spec.rb
@@ -0,0 +1,39 @@
+# coding: utf-8
+require 'spec_helper'
+
+describe ContainerRegistry::Client do
+ let(:token) { '12345' }
+ let(:options) { { token: token } }
+ let(:client) { described_class.new("http://container-registry", options) }
+
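+ # Requests are stubbed with WebMock; the client should send the bearer token and drop the
+ # Authorization header when following a redirect.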
+ describe '#blob' do
+ it 'GET /v2/:name/blobs/:digest' do
+ stub_request(:get, "http://container-registry/v2/group/test/blobs/sha256:0123456789012345").
+ with(headers: {
+ 'Accept' => 'application/octet-stream',
+ 'Authorization' => "bearer #{token}"
+ }).
+ to_return(status: 200, body: "Blob")
+
+ expect(client.blob('group/test', 'sha256:0123456789012345')).to eq('Blob')
+ end
+
+ it 'follows 307 redirect for GET /v2/:name/blobs/:digest' do
+ stub_request(:get, "http://container-registry/v2/group/test/blobs/sha256:0123456789012345").
+ with(headers: {
+ 'Accept' => 'application/octet-stream',
+ 'Authorization' => "bearer #{token}"
+ }).
+ to_return(status: 307, body: "", headers: { Location: 'http://redirected' })
+ # We should probably use hash_excluding here, but that requires an update to WebMock:
+ # https://github.com/bblimke/webmock/blob/master/lib/webmock/matchers/hash_excluding_matcher.rb
+ stub_request(:get, "http://redirected/").
+ with { |request| !request.headers.include?('Authorization') }.
+ to_return(status: 200, body: "Successfully redirected")
+
+ response = client.blob('group/test', 'sha256:0123456789012345')
+
+ expect(response).to eq('Successfully redirected')
+ end
+ end
+end
diff --git a/spec/lib/expand_variables_spec.rb b/spec/lib/expand_variables_spec.rb
index 90628917943..7faa0f31b68 100644
--- a/spec/lib/expand_variables_spec.rb
+++ b/spec/lib/expand_variables_spec.rb
@@ -25,7 +25,7 @@ describe ExpandVariables do
result: 'keyvalueresult',
variables: [
{ key: 'variable', value: 'value' },
- { key: 'variable2', value: 'result' },
+ { key: 'variable2', value: 'result' }
] },
{ value: 'key${variable}${variable2}',
result: 'keyvalueresult',
@@ -37,7 +37,7 @@ describe ExpandVariables do
result: 'keyresultvalue',
variables: [
{ key: 'variable', value: 'value' },
- { key: 'variable2', value: 'result' },
+ { key: 'variable2', value: 'result' }
] },
{ value: 'key${variable2}${variable}',
result: 'keyresultvalue',
@@ -49,7 +49,7 @@ describe ExpandVariables do
result: 'review/feature/add-review-apps',
variables: [
{ key: 'CI_COMMIT_REF_NAME', value: 'feature/add-review-apps' }
- ] },
+ ] }
]
tests.each do |test|
diff --git a/spec/lib/gitlab/asciidoc_spec.rb b/spec/lib/gitlab/asciidoc_spec.rb
index 0f47fb2fbd9..2c7ebb15fd7 100644
--- a/spec/lib/gitlab/asciidoc_spec.rb
+++ b/spec/lib/gitlab/asciidoc_spec.rb
@@ -22,7 +22,22 @@ module Gitlab
expect(Asciidoctor).to receive(:convert)
.with(input, expected_asciidoc_opts).and_return(html)
- expect(render(input)).to eq(html)
+ expect(render(input, context)).to eq(html)
+ end
+
+ context "with asciidoc_opts" do
+ it "merges the options with default ones" do
+ expected_asciidoc_opts = {
+ safe: :secure,
+ backend: :gitlab_html5,
+ attributes: described_class::DEFAULT_ADOC_ATTRS
+ }
+
+ expect(Asciidoctor).to receive(:convert)
+ .with(input, expected_asciidoc_opts).and_return(html)
+
+ render(input, context)
+ end
end
context "XSS" do
@@ -33,7 +48,7 @@ module Gitlab
},
'images' => {
input: 'image:https://localhost.com/image.png[Alt text" onerror="alert(7)]',
- output: "<div>\n<p><span><img src=\"https://localhost.com/image.png\" alt=\"Alt text\"></span></p>\n</div>"
+ output: "<img src=\"https://localhost.com/image.png\" alt=\"Alt text\">"
},
'pre' => {
input: '```mypre"><script>alert(3)</script>',
@@ -43,10 +58,18 @@ module Gitlab
links.each do |name, data|
it "does not convert dangerous #{name} into HTML" do
- expect(render(data[:input])).to eq(data[:output])
+ expect(render(data[:input], context)).to include(data[:output])
end
end
end
+
+ context 'external links' do
+ it 'adds the `rel` attribute to the link' do
+ output = render('link:https://google.com[Google]', context)
+
+ expect(output).to include('rel="nofollow noreferrer noopener"')
+ end
+ end
end
def render(*args)
diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb
index d4a43192d03..50bc3ef1b7c 100644
--- a/spec/lib/gitlab/auth_spec.rb
+++ b/spec/lib/gitlab/auth_spec.rb
@@ -175,7 +175,7 @@ describe Gitlab::Auth, lib: true do
user = create(
:user,
username: 'normal_user',
- password: 'my-secret',
+ password: 'my-secret'
)
expect(gl_auth.find_for_git_client(user.username, user.password, project: nil, ip: 'ip'))
@@ -186,7 +186,7 @@ describe Gitlab::Auth, lib: true do
user = create(
:user,
username: 'oauth2',
- password: 'my-secret',
+ password: 'my-secret'
)
expect(gl_auth.find_for_git_client(user.username, user.password, project: nil, ip: 'ip'))
diff --git a/spec/lib/gitlab/backup/manager_spec.rb b/spec/lib/gitlab/backup/manager_spec.rb
index f84782ab440..c59ff7fb290 100644
--- a/spec/lib/gitlab/backup/manager_spec.rb
+++ b/spec/lib/gitlab/backup/manager_spec.rb
@@ -151,7 +151,7 @@ describe Backup::Manager, lib: true do
allow(Dir).to receive(:glob).and_return(
[
'1451606400_2016_01_01_gitlab_backup.tar',
- '1451520000_2015_12_31_gitlab_backup.tar',
+ '1451520000_2015_12_31_gitlab_backup.tar'
]
)
end
diff --git a/spec/lib/gitlab/checks/change_access_spec.rb b/spec/lib/gitlab/checks/change_access_spec.rb
index 959ae02c222..8d81ed5856e 100644
--- a/spec/lib/gitlab/checks/change_access_spec.rb
+++ b/spec/lib/gitlab/checks/change_access_spec.rb
@@ -96,40 +96,77 @@ describe Gitlab::Checks::ChangeAccess, lib: true do
end
end
- context 'protected branches check' do
- before do
- allow(ProtectedBranch).to receive(:protected?).with(project, 'master').and_return(true)
- end
-
- it 'returns an error if the user is not allowed to do forced pushes to protected branches' do
- expect(Gitlab::Checks::ForcePush).to receive(:force_push?).and_return(true)
+ context 'branches check' do
+ context 'trying to delete the default branch' do
+ let(:newrev) { '0000000000000000000000000000000000000000' }
+ let(:ref) { 'refs/heads/master' }
- expect(subject.status).to be(false)
- expect(subject.message).to eq('You are not allowed to force push code to a protected branch on this project.')
+ it 'returns an error' do
+ expect(subject.status).to be(false)
+ expect(subject.message).to eq('The default branch of a project cannot be deleted.')
+ end
end
- it 'returns an error if the user is not allowed to merge to protected branches' do
- expect_any_instance_of(Gitlab::Checks::MatchingMergeRequest).to receive(:match?).and_return(true)
- expect(user_access).to receive(:can_merge_to_branch?).and_return(false)
- expect(user_access).to receive(:can_push_to_branch?).and_return(false)
+ context 'protected branches check' do
+ before do
+ allow(ProtectedBranch).to receive(:protected?).with(project, 'master').and_return(true)
+ allow(ProtectedBranch).to receive(:protected?).with(project, 'feature').and_return(true)
+ end
- expect(subject.status).to be(false)
- expect(subject.message).to eq('You are not allowed to merge code into protected branches on this project.')
- end
+ it 'returns an error if the user is not allowed to do forced pushes to protected branches' do
+ expect(Gitlab::Checks::ForcePush).to receive(:force_push?).and_return(true)
- it 'returns an error if the user is not allowed to push to protected branches' do
- expect(user_access).to receive(:can_push_to_branch?).and_return(false)
+ expect(subject.status).to be(false)
+ expect(subject.message).to eq('You are not allowed to force push code to a protected branch on this project.')
+ end
- expect(subject.status).to be(false)
- expect(subject.message).to eq('You are not allowed to push code to protected branches on this project.')
- end
+ it 'returns an error if the user is not allowed to merge to protected branches' do
+ expect_any_instance_of(Gitlab::Checks::MatchingMergeRequest).to receive(:match?).and_return(true)
+ expect(user_access).to receive(:can_merge_to_branch?).and_return(false)
+ expect(user_access).to receive(:can_push_to_branch?).and_return(false)
- context 'branch deletion' do
- let(:newrev) { '0000000000000000000000000000000000000000' }
+ expect(subject.status).to be(false)
+ expect(subject.message).to eq('You are not allowed to merge code into protected branches on this project.')
+ end
+
+ it 'returns an error if the user is not allowed to push to protected branches' do
+ expect(user_access).to receive(:can_push_to_branch?).and_return(false)
- it 'returns an error if the user is not allowed to delete protected branches' do
expect(subject.status).to be(false)
- expect(subject.message).to eq('You are not allowed to delete protected branches from this project.')
+ expect(subject.message).to eq('You are not allowed to push code to protected branches on this project.')
+ end
+
+ context 'branch deletion' do
+ let(:newrev) { '0000000000000000000000000000000000000000' }
+ let(:ref) { 'refs/heads/feature' }
+
+ context 'if the user is not allowed to delete protected branches' do
+ it 'returns an error' do
+ expect(subject.status).to be(false)
+ expect(subject.message).to eq('You are not allowed to delete protected branches from this project. Only a project master or owner can delete a protected branch.')
+ end
+ end
+
+ context 'if the user is allowed to delete protected branches' do
+ before do
+ project.add_master(user)
+ end
+
+ context 'through the web interface' do
+ let(:protocol) { 'web' }
+
+ it 'allows branch deletion' do
+ expect(subject.status).to be(true)
+ end
+ end
+
+ context 'over SSH or HTTP' do
+ it 'returns an error' do
+ expect(subject.status).to be(false)
+ expect(subject.message).to eq('You can only delete protected branches using the web interface.')
+ end
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/global_spec.rb b/spec/lib/gitlab/ci/config/entry/global_spec.rb
index 684d01e9056..23270ad5053 100644
--- a/spec/lib/gitlab/ci/config/entry/global_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/global_spec.rb
@@ -113,7 +113,7 @@ describe Gitlab::Ci::Config::Entry::Global do
describe '#variables_value' do
it 'returns variables' do
- expect(global.variables_value).to eq(VAR: 'value')
+ expect(global.variables_value).to eq('VAR' => 'value')
end
end
@@ -154,7 +154,7 @@ describe Gitlab::Ci::Config::Entry::Global do
services: ['postgres:9.1', 'mysql:5.5'],
stage: 'test',
cache: { key: 'k', untracked: true, paths: ['public/'] },
- variables: { VAR: 'value' },
+ variables: { 'VAR' => 'value' },
ignore: false,
after_script: ['make clean'] },
spinach: { name: :spinach,
@@ -167,7 +167,7 @@ describe Gitlab::Ci::Config::Entry::Global do
cache: { key: 'k', untracked: true, paths: ['public/'] },
variables: {},
ignore: false,
- after_script: ['make clean'] },
+ after_script: ['make clean'] }
)
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/variables_spec.rb b/spec/lib/gitlab/ci/config/entry/variables_spec.rb
index f15f02f403e..84bfef9e8ad 100644
--- a/spec/lib/gitlab/ci/config/entry/variables_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/variables_spec.rb
@@ -13,6 +13,14 @@ describe Gitlab::Ci::Config::Entry::Variables do
it 'returns hash with key value strings' do
expect(entry.value).to eq config
end
+
+ context 'with numeric keys and values in the config' do
+ let(:config) { { 10 => 20 } }
+
+ it 'converts numeric key and numeric value into strings' do
+ expect(entry.value).to eq('10' => '20')
+ end
+ end
end
describe '#errors' do
diff --git a/spec/lib/gitlab/conflict/file_collection_spec.rb b/spec/lib/gitlab/conflict/file_collection_spec.rb
index 39d892c18c0..27f23ea70dc 100644
--- a/spec/lib/gitlab/conflict/file_collection_spec.rb
+++ b/spec/lib/gitlab/conflict/file_collection_spec.rb
@@ -2,7 +2,7 @@ require 'spec_helper'
describe Gitlab::Conflict::FileCollection, lib: true do
let(:merge_request) { create(:merge_request, source_branch: 'conflict-resolvable', target_branch: 'conflict-start') }
- let(:file_collection) { Gitlab::Conflict::FileCollection.new(merge_request) }
+ let(:file_collection) { described_class.read_only(merge_request) }
describe '#files' do
it 'returns an array of Conflict::Files' do
diff --git a/spec/lib/gitlab/contributions_calendar_spec.rb b/spec/lib/gitlab/contributions_calendar_spec.rb
index e18a219ef36..79632e2b6a3 100644
--- a/spec/lib/gitlab/contributions_calendar_spec.rb
+++ b/spec/lib/gitlab/contributions_calendar_spec.rb
@@ -47,7 +47,7 @@ describe Gitlab::ContributionsCalendar do
action: Event::CREATED,
target: @targets[project],
author: contributor,
- created_at: day,
+ created_at: day
)
end
diff --git a/spec/lib/gitlab/cycle_analytics/events_spec.rb b/spec/lib/gitlab/cycle_analytics/events_spec.rb
index 9d2ba481919..3610a0354e8 100644
--- a/spec/lib/gitlab/cycle_analytics/events_spec.rb
+++ b/spec/lib/gitlab/cycle_analytics/events_spec.rb
@@ -11,8 +11,6 @@ describe 'cycle analytics events' do
end
before do
- allow_any_instance_of(Gitlab::ReferenceExtractor).to receive(:issues).and_return([context])
-
setup(context)
end
@@ -132,6 +130,8 @@ describe 'cycle analytics events' do
end
before do
+ merge_request.update(head_pipeline: pipeline)
+
create(:ci_build, pipeline: pipeline, status: :success, author: user)
create(:ci_build, pipeline: pipeline, status: :success, author: user)
@@ -228,6 +228,8 @@ describe 'cycle analytics events' do
end
before do
+ merge_request.update(head_pipeline: pipeline)
+
create(:ci_build, pipeline: pipeline, status: :success, author: user)
create(:ci_build, pipeline: pipeline, status: :success, author: user)
@@ -332,7 +334,7 @@ describe 'cycle analytics events' do
def setup(context)
milestone = create(:milestone, project: project)
context.update(milestone: milestone)
- mr = create_merge_request_closing_issue(context)
+ mr = create_merge_request_closing_issue(context, commit_message: "References #{context.to_reference}")
ProcessCommitWorker.new.perform(project.id, user.id, mr.commits.last.to_hash)
end
diff --git a/spec/lib/gitlab/data_builder/push_spec.rb b/spec/lib/gitlab/data_builder/push_spec.rb
index dbcfb9b7400..e59cba35b2f 100644
--- a/spec/lib/gitlab/data_builder/push_spec.rb
+++ b/spec/lib/gitlab/data_builder/push_spec.rb
@@ -35,6 +35,7 @@ describe Gitlab::DataBuilder::Push, lib: true do
it { expect(data[:ref]).to eq('refs/tags/v1.1.0') }
it { expect(data[:user_id]).to eq(user.id) }
it { expect(data[:user_name]).to eq(user.name) }
+ it { expect(data[:user_username]).to eq(user.username) }
it { expect(data[:user_email]).to eq(user.email) }
it { expect(data[:user_avatar]).to eq(user.avatar_url) }
it { expect(data[:project_id]).to eq(project.id) }
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 737fac14f92..bd5ac6142be 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -247,6 +247,14 @@ describe Gitlab::Database::MigrationHelpers, lib: true do
expect(Project.where(archived: true).count).to eq(1)
end
end
+
+ context 'when the value is Arel.sql (Arel::Nodes::SqlLiteral)' do
+ it 'updates the value as a SQL expression' do
+ model.update_column_in_batches(:projects, :star_count, Arel.sql('1+1'))
+
+ expect(Project.sum(:star_count)).to eq(2 * Project.count)
+ end
+ end
end
describe '#add_column_with_default' do
@@ -382,13 +390,16 @@ describe Gitlab::Database::MigrationHelpers, lib: true do
expect(model).to receive(:add_column).
with(:users, :new, :integer,
limit: old_column.limit,
- default: old_column.default,
- null: old_column.null,
precision: old_column.precision,
scale: old_column.scale)
+ expect(model).to receive(:change_column_default).
+ with(:users, :new, old_column.default)
+
expect(model).to receive(:update_column_in_batches)
+ expect(model).to receive(:change_column_null).with(:users, :new, false)
+
expect(model).to receive(:copy_indexes).with(:users, :old, :new)
expect(model).to receive(:copy_foreign_keys).with(:users, :old, :new)
@@ -406,13 +417,16 @@ describe Gitlab::Database::MigrationHelpers, lib: true do
expect(model).to receive(:add_column).
with(:users, :new, :integer,
limit: old_column.limit,
- default: old_column.default,
- null: old_column.null,
precision: old_column.precision,
scale: old_column.scale)
+ expect(model).to receive(:change_column_default).
+ with(:users, :new, old_column.default)
+
expect(model).to receive(:update_column_in_batches)
+ expect(model).to receive(:change_column_null).with(:users, :new, false)
+
expect(model).to receive(:copy_indexes).with(:users, :old, :new)
expect(model).to receive(:copy_foreign_keys).with(:users, :old, :new)
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
index 64bc5fc0429..a3ab4e3dd9e 100644
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
+++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
@@ -107,6 +107,15 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameBase do
expect(new_path).to eq('the-path0')
end
+ it "doesn't rename routes that start with a similar name" do
+ other_namespace = create(:namespace, path: 'the-path-but-not-really')
+ project = create(:empty_project, path: 'the-project', namespace: other_namespace)
+
+ subject.rename_path_for_routable(migration_namespace(namespace))
+
+ expect(project.route.reload.path).to eq('the-path-but-not-really/the-project')
+ end
+
context "the-path namespace -> subgroup -> the-path0 project" do
it "updates the route of the project correctly" do
subgroup = create(:group, path: "subgroup", parent: namespace)
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb
index a25c5da488a..c56fded7516 100644
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb
+++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb
@@ -23,6 +23,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces do
found_ids = subject.namespaces_for_paths(type: :child).
map(&:id)
+
expect(found_ids).to contain_exactly(child.id)
end
end
@@ -39,6 +40,22 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces do
found_ids = subject.namespaces_for_paths(type: :child).
map(&:id)
+
+ expect(found_ids).to contain_exactly(namespace.id)
+ end
+
+ it 'has no namespaces that look the same' do
+ _root_namespace = create(:namespace, path: 'THE-path')
+ _similar_path = create(:namespace,
+ path: 'not-really-the-path',
+ parent: create(:namespace))
+ namespace = create(:namespace,
+ path: 'the-path',
+ parent: create(:namespace))
+
+ found_ids = subject.namespaces_for_paths(type: :child).
+ map(&:id)
+
expect(found_ids).to contain_exactly(namespace.id)
end
end
@@ -53,6 +70,20 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces do
found_ids = subject.namespaces_for_paths(type: :top_level).
map(&:id)
+
+ expect(found_ids).to contain_exactly(root_namespace.id)
+ end
+
+ it 'has no namespaces that just look the same' do
+ root_namespace = create(:namespace, path: 'the-path')
+ _similar_path = create(:namespace, path: 'not-really-the-path')
+ _child_namespace = create(:namespace,
+ path: 'the-path',
+ parent: create(:namespace))
+
+ found_ids = subject.namespaces_for_paths(type: :top_level).
+ map(&:id)
+
expect(found_ids).to contain_exactly(root_namespace.id)
end
end
@@ -106,7 +137,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces do
end
describe "#rename_namespace" do
- let(:namespace) { create(:namespace, path: 'the-path') }
+ let(:namespace) { create(:group, name: 'the-path') }
it 'renames paths & routes for the namespace' do
expect(subject).to receive(:rename_path_for_routable).
@@ -146,6 +177,31 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces do
subject.rename_namespace(namespace)
end
+
+ it "doesn't rename users for other namespaces" do
+ expect(subject).not_to receive(:rename_user)
+
+ subject.rename_namespace(namespace)
+ end
+
+ it 'renames the username of a namespace for a user' do
+ user = create(:user, username: 'the-path')
+
+ expect(subject).to receive(:rename_user).with('the-path', 'the-path0')
+
+ subject.rename_namespace(user.namespace)
+ end
+ end
+
+ describe '#rename_user' do
+ it 'renames a username' do
+ subject = described_class.new([], migration)
+ user = create(:user, username: 'broken')
+
+ subject.rename_user('broken', 'broken0')
+
+ expect(user.reload.username).to eq('broken0')
+ end
end
describe '#rename_namespaces' do
diff --git a/spec/lib/gitlab/dependency_linker/gemfile_linker_spec.rb b/spec/lib/gitlab/dependency_linker/gemfile_linker_spec.rb
new file mode 100644
index 00000000000..2e52097a946
--- /dev/null
+++ b/spec/lib/gitlab/dependency_linker/gemfile_linker_spec.rb
@@ -0,0 +1,60 @@
+require 'rails_helper'
+
+describe Gitlab::DependencyLinker::GemfileLinker, lib: true do
+ describe '.support?' do
+ it 'supports Gemfile' do
+ expect(described_class.support?('Gemfile')).to be_truthy
+ end
+
+ it 'supports gems.rb' do
+ expect(described_class.support?('gems.rb')).to be_truthy
+ end
+
+ it 'does not support other files' do
+ expect(described_class.support?('Gemfile.lock')).to be_falsey
+ end
+ end
+
+ describe '#link' do
+ let(:file_name) { 'Gemfile' }
+
+ let(:file_content) do
+ <<-CONTENT.strip_heredoc
+ source 'https://rubygems.org'
+
+ gem "rails", '4.2.6', github: "rails/rails"
+ gem 'rails-deprecated_sanitizer', '~> 1.0.3'
+ gem 'responders', '~> 2.0', :github => 'rails/responders'
+ gem 'sprockets', '~> 3.6.0', git: 'https://gitlab.example.com/gems/sprockets'
+ gem 'default_value_for', '~> 3.0.0'
+ CONTENT
+ end
+
+ subject { Gitlab::Highlight.highlight(file_name, file_content) }
+
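+ # Builds the anchor tag the dependency linker is expected to emit for the given name and URL.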
+ def link(name, url)
+ %{<a href="#{url}" rel="noopener noreferrer" target="_blank">#{name}</a>}
+ end
+
+ it 'links sources' do
+ expect(subject).to include(link('https://rubygems.org', 'https://rubygems.org'))
+ end
+
+ it 'links dependencies' do
+ expect(subject).to include(link('rails', 'https://rubygems.org/gems/rails'))
+ expect(subject).to include(link('rails-deprecated_sanitizer', 'https://rubygems.org/gems/rails-deprecated_sanitizer'))
+ expect(subject).to include(link('responders', 'https://rubygems.org/gems/responders'))
+ expect(subject).to include(link('sprockets', 'https://rubygems.org/gems/sprockets'))
+ expect(subject).to include(link('default_value_for', 'https://rubygems.org/gems/default_value_for'))
+ end
+
+ it 'links GitHub repos' do
+ expect(subject).to include(link('rails/rails', 'https://github.com/rails/rails'))
+ expect(subject).to include(link('rails/responders', 'https://github.com/rails/responders'))
+ end
+
+ it 'links Git repos' do
+ expect(subject).to include(link('https://gitlab.example.com/gems/sprockets', 'https://gitlab.example.com/gems/sprockets'))
+ end
+ end
+end
diff --git a/spec/lib/gitlab/dependency_linker_spec.rb b/spec/lib/gitlab/dependency_linker_spec.rb
new file mode 100644
index 00000000000..03d5b61d70c
--- /dev/null
+++ b/spec/lib/gitlab/dependency_linker_spec.rb
@@ -0,0 +1,13 @@
+require 'rails_helper'
+
+describe Gitlab::DependencyLinker, lib: true do
+ describe '.link' do
+ it 'links using GemfileLinker' do
+ blob_name = 'Gemfile'
+
+ expect(described_class::GemfileLinker).to receive(:link)
+
+ described_class.link(blob_name, nil, nil)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/diff/highlight_spec.rb b/spec/lib/gitlab/diff/highlight_spec.rb
index c6bd4e81f4f..7d7d4a55e63 100644
--- a/spec/lib/gitlab/diff/highlight_spec.rb
+++ b/spec/lib/gitlab/diff/highlight_spec.rb
@@ -34,7 +34,7 @@ describe Gitlab::Diff::Highlight, lib: true do
end
it 'highlights and marks added lines' do
- code = %Q{+<span id="LC9" class="line" lang="ruby"> <span class="k">raise</span> <span class="no"><span class='idiff left'>RuntimeError</span></span><span class="p"><span class='idiff'>,</span></span><span class='idiff right'> </span><span class="s2">"System commands must be given as an array of strings"</span></span>\n}
+ code = %Q{+<span id="LC9" class="line" lang="ruby"> <span class="k">raise</span> <span class="no"><span class="idiff left">RuntimeError</span></span><span class="p"><span class="idiff">,</span></span><span class="idiff right"> </span><span class="s2">"System commands must be given as an array of strings"</span></span>\n}
expect(subject[5].text).to eq(code)
end
@@ -67,7 +67,7 @@ describe Gitlab::Diff::Highlight, lib: true do
end
it 'marks added lines' do
- code = %q{+ raise <span class='idiff left right'>RuntimeError, </span>&quot;System commands must be given as an array of strings&quot;}
+ code = %q{+ raise <span class="idiff left right">RuntimeError, </span>&quot;System commands must be given as an array of strings&quot;}
expect(subject[5].text).to eq(code)
expect(subject[5].text).to be_html_safe
diff --git a/spec/lib/gitlab/diff/inline_diff_markdown_marker_spec.rb b/spec/lib/gitlab/diff/inline_diff_markdown_marker_spec.rb
new file mode 100644
index 00000000000..d6e8b8ac4b2
--- /dev/null
+++ b/spec/lib/gitlab/diff/inline_diff_markdown_marker_spec.rb
@@ -0,0 +1,14 @@
+require 'spec_helper'
+
+describe Gitlab::Diff::InlineDiffMarkdownMarker, lib: true do
+ describe '#mark' do
+ let(:raw) { "abc 'def'" }
+ let(:inline_diffs) { [2..5] }
+ let(:subject) { described_class.new(raw).mark(inline_diffs, mode: :deletion) }
+
+ it 'marks the range' do
+ expect(subject).to eq("ab{-c &#39;d-}ef&#39;")
+ expect(subject).to be_html_safe
+ end
+ end
+end
diff --git a/spec/lib/gitlab/diff/inline_diff_marker_spec.rb b/spec/lib/gitlab/diff/inline_diff_marker_spec.rb
index 198ff977f24..95da344802d 100644
--- a/spec/lib/gitlab/diff/inline_diff_marker_spec.rb
+++ b/spec/lib/gitlab/diff/inline_diff_marker_spec.rb
@@ -1,26 +1,26 @@
require 'spec_helper'
describe Gitlab::Diff::InlineDiffMarker, lib: true do
- describe '#inline_diffs' do
+ describe '#mark' do
context "when the rich text is html safe" do
- let(:raw) { "abc 'def'" }
+ let(:raw) { "abc 'def'" }
let(:rich) { %{<span class="abc">abc</span><span class="space"> </span><span class="def">&#39;def&#39;</span>}.html_safe }
let(:inline_diffs) { [2..5] }
- let(:subject) { Gitlab::Diff::InlineDiffMarker.new(raw, rich).mark(inline_diffs) }
+ let(:subject) { described_class.new(raw, rich).mark(inline_diffs) }
- it 'marks the inline diffs' do
- expect(subject).to eq(%{<span class="abc">ab<span class='idiff left'>c</span></span><span class="space"><span class='idiff'> </span></span><span class="def"><span class='idiff right'>&#39;d</span>ef&#39;</span>})
+ it 'marks the range' do
+ expect(subject).to eq(%{<span class="abc">ab<span class="idiff left">c</span></span><span class="space"><span class="idiff"> </span></span><span class="def"><span class="idiff right">&#39;d</span>ef&#39;</span>})
expect(subject).to be_html_safe
end
end
context "when the text text is not html safe" do
- let(:raw) { "abc 'def'" }
+ let(:raw) { "abc 'def'" }
let(:inline_diffs) { [2..5] }
- let(:subject) { Gitlab::Diff::InlineDiffMarker.new(raw).mark(inline_diffs) }
+ let(:subject) { described_class.new(raw).mark(inline_diffs) }
- it 'marks the inline diffs' do
- expect(subject).to eq(%{ab<span class='idiff left right'>c &#39;d</span>ef&#39;})
+ it 'marks the range' do
+ expect(subject).to eq(%{ab<span class="idiff left right">c &#39;d</span>ef&#39;})
expect(subject).to be_html_safe
end
end
diff --git a/spec/lib/gitlab/diff/position_tracer_spec.rb b/spec/lib/gitlab/diff/position_tracer_spec.rb
index a10a251dc4a..4d202a76e1b 100644
--- a/spec/lib/gitlab/diff/position_tracer_spec.rb
+++ b/spec/lib/gitlab/diff/position_tracer_spec.rb
@@ -1372,7 +1372,7 @@ describe Gitlab::Diff::PositionTracer, lib: true do
nil,
{ old_path: file_name, new_path: file_name, old_line: 5, new_line: 5 },
{ old_path: file_name, old_line: 6 },
- { new_path: file_name, new_line: 7 },
+ { new_path: file_name, new_line: 7 }
]
expect_positions(old_position_attrs, new_position_attrs)
@@ -1444,7 +1444,7 @@ describe Gitlab::Diff::PositionTracer, lib: true do
nil,
{ old_path: file_name, new_path: file_name, old_line: 5, new_line: 5 },
{ old_path: file_name, old_line: 6 },
- { new_path: file_name, new_line: 7 },
+ { new_path: file_name, new_line: 7 }
]
expect_positions(old_position_attrs, new_position_attrs)
@@ -1498,7 +1498,7 @@ describe Gitlab::Diff::PositionTracer, lib: true do
{ old_path: file_name, new_path: file_name, old_line: 5, new_line: 4 },
{ old_path: file_name, new_path: file_name, old_line: 6, new_line: 5 },
nil,
- { new_path: file_name, new_line: 6 },
+ { new_path: file_name, new_line: 6 }
]
expect_positions(old_position_attrs, new_position_attrs)
@@ -1746,7 +1746,7 @@ describe Gitlab::Diff::PositionTracer, lib: true do
{ old_path: file_name, new_path: file_name, old_line: 4, new_line: 5 },
{ old_path: file_name, old_line: 5 },
{ new_path: file_name, new_line: 6 },
- { new_path: file_name, new_line: 7 },
+ { new_path: file_name, new_line: 7 }
]
expect_positions(old_position_attrs, new_position_attrs)
diff --git a/spec/lib/gitlab/etag_caching/router_spec.rb b/spec/lib/gitlab/etag_caching/router_spec.rb
index f3dacb4ef04..5ae4a19263c 100644
--- a/spec/lib/gitlab/etag_caching/router_spec.rb
+++ b/spec/lib/gitlab/etag_caching/router_spec.rb
@@ -14,7 +14,7 @@ describe Gitlab::EtagCaching::Router do
it 'matches issue title endpoint' do
env = build_env(
- '/my-group/my-project/issues/123/rendered_title'
+ '/my-group/my-project/issues/123/realtime_changes'
)
result = described_class.match(env)
diff --git a/spec/lib/gitlab/file_finder_spec.rb b/spec/lib/gitlab/file_finder_spec.rb
new file mode 100644
index 00000000000..5a32ffd462c
--- /dev/null
+++ b/spec/lib/gitlab/file_finder_spec.rb
@@ -0,0 +1,21 @@
+require 'spec_helper'
+
+describe Gitlab::FileFinder, lib: true do
+ describe '#find' do
+ let(:project) { create(:project, :public, :repository) }
+ let(:finder) { described_class.new(project, project.default_branch) }
+
+ it 'finds by name' do
+ results = finder.find('files')
+ expect(results.map(&:first)).to include('files/images/wm.svg')
+ end
+
+ it 'finds by content' do
+ results = finder.find('files')
+
+ blob = results.select { |result| result.first == "CHANGELOG" }.flatten.last
+
+ expect(blob.filename).to eq("CHANGELOG")
+ end
+ end
+end
diff --git a/spec/lib/gitlab/git/branch_spec.rb b/spec/lib/gitlab/git/branch_spec.rb
index cdf1b8beee3..9eac7660cd1 100644
--- a/spec/lib/gitlab/git/branch_spec.rb
+++ b/spec/lib/gitlab/git/branch_spec.rb
@@ -7,6 +7,51 @@ describe Gitlab::Git::Branch, seed_helper: true do
it { is_expected.to be_kind_of Array }
+ describe 'initialize' do
+ let(:commit_id) { 'f00' }
+ let(:commit_subject) { "My commit".force_encoding('ASCII-8BIT') }
+ let(:committer) do
+ Gitaly::FindLocalBranchCommitAuthor.new(
+ name: generate(:name),
+ email: generate(:email),
+ date: Google::Protobuf::Timestamp.new(seconds: 123)
+ )
+ end
+ let(:author) do
+ Gitaly::FindLocalBranchCommitAuthor.new(
+ name: generate(:name),
+ email: generate(:email),
+ date: Google::Protobuf::Timestamp.new(seconds: 456)
+ )
+ end
+ let(:gitaly_branch) do
+ Gitaly::FindLocalBranchResponse.new(
+ name: 'foo', commit_id: commit_id, commit_subject: commit_subject,
+ commit_author: author, commit_committer: committer
+ )
+ end
+ let(:attributes) do
+ {
+ id: commit_id,
+ message: commit_subject,
+ authored_date: Time.at(author.date.seconds),
+ author_name: author.name,
+ author_email: author.email,
+ committed_date: Time.at(committer.date.seconds),
+ committer_name: committer.name,
+ committer_email: committer.email
+ }
+ end
+ let(:branch) { described_class.new(repository, 'foo', gitaly_branch) }
+
+ it 'parses Gitaly::FindLocalBranchResponse correctly' do
+ expect(Gitlab::Git::Commit).to receive(:decorate).
+ with(hash_including(attributes)).and_call_original
+
+ expect(branch.dereferenced_target.message.encoding).to be(Encoding::UTF_8)
+ end
+ end
+
describe '#size' do
subject { super().size }
it { is_expected.to eq(SeedRepo::Repo::BRANCHES.size) }
diff --git a/spec/lib/gitlab/git/diff_spec.rb b/spec/lib/gitlab/git/diff_spec.rb
index 7253a2edeff..4189aaef643 100644
--- a/spec/lib/gitlab/git/diff_spec.rb
+++ b/spec/lib/gitlab/git/diff_spec.rb
@@ -120,7 +120,7 @@ EOT
new_mode: 0100644,
from_id: '357406f3075a57708d0163752905cc1576fceacc',
to_id: '8e5177d718c561d36efde08bad36b43687ee6bf0',
- raw_chunks: raw_chunks,
+ raw_chunks: raw_chunks
)
)
end
diff --git a/spec/lib/gitlab/git/encoding_helper_spec.rb b/spec/lib/gitlab/git/encoding_helper_spec.rb
index f6ac7b23d1d..1a3bf802a07 100644
--- a/spec/lib/gitlab/git/encoding_helper_spec.rb
+++ b/spec/lib/gitlab/git/encoding_helper_spec.rb
@@ -19,8 +19,8 @@ describe Gitlab::Git::EncodingHelper do
[
'removes invalid bytes from ASCII-8bit encoded multibyte string. This can occur when a git diff match line truncates in the middle of a multibyte character. This occurs after the second word in this example. The test string is as short as we can get while still triggering the error condition when not looking at `detect[:confidence]`.',
"mu ns\xC3\n Lorem ipsum dolor sit amet, consectetur adipisicing ut\xC3\xA0y\xC3\xB9abcd\xC3\xB9efg kia elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non p\n {: .normal_pn}\n \n-Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in\n# *Lorem ipsum\xC3\xB9l\xC3\xB9l\xC3\xA0 dolor\xC3\xB9k\xC3\xB9 sit\xC3\xA8b\xC3\xA8 N\xC3\xA8 amet b\xC3\xA0d\xC3\xAC*\n+# *consectetur\xC3\xB9l\xC3\xB9l\xC3\xA0 adipisicing\xC3\xB9k\xC3\xB9 elit\xC3\xA8b\xC3\xA8 N\xC3\xA8 sed do\xC3\xA0d\xC3\xAC*{: .italic .smcaps}\n \n \xEF\x9B\xA1 eiusmod tempor incididunt, ut\xC3\xAAn\xC3\xB9 labore et dolore. Tw\xC4\x83nj\xC3\xAC magna aliqua. Ut enim ad minim veniam\n {: .normal}\n@@ -9,5 +9,5 @@ quis nostrud\xC3\xAAt\xC3\xB9 exercitiation ullamco laboris m\xC3\xB9s\xC3\xB9k\xC3\xB9abc\xC3\xB9 nisi ".force_encoding('ASCII-8BIT'),
- "mu ns\n Lorem ipsum dolor sit amet, consectetur adipisicing ut\xC3\xA0y\xC3\xB9abcd\xC3\xB9efg kia elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non p\n {: .normal_pn}\n \n-Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in\n# *Lorem ipsum\xC3\xB9l\xC3\xB9l\xC3\xA0 dolor\xC3\xB9k\xC3\xB9 sit\xC3\xA8b\xC3\xA8 N\xC3\xA8 amet b\xC3\xA0d\xC3\xAC*\n+# *consectetur\xC3\xB9l\xC3\xB9l\xC3\xA0 adipisicing\xC3\xB9k\xC3\xB9 elit\xC3\xA8b\xC3\xA8 N\xC3\xA8 sed do\xC3\xA0d\xC3\xAC*{: .italic .smcaps}\n \n \xEF\x9B\xA1 eiusmod tempor incididunt, ut\xC3\xAAn\xC3\xB9 labore et dolore. Tw\xC4\x83nj\xC3\xAC magna aliqua. Ut enim ad minim veniam\n {: .normal}\n@@ -9,5 +9,5 @@ quis nostrud\xC3\xAAt\xC3\xB9 exercitiation ullamco laboris m\xC3\xB9s\xC3\xB9k\xC3\xB9abc\xC3\xB9 nisi ",
- ],
+ "mu ns\n Lorem ipsum dolor sit amet, consectetur adipisicing ut\xC3\xA0y\xC3\xB9abcd\xC3\xB9efg kia elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non p\n {: .normal_pn}\n \n-Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in\n# *Lorem ipsum\xC3\xB9l\xC3\xB9l\xC3\xA0 dolor\xC3\xB9k\xC3\xB9 sit\xC3\xA8b\xC3\xA8 N\xC3\xA8 amet b\xC3\xA0d\xC3\xAC*\n+# *consectetur\xC3\xB9l\xC3\xB9l\xC3\xA0 adipisicing\xC3\xB9k\xC3\xB9 elit\xC3\xA8b\xC3\xA8 N\xC3\xA8 sed do\xC3\xA0d\xC3\xAC*{: .italic .smcaps}\n \n \xEF\x9B\xA1 eiusmod tempor incididunt, ut\xC3\xAAn\xC3\xB9 labore et dolore. Tw\xC4\x83nj\xC3\xAC magna aliqua. Ut enim ad minim veniam\n {: .normal}\n@@ -9,5 +9,5 @@ quis nostrud\xC3\xAAt\xC3\xB9 exercitiation ullamco laboris m\xC3\xB9s\xC3\xB9k\xC3\xB9abc\xC3\xB9 nisi "
+ ]
].each do |description, test_string, xpect|
it description do
expect(ext_class.encode!(test_string)).to eq(xpect)
@@ -37,18 +37,18 @@ describe Gitlab::Git::EncodingHelper do
[
"encodes valid utf8 encoded string to utf8",
"λ, λ, λ".encode("UTF-8"),
- "λ, λ, λ".encode("UTF-8"),
+ "λ, λ, λ".encode("UTF-8")
],
[
"encodes valid ASCII-8BIT encoded string to utf8",
"ascii only".encode("ASCII-8BIT"),
- "ascii only".encode("UTF-8"),
+ "ascii only".encode("UTF-8")
],
[
"encodes valid ISO-8859-1 encoded string to utf8",
"Rüby ist eine Programmiersprache. Wir verlängern den text damit ICU die Sprache erkennen kann.".encode("ISO-8859-1", "UTF-8"),
- "Rüby ist eine Programmiersprache. Wir verlängern den text damit ICU die Sprache erkennen kann.".encode("UTF-8"),
- ],
+ "Rüby ist eine Programmiersprache. Wir verlängern den text damit ICU die Sprache erkennen kann.".encode("UTF-8")
+ ]
].each do |description, test_string, xpect|
it description do
r = ext_class.encode_utf8(test_string.force_encoding('UTF-8'))
@@ -77,8 +77,8 @@ describe Gitlab::Git::EncodingHelper do
[
'removes invalid bytes from ASCII-8bit encoded multibyte string.',
"Lorem ipsum\xC3\n dolor sit amet, xy\xC3\xA0y\xC3\xB9abcd\xC3\xB9efg".force_encoding('ASCII-8BIT'),
- "Lorem ipsum\n dolor sit amet, xyàyùabcdùefg",
- ],
+ "Lorem ipsum\n dolor sit amet, xyàyùabcdùefg"
+ ]
].each do |description, test_string, xpect|
it description do
expect(ext_class.encode!(test_string)).to eq(xpect)
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index fea186fd4f4..cb107c6d1f9 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -26,6 +26,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
context 'with gitaly enabled' do
before { stub_gitaly }
+ after { Gitlab::GitalyClient.clear_stubs! }
it 'gets the branch name from GitalyClient' do
expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:default_branch_name)
@@ -120,6 +121,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
context 'with gitaly enabled' do
before { stub_gitaly }
+ after { Gitlab::GitalyClient.clear_stubs! }
it 'gets the branch names from GitalyClient' do
expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:branch_names)
@@ -157,6 +159,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
context 'with gitaly enabled' do
before { stub_gitaly }
+ after { Gitlab::GitalyClient.clear_stubs! }
it 'gets the tag names from GitalyClient' do
expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:tag_names)
@@ -1046,6 +1049,28 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
end
+ describe '#ref_name_for_sha' do
+ let(:ref_path) { 'refs/heads' }
+ let(:sha) { repository.find_branch('master').dereferenced_target.id }
+ let(:ref_name) { 'refs/heads/master' }
+
+ it 'returns the ref name for the given sha' do
+ expect(repository.ref_name_for_sha(ref_path, sha)).to eq(ref_name)
+ end
+
+ it "returns an empty name if the ref doesn't exist" do
+ expect(repository.ref_name_for_sha(ref_path, "000000")).to eq("")
+ end
+
+    it "raises an exception if the ref is empty" do
+ expect { repository.ref_name_for_sha(ref_path, "") }.to raise_error(ArgumentError)
+ end
+
+    it "raises an exception if the ref is nil" do
+ expect { repository.ref_name_for_sha(ref_path, nil) }.to raise_error(ArgumentError)
+ end
+ end
+
describe '#find_commits' do
    it 'should return a collection of commits' do
commits = repository.find_commits
@@ -1080,7 +1105,9 @@ describe Gitlab::Git::Repository, seed_helper: true do
ref = double()
allow(ref).to receive(:name) { 'bad-branch' }
allow(ref).to receive(:target) { raise Rugged::ReferenceError }
- allow(repository.rugged).to receive(:branches) { [ref] }
+ branches = double()
+ allow(branches).to receive(:each) { [ref].each }
+ allow(repository.rugged).to receive(:branches) { branches }
end
it 'should return empty branches' do
@@ -1264,7 +1291,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
describe '#local_branches' do
before(:all) do
- @repo = Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH)
+ @repo = Gitlab::Git::Repository.new('default', File.join(TEST_MUTABLE_REPO_PATH, '.git'))
end
after(:all) do
@@ -1279,6 +1306,29 @@ describe Gitlab::Git::Repository, seed_helper: true do
expect(@repo.local_branches.any? { |branch| branch.name == 'remote_branch' }).to eq(false)
expect(@repo.local_branches.any? { |branch| branch.name == 'local_branch' }).to eq(true)
end
+
+ context 'with gitaly enabled' do
+ before { stub_gitaly }
+ after { Gitlab::GitalyClient.clear_stubs! }
+
+ it 'gets the branches from GitalyClient' do
+ expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:local_branches).
+ and_return([])
+ @repo.local_branches
+ end
+
+ it 'wraps GRPC not found' do
+ expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:local_branches).
+ and_raise(GRPC::NotFound)
+ expect { @repo.local_branches }.to raise_error(Gitlab::Git::Repository::NoRepository)
+ end
+
+ it 'wraps GRPC exceptions' do
+ expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:local_branches).
+ and_raise(GRPC::Unknown)
+ expect { @repo.local_branches }.to raise_error(Gitlab::Git::CommandError)
+ end
+ end
end
def create_remote_branch(remote_name, branch_name, source_branch_name)
diff --git a/spec/lib/gitlab/git/util_spec.rb b/spec/lib/gitlab/git/util_spec.rb
index 69d3ca55397..88c871855df 100644
--- a/spec/lib/gitlab/git/util_spec.rb
+++ b/spec/lib/gitlab/git/util_spec.rb
@@ -6,7 +6,7 @@ describe Gitlab::Git::Util do
["", 0],
["foo", 1],
["foo\n", 1],
- ["foo\n\n", 2],
+ ["foo\n\n", 2]
].each do |string, line_count|
it "counts #{line_count} lines in #{string.inspect}" do
expect(described_class.count_lines(string)).to eq(line_count)
diff --git a/spec/lib/gitlab/git_access_spec.rb b/spec/lib/gitlab/git_access_spec.rb
index d8b72615fab..25769977f24 100644
--- a/spec/lib/gitlab/git_access_spec.rb
+++ b/spec/lib/gitlab/git_access_spec.rb
@@ -1,7 +1,7 @@
require 'spec_helper'
describe Gitlab::GitAccess, lib: true do
- let(:access) { Gitlab::GitAccess.new(actor, project, 'web', authentication_abilities: authentication_abilities) }
+ let(:access) { Gitlab::GitAccess.new(actor, project, 'ssh', authentication_abilities: authentication_abilities) }
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
let(:actor) { user }
diff --git a/spec/lib/gitlab/gitaly_client/commit_spec.rb b/spec/lib/gitlab/gitaly_client/commit_spec.rb
index 58f11ff8906..cf1bc74779e 100644
--- a/spec/lib/gitlab/gitaly_client/commit_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/commit_spec.rb
@@ -1,28 +1,24 @@
require 'spec_helper'
describe Gitlab::GitalyClient::Commit do
- describe '.diff_from_parent' do
- let(:diff_stub) { double('Gitaly::Diff::Stub') }
- let(:project) { create(:project, :repository) }
- let(:repository_message) { project.repository.gitaly_repository }
- let(:commit) { project.commit('913c66a37b4a45b9769037c55c2d238bd0942d2e') }
-
- before do
- allow(Gitaly::Diff::Stub).to receive(:new).and_return(diff_stub)
- allow(diff_stub).to receive(:commit_diff).and_return([])
- end
+ let(:diff_stub) { double('Gitaly::Diff::Stub') }
+ let(:project) { create(:project, :repository) }
+ let(:repository) { project.repository }
+ let(:repository_message) { repository.gitaly_repository }
+ let(:commit) { project.commit('913c66a37b4a45b9769037c55c2d238bd0942d2e') }
+ describe '#diff_from_parent' do
context 'when a commit has a parent' do
it 'sends an RPC request with the parent ID as left commit' do
request = Gitaly::CommitDiffRequest.new(
repository: repository_message,
left_commit_id: 'cfe32cf61b73a0d5e9f13e774abde7ff789b1660',
- right_commit_id: commit.id,
+ right_commit_id: commit.id
)
- expect(diff_stub).to receive(:commit_diff).with(request)
+ expect_any_instance_of(Gitaly::Diff::Stub).to receive(:commit_diff).with(request)
- described_class.diff_from_parent(commit)
+ described_class.new(repository).diff_from_parent(commit)
end
end
@@ -32,17 +28,17 @@ describe Gitlab::GitalyClient::Commit do
request = Gitaly::CommitDiffRequest.new(
repository: repository_message,
left_commit_id: '4b825dc642cb6eb9a060e54bf8d69288fbee4904',
- right_commit_id: initial_commit.id,
+ right_commit_id: initial_commit.id
)
- expect(diff_stub).to receive(:commit_diff).with(request)
+ expect_any_instance_of(Gitaly::Diff::Stub).to receive(:commit_diff).with(request)
- described_class.diff_from_parent(initial_commit)
+ described_class.new(repository).diff_from_parent(initial_commit)
end
end
it 'returns a Gitlab::Git::DiffCollection' do
- ret = described_class.diff_from_parent(commit)
+ ret = described_class.new(repository).diff_from_parent(commit)
expect(ret).to be_kind_of(Gitlab::Git::DiffCollection)
end
@@ -50,9 +46,40 @@ describe Gitlab::GitalyClient::Commit do
it 'passes options to Gitlab::Git::DiffCollection' do
options = { max_files: 31, max_lines: 13 }
- expect(Gitlab::Git::DiffCollection).to receive(:new).with([], options)
+ expect(Gitlab::Git::DiffCollection).to receive(:new).with(kind_of(Enumerable), options)
+
+ described_class.new(repository).diff_from_parent(commit, options)
+ end
+ end
+
+ describe '#commit_deltas' do
+ context 'when a commit has a parent' do
+ it 'sends an RPC request with the parent ID as left commit' do
+ request = Gitaly::CommitDeltaRequest.new(
+ repository: repository_message,
+ left_commit_id: 'cfe32cf61b73a0d5e9f13e774abde7ff789b1660',
+ right_commit_id: commit.id
+ )
+
+ expect_any_instance_of(Gitaly::Diff::Stub).to receive(:commit_delta).with(request).and_return([])
- described_class.diff_from_parent(commit, options)
+ described_class.new(repository).commit_deltas(commit)
+ end
+ end
+
+ context 'when a commit does not have a parent' do
+ it 'sends an RPC request with empty tree ref as left commit' do
+ initial_commit = project.commit('1a0b36b3cdad1d2ee32457c102a8c0b7056fa863')
+ request = Gitaly::CommitDeltaRequest.new(
+ repository: repository_message,
+ left_commit_id: '4b825dc642cb6eb9a060e54bf8d69288fbee4904',
+ right_commit_id: initial_commit.id
+ )
+
+ expect_any_instance_of(Gitaly::Diff::Stub).to receive(:commit_delta).with(request).and_return([])
+
+ described_class.new(repository).commit_deltas(initial_commit)
+ end
end
end
end
diff --git a/spec/lib/gitlab/gitaly_client/ref_spec.rb b/spec/lib/gitlab/gitaly_client/ref_spec.rb
index 255f23e6270..d8cd2dcbd2a 100644
--- a/spec/lib/gitlab/gitaly_client/ref_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/ref_spec.rb
@@ -9,6 +9,13 @@ describe Gitlab::GitalyClient::Ref do
allow(Gitlab.config.gitaly).to receive(:enabled).and_return(true)
end
+ after do
+ # When we say `expect_any_instance_of(Gitaly::Ref::Stub)` a double is created,
+ # and because GitalyClient shares stubs these will get passed from example to
+ # example, which will cause an error, so we clean the stubs after each example.
+ Gitlab::GitalyClient.clear_stubs!
+ end
+
describe '#branch_names' do
it 'sends a find_all_branch_names message' do
expect_any_instance_of(Gitaly::Ref::Stub).
@@ -38,4 +45,27 @@ describe Gitlab::GitalyClient::Ref do
client.default_branch_name
end
end
+
+ describe '#local_branches' do
+ it 'sends a find_local_branches message' do
+ expect_any_instance_of(Gitaly::Ref::Stub).
+ to receive(:find_local_branches).with(gitaly_request_with_repo_path(repo_path)).
+ and_return([])
+
+ client.local_branches
+ end
+
+ it 'parses and sends the sort parameter' do
+ expect_any_instance_of(Gitaly::Ref::Stub).
+ to receive(:find_local_branches).
+ with(gitaly_request_with_params(sort_by: :UPDATED_DESC)).
+ and_return([])
+
+ client.local_branches(sort_by: 'updated_desc')
+ end
+
+ it 'raises an argument error if an invalid sort_by parameter is passed' do
+ expect { client.local_branches(sort_by: 'invalid_sort') }.to raise_error(ArgumentError)
+ end
+ end
end
diff --git a/spec/lib/gitlab/gitaly_client_spec.rb b/spec/lib/gitlab/gitaly_client_spec.rb
index 55fcf91fb6e..08ee0dff6b2 100644
--- a/spec/lib/gitlab/gitaly_client_spec.rb
+++ b/spec/lib/gitlab/gitaly_client_spec.rb
@@ -1,14 +1,19 @@
require 'spec_helper'
describe Gitlab::GitalyClient, lib: true do
- describe '.new_channel' do
+ describe '.stub' do
+ before { described_class.clear_stubs! }
+
context 'when passed a UNIX socket address' do
- it 'passes the address as-is to GRPC::Core::Channel initializer' do
+ it 'passes the address as-is to GRPC' do
address = 'unix:/tmp/gitaly.sock'
+ allow(Gitlab.config.repositories).to receive(:storages).and_return({
+ 'default' => { 'gitaly_address' => address }
+ })
- expect(GRPC::Core::Channel).to receive(:new).with(address, any_args)
+ expect(Gitaly::Commit::Stub).to receive(:new).with(address, any_args)
- described_class.new_channel(address)
+ described_class.stub(:commit, 'default')
end
end
@@ -17,9 +22,13 @@ describe Gitlab::GitalyClient, lib: true do
address = 'localhost:9876'
prefixed_address = "tcp://#{address}"
- expect(GRPC::Core::Channel).to receive(:new).with(address, any_args)
+ allow(Gitlab.config.repositories).to receive(:storages).and_return({
+ 'default' => { 'gitaly_address' => prefixed_address }
+ })
+
+ expect(Gitaly::Commit::Stub).to receive(:new).with(address, any_args)
- described_class.new_channel(prefixed_address)
+ described_class.stub(:commit, 'default')
end
end
end
diff --git a/spec/lib/gitlab/highlight_spec.rb b/spec/lib/gitlab/highlight_spec.rb
index e49799ad105..e57b3053871 100644
--- a/spec/lib/gitlab/highlight_spec.rb
+++ b/spec/lib/gitlab/highlight_spec.rb
@@ -57,4 +57,15 @@ describe Gitlab::Highlight, lib: true do
end
end
end
+
+ describe '#highlight' do
+ subject { described_class.highlight(file_name, file_content, nowrap: false) }
+
+ it 'links dependencies via DependencyLinker' do
+ expect(Gitlab::DependencyLinker).to receive(:link).
+ with('file.name', 'Contents', anything).and_call_original
+
+ described_class.highlight('file.name', 'Contents')
+ end
+ end
end
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index baa81870e81..34f617e23a5 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -85,6 +85,7 @@ merge_requests:
- merge_requests_closing_issues
- metrics
- timelogs
+- head_pipeline
merge_request_diff:
- merge_request
pipelines:
@@ -101,6 +102,8 @@ pipelines:
- cancelable_statuses
- manual_actions
- artifacts
+- pipeline_schedule
+- merge_requests
statuses:
- project
- pipeline
@@ -112,9 +115,13 @@ triggers:
- project
- trigger_requests
- owner
-- trigger_schedule
-trigger_schedule:
-- trigger
+pipeline_schedules:
+- project
+- owner
+- pipelines
+- last_pipeline
+pipeline_schedule:
+- pipelines
deploy_keys:
- user
- deploy_keys_projects
@@ -221,7 +228,7 @@ project:
- active_runners
- variables
- triggers
-- trigger_schedules
+- pipeline_schedules
- environments
- deployments
- project_feature
diff --git a/spec/lib/gitlab/import_export/project.json b/spec/lib/gitlab/import_export/project.json
index fdbb6a0556d..e3599d6fe59 100644
--- a/spec/lib/gitlab/import_export/project.json
+++ b/spec/lib/gitlab/import_export/project.json
@@ -6997,7 +6997,7 @@
"merge_requests_events": true,
"tag_push_events": true,
"note_events": true,
- "build_events": true,
+ "job_events": true,
"type": "TeamcityService",
"category": "ci",
"default": false,
@@ -7041,7 +7041,7 @@
"merge_requests_events": true,
"tag_push_events": true,
"note_events": true,
- "build_events": true,
+ "job_events": true,
"type": "RedmineService",
"category": "issue_tracker",
"default": false,
@@ -7063,7 +7063,7 @@
"merge_requests_events": true,
"tag_push_events": true,
"note_events": true,
- "build_events": true,
+ "job_events": true,
"type": "PushoverService",
"category": "common",
"default": false,
@@ -7085,7 +7085,7 @@
"merge_requests_events": true,
"tag_push_events": true,
"note_events": true,
- "build_events": true,
+ "job_events": true,
"type": "PivotalTrackerService",
"category": "common",
"default": false,
@@ -7108,7 +7108,7 @@
"merge_requests_events": true,
"tag_push_events": true,
"note_events": true,
- "build_events": true,
+ "job_events": true,
"type": "JiraService",
"category": "issue_tracker",
"default": false,
@@ -7130,7 +7130,7 @@
"merge_requests_events": true,
"tag_push_events": true,
"note_events": true,
- "build_events": true,
+ "job_events": true,
"type": "IrkerService",
"category": "common",
"default": false,
@@ -7174,7 +7174,7 @@
"merge_requests_events": true,
"tag_push_events": true,
"note_events": true,
- "build_events": true,
+ "job_events": true,
"type": "GemnasiumService",
"category": "common",
"default": false,
@@ -7196,7 +7196,7 @@
"merge_requests_events": true,
"tag_push_events": true,
"note_events": true,
- "build_events": true,
+ "job_events": true,
"type": "FlowdockService",
"category": "common",
"default": false,
@@ -7218,7 +7218,7 @@
"merge_requests_events": true,
"tag_push_events": true,
"note_events": true,
- "build_events": true,
+ "job_events": true,
"type": "ExternalWikiService",
"category": "common",
"default": false,
@@ -7240,7 +7240,7 @@
"merge_requests_events": true,
"tag_push_events": true,
"note_events": true,
- "build_events": true,
+ "job_events": true,
"type": "EmailsOnPushService",
"category": "common",
"default": false,
@@ -7262,7 +7262,7 @@
"merge_requests_events": true,
"tag_push_events": true,
"note_events": true,
- "build_events": true,
+ "job_events": true,
"type": "DroneCiService",
"category": "ci",
"default": false,
@@ -7284,7 +7284,7 @@
"merge_requests_events": true,
"tag_push_events": true,
"note_events": true,
- "build_events": true,
+ "job_events": true,
"type": "CustomIssueTrackerService",
"category": "issue_tracker",
"default": false,
@@ -7306,7 +7306,7 @@
"merge_requests_events": true,
"tag_push_events": true,
"note_events": true,
- "build_events": true,
+ "job_events": true,
"type": "CampfireService",
"category": "common",
"default": false,
@@ -7328,7 +7328,7 @@
"merge_requests_events": true,
"tag_push_events": true,
"note_events": true,
- "build_events": true,
+ "job_events": true,
"type": "BuildkiteService",
"category": "ci",
"default": false,
@@ -7350,7 +7350,7 @@
"merge_requests_events": true,
"tag_push_events": true,
"note_events": true,
- "build_events": true,
+ "job_events": true,
"type": "BambooService",
"category": "ci",
"default": false,
@@ -7372,7 +7372,7 @@
"merge_requests_events": true,
"tag_push_events": true,
"note_events": true,
- "build_events": true,
+ "job_events": true,
"type": "AssemblaService",
"category": "common",
"default": false,
@@ -7394,7 +7394,7 @@
"merge_requests_events": true,
"tag_push_events": true,
"note_events": true,
- "build_events": true,
+ "job_events": true,
"type": "AssemblaService",
"category": "common",
"default": false,
@@ -7416,7 +7416,7 @@
"merge_requests_events": true,
"tag_push_events": true,
"note_events": true,
- "build_events": true,
+ "job_events": true,
"category": "common",
"default": false,
"wiki_page_events": true,
diff --git a/spec/lib/gitlab/import_export/relation_factory_spec.rb b/spec/lib/gitlab/import_export/relation_factory_spec.rb
index 06cd8ab87ed..5417c7534ea 100644
--- a/spec/lib/gitlab/import_export/relation_factory_spec.rb
+++ b/spec/lib/gitlab/import_export/relation_factory_spec.rb
@@ -33,7 +33,7 @@ describe Gitlab::ImportExport::RelationFactory, lib: true do
'tag_push_events' => false,
'note_events' => true,
'enable_ssl_verification' => true,
- 'build_events' => false,
+ 'job_events' => false,
'wiki_page_events' => true,
'token' => token
}
@@ -95,7 +95,7 @@ describe Gitlab::ImportExport::RelationFactory, lib: true do
'random_id' => 99,
'milestone_id' => 99,
'project_id' => 99,
- 'user_id' => 99,
+ 'user_id' => 99
}
end
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index a66086f8b47..c22fba11225 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -158,6 +158,7 @@ MergeRequest:
- time_estimate
- last_edited_at
- last_edited_by_id
+- head_pipeline_id
MergeRequestDiff:
- id
- state
@@ -188,6 +189,7 @@ Ci::Pipeline:
- user_id
- lock_version
- auto_canceled_by_id
+- pipeline_schedule_id
CommitStatus:
- id
- project_id
@@ -229,6 +231,7 @@ CommitStatus:
- lock_version
- coverage_regex
- auto_canceled_by_id
+- retried
Ci::Variable:
- id
- project_id
@@ -247,18 +250,19 @@ Ci::Trigger:
- owner_id
- description
- ref
-Ci::TriggerSchedule:
+Ci::PipelineSchedule:
- id
-- project_id
-- trigger_id
-- deleted_at
-- created_at
-- updated_at
+- description
+- ref
- cron
- cron_timezone
- next_run_at
-- ref
+- project_id
+- owner_id
- active
+- deleted_at
+- created_at
+- updated_at
DeployKey:
- id
- user_id
@@ -288,7 +292,7 @@ Service:
- tag_push_events
- note_events
- pipeline_events
-- build_events
+- job_events
- category
- default
- wiki_page_events
@@ -308,11 +312,12 @@ ProjectHook:
- note_events
- pipeline_events
- enable_ssl_verification
-- build_events
+- job_events
- wiki_page_events
- token
- group_id
- confidential_issues_events
+- repository_update_events
ProtectedBranch:
- id
- project_id
diff --git a/spec/lib/gitlab/other_markup_spec.rb b/spec/lib/gitlab/other_markup_spec.rb
index d6d53e8586c..c0f5fa9dc1f 100644
--- a/spec/lib/gitlab/other_markup_spec.rb
+++ b/spec/lib/gitlab/other_markup_spec.rb
@@ -13,7 +13,7 @@ describe Gitlab::OtherMarkup, lib: true do
}
links.each do |name, data|
it "does not convert dangerous #{name} into HTML" do
- expect(render(data[:file], data[:input])).to eq(data[:output])
+ expect(render(data[:file], data[:input], context)).to eq(data[:output])
end
end
end
diff --git a/spec/lib/gitlab/project_search_results_spec.rb b/spec/lib/gitlab/project_search_results_spec.rb
index a7c8e7f1f57..1b8690ba613 100644
--- a/spec/lib/gitlab/project_search_results_spec.rb
+++ b/spec/lib/gitlab/project_search_results_spec.rb
@@ -22,11 +22,40 @@ describe Gitlab::ProjectSearchResults, lib: true do
end
describe 'blob search' do
- let(:project) { create(:project, :repository) }
- let(:results) { described_class.new(user, project, 'files').objects('blobs') }
+ let(:project) { create(:project, :public, :repository) }
+
+ subject(:results) { described_class.new(user, project, 'files').objects('blobs') }
+
+ context 'when repository is disabled' do
+ let(:project) { create(:project, :public, :repository, :repository_disabled) }
+
+ it 'hides blobs from members' do
+ project.add_reporter(user)
+
+ is_expected.to be_empty
+ end
+
+ it 'hides blobs from non-members' do
+ is_expected.to be_empty
+ end
+ end
+
+ context 'when repository is internal' do
+ let(:project) { create(:project, :public, :repository, :repository_private) }
+
+ it 'finds blobs for members' do
+ project.add_reporter(user)
+
+ is_expected.not_to be_empty
+ end
+
+ it 'hides blobs from non-members' do
+ is_expected.to be_empty
+ end
+ end
it 'finds by name' do
- expect(results).to include(["files/images/wm.svg", nil])
+ expect(results.map(&:first)).to include('files/images/wm.svg')
end
it 'finds by content' do
@@ -70,6 +99,46 @@ describe Gitlab::ProjectSearchResults, lib: true do
end
end
+ describe 'wiki search' do
+ let(:project) { create(:project, :public) }
+ let(:wiki) { build(:project_wiki, project: project) }
+ let!(:wiki_page) { wiki.create_page('Title', 'Content') }
+
+ subject(:results) { described_class.new(user, project, 'Content').objects('wiki_blobs') }
+
+ context 'when wiki is disabled' do
+ let(:project) { create(:project, :public, :wiki_disabled) }
+
+ it 'hides wiki blobs from members' do
+ project.add_reporter(user)
+
+ is_expected.to be_empty
+ end
+
+ it 'hides wiki blobs from non-members' do
+ is_expected.to be_empty
+ end
+ end
+
+ context 'when wiki is internal' do
+ let(:project) { create(:project, :public, :wiki_private) }
+
+ it 'finds wiki blobs for members' do
+ project.add_reporter(user)
+
+ is_expected.not_to be_empty
+ end
+
+ it 'hides wiki blobs from non-members' do
+ is_expected.to be_empty
+ end
+ end
+
+ it 'finds by content' do
+ expect(results).to include("master:Title.md:1:Content\n")
+ end
+ end
+
it 'does not list issues on private projects' do
issue = create(:issue, project: project)
@@ -79,7 +148,6 @@ describe Gitlab::ProjectSearchResults, lib: true do
end
describe 'confidential issues' do
- let(:project) { create(:empty_project) }
let(:query) { 'issue' }
let(:author) { create(:user) }
let(:assignee) { create(:user) }
@@ -277,6 +345,7 @@ describe Gitlab::ProjectSearchResults, lib: true do
context 'by commit hash' do
let(:project) { create(:project, :public, :repository) }
let(:commit) { project.repository.commit('0b4bc9a') }
+
commit_hashes = { short: '0b4bc9a', full: '0b4bc9a49b562e85de7cc9e834518ea6828729b9' }
commit_hashes.each do |type, commit_hash|
diff --git a/spec/lib/gitlab/prometheus/queries/deployment_query_spec.rb b/spec/lib/gitlab/prometheus/queries/deployment_query_spec.rb
new file mode 100644
index 00000000000..d957dd932c4
--- /dev/null
+++ b/spec/lib/gitlab/prometheus/queries/deployment_query_spec.rb
@@ -0,0 +1,37 @@
+require 'spec_helper'
+
+describe Gitlab::Prometheus::Queries::DeploymentQuery, lib: true do
+ let(:environment) { create(:environment, slug: 'environment-slug') }
+ let(:deployment) { create(:deployment, environment: environment) }
+
+ let(:client) { double('prometheus_client') }
+ subject { described_class.new(client) }
+
+ around do |example|
+ time_without_subsecond_values = Time.local(2008, 9, 1, 12, 0, 0)
+ Timecop.freeze(time_without_subsecond_values) { example.run }
+ end
+
+ it 'sends appropriate queries to prometheus' do
+ start_time = (deployment.created_at - 30.minutes).to_f
+ stop_time = (deployment.created_at + 30.minutes).to_f
+ created_at = deployment.created_at.to_f
+
+ expect(client).to receive(:query_range).with('avg(container_memory_usage_bytes{container_name!="POD",environment="environment-slug"}) / 2^20',
+ start: start_time, stop: stop_time)
+ expect(client).to receive(:query).with('avg(avg_over_time(container_memory_usage_bytes{container_name!="POD",environment="environment-slug"}[30m]))',
+ time: created_at)
+ expect(client).to receive(:query).with('avg(avg_over_time(container_memory_usage_bytes{container_name!="POD",environment="environment-slug"}[30m]))',
+ time: stop_time)
+
+ expect(client).to receive(:query_range).with('avg(rate(container_cpu_usage_seconds_total{container_name!="POD",environment="environment-slug"}[2m])) * 100',
+ start: start_time, stop: stop_time)
+ expect(client).to receive(:query).with('avg(rate(container_cpu_usage_seconds_total{container_name!="POD",environment="environment-slug"}[30m])) * 100',
+ time: created_at)
+ expect(client).to receive(:query).with('avg(rate(container_cpu_usage_seconds_total{container_name!="POD",environment="environment-slug"}[30m])) * 100',
+ time: stop_time)
+
+ expect(subject.query(deployment.id)).to eq(memory_values: nil, memory_before: nil, memory_after: nil,
+ cpu_values: nil, cpu_before: nil, cpu_after: nil)
+ end
+end
diff --git a/spec/lib/gitlab/prometheus_spec.rb b/spec/lib/gitlab/prometheus_client_spec.rb
index fc453a2704b..2d8bd2f6b97 100644
--- a/spec/lib/gitlab/prometheus_spec.rb
+++ b/spec/lib/gitlab/prometheus_client_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Gitlab::Prometheus, lib: true do
+describe Gitlab::PrometheusClient, lib: true do
include PrometheusHelpers
subject { described_class.new(api_url: 'https://prometheus.example.com') }
@@ -81,7 +81,11 @@ describe Gitlab::Prometheus, lib: true do
describe '#query' do
let(:prometheus_query) { prometheus_cpu_query('env-slug') }
- let(:query_url) { prometheus_query_url(prometheus_query) }
+ let(:query_url) { prometheus_query_with_time_url(prometheus_query, Time.now.utc) }
+
+ around do |example|
+ Timecop.freeze { example.run }
+ end
context 'when request returns vector results' do
it 'returns data from the API call' do
@@ -123,6 +127,20 @@ describe Gitlab::Prometheus, lib: true do
Timecop.freeze { example.run }
end
+ context 'when non utc time is passed' do
+ let(:time_stop) { Time.now.in_time_zone("Warsaw") }
+ let(:time_start) { time_stop - 8.hours }
+
+ let(:query_url) { prometheus_query_range_url(prometheus_query, start: time_start.utc.to_f, stop: time_stop.utc.to_f) }
+
+ it 'passed dates are properly converted to utc' do
+ req_stub = stub_prometheus_request(query_url, body: prometheus_values_body('vector'))
+
+ subject.query_range(prometheus_query, start: time_start, stop: time_stop)
+ expect(req_stub).to have_been_requested
+ end
+ end
+
context 'when a start time is passed' do
let(:query_url) { prometheus_query_range_url(prometheus_query, start: 2.hours.ago) }
diff --git a/spec/lib/gitlab/repo_path_spec.rb b/spec/lib/gitlab/repo_path_spec.rb
index f94c9c2e315..f9025397107 100644
--- a/spec/lib/gitlab/repo_path_spec.rb
+++ b/spec/lib/gitlab/repo_path_spec.rb
@@ -29,7 +29,7 @@ describe ::Gitlab::RepoPath do
before do
allow(Gitlab.config.repositories).to receive(:storages).and_return({
'storage1' => { 'path' => '/foo' },
- 'storage2' => { 'path' => '/bar' },
+ 'storage2' => { 'path' => '/bar' }
})
end
diff --git a/spec/lib/gitlab/string_range_marker_spec.rb b/spec/lib/gitlab/string_range_marker_spec.rb
new file mode 100644
index 00000000000..7c77772b3f6
--- /dev/null
+++ b/spec/lib/gitlab/string_range_marker_spec.rb
@@ -0,0 +1,36 @@
+require 'spec_helper'
+
+describe Gitlab::StringRangeMarker, lib: true do
+ describe '#mark' do
+ context "when the rich text is html safe" do
+ let(:raw) { "abc <def>" }
+ let(:rich) { %{<span class="abc">abc</span><span class="space"> </span><span class="def">&lt;def&gt;</span>}.html_safe }
+ let(:inline_diffs) { [2..5] }
+ subject do
+ described_class.new(raw, rich).mark(inline_diffs) do |text, left:, right:|
+ "LEFT#{text}RIGHT"
+ end
+ end
+
+ it 'marks the inline diffs' do
+ expect(subject).to eq(%{<span class="abc">abLEFTcRIGHT</span><span class="space">LEFT RIGHT</span><span class="def">LEFT&lt;dRIGHTef&gt;</span>})
+ expect(subject).to be_html_safe
+ end
+ end
+
+ context "when the rich text is not html safe" do
+ let(:raw) { "abc <def>" }
+ let(:inline_diffs) { [2..5] }
+ subject do
+ described_class.new(raw).mark(inline_diffs) do |text, left:, right:|
+ "LEFT#{text}RIGHT"
+ end
+ end
+
+ it 'marks the inline diffs' do
+ expect(subject).to eq(%{abLEFTc &lt;dRIGHTef&gt;})
+ expect(subject).to be_html_safe
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/string_regex_marker_spec.rb b/spec/lib/gitlab/string_regex_marker_spec.rb
new file mode 100644
index 00000000000..2f5cf6c6e3b
--- /dev/null
+++ b/spec/lib/gitlab/string_regex_marker_spec.rb
@@ -0,0 +1,18 @@
+require 'spec_helper'
+
+describe Gitlab::StringRegexMarker, lib: true do
+ describe '#mark' do
+ let(:raw) { %{"name": "AFNetworking"} }
+ let(:rich) { %{<span class="key">"name"</span><span class="punctuation">: </span><span class="value">"AFNetworking"</span>}.html_safe }
+ subject do
+ described_class.new(raw, rich).mark(/"[^"]+":\s*"(?<name>[^"]+)"/, group: :name) do |text, left:, right:|
+ %{<a href="#">#{text}</a>}
+ end
+ end
+
+ it 'marks the inline diffs' do
+ expect(subject).to eq(%{<span class="key">"name"</span><span class="punctuation">: </span><span class="value">"<a href="#">AFNetworking</a>"</span>})
+ expect(subject).to be_html_safe
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index bf1dfe7f412..b47e1b56fa9 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -17,6 +17,7 @@ describe Gitlab::UsageData do
edition
version
uuid
+ hostname
))
end
@@ -32,6 +33,7 @@ describe Gitlab::UsageData do
ci_pipelines
ci_runners
ci_triggers
+ ci_pipeline_schedules
deploy_keys
deployments
environments
@@ -48,7 +50,6 @@ describe Gitlab::UsageData do
pages_domains
protected_branches
releases
- services
snippets
todos
uploads
diff --git a/spec/lib/gitlab/user_access_spec.rb b/spec/lib/gitlab/user_access_spec.rb
index 2b27ff66c09..0d87cf25dbb 100644
--- a/spec/lib/gitlab/user_access_spec.rb
+++ b/spec/lib/gitlab/user_access_spec.rb
@@ -5,7 +5,7 @@ describe Gitlab::UserAccess, lib: true do
let(:project) { create(:project) }
let(:user) { create(:user) }
- describe 'can_push_to_branch?' do
+ describe '#can_push_to_branch?' do
    describe 'push to unprotected branch' do
it 'returns true if user is a master' do
project.team << [user, :master]
@@ -143,7 +143,7 @@ describe Gitlab::UserAccess, lib: true do
end
end
- describe 'can_create_tag?' do
+ describe '#can_create_tag?' do
    describe 'push to unprotected tag' do
it 'returns true if user is a master' do
project.add_user(user, :master)
@@ -211,4 +211,48 @@ describe Gitlab::UserAccess, lib: true do
end
end
end
+
+ describe '#can_delete_branch?' do
+ describe 'delete unprotected branch' do
+ it 'returns true if user is a master' do
+ project.add_user(user, :master)
+
+ expect(access.can_delete_branch?('random_branch')).to be_truthy
+ end
+
+ it 'returns true if user is a developer' do
+ project.add_user(user, :developer)
+
+ expect(access.can_delete_branch?('random_branch')).to be_truthy
+ end
+
+ it 'returns false if user is a reporter' do
+ project.add_user(user, :reporter)
+
+ expect(access.can_delete_branch?('random_branch')).to be_falsey
+ end
+ end
+
+ describe 'delete protected branch' do
+ let(:branch) { create(:protected_branch, project: project, name: "test") }
+
+ it 'returns true if user is a master' do
+ project.add_user(user, :master)
+
+ expect(access.can_delete_branch?(branch.name)).to be_truthy
+ end
+
+ it 'returns false if user is a developer' do
+ project.add_user(user, :developer)
+
+ expect(access.can_delete_branch?(branch.name)).to be_falsey
+ end
+
+ it 'returns false if user is a reporter' do
+ project.add_user(user, :reporter)
+
+ expect(access.can_delete_branch?(branch.name)).to be_falsey
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/workhorse_spec.rb b/spec/lib/gitlab/workhorse_spec.rb
index beb1791a429..fdbb55fc874 100644
--- a/spec/lib/gitlab/workhorse_spec.rb
+++ b/spec/lib/gitlab/workhorse_spec.rb
@@ -202,7 +202,7 @@ describe Gitlab::Workhorse, lib: true do
context 'when Gitaly is enabled' do
let(:gitaly_params) do
{
- GitalyAddress: Gitlab::GitalyClient.get_address('default'),
+ GitalyAddress: Gitlab::GitalyClient.address('default')
}
end
@@ -214,7 +214,7 @@ describe Gitlab::Workhorse, lib: true do
repo_param = { Repository: {
path: repo_path,
storage_name: 'default',
- relative_path: project.full_path + '.git',
+ relative_path: project.full_path + '.git'
} }
expect(subject).to include(repo_param)
diff --git a/spec/migrations/add_head_pipeline_for_each_merge_request_spec.rb b/spec/migrations/add_head_pipeline_for_each_merge_request_spec.rb
new file mode 100644
index 00000000000..bd5f85b901d
--- /dev/null
+++ b/spec/migrations/add_head_pipeline_for_each_merge_request_spec.rb
@@ -0,0 +1,33 @@
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20170508170547_add_head_pipeline_for_each_merge_request.rb')
+
+describe AddHeadPipelineForEachMergeRequest do
+ let(:migration) { described_class.new }
+
+ let!(:project) { create(:empty_project) }
+ let!(:forked_project_link) { create(:forked_project_link, forked_from_project: project) }
+ let!(:other_project) { forked_project_link.forked_to_project }
+
+ let!(:pipeline_1) { create(:ci_pipeline, project: project, ref: "branch_1") }
+ let!(:pipeline_2) { create(:ci_pipeline, project: other_project, ref: "branch_1") }
+ let!(:pipeline_3) { create(:ci_pipeline, project: other_project, ref: "branch_1") }
+ let!(:pipeline_4) { create(:ci_pipeline, project: project, ref: "branch_2") }
+
+ let!(:mr_1) { create(:merge_request, source_project: project, target_project: project, source_branch: "branch_1", target_branch: "target_1") }
+ let!(:mr_2) { create(:merge_request, source_project: other_project, target_project: project, source_branch: "branch_1", target_branch: "target_2") }
+ let!(:mr_3) { create(:merge_request, source_project: project, target_project: project, source_branch: "branch_2", target_branch: "master") }
+ let!(:mr_4) { create(:merge_request, source_project: project, target_project: project, source_branch: "branch_3", target_branch: "master") }
+
+ context "#up" do
+    context "when source_project and source_branch of pipeline are the same as the merge request's" do
+ it "sets head_pipeline_id of given merge requests" do
+ migration.up
+
+ expect(mr_1.reload.head_pipeline_id).to eq(pipeline_1.id)
+ expect(mr_2.reload.head_pipeline_id).to eq(pipeline_3.id)
+ expect(mr_3.reload.head_pipeline_id).to eq(pipeline_4.id)
+ expect(mr_4.reload.head_pipeline_id).to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/migrations/cleanup_namespaceless_pending_delete_projects_spec.rb b/spec/migrations/cleanup_namespaceless_pending_delete_projects_spec.rb
new file mode 100644
index 00000000000..49e750a3f4d
--- /dev/null
+++ b/spec/migrations/cleanup_namespaceless_pending_delete_projects_spec.rb
@@ -0,0 +1,32 @@
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20170502101023_cleanup_namespaceless_pending_delete_projects.rb')
+
+describe CleanupNamespacelessPendingDeleteProjects do
+ before do
+ # Stub after_save callbacks that will fail when Project has no namespace
+ allow_any_instance_of(Project).to receive(:ensure_dir_exist).and_return(nil)
+ allow_any_instance_of(Project).to receive(:update_project_statistics).and_return(nil)
+ end
+
+ describe '#up' do
+ it 'only cleans up pending delete projects' do
+ create(:empty_project)
+ create(:empty_project, pending_delete: true)
+ project = build(:empty_project, pending_delete: true, namespace_id: nil)
+ project.save(validate: false)
+
+ expect(NamespacelessProjectDestroyWorker).to receive(:bulk_perform_async).with([[project.id]])
+
+ described_class.new.up
+ end
+
+    it 'does nothing when no pending delete projects without a namespace are found' do
+ create(:empty_project)
+ create(:empty_project, pending_delete: true)
+
+ expect(NamespacelessProjectDestroyWorker).not_to receive(:bulk_perform_async)
+
+ described_class.new.up
+ end
+ end
+end
diff --git a/spec/migrations/fix_wrongly_renamed_routes_spec.rb b/spec/migrations/fix_wrongly_renamed_routes_spec.rb
new file mode 100644
index 00000000000..148290b0e7d
--- /dev/null
+++ b/spec/migrations/fix_wrongly_renamed_routes_spec.rb
@@ -0,0 +1,73 @@
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20170518231126_fix_wrongly_renamed_routes.rb')
+
+describe FixWronglyRenamedRoutes, truncate: true do
+ let(:subject) { described_class.new }
+ let(:broken_namespace) do
+ namespace = create(:group, name: 'apiis')
+ namespace.route.update_attribute(:path, 'api0is')
+ namespace
+ end
+
+ describe '#wrongly_renamed' do
+ it "includes routes that have names that don't match their namespace" do
+ broken_namespace
+ _other_namespace = create(:group, name: 'api0')
+
+ expect(subject.wrongly_renamed.map(&:id))
+ .to contain_exactly(broken_namespace.route.id)
+ end
+ end
+
+ describe "#paths_and_corrections" do
+ it 'finds the wrong path and gets the correction from the namespace' do
+ broken_namespace
+ namespace = create(:group, name: 'uploads-test')
+ namespace.route.update_attribute(:path, 'uploads0-test')
+
+ expected_result = [
+ { 'namespace_path' => 'apiis', 'path' => 'api0is' },
+ { 'namespace_path' => 'uploads-test', 'path' => 'uploads0-test' }
+ ]
+
+ expect(subject.paths_and_corrections).to include(*expected_result)
+ end
+ end
+
+ describe '#routes_in_namespace_query' do
+ it 'includes only the required routes' do
+ namespace = create(:group, path: 'hello')
+ project = create(:empty_project, namespace: namespace)
+ _other_namespace = create(:group, path: 'hello0')
+
+ result = Route.where(subject.routes_in_namespace_query('hello'))
+
+ expect(result).to contain_exactly(namespace.route, project.route)
+ end
+ end
+
+ describe '#up' do
+ let(:broken_project) do
+ project = create(:empty_project, namespace: broken_namespace, path: 'broken-project')
+ project.route.update_attribute(:path, 'api0is/broken-project')
+ project
+ end
+
+ it 'renames incorrectly named routes' do
+ broken_project
+
+ subject.up
+
+ expect(broken_project.route.reload.path).to eq('apiis/broken-project')
+ expect(broken_namespace.route.reload.path).to eq('apiis')
+ end
+
+ it "doesn't touch namespaces that look like something that should be renamed" do
+ namespace = create(:group, path: 'api0')
+
+ subject.up
+
+ expect(namespace.route.reload.path).to eq('api0')
+ end
+ end
+end
diff --git a/spec/migrations/migrate_build_events_to_pipeline_events_spec.rb b/spec/migrations/migrate_build_events_to_pipeline_events_spec.rb
deleted file mode 100644
index 57eb03e3c80..00000000000
--- a/spec/migrations/migrate_build_events_to_pipeline_events_spec.rb
+++ /dev/null
@@ -1,74 +0,0 @@
-require 'spec_helper'
-require Rails.root.join('db', 'post_migrate', '20170301205640_migrate_build_events_to_pipeline_events.rb')
-
-# This migration uses multiple threads, and thus different transactions. This
-# means data created in this spec may not be visible to some threads. To work
-# around this we use the TRUNCATE cleaning strategy.
-describe MigrateBuildEventsToPipelineEvents, truncate: true do
- let(:migration) { described_class.new }
- let(:project_with_pipeline_service) { create(:empty_project) }
- let(:project_with_build_service) { create(:empty_project) }
-
- before do
- ActiveRecord::Base.connection.execute <<-SQL
- INSERT INTO services (properties, build_events, pipeline_events, type)
- VALUES
- ('{"notify_only_broken_builds":true}', true, false, 'SlackService')
- , ('{"notify_only_broken_builds":true}', true, false, 'MattermostService')
- , ('{"notify_only_broken_builds":true}', true, false, 'HipchatService')
- ;
- SQL
-
- ActiveRecord::Base.connection.execute <<-SQL
- INSERT INTO services
- (properties, build_events, pipeline_events, type, project_id)
- VALUES
- ('{"notify_only_broken_builds":true}', true, false,
- 'BuildsEmailService', #{project_with_pipeline_service.id})
- , ('{"notify_only_broken_pipelines":true}', false, true,
- 'PipelinesEmailService', #{project_with_pipeline_service.id})
- , ('{"notify_only_broken_builds":true}', true, false,
- 'BuildsEmailService', #{project_with_build_service.id})
- ;
- SQL
- end
-
- describe '#up' do
- before do
- silence_migration = Module.new do
- # rubocop:disable Rails/Delegate
- def execute(query)
- connection.execute(query)
- end
- end
-
- migration.extend(silence_migration)
- migration.up
- end
-
- it 'migrates chat service properly' do
- [SlackService, MattermostService, HipchatService].each do |service|
- expect(service.count).to eq(1)
-
- verify_service_record(service.first)
- end
- end
-
- it 'migrates pipelines email service only if it has none before' do
- Project.find_each do |project|
- pipeline_service_count =
- project.services.where(type: 'PipelinesEmailService').count
-
- expect(pipeline_service_count).to eq(1)
-
- verify_service_record(project.pipelines_email_service)
- end
- end
-
- def verify_service_record(service)
- expect(service.notify_only_broken_pipelines).to be(true)
- expect(service.build_events).to be(false)
- expect(service.pipeline_events).to be(true)
- end
- end
-end
diff --git a/spec/migrations/migrate_user_project_view_spec.rb b/spec/migrations/migrate_user_project_view_spec.rb
index dacaa834aa9..70f8e0d6082 100644
--- a/spec/migrations/migrate_user_project_view_spec.rb
+++ b/spec/migrations/migrate_user_project_view_spec.rb
@@ -5,7 +5,12 @@ require Rails.root.join('db', 'post_migrate', '20170406142253_migrate_user_proje
describe MigrateUserProjectView do
let(:migration) { described_class.new }
- let!(:user) { create(:user, project_view: 'readme') }
+ let!(:user) { create(:user) }
+
+ before do
+ # 0 is the numeric value for the old 'readme' option
+ user.update_column(:project_view, 0)
+ end
describe '#up' do
it 'updates project view setting with new value' do
diff --git a/spec/migrations/rename_users_with_renamed_namespace_spec.rb b/spec/migrations/rename_users_with_renamed_namespace_spec.rb
new file mode 100644
index 00000000000..1e9aab3d9a1
--- /dev/null
+++ b/spec/migrations/rename_users_with_renamed_namespace_spec.rb
@@ -0,0 +1,22 @@
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20170518200835_rename_users_with_renamed_namespace.rb')
+
+describe RenameUsersWithRenamedNamespace, truncate: true do
+ it 'renames a user that had their namespace renamed to the namespace path' do
+ other_user = create(:user, username: 'kodingu')
+ other_user1 = create(:user, username: 'api0')
+
+ user = create(:user, username: "Users0")
+ user.update_attribute(:username, 'Users')
+ user1 = create(:user, username: "import0")
+ user1.update_attribute(:username, 'import')
+
+ described_class.new.up
+
+ expect(user.reload.username).to eq('Users0')
+ expect(user1.reload.username).to eq('import0')
+
+ expect(other_user.reload.username).to eq('kodingu')
+ expect(other_user1.reload.username).to eq('api0')
+ end
+end
diff --git a/spec/migrations/update_retried_for_ci_builds_spec.rb b/spec/migrations/update_retried_for_ci_builds_spec.rb
new file mode 100644
index 00000000000..3742b4dafe5
--- /dev/null
+++ b/spec/migrations/update_retried_for_ci_builds_spec.rb
@@ -0,0 +1,17 @@
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20170503004427_update_retried_for_ci_build.rb')
+
+describe UpdateRetriedForCiBuild, truncate: true do
+ let(:pipeline) { create(:ci_pipeline) }
+ let!(:build_old) { create(:ci_build, pipeline: pipeline, name: 'test') }
+ let!(:build_new) { create(:ci_build, pipeline: pipeline, name: 'test') }
+
+ before do
+ described_class.new.up
+ end
+
+ it 'updates ci_builds.is_retried' do
+ expect(build_old.reload).to be_retried
+ expect(build_new.reload).not_to be_retried
+ end
+end
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index c2c19c62048..fa229542f70 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -89,7 +89,7 @@ describe ApplicationSetting, models: true do
storages = {
'custom1' => 'tmp/tests/custom_repositories_1',
'custom2' => 'tmp/tests/custom_repositories_2',
- 'custom3' => 'tmp/tests/custom_repositories_3',
+ 'custom3' => 'tmp/tests/custom_repositories_3'
}
allow(Gitlab.config.repositories).to receive(:storages).and_return(storages)
@@ -211,4 +211,66 @@ describe ApplicationSetting, models: true do
expect(setting.domain_blacklist).to contain_exactly('example.com', 'test.com', 'foo.bar')
end
end
+
+ describe 'usage ping settings' do
+ context 'when the usage ping is disabled in gitlab.yml' do
+ before do
+ allow(Settings.gitlab).to receive(:usage_ping_enabled).and_return(false)
+ end
+
+ it 'does not allow the usage ping to be configured' do
+ expect(setting.usage_ping_can_be_configured?).to be_falsey
+ end
+
+ context 'when the usage ping is disabled in the DB' do
+ before do
+ setting.usage_ping_enabled = false
+ end
+
+ it 'returns false for usage_ping_enabled' do
+ expect(setting.usage_ping_enabled).to be_falsey
+ end
+ end
+
+ context 'when the usage ping is enabled in the DB' do
+ before do
+ setting.usage_ping_enabled = true
+ end
+
+ it 'returns false for usage_ping_enabled' do
+ expect(setting.usage_ping_enabled).to be_falsey
+ end
+ end
+ end
+
+ context 'when the usage ping is enabled in gitlab.yml' do
+ before do
+ allow(Settings.gitlab).to receive(:usage_ping_enabled).and_return(true)
+ end
+
+ it 'allows the usage ping to be configured' do
+ expect(setting.usage_ping_can_be_configured?).to be_truthy
+ end
+
+ context 'when the usage ping is disabled in the DB' do
+ before do
+ setting.usage_ping_enabled = false
+ end
+
+ it 'returns false for usage_ping_enabled' do
+ expect(setting.usage_ping_enabled).to be_falsey
+ end
+ end
+
+ context 'when the usage ping is enabled in the DB' do
+ before do
+ setting.usage_ping_enabled = true
+ end
+
+ it 'returns true for usage_ping_enabled' do
+ expect(setting.usage_ping_enabled).to be_truthy
+ end
+ end
+ end
+ end
end
diff --git a/spec/models/blob_spec.rb b/spec/models/blob_spec.rb
index f84c6b48173..f19e1af65a6 100644
--- a/spec/models/blob_spec.rb
+++ b/spec/models/blob_spec.rb
@@ -271,6 +271,52 @@ describe Blob do
end
end
+ describe '#auxiliary_viewer' do
+ context 'when the blob has an external storage error' do
+ before do
+ project.lfs_enabled = false
+ end
+
+ it 'returns nil' do
+ blob = fake_blob(path: 'LICENSE', lfs: true)
+
+ expect(blob.auxiliary_viewer).to be_nil
+ end
+ end
+
+ context 'when the blob is empty' do
+ it 'returns nil' do
+ blob = fake_blob(data: '')
+
+ expect(blob.auxiliary_viewer).to be_nil
+ end
+ end
+
+ context 'when the blob is stored externally' do
+ it 'returns a matching viewer' do
+ blob = fake_blob(path: 'LICENSE', lfs: true)
+
+ expect(blob.auxiliary_viewer).to be_a(BlobViewer::License)
+ end
+ end
+
+ context 'when the blob is binary' do
+ it 'returns nil' do
+ blob = fake_blob(path: 'LICENSE', binary: true)
+
+ expect(blob.auxiliary_viewer).to be_nil
+ end
+ end
+
+ context 'when the blob is text-based' do
+ it 'returns a matching text-based viewer' do
+ blob = fake_blob(path: 'LICENSE')
+
+ expect(blob.auxiliary_viewer).to be_a(BlobViewer::License)
+ end
+ end
+ end
+
describe '#rendered_as_text?' do
context 'when ignoring errors' do
context 'when the simple viewer is text-based' do
diff --git a/spec/models/blob_viewer/base_spec.rb b/spec/models/blob_viewer/base_spec.rb
index 740ad9d275e..92fbf64a6b7 100644
--- a/spec/models/blob_viewer/base_spec.rb
+++ b/spec/models/blob_viewer/base_spec.rb
@@ -7,10 +7,12 @@ describe BlobViewer::Base, model: true do
let(:viewer_class) do
Class.new(described_class) do
+ include BlobViewer::ServerSide
+
self.extensions = %w(pdf)
- self.max_size = 1.megabyte
- self.absolute_max_size = 5.megabytes
- self.client_side = false
+ self.binary = true
+ self.overridable_max_size = 1.megabyte
+ self.max_size = 5.megabytes
end
end
@@ -18,14 +20,47 @@ describe BlobViewer::Base, model: true do
describe '.can_render?' do
context 'when the extension is supported' do
- let(:blob) { fake_blob(path: 'file.pdf') }
+ context 'when the binaryness matches' do
+ let(:blob) { fake_blob(path: 'file.pdf', binary: true) }
- it 'returns true' do
- expect(viewer_class.can_render?(blob)).to be_truthy
+ it 'returns true' do
+ expect(viewer_class.can_render?(blob)).to be_truthy
+ end
+ end
+
+ context 'when the binaryness does not match' do
+ let(:blob) { fake_blob(path: 'file.pdf', binary: false) }
+
+ it 'returns false' do
+ expect(viewer_class.can_render?(blob)).to be_falsey
+ end
+ end
+ end
+
+ context 'when the file type is supported' do
+ before do
+ viewer_class.file_types = %i(license)
+ viewer_class.binary = false
+ end
+
+ context 'when the binaryness matches' do
+ let(:blob) { fake_blob(path: 'LICENSE', binary: false) }
+
+ it 'returns true' do
+ expect(viewer_class.can_render?(blob)).to be_truthy
+ end
+ end
+
+ context 'when the binaryness does not match' do
+ let(:blob) { fake_blob(path: 'LICENSE', binary: true) }
+
+ it 'returns false' do
+ expect(viewer_class.can_render?(blob)).to be_falsey
+ end
end
end
- context 'when the extension is not supported' do
+ context 'when the extension and file type are not supported' do
let(:blob) { fake_blob(path: 'file.txt') }
it 'returns false' do
@@ -34,45 +69,45 @@ describe BlobViewer::Base, model: true do
end
end
- describe '#too_large?' do
- context 'when the blob size is larger than the max size' do
+ describe '#exceeds_overridable_max_size?' do
+ context 'when the blob size is larger than the overridable max size' do
let(:blob) { fake_blob(path: 'file.pdf', size: 2.megabytes) }
it 'returns true' do
- expect(viewer.too_large?).to be_truthy
+ expect(viewer.exceeds_overridable_max_size?).to be_truthy
end
end
- context 'when the blob size is smaller than the max size' do
+ context 'when the blob size is smaller than the overridable max size' do
let(:blob) { fake_blob(path: 'file.pdf', size: 10.kilobytes) }
it 'returns false' do
- expect(viewer.too_large?).to be_falsey
+ expect(viewer.exceeds_overridable_max_size?).to be_falsey
end
end
end
- describe '#absolutely_too_large?' do
- context 'when the blob size is larger than the absolute max size' do
+ describe '#exceeds_max_size?' do
+ context 'when the blob size is larger than the max size' do
let(:blob) { fake_blob(path: 'file.pdf', size: 10.megabytes) }
it 'returns true' do
- expect(viewer.absolutely_too_large?).to be_truthy
+ expect(viewer.exceeds_max_size?).to be_truthy
end
end
- context 'when the blob size is smaller than the absolute max size' do
+ context 'when the blob size is smaller than the max size' do
let(:blob) { fake_blob(path: 'file.pdf', size: 2.megabytes) }
it 'returns false' do
- expect(viewer.absolutely_too_large?).to be_falsey
+ expect(viewer.exceeds_max_size?).to be_falsey
end
end
end
describe '#can_override_max_size?' do
- context 'when the blob size is larger than the max size' do
- context 'when the blob size is larger than the absolute max size' do
+ context 'when the blob size is larger than the overridable max size' do
+ context 'when the blob size is larger than the max size' do
let(:blob) { fake_blob(path: 'file.pdf', size: 10.megabytes) }
it 'returns false' do
@@ -80,7 +115,7 @@ describe BlobViewer::Base, model: true do
end
end
- context 'when the blob size is smaller than the absolute max size' do
+ context 'when the blob size is smaller than the max size' do
let(:blob) { fake_blob(path: 'file.pdf', size: 2.megabytes) }
it 'returns true' do
@@ -89,7 +124,7 @@ describe BlobViewer::Base, model: true do
end
end
- context 'when the blob size is smaller than the max size' do
+ context 'when the blob size is smaller than the overridable max size' do
let(:blob) { fake_blob(path: 'file.pdf', size: 10.kilobytes) }
it 'returns false' do
@@ -104,7 +139,7 @@ describe BlobViewer::Base, model: true do
viewer.override_max_size = true
end
- context 'when the blob size is larger than the absolute max size' do
+ context 'when the blob size is larger than the max size' do
let(:blob) { fake_blob(path: 'file.pdf', size: 10.megabytes) }
it 'returns :too_large' do
@@ -112,7 +147,7 @@ describe BlobViewer::Base, model: true do
end
end
- context 'when the blob size is smaller than the absolute max size' do
+ context 'when the blob size is smaller than the max size' do
let(:blob) { fake_blob(path: 'file.pdf', size: 2.megabytes) }
it 'returns nil' do
@@ -122,7 +157,7 @@ describe BlobViewer::Base, model: true do
end
context 'when the max size is not overridden' do
- context 'when the blob size is larger than the max size' do
+ context 'when the blob size is larger than the overridable max size' do
let(:blob) { fake_blob(path: 'file.pdf', size: 2.megabytes) }
it 'returns :too_large' do
@@ -130,7 +165,7 @@ describe BlobViewer::Base, model: true do
end
end
- context 'when the blob size is smaller than the max size' do
+ context 'when the blob size is smaller than the overridable max size' do
let(:blob) { fake_blob(path: 'file.pdf', size: 10.kilobytes) }
it 'returns nil' do
@@ -138,49 +173,5 @@ describe BlobViewer::Base, model: true do
end
end
end
-
- context 'when the viewer is server side but the blob is stored externally' do
- let(:project) { build(:empty_project, lfs_enabled: true) }
-
- let(:blob) { fake_blob(path: 'file.pdf', lfs: true) }
-
- before do
- allow(Gitlab.config.lfs).to receive(:enabled).and_return(true)
- end
-
- it 'return :server_side_but_stored_externally' do
- expect(viewer.render_error).to eq(:server_side_but_stored_externally)
- end
- end
- end
-
- describe '#prepare!' do
- context 'when the viewer is server side' do
- let(:blob) { fake_blob(path: 'file.md') }
-
- before do
- viewer_class.client_side = false
- end
-
- it 'loads all blob data' do
- expect(blob).to receive(:load_all_data!)
-
- viewer.prepare!
- end
- end
-
- context 'when the viewer is client side' do
- let(:blob) { fake_blob(path: 'file.md') }
-
- before do
- viewer_class.client_side = true
- end
-
- it "doesn't load all blob data" do
- expect(blob).not_to receive(:load_all_data!)
-
- viewer.prepare!
- end
- end
end
end
diff --git a/spec/models/blob_viewer/changelog_spec.rb b/spec/models/blob_viewer/changelog_spec.rb
new file mode 100644
index 00000000000..9066c5a05ac
--- /dev/null
+++ b/spec/models/blob_viewer/changelog_spec.rb
@@ -0,0 +1,27 @@
+require 'spec_helper'
+
+describe BlobViewer::Changelog, model: true do
+ include FakeBlobHelpers
+
+ let(:project) { create(:project, :repository) }
+ let(:blob) { fake_blob(path: 'CHANGELOG') }
+ subject { described_class.new(blob) }
+
+ describe '#render_error' do
+ context 'when there are no tags' do
+ before do
+ allow(project.repository).to receive(:tag_count).and_return(0)
+ end
+
+ it 'returns :no_tags' do
+ expect(subject.render_error).to eq(:no_tags)
+ end
+ end
+
+ context 'when there are tags' do
+ it 'returns nil' do
+ expect(subject.render_error).to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/models/blob_viewer/composer_json_spec.rb b/spec/models/blob_viewer/composer_json_spec.rb
new file mode 100644
index 00000000000..df4f1f4815c
--- /dev/null
+++ b/spec/models/blob_viewer/composer_json_spec.rb
@@ -0,0 +1,25 @@
+require 'spec_helper'
+
+describe BlobViewer::ComposerJson, model: true do
+ include FakeBlobHelpers
+
+ let(:project) { build(:project) }
+ let(:data) do
+ <<-SPEC.strip_heredoc
+ {
+ "name": "laravel/laravel",
+ "homepage": "https://laravel.com/"
+ }
+ SPEC
+ end
+ let(:blob) { fake_blob(path: 'composer.json', data: data) }
+ subject { described_class.new(blob) }
+
+ describe '#package_name' do
+ it 'returns the package name' do
+ expect(subject).to receive(:prepare!)
+
+ expect(subject.package_name).to eq('laravel/laravel')
+ end
+ end
+end
diff --git a/spec/models/blob_viewer/gemspec_spec.rb b/spec/models/blob_viewer/gemspec_spec.rb
new file mode 100644
index 00000000000..81e932de290
--- /dev/null
+++ b/spec/models/blob_viewer/gemspec_spec.rb
@@ -0,0 +1,25 @@
+require 'spec_helper'
+
+describe BlobViewer::Gemspec, model: true do
+ include FakeBlobHelpers
+
+ let(:project) { build(:project) }
+ let(:data) do
+ <<-SPEC.strip_heredoc
+ Gem::Specification.new do |s|
+ s.platform = Gem::Platform::RUBY
+ s.name = "activerecord"
+ end
+ SPEC
+ end
+ let(:blob) { fake_blob(path: 'activerecord.gemspec', data: data) }
+ subject { described_class.new(blob) }
+
+ describe '#package_name' do
+ it 'returns the package name' do
+ expect(subject).to receive(:prepare!)
+
+ expect(subject.package_name).to eq('activerecord')
+ end
+ end
+end
diff --git a/spec/models/blob_viewer/gitlab_ci_yml_spec.rb b/spec/models/blob_viewer/gitlab_ci_yml_spec.rb
new file mode 100644
index 00000000000..0c6c24ece21
--- /dev/null
+++ b/spec/models/blob_viewer/gitlab_ci_yml_spec.rb
@@ -0,0 +1,32 @@
+require 'spec_helper'
+
+describe BlobViewer::GitlabCiYml, model: true do
+ include FakeBlobHelpers
+
+ let(:project) { build(:project) }
+ let(:data) { File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml')) }
+ let(:blob) { fake_blob(path: '.gitlab-ci.yml', data: data) }
+ subject { described_class.new(blob) }
+
+ describe '#validation_message' do
+ it 'calls prepare! on the viewer' do
+ expect(subject).to receive(:prepare!)
+
+ subject.validation_message
+ end
+
+ context 'when the configuration is valid' do
+ it 'returns nil' do
+ expect(subject.validation_message).to be_nil
+ end
+ end
+
+ context 'when the configuration is invalid' do
+ let(:data) { 'oof' }
+
+ it 'returns the error message' do
+ expect(subject.validation_message).to eq('Invalid configuration format')
+ end
+ end
+ end
+end
diff --git a/spec/models/blob_viewer/license_spec.rb b/spec/models/blob_viewer/license_spec.rb
new file mode 100644
index 00000000000..944ddd32b92
--- /dev/null
+++ b/spec/models/blob_viewer/license_spec.rb
@@ -0,0 +1,34 @@
+require 'spec_helper'
+
+describe BlobViewer::License, model: true do
+ include FakeBlobHelpers
+
+ let(:project) { create(:project, :repository) }
+ let(:blob) { fake_blob(path: 'LICENSE') }
+ subject { described_class.new(blob) }
+
+ describe '#license' do
+ it 'returns the blob project repository license' do
+ expect(subject.license).not_to be_nil
+ expect(subject.license).to eq(project.repository.license)
+ end
+ end
+
+ describe '#render_error' do
+ context 'when there is no license' do
+ before do
+ allow(project.repository).to receive(:license).and_return(nil)
+ end
+
+ it 'returns :unknown_license' do
+ expect(subject.render_error).to eq(:unknown_license)
+ end
+ end
+
+ context 'when there is a license' do
+ it 'returns nil' do
+ expect(subject.render_error).to be_nil
+ end
+ end
+ end
+end
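
A minimal sketch of the viewer behaviour these examples pin down; the delegation through blob.project.repository and the class name LicenseSketch are assumptions for illustration, not code from this merge:

module BlobViewer
  class LicenseSketch < Base
    # Delegate to the repository's detected license, if any.
    def license
      blob.project.repository.license
    end

    # Mirror the spec: :unknown_license when nothing was detected, nil otherwise.
    def render_error
      :unknown_license unless license
    end
  end
end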
diff --git a/spec/models/blob_viewer/package_json_spec.rb b/spec/models/blob_viewer/package_json_spec.rb
new file mode 100644
index 00000000000..5c9a9c81963
--- /dev/null
+++ b/spec/models/blob_viewer/package_json_spec.rb
@@ -0,0 +1,25 @@
+require 'spec_helper'
+
+describe BlobViewer::PackageJson, model: true do
+ include FakeBlobHelpers
+
+ let(:project) { build(:project) }
+ let(:data) do
+ <<-SPEC.strip_heredoc
+ {
+ "name": "module-name",
+ "version": "10.3.1"
+ }
+ SPEC
+ end
+ let(:blob) { fake_blob(path: 'package.json', data: data) }
+ subject { described_class.new(blob) }
+
+ describe '#package_name' do
+ it 'returns the package name' do
+ expect(subject).to receive(:prepare!)
+
+ expect(subject.package_name).to eq('module-name')
+ end
+ end
+end
diff --git a/spec/models/blob_viewer/podspec_json_spec.rb b/spec/models/blob_viewer/podspec_json_spec.rb
new file mode 100644
index 00000000000..42a00940bc5
--- /dev/null
+++ b/spec/models/blob_viewer/podspec_json_spec.rb
@@ -0,0 +1,25 @@
+require 'spec_helper'
+
+describe BlobViewer::PodspecJson, model: true do
+ include FakeBlobHelpers
+
+ let(:project) { build(:project) }
+ let(:data) do
+ <<-SPEC.strip_heredoc
+ {
+ "name": "AFNetworking",
+ "version": "2.0.0"
+ }
+ SPEC
+ end
+ let(:blob) { fake_blob(path: 'AFNetworking.podspec.json', data: data) }
+ subject { described_class.new(blob) }
+
+ describe '#package_name' do
+ it 'returns the package name' do
+ expect(subject).to receive(:prepare!)
+
+ expect(subject.package_name).to eq('AFNetworking')
+ end
+ end
+end
diff --git a/spec/models/blob_viewer/podspec_spec.rb b/spec/models/blob_viewer/podspec_spec.rb
new file mode 100644
index 00000000000..6c9f0f42d53
--- /dev/null
+++ b/spec/models/blob_viewer/podspec_spec.rb
@@ -0,0 +1,25 @@
+require 'spec_helper'
+
+describe BlobViewer::Podspec, model: true do
+ include FakeBlobHelpers
+
+ let(:project) { build(:project) }
+ let(:data) do
+ <<-SPEC.strip_heredoc
+ Pod::Spec.new do |spec|
+ spec.name = 'Reachability'
+ spec.version = '3.1.0'
+ end
+ SPEC
+ end
+ let(:blob) { fake_blob(path: 'Reachability.podspec', data: data) }
+ subject { described_class.new(blob) }
+
+ describe '#package_name' do
+ it 'returns the package name' do
+ expect(subject).to receive(:prepare!)
+
+ expect(subject.package_name).to eq('Reachability')
+ end
+ end
+end
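
The #package_name specs above (composer.json, gemspec, package.json, podspec.json, podspec) all share one shape: prepare! loads the blob, then a name is read out of its data. For the JSON-based viewers that can be as small as the sketch below; the parsing approach and module name are assumptions, not taken from this diff:

require 'json'

module BlobViewer
  module PackageNameFromJson
    # Load the blob, then read the top-level "name" key.
    def package_name
      prepare!
      JSON.parse(blob.data)['name']
    rescue JSON::ParserError
      nil
    end
  end
end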
diff --git a/spec/models/blob_viewer/route_map_spec.rb b/spec/models/blob_viewer/route_map_spec.rb
new file mode 100644
index 00000000000..4854e0262d9
--- /dev/null
+++ b/spec/models/blob_viewer/route_map_spec.rb
@@ -0,0 +1,38 @@
+require 'spec_helper'
+
+describe BlobViewer::RouteMap, model: true do
+ include FakeBlobHelpers
+
+ let(:project) { build(:project) }
+ let(:data) do
+ <<-MAP.strip_heredoc
+ # Team data
+ - source: 'data/team.yml'
+ public: 'team/'
+ MAP
+ end
+ let(:blob) { fake_blob(path: '.gitlab/route-map.yml', data: data) }
+ subject { described_class.new(blob) }
+
+ describe '#validation_message' do
+ it 'calls prepare! on the viewer' do
+ expect(subject).to receive(:prepare!)
+
+ subject.validation_message
+ end
+
+ context 'when the configuration is valid' do
+ it 'returns nil' do
+ expect(subject.validation_message).to be_nil
+ end
+ end
+
+ context 'when the configuration is invalid' do
+ let(:data) { 'oof' }
+
+ it 'returns the error message' do
+ expect(subject.validation_message).to eq('Route map is not an array')
+ end
+ end
+ end
+end
diff --git a/spec/models/blob_viewer/server_side_spec.rb b/spec/models/blob_viewer/server_side_spec.rb
new file mode 100644
index 00000000000..f047953d540
--- /dev/null
+++ b/spec/models/blob_viewer/server_side_spec.rb
@@ -0,0 +1,41 @@
+require 'spec_helper'
+
+describe BlobViewer::ServerSide, model: true do
+ include FakeBlobHelpers
+
+ let(:project) { build(:empty_project) }
+
+ let(:viewer_class) do
+ Class.new(BlobViewer::Base) do
+ include BlobViewer::ServerSide
+ end
+ end
+
+ subject { viewer_class.new(blob) }
+
+ describe '#prepare!' do
+ let(:blob) { fake_blob(path: 'file.txt') }
+
+ it 'loads all blob data' do
+ expect(blob).to receive(:load_all_data!)
+
+ subject.prepare!
+ end
+ end
+
+ describe '#render_error' do
+ context 'when the blob is stored externally' do
+ let(:project) { build(:empty_project, lfs_enabled: true) }
+
+ let(:blob) { fake_blob(path: 'file.pdf', lfs: true) }
+
+ before do
+ allow(Gitlab.config.lfs).to receive(:enabled).and_return(true)
+ end
+
+ it 'returns :server_side_but_stored_externally' do
+ expect(subject.render_error).to eq(:server_side_but_stored_externally)
+ end
+ end
+ end
+end
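
A rough sketch of what the ServerSide examples above pin down; the module name and the stored_externally? predicate are placeholders, not the code introduced here:

module BlobViewer
  module ServerSideSketch
    # Server-side viewers need the full blob contents, not just the first chunk.
    def prepare!
      blob.load_all_data!
    end

    # Blobs kept in external storage (e.g. LFS) cannot be rendered server side.
    def render_error
      return :server_side_but_stored_externally if blob.stored_externally?

      super
    end
  end
end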
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 5231ce28c9d..e971b4bc3f9 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -972,7 +972,7 @@ describe Ci::Build, :models do
'fix-1-foo' => 'fix-1-foo',
'a' * 63 => 'a' * 63,
'a' * 64 => 'a' * 63,
- 'FOO' => 'foo',
+ 'FOO' => 'foo'
}.each do |ref, slug|
it "transforms #{ref} to #{slug}" do
build.ref = ref
@@ -1144,7 +1144,7 @@ describe Ci::Build, :models do
{ key: 'CI_PIPELINE_ID', value: pipeline.id.to_s, public: true },
{ key: 'CI_REGISTRY_USER', value: 'gitlab-ci-token', public: true },
{ key: 'CI_REGISTRY_PASSWORD', value: build.token, public: false },
- { key: 'CI_REPOSITORY_URL', value: build.repo_url, public: false },
+ { key: 'CI_REPOSITORY_URL', value: build.repo_url, public: false }
]
end
diff --git a/spec/models/ci/pipeline_schedule_spec.rb b/spec/models/ci/pipeline_schedule_spec.rb
new file mode 100644
index 00000000000..822b98c5f6c
--- /dev/null
+++ b/spec/models/ci/pipeline_schedule_spec.rb
@@ -0,0 +1,112 @@
+require 'spec_helper'
+
+describe Ci::PipelineSchedule, models: true do
+ it { is_expected.to belong_to(:project) }
+ it { is_expected.to belong_to(:owner) }
+
+ it { is_expected.to have_many(:pipelines) }
+
+ it { is_expected.to respond_to(:ref) }
+ it { is_expected.to respond_to(:cron) }
+ it { is_expected.to respond_to(:cron_timezone) }
+ it { is_expected.to respond_to(:description) }
+ it { is_expected.to respond_to(:next_run_at) }
+ it { is_expected.to respond_to(:deleted_at) }
+
+ describe 'validations' do
+ it 'does not allow invalid cron patterns' do
+ pipeline_schedule = build(:ci_pipeline_schedule, cron: '0 0 0 * *')
+
+ expect(pipeline_schedule).not_to be_valid
+ end
+
+ it 'does not allow invalid cron timezones' do
+ pipeline_schedule = build(:ci_pipeline_schedule, cron_timezone: 'invalid')
+
+ expect(pipeline_schedule).not_to be_valid
+ end
+ end
+
+ describe '#set_next_run_at' do
+ let!(:pipeline_schedule) { create(:ci_pipeline_schedule, :nightly) }
+
+ context 'when a new pipeline schedule is created' do
+ let(:expected_next_run_at) do
+ Gitlab::Ci::CronParser.new(pipeline_schedule.cron, pipeline_schedule.cron_timezone).
+ next_time_from(Time.now)
+ end
+
+ it 'updates next_run_at automatically' do
+ expect(Ci::PipelineSchedule.last.next_run_at).to eq(expected_next_run_at)
+ end
+ end
+
+ context 'when the cron of an existing pipeline schedule is updated' do
+ let(:new_cron) { '0 0 1 1 *' }
+
+ let(:expected_next_run_at) do
+ Gitlab::Ci::CronParser.new(new_cron, pipeline_schedule.cron_timezone).
+ next_time_from(Time.now)
+ end
+
+ it 'updates next_run_at automatically' do
+ pipeline_schedule.update!(cron: new_cron)
+
+ expect(Ci::PipelineSchedule.last.next_run_at).to eq(expected_next_run_at)
+ end
+ end
+ end
+
+ describe '#schedule_next_run!' do
+ let!(:pipeline_schedule) { create(:ci_pipeline_schedule, :nightly) }
+
+ context 'when rescheduled 10 days from now' do
+ let(:future_time) { 10.days.from_now }
+
+ let(:expected_next_run_at) do
+ Gitlab::Ci::CronParser.new(pipeline_schedule.cron, pipeline_schedule.cron_timezone).
+ next_time_from(future_time)
+ end
+
+ it 'points to the proper next_run_at' do
+ Timecop.freeze(future_time) do
+ pipeline_schedule.schedule_next_run!
+
+ expect(pipeline_schedule.next_run_at).to eq(expected_next_run_at)
+ end
+ end
+ end
+ end
+
+ describe '#real_next_run' do
+ subject do
+ described_class.last.real_next_run(worker_cron: worker_cron,
+ worker_time_zone: worker_time_zone)
+ end
+
+ context 'when GitLab time_zone is UTC' do
+ before do
+ allow(Time).to receive(:zone)
+ .and_return(ActiveSupport::TimeZone[worker_time_zone])
+ end
+
+ let(:worker_time_zone) { 'UTC' }
+
+ context 'when cron_timezone is Eastern Time (US & Canada)' do
+ before do
+ create(:ci_pipeline_schedule, :nightly,
+ cron_timezone: 'Eastern Time (US & Canada)')
+ end
+
+ let(:worker_cron) { '0 1 2 3 *' }
+
+ it 'returns the next time the worker executes' do
+ expect(subject.min).to eq(0)
+ expect(subject.hour).to eq(1)
+ expect(subject.day).to eq(2)
+ expect(subject.month).to eq(3)
+ end
+ end
+ end
+ end
+end
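
Every expected value in this spec is derived from Gitlab::Ci::CronParser, so the model presumably reduces to something like the sketch below; the callback wiring and the schedule_next_run! body are assumptions:

module Ci
  class PipelineSchedule < ActiveRecord::Base
    before_save :set_next_run_at

    # CronParser returns nil for an invalid cron or timezone, which is what
    # the removed trigger_schedule examples relied on.
    def set_next_run_at
      self.next_run_at = Gitlab::Ci::CronParser
        .new(cron, cron_timezone)
        .next_time_from(Time.now)
    end

    def schedule_next_run!
      save! # re-runs the callback so next_run_at moves forward
    end
  end
end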
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 3b222ea1c3d..56b24ce62f3 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -13,6 +13,7 @@ describe Ci::Pipeline, models: true do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:user) }
it { is_expected.to belong_to(:auto_canceled_by) }
+ it { is_expected.to belong_to(:pipeline_schedule) }
it { is_expected.to have_many(:statuses) }
it { is_expected.to have_many(:trigger_requests) }
@@ -59,8 +60,8 @@ describe Ci::Pipeline, models: true do
subject { pipeline.retried }
before do
- @build1 = FactoryGirl.create :ci_build, pipeline: pipeline, name: 'deploy'
- @build2 = FactoryGirl.create :ci_build, pipeline: pipeline, name: 'deploy'
+ @build1 = create(:ci_build, pipeline: pipeline, name: 'deploy', retried: true)
+ @build2 = create(:ci_build, pipeline: pipeline, name: 'deploy')
end
it 'returns old builds' do
@@ -69,31 +70,31 @@ describe Ci::Pipeline, models: true do
end
describe "coverage" do
- let(:project) { FactoryGirl.create :empty_project, build_coverage_regex: "/.*/" }
- let(:pipeline) { FactoryGirl.create :ci_empty_pipeline, project: project }
+ let(:project) { create(:empty_project, build_coverage_regex: "/.*/") }
+ let(:pipeline) { create(:ci_empty_pipeline, project: project) }
it "calculates average when there are two builds with coverage" do
- FactoryGirl.create :ci_build, name: "rspec", coverage: 30, pipeline: pipeline
- FactoryGirl.create :ci_build, name: "rubocop", coverage: 40, pipeline: pipeline
+ create(:ci_build, name: "rspec", coverage: 30, pipeline: pipeline)
+ create(:ci_build, name: "rubocop", coverage: 40, pipeline: pipeline)
expect(pipeline.coverage).to eq("35.00")
end
it "calculates average when there are two builds with coverage and one with nil" do
- FactoryGirl.create :ci_build, name: "rspec", coverage: 30, pipeline: pipeline
- FactoryGirl.create :ci_build, name: "rubocop", coverage: 40, pipeline: pipeline
- FactoryGirl.create :ci_build, pipeline: pipeline
+ create(:ci_build, name: "rspec", coverage: 30, pipeline: pipeline)
+ create(:ci_build, name: "rubocop", coverage: 40, pipeline: pipeline)
+ create(:ci_build, pipeline: pipeline)
expect(pipeline.coverage).to eq("35.00")
end
it "calculates average when there are two builds with coverage and one is retried" do
- FactoryGirl.create :ci_build, name: "rspec", coverage: 30, pipeline: pipeline
- FactoryGirl.create :ci_build, name: "rubocop", coverage: 30, pipeline: pipeline
- FactoryGirl.create :ci_build, name: "rubocop", coverage: 40, pipeline: pipeline
+ create(:ci_build, name: "rspec", coverage: 30, pipeline: pipeline)
+ create(:ci_build, name: "rubocop", coverage: 30, pipeline: pipeline, retried: true)
+ create(:ci_build, name: "rubocop", coverage: 40, pipeline: pipeline)
expect(pipeline.coverage).to eq("35.00")
end
it "calculates average when there is one build without coverage" do
- FactoryGirl.create :ci_build, pipeline: pipeline
+ FactoryGirl.create(:ci_build, pipeline: pipeline)
expect(pipeline.coverage).to be_nil
end
end
@@ -221,13 +222,15 @@ describe Ci::Pipeline, models: true do
%w(deploy running)])
end
- context 'when commit status is retried' do
+ context 'when commit status is retried' do
before do
create(:commit_status, pipeline: pipeline,
stage: 'build',
name: 'mac',
stage_idx: 0,
status: 'success')
+
+ pipeline.process!
end
it 'ignores the previous state' do
@@ -488,6 +491,10 @@ describe Ci::Pipeline, models: true do
context 'there are multiple of the same name' do
let!(:manual2) { create(:ci_build, :manual, pipeline: pipeline, name: 'deploy') }
+ before do
+ manual.update(retried: true)
+ end
+
it 'returns latest one' do
is_expected.to contain_exactly(manual2)
end
@@ -847,6 +854,16 @@ describe Ci::Pipeline, models: true do
end
end
end
+
+ context 'when there is a manual action present in the pipeline' do
+ before do
+ create(:ci_build, :manual, pipeline: pipeline)
+ end
+
+ it 'is not cancelable' do
+ expect(pipeline).not_to be_cancelable
+ end
+ end
end
describe '#cancel_running' do
@@ -1043,8 +1060,8 @@ describe Ci::Pipeline, models: true do
let(:pipeline) { create(:ci_empty_pipeline, status: 'created', project: project, ref: 'master', sha: 'a288a022a53a5a944fae87bcec6efc87b7061808') }
it "returns merge requests whose `diff_head_sha` matches the pipeline's SHA" do
- merge_request = create(:merge_request, source_project: project, source_branch: pipeline.ref)
allow_any_instance_of(MergeRequest).to receive(:diff_head_sha) { 'a288a022a53a5a944fae87bcec6efc87b7061808' }
+ merge_request = create(:merge_request, source_project: project, head_pipeline: pipeline, source_branch: pipeline.ref)
expect(pipeline.merge_requests).to eq([merge_request])
end
diff --git a/spec/models/ci/stage_spec.rb b/spec/models/ci/stage_spec.rb
index 372b662fab2..8f6ab908987 100644
--- a/spec/models/ci/stage_spec.rb
+++ b/spec/models/ci/stage_spec.rb
@@ -102,6 +102,10 @@ describe Ci::Stage, models: true do
context 'and builds are retried' do
let!(:new_build) { create_job(:ci_build, status: :success) }
+ before do
+ stage_build.update(retried: true)
+ end
+
it "returns status of latest build" do
is_expected.to eq('success')
end
diff --git a/spec/models/ci/trigger_schedule_spec.rb b/spec/models/ci/trigger_schedule_spec.rb
deleted file mode 100644
index 92447564d7c..00000000000
--- a/spec/models/ci/trigger_schedule_spec.rb
+++ /dev/null
@@ -1,108 +0,0 @@
-require 'spec_helper'
-
-describe Ci::TriggerSchedule, models: true do
- it { is_expected.to belong_to(:project) }
- it { is_expected.to belong_to(:trigger) }
- it { is_expected.to respond_to(:ref) }
-
- describe '#set_next_run_at' do
- context 'when creates new TriggerSchedule' do
- before do
- trigger_schedule = create(:ci_trigger_schedule, :nightly)
- @expected_next_run_at = Gitlab::Ci::CronParser.new(trigger_schedule.cron, trigger_schedule.cron_timezone)
- .next_time_from(Time.now)
- end
-
- it 'updates next_run_at automatically' do
- expect(Ci::TriggerSchedule.last.next_run_at).to eq(@expected_next_run_at)
- end
- end
-
- context 'when updates cron of exsisted TriggerSchedule' do
- before do
- trigger_schedule = create(:ci_trigger_schedule, :nightly)
- new_cron = '0 0 1 1 *'
- trigger_schedule.update!(cron: new_cron) # Subject
- @expected_next_run_at = Gitlab::Ci::CronParser.new(new_cron, trigger_schedule.cron_timezone)
- .next_time_from(Time.now)
- end
-
- it 'updates next_run_at automatically' do
- expect(Ci::TriggerSchedule.last.next_run_at).to eq(@expected_next_run_at)
- end
- end
- end
-
- describe '#schedule_next_run!' do
- context 'when reschedules after 10 days from now' do
- before do
- trigger_schedule = create(:ci_trigger_schedule, :nightly)
- time_future = Time.now + 10.days
- allow(Time).to receive(:now).and_return(time_future)
- trigger_schedule.schedule_next_run! # Subject
- @expected_next_run_at = Gitlab::Ci::CronParser.new(trigger_schedule.cron, trigger_schedule.cron_timezone)
- .next_time_from(time_future)
- end
-
- it 'points to proper next_run_at' do
- expect(Ci::TriggerSchedule.last.next_run_at).to eq(@expected_next_run_at)
- end
- end
-
- context 'when cron is invalid' do
- before do
- trigger_schedule = create(:ci_trigger_schedule, :nightly)
- trigger_schedule.cron = 'Invalid-cron'
- trigger_schedule.schedule_next_run! # Subject
- end
-
- it 'sets nil to next_run_at' do
- expect(Ci::TriggerSchedule.last.next_run_at).to be_nil
- end
- end
-
- context 'when cron_timezone is invalid' do
- before do
- trigger_schedule = create(:ci_trigger_schedule, :nightly)
- trigger_schedule.cron_timezone = 'Invalid-cron_timezone'
- trigger_schedule.schedule_next_run! # Subject
- end
-
- it 'sets nil to next_run_at' do
- expect(Ci::TriggerSchedule.last.next_run_at).to be_nil
- end
- end
- end
-
- describe '#real_next_run' do
- subject do
- Ci::TriggerSchedule.last.real_next_run(worker_cron: worker_cron,
- worker_time_zone: worker_time_zone)
- end
-
- context 'when GitLab time_zone is UTC' do
- before do
- allow(Time).to receive(:zone)
- .and_return(ActiveSupport::TimeZone[worker_time_zone])
- end
-
- let(:worker_time_zone) { 'UTC' }
-
- context 'when cron_timezone is Eastern Time (US & Canada)' do
- before do
- create(:ci_trigger_schedule, :nightly,
- cron_timezone: 'Eastern Time (US & Canada)')
- end
-
- let(:worker_cron) { '0 1 2 3 *' }
-
- it 'returns the next time worker executes' do
- expect(subject.min).to eq(0)
- expect(subject.hour).to eq(1)
- expect(subject.day).to eq(2)
- expect(subject.month).to eq(3)
- end
- end
- end
- end
-end
diff --git a/spec/models/ci/trigger_spec.rb b/spec/models/ci/trigger_spec.rb
index d26121018ce..92c15c13c18 100644
--- a/spec/models/ci/trigger_spec.rb
+++ b/spec/models/ci/trigger_spec.rb
@@ -7,7 +7,6 @@ describe Ci::Trigger, models: true do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:owner) }
it { is_expected.to have_many(:trigger_requests) }
- it { is_expected.to have_one(:trigger_schedule) }
end
describe 'before_validation' do
diff --git a/spec/models/ci/variable_spec.rb b/spec/models/ci/variable_spec.rb
index 048d25869bc..fe8c52d5353 100644
--- a/spec/models/ci/variable_spec.rb
+++ b/spec/models/ci/variable_spec.rb
@@ -1,7 +1,7 @@
require 'spec_helper'
describe Ci::Variable, models: true do
- subject { Ci::Variable.new }
+ subject { build(:ci_variable) }
let(:secret_value) { 'secret' }
diff --git a/spec/models/commit_spec.rb b/spec/models/commit_spec.rb
index 08b2169fea7..72f83d63224 100644
--- a/spec/models/commit_spec.rb
+++ b/spec/models/commit_spec.rb
@@ -388,33 +388,4 @@ eos
expect(described_class.valid_hash?('a' * 41)).to be false
end
end
-
- # describe '#raw_diffs' do
- # TODO: Uncomment when feature is reenabled
- # context 'Gitaly commit_raw_diffs feature enabled' do
- # before do
- # allow(Gitlab::GitalyClient).to receive(:feature_enabled?).with(:commit_raw_diffs).and_return(true)
- # end
- #
- # context 'when a truthy deltas_only is not passed to args' do
- # it 'fetches diffs from Gitaly server' do
- # expect(Gitlab::GitalyClient::Commit).to receive(:diff_from_parent).
- # with(commit)
- #
- # commit.raw_diffs
- # end
- # end
- #
- # context 'when a truthy deltas_only is passed to args' do
- # it 'fetches diffs using Rugged' do
- # opts = { deltas_only: true }
- #
- # expect(Gitlab::GitalyClient::Commit).not_to receive(:diff_from_parent)
- # expect(commit.raw).to receive(:diffs).with(opts)
- #
- # commit.raw_diffs(opts)
- # end
- # end
- # end
- # end
end
diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb
index 0ee85489574..6947affcc1e 100644
--- a/spec/models/commit_status_spec.rb
+++ b/spec/models/commit_status_spec.rb
@@ -157,9 +157,9 @@ describe CommitStatus, :models do
subject { described_class.latest.order(:id) }
let(:statuses) do
- [create_status(name: 'aa', ref: 'bb', status: 'running'),
- create_status(name: 'cc', ref: 'cc', status: 'pending'),
- create_status(name: 'aa', ref: 'cc', status: 'success'),
+ [create_status(name: 'aa', ref: 'bb', status: 'running', retried: true),
+ create_status(name: 'cc', ref: 'cc', status: 'pending', retried: true),
+ create_status(name: 'aa', ref: 'cc', status: 'success', retried: true),
create_status(name: 'cc', ref: 'bb', status: 'success'),
create_status(name: 'aa', ref: 'bb', status: 'success')]
end
@@ -169,6 +169,22 @@ describe CommitStatus, :models do
end
end
+ describe '.retried' do
+ subject { described_class.retried.order(:id) }
+
+ let(:statuses) do
+ [create_status(name: 'aa', ref: 'bb', status: 'running', retried: true),
+ create_status(name: 'cc', ref: 'cc', status: 'pending', retried: true),
+ create_status(name: 'aa', ref: 'cc', status: 'success', retried: true),
+ create_status(name: 'cc', ref: 'bb', status: 'success'),
+ create_status(name: 'aa', ref: 'bb', status: 'success')]
+ end
+
+ it 'returns retried statuses' do
+ is_expected.to contain_exactly(*statuses.values_at(0, 1, 2))
+ end
+ end
+
describe '.running_or_pending' do
subject { described_class.running_or_pending.order(:id) }
@@ -181,7 +197,7 @@ describe CommitStatus, :models do
end
it 'returns statuses that are running or pending' do
- is_expected.to eq(statuses.values_at(0, 1))
+ is_expected.to contain_exactly(*statuses.values_at(0, 1))
end
end
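
The new retried flag used throughout these examples suggests scopes along the lines of this sketch; the exact scope bodies are an assumption, not part of this diff:

class CommitStatus < ActiveRecord::Base
  # Statuses explicitly marked as retried.
  scope :retried, -> { where(retried: true) }

  # The latest statuses are simply the ones that were never retried.
  scope :latest, -> { where(retried: [false, nil]) }
end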
diff --git a/spec/models/concerns/mentionable_spec.rb b/spec/models/concerns/mentionable_spec.rb
index 2092576e981..e382c7120de 100644
--- a/spec/models/concerns/mentionable_spec.rb
+++ b/spec/models/concerns/mentionable_spec.rb
@@ -163,3 +163,52 @@ describe Issue, "Mentionable" do
end
end
end
+
+describe Commit, 'Mentionable' do
+ let(:project) { create(:project, :public, :repository) }
+ let(:commit) { project.commit }
+
+ describe '#matches_cross_reference_regex?' do
+ it "is false when message doesn't reference anything" do
+ allow(commit.raw).to receive(:message).and_return "WIP: Do something"
+
+ expect(commit.matches_cross_reference_regex?).to be false
+ end
+
+ it 'is true if issue #number mentioned in title' do
+ allow(commit.raw).to receive(:message).and_return "#1"
+
+ expect(commit.matches_cross_reference_regex?).to be true
+ end
+
+ it 'is true if references an MR' do
+ allow(commit.raw).to receive(:message).and_return "See merge request !12"
+
+ expect(commit.matches_cross_reference_regex?).to be true
+ end
+
+ it 'is true if references a commit' do
+ allow(commit.raw).to receive(:message).and_return "a1b2c3d4"
+
+ expect(commit.matches_cross_reference_regex?).to be true
+ end
+
+ it 'is true if issue referenced by url' do
+ issue = create(:issue, project: project)
+
+ allow(commit.raw).to receive(:message).and_return Gitlab::UrlBuilder.build(issue)
+
+ expect(commit.matches_cross_reference_regex?).to be true
+ end
+
+ context 'with external issue tracker' do
+ let(:project) { create(:jira_project) }
+
+ it 'is true if external issues referenced' do
+ allow(commit.raw).to receive(:message).and_return 'JIRA-123'
+
+ expect(commit.matches_cross_reference_regex?).to be true
+ end
+ end
+ end
+end
diff --git a/spec/models/cycle_analytics/test_spec.rb b/spec/models/cycle_analytics/test_spec.rb
index c2ba012a0e6..d0b919efcf9 100644
--- a/spec/models/cycle_analytics/test_spec.rb
+++ b/spec/models/cycle_analytics/test_spec.rb
@@ -14,6 +14,7 @@ describe 'CycleAnalytics#test', feature: true do
issue = context.create(:issue, project: context.project)
merge_request = context.create_merge_request_closing_issue(issue)
pipeline = context.create(:ci_pipeline, ref: merge_request.source_branch, sha: merge_request.diff_head_sha, project: context.project)
+ merge_request.update(head_pipeline: pipeline)
{ pipeline: pipeline, issue: issue }
end,
start_time_conditions: [["pipeline is started", -> (context, data) { data[:pipeline].run! }]],
diff --git a/spec/models/deployment_spec.rb b/spec/models/deployment_spec.rb
index 080ff2f3f43..4bda7d4314a 100644
--- a/spec/models/deployment_spec.rb
+++ b/spec/models/deployment_spec.rb
@@ -49,6 +49,34 @@ describe Deployment, models: true do
end
end
+ describe '#metrics' do
+ let(:deployment) { create(:deployment) }
+
+ subject { deployment.metrics }
+
+ context 'metrics are disabled' do
+ it { is_expected.to eq({}) }
+ end
+
+ context 'metrics are enabled' do
+ let(:simple_metrics) do
+ {
+ success: true,
+ metrics: {},
+ last_update: 42,
+ deployment_time: 1494408956
+ }
+ end
+
+ before do
+ allow(deployment.project).to receive_message_chain(:monitoring_service, :deployment_metrics)
+ .with(any_args).and_return(simple_metrics)
+ end
+
+ it { is_expected.to eq(simple_metrics) }
+ end
+ end
+
describe '#stop_action' do
let(:build) { create(:ci_build) }
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index 28e5c3f80f4..12519de8636 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -393,7 +393,7 @@ describe Environment, models: true do
it 'returns the metrics from the deployment service' do
expect(project.monitoring_service)
- .to receive(:metrics).with(environment)
+ .to receive(:environment_metrics).with(environment)
.and_return(:fake_metrics)
is_expected.to eq(:fake_metrics)
@@ -438,7 +438,7 @@ describe Environment, models: true do
"foo**bar" => "foo-bar" + SUFFIX,
"*-foo" => "env-foo" + SUFFIX,
"staging-12345678-" => "staging-12345678" + SUFFIX,
- "staging-12345678-01234567" => "staging-12345678" + SUFFIX,
+ "staging-12345678-01234567" => "staging-12345678" + SUFFIX
}.each do |name, matcher|
it "returns a slug matching #{matcher}, given #{name}" do
slug = described_class.new(name: name).generate_slug
diff --git a/spec/models/global_milestone_spec.rb b/spec/models/global_milestone_spec.rb
index 55b87d1c48a..a14efda3eda 100644
--- a/spec/models/global_milestone_spec.rb
+++ b/spec/models/global_milestone_spec.rb
@@ -137,7 +137,7 @@ describe GlobalMilestone, models: true do
[
milestone1_project1,
milestone1_project2,
- milestone1_project3,
+ milestone1_project3
]
milestones_relation = Milestone.where(id: milestones.map(&:id))
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 3d60e52f23f..6ca1eb0374d 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -178,16 +178,20 @@ describe Group, models: true do
describe '#avatar_url' do
let!(:group) { create(:group, :access_requestable, :with_avatar) }
let(:user) { create(:user) }
- subject { group.avatar_url }
+ let(:gitlab_host) { "http://#{Gitlab.config.gitlab.host}" }
+ let(:avatar_path) { "/uploads/group/avatar/#{group.id}/dk.png" }
context 'when avatar file is uploaded' do
- before do
- group.add_master(user)
- end
+ before { group.add_master(user) }
- let(:avatar_path) { "/uploads/group/avatar/#{group.id}/dk.png" }
+ it 'returns the correct avatar URL' do
+ expect(group.avatar_url).to eq(avatar_path)
+ expect(group.avatar_url(only_path: false)).to eq([gitlab_host, avatar_path].join)
- it { should eq "http://#{Gitlab.config.gitlab.host}#{avatar_path}" }
+ allow(ActionController::Base).to receive(:asset_host).and_return(gitlab_host)
+
+ expect(group.avatar_url).to eq([gitlab_host, avatar_path].join)
+ end
end
end
diff --git a/spec/models/hooks/system_hook_spec.rb b/spec/models/hooks/system_hook_spec.rb
index 8acec805584..4340170888d 100644
--- a/spec/models/hooks/system_hook_spec.rb
+++ b/spec/models/hooks/system_hook_spec.rb
@@ -1,6 +1,19 @@
require "spec_helper"
describe SystemHook, models: true do
+ context 'default attributes' do
+ let(:system_hook) { build(:system_hook) }
+
+ it 'sets the expected default attributes' do
+ attrs = {
+ push_events: false,
+ repository_update_events: true,
+ enable_ssl_verification: true
+ }
+ expect(system_hook).to have_attributes(attrs)
+ end
+ end
+
describe "execute" do
let(:system_hook) { create(:system_hook) }
let(:user) { create(:user) }
@@ -105,4 +118,12 @@ describe SystemHook, models: true do
).once
end
end
+
+ describe '.repository_update_hooks' do
+ it 'returns hooks for repository update events only' do
+ hook = create(:system_hook, repository_update_events: true)
+ create(:system_hook, repository_update_events: false)
+ expect(SystemHook.repository_update_hooks).to eq([hook])
+ end
+ end
end
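
A minimal sketch of the scope the .repository_update_hooks example above exercises; the scope body is an assumption:

class SystemHook < WebHook
  # Only hooks subscribed to repository update events.
  scope :repository_update_hooks, -> { where(repository_update_events: true) }
end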
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index 725f5c2311f..bb4e70db2e9 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -38,46 +38,6 @@ describe Issue, models: true do
end
end
- describe "before_save" do
- describe "#update_cache_counts when an issue is reassigned" do
- let(:issue) { create(:issue) }
- let(:assignee) { create(:user) }
-
- context "when previous assignee exists" do
- before do
- issue.project.team << [assignee, :developer]
- issue.assignees << assignee
- end
-
- it "updates cache counts for new assignee" do
- user = create(:user)
-
- expect(user).to receive(:update_cache_counts)
-
- issue.assignees << user
- end
-
- it "updates cache counts for previous assignee" do
- issue.assignees.first
-
- expect_any_instance_of(User).to receive(:update_cache_counts)
-
- issue.assignees.destroy_all
- end
- end
-
- context "when previous assignee does not exist" do
- it "updates cache count for the new assignee" do
- issue.assignees = []
-
- expect_any_instance_of(User).to receive(:update_cache_counts)
-
- issue.assignees << assignee
- end
- end
- end
- end
-
describe '#card_attributes' do
it 'includes the author name' do
allow(subject).to receive(:author).and_return(double(name: 'Robert'))
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index 6cf3dd30ead..ce870fcc1d3 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -87,48 +87,6 @@ describe MergeRequest, models: true do
end
end
- describe "before_save" do
- describe "#update_cache_counts when a merge request is reassigned" do
- let(:project) { create :project }
- let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
- let(:assignee) { create :user }
-
- context "when previous assignee exists" do
- before do
- project.team << [assignee, :developer]
- merge_request.update(assignee: assignee)
- end
-
- it "updates cache counts for new assignee" do
- user = create(:user)
-
- expect(user).to receive(:update_cache_counts)
-
- merge_request.update(assignee: user)
- end
-
- it "updates cache counts for previous assignee" do
- old_assignee = merge_request.assignee
- allow(User).to receive(:find_by_id).with(old_assignee.id).and_return(old_assignee)
-
- expect(old_assignee).to receive(:update_cache_counts)
-
- merge_request.update(assignee: nil)
- end
- end
-
- context "when previous assignee does not exist" do
- it "updates cache count for the new assignee" do
- merge_request.update(assignee: nil)
-
- expect_any_instance_of(User).to receive(:update_cache_counts)
-
- merge_request.update(assignee: assignee)
- end
- end
- end
- end
-
describe '#card_attributes' do
it 'includes the author name' do
allow(subject).to receive(:author).and_return(double(name: 'Robert'))
@@ -760,13 +718,8 @@ describe MergeRequest, models: true do
describe '#head_pipeline' do
describe 'when the source project exists' do
it 'returns the latest pipeline' do
- pipeline = double(:ci_pipeline, ref: 'master')
-
- allow(subject).to receive(:diff_head_sha).and_return('123abc')
-
- expect(subject.source_project).to receive(:pipeline_for).
- with('master', '123abc').
- and_return(pipeline)
+ pipeline = create(:ci_empty_pipeline, project: subject.source_project, ref: 'master', status: 'running', sha: "123abc")
+ subject.update(head_pipeline: pipeline)
expect(subject.head_pipeline).to eq(pipeline)
end
@@ -1315,71 +1268,6 @@ describe MergeRequest, models: true do
end
end
- describe '#conflicts_can_be_resolved_in_ui?' do
- def create_merge_request(source_branch)
- create(:merge_request, source_branch: source_branch, target_branch: 'conflict-start') do |mr|
- mr.mark_as_unmergeable
- end
- end
-
- it 'returns a falsey value when the MR can be merged without conflicts' do
- merge_request = create_merge_request('master')
- merge_request.mark_as_mergeable
-
- expect(merge_request.conflicts_can_be_resolved_in_ui?).to be_falsey
- end
-
- it 'returns a falsey value when the MR is marked as having conflicts, but has none' do
- merge_request = create_merge_request('master')
-
- expect(merge_request.conflicts_can_be_resolved_in_ui?).to be_falsey
- end
-
- it 'returns a falsey value when the MR has a missing ref after a force push' do
- merge_request = create_merge_request('conflict-resolvable')
- allow(merge_request.conflicts).to receive(:merge_index).and_raise(Rugged::OdbError)
-
- expect(merge_request.conflicts_can_be_resolved_in_ui?).to be_falsey
- end
-
- it 'returns a falsey value when the MR does not support new diff notes' do
- merge_request = create_merge_request('conflict-resolvable')
- merge_request.merge_request_diff.update_attributes(start_commit_sha: nil)
-
- expect(merge_request.conflicts_can_be_resolved_in_ui?).to be_falsey
- end
-
- it 'returns a falsey value when the conflicts contain a large file' do
- merge_request = create_merge_request('conflict-too-large')
-
- expect(merge_request.conflicts_can_be_resolved_in_ui?).to be_falsey
- end
-
- it 'returns a falsey value when the conflicts contain a binary file' do
- merge_request = create_merge_request('conflict-binary-file')
-
- expect(merge_request.conflicts_can_be_resolved_in_ui?).to be_falsey
- end
-
- it 'returns a falsey value when the conflicts contain a file edited in one branch and deleted in another' do
- merge_request = create_merge_request('conflict-missing-side')
-
- expect(merge_request.conflicts_can_be_resolved_in_ui?).to be_falsey
- end
-
- it 'returns a truthy value when the conflicts are resolvable in the UI' do
- merge_request = create_merge_request('conflict-resolvable')
-
- expect(merge_request.conflicts_can_be_resolved_in_ui?).to be_truthy
- end
-
- it 'returns a truthy value when the conflicts have to be resolved in an editor' do
- merge_request = create_merge_request('conflict-contains-conflict-markers')
-
- expect(merge_request.conflicts_can_be_resolved_in_ui?).to be_truthy
- end
- end
-
describe "#source_project_missing?" do
let(:project) { create(:empty_project) }
let(:fork_project) { create(:empty_project, forked_from_project: project) }
@@ -1504,11 +1392,15 @@ describe MergeRequest, models: true do
describe '#mergeable_with_slash_command?' do
def create_pipeline(status)
- create(:ci_pipeline_with_one_job,
+ pipeline = create(:ci_pipeline_with_one_job,
project: project,
ref: merge_request.source_branch,
sha: merge_request.diff_head_sha,
status: status)
+
+ merge_request.update(head_pipeline: pipeline)
+
+ pipeline
end
let(:project) { create(:project, :public, :repository, only_allow_merge_if_pipeline_succeeds: true) }
diff --git a/spec/models/project_authorization_spec.rb b/spec/models/project_authorization_spec.rb
index 33ef67f97a7..cd0a4a94809 100644
--- a/spec/models/project_authorization_spec.rb
+++ b/spec/models/project_authorization_spec.rb
@@ -16,7 +16,7 @@ describe ProjectAuthorization do
it 'inserts rows in batches' do
described_class.insert_authorizations([
[user.id, project1.id, Gitlab::Access::MASTER],
- [user.id, project2.id, Gitlab::Access::MASTER],
+ [user.id, project2.id, Gitlab::Access::MASTER]
], 1)
expect(user.project_authorizations.count).to eq(2)
diff --git a/spec/models/project_services/asana_service_spec.rb b/spec/models/project_services/asana_service_spec.rb
index 48aef3a93f2..95c35162d96 100644
--- a/spec/models/project_services/asana_service_spec.rb
+++ b/spec/models/project_services/asana_service_spec.rb
@@ -28,7 +28,7 @@ describe AsanaService, models: true do
commits: messages.map do |m|
{
message: m,
- url: 'https://gitlab.com/',
+ url: 'https://gitlab.com/'
}
end
}
diff --git a/spec/models/project_services/chat_message/issue_message_spec.rb b/spec/models/project_services/chat_message/issue_message_spec.rb
index 34e2d94b1ed..c159ab00ab1 100644
--- a/spec/models/project_services/chat_message/issue_message_spec.rb
+++ b/spec/models/project_services/chat_message/issue_message_spec.rb
@@ -48,7 +48,7 @@ describe ChatMessage::IssueMessage, models: true do
title: "#100 Issue title",
title_link: "http://url.com",
text: "issue description",
- color: color,
+ color: color
}
])
end
diff --git a/spec/models/project_services/chat_message/merge_message_spec.rb b/spec/models/project_services/chat_message/merge_message_spec.rb
index fa0a1f4a5b7..61f17031172 100644
--- a/spec/models/project_services/chat_message/merge_message_spec.rb
+++ b/spec/models/project_services/chat_message/merge_message_spec.rb
@@ -22,7 +22,7 @@ describe ChatMessage::MergeMessage, models: true do
state: 'opened',
description: 'merge request description',
source_branch: 'source_branch',
- target_branch: 'target_branch',
+ target_branch: 'target_branch'
}
}
end
diff --git a/spec/models/project_services/chat_message/note_message_spec.rb b/spec/models/project_services/chat_message/note_message_spec.rb
index 7cd9c61ee2b..7996536218a 100644
--- a/spec/models/project_services/chat_message/note_message_spec.rb
+++ b/spec/models/project_services/chat_message/note_message_spec.rb
@@ -15,7 +15,7 @@ describe ChatMessage::NoteMessage, models: true do
project_url: 'http://somewhere.com',
repository: {
name: 'project_name',
- url: 'http://somewhere.com',
+ url: 'http://somewhere.com'
},
object_attributes: {
id: 10,
diff --git a/spec/models/project_services/chat_message/pipeline_message_spec.rb b/spec/models/project_services/chat_message/pipeline_message_spec.rb
index e005be42b0d..7d2599dc703 100644
--- a/spec/models/project_services/chat_message/pipeline_message_spec.rb
+++ b/spec/models/project_services/chat_message/pipeline_message_spec.rb
@@ -62,7 +62,7 @@ describe ChatMessage::PipelineMessage do
def build_message(status_text = status, name = user[:name])
"<http://example.gitlab.com|project_name>:" \
" Pipeline <http://example.gitlab.com/pipelines/123|#123>" \
- " of <http://example.gitlab.com/commits/develop|develop> branch" \
+ " of branch `<http://example.gitlab.com/commits/develop|develop>`" \
" by #{name} #{status_text} in 02:00:10"
end
end
@@ -81,7 +81,7 @@ describe ChatMessage::PipelineMessage do
expect(subject.pretext).to be_empty
expect(subject.attachments).to eq(message)
expect(subject.activity).to eq({
- title: 'Pipeline [#123](http://example.gitlab.com/pipelines/123) of [develop](http://example.gitlab.com/commits/develop) branch by hacker passed',
+ title: 'Pipeline [#123](http://example.gitlab.com/pipelines/123) of branch `[develop](http://example.gitlab.com/commits/develop)` by hacker passed',
subtitle: 'in [project_name](http://example.gitlab.com)',
text: 'in 02:00:10',
image: ''
@@ -98,7 +98,7 @@ describe ChatMessage::PipelineMessage do
expect(subject.pretext).to be_empty
expect(subject.attachments).to eq(message)
expect(subject.activity).to eq({
- title: 'Pipeline [#123](http://example.gitlab.com/pipelines/123) of [develop](http://example.gitlab.com/commits/develop) branch by hacker failed',
+ title: 'Pipeline [#123](http://example.gitlab.com/pipelines/123) of branch `[develop](http://example.gitlab.com/commits/develop)` by hacker failed',
subtitle: 'in [project_name](http://example.gitlab.com)',
text: 'in 02:00:10',
image: ''
@@ -113,7 +113,7 @@ describe ChatMessage::PipelineMessage do
expect(subject.pretext).to be_empty
expect(subject.attachments).to eq(message)
expect(subject.activity).to eq({
- title: 'Pipeline [#123](http://example.gitlab.com/pipelines/123) of [develop](http://example.gitlab.com/commits/develop) branch by API failed',
+ title: 'Pipeline [#123](http://example.gitlab.com/pipelines/123) of branch `[develop](http://example.gitlab.com/commits/develop)` by API failed',
subtitle: 'in [project_name](http://example.gitlab.com)',
text: 'in 02:00:10',
image: ''
@@ -125,8 +125,8 @@ describe ChatMessage::PipelineMessage do
def build_markdown_message(status_text = status, name = user[:name])
"[project_name](http://example.gitlab.com):" \
" Pipeline [#123](http://example.gitlab.com/pipelines/123)" \
- " of [develop](http://example.gitlab.com/commits/develop)" \
- " branch by #{name} #{status_text} in 02:00:10"
+ " of branch `[develop](http://example.gitlab.com/commits/develop)`" \
+ " by #{name} #{status_text} in 02:00:10"
end
end
end
diff --git a/spec/models/project_services/chat_message/push_message_spec.rb b/spec/models/project_services/chat_message/push_message_spec.rb
index 63eb078c44e..e38117b75f6 100644
--- a/spec/models/project_services/chat_message/push_message_spec.rb
+++ b/spec/models/project_services/chat_message/push_message_spec.rb
@@ -21,19 +21,19 @@ describe ChatMessage::PushMessage, models: true do
before do
args[:commits] = [
{ message: 'message1', url: 'http://url1.com', id: 'abcdefghijkl', author: { name: 'author1' } },
- { message: 'message2', url: 'http://url2.com', id: '123456789012', author: { name: 'author2' } },
+ { message: 'message2', url: 'http://url2.com', id: '123456789012', author: { name: 'author2' } }
]
end
context 'without markdown' do
it 'returns a message regarding pushes' do
expect(subject.pretext).to eq(
- 'test.user pushed to branch <http://url.com/commits/master|master> of '\
+ 'test.user pushed to branch `<http://url.com/commits/master|master>` of '\
'<http://url.com|project_name> (<http://url.com/compare/before...after|Compare changes>)')
expect(subject.attachments).to eq([{
text: "<http://url1.com|abcdefgh>: message1 - author1\n\n"\
"<http://url2.com|12345678>: message2 - author2",
- color: color,
+ color: color
}])
end
end
@@ -45,7 +45,7 @@ describe ChatMessage::PushMessage, models: true do
it 'returns a message regarding pushes' do
expect(subject.pretext).to eq(
- 'test.user pushed to branch [master](http://url.com/commits/master) of [project_name](http://url.com) ([Compare changes](http://url.com/compare/before...after))')
+ 'test.user pushed to branch `[master](http://url.com/commits/master)` of [project_name](http://url.com) ([Compare changes](http://url.com/compare/before...after))')
expect(subject.attachments).to eq(
"[abcdefgh](http://url1.com): message1 - author1\n\n[12345678](http://url2.com): message2 - author2")
expect(subject.activity).to eq({
@@ -74,7 +74,7 @@ describe ChatMessage::PushMessage, models: true do
context 'without markdown' do
it 'returns a message regarding pushes' do
expect(subject.pretext).to eq('test.user pushed new tag ' \
- '<http://url.com/commits/new_tag|new_tag> to ' \
+ '`<http://url.com/commits/new_tag|new_tag>` to ' \
'<http://url.com|project_name>')
expect(subject.attachments).to be_empty
end
@@ -87,7 +87,7 @@ describe ChatMessage::PushMessage, models: true do
it 'returns a message regarding pushes' do
expect(subject.pretext).to eq(
- 'test.user pushed new tag [new_tag](http://url.com/commits/new_tag) to [project_name](http://url.com)')
+ 'test.user pushed new tag `[new_tag](http://url.com/commits/new_tag)` to [project_name](http://url.com)')
expect(subject.attachments).to be_empty
expect(subject.activity).to eq({
title: 'test.user created tag',
@@ -107,7 +107,7 @@ describe ChatMessage::PushMessage, models: true do
context 'without markdown' do
it 'returns a message regarding a new branch' do
expect(subject.pretext).to eq(
- 'test.user pushed new branch <http://url.com/commits/master|master> to '\
+ 'test.user pushed new branch `<http://url.com/commits/master|master>` to '\
'<http://url.com|project_name>')
expect(subject.attachments).to be_empty
end
@@ -120,7 +120,7 @@ describe ChatMessage::PushMessage, models: true do
it 'returns a message regarding a new branch' do
expect(subject.pretext).to eq(
- 'test.user pushed new branch [master](http://url.com/commits/master) to [project_name](http://url.com)')
+ 'test.user pushed new branch `[master](http://url.com/commits/master)` to [project_name](http://url.com)')
expect(subject.attachments).to be_empty
expect(subject.activity).to eq({
title: 'test.user created branch',
@@ -140,7 +140,7 @@ describe ChatMessage::PushMessage, models: true do
context 'without markdown' do
it 'returns a message regarding a removed branch' do
expect(subject.pretext).to eq(
- 'test.user removed branch master from <http://url.com|project_name>')
+ 'test.user removed branch `master` from <http://url.com|project_name>')
expect(subject.attachments).to be_empty
end
end
@@ -152,7 +152,7 @@ describe ChatMessage::PushMessage, models: true do
it 'returns a message regarding a removed branch' do
expect(subject.pretext).to eq(
- 'test.user removed branch master from [project_name](http://url.com)')
+ 'test.user removed branch `master` from [project_name](http://url.com)')
expect(subject.attachments).to be_empty
expect(subject.activity).to eq({
title: 'test.user removed branch',
diff --git a/spec/models/project_services/chat_message/wiki_page_message_spec.rb b/spec/models/project_services/chat_message/wiki_page_message_spec.rb
index 0df7db2abc2..4ca1b8aa7b7 100644
--- a/spec/models/project_services/chat_message/wiki_page_message_spec.rb
+++ b/spec/models/project_services/chat_message/wiki_page_message_spec.rb
@@ -53,7 +53,7 @@ describe ChatMessage::WikiPageMessage, models: true do
expect(subject.attachments).to eq([
{
text: "Wiki page description",
- color: color,
+ color: color
}
])
end
@@ -66,7 +66,7 @@ describe ChatMessage::WikiPageMessage, models: true do
expect(subject.attachments).to eq([
{
text: "Wiki page description",
- color: color,
+ color: color
}
])
end
diff --git a/spec/models/project_services/kubernetes_service_spec.rb b/spec/models/project_services/kubernetes_service_spec.rb
index e69eb0098dd..c1c2f2a7219 100644
--- a/spec/models/project_services/kubernetes_service_spec.rb
+++ b/spec/models/project_services/kubernetes_service_spec.rb
@@ -54,7 +54,7 @@ describe KubernetesService, models: true, caching: true do
'a' * 63 => true,
'a' * 64 => false,
'a.b' => false,
- 'a*b' => false,
+ 'a*b' => false
}.each do |namespace, validity|
it "validates #{namespace} as #{validity ? 'valid' : 'invalid'}" do
subject.namespace = namespace
@@ -168,7 +168,7 @@ describe KubernetesService, models: true, caching: true do
{ key: 'KUBE_TOKEN', value: 'token', public: false },
{ key: 'KUBE_NAMESPACE', value: 'my-project', public: true },
{ key: 'KUBE_CA_PEM', value: 'CA PEM DATA', public: true },
- { key: 'KUBE_CA_PEM_FILE', value: 'CA PEM DATA', public: true, file: true },
+ { key: 'KUBE_CA_PEM_FILE', value: 'CA PEM DATA', public: true, file: true }
)
end
end
@@ -179,7 +179,7 @@ describe KubernetesService, models: true, caching: true do
{ key: 'KUBE_URL', value: 'https://kube.domain.com', public: true },
{ key: 'KUBE_TOKEN', value: 'token', public: false },
{ key: 'KUBE_CA_PEM', value: 'CA PEM DATA', public: true },
- { key: 'KUBE_CA_PEM_FILE', value: 'CA PEM DATA', public: true, file: true },
+ { key: 'KUBE_CA_PEM_FILE', value: 'CA PEM DATA', public: true, file: true }
)
end
diff --git a/spec/models/project_services/pivotaltracker_service_spec.rb b/spec/models/project_services/pivotaltracker_service_spec.rb
index 45b2f1068bf..a76e909d04d 100644
--- a/spec/models/project_services/pivotaltracker_service_spec.rb
+++ b/spec/models/project_services/pivotaltracker_service_spec.rb
@@ -40,7 +40,7 @@ describe PivotaltrackerService, models: true do
name: 'Some User'
},
url: 'https://example.com/commit',
- message: 'commit message',
+ message: 'commit message'
}
]
}
diff --git a/spec/models/project_services/prometheus_service_spec.rb b/spec/models/project_services/prometheus_service_spec.rb
index f3126bc1e57..1f9d3c07b51 100644
--- a/spec/models/project_services/prometheus_service_spec.rb
+++ b/spec/models/project_services/prometheus_service_spec.rb
@@ -6,6 +6,7 @@ describe PrometheusService, models: true, caching: true do
let(:project) { create(:prometheus_project) }
let(:service) { project.prometheus_service }
+ let(:environment_query) { Gitlab::Prometheus::Queries::EnvironmentQuery }
describe "Associations" do
it { is_expected.to belong_to :project }
@@ -45,17 +46,18 @@ describe PrometheusService, models: true, caching: true do
end
end
- describe '#metrics' do
+ describe '#environment_metrics' do
let(:environment) { build_stubbed(:environment, slug: 'env-slug') }
- subject { service.metrics(environment) }
around do |example|
Timecop.freeze { example.run }
end
context 'with valid data' do
+ subject { service.environment_metrics(environment) }
+
before do
- stub_reactive_cache(service, prometheus_data, 'env-slug')
+ stub_reactive_cache(service, prometheus_data, environment_query, environment.id)
end
it 'returns reactive data' do
@@ -64,15 +66,36 @@ describe PrometheusService, models: true, caching: true do
end
end
+ describe '#deployment_metrics' do
+ let(:deployment) { build_stubbed(:deployment) }
+ let(:deployment_query) { Gitlab::Prometheus::Queries::DeploymentQuery }
+
+ around do |example|
+ Timecop.freeze { example.run }
+ end
+
+ context 'with valid data' do
+ subject { service.deployment_metrics(deployment) }
+
+ before do
+ stub_reactive_cache(service, prometheus_data, deployment_query, deployment.id)
+ end
+
+ it 'returns reactive data' do
+ is_expected.to eq(prometheus_data.merge(deployment_time: deployment.created_at.to_i))
+ end
+ end
+ end
+
describe '#calculate_reactive_cache' do
- let(:environment) { build_stubbed(:environment, slug: 'env-slug') }
+ let(:environment) { create(:environment, slug: 'env-slug') }
around do |example|
Timecop.freeze { example.run }
end
subject do
- service.calculate_reactive_cache(environment.slug)
+ service.calculate_reactive_cache(environment_query.to_s, environment.id)
end
context 'when service is inactive' do
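
The reactive-cache key changes here from a bare environment slug to a query class name plus an id; below is a minimal sketch of the dispatch this implies, with the client helper and the exact wrappers treated as assumptions:

class PrometheusService < MonitoringService
  def environment_metrics(environment)
    with_reactive_cache(Gitlab::Prometheus::Queries::EnvironmentQuery.name,
                        environment.id) { |data| data }
  end

  def deployment_metrics(deployment)
    metrics = with_reactive_cache(Gitlab::Prometheus::Queries::DeploymentQuery.name,
                                  deployment.id) { |data| data }
    metrics && metrics.merge(deployment_time: deployment.created_at.to_i)
  end

  def calculate_reactive_cache(query_class_name, *args)
    # The cache key now carries the query class name, so dispatch on it.
    query_class_name.constantize.new(client).query(*args)
  end
end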
diff --git a/spec/models/project_snippet_spec.rb b/spec/models/project_snippet_spec.rb
index d9d7c0b0aaa..5fe4885eeb4 100644
--- a/spec/models/project_snippet_spec.rb
+++ b/spec/models/project_snippet_spec.rb
@@ -5,9 +5,6 @@ describe ProjectSnippet, models: true do
it { is_expected.to belong_to(:project) }
end
- describe "Mass assignment" do
- end
-
describe "Validation" do
it { is_expected.to validate_presence_of(:project) }
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 2fc8ffed80a..f2b4e9070b4 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -73,6 +73,7 @@ describe Project, models: true do
it { is_expected.to have_many(:notification_settings).dependent(:destroy) }
it { is_expected.to have_many(:forks).through(:forked_project_links) }
it { is_expected.to have_many(:uploads).dependent(:destroy) }
+ it { is_expected.to have_many(:pipeline_schedules).dependent(:destroy) }
context 'after initialized' do
it "has a project_feature" do
@@ -812,8 +813,16 @@ describe Project, models: true do
context 'when avatar file is uploaded' do
let(:project) { create(:empty_project, :with_avatar) }
let(:avatar_path) { "/uploads/project/avatar/#{project.id}/dk.png" }
+ let(:gitlab_host) { "http://#{Gitlab.config.gitlab.host}" }
- it { should eq "http://#{Gitlab.config.gitlab.host}#{avatar_path}" }
+ it 'returns the correct avatar URL' do
+ expect(project.avatar_url).to eq(avatar_path)
+ expect(project.avatar_url(only_path: false)).to eq([gitlab_host, avatar_path].join)
+
+ allow(ActionController::Base).to receive(:asset_host).and_return(gitlab_host)
+
+ expect(project.avatar_url).to eq([gitlab_host, avatar_path].join)
+ end
end
context 'When avatar file in git' do
@@ -964,7 +973,7 @@ describe Project, models: true do
before do
storages = {
'default' => { 'path' => 'tmp/tests/repositories' },
- 'picked' => { 'path' => 'tmp/tests/repositories' },
+ 'picked' => { 'path' => 'tmp/tests/repositories' }
}
allow(Gitlab.config.repositories).to receive(:storages).and_return(storages)
end
diff --git a/spec/models/project_statistics_spec.rb b/spec/models/project_statistics_spec.rb
index ff29f6f66ba..c5ffbda9821 100644
--- a/spec/models/project_statistics_spec.rb
+++ b/spec/models/project_statistics_spec.rb
@@ -35,7 +35,7 @@ describe ProjectStatistics, models: true do
commit_count: 8.exabytes - 1,
repository_size: 2.exabytes,
lfs_objects_size: 2.exabytes,
- build_artifacts_size: 4.exabytes - 1,
+ build_artifacts_size: 4.exabytes - 1
)
statistics.reload
@@ -149,7 +149,7 @@ describe ProjectStatistics, models: true do
it "sums all storage counters" do
statistics.update!(
repository_size: 2,
- lfs_objects_size: 3,
+ lfs_objects_size: 3
)
statistics.reload
diff --git a/spec/models/protected_branch/merge_access_level_spec.rb b/spec/models/protected_branch/merge_access_level_spec.rb
new file mode 100644
index 00000000000..1e7242e9fa8
--- /dev/null
+++ b/spec/models/protected_branch/merge_access_level_spec.rb
@@ -0,0 +1,5 @@
+require 'spec_helper'
+
+describe ProtectedBranch::MergeAccessLevel, :models do
+ it { is_expected.to validate_inclusion_of(:access_level).in_array([Gitlab::Access::MASTER, Gitlab::Access::DEVELOPER, Gitlab::Access::NO_ACCESS]) }
+end
diff --git a/spec/models/protected_branch/push_access_level_spec.rb b/spec/models/protected_branch/push_access_level_spec.rb
new file mode 100644
index 00000000000..de68351198c
--- /dev/null
+++ b/spec/models/protected_branch/push_access_level_spec.rb
@@ -0,0 +1,5 @@
+require 'spec_helper'
+
+describe ProtectedBranch::PushAccessLevel, :models do
+ it { is_expected.to validate_inclusion_of(:access_level).in_array([Gitlab::Access::MASTER, Gitlab::Access::DEVELOPER, Gitlab::Access::NO_ACCESS]) }
+end
diff --git a/spec/models/protected_branch_spec.rb b/spec/models/protected_branch_spec.rb
index 179a443c43d..ca347cf92c9 100644
--- a/spec/models/protected_branch_spec.rb
+++ b/spec/models/protected_branch_spec.rb
@@ -7,9 +7,6 @@ describe ProtectedBranch, models: true do
it { is_expected.to belong_to(:project) }
end
- describe "Mass assignment" do
- end
-
describe 'Validation' do
it { is_expected.to validate_presence_of(:project) }
it { is_expected.to validate_presence_of(:name) }
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index dd6514b3b50..718b7d5e86b 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -2,7 +2,7 @@ require 'spec_helper'
describe Repository, models: true do
include RepoHelpers
- TestBlob = Struct.new(:name)
+ TestBlob = Struct.new(:path)
let(:project) { create(:project, :repository) }
let(:repository) { project.repository }
@@ -110,22 +110,11 @@ describe Repository, models: true do
end
describe '#ref_name_for_sha' do
- context 'ref found' do
- it 'returns the ref' do
- allow_any_instance_of(Gitlab::Popen).to receive(:popen).
- and_return(["b8d95eb4969eefacb0a58f6a28f6803f8070e7b9 commit\trefs/environments/production/77\n", 0])
+ it 'returns the ref' do
+ allow(repository.raw_repository).to receive(:ref_name_for_sha).
+ and_return('refs/environments/production/77')
- expect(repository.ref_name_for_sha('bla', '0' * 40)).to eq 'refs/environments/production/77'
- end
- end
-
- context 'ref not found' do
- it 'returns nil' do
- allow_any_instance_of(Gitlab::Popen).to receive(:popen).
- and_return(["", 0])
-
- expect(repository.ref_name_for_sha('bla', '0' * 40)).to eq nil
- end
+ expect(repository.ref_name_for_sha('bla', '0' * 40)).to eq 'refs/environments/production/77'
end
end
@@ -565,31 +554,31 @@ describe Repository, models: true do
it 'accepts changelog' do
expect(repository.tree).to receive(:blobs).and_return([TestBlob.new('changelog')])
- expect(repository.changelog.name).to eq('changelog')
+ expect(repository.changelog.path).to eq('changelog')
end
it 'accepts news instead of changelog' do
expect(repository.tree).to receive(:blobs).and_return([TestBlob.new('news')])
- expect(repository.changelog.name).to eq('news')
+ expect(repository.changelog.path).to eq('news')
end
it 'accepts history instead of changelog' do
expect(repository.tree).to receive(:blobs).and_return([TestBlob.new('history')])
- expect(repository.changelog.name).to eq('history')
+ expect(repository.changelog.path).to eq('history')
end
it 'accepts changes instead of changelog' do
expect(repository.tree).to receive(:blobs).and_return([TestBlob.new('changes')])
- expect(repository.changelog.name).to eq('changes')
+ expect(repository.changelog.path).to eq('changes')
end
it 'is case-insensitive' do
expect(repository.tree).to receive(:blobs).and_return([TestBlob.new('CHANGELOG')])
- expect(repository.changelog.name).to eq('CHANGELOG')
+ expect(repository.changelog.path).to eq('CHANGELOG')
end
end
@@ -624,7 +613,7 @@ describe Repository, models: true do
repository.create_file(user, 'LICENSE', 'Copyright!',
message: 'Add LICENSE', branch_name: 'master')
- expect(repository.license_blob.name).to eq('LICENSE')
+ expect(repository.license_blob.path).to eq('LICENSE')
end
%w[LICENSE LICENCE LiCensE LICENSE.md LICENSE.foo COPYING COPYING.md].each do |filename|
@@ -654,7 +643,7 @@ describe Repository, models: true do
expect(repository.license_key).to be_nil
end
- it 'detects license file with no recognizable open-source license content' do
+ it 'returns nil when the content is not recognizable' do
repository.create_file(user, 'LICENSE', 'Copyright!',
message: 'Add LICENSE', branch_name: 'master')
@@ -670,12 +659,45 @@ describe Repository, models: true do
end
end
+ describe '#license' do
+ before do
+ repository.delete_file(user, 'LICENSE',
+ message: 'Remove LICENSE', branch_name: 'master')
+ end
+
+ it 'returns nil when no license is detected' do
+ expect(repository.license).to be_nil
+ end
+
+ it 'returns nil when the repository does not exist' do
+ expect(repository).to receive(:exists?).and_return(false)
+
+ expect(repository.license).to be_nil
+ end
+
+ it 'returns nil when the content is not recognizable' do
+ repository.create_file(user, 'LICENSE', 'Copyright!',
+ message: 'Add LICENSE', branch_name: 'master')
+
+ expect(repository.license).to be_nil
+ end
+
+ it 'returns the license' do
+ license = Licensee::License.new('mit')
+ repository.create_file(user, 'LICENSE',
+ license.content,
+ message: 'Add LICENSE', branch_name: 'master')
+
+ expect(repository.license).to eq(license)
+ end
+ end
+
describe "#gitlab_ci_yml", caching: true do
it 'returns valid file' do
files = [TestBlob.new('file'), TestBlob.new('.gitlab-ci.yml'), TestBlob.new('copying')]
expect(repository.tree).to receive(:blobs).and_return(files)
- expect(repository.gitlab_ci_yml.name).to eq('.gitlab-ci.yml')
+ expect(repository.gitlab_ci_yml.path).to eq('.gitlab-ci.yml')
end
it 'returns nil if not exists' do
@@ -1626,15 +1648,25 @@ describe Repository, models: true do
describe '#readme', caching: true do
context 'with a non-existing repository' do
it 'returns nil' do
- expect(repository).to receive(:tree).with(:head).and_return(nil)
+ allow(repository).to receive(:tree).with(:head).and_return(nil)
expect(repository.readme).to be_nil
end
end
context 'with an existing repository' do
- it 'returns the README' do
- expect(repository.readme).to be_an_instance_of(Gitlab::Git::Blob)
+ context 'when no README exists' do
+ it 'returns nil' do
+ allow_any_instance_of(Tree).to receive(:readme).and_return(nil)
+
+ expect(repository.readme).to be_nil
+ end
+ end
+
+ context 'when a README exists' do
+ it 'returns the README' do
+ expect(repository.readme).to be_an_instance_of(ReadmeBlob)
+ end
end
end
end
@@ -1825,11 +1857,12 @@ describe Repository, models: true do
describe '#refresh_method_caches' do
it 'refreshes the caches of the given types' do
expect(repository).to receive(:expire_method_caches).
- with(%i(rendered_readme license_blob license_key))
+ with(%i(rendered_readme license_blob license_key license))
expect(repository).to receive(:rendered_readme)
expect(repository).to receive(:license_blob)
expect(repository).to receive(:license_key)
+ expect(repository).to receive(:license)
repository.refresh_method_caches(%i(readme license))
end
@@ -1873,12 +1906,18 @@ describe Repository, models: true do
describe '#is_ancestor?' do
context 'Gitaly is_ancestor feature enabled' do
- it "asks Gitaly server if it's an ancestor" do
- commit = repository.commit
- expect(repository.raw_repository).to receive(:is_ancestor?).and_call_original
+ let(:commit) { repository.commit }
+ let(:ancestor) { commit.parents.first }
+
+ before do
+ allow(Gitlab::GitalyClient).to receive(:enabled?).and_return(true)
allow(Gitlab::GitalyClient).to receive(:feature_enabled?).with(:is_ancestor).and_return(true)
+ end
+
+ it "asks Gitaly server if it's an ancestor" do
+ expect_any_instance_of(Gitlab::GitalyClient::Commit).to receive(:is_ancestor).with(ancestor.id, commit.id)
- expect(repository.is_ancestor?(commit.id, commit.id)).to be true
+ repository.is_ancestor?(ancestor.id, commit.id)
end
end
end
diff --git a/spec/models/snippet_spec.rb b/spec/models/snippet_spec.rb
index 75b1fc7e216..1e5c96fe593 100644
--- a/spec/models/snippet_spec.rb
+++ b/spec/models/snippet_spec.rb
@@ -131,46 +131,6 @@ describe Snippet, models: true do
end
end
- describe '.accessible_to' do
- let(:author) { create(:author) }
- let(:project) { create(:empty_project) }
-
- let!(:public_snippet) { create(:snippet, :public) }
- let!(:internal_snippet) { create(:snippet, :internal) }
- let!(:private_snippet) { create(:snippet, :private, author: author) }
-
- let!(:project_public_snippet) { create(:snippet, :public, project: project) }
- let!(:project_internal_snippet) { create(:snippet, :internal, project: project) }
- let!(:project_private_snippet) { create(:snippet, :private, project: project) }
-
- it 'returns only public snippets when user is blank' do
- expect(described_class.accessible_to(nil)).to match_array [public_snippet, project_public_snippet]
- end
-
- it 'returns only public, and internal snippets for regular users' do
- user = create(:user)
-
- expect(described_class.accessible_to(user)).to match_array [public_snippet, internal_snippet, project_public_snippet, project_internal_snippet]
- end
-
- it 'returns public, internal snippets and project private snippets for project members' do
- member = create(:user)
- project.team << [member, :developer]
-
- expect(described_class.accessible_to(member)).to match_array [public_snippet, internal_snippet, project_public_snippet, project_internal_snippet, project_private_snippet]
- end
-
- it 'returns private snippets where the user is the author' do
- expect(described_class.accessible_to(author)).to match_array [public_snippet, internal_snippet, private_snippet, project_public_snippet, project_internal_snippet]
- end
-
- it 'returns all snippets when for admins' do
- admin = create(:admin)
-
- expect(described_class.accessible_to(admin)).to match_array [public_snippet, internal_snippet, private_snippet, project_public_snippet, project_internal_snippet, project_private_snippet]
- end
- end
-
describe '#participants' do
let(:project) { create(:empty_project, :public) }
let(:snippet) { create(:snippet, content: 'foo', project: project) }
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 63e71f5ff2f..6a15830a15c 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -344,6 +344,35 @@ describe User, models: true do
end
end
+ describe '#update_tracked_fields!', :redis do
+ let(:request) { OpenStruct.new(remote_ip: "127.0.0.1") }
+ let(:user) { create(:user) }
+
+ it 'writes trackable attributes' do
+ expect do
+ user.update_tracked_fields!(request)
+ end.to change { user.reload.current_sign_in_at }
+ end
+
+ it 'does not write trackable attributes when called a second time within the hour' do
+ user.update_tracked_fields!(request)
+
+ expect do
+ user.update_tracked_fields!(request)
+ end.not_to change { user.reload.current_sign_in_at }
+ end
+
+ it 'writes trackable attributes for a different user' do
+ user2 = create(:user)
+
+ user.update_tracked_fields!(request)
+
+ expect do
+ user2.update_tracked_fields!(request)
+ end.to change { user2.reload.current_sign_in_at }
+ end
+ end
+
shared_context 'user keys' do
let(:user) { create(:user) }
let!(:key) { create(:key, user: user) }
@@ -647,7 +676,7 @@ describe User, models: true do
protocol_and_expectation = {
'http' => false,
'ssh' => true,
- '' => true,
+ '' => true
}
protocol_and_expectation.each do |protocol, expected|
@@ -900,10 +929,20 @@ describe User, models: true do
end
context 'with a group route matching the given path' do
- let!(:group) { create(:group, path: 'group_path') }
+ context 'when the group namespace has an owner_id (legacy data)' do
+ let!(:group) { create(:group, path: 'group_path', owner: user) }
- it 'returns nil' do
- expect(User.find_by_full_path('group_path')).to eq(nil)
+ it 'returns nil' do
+ expect(User.find_by_full_path('group_path')).to eq(nil)
+ end
+ end
+
+ context 'when the group namespace does not have an owner_id' do
+ let!(:group) { create(:group, path: 'group_path') }
+
+ it 'returns nil' do
+ expect(User.find_by_full_path('group_path')).to eq(nil)
+ end
end
end
end
@@ -935,12 +974,19 @@ describe User, models: true do
describe '#avatar_url' do
let(:user) { create(:user, :with_avatar) }
- subject { user.avatar_url }
context 'when avatar file is uploaded' do
+ let(:gitlab_host) { "http://#{Gitlab.config.gitlab.host}" }
let(:avatar_path) { "/uploads/user/avatar/#{user.id}/dk.png" }
- it { should eq "http://#{Gitlab.config.gitlab.host}#{avatar_path}" }
+ it 'shows correct avatar url' do
+ expect(user.avatar_url).to eq(avatar_path)
+ expect(user.avatar_url(only_path: false)).to eq([gitlab_host, avatar_path].join)
+
+ allow(ActionController::Base).to receive(:asset_host).and_return(gitlab_host)
+
+ expect(user.avatar_url).to eq([gitlab_host, avatar_path].join)
+ end
end
end
@@ -1741,4 +1787,32 @@ describe User, models: true do
expect(user.preferred_language).to eq('en')
end
end
+
+ context '#invalidate_issue_cache_counts' do
+ let(:user) { build_stubbed(:user) }
+
+ it 'invalidates cache for issue counter' do
+ cache_mock = double
+
+ expect(cache_mock).to receive(:delete).with(['users', user.id, 'assigned_open_issues_count'])
+
+ allow(Rails).to receive(:cache).and_return(cache_mock)
+
+ user.invalidate_issue_cache_counts
+ end
+ end
+
+ context '#invalidate_merge_request_cache_counts' do
+ let(:user) { build_stubbed(:user) }
+
+ it 'invalidates cache for Merge Request counter' do
+ cache_mock = double
+
+ expect(cache_mock).to receive(:delete).with(['users', user.id, 'assigned_open_merge_requests_count'])
+
+ allow(Rails).to receive(:cache).and_return(cache_mock)
+
+ user.invalidate_merge_request_cache_counts
+ end
+ end
end
diff --git a/spec/policies/environment_policy_spec.rb b/spec/policies/environment_policy_spec.rb
index 0e15beaa5e8..650432520bb 100644
--- a/spec/policies/environment_policy_spec.rb
+++ b/spec/policies/environment_policy_spec.rb
@@ -33,7 +33,7 @@ describe EnvironmentPolicy do
let(:project) { create(:project, :public) }
before do
- project.add_master(user)
+ project.add_developer(user)
end
context 'when team member has ability to stop environment' do
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index 064847ee3dc..0d3af1f4499 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -43,7 +43,7 @@ describe ProjectPolicy, models: true do
let(:master_permissions) do
%i[
- push_code_to_protected_branches update_project_snippet update_environment
+ delete_protected_branch update_project_snippet update_environment
update_deployment admin_milestone admin_project_snippet
admin_project_member admin_note admin_wiki admin_project
admin_commit_status admin_build admin_container_image
diff --git a/spec/policies/project_snippet_policy_spec.rb b/spec/policies/project_snippet_policy_spec.rb
index d0758af57dd..e1771b636b8 100644
--- a/spec/policies/project_snippet_policy_spec.rb
+++ b/spec/policies/project_snippet_policy_spec.rb
@@ -1,7 +1,9 @@
require 'spec_helper'
describe ProjectSnippetPolicy, models: true do
- let(:current_user) { create(:user) }
+ let(:regular_user) { create(:user) }
+ let(:external_user) { create(:user, :external) }
+ let(:project) { create(:empty_project) }
let(:author_permissions) do
[
@@ -10,13 +12,15 @@ describe ProjectSnippetPolicy, models: true do
]
end
- subject { described_class.abilities(current_user, project_snippet).to_set }
+ def abilities(user, snippet_visibility)
+ snippet = create(:project_snippet, snippet_visibility, project: project)
- context 'public snippet' do
- let(:project_snippet) { create(:project_snippet, :public) }
+ described_class.abilities(user, snippet).to_set
+ end
+ context 'public snippet' do
context 'no user' do
- let(:current_user) { nil }
+ subject { abilities(nil, :public) }
it do
is_expected.to include(:read_project_snippet)
@@ -25,6 +29,17 @@ describe ProjectSnippetPolicy, models: true do
end
context 'regular user' do
+ subject { abilities(regular_user, :public) }
+
+ it do
+ is_expected.to include(:read_project_snippet)
+ is_expected.not_to include(*author_permissions)
+ end
+ end
+
+ context 'external user' do
+ subject { abilities(external_user, :public) }
+
it do
is_expected.to include(:read_project_snippet)
is_expected.not_to include(*author_permissions)
@@ -33,10 +48,8 @@ describe ProjectSnippetPolicy, models: true do
end
context 'internal snippet' do
- let(:project_snippet) { create(:project_snippet, :internal) }
-
context 'no user' do
- let(:current_user) { nil }
+ subject { abilities(nil, :internal) }
it do
is_expected.not_to include(:read_project_snippet)
@@ -45,6 +58,28 @@ describe ProjectSnippetPolicy, models: true do
end
context 'regular user' do
+ subject { abilities(regular_user, :internal) }
+
+ it do
+ is_expected.to include(:read_project_snippet)
+ is_expected.not_to include(*author_permissions)
+ end
+ end
+
+ context 'external user' do
+ subject { abilities(external_user, :internal) }
+
+ it do
+ is_expected.not_to include(:read_project_snippet)
+ is_expected.not_to include(*author_permissions)
+ end
+ end
+
+ context 'project team member external user' do
+ subject { abilities(external_user, :internal) }
+
+ before { project.team << [external_user, :developer] }
+
it do
is_expected.to include(:read_project_snippet)
is_expected.not_to include(*author_permissions)
@@ -53,10 +88,8 @@ describe ProjectSnippetPolicy, models: true do
end
context 'private snippet' do
- let(:project_snippet) { create(:project_snippet, :private) }
-
context 'no user' do
- let(:current_user) { nil }
+ subject { abilities(nil, :private) }
it do
is_expected.not_to include(:read_project_snippet)
@@ -65,6 +98,8 @@ describe ProjectSnippetPolicy, models: true do
end
context 'regular user' do
+ subject { abilities(regular_user, :private) }
+
it do
is_expected.not_to include(:read_project_snippet)
is_expected.not_to include(*author_permissions)
@@ -72,7 +107,9 @@ describe ProjectSnippetPolicy, models: true do
end
context 'snippet author' do
- let(:project_snippet) { create(:project_snippet, :private, author: current_user) }
+ let(:snippet) { create(:project_snippet, :private, author: regular_user) }
+
+ subject { described_class.abilities(regular_user, snippet).to_set }
it do
is_expected.to include(:read_project_snippet)
@@ -80,8 +117,21 @@ describe ProjectSnippetPolicy, models: true do
end
end
- context 'project team member' do
- before { project_snippet.project.team << [current_user, :developer] }
+ context 'project team member normal user' do
+ subject { abilities(regular_user, :private) }
+
+ before { project.team << [regular_user, :developer] }
+
+ it do
+ is_expected.to include(:read_project_snippet)
+ is_expected.not_to include(*author_permissions)
+ end
+ end
+
+ context 'project team member external user' do
+ subject { abilities(external_user, :private) }
+
+ before { project.team << [external_user, :developer] }
it do
is_expected.to include(:read_project_snippet)
@@ -90,7 +140,7 @@ describe ProjectSnippetPolicy, models: true do
end
context 'admin user' do
- let(:current_user) { create(:admin) }
+ subject { abilities(create(:admin), :private) }
it do
is_expected.to include(:read_project_snippet)
diff --git a/spec/presenters/merge_request_presenter_spec.rb b/spec/presenters/merge_request_presenter_spec.rb
new file mode 100644
index 00000000000..44720fc4448
--- /dev/null
+++ b/spec/presenters/merge_request_presenter_spec.rb
@@ -0,0 +1,356 @@
+require 'spec_helper'
+
+describe MergeRequestPresenter do
+ let(:resource) { create :merge_request, source_project: project }
+ let(:project) { create :empty_project }
+ let(:user) { create(:user) }
+
+ describe '#ci_status' do
+ subject { described_class.new(resource).ci_status }
+
+ context 'when no head pipeline' do
+ it 'returns status using CiService' do
+ ci_service = double(MockCiService)
+ ci_status = double
+
+ allow(resource.source_project)
+ .to receive(:ci_service)
+ .and_return(ci_service)
+
+ allow(resource).to receive(:head_pipeline).and_return(nil)
+
+ expect(ci_service).to receive(:commit_status)
+ .with(resource.diff_head_sha, resource.source_branch)
+ .and_return(ci_status)
+
+ is_expected.to eq(ci_status)
+ end
+ end
+
+ context 'when head pipeline present' do
+ let(:pipeline) { build_stubbed(:ci_pipeline) }
+
+ before do
+ allow(resource).to receive(:head_pipeline).and_return(pipeline)
+ end
+
+ context 'success with warnings' do
+ before do
+ allow(pipeline).to receive(:success?) { true }
+ allow(pipeline).to receive(:has_warnings?) { true }
+ end
+
+ it 'returns "success_with_warnings"' do
+ is_expected.to eq('success_with_warnings')
+ end
+ end
+
+ context 'pipeline HAS status AND is not success with warnings' do
+ before do
+ allow(pipeline).to receive(:success?) { false }
+ allow(pipeline).to receive(:has_warnings?) { false }
+ end
+
+ it 'returns pipeline status' do
+ is_expected.to eq('pending')
+ end
+ end
+
+ context 'pipeline has NO status AND is not success with warnings' do
+ before do
+ allow(pipeline).to receive(:status) { nil }
+ allow(pipeline).to receive(:success?) { false }
+ allow(pipeline).to receive(:has_warnings?) { false }
+ end
+
+ it 'returns "preparing"' do
+ is_expected.to eq('preparing')
+ end
+ end
+ end
+ end
+
+ describe '#conflict_resolution_path' do
+ let(:project) { create :empty_project }
+ let(:user) { create :user }
+ let(:presenter) { described_class.new(resource, current_user: user) }
+ let(:path) { presenter.conflict_resolution_path }
+
+ context 'when MR cannot be resolved in UI' do
+ it 'does not return conflict resolution path' do
+ allow(presenter).to receive_message_chain(:conflicts, :can_be_resolved_in_ui?) { false }
+
+ expect(path).to be_nil
+ end
+ end
+
+ context 'when conflicts cannot be resolved by user' do
+ it 'does not return conflict resolution path' do
+ allow(presenter).to receive_message_chain(:conflicts, :can_be_resolved_in_ui?) { true }
+ allow(presenter).to receive_message_chain(:conflicts, :can_be_resolved_by?).with(user) { false }
+
+ expect(path).to be_nil
+ end
+ end
+
+ context 'when able to access conflict resolution UI' do
+ it 'returns conflict resolution path' do
+ allow(presenter).to receive_message_chain(:conflicts, :can_be_resolved_in_ui?) { true }
+ allow(presenter).to receive_message_chain(:conflicts, :can_be_resolved_by?).with(user) { true }
+
+ expect(path)
+ .to eq("/#{project.full_path}/merge_requests/#{resource.iid}/conflicts")
+ end
+ end
+ end
+
+ context 'issues links' do
+ let(:project) { create(:project, :private, creator: user, namespace: user.namespace) }
+ let(:issue_a) { create(:issue, project: project) }
+ let(:issue_b) { create(:issue, project: project) }
+
+ let(:resource) do
+ create(:merge_request,
+ source_project: project, target_project: project,
+ description: "Fixes #{issue_a.to_reference} Related #{issue_b.to_reference}")
+ end
+
+ before do
+ project.team << [user, :developer]
+
+ allow(resource.project).to receive(:default_branch)
+ .and_return(resource.target_branch)
+ end
+
+ describe '#closing_issues_links' do
+ subject { described_class.new(resource, current_user: user).closing_issues_links }
+
+ it 'presents closing issues links' do
+ is_expected.to match("#{project.full_path}/issues/#{issue_a.iid}")
+ end
+
+ it 'does not present related issues links' do
+ is_expected.not_to match("#{project.full_path}/issues/#{issue_b.iid}")
+ end
+ end
+
+ describe '#mentioned_issues_links' do
+ subject do
+ described_class.new(resource, current_user: user)
+ .mentioned_issues_links
+ end
+
+ it 'presents related issues links' do
+ is_expected.to match("#{project.full_path}/issues/#{issue_b.iid}")
+ end
+
+ it 'does not present closing issues links' do
+ is_expected.not_to match("#{project.full_path}/issues/#{issue_a.iid}")
+ end
+ end
+
+ describe '#assign_to_closing_issues_link' do
+ subject do
+ described_class.new(resource, current_user: user)
+ .assign_to_closing_issues_link
+ end
+
+ before do
+ assign_issues_service = double(MergeRequests::AssignIssuesService, assignable_issues: assignable_issues)
+ allow(MergeRequests::AssignIssuesService).to receive(:new)
+ .and_return(assign_issues_service)
+ end
+
+ context 'single closing issue' do
+ let(:issue) { create(:issue) }
+ let(:assignable_issues) { [issue] }
+
+ it 'returns correct link with correct text' do
+ is_expected
+ .to match("#{project.full_path}/merge_requests/#{resource.iid}/assign_related_issues")
+
+ is_expected
+ .to match("Assign yourself to this issue")
+ end
+ end
+
+ context 'multiple closing issues' do
+ let(:issues) { create_list(:issue, 2) }
+ let(:assignable_issues) { issues }
+
+ it 'returns correct link with correct text' do
+ is_expected
+ .to match("#{project.full_path}/merge_requests/#{resource.iid}/assign_related_issues")
+
+ is_expected
+ .to match("Assign yourself to these issues")
+ end
+ end
+
+ context 'no closing issue' do
+ let(:assignable_issues) { [] }
+
+ it 'returns correct link with correct text' do
+ is_expected.to be_nil
+ end
+ end
+ end
+ end
+
+ describe '#cancel_merge_when_pipeline_succeeds_path' do
+ subject do
+ described_class.new(resource, current_user: user)
+ .cancel_merge_when_pipeline_succeeds_path
+ end
+
+ context 'when can cancel mwps' do
+ it 'returns path' do
+ allow(resource).to receive(:can_cancel_merge_when_pipeline_succeeds?)
+ .with(user)
+ .and_return(true)
+
+ is_expected.to eq("/#{resource.project.full_path}/merge_requests/#{resource.iid}/cancel_merge_when_pipeline_succeeds")
+ end
+ end
+
+ context 'when cannot cancel mwps' do
+ it 'returns nil' do
+ allow(resource).to receive(:can_cancel_merge_when_pipeline_succeeds?)
+ .with(user)
+ .and_return(false)
+
+ is_expected.to be_nil
+ end
+ end
+ end
+
+ describe '#merge_path' do
+ subject do
+ described_class.new(resource, current_user: user).merge_path
+ end
+
+ context 'when can be merged by user' do
+ it 'returns path' do
+ allow(resource).to receive(:can_be_merged_by?)
+ .with(user)
+ .and_return(true)
+
+ is_expected
+ .to eq("/#{resource.project.full_path}/merge_requests/#{resource.iid}/merge")
+ end
+ end
+
+ context 'when cannot be merged by user' do
+ it 'returns nil' do
+ allow(resource).to receive(:can_be_merged_by?)
+ .with(user)
+ .and_return(false)
+
+ is_expected.to be_nil
+ end
+ end
+ end
+
+ describe '#create_issue_to_resolve_discussions_path' do
+ subject do
+ described_class.new(resource, current_user: user)
+ .create_issue_to_resolve_discussions_path
+ end
+
+ context 'when can create issue and issues enabled' do
+ it 'returns path' do
+ allow(project).to receive(:issues_enabled?) { true }
+ project.team << [user, :master]
+
+ is_expected
+ .to eq("/#{resource.project.full_path}/issues/new?merge_request_to_resolve_discussions_of=#{resource.iid}")
+ end
+ end
+
+ context 'when cannot create issue' do
+ it 'returns nil' do
+ allow(project).to receive(:issues_enabled?) { true }
+
+ is_expected.to be_nil
+ end
+ end
+
+ context 'when issues disabled' do
+ it 'returns nil' do
+ allow(project).to receive(:issues_enabled?) { false }
+ project.team << [user, :master]
+
+ is_expected.to be_nil
+ end
+ end
+ end
+
+ describe '#remove_wip_path' do
+ subject do
+ described_class.new(resource, current_user: user).remove_wip_path
+ end
+
+ context 'when merge request enabled and has permission' do
+ it 'has remove_wip_path' do
+ allow(project).to receive(:merge_requests_enabled?) { true }
+ project.team << [user, :master]
+
+ is_expected
+ .to eq("/#{resource.project.full_path}/merge_requests/#{resource.iid}/remove_wip")
+ end
+ end
+
+ context 'when has no permission' do
+ it 'returns nil' do
+ is_expected.to be_nil
+ end
+ end
+ end
+
+ describe '#target_branch_commits_path' do
+ subject do
+ described_class.new(resource, current_user: user)
+ .target_branch_commits_path
+ end
+
+ context 'when target branch exists' do
+ it 'returns path' do
+ allow(resource).to receive(:target_branch_exists?) { true }
+
+ is_expected
+ .to eq("/#{resource.target_project.full_path}/commits/#{resource.target_branch}")
+ end
+ end
+
+ context 'when target branch does not exist' do
+ it 'returns nil' do
+ allow(resource).to receive(:target_branch_exists?) { false }
+
+ is_expected.to be_nil
+ end
+ end
+ end
+
+ describe '#source_branch_path' do
+ subject do
+ described_class.new(resource, current_user: user).source_branch_path
+ end
+
+ context 'when source branch exists' do
+ it 'returns path' do
+ allow(resource).to receive(:source_branch_exists?) { true }
+
+ is_expected
+ .to eq("/#{resource.source_project.full_path}/branches/#{resource.source_branch}")
+ end
+ end
+
+ context 'when source branch does not exist' do
+ it 'returns nil' do
+ allow(resource).to receive(:source_branch_exists?) { false }
+
+ is_expected.to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/branches_spec.rb b/spec/requests/api/branches_spec.rb
index 7eaa89837c8..c64499fc8c0 100644
--- a/spec/requests/api/branches_spec.rb
+++ b/spec/requests/api/branches_spec.rb
@@ -406,19 +406,6 @@ describe API::Branches do
delete api("/projects/#{project.id}/repository/branches/foobar", user)
expect(response).to have_http_status(404)
end
-
- it "removes protected branch" do
- create(:protected_branch, project: project, name: branch_name)
- delete api("/projects/#{project.id}/repository/branches/#{branch_name}", user)
- expect(response).to have_http_status(405)
- expect(json_response['message']).to eq('Protected branch cant be removed')
- end
-
- it "does not remove HEAD branch" do
- delete api("/projects/#{project.id}/repository/branches/master", user)
- expect(response).to have_http_status(405)
- expect(json_response['message']).to eq('Cannot remove HEAD branch')
- end
end
describe "DELETE /projects/:id/repository/merged_branches" do
diff --git a/spec/requests/api/commit_statuses_spec.rb b/spec/requests/api/commit_statuses_spec.rb
index 1233cdc64c4..1c163cee152 100644
--- a/spec/requests/api/commit_statuses_spec.rb
+++ b/spec/requests/api/commit_statuses_spec.rb
@@ -26,8 +26,8 @@ describe API::CommitStatuses do
create(:commit_status, { pipeline: commit, ref: commit.ref }.merge(opts))
end
- let!(:status1) { create_status(master, status: 'running') }
- let!(:status2) { create_status(master, name: 'coverage', status: 'pending') }
+ let!(:status1) { create_status(master, status: 'running', retried: true) }
+ let!(:status2) { create_status(master, name: 'coverage', status: 'pending', retried: true) }
let!(:status3) { create_status(develop, status: 'running', allow_failure: true) }
let!(:status4) { create_status(master, name: 'coverage', status: 'success') }
let!(:status5) { create_status(develop, name: 'coverage', status: 'success') }
diff --git a/spec/requests/api/files_spec.rb b/spec/requests/api/files_spec.rb
index fa28047d49c..deb2cac6869 100644
--- a/spec/requests/api/files_spec.rb
+++ b/spec/requests/api/files_spec.rb
@@ -329,7 +329,7 @@ describe API::Files do
end
let(:get_params) do
{
- ref: 'master',
+ ref: 'master'
}
end
diff --git a/spec/requests/api/groups_spec.rb b/spec/requests/api/groups_spec.rb
index 3e27a3bee77..90b36374ded 100644
--- a/spec/requests/api/groups_spec.rb
+++ b/spec/requests/api/groups_spec.rb
@@ -73,7 +73,7 @@ describe API::Groups do
storage_size: 702,
repository_size: 123,
lfs_objects_size: 234,
- build_artifacts_size: 345,
+ build_artifacts_size: 345
}.stringify_keys
exposed_attributes = attributes.dup
exposed_attributes['job_artifacts_size'] = exposed_attributes.delete('build_artifacts_size')
@@ -178,7 +178,7 @@ describe API::Groups do
expect(json_response['path']).to eq(group1.path)
expect(json_response['description']).to eq(group1.description)
expect(json_response['visibility']).to eq(Gitlab::VisibilityLevel.string_level(group1.visibility_level))
- expect(json_response['avatar_url']).to eq(group1.avatar_url)
+ expect(json_response['avatar_url']).to eq(group1.avatar_url(only_path: false))
expect(json_response['web_url']).to eq(group1.web_url)
expect(json_response['request_access_enabled']).to eq(group1.request_access_enabled)
expect(json_response['full_name']).to eq(group1.full_name)
diff --git a/spec/requests/api/issues_spec.rb b/spec/requests/api/issues_spec.rb
index da2b56c040b..79cac721202 100644
--- a/spec/requests/api/issues_spec.rb
+++ b/spec/requests/api/issues_spec.rb
@@ -1124,7 +1124,7 @@ describe API::Issues do
end
context 'CE restrictions' do
- it 'updates an issue with several assignee but only one has been applied' do
+ it 'updates an issue with several assignees but only one has been applied' do
put api("/projects/#{project.id}/issues/#{issue.iid}", user),
assignee_ids: [user2.id, guest.id]
diff --git a/spec/requests/api/project_hooks_spec.rb b/spec/requests/api/project_hooks_spec.rb
index aee0e17a153..0f9330b062d 100644
--- a/spec/requests/api/project_hooks_spec.rb
+++ b/spec/requests/api/project_hooks_spec.rb
@@ -60,7 +60,7 @@ describe API::ProjectHooks, 'ProjectHooks' do
expect(json_response['merge_requests_events']).to eq(hook.merge_requests_events)
expect(json_response['tag_push_events']).to eq(hook.tag_push_events)
expect(json_response['note_events']).to eq(hook.note_events)
- expect(json_response['job_events']).to eq(hook.build_events)
+ expect(json_response['job_events']).to eq(hook.job_events)
expect(json_response['pipeline_events']).to eq(hook.pipeline_events)
expect(json_response['wiki_page_events']).to eq(hook.wiki_page_events)
expect(json_response['enable_ssl_verification']).to eq(hook.enable_ssl_verification)
@@ -148,7 +148,7 @@ describe API::ProjectHooks, 'ProjectHooks' do
expect(json_response['merge_requests_events']).to eq(hook.merge_requests_events)
expect(json_response['tag_push_events']).to eq(hook.tag_push_events)
expect(json_response['note_events']).to eq(hook.note_events)
- expect(json_response['job_events']).to eq(hook.build_events)
+ expect(json_response['job_events']).to eq(hook.job_events)
expect(json_response['pipeline_events']).to eq(hook.pipeline_events)
expect(json_response['wiki_page_events']).to eq(hook.wiki_page_events)
expect(json_response['enable_ssl_verification']).to eq(hook.enable_ssl_verification)
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index ab70ce5cd2f..d5c3b5b34ad 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -661,7 +661,7 @@ describe API::Projects do
'name' => user.namespace.name,
'path' => user.namespace.path,
'kind' => user.namespace.kind,
- 'full_path' => user.namespace.full_path,
+ 'full_path' => user.namespace.full_path
})
end
diff --git a/spec/requests/api/system_hooks_spec.rb b/spec/requests/api/system_hooks_spec.rb
index c7b84173570..2eb191d6049 100644
--- a/spec/requests/api/system_hooks_spec.rb
+++ b/spec/requests/api/system_hooks_spec.rb
@@ -32,8 +32,9 @@ describe API::SystemHooks do
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first['url']).to eq(hook.url)
- expect(json_response.first['push_events']).to be true
+ expect(json_response.first['push_events']).to be false
expect(json_response.first['tag_push_events']).to be false
+ expect(json_response.first['repository_update_events']).to be true
end
end
end
diff --git a/spec/requests/api/v3/branches_spec.rb b/spec/requests/api/v3/branches_spec.rb
index 72f8fbe71fb..c88f7788697 100644
--- a/spec/requests/api/v3/branches_spec.rb
+++ b/spec/requests/api/v3/branches_spec.rb
@@ -47,19 +47,6 @@ describe API::V3::Branches do
delete v3_api("/projects/#{project.id}/repository/branches/foobar", user)
expect(response).to have_http_status(404)
end
-
- it "removes protected branch" do
- create(:protected_branch, project: project, name: branch_name)
- delete v3_api("/projects/#{project.id}/repository/branches/#{branch_name}", user)
- expect(response).to have_http_status(405)
- expect(json_response['message']).to eq('Protected branch cant be removed')
- end
-
- it "does not remove HEAD branch" do
- delete v3_api("/projects/#{project.id}/repository/branches/master", user)
- expect(response).to have_http_status(405)
- expect(json_response['message']).to eq('Cannot remove HEAD branch')
- end
end
describe "DELETE /projects/:id/repository/merged_branches" do
diff --git a/spec/requests/api/v3/files_spec.rb b/spec/requests/api/v3/files_spec.rb
index 5bcbb441979..378ca1720ff 100644
--- a/spec/requests/api/v3/files_spec.rb
+++ b/spec/requests/api/v3/files_spec.rb
@@ -53,7 +53,7 @@ describe API::V3::Files do
let(:params) do
{
file_path: 'app/models/application.rb',
- ref: 'master',
+ ref: 'master'
}
end
@@ -263,7 +263,7 @@ describe API::V3::Files do
let(:get_params) do
{
file_path: file_path,
- ref: 'master',
+ ref: 'master'
}
end
diff --git a/spec/requests/api/v3/groups_spec.rb b/spec/requests/api/v3/groups_spec.rb
index 2862580cc70..bc261b5e07c 100644
--- a/spec/requests/api/v3/groups_spec.rb
+++ b/spec/requests/api/v3/groups_spec.rb
@@ -69,7 +69,7 @@ describe API::V3::Groups do
storage_size: 702,
repository_size: 123,
lfs_objects_size: 234,
- build_artifacts_size: 345,
+ build_artifacts_size: 345
}.stringify_keys
project1.statistics.update!(attributes)
@@ -176,7 +176,7 @@ describe API::V3::Groups do
expect(json_response['path']).to eq(group1.path)
expect(json_response['description']).to eq(group1.description)
expect(json_response['visibility_level']).to eq(group1.visibility_level)
- expect(json_response['avatar_url']).to eq(group1.avatar_url)
+ expect(json_response['avatar_url']).to eq(group1.avatar_url(only_path: false))
expect(json_response['web_url']).to eq(group1.web_url)
expect(json_response['request_access_enabled']).to eq(group1.request_access_enabled)
expect(json_response['full_name']).to eq(group1.full_name)
diff --git a/spec/requests/api/v3/project_hooks_spec.rb b/spec/requests/api/v3/project_hooks_spec.rb
index a3a4c77d09d..1969d1c7f2b 100644
--- a/spec/requests/api/v3/project_hooks_spec.rb
+++ b/spec/requests/api/v3/project_hooks_spec.rb
@@ -58,7 +58,7 @@ describe API::ProjectHooks, 'ProjectHooks' do
expect(json_response['merge_requests_events']).to eq(hook.merge_requests_events)
expect(json_response['tag_push_events']).to eq(hook.tag_push_events)
expect(json_response['note_events']).to eq(hook.note_events)
- expect(json_response['build_events']).to eq(hook.build_events)
+ expect(json_response['build_events']).to eq(hook.job_events)
expect(json_response['pipeline_events']).to eq(hook.pipeline_events)
expect(json_response['wiki_page_events']).to eq(hook.wiki_page_events)
expect(json_response['enable_ssl_verification']).to eq(hook.enable_ssl_verification)
@@ -143,7 +143,7 @@ describe API::ProjectHooks, 'ProjectHooks' do
expect(json_response['merge_requests_events']).to eq(hook.merge_requests_events)
expect(json_response['tag_push_events']).to eq(hook.tag_push_events)
expect(json_response['note_events']).to eq(hook.note_events)
- expect(json_response['build_events']).to eq(hook.build_events)
+ expect(json_response['build_events']).to eq(hook.job_events)
expect(json_response['pipeline_events']).to eq(hook.pipeline_events)
expect(json_response['wiki_page_events']).to eq(hook.wiki_page_events)
expect(json_response['enable_ssl_verification']).to eq(hook.enable_ssl_verification)
diff --git a/spec/requests/api/v3/projects_spec.rb b/spec/requests/api/v3/projects_spec.rb
index e15b90d7a9e..dc7c3d125b1 100644
--- a/spec/requests/api/v3/projects_spec.rb
+++ b/spec/requests/api/v3/projects_spec.rb
@@ -227,7 +227,7 @@ describe API::V3::Projects do
storage_size: 702,
repository_size: 123,
lfs_objects_size: 234,
- build_artifacts_size: 345,
+ build_artifacts_size: 345
}
project4.statistics.update!(attributes)
@@ -706,7 +706,7 @@ describe API::V3::Projects do
'name' => user.namespace.name,
'path' => user.namespace.path,
'kind' => user.namespace.kind,
- 'full_path' => user.namespace.full_path,
+ 'full_path' => user.namespace.full_path
})
end
diff --git a/spec/requests/api/v3/system_hooks_spec.rb b/spec/requests/api/v3/system_hooks_spec.rb
index 72c7d14b8ba..ae427541abb 100644
--- a/spec/requests/api/v3/system_hooks_spec.rb
+++ b/spec/requests/api/v3/system_hooks_spec.rb
@@ -31,8 +31,9 @@ describe API::V3::SystemHooks do
expect(response).to have_http_status(200)
expect(json_response).to be_an Array
expect(json_response.first['url']).to eq(hook.url)
- expect(json_response.first['push_events']).to be true
+ expect(json_response.first['push_events']).to be false
expect(json_response.first['tag_push_events']).to be false
+ expect(json_response.first['repository_update_events']).to be true
end
end
end
diff --git a/spec/requests/ci/api/builds_spec.rb b/spec/requests/ci/api/builds_spec.rb
index 108f73bb965..286de277ae7 100644
--- a/spec/requests/ci/api/builds_spec.rb
+++ b/spec/requests/ci/api/builds_spec.rb
@@ -185,7 +185,7 @@ describe Ci::API::Builds do
{ "key" => "CI_PIPELINE_TRIGGERED", "value" => "true", "public" => true },
{ "key" => "DB_NAME", "value" => "postgres", "public" => true },
{ "key" => "SECRET_KEY", "value" => "secret_value", "public" => false },
- { "key" => "TRIGGER_KEY_1", "value" => "TRIGGER_VALUE_1", "public" => false },
+ { "key" => "TRIGGER_KEY_1", "value" => "TRIGGER_VALUE_1", "public" => false }
)
end
end
diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb
index 5d495bc9e7d..0c9b4121adf 100644
--- a/spec/requests/lfs_http_spec.rb
+++ b/spec/requests/lfs_http_spec.rb
@@ -425,7 +425,7 @@ describe 'Git LFS API and storage' do
'size' => sample_size,
'error' => {
'code' => 404,
- 'message' => "Object does not exist on the server or you don't have permissions to access it",
+ 'message' => "Object does not exist on the server or you don't have permissions to access it"
}
}
]
@@ -456,7 +456,7 @@ describe 'Git LFS API and storage' do
'size' => 1575078,
'error' => {
'code' => 404,
- 'message' => "Object does not exist on the server or you don't have permissions to access it",
+ 'message' => "Object does not exist on the server or you don't have permissions to access it"
}
}
]
@@ -493,7 +493,7 @@ describe 'Git LFS API and storage' do
'size' => 1575078,
'error' => {
'code' => 404,
- 'message' => "Object does not exist on the server or you don't have permissions to access it",
+ 'message' => "Object does not exist on the server or you don't have permissions to access it"
}
},
{
diff --git a/spec/requests/openid_connect_spec.rb b/spec/requests/openid_connect_spec.rb
index a4f85c22943..05176c3beaa 100644
--- a/spec/requests/openid_connect_spec.rb
+++ b/spec/requests/openid_connect_spec.rb
@@ -61,7 +61,7 @@ describe 'OpenID Connect requests' do
email: private_email.email,
public_email: public_email.email,
website_url: 'https://example.com',
- avatar: fixture_file_upload(Rails.root + "spec/fixtures/dk.png"),
+ avatar: fixture_file_upload(Rails.root + "spec/fixtures/dk.png")
)
end
@@ -79,7 +79,7 @@ describe 'OpenID Connect requests' do
'email_verified' => true,
'website' => 'https://example.com',
'profile' => 'http://localhost/alice',
- 'picture' => "http://localhost/uploads/user/avatar/#{user.id}/dk.png",
+ 'picture' => "http://localhost/uploads/user/avatar/#{user.id}/dk.png"
})
end
end
@@ -98,7 +98,7 @@ describe 'OpenID Connect requests' do
expect(@payload['sub']).to eq hashed_subject
end
- it 'includes the time of the last authentication' do
+ it 'includes the time of the last authentication', :redis do
expect(@payload['auth_time']).to eq user.current_sign_in_at.to_i
end
diff --git a/spec/requests/projects/cycle_analytics_events_spec.rb b/spec/requests/projects/cycle_analytics_events_spec.rb
index 33940f70b1c..d92daa345b3 100644
--- a/spec/requests/projects/cycle_analytics_events_spec.rb
+++ b/spec/requests/projects/cycle_analytics_events_spec.rb
@@ -9,8 +9,6 @@ describe 'cycle analytics events', api: true do
before do
project.team << [user, :developer]
- allow_any_instance_of(Gitlab::ReferenceExtractor).to receive(:issues).and_return([issue])
-
3.times do |count|
Timecop.freeze(Time.now + count.days) do
create_cycle
@@ -121,9 +119,10 @@ describe 'cycle analytics events', api: true do
def create_cycle
milestone = create(:milestone, project: project)
issue.update(milestone: milestone)
- mr = create_merge_request_closing_issue(issue)
+ mr = create_merge_request_closing_issue(issue, commit_message: "References #{issue.to_reference}")
pipeline = create(:ci_empty_pipeline, status: 'created', project: project, ref: mr.source_branch, sha: mr.source_branch_sha)
+ mr.update(head_pipeline_id: pipeline.id)
pipeline.run
create(:ci_build, pipeline: pipeline, status: :success, author: user)
diff --git a/spec/routing/project_routing_spec.rb b/spec/routing/project_routing_spec.rb
index 50e96d56191..d5400bbaaf1 100644
--- a/spec/routing/project_routing_spec.rb
+++ b/spec/routing/project_routing_spec.rb
@@ -243,7 +243,6 @@ describe 'project routing' do
# diffs_namespace_project_merge_request GET /:namespace_id/:project_id/merge_requests/:id/diffs(.:format) projects/merge_requests#diffs
# commits_namespace_project_merge_request GET /:namespace_id/:project_id/merge_requests/:id/commits(.:format) projects/merge_requests#commits
# merge_namespace_project_merge_request POST /:namespace_id/:project_id/merge_requests/:id/merge(.:format) projects/merge_requests#merge
- # merge_check_namespace_project_merge_request GET /:namespace_id/:project_id/merge_requests/:id/merge_check(.:format) projects/merge_requests#merge_check
# ci_status_namespace_project_merge_request GET /:namespace_id/:project_id/merge_requests/:id/ci_status(.:format) projects/merge_requests#ci_status
# toggle_subscription_namespace_project_merge_request POST /:namespace_id/:project_id/merge_requests/:id/toggle_subscription(.:format) projects/merge_requests#toggle_subscription
# branch_from_namespace_project_merge_requests GET /:namespace_id/:project_id/merge_requests/branch_from(.:format) projects/merge_requests#branch_from
@@ -272,10 +271,6 @@ describe 'project routing' do
)
end
- it 'to #merge_check' do
- expect(get('/gitlab/gitlabhq/merge_requests/1/merge_check')).to route_to('projects/merge_requests#merge_check', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1')
- end
-
it 'to #branch_from' do
expect(get('/gitlab/gitlabhq/merge_requests/branch_from')).to route_to('projects/merge_requests#branch_from', namespace_id: 'gitlab', project_id: 'gitlabhq')
end
diff --git a/spec/routing/routing_spec.rb b/spec/routing/routing_spec.rb
index 9f6defe1450..abacc50a371 100644
--- a/spec/routing/routing_spec.rb
+++ b/spec/routing/routing_spec.rb
@@ -249,17 +249,34 @@ describe RootController, 'routing' do
end
end
-# new_user_session GET /users/sign_in(.:format) devise/sessions#new
-# user_session POST /users/sign_in(.:format) devise/sessions#create
-# destroy_user_session DELETE /users/sign_out(.:format) devise/sessions#destroy
-# user_omniauth_authorize /users/auth/:provider(.:format) omniauth_callbacks#passthru
-# user_omniauth_callback /users/auth/:action/callback(.:format) omniauth_callbacks#(?-mix:(?!))
-# user_password POST /users/password(.:format) devise/passwords#create
-# new_user_password GET /users/password/new(.:format) devise/passwords#new
-# edit_user_password GET /users/password/edit(.:format) devise/passwords#edit
-# PUT /users/password(.:format) devise/passwords#update
describe "Authentication", "routing" do
- # pending
+ it "GET /users/sign_in" do
+ expect(get("/users/sign_in")).to route_to('sessions#new')
+ end
+
+ it "POST /users/sign_in" do
+ expect(post("/users/sign_in")).to route_to('sessions#create')
+ end
+
+ it "DELETE /users/sign_out" do
+ expect(delete("/users/sign_out")).to route_to('sessions#destroy')
+ end
+
+ it "POST /users/password" do
+ expect(post("/users/password")).to route_to('passwords#create')
+ end
+
+ it "GET /users/password/new" do
+ expect(get("/users/password/new")).to route_to('passwords#new')
+ end
+
+ it "GET /users/password/edit" do
+ expect(get("/users/password/edit")).to route_to('passwords#edit')
+ end
+
+ it "PUT /users/password" do
+ expect(put("/users/password")).to route_to('passwords#update')
+ end
end
describe "Groups", "routing" do
diff --git a/spec/serializers/analytics_issue_entity_spec.rb b/spec/serializers/analytics_issue_entity_spec.rb
index 68086216ba9..75d606d5eb3 100644
--- a/spec/serializers/analytics_issue_entity_spec.rb
+++ b/spec/serializers/analytics_issue_entity_spec.rb
@@ -9,7 +9,7 @@ describe AnalyticsIssueEntity do
iid: "1",
id: "1",
created_at: "2016-11-12 15:04:02.948604",
- author: user,
+ author: user
}
end
diff --git a/spec/serializers/analytics_issue_serializer_spec.rb b/spec/serializers/analytics_issue_serializer_spec.rb
index ba24cf8e481..7c14c198a74 100644
--- a/spec/serializers/analytics_issue_serializer_spec.rb
+++ b/spec/serializers/analytics_issue_serializer_spec.rb
@@ -16,7 +16,7 @@ describe AnalyticsIssueSerializer do
iid: "1",
id: "1",
created_at: "2016-11-12 15:04:02.948604",
- author: user,
+ author: user
}
end
diff --git a/spec/serializers/build_entity_spec.rb b/spec/serializers/build_entity_spec.rb
index 897a28b7305..b5eb84ae43b 100644
--- a/spec/serializers/build_entity_spec.rb
+++ b/spec/serializers/build_entity_spec.rb
@@ -6,7 +6,7 @@ describe BuildEntity do
let(:request) { double('request') }
before do
- allow(request).to receive(:user).and_return(user)
+ allow(request).to receive(:current_user).and_return(user)
end
let(:entity) do
diff --git a/spec/serializers/build_serializer_spec.rb b/spec/serializers/build_serializer_spec.rb
index 7f1abecfafe..01e2cfed6f8 100644
--- a/spec/serializers/build_serializer_spec.rb
+++ b/spec/serializers/build_serializer_spec.rb
@@ -4,7 +4,7 @@ describe BuildSerializer do
let(:user) { create(:user) }
let(:serializer) do
- described_class.new(user: user)
+ described_class.new(current_user: user)
end
subject { serializer.represent(resource) }
diff --git a/spec/serializers/deployment_entity_spec.rb b/spec/serializers/deployment_entity_spec.rb
index 69355bcde42..522c92ce295 100644
--- a/spec/serializers/deployment_entity_spec.rb
+++ b/spec/serializers/deployment_entity_spec.rb
@@ -8,7 +8,7 @@ describe DeploymentEntity do
subject { entity.as_json }
before do
- allow(request).to receive(:user).and_return(user)
+ allow(request).to receive(:current_user).and_return(user)
end
it 'exposes internal deployment id' do
diff --git a/spec/serializers/environment_serializer_spec.rb b/spec/serializers/environment_serializer_spec.rb
index 1909e6385b5..d2ad6c44702 100644
--- a/spec/serializers/environment_serializer_spec.rb
+++ b/spec/serializers/environment_serializer_spec.rb
@@ -6,7 +6,7 @@ describe EnvironmentSerializer do
let(:json) do
described_class
- .new(user: user, project: project)
+ .new(current_user: user, project: project)
.represent(resource)
end
diff --git a/spec/serializers/event_entity_spec.rb b/spec/serializers/event_entity_spec.rb
new file mode 100644
index 00000000000..bb54597c967
--- /dev/null
+++ b/spec/serializers/event_entity_spec.rb
@@ -0,0 +1,13 @@
+require 'spec_helper'
+
+describe EventEntity do
+ subject { described_class.represent(create(:event)).as_json }
+
+ it 'exposes author' do
+ expect(subject).to include(:author)
+ end
+
+ it 'exposes core elements of event' do
+ expect(subject).to include(:updated_at)
+ end
+end
diff --git a/spec/serializers/merge_request_basic_serializer_spec.rb b/spec/serializers/merge_request_basic_serializer_spec.rb
new file mode 100644
index 00000000000..4daf5a59d0c
--- /dev/null
+++ b/spec/serializers/merge_request_basic_serializer_spec.rb
@@ -0,0 +1,12 @@
+require 'spec_helper'
+
+describe MergeRequestBasicSerializer do
+ let(:resource) { create(:merge_request) }
+ let(:user) { create(:user) }
+
+ subject { described_class.new.represent(resource) }
+
+ it 'has important MergeRequest attributes' do
+ expect(subject).to include(:merge_status)
+ end
+end
diff --git a/spec/serializers/merge_request_entity_spec.rb b/spec/serializers/merge_request_entity_spec.rb
new file mode 100644
index 00000000000..b75c73e78c2
--- /dev/null
+++ b/spec/serializers/merge_request_entity_spec.rb
@@ -0,0 +1,145 @@
+require 'spec_helper'
+
+describe MergeRequestEntity do
+ let(:project) { create :empty_project }
+ let(:resource) { create(:merge_request, source_project: project, target_project: project) }
+ let(:user) { create(:user) }
+
+ let(:request) { double('request', current_user: user) }
+
+ subject do
+ described_class.new(resource, request: request).as_json
+ end
+
+ it 'includes author' do
+ req = double('request')
+
+ author_payload = UserEntity
+ .represent(resource.author, request: req)
+ .as_json
+
+ expect(subject[:author]).to eq(author_payload)
+ end
+
+ it 'includes pipeline' do
+ req = double('request', current_user: user)
+ pipeline = build_stubbed(:ci_pipeline)
+ allow(resource).to receive(:head_pipeline).and_return(pipeline)
+
+ pipeline_payload = PipelineEntity
+ .represent(pipeline, request: req)
+ .as_json
+
+ expect(subject[:pipeline]).to eq(pipeline_payload)
+ end
+
+ it 'includes issues_links' do
+ issues_links = subject[:issues_links]
+
+ expect(issues_links).to include(:closing, :mentioned_but_not_closing,
+ :assign_to_closing)
+ end
+
+ it 'has important MergeRequest attributes' do
+ expect(subject).to include(:diff_head_sha, :merge_commit_message,
+ :has_conflicts, :has_ci, :merge_path,
+ :conflict_resolution_path,
+ :cancel_merge_when_pipeline_succeeds_path,
+ :create_issue_to_resolve_discussions_path,
+ :source_branch_path, :target_branch_commits_path,
+ :commits_count)
+ end
+
+ it 'has email_patches_path' do
+ expect(subject[:email_patches_path])
+ .to eq("/#{resource.project.full_path}/merge_requests/#{resource.iid}.patch")
+ end
+
+ it 'has plain_diff_path' do
+ expect(subject[:plain_diff_path])
+ .to eq("/#{resource.project.full_path}/merge_requests/#{resource.iid}.diff")
+ end
+
+ it 'has merge_commit_message_with_description' do
+ expect(subject[:merge_commit_message_with_description])
+ .to eq(resource.merge_commit_message(include_description: true))
+ end
+
+ describe 'new_blob_path' do
+ context 'when user can push to project' do
+ it 'returns path' do
+ project.add_developer(user)
+
+ expect(subject[:new_blob_path])
+ .to eq("/#{resource.project.full_path}/new/#{resource.source_branch}")
+ end
+ end
+
+ context 'when user cannot push to project' do
+ it 'returns nil' do
+ expect(subject[:new_blob_path]).to be_nil
+ end
+ end
+ end
+
+ describe 'diff_head_sha' do
+ before do
+ allow(resource).to receive(:diff_head_sha) { 'sha' }
+ end
+
+ context 'when no diff head commit' do
+ it 'returns nil' do
+ allow(resource).to receive(:diff_head_commit) { nil }
+
+ expect(subject[:diff_head_sha]).to be_nil
+ end
+ end
+
+ context 'when diff head commit present' do
+ it 'returns diff head commit short id' do
+ allow(resource).to receive(:diff_head_commit) { double }
+
+ expect(subject[:diff_head_sha]).to eq('sha')
+ end
+ end
+ end
+
+ it 'includes merge_event' do
+ create(:event, :merged, author: user, project: resource.project, target: resource)
+
+ expect(subject[:merge_event]).to include(:author, :updated_at)
+ end
+
+ it 'includes closed_event' do
+ create(:event, :closed, author: user, project: resource.project, target: resource)
+
+ expect(subject[:closed_event]).to include(:author, :updated_at)
+ end
+
+ describe 'diverged_commits_count' do
+ context 'when MR is open and diverging' do
+ it 'returns diverged commits count' do
+ allow(resource).to receive_messages(open?: true, diverged_from_target_branch?: true,
+ diverged_commits_count: 10)
+
+ expect(subject[:diverged_commits_count]).to eq(10)
+ end
+ end
+
+ context 'when MR is not open' do
+ it 'returns 0' do
+ allow(resource).to receive_messages(open?: false)
+
+ expect(subject[:diverged_commits_count]).to be_zero
+ end
+ end
+
+ context 'when MR is not diverging' do
+ it 'returns 0' do
+ allow(resource).to receive_messages(open?: true, diverged_from_target_branch?: false)
+
+ expect(subject[:diverged_commits_count]).to be_zero
+ end
+ end
+ end
+end
diff --git a/spec/serializers/merge_request_serializer_spec.rb b/spec/serializers/merge_request_serializer_spec.rb
new file mode 100644
index 00000000000..73fbecc153d
--- /dev/null
+++ b/spec/serializers/merge_request_serializer_spec.rb
@@ -0,0 +1,37 @@
+require 'spec_helper'
+
+describe MergeRequestSerializer do
+ let(:user) { build_stubbed(:user) }
+ let(:merge_request) { build_stubbed(:merge_request) }
+
+ let(:serializer) do
+ described_class.new(current_user: user)
+ end
+
+ describe '#represent' do
+ let(:opts) { { basic: basic } }
+ subject { serializer.represent(merge_request, basic: basic) }
+
+ context 'when basic param is truthy' do
+ let(:basic) { true }
+
+ it 'calls superclass #represent with correct params' do
+ expect_any_instance_of(BaseSerializer).to receive(:represent)
+ .with(merge_request, opts, MergeRequestBasicEntity)
+
+ subject
+ end
+ end
+
+ context 'when basic param is falsy' do
+ let(:basic) { false }
+
+ it 'calls superclass #represent with correct params' do
+ expect_any_instance_of(BaseSerializer).to receive(:represent)
+ .with(merge_request, opts, MergeRequestEntity)
+
+ subject
+ end
+ end
+ end
+end
diff --git a/spec/serializers/pipeline_entity_spec.rb b/spec/serializers/pipeline_entity_spec.rb
index 93d5a21419d..d2482ac434b 100644
--- a/spec/serializers/pipeline_entity_spec.rb
+++ b/spec/serializers/pipeline_entity_spec.rb
@@ -5,7 +5,7 @@ describe PipelineEntity do
let(:request) { double('request') }
before do
- allow(request).to receive(:user).and_return(user)
+ allow(request).to receive(:current_user).and_return(user)
end
let(:entity) do
@@ -19,7 +19,7 @@ describe PipelineEntity do
let(:pipeline) { create(:ci_empty_pipeline) }
it 'contains required fields' do
- expect(subject).to include :id, :user, :path
+ expect(subject).to include :id, :user, :path, :coverage
expect(subject).to include :ref, :commit
expect(subject).to include :updated_at, :created_at
end
diff --git a/spec/serializers/pipeline_serializer_spec.rb b/spec/serializers/pipeline_serializer_spec.rb
index ecde45a6d44..f2426db6d81 100644
--- a/spec/serializers/pipeline_serializer_spec.rb
+++ b/spec/serializers/pipeline_serializer_spec.rb
@@ -4,7 +4,7 @@ describe PipelineSerializer do
let(:user) { create(:user) }
let(:serializer) do
- described_class.new(user: user)
+ described_class.new(current_user: user)
end
subject { serializer.represent(resource) }
@@ -44,7 +44,7 @@ describe PipelineSerializer do
end
let(:serializer) do
- described_class.new(user: user)
+ described_class.new(current_user: user)
.with_pagination(request, response)
end
@@ -113,7 +113,7 @@ describe PipelineSerializer do
it "verifies number of queries" do
recorded = ActiveRecord::QueryRecorder.new { subject }
- expect(recorded.count).to be_within(1).of(50)
+ expect(recorded.count).to be_within(1).of(58)
expect(recorded.cached_count).to eq(0)
end
diff --git a/spec/serializers/stage_entity_spec.rb b/spec/serializers/stage_entity_spec.rb
index 0412b2d7741..64b3217b809 100644
--- a/spec/serializers/stage_entity_spec.rb
+++ b/spec/serializers/stage_entity_spec.rb
@@ -14,7 +14,7 @@ describe StageEntity do
end
before do
- allow(request).to receive(:user).and_return(user)
+ allow(request).to receive(:current_user).and_return(user)
create(:ci_build, :success, pipeline: pipeline)
end
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index fa5014cee07..b536103ed65 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -27,12 +27,50 @@ describe Ci::CreatePipelineService, services: true do
)
end
- it { expect(pipeline).to be_kind_of(Ci::Pipeline) }
- it { expect(pipeline).to be_valid }
- it { expect(pipeline).to eq(project.pipelines.last) }
- it { expect(pipeline).to have_attributes(user: user) }
- it { expect(pipeline).to have_attributes(status: 'pending') }
- it { expect(pipeline.builds.first).to be_kind_of(Ci::Build) }
+ it 'creates a pipeline' do
+ expect(pipeline).to be_kind_of(Ci::Pipeline)
+ expect(pipeline).to be_valid
+ expect(pipeline).to eq(project.pipelines.last)
+ expect(pipeline).to have_attributes(user: user)
+ expect(pipeline).to have_attributes(status: 'pending')
+ expect(pipeline.builds.first).to be_kind_of(Ci::Build)
+ end
+
+ context '#update_merge_requests_head_pipeline' do
+ it 'updates head pipeline of each merge request' do
+ merge_request_1 = create(:merge_request, source_branch: 'master', target_branch: "branch_1", source_project: project)
+ merge_request_2 = create(:merge_request, source_branch: 'master', target_branch: "branch_2", source_project: project)
+
+ head_pipeline = pipeline
+
+ expect(merge_request_1.reload.head_pipeline).to eq(head_pipeline)
+ expect(merge_request_2.reload.head_pipeline).to eq(head_pipeline)
+ end
+
+ context 'when there is no pipeline for source branch' do
+ it "does not update merge request head pipeline" do
+ merge_request = create(:merge_request, source_branch: 'other_branch', target_branch: "branch_1", source_project: project)
+
+ head_pipeline = pipeline
+
+ expect(merge_request.reload.head_pipeline).not_to eq(head_pipeline)
+ end
+ end
+
+ context 'when merge request target project is different from source project' do
+ let!(:target_project) { create(:empty_project) }
+ let!(:forked_project_link) { create(:forked_project_link, forked_to_project: project, forked_from_project: target_project) }
+
+ it 'updates head pipeline for merge request' do
+ merge_request =
+ create(:merge_request, source_branch: 'master', target_branch: "branch_1", source_project: project, target_project: target_project)
+
+ head_pipeline = pipeline
+
+ expect(merge_request.reload.head_pipeline).to eq(head_pipeline)
+ end
+ end
+ end
context 'auto-cancel enabled' do
before do
diff --git a/spec/services/ci/process_pipeline_service_spec.rb b/spec/services/ci/process_pipeline_service_spec.rb
index cf773866a6f..fc5de5d069a 100644
--- a/spec/services/ci/process_pipeline_service_spec.rb
+++ b/spec/services/ci/process_pipeline_service_spec.rb
@@ -268,6 +268,24 @@ describe Ci::ProcessPipelineService, '#execute', :services do
end
end
+ context 'when there are only manual actions in stages' do
+ before do
+ create_build('image', stage_idx: 0, when: 'manual', allow_failure: true)
+ create_build('build', stage_idx: 1, when: 'manual', allow_failure: true)
+ create_build('deploy', stage_idx: 2, when: 'manual')
+ create_build('check', stage_idx: 3)
+
+ process_pipeline
+ end
+
+ it 'processes all jobs until blocking actions are encountered' do
+ expect(all_builds_statuses).to eq(%w[manual manual manual created])
+ expect(all_builds_names).to eq(%w[image build deploy check])
+
+ expect(pipeline.reload).to be_blocked
+ end
+ end
+
context 'when blocking manual actions are defined' do
before do
create_build('code:test', stage_idx: 0)
@@ -425,6 +443,21 @@ describe Ci::ProcessPipelineService, '#execute', :services do
end
end
+ context 'updates a list of retried builds' do
+ subject { described_class.retried.order(:id) }
+
+ let!(:build_retried) { create_build('build') }
+ let!(:build) { create_build('build') }
+ let!(:test) { create_build('test') }
+
+ it 'returns unique statuses' do
+ process_pipeline
+
+ expect(all_builds.latest).to contain_exactly(build, test)
+ expect(all_builds.retried).to contain_exactly(build_retried)
+ end
+ end
+
def process_pipeline
described_class.new(pipeline.project, user).execute(pipeline)
end
@@ -441,6 +474,10 @@ describe Ci::ProcessPipelineService, '#execute', :services do
builds.pluck(:name)
end
+ def all_builds_names
+ all_builds.pluck(:name)
+ end
+
def builds_statuses
builds.pluck(:status)
end
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index b2d37657770..7254e6b357a 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -22,7 +22,7 @@ describe Ci::RetryBuildService, :services do
%i[type lock_version target_url base_tags
commit_id deployments erased_by_id last_deployment project_id
runner_id tag_taggings taggings tags trigger_request_id
- user_id auto_canceled_by_id].freeze
+ user_id auto_canceled_by_id retried].freeze
shared_examples 'build duplication' do
let(:build) do
@@ -115,7 +115,7 @@ describe Ci::RetryBuildService, :services do
end
describe '#reprocess' do
- let(:new_build) { service.reprocess(build) }
+ let(:new_build) { service.reprocess!(build) }
context 'when user has ability to execute build' do
before do
@@ -131,11 +131,16 @@ describe Ci::RetryBuildService, :services do
it 'does not enqueue the new build' do
expect(new_build).to be_created
end
+
+ it 'marks the old build as retried' do
+ expect(new_build).to be_latest
+ expect(build.reload).to be_retried
+ end
end
context 'when user does not have ability to execute build' do
it 'raises an error' do
- expect { service.reprocess(build) }
+ expect { service.reprocess!(build) }
.to raise_error Gitlab::Access::AccessDeniedError
end
end
diff --git a/spec/services/ci/retry_pipeline_service_spec.rb b/spec/services/ci/retry_pipeline_service_spec.rb
index 40e151545c9..d941d56c0d8 100644
--- a/spec/services/ci/retry_pipeline_service_spec.rb
+++ b/spec/services/ci/retry_pipeline_service_spec.rb
@@ -13,7 +13,7 @@ describe Ci::RetryPipelineService, '#execute', :services do
context 'when there are already retried jobs present' do
before do
- create_build('rspec', :canceled, 0)
+ create_build('rspec', :canceled, 0, retried: true)
create_build('rspec', :failed, 0)
end
diff --git a/spec/services/cohorts_service_spec.rb b/spec/services/cohorts_service_spec.rb
index 1e99442fdcb..77595d7ba2d 100644
--- a/spec/services/cohorts_service_spec.rb
+++ b/spec/services/cohorts_service_spec.rb
@@ -89,7 +89,7 @@ describe CohortsService do
activity_months: [{ total: 2, percentage: 100 }],
total: 2,
inactive: 1
- },
+ }
]
expect(described_class.new.execute).to eq(months_included: 12,
diff --git a/spec/services/create_deployment_service_spec.rb b/spec/services/create_deployment_service_spec.rb
index a883705bd45..f35d7a33548 100644
--- a/spec/services/create_deployment_service_spec.rb
+++ b/spec/services/create_deployment_service_spec.rb
@@ -255,7 +255,7 @@ describe CreateDeploymentService, services: true do
environment: 'production',
ref: 'master',
tag: false,
- sha: '97de212e80737a608d939f648d959671fb0a0142b',
+ sha: '97de212e80737a608d939f648d959671fb0a0142b'
}
end
diff --git a/spec/services/delete_merged_branches_service_spec.rb b/spec/services/delete_merged_branches_service_spec.rb
index 7b921f606f8..cae74df9c90 100644
--- a/spec/services/delete_merged_branches_service_spec.rb
+++ b/spec/services/delete_merged_branches_service_spec.rb
@@ -6,33 +6,22 @@ describe DeleteMergedBranchesService, services: true do
let(:project) { create(:project, :repository) }
context '#execute' do
- context 'unprotected branches' do
- before do
- service.execute
- end
+ it 'deletes a branch that was merged' do
+ service.execute
- it 'deletes a branch that was merged' do
- expect(project.repository.branch_names).not_to include('improve/awesome')
- end
+ expect(project.repository.branch_names).not_to include('improve/awesome')
+ end
- it 'keeps branch that is unmerged' do
- expect(project.repository.branch_names).to include('feature')
- end
+ it 'keeps branch that is unmerged' do
+ service.execute
- it 'keeps "master"' do
- expect(project.repository.branch_names).to include('master')
- end
+ expect(project.repository.branch_names).to include('feature')
end
- context 'protected branches' do
- before do
- create(:protected_branch, name: 'improve/awesome', project: project)
- service.execute
- end
+ it 'keeps "master"' do
+ service.execute
- it 'keeps protected branch' do
- expect(project.repository.branch_names).to include('improve/awesome')
- end
+ expect(project.repository.branch_names).to include('master')
end
context 'user without rights' do
diff --git a/spec/services/git_push_service_spec.rb b/spec/services/git_push_service_spec.rb
index 0477cac6677..ab06f45dbb9 100644
--- a/spec/services/git_push_service_spec.rb
+++ b/spec/services/git_push_service_spec.rb
@@ -584,7 +584,7 @@ describe GitPushService, services: true do
commit = double(:commit)
diff = double(:diff, new_path: 'README.md')
- expect(commit).to receive(:raw_diffs).with(deltas_only: true).
+ expect(commit).to receive(:raw_deltas).
and_return([diff])
service.push_commits = [commit]
@@ -622,12 +622,21 @@ describe GitPushService, services: true do
it 'only schedules a limited number of commits' do
allow(service).to receive(:push_commits).
- and_return(Array.new(1000, double(:commit, to_hash: {})))
+ and_return(Array.new(1000, double(:commit, to_hash: {}, matches_cross_reference_regex?: true)))
expect(ProcessCommitWorker).to receive(:perform_async).exactly(100).times
service.process_commit_messages
end
+
+ it "skips commits which don't include cross-references" do
+ allow(service).to receive(:push_commits).
+ and_return([double(:commit, to_hash: {}, matches_cross_reference_regex?: false)])
+
+ expect(ProcessCommitWorker).not_to receive(:perform_async)
+
+ service.process_commit_messages
+ end
end
def execute_service(project, user, oldrev, newrev, ref)
diff --git a/spec/services/issuable/bulk_update_service_spec.rb b/spec/services/issuable/bulk_update_service_spec.rb
index 5b1639ca0d6..6437d00e451 100644
--- a/spec/services/issuable/bulk_update_service_spec.rb
+++ b/spec/services/issuable/bulk_update_service_spec.rb
@@ -62,7 +62,7 @@ describe Issuable::BulkUpdateService, services: true do
expect(result[:count]).to eq(1)
end
- it 'updates the assignee to the use ID passed' do
+ it 'updates the assignee to the user ID passed' do
assignee = create(:user)
project.team << [assignee, :developer]
@@ -100,7 +100,7 @@ describe Issuable::BulkUpdateService, services: true do
expect(result[:count]).to eq(1)
end
- it 'updates the assignee to the use ID passed' do
+ it 'updates the assignee to the user ID passed' do
assignee = create(:user)
project.team << [assignee, :developer]
expect { bulk_update(issue, assignee_ids: [assignee.id]) }
@@ -163,7 +163,7 @@ describe Issuable::BulkUpdateService, services: true do
{
label_ids: labels.map(&:id),
add_label_ids: add_labels.map(&:id),
- remove_label_ids: remove_labels.map(&:id),
+ remove_label_ids: remove_labels.map(&:id)
}
end
diff --git a/spec/services/issues/build_service_spec.rb b/spec/services/issues/build_service_spec.rb
index 55d635235b0..bed25fe7ccf 100644
--- a/spec/services/issues/build_service_spec.rb
+++ b/spec/services/issues/build_service_spec.rb
@@ -136,7 +136,7 @@ describe Issues::BuildService, services: true do
user,
title: 'Issue #1',
description: 'Issue description',
- milestone_id: milestone.id,
+ milestone_id: milestone.id
).execute
expect(issue.title).to eq('Issue #1')
diff --git a/spec/services/issues/close_service_spec.rb b/spec/services/issues/close_service_spec.rb
index 51840531711..0a1f41719f7 100644
--- a/spec/services/issues/close_service_spec.rb
+++ b/spec/services/issues/close_service_spec.rb
@@ -51,8 +51,10 @@ describe Issues::CloseService, services: true do
end
end
- it { expect(issue).to be_valid }
- it { expect(issue).to be_closed }
+ it 'closes the issue' do
+ expect(issue).to be_valid
+ expect(issue).to be_closed
+ end
it 'sends email to user2 about assign of new issue' do
email = ActionMailer::Base.deliveries.last
@@ -96,9 +98,11 @@ describe Issues::CloseService, services: true do
described_class.new(project, user).close_issue(issue)
end
- it { expect(issue).to be_valid }
- it { expect(issue).to be_opened }
- it { expect(todo.reload).to be_pending }
+ it 'does not close the issue' do
+ expect(issue).to be_valid
+ expect(issue).to be_opened
+ expect(todo.reload).to be_pending
+ end
end
end
end
diff --git a/spec/services/issues/create_service_spec.rb b/spec/services/issues/create_service_spec.rb
index 01edc46496d..dab1a3469f7 100644
--- a/spec/services/issues/create_service_spec.rb
+++ b/spec/services/issues/create_service_spec.rb
@@ -118,6 +118,22 @@ describe Issues::CreateService, services: true do
end
end
+ context 'when assignee is set' do
+ let(:opts) do
+ { title: 'Title',
+ description: 'Description',
+ assignees: [assignee] }
+ end
+
+ it 'invalidates open issues counter for assignees when issue is assigned' do
+ project.team << [assignee, :master]
+
+ described_class.new(project, user, opts).execute
+
+ expect(assignee.assigned_open_issues_count).to eq 1
+ end
+ end
+
it 'executes issue hooks when issue is not confidential' do
opts = { title: 'Title', description: 'Description', confidential: false }
diff --git a/spec/services/issues/resolve_discussions_spec.rb b/spec/services/issues/resolve_discussions_spec.rb
index c3b4c2176ee..86f218dec12 100644
--- a/spec/services/issues/resolve_discussions_spec.rb
+++ b/spec/services/issues/resolve_discussions_spec.rb
@@ -77,7 +77,7 @@ describe Issues::ResolveDiscussions, services: true do
_second_discussion = Discussion.new([create(:diff_note_on_merge_request, :resolved,
noteable: merge_request,
project: merge_request.target_project,
- line_number: 15,
+ line_number: 15
)])
service = DummyService.new(
project,
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index 1954d8739f6..5184c1d5f19 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -59,6 +59,13 @@ describe Issues::UpdateService, services: true do
expect(issue.due_date).to eq Date.tomorrow
end
+ it 'updates open issue counter for assignees when issue is reassigned' do
+ update_issue(assignee_ids: [user2.id])
+
+ expect(user3.assigned_open_issues_count).to eq 0
+ expect(user2.assigned_open_issues_count).to eq 1
+ end
+
it 'sorts issues as specified by parameters' do
issue1 = create(:issue, project: project, assignees: [user3])
issue2 = create(:issue, project: project, assignees: [user3])
diff --git a/spec/services/members/authorized_destroy_service_spec.rb b/spec/services/members/authorized_destroy_service_spec.rb
index ab440d18e9f..8a6732faa19 100644
--- a/spec/services/members/authorized_destroy_service_spec.rb
+++ b/spec/services/members/authorized_destroy_service_spec.rb
@@ -10,6 +10,27 @@ describe Members::AuthorizedDestroyService, services: true do
Issue.assigned_to(user).count + MergeRequest.assigned_to(user).count
end
+ context 'Invited users' do
+ # Regression spec for issue: https://gitlab.com/gitlab-org/gitlab-ce/issues/32504
+ it 'destroys invited project member' do
+ project.team << [member_user, :developer]
+
+ member = create :project_member, :invited, project: project
+
+ expect { described_class.new(member, member_user).execute }
+ .to change { Member.count }.from(2).to(1)
+ end
+
+ it 'destroys invited group member' do
+ group.add_developer(member_user)
+
+ member = create :group_member, :invited, group: group
+
+ expect { described_class.new(member, member_user).execute }
+ .to change { Member.count }.from(2).to(1)
+ end
+ end
+
context 'Group member' do
it "unassigns issues and merge requests" do
group.add_developer(member_user)
diff --git a/spec/services/merge_requests/conflicts/list_service_spec.rb b/spec/services/merge_requests/conflicts/list_service_spec.rb
new file mode 100644
index 00000000000..23982b9e6e1
--- /dev/null
+++ b/spec/services/merge_requests/conflicts/list_service_spec.rb
@@ -0,0 +1,80 @@
+require 'spec_helper'
+
+describe MergeRequests::Conflicts::ListService do
+ describe '#can_be_resolved_in_ui?' do
+ def create_merge_request(source_branch)
+ create(:merge_request, source_branch: source_branch, target_branch: 'conflict-start') do |mr|
+ mr.mark_as_unmergeable
+ end
+ end
+
+ def conflicts_service(merge_request)
+ described_class.new(merge_request)
+ end
+
+ it 'returns a falsey value when the MR can be merged without conflicts' do
+ merge_request = create_merge_request('master')
+ merge_request.mark_as_mergeable
+
+ expect(conflicts_service(merge_request).can_be_resolved_in_ui?).to be_falsey
+ end
+
+ it 'returns a falsey value when the MR is marked as having conflicts, but has none' do
+ merge_request = create_merge_request('master')
+
+ expect(conflicts_service(merge_request).can_be_resolved_in_ui?).to be_falsey
+ end
+
+ it 'returns a falsey value when one of the MR branches is missing' do
+ merge_request = create_merge_request('conflict-resolvable')
+ merge_request.project.repository.rm_branch(merge_request.author, 'conflict-resolvable')
+
+ expect(conflicts_service(merge_request).can_be_resolved_in_ui?).to be_falsey
+ end
+
+ it 'returns a falsey value when the MR has a missing ref after a force push' do
+ merge_request = create_merge_request('conflict-resolvable')
+ service = conflicts_service(merge_request)
+ allow(service.conflicts).to receive(:merge_index).and_raise(Rugged::OdbError)
+
+ expect(service.can_be_resolved_in_ui?).to be_falsey
+ end
+
+ it 'returns a falsey value when the MR does not support new diff notes' do
+ merge_request = create_merge_request('conflict-resolvable')
+ merge_request.merge_request_diff.update_attributes(start_commit_sha: nil)
+
+ expect(conflicts_service(merge_request).can_be_resolved_in_ui?).to be_falsey
+ end
+
+ it 'returns a falsey value when the conflicts contain a large file' do
+ merge_request = create_merge_request('conflict-too-large')
+
+ expect(conflicts_service(merge_request).can_be_resolved_in_ui?).to be_falsey
+ end
+
+ it 'returns a falsey value when the conflicts contain a binary file' do
+ merge_request = create_merge_request('conflict-binary-file')
+
+ expect(conflicts_service(merge_request).can_be_resolved_in_ui?).to be_falsey
+ end
+
+ it 'returns a falsey value when the conflicts contain a file edited in one branch and deleted in another' do
+ merge_request = create_merge_request('conflict-missing-side')
+
+ expect(conflicts_service(merge_request).can_be_resolved_in_ui?).to be_falsey
+ end
+
+ it 'returns a truthy value when the conflicts are resolvable in the UI' do
+ merge_request = create_merge_request('conflict-resolvable')
+
+ expect(conflicts_service(merge_request).can_be_resolved_in_ui?).to be_truthy
+ end
+
+ it 'returns a truthy value when the conflicts have to be resolved in an editor' do
+ merge_request = create_merge_request('conflict-contains-conflict-markers')
+
+ expect(conflicts_service(merge_request).can_be_resolved_in_ui?).to be_truthy
+ end
+ end
+end
diff --git a/spec/services/merge_requests/resolve_service_spec.rb b/spec/services/merge_requests/conflicts/resolve_service_spec.rb
index 3afd6b92900..19e8d5cc5f1 100644
--- a/spec/services/merge_requests/resolve_service_spec.rb
+++ b/spec/services/merge_requests/conflicts/resolve_service_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe MergeRequests::ResolveService do
+describe MergeRequests::Conflicts::ResolveService do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
@@ -24,6 +24,8 @@ describe MergeRequests::ResolveService do
end
describe '#execute' do
+ let(:service) { described_class.new(merge_request) }
+
context 'with section params' do
let(:params) do
{
@@ -50,7 +52,7 @@ describe MergeRequests::ResolveService do
context 'when the source and target project are the same' do
before do
- described_class.new(project, user, params).execute(merge_request)
+ service.execute(user, params)
end
it 'creates a commit with the message' do
@@ -74,15 +76,26 @@ describe MergeRequests::ResolveService do
branch_name: 'conflict-start')
end
- before do
- described_class.new(fork_project, user, params).execute(merge_request_from_fork)
+ def resolve_conflicts
+ described_class.new(merge_request_from_fork).execute(user, params)
+ end
+
+ it 'gets conflicts from the source project' do
+ expect(fork_project.repository.rugged).to receive(:merge_commits).and_call_original
+ expect(project.repository.rugged).not_to receive(:merge_commits)
+
+ resolve_conflicts
end
it 'creates a commit with the message' do
+ resolve_conflicts
+
expect(merge_request_from_fork.source_branch_head.message).to eq(params[:commit_message])
end
it 'creates a commit with the correct parents' do
+ resolve_conflicts
+
expect(merge_request_from_fork.source_branch_head.parents.map(&:id)).
to eq(['404fa3fc7c2c9b5dacff102f353bdf55b1be2813',
target_head])
@@ -115,7 +128,7 @@ describe MergeRequests::ResolveService do
end
before do
- described_class.new(project, user, params).execute(merge_request)
+ service.execute(user, params)
end
it 'creates a commit with the message' do
@@ -154,15 +167,15 @@ describe MergeRequests::ResolveService do
}
end
- let(:service) { described_class.new(project, user, invalid_params) }
-
it 'raises a MissingResolution error' do
- expect { service.execute(merge_request) }.
+ expect { service.execute(user, invalid_params) }.
to raise_error(Gitlab::Conflict::File::MissingResolution)
end
end
context 'when the content of a file is unchanged' do
+ let(:list_service) { MergeRequests::Conflicts::ListService.new(merge_request) }
+
let(:invalid_params) do
{
files: [
@@ -173,17 +186,15 @@ describe MergeRequests::ResolveService do
}, {
old_path: 'files/ruby/regex.rb',
new_path: 'files/ruby/regex.rb',
- content: merge_request.conflicts.file_for_path('files/ruby/regex.rb', 'files/ruby/regex.rb').content
+ content: list_service.conflicts.file_for_path('files/ruby/regex.rb', 'files/ruby/regex.rb').content
}
],
commit_message: 'This is a commit message!'
}
end
- let(:service) { described_class.new(project, user, invalid_params) }
-
it 'raises a MissingResolution error' do
- expect { service.execute(merge_request) }.
+ expect { service.execute(user, invalid_params) }.
to raise_error(Gitlab::Conflict::File::MissingResolution)
end
end
@@ -202,11 +213,9 @@ describe MergeRequests::ResolveService do
}
end
- let(:service) { described_class.new(project, user, invalid_params) }
-
it 'raises a MissingFiles error' do
- expect { service.execute(merge_request) }.
- to raise_error(MergeRequests::ResolveService::MissingFiles)
+ expect { service.execute(user, invalid_params) }.
+ to raise_error(described_class::MissingFiles)
end
end
end
diff --git a/spec/services/merge_requests/create_service_spec.rb b/spec/services/merge_requests/create_service_spec.rb
index ace82380cc9..b70e9d534a4 100644
--- a/spec/services/merge_requests/create_service_spec.rb
+++ b/spec/services/merge_requests/create_service_spec.rb
@@ -27,10 +27,12 @@ describe MergeRequests::CreateService, services: true do
@merge_request = service.execute
end
- it { expect(@merge_request).to be_valid }
- it { expect(@merge_request.title).to eq('Awesome merge_request') }
- it { expect(@merge_request.assignee).to be_nil }
- it { expect(@merge_request.merge_params['force_remove_source_branch']).to eq('1') }
+ it 'creates an MR' do
+ expect(@merge_request).to be_valid
+ expect(@merge_request.title).to eq('Awesome merge_request')
+ expect(@merge_request.assignee).to be_nil
+ expect(@merge_request.merge_params['force_remove_source_branch']).to eq('1')
+ end
it 'executes hooks with default action' do
expect(service).to have_received(:execute_hooks).with(@merge_request)
@@ -144,6 +146,26 @@ describe MergeRequests::CreateService, services: true do
expect(merge_request.assignee).to eq(assignee)
end
+ context 'when assignee is set' do
+ let(:opts) do
+ {
+ title: 'Title',
+ description: 'Description',
+ assignee_id: assignee.id,
+ source_branch: 'feature',
+ target_branch: 'master'
+ }
+ end
+
+ it 'invalidates open merge request counter for assignees when merge request is assigned' do
+ project.team << [assignee, :master]
+
+ described_class.new(project, user, opts).execute
+
+ expect(assignee.assigned_open_merge_requests_count).to eq 1
+ end
+ end
+
context "when issuable feature is private" do
before do
project.project_feature.update(issues_access_level: ProjectFeature::PRIVATE,
diff --git a/spec/services/merge_requests/merge_when_pipeline_succeeds_service_spec.rb b/spec/services/merge_requests/merge_when_pipeline_succeeds_service_spec.rb
index 769b3193275..3ef5135e6a3 100644
--- a/spec/services/merge_requests/merge_when_pipeline_succeeds_service_spec.rb
+++ b/spec/services/merge_requests/merge_when_pipeline_succeeds_service_spec.rb
@@ -82,6 +82,10 @@ describe MergeRequests::MergeWhenPipelineSucceedsService do
sha: merge_request_head, status: 'success')
end
+ before do
+ mr_merge_if_green_enabled.update(head_pipeline: triggering_pipeline)
+ end
+
it "merges all merge requests with merge when the pipeline succeeds enabled" do
expect(MergeWorker).to receive(:perform_async)
service.trigger(triggering_pipeline)
@@ -124,6 +128,8 @@ describe MergeRequests::MergeWhenPipelineSucceedsService do
sha: mr_conflict.diff_head_sha, status: 'success')
end
+ before { mr_conflict.update(head_pipeline: conflict_pipeline) }
+
it 'does not merge the merge request' do
expect(MergeWorker).not_to receive(:perform_async)
diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb
index 03215a4624a..1f109eab268 100644
--- a/spec/services/merge_requests/refresh_service_spec.rb
+++ b/spec/services/merge_requests/refresh_service_spec.rb
@@ -348,7 +348,7 @@ describe MergeRequests::RefreshService, services: true do
title: 'fixup! Fix issue',
work_in_progress?: true,
to_reference: 'ccccccc'
- ),
+ )
])
refresh_service.execute(@oldrev, @newrev, 'refs/heads/wip')
reload_mrs
diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb
index 31487c0f794..860a7798857 100644
--- a/spec/services/merge_requests/update_service_spec.rb
+++ b/spec/services/merge_requests/update_service_spec.rb
@@ -59,14 +59,16 @@ describe MergeRequests::UpdateService, services: true do
end
end
- it { expect(@merge_request).to be_valid }
- it { expect(@merge_request.title).to eq('New title') }
- it { expect(@merge_request.assignee).to eq(user2) }
- it { expect(@merge_request).to be_closed }
- it { expect(@merge_request.labels.count).to eq(1) }
- it { expect(@merge_request.labels.first.title).to eq(label.name) }
- it { expect(@merge_request.target_branch).to eq('target') }
- it { expect(@merge_request.merge_params['force_remove_source_branch']).to eq('1') }
+ it 'matches base expectations' do
+ expect(@merge_request).to be_valid
+ expect(@merge_request.title).to eq('New title')
+ expect(@merge_request.assignee).to eq(user2)
+ expect(@merge_request).to be_closed
+ expect(@merge_request.labels.count).to eq(1)
+ expect(@merge_request.labels.first.title).to eq(label.name)
+ expect(@merge_request.target_branch).to eq('target')
+ expect(@merge_request.merge_params['force_remove_source_branch']).to eq('1')
+ end
it 'executes hooks with update action' do
expect(service).to have_received(:execute_hooks).
@@ -148,9 +150,11 @@ describe MergeRequests::UpdateService, services: true do
end
end
- it { expect(@merge_request).to be_valid }
- it { expect(@merge_request.state).to eq('merged') }
- it { expect(@merge_request.merge_error).to be_nil }
+ it 'merges the MR' do
+ expect(@merge_request).to be_valid
+ expect(@merge_request.state).to eq('merged')
+ expect(@merge_request.merge_error).to be_nil
+ end
end
context 'with finished pipeline' do
@@ -167,18 +171,22 @@ describe MergeRequests::UpdateService, services: true do
end
end
- it { expect(@merge_request).to be_valid }
- it { expect(@merge_request.state).to eq('merged') }
+ it 'merges the MR' do
+ expect(@merge_request).to be_valid
+ expect(@merge_request.state).to eq('merged')
+ end
end
context 'with active pipeline' do
before do
service_mock = double
- create(:ci_pipeline_with_one_job,
+ pipeline = create(:ci_pipeline_with_one_job,
project: project,
ref: merge_request.source_branch,
sha: merge_request.diff_head_sha)
+ merge_request.update(head_pipeline: pipeline)
+
expect(MergeRequests::MergeWhenPipelineSucceedsService).to receive(:new).with(project, user).
and_return(service_mock)
expect(service_mock).to receive(:execute).with(merge_request)
@@ -200,8 +208,10 @@ describe MergeRequests::UpdateService, services: true do
end
end
- it { expect(@merge_request.state).to eq('opened') }
- it { expect(@merge_request.merge_error).not_to be_nil }
+ it 'does not merge the MR' do
+ expect(@merge_request.state).to eq('opened')
+ expect(@merge_request.merge_error).not_to be_nil
+ end
end
context 'MR can not be merged when note sha != MR sha' do
@@ -297,6 +307,15 @@ describe MergeRequests::UpdateService, services: true do
end
end
+ context 'when the assignee changes' do
+ it 'updates open merge request counter for assignees when merge request is reassigned' do
+ update_merge_request(assignee_id: user2.id)
+
+ expect(user3.assigned_open_merge_requests_count).to eq 0
+ expect(user2.assigned_open_merge_requests_count).to eq 1
+ end
+ end
+
context 'when the target branch change' do
before do
update_merge_request({ target_branch: 'target' })
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index 74f96b97909..de3bbc6b6a1 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -350,7 +350,7 @@ describe NotificationService, services: true do
create(:note_on_personal_snippet, noteable: snippet, note: 'note', author: @u_participant),
create(:note_on_personal_snippet, noteable: snippet, note: 'note', author: @u_mentioned),
create(:note_on_personal_snippet, noteable: snippet, note: 'note', author: @u_disabled),
- create(:note_on_personal_snippet, noteable: snippet, note: 'note', author: @u_note_author),
+ create(:note_on_personal_snippet, noteable: snippet, note: 'note', author: @u_note_author)
]
end
diff --git a/spec/services/projects/participants_service_spec.rb b/spec/services/projects/participants_service_spec.rb
index 063b3bd76eb..0657b7e93fe 100644
--- a/spec/services/projects/participants_service_spec.rb
+++ b/spec/services/projects/participants_service_spec.rb
@@ -6,7 +6,6 @@ describe Projects::ParticipantsService, services: true do
let(:project) { create(:empty_project, :public) }
let(:group) { create(:group, avatar: fixture_file_upload(Rails.root + 'spec/fixtures/dk.png')) }
let(:user) { create(:user) }
- let(:base_url) { Settings.send(:build_base_gitlab_url) }
let!(:group_member) { create(:group_member, group: group, user: user) }
it 'should return an url for the avatar' do
@@ -14,7 +13,7 @@ describe Projects::ParticipantsService, services: true do
groups = participants.groups
expect(groups.size).to eq 1
- expect(groups.first[:avatar_url]).to eq "#{base_url}/uploads/group/avatar/#{group.id}/dk.png"
+ expect(groups.first[:avatar_url]).to eq("/uploads/group/avatar/#{group.id}/dk.png")
end
it 'should return an url for the avatar with relative url' do
@@ -25,7 +24,7 @@ describe Projects::ParticipantsService, services: true do
groups = participants.groups
expect(groups.size).to eq 1
- expect(groups.first[:avatar_url]).to eq "#{base_url}/gitlab/uploads/group/avatar/#{group.id}/dk.png"
+ expect(groups.first[:avatar_url]).to eq("/gitlab/uploads/group/avatar/#{group.id}/dk.png")
end
end
end
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index 29ccce59c53..b957517c715 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -26,6 +26,7 @@ describe Projects::TransferService, services: true do
it { expect(@result).to eq false }
it { expect(project.namespace).to eq(user.namespace) }
+ it { expect(project.errors.messages[:new_namespace].first).to eq 'Please select a new namespace for your project.' }
end
context 'disallow transfering of project with tags' do
diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb
index 516566eddef..7a9cd7553b1 100644
--- a/spec/services/system_note_service_spec.rb
+++ b/spec/services/system_note_service_spec.rb
@@ -178,7 +178,7 @@ describe SystemNoteService, services: true do
end
it 'builds a correct phrase when assignee removed' do
- expect(build_note([assignee1], [])).to eq 'removed all assignees'
+ expect(build_note([assignee1], [])).to eq 'removed assignee'
end
it 'builds a correct phrase when assignees changed' do
diff --git a/spec/sidekiq/cron/job_gem_dependency_spec.rb b/spec/sidekiq/cron/job_gem_dependency_spec.rb
new file mode 100644
index 00000000000..2e30cf025b0
--- /dev/null
+++ b/spec/sidekiq/cron/job_gem_dependency_spec.rb
@@ -0,0 +1,18 @@
+require 'spec_helper'
+
+describe Sidekiq::Cron::Job do
+ describe 'cron jobs' do
+ context 'when rufus-scheduler depends on ZoTime or EoTime' do
+ before do
+ described_class
+ .create(name: 'TestCronWorker',
+ cron: Settings.cron_jobs[:pipeline_schedule_worker]['cron'],
+ class: Settings.cron_jobs[:pipeline_schedule_worker]['job_class'])
+ end
+
+ it 'does not get "Rufus::Scheduler::ZoTime/EtOrbi::EoTime into an exact number"' do
+ expect { described_class.all.first.should_enque?(Time.now) }.not_to raise_error
+ end
+ end
+ end
+end
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index e2d5928e5b2..a58f4e664b7 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -10,7 +10,7 @@ require 'shoulda/matchers'
require 'rspec/retry'
rspec_profiling_is_configured =
- ENV['RSPEC_PROFILING_POSTGRES_URL'] ||
+ ENV['RSPEC_PROFILING_POSTGRES_URL'].present? ||
ENV['RSPEC_PROFILING']
branch_can_be_profiled =
ENV['GITLAB_DATABASE'] == 'postgresql' &&
diff --git a/spec/support/cycle_analytics_helpers.rb b/spec/support/cycle_analytics_helpers.rb
index 8ad042f5e3b..66545127a44 100644
--- a/spec/support/cycle_analytics_helpers.rb
+++ b/spec/support/cycle_analytics_helpers.rb
@@ -20,7 +20,7 @@ module CycleAnalyticsHelpers
ref: 'refs/heads/master').execute
end
- def create_merge_request_closing_issue(issue, message: nil, source_branch: nil)
+ def create_merge_request_closing_issue(issue, message: nil, source_branch: nil, commit_message: 'commit message')
if !source_branch || project.repository.commit(source_branch).blank?
source_branch = generate(:branch)
project.repository.add_branch(user, source_branch, 'master')
@@ -30,7 +30,7 @@ module CycleAnalyticsHelpers
user,
generate(:branch),
'content',
- message: 'commit message',
+ message: commit_message,
branch_name: source_branch)
project.repository.commit(sha)
diff --git a/spec/support/dropzone_helper.rb b/spec/support/dropzone_helper.rb
index 984ec7d2741..02fdeb08afe 100644
--- a/spec/support/dropzone_helper.rb
+++ b/spec/support/dropzone_helper.rb
@@ -6,32 +6,52 @@ module DropzoneHelper
# Dropzone events to perform the actual upload.
#
# This method waits for the upload to complete before returning.
- def dropzone_file(file_path)
+ # max_file_size is an optional parameter.
+ # If it's not 0, it is used as the dropzone.maxFilesize parameter.
+ # wait_for_queuecomplete is an optional parameter.
+ # If it's false, the helper will NOT wait for the backend response,
+ # which lets us test behaviors while the AJAX request is still in flight.
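+ #
+ # Example (a hypothetical sketch; the fixture path and size are illustrative):
+ #
+ #   dropzone_file(['spec/fixtures/dk.png'], 5, false)
+ #   # attaches the file, caps Dropzone at 5 MB, and returns before 'queuecomplete' fires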
+ def dropzone_file(files, max_file_size = 0, wait_for_queuecomplete = true)
# Generate a fake file input that Capybara can attach to
page.execute_script <<-JS.strip_heredoc
+ $('#fakeFileInput').remove();
var fakeFileInput = window.$('<input/>').attr(
- {id: 'fakeFileInput', type: 'file'}
+ {id: 'fakeFileInput', type: 'file', multiple: true}
).appendTo('body');
window._dropzoneComplete = false;
JS
- # Attach the file to the fake input selector with Capybara
- attach_file('fakeFileInput', file_path)
+ # Attach files to the fake input selector with Capybara
+ attach_file('fakeFileInput', files)
# Manually trigger a Dropzone "drop" event with the fake input's file list
page.execute_script <<-JS.strip_heredoc
- var fileList = [$('#fakeFileInput')[0].files[0]];
- var e = jQuery.Event('drop', { dataTransfer : { files : fileList } });
-
var dropzone = $('.div-dropzone')[0].dropzone;
+ dropzone.options.autoProcessQueue = false;
+
+ if (#{max_file_size} > 0) {
+ dropzone.options.maxFilesize = #{max_file_size};
+ }
+
dropzone.on('queuecomplete', function() {
window._dropzoneComplete = true;
});
- dropzone.listeners[0].events.drop(e);
+
+ var fileList = [$('#fakeFileInput')[0].files];
+
+ $.map(fileList, function(file){
+ var e = jQuery.Event('drop', { dataTransfer : { files : file } });
+
+ dropzone.listeners[0].events.drop(e);
+ });
+
+ dropzone.processQueue();
JS
- # Wait until Dropzone's fired `queuecomplete`
- loop until page.evaluate_script('window._dropzoneComplete === true')
+ if wait_for_queuecomplete
+ # Wait until Dropzone's fired `queuecomplete`
+ loop until page.evaluate_script('window._dropzoneComplete === true')
+ end
end
end
diff --git a/spec/support/filtered_search_helpers.rb b/spec/support/filtered_search_helpers.rb
index 36be0bb6bf8..37cc308e613 100644
--- a/spec/support/filtered_search_helpers.rb
+++ b/spec/support/filtered_search_helpers.rb
@@ -73,11 +73,11 @@ module FilteredSearchHelpers
end
def remove_recent_searches
- execute_script('window.localStorage.removeItem(\'issue-recent-searches\');')
+ execute_script('window.localStorage.clear();')
end
- def set_recent_searches(input)
- execute_script("window.localStorage.setItem('issue-recent-searches', '#{input}');")
+ def set_recent_searches(key, input)
+ execute_script("window.localStorage.setItem('#{key}', '#{input}');")
end
def wait_for_filtered_search(text)
diff --git a/spec/support/generate-seed-repo-rb b/spec/support/generate-seed-repo-rb
new file mode 100755
index 00000000000..7335f74c0e9
--- /dev/null
+++ b/spec/support/generate-seed-repo-rb
@@ -0,0 +1,162 @@
+#!/usr/bin/env ruby
+#
+# # generate-seed-repo-rb
+#
+# This script generates the seed_repo.rb file used by lib/gitlab/git
+# tests. The seed_repo.rb file needs to be updated anytime there is a
+# Git push to https://gitlab.com/gitlab-org/gitlab-git-test.
+#
+# Usage:
+#
+# ./spec/support/generate-seed-repo-rb > spec/support/seed_repo.rb
+#
+#
+
+require 'erb'
+require 'tempfile'
+
+SOURCE = 'https://gitlab.com/gitlab-org/gitlab-git-test.git'.freeze
+SCRIPT_NAME = 'generate-seed-repo-rb'.freeze
+REPO_NAME = 'gitlab-git-test.git'.freeze
+
+def main
+ Dir.mktmpdir do |dir|
+ unless system(*%W[git clone --bare #{SOURCE} #{REPO_NAME}], chdir: dir)
+ abort "git clone failed"
+ end
+ repo = File.join(dir, REPO_NAME)
+ erb = ERB.new(DATA.read)
+ erb.run(binding)
+ end
+end
+
+def capture!(cmd, dir)
+ output = IO.popen(cmd, 'r', chdir: dir) { |io| io.read }
+ raise "command failed with #{$?}: #{cmd.join(' ')}" unless $?.success?
+ output.chomp
+end
+
+main
+
+__END__
+# This file is generated by <%= SCRIPT_NAME %>. Do not edit this file manually.
+#
+# Seed repo:
+<%= capture!(%w{git log --format=#\ %H\ %s}, repo) %>
+
+module SeedRepo
+ module BigCommit
+ ID = "913c66a37b4a45b9769037c55c2d238bd0942d2e".freeze
+ PARENT_ID = "cfe32cf61b73a0d5e9f13e774abde7ff789b1660".freeze
+ MESSAGE = "Files, encoding and much more".freeze
+ AUTHOR_FULL_NAME = "Dmitriy Zaporozhets".freeze
+ FILES_COUNT = 2
+ end
+
+ module Commit
+ ID = "570e7b2abdd848b95f2f578043fc23bd6f6fd24d".freeze
+ PARENT_ID = "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9".freeze
+ MESSAGE = "Change some files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n".freeze
+ AUTHOR_FULL_NAME = "Dmitriy Zaporozhets".freeze
+ FILES = ["files/ruby/popen.rb", "files/ruby/regex.rb"].freeze
+ FILES_COUNT = 2
+ C_FILE_PATH = "files/ruby".freeze
+ C_FILES = ["popen.rb", "regex.rb", "version_info.rb"].freeze
+ BLOB_FILE = %{%h3= @key.title\n%hr\n%pre= @key.key\n.actions\n = link_to 'Remove', @key, :confirm => 'Are you sure?', :method => :delete, :class => \"btn danger delete-key\"\n\n\n}.freeze
+ BLOB_FILE_PATH = "app/views/keys/show.html.haml".freeze
+ end
+
+ module EmptyCommit
+ ID = "b0e52af38d7ea43cf41d8a6f2471351ac036d6c9".freeze
+ PARENT_ID = "40f4a7a617393735a95a0bb67b08385bc1e7c66d".freeze
+ MESSAGE = "Empty commit".freeze
+ AUTHOR_FULL_NAME = "Rémy Coutable".freeze
+ FILES = [].freeze
+ FILES_COUNT = FILES.count
+ end
+
+ module EncodingCommit
+ ID = "40f4a7a617393735a95a0bb67b08385bc1e7c66d".freeze
+ PARENT_ID = "66028349a123e695b589e09a36634d976edcc5e8".freeze
+ MESSAGE = "Add ISO-8859-encoded file".freeze
+ AUTHOR_FULL_NAME = "Stan Hu".freeze
+ FILES = ["encoding/iso8859.txt"].freeze
+ FILES_COUNT = FILES.count
+ end
+
+ module FirstCommit
+ ID = "1a0b36b3cdad1d2ee32457c102a8c0b7056fa863".freeze
+ PARENT_ID = nil
+ MESSAGE = "Initial commit".freeze
+ AUTHOR_FULL_NAME = "Dmitriy Zaporozhets".freeze
+ FILES = ["LICENSE", ".gitignore", "README.md"].freeze
+ FILES_COUNT = 3
+ end
+
+ module LastCommit
+ ID = <%= capture!(%w[git show -s --format=%H HEAD], repo).inspect %>.freeze
+ PARENT_ID = <%= capture!(%w[git show -s --format=%P HEAD], repo).split.last.inspect %>.freeze
+ MESSAGE = <%= capture!(%w[git show -s --format=%s HEAD], repo).inspect %>.freeze
+ AUTHOR_FULL_NAME = <%= capture!(%w[git show -s --format=%an HEAD], repo).inspect %>.freeze
+ FILES = <%=
+ parents = capture!(%w[git show -s --format=%P HEAD], repo).split
+ merge_base = parents.size > 1 ? capture!(%w[git merge-base] + parents, repo) : parents.first
+ capture!( %W[git diff --name-only #{merge_base}..HEAD --], repo).split("\n").inspect
+ %>.freeze
+ FILES_COUNT = FILES.count
+ end
+
+ module Repo
+ HEAD = "master".freeze
+ BRANCHES = %w[
+<%= capture!(%W[git for-each-ref --format=#{' ' * 3}%(refname:strip=2) refs/heads/], repo) %>
+ ].freeze
+ TAGS = %w[
+<%= capture!(%W[git for-each-ref --format=#{' ' * 3}%(refname:strip=2) refs/tags/], repo) %>
+ ].freeze
+ end
+
+ module RubyBlob
+ ID = "7e3e39ebb9b2bf433b4ad17313770fbe4051649c".freeze
+ NAME = "popen.rb".freeze
+ CONTENT = <<-eos.freeze
+require 'fileutils'
+require 'open3'
+
+module Popen
+ extend self
+
+ def popen(cmd, path=nil)
+ unless cmd.is_a?(Array)
+ raise RuntimeError, "System commands must be given as an array of strings"
+ end
+
+ path ||= Dir.pwd
+
+ vars = {
+ "PWD" => path
+ }
+
+ options = {
+ chdir: path
+ }
+
+ unless File.directory?(path)
+ FileUtils.mkdir_p(path)
+ end
+
+ @cmd_output = ""
+ @cmd_status = 0
+
+ Open3.popen3(vars, *cmd, options) do |stdin, stdout, stderr, wait_thr|
+ @cmd_output << stdout.read
+ @cmd_output << stderr.read
+ @cmd_status = wait_thr.value.exitstatus
+ end
+
+ return @cmd_output, @cmd_status
+ end
+end
+ eos
+ end
+end
diff --git a/spec/support/kubernetes_helpers.rb b/spec/support/kubernetes_helpers.rb
index b5ed71ba3be..d2a1ded57ff 100644
--- a/spec/support/kubernetes_helpers.rb
+++ b/spec/support/kubernetes_helpers.rb
@@ -5,7 +5,7 @@ module KubernetesHelpers
{
"kind" => "APIResourceList",
"resources" => [
- { "name" => "pods", "namespaced" => true, "kind" => "Pod" },
+ { "name" => "pods", "namespaced" => true, "kind" => "Pod" }
]
}
end
@@ -22,13 +22,13 @@ module KubernetesHelpers
"metadata" => {
"name" => "kube-pod",
"creationTimestamp" => "2016-11-25T19:55:19Z",
- "labels" => { "app" => app },
+ "labels" => { "app" => app }
},
"spec" => {
"containers" => [
{ "name" => "container-0" },
- { "name" => "container-1" },
- ],
+ { "name" => "container-1" }
+ ]
},
"status" => { "phase" => "Running" }
}
diff --git a/spec/support/matchers/gitaly_matchers.rb b/spec/support/matchers/gitaly_matchers.rb
index 65dbc01f6e4..ed14bcec9f2 100644
--- a/spec/support/matchers/gitaly_matchers.rb
+++ b/spec/support/matchers/gitaly_matchers.rb
@@ -1,3 +1,9 @@
RSpec::Matchers.define :gitaly_request_with_repo_path do |path|
match { |actual| actual.repository.path == path }
end
+
+RSpec::Matchers.define :gitaly_request_with_params do |params|
+ match do |actual|
+ params.reduce(true) { |r, (key, val)| r && actual.send(key) == val }
+ end
+end
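+
+# Hedged usage sketch; the `client` double and RPC name below are illustrative, not part of this change:
+#
+#   expect(client).to receive(:some_rpc)
+#     .with(gitaly_request_with_params(revision: 'master'))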
diff --git a/spec/support/milestone_tabs_examples.rb b/spec/support/milestone_tabs_examples.rb
index c69f8e11008..4ad8b0a16e1 100644
--- a/spec/support/milestone_tabs_examples.rb
+++ b/spec/support/milestone_tabs_examples.rb
@@ -1,7 +1,7 @@
shared_examples 'milestone tabs' do
def go(path, extra_params = {})
params = if milestone.is_a?(GlobalMilestone)
- { group_id: group.id, id: milestone.safe_title, title: milestone.title }
+ { group_id: group.to_param, id: milestone.safe_title, title: milestone.title }
else
{ namespace_id: project.namespace.to_param, project_id: project, id: milestone.iid }
end
diff --git a/spec/support/prometheus_helpers.rb b/spec/support/prometheus_helpers.rb
index a204365431b..6b9ebcf2bb3 100644
--- a/spec/support/prometheus_helpers.rb
+++ b/spec/support/prometheus_helpers.rb
@@ -1,10 +1,16 @@
module PrometheusHelpers
def prometheus_memory_query(environment_slug)
- %{(sum(container_memory_usage_bytes{container_name!="POD",environment="#{environment_slug}"}) / count(container_memory_usage_bytes{container_name!="POD",environment="#{environment_slug}"})) /1024/1024}
+ %{avg(container_memory_usage_bytes{container_name!="POD",environment="#{environment_slug}"}) / 2^20}
end
def prometheus_cpu_query(environment_slug)
- %{sum(rate(container_cpu_usage_seconds_total{container_name!="POD",environment="#{environment_slug}"}[2m])) / count(container_cpu_usage_seconds_total{container_name!="POD",environment="#{environment_slug}"}) * 100}
+ %{avg(rate(container_cpu_usage_seconds_total{container_name!="POD",environment="#{environment_slug}"}[2m])) * 100}
+ end
+
+ def prometheus_ping_url(prometheus_query)
+ query = { query: prometheus_query }.to_query
+
+ "https://prometheus.example.com/api/v1/query?#{query}"
end
def prometheus_query_url(prometheus_query)
@@ -13,11 +19,17 @@ module PrometheusHelpers
"https://prometheus.example.com/api/v1/query?#{query}"
end
- def prometheus_query_range_url(prometheus_query, start: 8.hours.ago)
+ def prometheus_query_with_time_url(prometheus_query, time)
+ query = { query: prometheus_query, time: time.to_f }.to_query
+
+ "https://prometheus.example.com/api/v1/query?#{query}"
+ end
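+
+ # Minimal usage sketch for prometheus_query_with_time_url (environment slug is illustrative):
+ #
+ #   prometheus_query_with_time_url(prometheus_cpu_query('production'), Time.now.utc)
+ #   # => "https://prometheus.example.com/api/v1/query?query=...&time=..."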
+
+ def prometheus_query_range_url(prometheus_query, start: 8.hours.ago, stop: Time.now.to_f)
query = {
query: prometheus_query,
start: start.to_f,
- end: Time.now.utc.to_f,
+ end: stop,
step: 1.minute.to_i
}.to_query
@@ -39,7 +51,12 @@ module PrometheusHelpers
def stub_all_prometheus_requests(environment_slug, body: nil, status: 200)
stub_prometheus_request(
- prometheus_query_url(prometheus_memory_query(environment_slug)),
+ prometheus_query_with_time_url(prometheus_memory_query(environment_slug), Time.now.utc),
+ status: status,
+ body: body || prometheus_value_body
+ )
+ stub_prometheus_request(
+ prometheus_query_with_time_url(prometheus_memory_query(environment_slug), 8.hours.ago),
status: status,
body: body || prometheus_value_body
)
@@ -49,7 +66,12 @@ module PrometheusHelpers
body: body || prometheus_values_body
)
stub_prometheus_request(
- prometheus_query_url(prometheus_cpu_query(environment_slug)),
+ prometheus_query_with_time_url(prometheus_cpu_query(environment_slug), Time.now.utc),
+ status: status,
+ body: body || prometheus_value_body
+ )
+ stub_prometheus_request(
+ prometheus_query_with_time_url(prometheus_cpu_query(environment_slug), 8.hours.ago),
status: status,
body: body || prometheus_value_body
)
diff --git a/spec/features/protected_branches/access_control_ce_spec.rb b/spec/support/protected_branches/access_control_ce_shared_examples.rb
index d30e7947106..7fda4ade665 100644
--- a/spec/features/protected_branches/access_control_ce_spec.rb
+++ b/spec/support/protected_branches/access_control_ce_shared_examples.rb
@@ -31,7 +31,7 @@ RSpec.shared_examples "protected branches > access control > CE" do
within(".protected-branches-list") do
find(".js-allowed-to-push").click
-
+
within('.js-allowed-to-push-container') do
expect(first("li")).to have_content("Roles")
click_on access_type_name
diff --git a/spec/features/protected_tags/access_control_ce_spec.rb b/spec/support/protected_tags/access_control_ce_shared_examples.rb
index a04fbcdd15f..12622cd548a 100644
--- a/spec/features/protected_tags/access_control_ce_spec.rb
+++ b/spec/support/protected_tags/access_control_ce_shared_examples.rb
@@ -9,7 +9,7 @@ RSpec.shared_examples "protected tags > access control > CE" do
allowed_to_create_button = find(".js-allowed-to-create")
unless allowed_to_create_button.text == access_type_name
- allowed_to_create_button.click
+ allowed_to_create_button.trigger('click')
find('.create_access_levels-container .dropdown-menu li', match: :first)
within('.create_access_levels-container .dropdown-menu') { click_on access_type_name }
end
diff --git a/spec/support/repo_helpers.rb b/spec/support/repo_helpers.rb
index e9d5c7b12ae..3c6956cf5e0 100644
--- a/spec/support/repo_helpers.rb
+++ b/spec/support/repo_helpers.rb
@@ -92,11 +92,11 @@ eos
changes = [
{
line_code: 'a5cc2925ca8258af241be7e5b0381edf30266302_20_20',
- file_path: '.gitignore',
+ file_path: '.gitignore'
},
{
line_code: '7445606fbf8f3683cd42bdc54b05d7a0bc2dfc44_4_6',
- file_path: '.gitmodules',
+ file_path: '.gitmodules'
}
]
diff --git a/spec/support/seed_repo.rb b/spec/support/seed_repo.rb
index 99a500bbbb1..cfe7fc980a8 100644
--- a/spec/support/seed_repo.rb
+++ b/spec/support/seed_repo.rb
@@ -1,4 +1,8 @@
+# This file is generated by generate-seed-repo-rb. Do not edit this file manually.
+#
# Seed repo:
+# 4b4918a572fa86f9771e5ba40fbd48e1eb03e2c6 Merge branch 'master' into 'master'
+# 0e1b353b348f8477bdbec1ef47087171c5032cd9 adds an executable with different permissions
# 0e50ec4d3c7ce42ab74dda1d422cb2cbffe1e326 Merge branch 'lfs_pointers' into 'master'
# 33bcff41c232a11727ac6d660bd4b0c2ba86d63d Add valid and invalid lfs pointers
# 732401c65e924df81435deb12891ef570167d2e2 Update year in license file
@@ -94,7 +98,12 @@ module SeedRepo
master
merge-test
].freeze
- TAGS = %w[v1.0.0 v1.1.0 v1.2.0 v1.2.1].freeze
+ TAGS = %w[
+ v1.0.0
+ v1.1.0
+ v1.2.0
+ v1.2.1
+ ].freeze
end
module RubyBlob
diff --git a/spec/support/slack_mattermost_notifications_shared_examples.rb b/spec/support/slack_mattermost_notifications_shared_examples.rb
index b902fe90707..7e35ebb6c97 100644
--- a/spec/support/slack_mattermost_notifications_shared_examples.rb
+++ b/spec/support/slack_mattermost_notifications_shared_examples.rb
@@ -328,7 +328,7 @@ RSpec.shared_examples 'slack or mattermost notifications' do
context 'only notify for the default branch' do
context 'when enabled' do
let(:pipeline) do
- create(:ci_pipeline, project: project, status: 'failed', ref: 'not-the-default-branch')
+ create(:ci_pipeline, :failed, project: project, ref: 'not-the-default-branch')
end
before do
@@ -342,6 +342,18 @@ RSpec.shared_examples 'slack or mattermost notifications' do
expect(result).to be_falsy
end
end
+
+ context 'when disabled' do
+ let(:pipeline) do
+ create(:ci_pipeline, :failed, project: project, ref: 'not-the-default-branch')
+ end
+
+ before do
+ chat_service.notify_only_default_branch = false
+ end
+
+ it_behaves_like 'call Slack/Mattermost API'
+ end
end
end
end
diff --git a/spec/support/test_env.rb b/spec/support/test_env.rb
index 8e31c26591b..b168098edea 100644
--- a/spec/support/test_env.rb
+++ b/spec/support/test_env.rb
@@ -40,8 +40,8 @@ module TestEnv
'wip' => 'b9238ee',
'csv' => '3dd0896',
'v1.1.0' => 'b83d6e3',
- 'add-ipython-files' => '6d85bb69',
- 'add-pdf-file' => 'e774ebd3'
+ 'add-ipython-files' => '6d85bb6',
+ 'add-pdf-file' => 'e774ebd'
}.freeze
# gitlab-test-fork is a fork of gitlab-fork, but we don't necessarily
@@ -120,7 +120,7 @@ module TestEnv
end
def setup_gitaly
- socket_path = Gitlab::GitalyClient.get_address('default').sub(/\Aunix:/, '')
+ socket_path = Gitlab::GitalyClient.address('default').sub(/\Aunix:/, '')
gitaly_dir = File.dirname(socket_path)
unless File.directory?(gitaly_dir) || system('rake', "gitlab:gitaly:install[#{gitaly_dir}]")
@@ -133,7 +133,8 @@ module TestEnv
def start_gitaly(gitaly_dir)
gitaly_exec = File.join(gitaly_dir, 'gitaly')
gitaly_config = File.join(gitaly_dir, 'config.toml')
- @gitaly_pid = spawn(gitaly_exec, gitaly_config, [:out, :err] => '/dev/null')
+ log_file = Rails.root.join('log/gitaly-test.log').to_s
+ @gitaly_pid = spawn(gitaly_exec, gitaly_config, [:out, :err] => log_file)
end
def stop_gitaly
@@ -154,14 +155,14 @@ module TestEnv
FORKED_BRANCH_SHA)
end
- def setup_repo(repo_path, repo_path_bare, repo_name, branch_sha)
+ def setup_repo(repo_path, repo_path_bare, repo_name, refs)
clone_url = "https://gitlab.com/gitlab-org/#{repo_name}.git"
unless File.directory?(repo_path)
system(*%W(#{Gitlab.config.git.bin_path} clone -q #{clone_url} #{repo_path}))
end
- set_repo_refs(repo_path, branch_sha)
+ set_repo_refs(repo_path, refs)
unless File.directory?(repo_path_bare)
# We must copy bare repositories because we will push to them.
@@ -169,13 +170,12 @@ module TestEnv
end
end
- def copy_repo(project)
- base_repo_path = File.expand_path(factory_repo_path_bare)
+ def copy_repo(project, bare_repo:, refs:)
target_repo_path = File.expand_path(project.repository_storage_path + "/#{project.full_path}.git")
FileUtils.mkdir_p(target_repo_path)
- FileUtils.cp_r("#{base_repo_path}/.", target_repo_path)
+ FileUtils.cp_r("#{File.expand_path(bare_repo)}/.", target_repo_path)
FileUtils.chmod_R 0755, target_repo_path
- set_repo_refs(target_repo_path, BRANCH_SHA)
+ set_repo_refs(target_repo_path, refs)
end
def repos_path
@@ -190,15 +190,6 @@ module TestEnv
Gitlab.config.pages.path
end
- def copy_forked_repo_with_submodules(project)
- base_repo_path = File.expand_path(forked_repo_path_bare)
- target_repo_path = File.expand_path(project.repository_storage_path + "/#{project.full_path}.git")
- FileUtils.mkdir_p(target_repo_path)
- FileUtils.cp_r("#{base_repo_path}/.", target_repo_path)
- FileUtils.chmod_R 0755, target_repo_path
- set_repo_refs(target_repo_path, FORKED_BRANCH_SHA)
- end
-
# When no cached assets exist, manually hit the root path to create them
#
# Otherwise they'd be created by the first test, often timing out and
@@ -210,16 +201,20 @@ module TestEnv
Capybara.current_session.visit '/'
end
+ def factory_repo_path_bare
+ "#{factory_repo_path}_bare"
+ end
+
+ def forked_repo_path_bare
+ "#{forked_repo_path}_bare"
+ end
+
private
def factory_repo_path
@factory_repo_path ||= Rails.root.join('tmp', 'tests', factory_repo_name)
end
- def factory_repo_path_bare
- "#{factory_repo_path}_bare"
- end
-
def factory_repo_name
'gitlab-test'
end
@@ -228,10 +223,6 @@ module TestEnv
@forked_repo_path ||= Rails.root.join('tmp', 'tests', forked_repo_name)
end
- def forked_repo_path_bare
- "#{forked_repo_path}_bare"
- end
-
def forked_repo_name
'gitlab-test-fork'
end
@@ -243,19 +234,22 @@ module TestEnv
end
def set_repo_refs(repo_path, branch_sha)
- instructions = branch_sha.map {|branch, sha| "update refs/heads/#{branch}\x00#{sha}\x00" }.join("\x00") << "\x00"
+ instructions = branch_sha.map { |branch, sha| "update refs/heads/#{branch}\x00#{sha}\x00" }.join("\x00") << "\x00"
update_refs = %W(#{Gitlab.config.git.bin_path} update-ref --stdin -z)
reset = proc do
- IO.popen(update_refs, "w") {|io| io.write(instructions) }
- $?.success?
+ Dir.chdir(repo_path) do
+ IO.popen(update_refs, "w") { |io| io.write(instructions) }
+ $?.success?
+ end
end
- Dir.chdir(repo_path) do
- # Try to reset without fetching to avoid using the network.
- unless reset.call
- raise 'Could not fetch test seed repository.' unless system(*%W(#{Gitlab.config.git.bin_path} fetch origin))
- raise 'The fetched test seed does not contain the required revision.' unless reset.call
- end
+ # Try to reset without fetching to avoid using the network.
+ unless reset.call
+ raise 'Could not fetch test seed repository.' unless system(*%W(#{Gitlab.config.git.bin_path} -C #{repo_path} fetch origin))
+
+ # Before we used Git clone's --mirror option, bare repos could end up
+ # with missing refs; clearing them and retrying should fix the issue.
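+ # `cleanup` and `init` are assumed to be TestEnv helpers defined elsewhere in this module that wipe and re-seed the test repositories.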
+ cleanup && init unless reset.call
end
end
end
diff --git a/spec/support/wait_for_requests.rb b/spec/support/wait_for_requests.rb
index 73da23391ee..d41e83ae128 100644
--- a/spec/support/wait_for_requests.rb
+++ b/spec/support/wait_for_requests.rb
@@ -1,15 +1,16 @@
require_relative './wait_for_ajax'
+require_relative './wait_for_vue_resource'
module WaitForRequests
extend self
include WaitForAjax
+ include WaitForVueResource
# This is inspired by http://www.salsify.com/blog/engineering/tearing-capybara-ajax-tests
def wait_for_requests_complete
Gitlab::Testing::RequestBlockerMiddleware.block_requests!
wait_for('pending AJAX requests complete') do
- Gitlab::Testing::RequestBlockerMiddleware.num_active_requests.zero? &&
- finished_all_ajax_requests?
+ Gitlab::Testing::RequestBlockerMiddleware.num_active_requests.zero?
end
ensure
Gitlab::Testing::RequestBlockerMiddleware.allow_requests!
diff --git a/spec/support/wait_for_vue_resource.rb b/spec/support/wait_for_vue_resource.rb
index 4a4e2e16ee7..3bb3d9c2e51 100644
--- a/spec/support/wait_for_vue_resource.rb
+++ b/spec/support/wait_for_vue_resource.rb
@@ -1,7 +1,19 @@
module WaitForVueResource
def wait_for_vue_resource(spinner: true)
Timeout.timeout(Capybara.default_max_wait_time) do
- loop until page.evaluate_script('window.activeVueResources').zero?
+ loop until finished_all_vue_resource_requests?
end
end
+
+ private
+
+ def finished_all_vue_resource_requests?
+ return true unless javascript_test?
+
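+ # Guard against window.activeVueResources being undefined on pages that never issue Vue resource requests.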
+ page.evaluate_script('window.activeVueResources || 0').zero?
+ end
+
+ def javascript_test?
+ Capybara.current_driver == Capybara.javascript_driver
+ end
end
diff --git a/spec/support/workhorse_helpers.rb b/spec/support/workhorse_helpers.rb
index 47673cd4c3a..ef1f9f68671 100644
--- a/spec/support/workhorse_helpers.rb
+++ b/spec/support/workhorse_helpers.rb
@@ -9,7 +9,7 @@ module WorkhorseHelpers
header = split_header.join(':')
[
type,
- JSON.parse(Base64.urlsafe_decode64(header)),
+ JSON.parse(Base64.urlsafe_decode64(header))
]
end
end
diff --git a/spec/tasks/gitlab/backup_rake_spec.rb b/spec/tasks/gitlab/backup_rake_spec.rb
index df2f2ce95e6..0ff1a988a9e 100644
--- a/spec/tasks/gitlab/backup_rake_spec.rb
+++ b/spec/tasks/gitlab/backup_rake_spec.rb
@@ -236,7 +236,6 @@ describe 'gitlab:app namespace rake task' do
'custom' => { 'path' => Settings.absolute('tmp/tests/custom_storage'), 'gitaly_address' => gitaly_address }
}
allow(Gitlab.config.repositories).to receive(:storages).and_return(storages)
- Gitlab::GitalyClient.configure_channels
# Create the projects now, after mocking the settings but before doing the backup
project_a
@@ -352,7 +351,7 @@ describe 'gitlab:app namespace rake task' do
end
it 'name has human readable time' do
- expect(@backup_tar).to match(/\d+_\d{4}_\d{2}_\d{2}_\d+\.\d+\.\d+(-pre)?_gitlab_backup.tar$/)
+ expect(@backup_tar).to match(/\d+_\d{4}_\d{2}_\d{2}_\d+\.\d+\.\d+.*_gitlab_backup.tar$/)
end
end
end # gitlab:app namespace
diff --git a/spec/tasks/gitlab/gitaly_rake_spec.rb b/spec/tasks/gitlab/gitaly_rake_spec.rb
index aaf998a546f..4a636decafd 100644
--- a/spec/tasks/gitlab/gitaly_rake_spec.rb
+++ b/spec/tasks/gitlab/gitaly_rake_spec.rb
@@ -80,28 +80,28 @@ describe 'gitlab:gitaly namespace rake task' do
it 'prints storage configuration in a TOML format' do
config = {
'default' => { 'path' => '/path/to/default' },
- 'nfs_01' => { 'path' => '/path/to/nfs_01' },
+ 'nfs_01' => { 'path' => '/path/to/nfs_01' }
}
allow(Gitlab.config.repositories).to receive(:storages).and_return(config)
- orig_stdout = $stdout
- $stdout = StringIO.new
-
- header = ''
+ expected_output = ''
Timecop.freeze do
- header = <<~TOML
+ expected_output = <<~TOML
# Gitaly storage configuration generated from #{Gitlab.config.source} on #{Time.current.to_s(:long)}
# This is in TOML format suitable for use in Gitaly's config.toml file.
+ [[storage]]
+ name = "default"
+ path = "/path/to/default"
+ [[storage]]
+ name = "nfs_01"
+ path = "/path/to/nfs_01"
TOML
- run_rake_task('gitlab:gitaly:storage_config')
end
- output = $stdout.string
- $stdout = orig_stdout
-
- expect(output).to include(header)
+ expect { run_rake_task('gitlab:gitaly:storage_config') }.
+ to output(expected_output).to_stdout
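+ # The `output` matcher captures $stdout directly, replacing the manual StringIO swap removed above.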
- parsed_output = TOML.parse(output)
+ parsed_output = TOML.parse(expected_output)
config.each do |name, params|
expect(parsed_output['storage']).to include({ 'name' => name, 'path' => params['path'] })
end
diff --git a/spec/views/projects/_last_commit.html.haml_spec.rb b/spec/views/projects/_last_commit.html.haml_spec.rb
deleted file mode 100644
index eea1695b171..00000000000
--- a/spec/views/projects/_last_commit.html.haml_spec.rb
+++ /dev/null
@@ -1,22 +0,0 @@
-require 'spec_helper'
-
-describe 'projects/_last_commit', :view do
- let(:project) { create(:project, :repository) }
-
- context 'when there is a pipeline present for the commit' do
- context 'when pipeline is blocked' do
- let!(:pipeline) do
- create(:ci_pipeline, :blocked, project: project,
- sha: project.commit.id)
- end
-
- it 'shows correct pipeline badge' do
- render 'projects/last_commit', commit: project.commit,
- project: project,
- ref: :master
-
- expect(rendered).to have_text "blocked #{project.commit.short_id}"
- end
- end
- end
-end
diff --git a/spec/views/projects/blob/_viewer.html.haml_spec.rb b/spec/views/projects/blob/_viewer.html.haml_spec.rb
index 501f90c5f9a..c6b0ed8da3c 100644
--- a/spec/views/projects/blob/_viewer.html.haml_spec.rb
+++ b/spec/views/projects/blob/_viewer.html.haml_spec.rb
@@ -10,9 +10,9 @@ describe 'projects/blob/_viewer.html.haml', :view do
include BlobViewer::Rich
self.partial_name = 'text'
- self.max_size = 1.megabyte
- self.absolute_max_size = 5.megabytes
- self.client_side = false
+ self.overridable_max_size = 1.megabyte
+ self.max_size = 5.megabytes
+ self.load_async = true
end
end
@@ -35,9 +35,9 @@ describe 'projects/blob/_viewer.html.haml', :view do
render partial: 'projects/blob/viewer', locals: { viewer: viewer }
end
- context 'when the viewer is server side' do
+ context 'when the viewer is loaded asynchronously' do
before do
- viewer_class.client_side = false
+ viewer_class.load_async = true
end
context 'when there is no render error' do
@@ -47,10 +47,10 @@ describe 'projects/blob/_viewer.html.haml', :view do
expect(rendered).to have_css('.blob-viewer[data-url]')
end
- it 'displays a spinner' do
+ it 'renders the loading indicator' do
render_view
- expect(rendered).to have_css('i[aria-label="Loading content"]')
+ expect(view).to render_template('projects/blob/viewers/_loading')
end
end
@@ -65,9 +65,9 @@ describe 'projects/blob/_viewer.html.haml', :view do
end
end
- context 'when the viewer is client side' do
+ context 'when the viewer is loaded synchronously' do
before do
- viewer_class.client_side = true
+ viewer_class.load_async = false
end
context 'when there is no render error' do
diff --git a/spec/views/projects/imports/new.html.haml_spec.rb b/spec/views/projects/imports/new.html.haml_spec.rb
new file mode 100644
index 00000000000..9b293065797
--- /dev/null
+++ b/spec/views/projects/imports/new.html.haml_spec.rb
@@ -0,0 +1,22 @@
+require "spec_helper"
+
+describe "projects/imports/new.html.haml" do
+ let(:user) { create(:user) }
+
+ context 'when import fails' do
+ let(:project) { create(:project_empty_repo, import_status: :failed, import_error: '<a href="http://googl.com">Foo</a>', import_type: :gitlab_project, import_source: '/var/opt/gitlab/gitlab-rails/shared/tmp/project_exports/uploads/t.tar.gz', import_url: nil) }
+
+ before do
+ sign_in(user)
+ project.team << [user, :master]
+ end
+
+ it "escapes HTML in import errors" do
+ assign(:project, project)
+
+ render
+
+ expect(rendered).not_to have_link('Foo', href: "http://googl.com")
+ end
+ end
+end
diff --git a/spec/views/projects/pipelines/_stage.html.haml_spec.rb b/spec/views/projects/pipelines/_stage.html.haml_spec.rb
index 10095ad7694..9c91c4e0fbd 100644
--- a/spec/views/projects/pipelines/_stage.html.haml_spec.rb
+++ b/spec/views/projects/pipelines/_stage.html.haml_spec.rb
@@ -39,9 +39,8 @@ describe 'projects/pipelines/_stage', :view do
context 'when there are retried builds present' do
before do
- create_list(:ci_build, 2, name: 'test:build',
- stage: stage.name,
- pipeline: pipeline)
+ create(:ci_build, name: 'test:build', stage: stage.name, pipeline: pipeline, retried: true)
+ create(:ci_build, name: 'test:build', stage: stage.name, pipeline: pipeline)
end
it 'shows only latest builds' do
diff --git a/spec/views/projects/pipelines/show.html.haml_spec.rb b/spec/views/projects/pipelines/show.html.haml_spec.rb
deleted file mode 100644
index bb39ec8efbf..00000000000
--- a/spec/views/projects/pipelines/show.html.haml_spec.rb
+++ /dev/null
@@ -1,67 +0,0 @@
-require 'spec_helper'
-
-describe 'projects/pipelines/show' do
- include Devise::Test::ControllerHelpers
-
- let(:user) { create(:user) }
- let(:project) { create(:project, :repository) }
-
- let(:pipeline) do
- create(:ci_empty_pipeline,
- project: project,
- sha: project.commit.id,
- user: user)
- end
-
- before do
- controller.prepend_view_path('app/views/projects')
-
- create_build('build', 0, 'build', :success)
- create_build('test', 1, 'rspec 0:2', :pending)
- create_build('test', 1, 'rspec 1:2', :running)
- create_build('test', 1, 'spinach 0:2', :created)
- create_build('test', 1, 'spinach 1:2', :created)
- create_build('test', 1, 'audit', :created)
- create_build('deploy', 2, 'production', :created)
-
- create(:generic_commit_status, pipeline: pipeline, stage: 'external', name: 'jenkins', stage_idx: 3)
-
- assign(:project, project)
- assign(:pipeline, pipeline.present(current_user: user))
- assign(:commit, project.commit)
-
- allow(view).to receive(:can?).and_return(true)
- end
-
- it 'shows a graph with grouped stages' do
- render
-
- expect(rendered).to have_css('.js-pipeline-graph')
- expect(rendered).to have_css('.js-grouped-pipeline-dropdown')
-
- # header
- expect(rendered).to have_text("##{pipeline.id}")
- expect(rendered).to have_css('time', text: pipeline.created_at.strftime("%b %d, %Y"))
- expect(rendered).to have_selector(%Q(img[alt$="#{pipeline.user.name}'s avatar"]))
- expect(rendered).to have_link(pipeline.user.name, href: user_path(pipeline.user))
-
- # stages
- expect(rendered).to have_text('Build')
- expect(rendered).to have_text('Test')
- expect(rendered).to have_text('Deploy')
- expect(rendered).to have_text('External')
-
- # builds
- expect(rendered).to have_text('rspec')
- expect(rendered).to have_text('spinach')
- expect(rendered).to have_text('rspec 0:2')
- expect(rendered).to have_text('production')
- expect(rendered).to have_text('jenkins')
- end
-
- private
-
- def create_build(stage, stage_idx, name, status)
- create(:ci_build, pipeline: pipeline, stage: stage, stage_idx: stage_idx, name: name, status: status)
- end
-end
diff --git a/spec/views/projects/tree/show.html.haml_spec.rb b/spec/views/projects/tree/show.html.haml_spec.rb
index 900f8d4732f..33eba3e6d3d 100644
--- a/spec/views/projects/tree/show.html.haml_spec.rb
+++ b/spec/views/projects/tree/show.html.haml_spec.rb
@@ -21,17 +21,17 @@ describe 'projects/tree/show' do
let(:tree) { repository.tree(commit.id, path) }
before do
+ assign(:id, File.join(ref, path))
assign(:ref, ref)
- assign(:commit, commit)
- assign(:id, commit.id)
- assign(:tree, tree)
assign(:path, path)
+ assign(:last_commit, commit)
+ assign(:tree, tree)
end
it 'displays correctly' do
render
expect(rendered).to have_css('.js-project-refs-dropdown .dropdown-toggle-text', text: ref)
- expect(rendered).to have_css('.readme-holder .file-content', text: ref)
+ expect(rendered).to have_css('.readme-holder')
end
end
end
diff --git a/spec/workers/git_garbage_collect_worker_spec.rb b/spec/workers/git_garbage_collect_worker_spec.rb
index 7a590f64e3c..8c5303b61cc 100644
--- a/spec/workers/git_garbage_collect_worker_spec.rb
+++ b/spec/workers/git_garbage_collect_worker_spec.rb
@@ -105,7 +105,7 @@ describe GitGarbageCollectWorker do
author: Gitlab::Git.committer_hash(email: 'foo@bar', name: 'baz'),
committer: Gitlab::Git.committer_hash(email: 'foo@bar', name: 'baz'),
tree: old_commit.tree,
- parents: [old_commit],
+ parents: [old_commit]
)
GitOperationService.new(nil, project.repository).send(
:update_ref,
diff --git a/spec/workers/namespaceless_project_destroy_worker_spec.rb b/spec/workers/namespaceless_project_destroy_worker_spec.rb
new file mode 100644
index 00000000000..8533b7b85e9
--- /dev/null
+++ b/spec/workers/namespaceless_project_destroy_worker_spec.rb
@@ -0,0 +1,79 @@
+require 'spec_helper'
+
+describe NamespacelessProjectDestroyWorker do
+ subject { described_class.new }
+
+ before do
+ # Stub after_save callbacks that will fail when Project has no namespace
+ allow_any_instance_of(Project).to receive(:ensure_dir_exist).and_return(nil)
+ allow_any_instance_of(Project).to receive(:update_project_statistics).and_return(nil)
+ end
+
+ describe '#perform' do
+ context 'project has namespace' do
+ it 'does not do anything' do
+ project = create(:empty_project)
+
+ subject.perform(project.id)
+
+ expect(Project.unscoped.all).to include(project)
+ end
+ end
+
+ context 'project has no namespace' do
+ let!(:project) do
+ project = build(:empty_project, namespace_id: nil)
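+ # Skip validations; a project without a namespace would otherwise be invalid and refuse to save.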
+ project.save(validate: false)
+ project
+ end
+
+ context 'project not a fork of another project' do
+ it "truncates the project's team" do
+ expect_any_instance_of(ProjectTeam).to receive(:truncate)
+
+ subject.perform(project.id)
+ end
+
+ it 'deletes the project' do
+ subject.perform(project.id)
+
+ expect(Project.unscoped.all).not_to include(project)
+ end
+
+ it 'does not call unlink_fork' do
+ is_expected.not_to receive(:unlink_fork)
+
+ subject.perform(project.id)
+ end
+
+ it 'does not do anything in Project#remove_pages' do
+ expect(Gitlab::PagesTransfer).not_to receive(:new)
+
+ subject.perform(project.id)
+ end
+ end
+
+ context 'project forked from another' do
+ let!(:parent_project) { create(:empty_project) }
+
+ before do
+ create(:forked_project_link, forked_to_project: project, forked_from_project: parent_project)
+ end
+
+ it 'closes open merge requests' do
+ merge_request = create(:merge_request, source_project: project, target_project: parent_project)
+
+ subject.perform(project.id)
+
+ expect(merge_request.reload).to be_closed
+ end
+
+ it 'destroys the link' do
+ subject.perform(project.id)
+
+ expect(parent_project.forked_project_links).to be_empty
+ end
+ end
+ end
+ end
+end
diff --git a/spec/workers/pipeline_metrics_worker_spec.rb b/spec/workers/pipeline_metrics_worker_spec.rb
index 5dbc0da95c2..ef71125c0b6 100644
--- a/spec/workers/pipeline_metrics_worker_spec.rb
+++ b/spec/workers/pipeline_metrics_worker_spec.rb
@@ -2,7 +2,7 @@ require 'spec_helper'
describe PipelineMetricsWorker do
let(:project) { create(:project, :repository) }
- let!(:merge_request) { create(:merge_request, source_project: project, source_branch: pipeline.ref) }
+ let!(:merge_request) { create(:merge_request, source_project: project, source_branch: pipeline.ref, head_pipeline: pipeline) }
let(:pipeline) do
create(:ci_empty_pipeline,
diff --git a/spec/workers/pipeline_schedule_worker_spec.rb b/spec/workers/pipeline_schedule_worker_spec.rb
new file mode 100644
index 00000000000..9c650354d72
--- /dev/null
+++ b/spec/workers/pipeline_schedule_worker_spec.rb
@@ -0,0 +1,64 @@
+require 'spec_helper'
+
+describe PipelineScheduleWorker do
+ subject { described_class.new.perform }
+
+ set(:project) { create(:project, :repository) }
+ set(:user) { create(:user) }
+
+ let!(:pipeline_schedule) do
+ create(:ci_pipeline_schedule, :nightly, project: project, owner: user)
+ end
+
+ before do
+ stub_ci_pipeline_to_return_yaml_file
+
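+ # Make the schedule overdue so the worker treats it as runnable.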
+ pipeline_schedule.update_column(:next_run_at, 1.day.ago)
+ end
+
+ context 'when the schedule is runnable by the user' do
+ before do
+ project.add_master(user)
+ end
+
+ context 'when there is a scheduled pipeline within next_run_at' do
+ it 'creates a new pipeline' do
+ expect { subject }.to change { project.pipelines.count }.by(1)
+ end
+
+ it 'updates the next_run_at field' do
+ subject
+
+ expect(pipeline_schedule.reload.next_run_at).to be > Time.now
+ end
+
+ it 'sets the schedule on the pipeline' do
+ subject
+
+ expect(project.pipelines.last.pipeline_schedule).to eq(pipeline_schedule)
+ end
+ end
+
+ context 'inactive schedule' do
+ before do
+ pipeline_schedule.deactivate!
+ end
+
+ it 'does not create a new pipeline' do
+ expect { subject }.not_to change { project.pipelines.count }
+ end
+ end
+ end
+
+ context 'when the schedule is not runnable by the user' do
+ it 'deactivates the schedule' do
+ subject
+
+ expect(pipeline_schedule.reload.active).to be_falsy
+ end
+
+ it 'does not schedule a pipeline' do
+ expect { subject }.not_to change { project.pipelines.count }
+ end
+ end
+end
diff --git a/spec/workers/post_receive_spec.rb b/spec/workers/post_receive_spec.rb
index 0260416dbe2..f4bc63bcc6a 100644
--- a/spec/workers/post_receive_spec.rb
+++ b/spec/workers/post_receive_spec.rb
@@ -4,13 +4,16 @@ describe PostReceive do
let(:changes) { "123456 789012 refs/heads/tést\n654321 210987 refs/tags/tag" }
let(:wrongly_encoded_changes) { changes.encode("ISO-8859-1").force_encoding("UTF-8") }
let(:base64_changes) { Base64.encode64(wrongly_encoded_changes) }
- let(:project) { create(:project, :repository) }
let(:project_identifier) { "project-#{project.id}" }
let(:key) { create(:key, user: project.owner) }
let(:key_id) { key.shell_id }
- context "as a resque worker" do
- it "reponds to #perform" do
+ let(:project) do
+ create(:project, :repository, auto_cancel_pending_pipelines: 'disabled')
+ end
+
+ context "as a sidekiq worker" do
+ it "responds to #perform" do
expect(described_class.new).to respond_to(:perform)
end
end
@@ -93,6 +96,27 @@ describe PostReceive do
end
end
+ describe '#process_repository_update' do
+ let(:changes) { '123456 789012 refs/heads/tést' }
+ let(:fake_hook_data) do
+ { event_name: 'repository_update' }
+ end
+
+ before do
+ allow_any_instance_of(Gitlab::GitPostReceive).to receive(:identify).and_return(project.owner)
+ allow_any_instance_of(Gitlab::DataBuilder::Repository).to receive(:update).and_return(fake_hook_data)
+ # Silence unrelated hooks so we can isolate the repository_update system hook
+ allow_any_instance_of(Key).to receive(:post_create_hook).and_return(true)
+ allow(subject).to receive(:process_project_changes).and_return(true)
+ end
+
+ it 'calls SystemHooksService' do
+ expect_any_instance_of(SystemHooksService).to receive(:execute_hooks).with(fake_hook_data, :repository_update_hooks).and_return(true)
+
+ subject.perform(pwd(project), key_id, base64_changes)
+ end
+ end
+
context "webhook" do
it "fetches the correct project" do
expect(Project).to receive(:find_by).with(id: project.id.to_s)
diff --git a/spec/workers/process_commit_worker_spec.rb b/spec/workers/process_commit_worker_spec.rb
index 9afe2e610b9..6295856b461 100644
--- a/spec/workers/process_commit_worker_spec.rb
+++ b/spec/workers/process_commit_worker_spec.rb
@@ -20,6 +20,14 @@ describe ProcessCommitWorker do
worker.perform(project.id, -1, commit.to_hash)
end
+ it 'does not process the commit when no issues are referenced' do
+ allow(worker).to receive(:build_commit).and_return(double(matches_cross_reference_regex?: false))
+
+ expect(worker).not_to receive(:process_commit_message)
+
+ worker.perform(project.id, user.id, commit.to_hash)
+ end
+
it 'processes the commit message' do
expect(worker).to receive(:process_commit_message).and_call_original
diff --git a/spec/workers/project_cache_worker_spec.rb b/spec/workers/project_cache_worker_spec.rb
index c23ffdf99c0..a4ba5f7c943 100644
--- a/spec/workers/project_cache_worker_spec.rb
+++ b/spec/workers/project_cache_worker_spec.rb
@@ -45,6 +45,18 @@ describe ProjectCacheWorker do
worker.perform(project.id, %w(readme))
end
+
+ context 'with plain readme' do
+ it 'refreshes the method caches' do
+ allow(MarkupHelper).to receive(:gitlab_markdown?).and_return(false)
+ allow(MarkupHelper).to receive(:plain?).and_return(true)
+
+ expect_any_instance_of(Repository).to receive(:refresh_method_caches).
+ with(%i(readme)).
+ and_call_original
+ worker.perform(project.id, %w(readme))
+ end
+ end
end
end
diff --git a/spec/workers/repository_check/clear_worker_spec.rb b/spec/workers/repository_check/clear_worker_spec.rb
index a3b70c74787..3b1a64c5057 100644
--- a/spec/workers/repository_check/clear_worker_spec.rb
+++ b/spec/workers/repository_check/clear_worker_spec.rb
@@ -5,7 +5,7 @@ describe RepositoryCheck::ClearWorker do
project = create(:empty_project)
project.update_columns(
last_repository_check_failed: true,
- last_repository_check_at: Time.now,
+ last_repository_check_at: Time.now
)
described_class.new.perform
diff --git a/spec/workers/trigger_schedule_worker_spec.rb b/spec/workers/trigger_schedule_worker_spec.rb
deleted file mode 100644
index 861bed4442e..00000000000
--- a/spec/workers/trigger_schedule_worker_spec.rb
+++ /dev/null
@@ -1,73 +0,0 @@
-require 'spec_helper'
-
-describe TriggerScheduleWorker do
- let(:worker) { described_class.new }
-
- before do
- stub_ci_pipeline_to_return_yaml_file
- end
-
- context 'when there is a scheduled trigger within next_run_at' do
- let(:next_run_at) { 2.days.ago }
-
- let!(:trigger_schedule) do
- create(:ci_trigger_schedule, :nightly)
- end
-
- before do
- trigger_schedule.update_column(:next_run_at, next_run_at)
- end
-
- it 'creates a new trigger request' do
- expect { worker.perform }.to change { Ci::TriggerRequest.count }
- end
-
- it 'creates a new pipeline' do
- expect { worker.perform }.to change { Ci::Pipeline.count }
- expect(Ci::Pipeline.last).to be_pending
- end
-
- it 'updates next_run_at' do
- worker.perform
-
- expect(trigger_schedule.reload.next_run_at).not_to eq(next_run_at)
- end
-
- context 'inactive schedule' do
- before do
- trigger_schedule.update(active: false)
- end
-
- it 'does not create a new trigger' do
- expect { worker.perform }.not_to change { Ci::TriggerRequest.count }
- end
- end
- end
-
- context 'when there are no scheduled triggers within next_run_at' do
- before { create(:ci_trigger_schedule, :nightly) }
-
- it 'does not create a new pipeline' do
- expect { worker.perform }.not_to change { Ci::Pipeline.count }
- end
-
- it 'does not update next_run_at' do
- expect { worker.perform }.not_to change { Ci::TriggerSchedule.last.next_run_at }
- end
- end
-
- context 'when next_run_at is nil' do
- before do
- schedule = create(:ci_trigger_schedule, :nightly)
- schedule.update_column(:next_run_at, nil)
- end
-
- it 'does not create a new pipeline' do
- expect { worker.perform }.not_to change { Ci::Pipeline.count }
- end
-
- it 'does not update next_run_at' do
- expect { worker.perform }.not_to change { Ci::TriggerSchedule.last.next_run_at }
- end
- end
-end