author    Jason Colyer <jcolyer@gitlab.com>  2019-08-05 21:28:07 +0000
committer Jason Colyer <jcolyer@gitlab.com>  2019-08-05 21:28:07 +0000
commit    eda10d00246ca6c870bb1a1d3ff76e876fdcd631 (patch)
tree      b077073b7a7dc861d75171bdb136be88c0cb12fe
parent    8b923c67414e66b5b7c98c75b54966a3013094de (diff)
parent    0555bc64b8a4e7ae910ea5ab6754a83435bc52d8 (diff)
download  gitlab-ce-65386-docs-add-elasticsearch-troubleshooting-to-administrator-troubleshooting-section.tar.gz
-rw-r--r--.gitlab-ci.yml1
-rw-r--r--.gitlab/CODEOWNERS14
-rw-r--r--.gitlab/ci/docs.gitlab-ci.yml10
-rw-r--r--.gitlab/ci/frontend.gitlab-ci.yml8
-rw-r--r--.gitlab/ci/global.gitlab-ci.yml4
-rw-r--r--.gitlab/ci/qa.gitlab-ci.yml20
-rw-r--r--.gitlab/ci/rails.gitlab-ci.yml7
-rw-r--r--.gitlab/ci/review.gitlab-ci.yml20
-rw-r--r--.gitlab/ci/test-metadata.gitlab-ci.yml2
-rw-r--r--.gitlab/issue_templates/Security Release.md20
-rw-r--r--.gitlab/issue_templates/Security developer workflow.md2
-rw-r--r--.mdlrc.style7
-rw-r--r--.prettierignore1
-rw-r--r--CHANGELOG.md13
-rw-r--r--app/assets/javascripts/clusters/clusters_bundle.js62
-rw-r--r--app/assets/javascripts/clusters/components/uninstall_application_confirmation_modal.vue7
-rw-r--r--app/assets/javascripts/clusters/stores/clusters_store.js1
-rw-r--r--app/assets/javascripts/commons/polyfills.js1
-rw-r--r--app/assets/javascripts/gl_form.js87
-rw-r--r--app/assets/javascripts/helpers/indent_helper.js182
-rw-r--r--app/assets/javascripts/jobs/components/job_app.vue5
-rw-r--r--app/assets/javascripts/jobs/components/sidebar.vue9
-rw-r--r--app/assets/javascripts/jobs/index.js29
-rw-r--r--app/assets/javascripts/lib/utils/common_utils.js65
-rw-r--r--app/assets/javascripts/lib/utils/keycodes.js10
-rw-r--r--app/assets/javascripts/lib/utils/undo_stack.js105
-rw-r--r--app/assets/javascripts/manual_ordering.js2
-rw-r--r--app/assets/javascripts/notes/stores/utils.js2
-rw-r--r--app/assets/javascripts/operation_settings/components/external_dashboard.vue2
-rw-r--r--app/assets/javascripts/performance_bar/components/detailed_metric.vue2
-rw-r--r--app/assets/javascripts/persistent_user_callout.js25
-rw-r--r--app/assets/javascripts/privacy_policy_update_callout.js8
-rw-r--r--app/assets/javascripts/projects/gke_cluster_namespace/index.js6
-rw-r--r--app/assets/javascripts/right_sidebar.js10
-rw-r--r--app/assets/javascripts/tracking.js67
-rw-r--r--app/assets/javascripts/vue_merge_request_widget/components/mr_widget_pipeline_container.vue2
-rw-r--r--app/assets/javascripts/vue_shared/components/markdown/toolbar.vue41
-rw-r--r--app/assets/stylesheets/application.scss1
-rw-r--r--app/assets/stylesheets/framework/common.scss1
-rw-r--r--app/assets/stylesheets/pages/builds.scss1
-rw-r--r--app/controllers/autocomplete_controller.rb2
-rw-r--r--app/controllers/boards/issues_controller.rb2
-rw-r--r--app/controllers/concerns/issuable_actions.rb22
-rw-r--r--app/controllers/concerns/notes_actions.rb2
-rw-r--r--app/controllers/groups_controller.rb4
-rw-r--r--app/controllers/projects/issues_controller.rb4
-rw-r--r--app/controllers/projects/notes_controller.rb2
-rw-r--r--app/controllers/snippets/notes_controller.rb4
-rw-r--r--app/finders/branches_finder.rb2
-rw-r--r--app/finders/container_repositories_finder.rb34
-rw-r--r--app/finders/notes_finder.rb42
-rw-r--r--app/helpers/application_settings_helper.rb2
-rw-r--r--app/models/application_setting_implementation.rb14
-rw-r--r--app/models/ci/build.rb14
-rw-r--r--app/models/ci/build_need.rb14
-rw-r--r--app/models/ci/pipeline.rb9
-rw-r--r--app/models/clusters/applications/cert_manager.rb36
-rw-r--r--app/models/clusters/applications/helm.rb28
-rw-r--r--app/models/clusters/applications/knative.rb10
-rw-r--r--app/models/clusters/applications/prometheus.rb10
-rw-r--r--app/models/commit_status.rb12
-rw-r--r--app/models/concerns/ci/metadatable.rb1
-rw-r--r--app/models/concerns/diff_positionable_note.rb8
-rw-r--r--app/models/concerns/group_descendant.rb4
-rw-r--r--app/models/concerns/relative_positioning.rb115
-rw-r--r--app/models/concerns/update_project_statistics.rb7
-rw-r--r--app/models/container_repository.rb6
-rw-r--r--app/models/diff_note.rb7
-rw-r--r--app/models/environment.rb2
-rw-r--r--app/models/group.rb2
-rw-r--r--app/models/hooks/system_hook.rb4
-rw-r--r--app/models/hooks/web_hook.rb6
-rw-r--r--app/models/merge_request.rb2
-rw-r--r--app/models/namespace/aggregation_schedule.rb14
-rw-r--r--app/models/note.rb4
-rw-r--r--app/models/project_statistics.rb6
-rw-r--r--app/policies/group_policy.rb1
-rw-r--r--app/serializers/analytics_issue_entity.rb2
-rw-r--r--app/serializers/user_serializer.rb17
-rw-r--r--app/services/application_settings/update_service.rb9
-rw-r--r--app/services/auth/container_registry_authentication_service.rb10
-rw-r--r--app/services/ci/process_pipeline_service.rb88
-rw-r--r--app/services/ci/retry_build_service.rb2
-rw-r--r--app/services/merge_requests/mergeability_check_service.rb45
-rw-r--r--app/services/self_monitoring/project/create_service.rb32
-rw-r--r--app/services/web_hook_service.rb6
-rw-r--r--app/validators/addressable_url_validator.rb2
-rw-r--r--app/validators/system_hook_url_validator.rb18
-rw-r--r--app/views/admin/application_settings/_outbound.html.haml10
-rw-r--r--app/views/clusters/clusters/_banner.html.haml5
-rw-r--r--app/views/clusters/clusters/show.html.haml49
-rw-r--r--app/views/explore/projects/_projects.html.haml3
-rw-r--r--app/views/explore/projects/trending.html.haml2
-rw-r--r--app/views/help/_shortcuts.html.haml10
-rw-r--r--app/views/layouts/_head.html.haml1
-rw-r--r--app/views/projects/_flash_messages.html.haml2
-rw-r--r--app/views/projects/jobs/show.html.haml3
-rw-r--r--app/views/projects/mirrors/_instructions.html.haml2
-rw-r--r--app/views/projects/services/prometheus/_metrics.html.haml56
-rw-r--r--app/views/projects/services/prometheus/_show.html.haml9
-rw-r--r--app/views/projects/settings/ci_cd/_form.html.haml2
-rw-r--r--app/views/shared/issuable/_search_bar.html.haml5
-rw-r--r--app/views/shared/issuable/_sort_dropdown.html.haml2
-rw-r--r--app/views/shared/milestones/_top.html.haml2
-rw-r--r--app/views/shared/notes/_hints.html.haml11
-rw-r--r--app/views/shared/projects/_list.html.haml3
-rw-r--r--app/views/u2f/_register.html.haml2
-rw-r--r--app/workers/all_queues.yml1
-rw-r--r--app/workers/build_process_worker.rb16
-rw-r--r--app/workers/namespaces/root_statistics_worker.rb6
-rw-r--r--app/workers/namespaces/schedule_aggregation_worker.rb6
-rw-r--r--app/workers/pipeline_process_worker.rb7
-rwxr-xr-xbin/web_puma2
-rw-r--r--changelogs/unreleased/17276-breakage-in-displaying-svg-in-the-same-repository.yml5
-rw-r--r--changelogs/unreleased/26866-api-endpoint-to-list-the-docker-images-tags-of-a-group.yml6
-rw-r--r--changelogs/unreleased/50130-cluster-cluster-details-update-automatically-after-cluster-is-created.yml5
-rw-r--r--changelogs/unreleased/58256-incorrect-empty-state-message-displayed-on-explore-projects-tab.yml5
-rw-r--r--changelogs/unreleased/59521-job-sidebar-has-a-blank-block.yml5
-rw-r--r--changelogs/unreleased/60516-uninstall-tiller.yml5
-rw-r--r--changelogs/unreleased/60664-kubernetes-applications-uninstall-cert-manager.yml5
-rw-r--r--changelogs/unreleased/61776-fixing-the-U2F-warning-message-text-colour.yml5
-rw-r--r--changelogs/unreleased/63571-fix-gc-profiler-data-being-wiped.yml5
-rw-r--r--changelogs/unreleased/64092-removes-update-statistics-namespace-feature-flag.yml5
-rw-r--r--changelogs/unreleased/64341-user-callout-deferred-link-support.yml5
-rw-r--r--changelogs/unreleased/64675-Dashboard-URL-legend-border.yml5
-rw-r--r--changelogs/unreleased/GL-12757.yml5
-rw-r--r--changelogs/unreleased/double-slash-64592.yml5
-rw-r--r--changelogs/unreleased/fix-bin-web-puma-script-to-consider-rails-env.yml5
-rw-r--r--changelogs/unreleased/fix-i18n-updated-projects.yml5
-rw-r--r--changelogs/unreleased/fix-name-vs-path-problem-for-cycle-analytics.yml5
-rw-r--r--changelogs/unreleased/georgekoltsov-55474-outbound-setting-system-hooks.yml5
-rw-r--r--changelogs/unreleased/implement-dag.yml5
-rw-r--r--changelogs/unreleased/jprovazn-fix-positioning.yml5
-rw-r--r--changelogs/unreleased/jramsay-fix-mirroring-help-text-typo.yml5
-rw-r--r--changelogs/unreleased/jupyter-fixes-v1.yml5
-rw-r--r--changelogs/unreleased/mh-editor-indents.yml5
-rw-r--r--changelogs/unreleased/sh-disable-redis-peek.yml5
-rw-r--r--changelogs/unreleased/sh-fix-special-role-error-500.yml5
-rw-r--r--changelogs/unreleased/snowplow-ee-to-ce.yml5
-rw-r--r--changelogs/unreleased/update-pipelines-minutes-expiry-banner-to-an-alert-component-type.yml5
-rw-r--r--config/gitlab.yml.example5
-rw-r--r--config/initializers/0_inflections.rb1
-rw-r--r--config/initializers/1_settings.rb9
-rw-r--r--config/initializers/peek.rb7
-rw-r--r--config/routes/project.rb4
-rw-r--r--db/migrate/20190612111404_add_geo_container_sync_capacity.rb13
-rw-r--r--db/migrate/20190703043358_add_commit_id_to_draft_notes.rb11
-rw-r--r--db/migrate/20190726101050_rename_allow_local_requests_from_hooks_and_services_application_setting.rb17
-rw-r--r--db/migrate/20190726101133_add_allow_local_requests_from_system_hooks_to_application_settings.rb18
-rw-r--r--db/migrate/20190731084415_add_build_need.rb20
-rw-r--r--db/migrate/20190802012622_reorder_issues_project_id_relative_position_index.rb24
-rw-r--r--db/post_migrate/20190801114109_cleanup_allow_local_requests_from_hooks_and_services_application_setting_rename.rb17
-rw-r--r--db/schema.rb16
-rw-r--r--doc/administration/auth/ldap-ee.md126
-rw-r--r--doc/administration/auth/ldap.md67
-rw-r--r--doc/administration/geo/replication/index.md67
-rw-r--r--doc/administration/geo/replication/updating_the_geo_nodes.md34
-rw-r--r--doc/administration/gitaly/index.md102
-rw-r--r--doc/administration/high_availability/README.md18
-rw-r--r--doc/administration/index.md2
-rw-r--r--doc/administration/operations/fast_ssh_key_lookup.md4
-rw-r--r--doc/administration/plugins.md32
-rw-r--r--doc/administration/repository_storage_paths.md16
-rw-r--r--doc/administration/troubleshooting/diagnostics_tools.md27
-rw-r--r--doc/api/README.md7
-rw-r--r--doc/api/container_registry.md77
-rw-r--r--doc/api/jobs.md6
-rw-r--r--doc/api/services.md24
-rw-r--r--doc/api/settings.md14
-rw-r--r--doc/ci/ci_cd_for_external_repos/bitbucket_integration.md198
-rw-r--r--doc/ci/docker/using_docker_build.md342
-rw-r--r--doc/ci/docker/using_docker_images.md131
-rw-r--r--doc/ci/examples/artifactory_and_gitlab/index.md70
-rw-r--r--doc/ci/examples/test_phoenix_app_with_gitlab_ci_cd/index.md111
-rw-r--r--doc/ci/ssh_keys/README.md82
-rw-r--r--doc/ci/variables/README.md100
-rw-r--r--doc/ci/yaml/README.md16
-rw-r--r--doc/development/README.md4
-rw-r--r--doc/development/contributing/issue_workflow.md25
-rw-r--r--doc/development/fe_guide/event_tracking.md93
-rw-r--r--doc/development/testing_guide/end_to_end/index.md24
-rw-r--r--doc/development/testing_guide/end_to_end/quick_start_guide.md18
-rw-r--r--doc/development/testing_guide/end_to_end/style_guide.md48
-rw-r--r--doc/development/testing_guide/img/qa_on_merge_requests_cicd_architecture.pngbin64862 -> 0 bytes
-rw-r--r--doc/development/testing_guide/img/review_apps_cicd_architecture.pngbin136431 -> 0 bytes
-rw-r--r--doc/development/testing_guide/review_apps.md54
-rw-r--r--doc/development/what_requires_downtime.md2
-rw-r--r--doc/gitlab-basics/create-project.md4
-rw-r--r--doc/install/installation.md2
-rw-r--r--doc/install/requirements.md32
-rw-r--r--doc/integration/elasticsearch.md21
-rw-r--r--doc/raketasks/backup_restore.md19
-rw-r--r--doc/security/README.md2
-rw-r--r--doc/security/img/outbound_requests_section.pngbin7314 -> 0 bytes
-rw-r--r--doc/security/img/outbound_requests_section_v12_2.pngbin0 -> 21108 bytes
-rw-r--r--doc/security/rack_attack.md77
-rw-r--r--doc/security/rate_limits.md32
-rw-r--r--doc/security/webhooks.md11
-rw-r--r--doc/topics/autodevops/index.md6
-rw-r--r--doc/topics/git/index.md6
-rw-r--r--doc/topics/git/migrate_to_git_lfs/index.md174
-rw-r--r--doc/topics/git/partial_clone.md147
-rw-r--r--doc/topics/git/useful_git_commands.md210
-rw-r--r--doc/user/admin_area/settings/img/user_and_ip_rate_limits.pngbin0 -> 232777 bytes
-rw-r--r--doc/user/admin_area/settings/index.md1
-rw-r--r--doc/user/admin_area/settings/user_and_ip_rate_limits.md32
-rw-r--r--doc/user/application_security/dast/index.md5
-rw-r--r--doc/user/application_security/index.md32
-rw-r--r--doc/user/application_security/sast/analyzers.md37
-rw-r--r--doc/user/application_security/sast/index.md1
-rw-r--r--doc/user/clusters/applications.md2
-rw-r--r--doc/user/gitlab_com/index.md73
-rw-r--r--doc/user/group/bulk_editing/index.md15
-rw-r--r--doc/user/markdown.md43
-rw-r--r--doc/user/permissions.md6
-rw-r--r--doc/user/project/img/key_value_labels.pngbin6208 -> 0 bytes
-rw-r--r--doc/user/project/img/labels_default.pngbin35620 -> 0 bytes
-rw-r--r--doc/user/project/img/labels_default_v12_1.pngbin0 -> 207994 bytes
-rw-r--r--doc/user/project/img/labels_delete_v12_1.pngbin0 -> 29186 bytes
-rw-r--r--doc/user/project/img/labels_drag_priority_v12_1.gifbin0 -> 958437 bytes
-rw-r--r--doc/user/project/img/labels_epic_sidebar.pngbin47869 -> 0 bytes
-rw-r--r--doc/user/project/img/labels_epic_sidebar_v12_1.pngbin0 -> 318960 bytes
-rw-r--r--doc/user/project/img/labels_generate_default.pngbin25797 -> 0 bytes
-rw-r--r--doc/user/project/img/labels_generate_default_v12_1.pngbin0 -> 244739 bytes
-rw-r--r--doc/user/project/img/labels_group_issues.pngbin92070 -> 0 bytes
-rw-r--r--doc/user/project/img/labels_group_issues_v12_1.pngbin0 -> 391121 bytes
-rw-r--r--doc/user/project/img/labels_key_value_v12_1.pngbin0 -> 332077 bytes
-rw-r--r--doc/user/project/img/labels_list.pngbin75215 -> 0 bytes
-rw-r--r--doc/user/project/img/labels_list_v12_1.pngbin0 -> 360298 bytes
-rw-r--r--doc/user/project/img/labels_new_label_from_sidebar.gif (renamed from doc/user/project/img/new_label_from_sidebar.gif)bin759243 -> 759243 bytes
-rw-r--r--doc/user/project/img/labels_prioritized.pngbin56353 -> 0 bytes
-rw-r--r--doc/user/project/img/labels_prioritized_v12_1.pngbin0 -> 178652 bytes
-rw-r--r--doc/user/project/img/labels_promotion.pngbin44021 -> 0 bytes
-rw-r--r--doc/user/project/img/labels_promotion_v12_1.pngbin0 -> 29215 bytes
-rw-r--r--doc/user/project/img/labels_subscriptions.pngbin31716 -> 0 bytes
-rw-r--r--doc/user/project/img/labels_subscriptions_v12_1.pngbin0 -> 138684 bytes
-rw-r--r--doc/user/project/integrations/jira_server_configuration.md4
-rw-r--r--doc/user/project/integrations/prometheus.md5
-rw-r--r--doc/user/project/issues/related_issues.md2
-rw-r--r--doc/user/project/labels.md183
-rw-r--r--doc/user/project/merge_requests/merge_request_approvals.md10
-rw-r--r--doc/user/project/packages/npm_registry.md30
-rw-r--r--doc/user/project/pages/custom_domains_ssl_tls_certification/index.md2
-rw-r--r--doc/user/project/pipelines/settings.md7
-rw-r--r--doc/user/project/quick_actions.md13
-rw-r--r--doc/workflow/lfs/manage_large_binaries_with_git_lfs.md4
-rw-r--r--doc/workflow/shortcuts.md10
-rw-r--r--lib/api/api.rb3
-rw-r--r--lib/api/entities.rb2
-rw-r--r--lib/api/entities/container_registry.rb10
-rw-r--r--lib/api/group_container_repositories.rb39
-rw-r--r--lib/api/helpers/notes_helpers.rb2
-rw-r--r--lib/api/project_container_repositories.rb (renamed from lib/api/container_registry.rb)21
-rw-r--r--lib/api/settings.rb6
-rw-r--r--lib/feature.rb7
-rw-r--r--lib/gitlab/background_migration/migrate_legacy_artifacts.rb18
-rw-r--r--lib/gitlab/blob_helper.rb2
-rw-r--r--lib/gitlab/ci/ansi2html.rb27
-rw-r--r--lib/gitlab/ci/config/entry/job.rb20
-rw-r--r--lib/gitlab/ci/config/normalizer.rb64
-rw-r--r--lib/gitlab/ci/pipeline/seed/build.rb31
-rw-r--r--lib/gitlab/ci/pipeline/seed/stage.rb19
-rw-r--r--lib/gitlab/ci/templates/Security/Container-Scanning.gitlab-ci.yml5
-rw-r--r--lib/gitlab/ci/yaml_processor.rb18
-rw-r--r--lib/gitlab/cycle_analytics/base_query.rb13
-rw-r--r--lib/gitlab/cycle_analytics/code_event_fetcher.rb4
-rw-r--r--lib/gitlab/cycle_analytics/issue_event_fetcher.rb4
-rw-r--r--lib/gitlab/cycle_analytics/issue_helper.rb13
-rw-r--r--lib/gitlab/cycle_analytics/plan_event_fetcher.rb4
-rw-r--r--lib/gitlab/cycle_analytics/plan_helper.rb6
-rw-r--r--lib/gitlab/cycle_analytics/production_event_fetcher.rb1
-rw-r--r--lib/gitlab/cycle_analytics/review_event_fetcher.rb4
-rw-r--r--lib/gitlab/ee_compat_check.rb2
-rw-r--r--lib/gitlab/exclusive_lease_helpers.rb5
-rw-r--r--lib/gitlab/http_connection_adapter.rb4
-rw-r--r--lib/gitlab/kubernetes/helm/delete_command.rb11
-rw-r--r--lib/gitlab/kubernetes/helm/reset_command.rb54
-rw-r--r--lib/gitlab/kubernetes/kube_client.rb2
-rw-r--r--lib/gitlab/metrics/samplers/influx_sampler.rb22
-rw-r--r--lib/gitlab/metrics/samplers/ruby_sampler.rb23
-rw-r--r--lib/gitlab/octokit/middleware.rb2
-rw-r--r--lib/gitlab/profiler.rb3
-rw-r--r--lib/gitlab/project_search_results.rb2
-rw-r--r--lib/peek/views/redis_detailed.rb5
-rw-r--r--lib/peek/views/rugged.rb8
-rw-r--r--locale/gitlab.pot36
-rw-r--r--package.json7
-rw-r--r--qa/qa/specs/features/browser_ui/1_manage/login/login_via_oauth_spec.rb3
-rw-r--r--qa/qa/specs/features/browser_ui/3_create/repository/push_over_http_file_size_spec.rb34
-rw-r--r--rubocop/cop/rspec/top_level_describe_path.rb35
-rw-r--r--rubocop/rubocop.rb1
-rwxr-xr-xscripts/review_apps/review-apps.sh25
-rw-r--r--spec/controllers/admin/clusters/applications_controller_spec.rb2
-rw-r--r--spec/controllers/autocomplete_controller_spec.rb14
-rw-r--r--spec/controllers/concerns/issuable_actions_spec.rb69
-rw-r--r--spec/controllers/groups/clusters/applications_controller_spec.rb2
-rw-r--r--spec/controllers/help_controller_spec.rb2
-rw-r--r--spec/controllers/projects/issues_controller_spec.rb43
-rw-r--r--spec/controllers/projects/jobs_controller_spec.rb2
-rw-r--r--spec/controllers/projects/merge_requests_controller_spec.rb16
-rw-r--r--spec/controllers/projects/notes_controller_spec.rb2
-rw-r--r--spec/db/schema_spec.rb2
-rw-r--r--spec/factories/ci/build_need.rb8
-rw-r--r--spec/factories/clusters/applications/helm.rb16
-rw-r--r--spec/factories/container_repositories.rb2
-rw-r--r--spec/features/admin/admin_settings_spec.rb7
-rw-r--r--spec/features/dashboard/shortcuts_spec.rb2
-rw-r--r--spec/features/groups/labels/user_sees_links_to_issuables_spec.rb (renamed from spec/features/groups/labels/user_sees_links_to_issuables.rb)6
-rw-r--r--spec/features/instance_statistics/instance_statistics_spec.rb (renamed from spec/features/instance_statistics/instance_statistics.rb)0
-rw-r--r--spec/features/projects/clusters/applications_spec.rb10
-rw-r--r--spec/features/projects/files/user_browses_a_tree_with_a_folder_containing_only_a_folder_spec.rb (renamed from spec/features/projects/files/user_browses_a_tree_with_a_folder_containing_only_a_folder.rb)3
-rw-r--r--spec/features/projects/labels/user_sees_links_to_issuables_spec.rb (renamed from spec/features/projects/labels/user_sees_links_to_issuables.rb)36
-rw-r--r--spec/features/projects/wiki/user_creates_wiki_page_spec.rb12
-rw-r--r--spec/features/snippets/user_edits_snippet_spec.rb2
-rw-r--r--spec/features/snippets/user_sees_breadcrumb_links.rb19
-rw-r--r--spec/features/user_opens_link_to_comment_spec.rb (renamed from spec/features/user_opens_link_to_comment.rb)0
-rw-r--r--spec/features/users/add_email_to_existing_account_spec.rb (renamed from spec/features/users/add_email_to_existing_account.rb)0
-rw-r--r--spec/finders/container_repositories_finder_spec.rb44
-rw-r--r--spec/finders/notes_finder_spec.rb123
-rw-r--r--spec/fixtures/api/schemas/entities/discussion.json67
-rw-r--r--spec/fixtures/api/schemas/entities/discussions.json4
-rw-r--r--spec/fixtures/api/schemas/registry/repository.json6
-rw-r--r--spec/fixtures/clusters/sample_key.key9
-rw-r--r--spec/frontend/clusters/clusters_bundle_spec.js51
-rw-r--r--spec/frontend/helpers/indent_helper_spec.js371
-rw-r--r--spec/frontend/helpers/vue_test_utils_helper.js18
-rw-r--r--spec/frontend/helpers/vue_test_utils_helper_spec.js (renamed from spec/javascripts/helpers/vue_test_utils_helper_spec.js)0
-rw-r--r--spec/frontend/lib/utils/common_utils_spec.js180
-rw-r--r--spec/frontend/lib/utils/undo_stack_spec.js237
-rw-r--r--spec/frontend/tracking_spec.js123
-rw-r--r--spec/javascripts/diffs/mock_data/diff_file.js2
-rw-r--r--spec/javascripts/helpers/vue_test_utils_helper.js38
-rw-r--r--spec/javascripts/jobs/components/job_app_spec.js683
-rw-r--r--spec/javascripts/notes/mock_data.js2
-rw-r--r--spec/javascripts/persistent_user_callout_spec.js87
-rw-r--r--spec/lib/feature_spec.rb16
-rw-r--r--spec/lib/gitlab/ci/ansi2html_spec.rb43
-rw-r--r--spec/lib/gitlab/ci/config/entry/job_spec.rb53
-rw-r--r--spec/lib/gitlab/ci/config/normalizer_spec.rb59
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb8
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/build_spec.rb38
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/stage_spec.rb14
-rw-r--r--spec/lib/gitlab/ci/trace/stream_spec.rb16
-rw-r--r--spec/lib/gitlab/ci/yaml_processor_spec.rb80
-rw-r--r--spec/lib/gitlab/exclusive_lease_helpers_spec.rb17
-rw-r--r--spec/lib/gitlab/http_spec.rb6
-rw-r--r--spec/lib/gitlab/kubernetes/helm/reset_command_spec.rb65
-rw-r--r--spec/lib/gitlab/kubernetes/kube_client_spec.rb2
-rw-r--r--spec/lib/gitlab/metrics/samplers/influx_sampler_spec.rb20
-rw-r--r--spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb20
-rw-r--r--spec/lib/gitlab/octokit/middleware_spec.rb8
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb2
-rw-r--r--spec/lib/peek/views/rugged_spec.rb10
-rw-r--r--spec/models/ci/build_need_spec.rb13
-rw-r--r--spec/models/ci/build_spec.rb99
-rw-r--r--spec/models/clusters/applications/cert_manager_spec.rb50
-rw-r--r--spec/models/clusters/applications/helm_spec.rb59
-rw-r--r--spec/models/clusters/applications/knative_spec.rb4
-rw-r--r--spec/models/clusters/applications/prometheus_spec.rb2
-rw-r--r--spec/models/clusters/cluster_spec.rb2
-rw-r--r--spec/models/clusters/platforms/kubernetes_spec.rb2
-rw-r--r--spec/models/commit_spec.rb1
-rw-r--r--spec/models/diff_note_spec.rb4
-rw-r--r--spec/models/environment_spec.rb29
-rw-r--r--spec/models/group_spec.rb1
-rw-r--r--spec/models/lfs_download_object_spec.rb2
-rw-r--r--spec/models/namespace/aggregation_schedule_spec.rb34
-rw-r--r--spec/models/note_spec.rb16
-rw-r--r--spec/models/project_spec.rb10
-rw-r--r--spec/models/project_statistics_spec.rb13
-rw-r--r--spec/requests/api/group_container_repositories_spec.rb57
-rw-r--r--spec/requests/api/merge_requests_spec.rb2
-rw-r--r--spec/requests/api/project_container_repositories_spec.rb (renamed from spec/requests/api/container_registry_spec.rb)75
-rw-r--r--spec/requests/api/settings_spec.rb17
-rw-r--r--spec/rubocop/cop/rspec/top_level_describe_path_spec.rb67
-rw-r--r--spec/serializers/analytics_issue_entity_spec.rb6
-rw-r--r--spec/serializers/analytics_issue_serializer_spec.rb6
-rw-r--r--spec/serializers/analytics_merge_request_serializer_spec.rb6
-rw-r--r--spec/serializers/cluster_application_entity_spec.rb2
-rw-r--r--spec/serializers/user_serializer_spec.rb30
-rw-r--r--spec/services/application_settings/update_service_spec.rb48
-rw-r--r--spec/services/auth/container_registry_authentication_service_spec.rb13
-rw-r--r--spec/services/ci/create_pipeline_service_spec.rb56
-rw-r--r--spec/services/ci/process_pipeline_service_spec.rb136
-rw-r--r--spec/services/ci/retry_build_service_spec.rb1
-rw-r--r--spec/services/merge_requests/mergeability_check_service_spec.rb79
-rw-r--r--spec/services/projects/lfs_pointers/lfs_download_service_spec.rb2
-rw-r--r--spec/services/self_monitoring/project/create_service_spec.rb26
-rw-r--r--spec/services/web_hook_service_spec.rb34
-rw-r--r--spec/spec_helper.rb4
-rw-r--r--spec/support/shared_contexts/policies/group_policy_shared_context.rb2
-rw-r--r--spec/support/shared_examples/application_setting_examples.rb62
-rw-r--r--spec/support/shared_examples/ci_trace_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/container_repositories_shared_examples.rb58
-rw-r--r--spec/support/shared_examples/discussions_provider_shared_examples.rb15
-rw-r--r--spec/support/shared_examples/models/diff_positionable_note_shared_examples.rb52
-rw-r--r--spec/support/shared_examples/models/update_project_statistics_shared_examples.rb40
-rw-r--r--spec/support/shared_examples/relative_positioning_shared_examples.rb124
-rw-r--r--spec/support/shared_examples/url_validator_examples.rb57
-rw-r--r--spec/tasks/gitlab/mail_google_schema_whitelisting.rb27
-rw-r--r--spec/validators/public_url_validator_spec.rb24
-rw-r--r--spec/validators/system_hook_url_validator_spec.rb8
-rw-r--r--spec/views/dashboard/projects/_blank_state_admin_welcome.haml_spec.rb (renamed from spec/views/dashboard/projects/_blank_state_admin_welcome.haml.rb)0
-rw-r--r--spec/views/dashboard/projects/_nav.html.haml_spec.rb (renamed from spec/views/dashboard/projects/_nav.html.haml.rb)4
-rw-r--r--spec/views/projects/deployments/_confirm_rollback_modal_spec.html_spec.rb (renamed from spec/views/projects/deployments/_confirm_rollback_modal_spec.html.rb)0
-rw-r--r--spec/views/shared/_label_row.html.haml_spec.rb (renamed from spec/views/shared/_label_row.html.haml.rb)13
-rw-r--r--spec/views/shared/milestones/_issuable.html.haml_spec.rb (renamed from spec/views/shared/milestones/_issuable.html.haml.rb)0
-rw-r--r--spec/views/shared/milestones/_issuables.html.haml_spec.rb (renamed from spec/views/shared/milestones/_issuables.html.haml.rb)2
-rw-r--r--spec/views/shared/milestones/_top.html.haml_spec.rb (renamed from spec/views/shared/milestones/_top.html.haml.rb)1
-rw-r--r--spec/workers/build_process_worker_spec.rb26
-rw-r--r--spec/workers/namespaces/root_statistics_worker_spec.rb11
-rw-r--r--spec/workers/namespaces/schedule_aggregation_worker_spec.rb10
-rw-r--r--spec/workers/pipeline_process_worker_spec.rb11
-rw-r--r--vendor/jupyter/values.yaml4
-rw-r--r--yarn.lock42
415 files changed, 6518 insertions, 3780 deletions
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 5b39304444c..9086da10283 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -29,7 +29,6 @@ stages:
- qa
- post-test
- pages
- - post-cleanup
include:
- local: .gitlab/ci/global.gitlab-ci.yml
diff --git a/.gitlab/CODEOWNERS b/.gitlab/CODEOWNERS
index 13c8b4a8458..cb3bc544132 100644
--- a/.gitlab/CODEOWNERS
+++ b/.gitlab/CODEOWNERS
@@ -1,6 +1,6 @@
# Backend Maintainers are the default for all ruby files
-*.rb @ashmckenzie @ayufan @dbalexandre @DouweM @dzaporozhets @godfat @grzesiek @mkozono @mayra-cabrera @nick.thomas @rspeicher @rymai @reprazent @smcgivern @tkuah
-*.rake @ashmckenzie @ayufan @dbalexandre @DouweM @dzaporozhets @godfat @grzesiek @mkozono @mayra-cabrera @nick.thomas @rspeicher @rymai @reprazent @smcgivern @tkuah
+*.rb @gitlab-org/maintainers/rails-backend
+*.rake @gitlab-org/maintainers/rails-backend
# Technical writing team are the default reviewers for everything in `doc/`
/doc/ @axil @marcia @eread @mikelewis
@@ -10,11 +10,11 @@ app/assets/ @ClemMakesApps @fatihacet @filipa @iamphill @mikegreiling @timzallma
*.scss @annabeldunstone @ClemMakesApps @fatihacet @filipa @iamphill @mikegreiling @timzallmann @kushalpandya @pslaughter
# Maintainers from the Database team should review changes in `db/`
-db/ @abrandl @NikolayS
-lib/gitlab/background_migration/ @abrandl @NikolayS
-lib/gitlab/database/ @abrandl @NikolayS
-lib/gitlab/sql/ @abrandl @NikolayS
-/ee/db/ @abrandl @NikolayS
+db/ @gl-database
+lib/gitlab/background_migration/ @gl-database
+lib/gitlab/database/ @gl-database
+lib/gitlab/sql/ @gl-database
+/ee/db/ @gl-database
# Feature specific owners
/ee/lib/gitlab/code_owners/ @reprazent
diff --git a/.gitlab/ci/docs.gitlab-ci.yml b/.gitlab/ci/docs.gitlab-ci.yml
index de1110f39fa..39ae62a43c9 100644
--- a/.gitlab/ci/docs.gitlab-ci.yml
+++ b/.gitlab/ci/docs.gitlab-ci.yml
@@ -15,7 +15,7 @@ review-docs-deploy-manual:
extends:
- .review-docs
- .no-docs-and-no-qa
- stage: build
+ stage: review
script:
- gem install gitlab --no-document
- ./$SCRIPT_NAME deploy
@@ -28,20 +28,20 @@ review-docs-deploy-manual:
# Useful to preview the docs changes live.
review-docs-deploy:
<<: *review-docs
- stage: post-test
+ stage: review
script:
- gem install gitlab --no-document
- ./$SCRIPT_NAME deploy
only:
- - /(^docs[\/-].*|.*-docs$)/@gitlab-org/gitlab-ce
- - /(^docs[\/-].*|.*-docs$)/@gitlab-org/gitlab-ee
+ - /(^docs[\/-].+|.+-docs$)/@gitlab-org/gitlab-ce
+ - /(^docs[\/-].+|.+-docs$)/@gitlab-org/gitlab-ee
except:
- /(^qa[\/-].*|.*-qa$)/
# Cleanup remote environment of gitlab-docs
review-docs-cleanup:
<<: *review-docs
- stage: post-cleanup
+ stage: review
environment:
name: review-docs/$CI_COMMIT_REF_SLUG
action: stop
diff --git a/.gitlab/ci/frontend.gitlab-ci.yml b/.gitlab/ci/frontend.gitlab-ci.yml
index fe369ffec13..d2148f01441 100644
--- a/.gitlab/ci/frontend.gitlab-ci.yml
+++ b/.gitlab/ci/frontend.gitlab-ci.yml
@@ -8,7 +8,7 @@
.use-pg: &use-pg
services:
- - name: postgres:9.6.11
+ - name: postgres:9.6.14
command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off"]
- name: redis:alpine
@@ -19,7 +19,7 @@
dependencies:
- setup-test-env
services:
- - docker:stable-dind
+ - docker:19.03.0-dind
variables:
NODE_ENV: "production"
RAILS_ENV: "production"
@@ -73,7 +73,7 @@ gitlab:assets:compile pull-cache:
refs:
- master@gitlab-org/gitlab-ce
- master@gitlab-org/gitlab-ee
- - /(^docs[\/-].*|.*-docs$)/
+ - /(^docs[\/-].+|.+-docs$)/
.compile-assets-metadata:
extends: .dedicated-runner
@@ -111,7 +111,7 @@ compile-assets pull-cache:
refs:
- master@gitlab-org/gitlab-ce
- master@gitlab-org/gitlab-ee
- - /(^docs[\/-].*|.*-docs$)/
+ - /(^docs[\/-].+|.+-docs$)/
karma:
extends: .dedicated-no-docs-pull-cache-job
diff --git a/.gitlab/ci/global.gitlab-ci.yml b/.gitlab/ci/global.gitlab-ci.yml
index 4da7f404767..78ef346d417 100644
--- a/.gitlab/ci/global.gitlab-ci.yml
+++ b/.gitlab/ci/global.gitlab-ci.yml
@@ -31,12 +31,12 @@
.no-docs:
except:
refs:
- - /(^docs[\/-].*|.*-docs$)/
+ - /(^docs[\/-].+|.+-docs$)/
.no-docs-and-no-qa:
except:
refs:
- - /(^docs[\/-].*|.*-docs$)/
+ - /(^docs[\/-].+|.+-docs$)/
- /(^qa[\/-].*|.*-qa$)/
.dedicated-no-docs-pull-cache-job:
diff --git a/.gitlab/ci/qa.gitlab-ci.yml b/.gitlab/ci/qa.gitlab-ci.yml
index ee9e467886a..dcc681294d2 100644
--- a/.gitlab/ci/qa.gitlab-ci.yml
+++ b/.gitlab/ci/qa.gitlab-ci.yml
@@ -1,7 +1,6 @@
-package-and-qa:
+.package-and-qa-base:
image: ruby:2.6-alpine
stage: review # So even if review-deploy failed we can still run this
- when: manual
before_script: []
dependencies: []
cache: {}
@@ -13,5 +12,18 @@ package-and-qa:
- install_gitlab_gem
- ./scripts/trigger-build omnibus
only:
- - /.+/@gitlab-org/gitlab-ce
- - /.+/@gitlab-org/gitlab-ee
+ - branches@gitlab-org/gitlab-ce
+ - branches@gitlab-org/gitlab-ee
+
+package-and-qa:
+ extends: .package-and-qa-base
+ when: manual
+ except:
+ - /(^qa[\/-].*|.*-qa$)/
+
+package-and-qa-always:
+ extends: .package-and-qa-base
+ allow_failure: true
+ only:
+ - /(^qa[\/-].*|.*-qa$)/@gitlab-org/gitlab-ce
+ - /(^qa[\/-].*|.*-qa$)/@gitlab-org/gitlab-ee
diff --git a/.gitlab/ci/rails.gitlab-ci.yml b/.gitlab/ci/rails.gitlab-ci.yml
index 1392768127b..8a89232fdd4 100644
--- a/.gitlab/ci/rails.gitlab-ci.yml
+++ b/.gitlab/ci/rails.gitlab-ci.yml
@@ -1,12 +1,12 @@
.use-pg: &use-pg
services:
- - name: postgres:9.6.11
+ - name: postgres:9.6.14
command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off"]
- name: redis:alpine
.use-pg-10: &use-pg-10
services:
- - name: postgres:10.7
+ - name: postgres:10.9
command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off"]
- name: redis:alpine
@@ -207,7 +207,7 @@ downtime_check:
- master
- tags
- /^[\d-]+-stable(-ee)?$/
- - /(^docs[\/-].*|.*-docs$)/
+ - /(^docs[\/-].+|.+-docs$)/
- /(^qa[\/-].*|.*-qa$)/
dependencies:
- setup-test-env
@@ -223,6 +223,7 @@ ee_compat_check:
- /^security-/
- branches@gitlab-org/gitlab-ee
- branches@gitlab/gitlab-ee
+ - /(^docs[\/-].+|.+-docs$)/
retry: 0
artifacts:
name: "${CI_JOB_NAME}_${CI_COMIT_REF_NAME}_${CI_COMMIT_SHA}"
diff --git a/.gitlab/ci/review.gitlab-ci.yml b/.gitlab/ci/review.gitlab-ci.yml
index 41d52c4e095..ed6690c1023 100644
--- a/.gitlab/ci/review.gitlab-ci.yml
+++ b/.gitlab/ci/review.gitlab-ci.yml
@@ -7,7 +7,7 @@
except:
refs:
- master
- - /(^docs[\/-].*|.*-docs$)/
+ - /(^docs[\/-].+|.+-docs$)/
.review-schedules-only: &review-schedules-only
only:
@@ -20,7 +20,7 @@
except:
refs:
- tags
- - /(^docs[\/-].*|.*-docs$)/
+ - /(^docs[\/-].+|.+-docs$)/
.review-base: &review-base
extends: .dedicated-runner
@@ -35,7 +35,7 @@
<<: *review-base
image: registry.gitlab.com/gitlab-org/gitlab-build-images:gitlab-qa-alpine
services:
- - docker:stable-dind
+ - docker:19.03.0-dind
tags:
- gitlab-org
- docker
@@ -101,8 +101,7 @@ schedule:review-build-cng:
- install_tiller
- install_external_dns
- download_chart
- - deploy || display_deployment_debug
- - wait_for_review_app_to_be_accessible
+ - deploy || (display_deployment_debug && exit 1)
- add_license
artifacts:
paths: [review_app_url.txt]
@@ -117,17 +116,21 @@ schedule:review-deploy:
<<: *review-schedules-only
review-stop:
- <<: *review-base
+ <<: *review-only
+ extends: .single-script-job-dedicated-runner
+ image: registry.gitlab.com/gitlab-org/gitlab-build-images:gitlab-charts-build-base
stage: review
when: manual
allow_failure: true
variables:
- GIT_DEPTH: "1"
+ SCRIPT_NAME: review_apps/review-apps.sh
environment:
<<: *review-environment
action: stop
script:
- - source scripts/review_apps/review-apps.sh
+ - wget $CI_PROJECT_URL/raw/$CI_COMMIT_SHA/scripts/utils.sh
+ - source utils.sh
+ - source $(basename $SCRIPT_NAME)
- delete
.review-qa-base: &review-qa-base
@@ -261,6 +264,7 @@ danger-review:
except:
refs:
- master
+ - /^[\d-]+-stable(-ee)?$/
variables:
- $CI_COMMIT_REF_NAME =~ /^ce-to-ee-.*/
- $CI_COMMIT_REF_NAME =~ /.*-stable(-ee)?-prepare-.*/
diff --git a/.gitlab/ci/test-metadata.gitlab-ci.yml b/.gitlab/ci/test-metadata.gitlab-ci.yml
index 3a5735a2be9..4c97a4feb18 100644
--- a/.gitlab/ci/test-metadata.gitlab-ci.yml
+++ b/.gitlab/ci/test-metadata.gitlab-ci.yml
@@ -71,7 +71,7 @@ flaky-examples-check:
except:
refs:
- master
- - /(^docs[\/-].*|.*-docs$)/
+ - /(^docs[\/-].+|.+-docs$)/
- /(^qa[\/-].*|.*-qa$)/
artifacts:
expire_in: 30d
diff --git a/.gitlab/issue_templates/Security Release.md b/.gitlab/issue_templates/Security Release.md
index ae469d3b125..3e60274623e 100644
--- a/.gitlab/issue_templates/Security Release.md
+++ b/.gitlab/issue_templates/Security Release.md
@@ -1,7 +1,7 @@
<!--
# Read me first!
-Set the title to: `Security Release: 11.4.X, 11.3.X, and 11.2.X`
+Set the title to: `Security Release: 12.2.X, 12.1.X, and 12.0.X`
-->
## Releases tasks
@@ -12,9 +12,9 @@ Set the title to: `Security Release: 11.4.X, 11.3.X, and 11.2.X`
## Version issues:
-* 11.4.X: {release task link}
-* 11.3.X: {release task link}
-* 11.2.X: {release task link}
+* 12.2.X: {release task link}
+* 12.1.X: {release task link}
+* 12.0.X: {release task link}
## Security Issues:
@@ -34,9 +34,9 @@ Set the title to: `Security Release: 11.4.X, 11.3.X, and 11.2.X`
| Version | MR |
|---------|----|
-| 11.4 | {https://dev.gitlab.org/gitlab/gitlabhq/merge_requests/ link} |
-| 11.3 | {https://dev.gitlab.org/gitlab/gitlabhq/merge_requests/ link} |
-| 11.2 | {https://dev.gitlab.org/gitlab/gitlabhq/merge_requests/ link} |
+| 12.2 | {https://dev.gitlab.org/gitlab/gitlabhq/merge_requests/ link} |
+| 12.1 | {https://dev.gitlab.org/gitlab/gitlabhq/merge_requests/ link} |
+| 12.0 | {https://dev.gitlab.org/gitlab/gitlabhq/merge_requests/ link} |
| master | {https://dev.gitlab.org/gitlab/gitlabhq/merge_requests/ link} |
@@ -48,9 +48,9 @@ Set the title to: `Security Release: 11.4.X, 11.3.X, and 11.2.X`
| Version | MR |
|---------|----|
-| 11.4| {https://dev.gitlab.org/gitlab/gitlab-ee/merge_requests/ link} |
-| 11.3 | {https://dev.gitlab.org/gitlab/gitlab-ee/merge_requests/ link} |
-| 11.2 | {https://dev.gitlab.org/gitlab/gitlab-ee/merge_requests/ link} |
+| 12.2 | {https://dev.gitlab.org/gitlab/gitlab-ee/merge_requests/ link} |
+| 12.1 | {https://dev.gitlab.org/gitlab/gitlab-ee/merge_requests/ link} |
+| 12.0 | {https://dev.gitlab.org/gitlab/gitlab-ee/merge_requests/ link} |
| master | {https://dev.gitlab.org/gitlab/gitlab-ee/merge_requests/ link} |
diff --git a/.gitlab/issue_templates/Security developer workflow.md b/.gitlab/issue_templates/Security developer workflow.md
index 7857afb66c2..3e634de4f0c 100644
--- a/.gitlab/issue_templates/Security developer workflow.md
+++ b/.gitlab/issue_templates/Security developer workflow.md
@@ -17,7 +17,7 @@ Set the title to: `Description of the original issue`
#### Backports
-- [ ] Once the MR is ready to be merged, create MRs targeting the last 3 releases, plus the current RC if between the 7th and 22nd of the month.
+- [ ] Once the MR is ready to be merged, create MRs targeting the latest 3 stable branches
- [ ] At this point, it might be easy to squash the commits from the MR into one
- You can use the script `bin/secpick` instead of the following steps, to help you cherry-picking. See the [secpick documentation]
- [ ] Create each MR targeting the stable branch `X-Y-stable`, using the "Security Release" merge request template.
diff --git a/.mdlrc.style b/.mdlrc.style
index 0ca3611df0b..36fbba3543b 100644
--- a/.mdlrc.style
+++ b/.mdlrc.style
@@ -5,12 +5,19 @@
# for more detailed information on the rules and styles.
rule "MD001"
+rule "MD002"
rule "MD003", :style => :atx
+rule "MD006"
rule "MD011"
+rule "MD019"
+rule "MD022"
rule "MD023"
+rule "MD025"
+rule "MD028"
rule "MD032"
rule "MD034"
rule "MD037"
+rule "MD038"
# Should not be used currently:
diff --git a/.prettierignore b/.prettierignore
index dc9e572ab54..c9b945ac96d 100644
--- a/.prettierignore
+++ b/.prettierignore
@@ -7,3 +7,4 @@
# ignore stylesheets for now as this clashes with our linter
*.css
*.scss
+*.md
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 15b55f01c32..0752708d5e8 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,19 @@
documentation](doc/development/changelog.md) for instructions on adding your own
entry.
+## 12.1.4
+
+### Fixed (3 changes, 1 of them is from the community)
+
+- Properly translate term in projects list. !30958
+- Add exclusive lease to mergeability check process. !31082
+- Fix Docker in Docker (DIND) listen port behavior change by adding DOCKER_TLS_CERTDIR in CI job templates. !31201 (Cameron Boulton)
+
+### Performance (1 change)
+
+- Improve job log rendering performance. !31262
+
+
## 12.1.3
### Fixed (11 changes)
diff --git a/app/assets/javascripts/clusters/clusters_bundle.js b/app/assets/javascripts/clusters/clusters_bundle.js
index aacfa0d87e6..5f5c8044b49 100644
--- a/app/assets/javascripts/clusters/clusters_bundle.js
+++ b/app/assets/javascripts/clusters/clusters_bundle.js
@@ -48,6 +48,9 @@ export default class Clusters {
} = document.querySelector('.js-edit-cluster-form').dataset;
this.clusterId = clusterId;
+ this.clusterNewlyCreatedKey = `cluster_${this.clusterId}_newly_created`;
+ this.clusterBannerDismissedKey = `cluster_${this.clusterId}_banner_dismissed`;
+
this.store = new ClustersStore();
this.store.setHelpPaths(helpPath, ingressHelpPath, ingressDnsHelpPath);
this.store.setManagePrometheusPath(managePrometheusPath);
@@ -81,18 +84,19 @@ export default class Clusters {
this.showTokenButton = document.querySelector('.js-show-cluster-token');
this.tokenField = document.querySelector('.js-cluster-token');
this.ingressDomainHelpText = document.querySelector('.js-ingress-domain-help-text');
- this.ingressDomainSnippet = this.ingressDomainHelpText.querySelector(
- '.js-ingress-domain-snippet',
- );
+ this.ingressDomainSnippet =
+ this.ingressDomainHelpText &&
+ this.ingressDomainHelpText.querySelector('.js-ingress-domain-snippet');
Clusters.initDismissableCallout();
initSettingsPanels();
- setupToggleButtons(document.querySelector('.js-cluster-enable-toggle-area'));
+ const toggleButtonsContainer = document.querySelector('.js-cluster-enable-toggle-area');
+ if (toggleButtonsContainer) {
+ setupToggleButtons(toggleButtonsContainer);
+ }
this.initApplications(clusterType);
- if (this.store.state.status !== 'created') {
- this.updateContainer(null, this.store.state.status, this.store.state.statusReason);
- }
+ this.updateContainer(null, this.store.state.status, this.store.state.statusReason);
this.addListeners();
if (statusPath) {
@@ -247,35 +251,56 @@ export default class Clusters {
setBannerDismissedState(status, isDismissed) {
if (AccessorUtilities.isLocalStorageAccessSafe()) {
- window.localStorage.setItem(
- `cluster_${this.clusterId}_banner_dismissed`,
- `${status}_${isDismissed}`,
- );
+ window.localStorage.setItem(this.clusterBannerDismissedKey, `${status}_${isDismissed}`);
}
}
isBannerDismissed(status) {
let bannerState;
if (AccessorUtilities.isLocalStorageAccessSafe()) {
- bannerState = window.localStorage.getItem(`cluster_${this.clusterId}_banner_dismissed`);
+ bannerState = window.localStorage.getItem(this.clusterBannerDismissedKey);
}
return bannerState === `${status}_true`;
}
- updateContainer(prevStatus, status, error) {
- this.hideAll();
+ setClusterNewlyCreated(state) {
+ if (AccessorUtilities.isLocalStorageAccessSafe()) {
+ window.localStorage.setItem(this.clusterNewlyCreatedKey, Boolean(state));
+ }
+ }
+
+ isClusterNewlyCreated() {
+ // once this is true, it will always be true for a given page load
+ if (!this.isNewlyCreated) {
+ let newlyCreated;
+ if (AccessorUtilities.isLocalStorageAccessSafe()) {
+ newlyCreated = window.localStorage.getItem(this.clusterNewlyCreatedKey);
+ }
+
+ this.isNewlyCreated = newlyCreated === 'true';
+ }
+ return this.isNewlyCreated;
+ }
- if (this.isBannerDismissed(status)) {
+ updateContainer(prevStatus, status, error) {
+ if (status !== 'created' && this.isBannerDismissed(status)) {
return;
}
this.setBannerDismissedState(status, false);
- // We poll all the time but only want the `created` banner to show when newly created
- if (this.store.state.status !== 'created' || prevStatus !== this.store.state.status) {
+ if (prevStatus !== status) {
+ this.hideAll();
+
switch (status) {
case 'created':
- this.successContainer.classList.remove('hidden');
+ if (this.isClusterNewlyCreated()) {
+ this.setClusterNewlyCreated(false);
+ this.successContainer.classList.remove('hidden');
+ } else if (prevStatus) {
+ this.setClusterNewlyCreated(true);
+ window.location.reload();
+ }
break;
case 'errored':
this.errorContainer.classList.remove('hidden');
@@ -292,7 +317,6 @@ export default class Clusters {
this.creatingContainer.classList.remove('hidden');
break;
default:
- this.hideAll();
}
}
}
diff --git a/app/assets/javascripts/clusters/components/uninstall_application_confirmation_modal.vue b/app/assets/javascripts/clusters/components/uninstall_application_confirmation_modal.vue
index 2ff6d5e32e2..4f60e543666 100644
--- a/app/assets/javascripts/clusters/components/uninstall_application_confirmation_modal.vue
+++ b/app/assets/javascripts/clusters/components/uninstall_application_confirmation_modal.vue
@@ -2,14 +2,17 @@
import { GlModal } from '@gitlab/ui';
import { sprintf, s__ } from '~/locale';
import trackUninstallButtonClickMixin from 'ee_else_ce/clusters/mixins/track_uninstall_button_click';
-import { INGRESS, CERT_MANAGER, PROMETHEUS, RUNNER, KNATIVE, JUPYTER } from '../constants';
+import { HELM, INGRESS, CERT_MANAGER, PROMETHEUS, RUNNER, KNATIVE, JUPYTER } from '../constants';
const CUSTOM_APP_WARNING_TEXT = {
+ [HELM]: s__(
+ 'ClusterIntegration|The associated Tiller pod will be deleted and cannot be restored.',
+ ),
[INGRESS]: s__(
'ClusterIntegration|The associated load balancer and IP will be deleted and cannot be restored.',
),
[CERT_MANAGER]: s__(
- 'ClusterIntegration|The associated certifcate will be deleted and cannot be restored.',
+ 'ClusterIntegration|The associated private key will be deleted and cannot be restored.',
),
[PROMETHEUS]: s__('ClusterIntegration|All data will be deleted and cannot be restored.'),
[RUNNER]: s__('ClusterIntegration|Any running pipelines will be canceled.'),
diff --git a/app/assets/javascripts/clusters/stores/clusters_store.js b/app/assets/javascripts/clusters/stores/clusters_store.js
index f64f0ca616f..ada5a49e246 100644
--- a/app/assets/javascripts/clusters/stores/clusters_store.js
+++ b/app/assets/javascripts/clusters/stores/clusters_store.js
@@ -171,6 +171,7 @@ export default class ClusterStore {
this.state.applications.cert_manager.email || serverAppEntry.email;
} else if (appId === JUPYTER) {
this.state.applications.jupyter.hostname =
+ this.state.applications.jupyter.hostname ||
serverAppEntry.hostname ||
(this.state.applications.ingress.externalIp
? `jupyter.${this.state.applications.ingress.externalIp}.nip.io`
diff --git a/app/assets/javascripts/commons/polyfills.js b/app/assets/javascripts/commons/polyfills.js
index daa941a63cd..7a6ad3dc771 100644
--- a/app/assets/javascripts/commons/polyfills.js
+++ b/app/assets/javascripts/commons/polyfills.js
@@ -12,7 +12,6 @@ import 'core-js/es/promise/finally';
import 'core-js/es/string/code-point-at';
import 'core-js/es/string/from-code-point';
import 'core-js/es/string/includes';
-import 'core-js/es/string/repeat';
import 'core-js/es/string/starts-with';
import 'core-js/es/string/ends-with';
import 'core-js/es/symbol';
diff --git a/app/assets/javascripts/gl_form.js b/app/assets/javascripts/gl_form.js
index b98fe9f6ce2..a66555838ba 100644
--- a/app/assets/javascripts/gl_form.js
+++ b/app/assets/javascripts/gl_form.js
@@ -3,16 +3,9 @@ import autosize from 'autosize';
import GfmAutoComplete, { defaultAutocompleteConfig } from 'ee_else_ce/gfm_auto_complete';
import dropzoneInput from './dropzone_input';
import { addMarkdownListeners, removeMarkdownListeners } from './lib/utils/text_markdown';
-import IndentHelper from './helpers/indent_helper';
-import { keystroke } from './lib/utils/common_utils';
-import * as keys from './lib/utils/keycodes';
-import UndoStack from './lib/utils/undo_stack';
export default class GLForm {
constructor(form, enableGFM = {}) {
- this.handleKeyShortcuts = this.handleKeyShortcuts.bind(this);
- this.setState = this.setState.bind(this);
-
this.form = form;
this.textarea = this.form.find('textarea.js-gfm-input');
this.enableGFM = Object.assign({}, defaultAutocompleteConfig, enableGFM);
@@ -23,10 +16,6 @@ export default class GLForm {
this.enableGFM[item] = Boolean(dataSources[item]);
}
});
-
- this.undoStack = new UndoStack();
- this.indentHelper = new IndentHelper(this.textarea[0]);
-
// Before we start, we should clean up any previous data for this form
this.destroy();
// Set up the form
@@ -96,84 +85,9 @@ export default class GLForm {
clearEventListeners() {
this.textarea.off('focus');
this.textarea.off('blur');
- this.textarea.off('keydown');
removeMarkdownListeners(this.form);
}
- setState(state) {
- const selection = [this.textarea[0].selectionStart, this.textarea[0].selectionEnd];
- this.textarea.val(state);
- this.textarea[0].setSelectionRange(selection[0], selection[1]);
- }
-
- /*
- Handle keypresses for a custom undo/redo stack.
- We need this because the toolbar buttons and indentation helpers mess with the browser's
- native undo/redo capability.
- */
- handleUndo(event) {
- const content = this.textarea.val();
- const { selectionStart, selectionEnd } = this.textarea[0];
- const stack = this.undoStack;
-
- if (stack.isEmpty()) {
- // ==== Save initial state in undo history ====
- stack.save(content);
- }
-
- if (keystroke(event, keys.Z_KEY_CODE, 'l')) {
- // ==== Undo ====
- event.preventDefault();
- stack.save(content);
- if (stack.canUndo()) {
- this.setState(stack.undo());
- }
- } else if (keystroke(event, keys.Z_KEY_CODE, 'ls') || keystroke(event, keys.Y_KEY_CODE, 'l')) {
- // ==== Redo ====
- event.preventDefault();
- if (stack.canRedo()) {
- this.setState(stack.redo());
- }
- } else if (
- keystroke(event, keys.SPACE_KEY_CODE) ||
- keystroke(event, keys.ENTER_KEY_CODE) ||
- selectionStart !== selectionEnd
- ) {
- // ==== Save after finishing a word or before deleting a large selection ====
- stack.save(content);
- } else if (content === '') {
- // ==== Save after deleting everything ====
- stack.save('');
- } else {
- // ==== Save after 1 second of inactivity ====
- stack.scheduleSave(content);
- }
- }
-
- handleIndent(event) {
- if (keystroke(event, keys.LEFT_BRACKET_KEY_CODE, 'l')) {
- // ==== Unindent selected lines ====
- event.preventDefault();
- this.indentHelper.unindent();
- } else if (keystroke(event, keys.RIGHT_BRACKET_KEY_CODE, 'l')) {
- // ==== Indent selected lines ====
- event.preventDefault();
- this.indentHelper.indent();
- } else if (keystroke(event, keys.ENTER_KEY_CODE)) {
- // ==== Auto-indent new lines ====
- event.preventDefault();
- this.indentHelper.newline();
- } else if (keystroke(event, keys.BACKSPACE_KEY_CODE)) {
- // ==== Auto-delete indents at the beginning of the line ====
- this.indentHelper.backspace(event);
- }
- }
-
- handleKeyShortcuts(event) {
- this.handleIndent(event);
- this.handleUndo(event);
- }
-
addEventListeners() {
this.textarea.on('focus', function focusTextArea() {
$(this)
@@ -185,6 +99,5 @@ export default class GLForm {
.closest('.md-area')
.removeClass('is-focused');
});
- this.textarea.on('keydown', e => this.handleKeyShortcuts(e.originalEvent));
}
}
diff --git a/app/assets/javascripts/helpers/indent_helper.js b/app/assets/javascripts/helpers/indent_helper.js
deleted file mode 100644
index a8815fac04e..00000000000
--- a/app/assets/javascripts/helpers/indent_helper.js
+++ /dev/null
@@ -1,182 +0,0 @@
-const INDENT_SEQUENCE = ' ';
-
-function countLeftSpaces(text) {
- const i = text.split('').findIndex(c => c !== ' ');
- return i === -1 ? text.length : i;
-}
-
-/**
- * IndentHelper provides methods that allow manual and smart indentation in
- * textareas. It supports line indent/unindent, selection indent/unindent,
- * auto indentation on newlines, and smart deletion of indents with backspace.
- */
-export default class IndentHelper {
- /**
- * Creates a new IndentHelper and binds it to the given `textarea`. You can provide a custom indent sequence in the second parameter, but the `newline` and `backspace` operations may work funny if the indent sequence isn't spaces only.
- */
- constructor(textarea, indentSequence = INDENT_SEQUENCE) {
- this.element = textarea;
- this.seq = indentSequence;
- }
-
- getSelection() {
- return { start: this.element.selectionStart, end: this.element.selectionEnd };
- }
-
- isRangeSelection() {
- return this.element.selectionStart !== this.element.selectionEnd;
- }
-
- /**
- * Re-implementation of textarea's setRangeText method, because IE/Edge don't support it.
- *
- * @see https://html.spec.whatwg.org/multipage/form-control-infrastructure.html#dom-textarea%2Finput-setrangetext
- */
- setRangeText(replacement, start, end, selectMode) {
- // Disable eslint to remain as faithful as possible to the above linked spec
- /* eslint-disable no-param-reassign, no-case-declarations */
- const text = this.element.value;
-
- if (start > end) {
- throw new RangeError('setRangeText: start index must be less than or equal to end index');
- }
-
- // Clamp to [0, len]
- start = Math.max(0, Math.min(start, text.length));
- end = Math.max(0, Math.min(end, text.length));
-
- let selection = { start: this.element.selectionStart, end: this.element.selectionEnd };
-
- this.element.value = text.slice(0, start) + replacement + text.slice(end);
-
- const newLength = replacement.length;
- const newEnd = start + newLength;
-
- switch (selectMode) {
- case 'select':
- selection = { start, newEnd };
- break;
- case 'start':
- selection = { start, end: start };
- break;
- case 'end':
- selection = { start: newEnd, end: newEnd };
- break;
- case 'preserve':
- default:
- const oldLength = end - start;
- const delta = newLength - oldLength;
- if (selection.start > end) {
- selection.start += delta;
- } else if (selection.start > start) {
- selection.start = start;
- }
- if (selection.end > end) {
- selection.end += delta;
- } else if (selection.end > start) {
- selection.end = newEnd;
- }
- }
-
- this.element.setSelectionRange(selection.start, selection.end);
-
- /* eslint-enable no-param-reassign, no-case-declarations */
- }
-
- /**
- * Returns an array of lines in the textarea, with information about their
- * start/end offsets and whether they are included in the current selection.
- */
- splitLines() {
- const { start, end } = this.getSelection();
-
- const lines = this.element.value.split('\n');
- let textStart = 0;
- const lineObjects = [];
- lines.forEach(line => {
- const lineObj = {
- text: line,
- start: textStart,
- end: textStart + line.length,
- };
- lineObj.inSelection = lineObj.start <= end && lineObj.end >= start;
- lineObjects.push(lineObj);
- textStart += line.length + 1;
- });
- return lineObjects;
- }
-
- /**
- * Indents selected lines by one level.
- */
- indent() {
- const { start } = this.getSelection();
-
- const selectedLines = this.splitLines().filter(line => line.inSelection);
- if (!this.isRangeSelection() && start === selectedLines[0].start) {
- // Special case: if cursor is at the beginning of the line, move it one
- // indent right.
- const line = selectedLines[0];
- this.setRangeText(this.seq, line.start, line.start, 'end');
- } else {
- selectedLines.reverse();
- selectedLines.forEach(line => {
- this.setRangeText(INDENT_SEQUENCE, line.start, line.start, 'preserve');
- });
- }
- }
-
- /**
- * Unindents selected lines by one level.
- */
- unindent() {
- const lines = this.splitLines().filter(line => line.inSelection);
- lines.reverse();
- lines
- .filter(line => line.text.startsWith(this.seq))
- .forEach(line => {
- this.setRangeText('', line.start, line.start + this.seq.length, 'preserve');
- });
- }
-
- /**
- * Emulates a newline keypress, automatically indenting the new line.
- */
- newline() {
- const { start, end } = this.getSelection();
-
- if (this.isRangeSelection()) {
- // Manually kill the selection before calculating the indent
- this.setRangeText('', start, end, 'start');
- }
-
- // Auto-indent the next line
- const currentLine = this.splitLines().find(line => line.end >= start);
- const spaces = countLeftSpaces(currentLine.text);
- this.setRangeText(`\n${' '.repeat(spaces)}`, start, start, 'end');
- }
-
- /**
- * If the cursor is positioned at the end of a line's leading indents,
- * emulates a backspace keypress by deleting a single level of indents.
- * @param event The DOM KeyboardEvent that triggers this action, or null.
- */
- backspace(event) {
- const { start } = this.getSelection();
-
- // If the cursor is at the end of leading indents, delete an indent.
- if (!this.isRangeSelection()) {
- const currentLine = this.splitLines().find(line => line.end >= start);
- const cursorPosition = start - currentLine.start;
- if (countLeftSpaces(currentLine.text) === cursorPosition && cursorPosition > 0) {
- if (event) event.preventDefault();
-
- let spacesToDelete = cursorPosition % this.seq.length;
- if (spacesToDelete === 0) {
- spacesToDelete = this.seq.length;
- }
- this.setRangeText('', start - spacesToDelete, start, 'start');
- }
- }
- }
-}
diff --git a/app/assets/javascripts/jobs/components/job_app.vue b/app/assets/javascripts/jobs/components/job_app.vue
index ef9fb6d08d1..8da87f424c4 100644
--- a/app/assets/javascripts/jobs/components/job_app.vue
+++ b/app/assets/javascripts/jobs/components/job_app.vue
@@ -73,6 +73,10 @@ export default {
type: String,
required: true,
},
+ projectPath: {
+ type: String,
+ required: true,
+ },
logState: {
type: String,
required: true,
@@ -258,6 +262,7 @@ export default {
:quota-used="job.runners.quota.used"
:quota-limit="job.runners.quota.limit"
:runners-path="runnerHelpUrl"
+ :project-path="projectPath"
/>
<environments-block
diff --git a/app/assets/javascripts/jobs/components/sidebar.vue b/app/assets/javascripts/jobs/components/sidebar.vue
index e9704584c9f..06477477aad 100644
--- a/app/assets/javascripts/jobs/components/sidebar.vue
+++ b/app/assets/javascripts/jobs/components/sidebar.vue
@@ -73,15 +73,14 @@ export default {
},
renderBlock() {
return (
- this.job.merge_request ||
this.job.duration ||
- this.job.finished_data ||
+ this.job.finished_at ||
this.job.erased_at ||
this.job.queued ||
+ this.hasTimeout ||
this.job.runner ||
this.job.coverage ||
- this.job.tags.length ||
- this.job.cancel_path
+ this.job.tags.length
);
},
hasArtifact() {
@@ -160,7 +159,7 @@ export default {
</gl-link>
</div>
- <div :class="{ block: renderBlock }">
+ <div v-if="renderBlock" class="block">
<detail-row
v-if="job.duration"
:value="duration"
diff --git a/app/assets/javascripts/jobs/index.js b/app/assets/javascripts/jobs/index.js
index 06514fcce1d..add7f9b710a 100644
--- a/app/assets/javascripts/jobs/index.js
+++ b/app/assets/javascripts/jobs/index.js
@@ -10,16 +10,29 @@ export default () => {
JobApp,
},
render(createElement) {
+ const {
+ deploymentHelpUrl,
+ runnerHelpUrl,
+ runnerSettingsUrl,
+ variablesSettingsUrl,
+ endpoint,
+ pagePath,
+ logState,
+ buildStatus,
+ projectPath,
+ } = element.dataset;
+
return createElement('job-app', {
props: {
- deploymentHelpUrl: element.dataset.deploymentHelpUrl,
- runnerHelpUrl: element.dataset.runnerHelpUrl,
- runnerSettingsUrl: element.dataset.runnerSettingsUrl,
- variablesSettingsUrl: element.dataset.variablesSettingsUrl,
- endpoint: element.dataset.endpoint,
- pagePath: element.dataset.buildOptionsPagePath,
- logState: element.dataset.buildOptionsLogState,
- buildStatus: element.dataset.buildOptionsBuildStatus,
+ deploymentHelpUrl,
+ runnerHelpUrl,
+ runnerSettingsUrl,
+ variablesSettingsUrl,
+ endpoint,
+ pagePath,
+ logState,
+ buildStatus,
+ projectPath,
},
});
},
diff --git a/app/assets/javascripts/lib/utils/common_utils.js b/app/assets/javascripts/lib/utils/common_utils.js
index 1a94aee2398..5e90893b684 100644
--- a/app/assets/javascripts/lib/utils/common_utils.js
+++ b/app/assets/javascripts/lib/utils/common_utils.js
@@ -203,71 +203,6 @@ export const isMetaKey = e => e.metaKey || e.ctrlKey || e.altKey || e.shiftKey;
// 3) Middle-click or Mouse Wheel Click (e.which is 2)
export const isMetaClick = e => e.metaKey || e.ctrlKey || e.which === 2;
-export const getPlatformLeaderKey = () => {
- // eslint-disable-next-line @gitlab/i18n/no-non-i18n-strings
- if (navigator && navigator.platform && navigator.platform.startsWith('Mac')) {
- return 'meta';
- }
- return 'ctrl';
-};
-
-export const getPlatformLeaderKeyHTML = () => {
- if (getPlatformLeaderKey() === 'meta') {
- return '&#8984;';
- }
- // eslint-disable-next-line @gitlab/i18n/no-non-i18n-strings
- return 'Ctrl';
-};
-
-export const isPlatformLeaderKey = e => {
- if (getPlatformLeaderKey() === 'meta') {
- return Boolean(e.metaKey);
- }
- return Boolean(e.ctrlKey);
-};
-
-/**
- * Tests if a KeyboardEvent corresponds exactly to a keystroke.
- *
- * This function avoids hacking around an old version of Mousetrap, which we ship at the moment. It should be removed after we upgrade to the newest Mousetrap. See:
- * - https://gitlab.com/gitlab-org/gitlab-ce/issues/63182
- * - https://gitlab.com/gitlab-org/gitlab-ce/issues/64246
- *
- * @example
- * // Matches the enter key with exactly zero modifiers
- * keystroke(event, 13)
- *
- * @example
- * // Matches Control-Shift-Z
- * keystroke(event, 90, 'cs')
- *
- * @param e The KeyboardEvent to test.
- * @param keyCode The key code of the key to test. Why keycodes? IE/Edge don't support the more convenient `key` and `code` properties.
- * @param modifiers A string of modifiers keys. Each modifier key is represented by one character. The set of pressed modifier keys must match the given string exactly. Available options are 'a' for Alt/Option, 'c' for Control, 'm' for Meta/Command, 's' for Shift, and 'l' for the leader key (Meta on MacOS and Control otherwise).
- * @returns {boolean} True if the KeyboardEvent corresponds to the given keystroke.
- */
-export const keystroke = (e, keyCode, modifiers = '') => {
- if (!e || !keyCode) {
- return false;
- }
-
- const leader = getPlatformLeaderKey();
- const mods = modifiers.toLowerCase().replace('l', leader.charAt(0));
-
- // Match depressed modifier keys
- if (
- e.altKey !== mods.includes('a') ||
- e.ctrlKey !== mods.includes('c') ||
- e.metaKey !== mods.includes('m') ||
- e.shiftKey !== mods.includes('s')
- ) {
- return false;
- }
-
- // Match the depressed key
- return keyCode === (e.keyCode || e.which);
-};
-
export const contentTop = () => {
const perfBar = $('#js-peek').outerHeight() || 0;
const mrTabsHeight = $('.merge-request-tabs').outerHeight() || 0;
diff --git a/app/assets/javascripts/lib/utils/keycodes.js b/app/assets/javascripts/lib/utils/keycodes.js
index e24fcf47d71..5e0f9b612a2 100644
--- a/app/assets/javascripts/lib/utils/keycodes.js
+++ b/app/assets/javascripts/lib/utils/keycodes.js
@@ -1,10 +1,4 @@
-export const BACKSPACE_KEY_CODE = 8;
-export const ENTER_KEY_CODE = 13;
-export const ESC_KEY_CODE = 27;
-export const SPACE_KEY_CODE = 32;
export const UP_KEY_CODE = 38;
export const DOWN_KEY_CODE = 40;
-export const Y_KEY_CODE = 89;
-export const Z_KEY_CODE = 90;
-export const LEFT_BRACKET_KEY_CODE = 219;
-export const RIGHT_BRACKET_KEY_CODE = 221;
+export const ENTER_KEY_CODE = 13;
+export const ESC_KEY_CODE = 27;
diff --git a/app/assets/javascripts/lib/utils/undo_stack.js b/app/assets/javascripts/lib/utils/undo_stack.js
deleted file mode 100644
index 6cfdc2a0a0f..00000000000
--- a/app/assets/javascripts/lib/utils/undo_stack.js
+++ /dev/null
@@ -1,105 +0,0 @@
-/**
- * UndoStack provides a custom implementation of an undo/redo engine. It was originally written for GitLab's Markdown editor (`gl_form.js`), whose rich text editing capabilities broke native browser undo/redo behaviour.
- *
- * UndoStack supports predictable undos/redos, debounced saves, maximum history length, and duplicate detection.
- *
- * Usage:
- * - `stack = new UndoStack();`
- * - Saves a state to the stack with `stack.save(state)`.
- * - Get the current state with `stack.current()`.
- * - Revert to the previous state with `stack.undo()`.
- * - Redo a previous undo with `stack.redo()`;
- * - Queue a future save with `stack.scheduleSave(state, delay)`. Useful for text editors.
- * - See the full undo history in `stack.history`.
- */
-export default class UndoStack {
- constructor(maxLength = 1000) {
- this.clear();
- this.maxLength = maxLength;
-
- // If you're storing reference-types in the undo stack, you might want to
- // reassign this property to some deep-equals function.
- this.comparator = (a, b) => a === b;
- }
-
- current() {
- if (this.cursor === -1) {
- return undefined;
- }
- return this.history[this.cursor];
- }
-
- isEmpty() {
- return this.history.length === 0;
- }
-
- clear() {
- this.clearPending();
- this.history = [];
- this.cursor = -1;
- }
-
- save(state) {
- this.clearPending();
- if (this.comparator(state, this.current())) {
- // Don't save state if it's the same as the current state
- return;
- }
-
- this.history.length = this.cursor + 1;
- this.history.push(state);
- this.cursor += 1;
-
- if (this.history.length > this.maxLength) {
- this.history.shift();
- this.cursor -= 1;
- }
- }
-
- scheduleSave(state, delay = 1000) {
- this.clearPending();
- this.pendingState = state;
- this.timeout = setTimeout(this.saveNow.bind(this), delay);
- }
-
- saveNow() {
- // Persists scheduled saves immediately
- this.save(this.pendingState);
- this.clearPending();
- }
-
- clearPending() {
- // Cancels any scheduled saves
- if (this.timeout) {
- clearTimeout(this.timeout);
- delete this.timeout;
- delete this.pendingState;
- }
- }
-
- canUndo() {
- return this.cursor > 0;
- }
-
- undo() {
- this.clearPending();
- if (!this.canUndo()) {
- return undefined;
- }
- this.cursor -= 1;
- return this.history[this.cursor];
- }
-
- canRedo() {
- return this.cursor >= 0 && this.cursor < this.history.length - 1;
- }
-
- redo() {
- this.clearPending();
- if (!this.canRedo()) {
- return undefined;
- }
- this.cursor += 1;
- return this.history[this.cursor];
- }
-}
diff --git a/app/assets/javascripts/manual_ordering.js b/app/assets/javascripts/manual_ordering.js
index 012d1e70410..29a0e5a904a 100644
--- a/app/assets/javascripts/manual_ordering.js
+++ b/app/assets/javascripts/manual_ordering.js
@@ -21,7 +21,7 @@ const updateIssue = (url, issueList, { move_before_id, move_after_id }) =>
const initManualOrdering = () => {
const issueList = document.querySelector('.manual-ordering');
- if (!issueList || !(gon.features && gon.features.manualSorting) || !(gon.current_user_id > 0)) {
+ if (!issueList || !(gon.current_user_id > 0)) {
return;
}
diff --git a/app/assets/javascripts/notes/stores/utils.js b/app/assets/javascripts/notes/stores/utils.js
index ed4cef4a917..97dcd54fe88 100644
--- a/app/assets/javascripts/notes/stores/utils.js
+++ b/app/assets/javascripts/notes/stores/utils.js
@@ -21,7 +21,7 @@ export const getQuickActionText = note => {
text = __('Applying multiple commands');
} else {
const commandDescription = executedCommands[0].description.toLowerCase();
- text = sprintf(__('Applying command to %{commandDescription}', { commandDescription }));
+ text = sprintf(__('Applying command to %{commandDescription}'), { commandDescription });
}
}
diff --git a/app/assets/javascripts/operation_settings/components/external_dashboard.vue b/app/assets/javascripts/operation_settings/components/external_dashboard.vue
index ed518611d0b..3c5de189d51 100644
--- a/app/assets/javascripts/operation_settings/components/external_dashboard.vue
+++ b/app/assets/javascripts/operation_settings/components/external_dashboard.vue
@@ -50,9 +50,11 @@ export default {
<form>
<gl-form-group
:label="s__('ExternalMetrics|Full dashboard URL')"
+ label-for="full-dashboard-url"
:description="s__('ExternalMetrics|Enter the URL of the dashboard you want to link to')"
>
<gl-form-input
+ id="full-dashboard-url"
v-model="userDashboardUrl"
placeholder="https://my-org.gitlab.io/my-dashboards"
@keydown.enter.native.prevent="updateExternalDashboardUrl"
diff --git a/app/assets/javascripts/performance_bar/components/detailed_metric.vue b/app/assets/javascripts/performance_bar/components/detailed_metric.vue
index 5bc1d5e0533..73524827c5d 100644
--- a/app/assets/javascripts/performance_bar/components/detailed_metric.vue
+++ b/app/assets/javascripts/performance_bar/components/detailed_metric.vue
@@ -44,7 +44,7 @@ export default {
</script>
<template>
<div
- v-if="currentRequest.details"
+ v-if="currentRequest.details && metricDetails"
:id="`peek-view-${metric}`"
class="view qa-performance-bar-detailed-metric"
>
diff --git a/app/assets/javascripts/persistent_user_callout.js b/app/assets/javascripts/persistent_user_callout.js
index 4a08e158f6b..8d6a3781048 100644
--- a/app/assets/javascripts/persistent_user_callout.js
+++ b/app/assets/javascripts/persistent_user_callout.js
@@ -1,13 +1,17 @@
+import { parseBoolean } from './lib/utils/common_utils';
import axios from './lib/utils/axios_utils';
import { __ } from './locale';
import Flash from './flash';
+const DEFERRED_LINK_CLASS = 'deferred-link';
+
export default class PersistentUserCallout {
constructor(container) {
- const { dismissEndpoint, featureId } = container.dataset;
+ const { dismissEndpoint, featureId, deferLinks } = container.dataset;
this.container = container;
this.dismissEndpoint = dismissEndpoint;
this.featureId = featureId;
+ this.deferLinks = parseBoolean(deferLinks);
this.init();
}
@@ -15,9 +19,21 @@ export default class PersistentUserCallout {
init() {
const closeButton = this.container.querySelector('.js-close');
closeButton.addEventListener('click', event => this.dismiss(event));
+
+ if (this.deferLinks) {
+ this.container.addEventListener('click', event => {
+ const isDeferredLink = event.target.classList.contains(DEFERRED_LINK_CLASS);
+
+ if (isDeferredLink) {
+ const { href, target } = event.target;
+
+ this.dismiss(event, { href, target });
+ }
+ });
+ }
}
- dismiss(event) {
+ dismiss(event, deferredLinkOptions = null) {
event.preventDefault();
axios
@@ -26,6 +42,11 @@ export default class PersistentUserCallout {
})
.then(() => {
this.container.remove();
+
+ if (deferredLinkOptions) {
+ const { href, target } = deferredLinkOptions;
+ window.open(href, target);
+ }
})
.catch(() => {
Flash(__('An error occurred while dismissing the alert. Refresh the page and try again.'));
diff --git a/app/assets/javascripts/privacy_policy_update_callout.js b/app/assets/javascripts/privacy_policy_update_callout.js
new file mode 100644
index 00000000000..126b1ee1132
--- /dev/null
+++ b/app/assets/javascripts/privacy_policy_update_callout.js
@@ -0,0 +1,8 @@
+import PersistentUserCallout from '~/persistent_user_callout';
+
+function initPrivacyPolicyUpdateCallout() {
+ const callout = document.querySelector('.privacy-policy-update-64341');
+ PersistentUserCallout.factory(callout);
+}
+
+export default initPrivacyPolicyUpdateCallout;
diff --git a/app/assets/javascripts/projects/gke_cluster_namespace/index.js b/app/assets/javascripts/projects/gke_cluster_namespace/index.js
index 288740203ad..0ec4d8807b0 100644
--- a/app/assets/javascripts/projects/gke_cluster_namespace/index.js
+++ b/app/assets/javascripts/projects/gke_cluster_namespace/index.js
@@ -28,8 +28,10 @@ const setState = glManagedCheckbox => {
const initGkeNamespace = () => {
const glManagedCheckbox = document.querySelector('.js-gl-managed');
- setState(glManagedCheckbox); // this is needed in order to set the initial state
- glManagedCheckbox.addEventListener('change', () => setState(glManagedCheckbox));
+ if (glManagedCheckbox) {
+ setState(glManagedCheckbox); // this is needed in order to set the initial state
+ glManagedCheckbox.addEventListener('change', () => setState(glManagedCheckbox));
+ }
};
export default initGkeNamespace;
diff --git a/app/assets/javascripts/right_sidebar.js b/app/assets/javascripts/right_sidebar.js
index 930c0d5e958..40a2158de78 100644
--- a/app/assets/javascripts/right_sidebar.js
+++ b/app/assets/javascripts/right_sidebar.js
@@ -101,10 +101,12 @@ Sidebar.prototype.toggleTodo = function(e) {
this.todoUpdateDone(data);
})
.catch(() =>
- flash(sprintf(__('There was an error %{message} todo.')), {
- message:
- ajaxType === 'post' ? s__('RightSidebar|adding a') : s__('RightSidebar|deleting the'),
- }),
+ flash(
+ sprintf(__('There was an error %{message} todo.'), {
+ message:
+ ajaxType === 'post' ? s__('RightSidebar|adding a') : s__('RightSidebar|deleting the'),
+ }),
+ ),
);
};
diff --git a/app/assets/javascripts/tracking.js b/app/assets/javascripts/tracking.js
new file mode 100644
index 00000000000..2d0b099cf0b
--- /dev/null
+++ b/app/assets/javascripts/tracking.js
@@ -0,0 +1,67 @@
+import $ from 'jquery';
+
+const extractData = (el, opts = {}) => {
+ const { trackEvent, trackLabel = '', trackProperty = '' } = el.dataset;
+ let trackValue = el.dataset.trackValue || el.value || '';
+ if (el.type === 'checkbox' && !el.checked) trackValue = false;
+ return [
+ trackEvent + (opts.suffix || ''),
+ {
+ label: trackLabel,
+ property: trackProperty,
+ value: trackValue,
+ },
+ ];
+};
+
+export default class Tracking {
+ static enabled() {
+ return typeof window.snowplow === 'function';
+ }
+
+ static event(category = document.body.dataset.page, event = 'generic', data = {}) {
+ if (!this.enabled()) return false;
+ // eslint-disable-next-line @gitlab/i18n/no-non-i18n-strings
+ if (!category) throw new Error('Tracking: no category provided for tracking.');
+
+ return window.snowplow(
+ 'trackStructEvent',
+ category,
+ event,
+ Object.assign({}, { label: '', property: '', value: '' }, data),
+ );
+ }
+
+ constructor(category = document.body.dataset.page) {
+ this.category = category;
+ }
+
+ bind(container = document) {
+ if (!this.constructor.enabled()) return;
+ container.querySelectorAll(`[data-track-event]`).forEach(el => {
+ if (this.customHandlingFor(el)) return;
+ // jquery is required for select2, so we use it always
+ // see: https://github.com/select2/select2/issues/4686
+ $(el).on('click', this.eventHandler(this.category));
+ });
+ }
+
+ customHandlingFor(el) {
+ const classes = el.classList;
+
+ // bootstrap dropdowns
+ if (classes.contains('dropdown')) {
+ $(el).on('show.bs.dropdown', this.eventHandler(this.category, { suffix: '_show' }));
+ $(el).on('hide.bs.dropdown', this.eventHandler(this.category, { suffix: '_hide' }));
+ return true;
+ }
+
+ return false;
+ }
+
+ eventHandler(category = null, opts = {}) {
+ return e => {
+ this.constructor.event(category || this.category, ...extractData(e.currentTarget, opts));
+ };
+ }
+}
diff --git a/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_pipeline_container.vue b/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_pipeline_container.vue
index 17ac8ada32d..76b96c8c1c0 100644
--- a/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_pipeline_container.vue
+++ b/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_pipeline_container.vue
@@ -60,7 +60,7 @@ export default {
return this.isPostMerge ? this.mr.mergePipeline : this.mr.pipeline;
},
showVisualReviewAppLink() {
- return Boolean(this.mr.visualReviewFF && this.mr.visualReviewAppAvailable);
+ return this.mr.visualReviewAppAvailable;
},
showMergeTrainInfo() {
return _.isNumber(this.mr.mergeTrainIndex);
diff --git a/app/assets/javascripts/vue_shared/components/markdown/toolbar.vue b/app/assets/javascripts/vue_shared/components/markdown/toolbar.vue
index 21c44b59520..8ce5b615795 100644
--- a/app/assets/javascripts/vue_shared/components/markdown/toolbar.vue
+++ b/app/assets/javascripts/vue_shared/components/markdown/toolbar.vue
@@ -1,6 +1,5 @@
<script>
import { GlLink } from '@gitlab/ui';
-import { s__, sprintf } from '~/locale';
export default {
components: {
@@ -23,28 +22,8 @@ export default {
},
},
computed: {
- toolbarHelpHtml() {
- const mdLinkStart = `<a href="${this.markdownDocsPath}" target="_blank" rel="noopener noreferrer" tabindex="-1">`;
- const actionsLinkStart = `<a href="${this.quickActionsDocsPath}" target="_blank" rel="noopener noreferrer" tabindex="-1">`;
- const linkEnd = '</a>';
-
- if (this.markdownDocsPath && !this.quickActionsDocsPath) {
- return sprintf(
- s__('Editor|%{mdLinkStart}Markdown is supported%{mdLinkEnd}'),
- { mdLinkStart, mdLinkEnd: linkEnd },
- false,
- );
- } else if (this.markdownDocsPath && this.quickActionsDocsPath) {
- return sprintf(
- s__(
- 'Editor|%{mdLinkStart}Markdown%{mdLinkEnd} and %{actionsLinkStart}quick actions%{actionsLinkEnd} are supported',
- ),
- { mdLinkStart, mdLinkEnd: linkEnd, actionsLinkStart, actionsLinkEnd: linkEnd },
- false,
- );
- }
-
- return null;
+ hasQuickActionsDocsPath() {
+ return this.quickActionsDocsPath !== '';
},
},
};
@@ -53,7 +32,21 @@ export default {
<template>
<div class="comment-toolbar clearfix">
<div class="toolbar-text">
- <span v-html="toolbarHelpHtml"></span>
+ <template v-if="!hasQuickActionsDocsPath && markdownDocsPath">
+ <gl-link :href="markdownDocsPath" target="_blank" tabindex="-1">{{
+ __('Markdown is supported')
+ }}</gl-link>
+ </template>
+ <template v-if="hasQuickActionsDocsPath && markdownDocsPath">
+ <gl-link :href="markdownDocsPath" target="_blank" tabindex="-1">{{
+ __('Markdown')
+ }}</gl-link>
+ and
+ <gl-link :href="quickActionsDocsPath" target="_blank" tabindex="-1">{{
+ __('quick actions')
+ }}</gl-link>
+ are supported
+ </template>
</div>
<span v-if="canAttachFile" class="uploading-container">
<span class="uploading-progress-container hide">
diff --git a/app/assets/stylesheets/application.scss b/app/assets/stylesheets/application.scss
index fbf16aa324a..e98030f1511 100644
--- a/app/assets/stylesheets/application.scss
+++ b/app/assets/stylesheets/application.scss
@@ -12,7 +12,6 @@
// If you need to add unique style that should affect only one page - use pages/
// directory.
@import "at.js/dist/css/jquery.atwho";
-@import "pikaday/scss/pikaday";
@import "dropzone/dist/basic";
@import "select2/select2";
diff --git a/app/assets/stylesheets/framework/common.scss b/app/assets/stylesheets/framework/common.scss
index 6b44834cc52..f384a49e0ae 100644
--- a/app/assets/stylesheets/framework/common.scss
+++ b/app/assets/stylesheets/framework/common.scss
@@ -435,6 +435,7 @@ img.emoji {
/** COMMON SIZING CLASSES **/
.w-0 { width: 0; }
.w-8em { width: 8em; }
+.w-3rem { width: 3rem; }
.h-12em { height: 12em; }
diff --git a/app/assets/stylesheets/pages/builds.scss b/app/assets/stylesheets/pages/builds.scss
index 6e98908eeed..262c0bf5ed2 100644
--- a/app/assets/stylesheets/pages/builds.scss
+++ b/app/assets/stylesheets/pages/builds.scss
@@ -127,6 +127,7 @@
.section-header ~ .section.line {
margin-left: $gl-padding;
+ display: block;
}
}
diff --git a/app/controllers/autocomplete_controller.rb b/app/controllers/autocomplete_controller.rb
index 091327931c2..f111c7ca8cc 100644
--- a/app/controllers/autocomplete_controller.rb
+++ b/app/controllers/autocomplete_controller.rb
@@ -16,7 +16,7 @@ class AutocompleteController < ApplicationController
.new(params: params, current_user: current_user, project: project, group: group)
.execute
- render json: UserSerializer.new.represent(users)
+ render json: UserSerializer.new(params).represent(users, project: project)
end
def user
diff --git a/app/controllers/boards/issues_controller.rb b/app/controllers/boards/issues_controller.rb
index 90528f75ffd..1d1a72d21f1 100644
--- a/app/controllers/boards/issues_controller.rb
+++ b/app/controllers/boards/issues_controller.rb
@@ -26,7 +26,7 @@ module Boards
list_service = Boards::Issues::ListService.new(board_parent, current_user, filter_params)
issues = list_service.execute
issues = issues.page(params[:page]).per(params[:per] || 20).without_count
- Issue.move_to_end(issues) if Gitlab::Database.read_write?
+ Issue.move_nulls_to_end(issues) if Gitlab::Database.read_write?
issues = issues.preload(:milestone,
:assignees,
project: [
diff --git a/app/controllers/concerns/issuable_actions.rb b/app/controllers/concerns/issuable_actions.rb
index 6fa2f75be33..398cb728e05 100644
--- a/app/controllers/concerns/issuable_actions.rb
+++ b/app/controllers/concerns/issuable_actions.rb
@@ -98,13 +98,12 @@ module IssuableActions
render json: { notice: "#{quantity} #{resource_name.pluralize(quantity)} updated" }
end
- # rubocop: disable CodeReuse/ActiveRecord
+ # rubocop:disable CodeReuse/ActiveRecord
def discussions
- notes = issuable.discussion_notes
- .inc_relations_for_view
- .with_notes_filter(notes_filter)
- .includes(:noteable)
- .fresh
+ notes = NotesFinder.new(current_user, finder_params_for_issuable).execute
+ .inc_relations_for_view
+ .includes(:noteable)
+ .fresh
if notes_filter != UserPreference::NOTES_FILTERS[:only_comments]
notes = ResourceEvents::MergeIntoNotesService.new(issuable, current_user).execute(notes)
@@ -117,7 +116,7 @@ module IssuableActions
render json: discussion_serializer.represent(discussions, context: self)
end
- # rubocop: enable CodeReuse/ActiveRecord
+ # rubocop:enable CodeReuse/ActiveRecord
private
@@ -222,4 +221,13 @@ module IssuableActions
def parent
@project || @group # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
+
+ # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ def finder_params_for_issuable
+ {
+ target: @issuable,
+ notes_filter: notes_filter
+ }.tap { |new_params| new_params[:project] = project if respond_to?(:project, true) }
+ end
+ # rubocop:enable Gitlab/ModuleWithInstanceVariables
end
diff --git a/app/controllers/concerns/notes_actions.rb b/app/controllers/concerns/notes_actions.rb
index 0098c4cdf4c..d2a961efff7 100644
--- a/app/controllers/concerns/notes_actions.rb
+++ b/app/controllers/concerns/notes_actions.rb
@@ -243,7 +243,7 @@ module NotesActions
end
def notes_finder
- @notes_finder ||= NotesFinder.new(project, current_user, finder_params)
+ @notes_finder ||= NotesFinder.new(current_user, finder_params)
end
def note_serializer
diff --git a/app/controllers/groups_controller.rb b/app/controllers/groups_controller.rb
index dda321bac79..5472ef05d7c 100644
--- a/app/controllers/groups_controller.rb
+++ b/app/controllers/groups_controller.rb
@@ -7,10 +7,6 @@ class GroupsController < Groups::ApplicationController
include PreviewMarkdown
include RecordUserLastActivity
- before_action do
- push_frontend_feature_flag(:manual_sorting, default_enabled: true)
- end
-
respond_to :html
prepend_before_action(only: [:show, :issues]) { authenticate_sessionless_user!(:rss) }
diff --git a/app/controllers/projects/issues_controller.rb b/app/controllers/projects/issues_controller.rb
index db7ca7ef0d7..bc9166b9df3 100644
--- a/app/controllers/projects/issues_controller.rb
+++ b/app/controllers/projects/issues_controller.rb
@@ -10,10 +10,6 @@ class Projects::IssuesController < Projects::ApplicationController
include SpammableActions
include RecordUserLastActivity
- before_action do
- push_frontend_feature_flag(:manual_sorting, default_enabled: true)
- end
-
def issue_except_actions
%i[index calendar new create bulk_update import_csv]
end
diff --git a/app/controllers/projects/notes_controller.rb b/app/controllers/projects/notes_controller.rb
index 3152a38fd8e..65d9b074eee 100644
--- a/app/controllers/projects/notes_controller.rb
+++ b/app/controllers/projects/notes_controller.rb
@@ -68,7 +68,7 @@ class Projects::NotesController < Projects::ApplicationController
alias_method :awardable, :note
def finder_params
- params.merge(last_fetched_at: last_fetched_at, notes_filter: notes_filter)
+ params.merge(project: project, last_fetched_at: last_fetched_at, notes_filter: notes_filter)
end
def authorize_admin_note!
diff --git a/app/controllers/snippets/notes_controller.rb b/app/controllers/snippets/notes_controller.rb
index 612897f27e6..551b37cb3d3 100644
--- a/app/controllers/snippets/notes_controller.rb
+++ b/app/controllers/snippets/notes_controller.rb
@@ -27,7 +27,9 @@ class Snippets::NotesController < ApplicationController
alias_method :noteable, :snippet
def finder_params
- params.merge(last_fetched_at: last_fetched_at, target_id: snippet.id, target_type: 'personal_snippet')
+ params.merge(last_fetched_at: last_fetched_at, target_id: snippet.id, target_type: 'personal_snippet').tap do |merged_params|
+ merged_params[:project] = project if respond_to?(:project)
+ end
end
def authorize_read_snippet!
diff --git a/app/finders/branches_finder.rb b/app/finders/branches_finder.rb
index b462c8053fa..291a24c1405 100644
--- a/app/finders/branches_finder.rb
+++ b/app/finders/branches_finder.rb
@@ -69,7 +69,7 @@ class BranchesFinder
return branches unless names
branch_names = names.to_set
- branches.filter do |branch|
+ branches.select do |branch|
branch_names.include?(branch.name)
end
end
diff --git a/app/finders/container_repositories_finder.rb b/app/finders/container_repositories_finder.rb
new file mode 100644
index 00000000000..eb91d7f825b
--- /dev/null
+++ b/app/finders/container_repositories_finder.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+class ContainerRepositoriesFinder
+ # id: group or project id
+ # container_type: :group or :project
+ def initialize(id:, container_type:)
+ @id = id
+ @type = container_type.to_sym
+ end
+
+ def execute
+ if project_type?
+ project.container_repositories
+ else
+ group.container_repositories
+ end
+ end
+
+ private
+
+ attr_reader :id, :type
+
+ def project_type?
+ type == :project
+ end
+
+ def project
+ Project.find(id)
+ end
+
+ def group
+ Group.find(id)
+ end
+end
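A minimal usage sketch of the finder introduced above, with illustrative project and group records (authorization is out of scope for the finder itself):

    # Project-level: list the project's own container repositories
    ContainerRepositoriesFinder.new(id: project.id, container_type: :project).execute

    # Group-level: list repositories across the group's projects
    # (relies on the has_many :container_repositories, through: :projects
    # association added to Group later in this diff)
    ContainerRepositoriesFinder.new(id: group.id, container_type: :group).execute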
diff --git a/app/finders/notes_finder.rb b/app/finders/notes_finder.rb
index 8f610d7dddb..f7d9100bb78 100644
--- a/app/finders/notes_finder.rb
+++ b/app/finders/notes_finder.rb
@@ -3,6 +3,8 @@
class NotesFinder
FETCH_OVERLAP = 5.seconds
+ attr_reader :target_type
+
# Used to filter Notes
# When used with target_type and target_id this returns notes specifically for the controller
#
@@ -10,15 +12,17 @@ class NotesFinder
# current_user - which user check authorizations with
# project - which project to look for notes on
# params:
+ # target: noteable
# target_type: string
# target_id: integer
# last_fetched_at: time
# search: string
#
- def initialize(project, current_user, params = {})
- @project = project
+ def initialize(current_user, params = {})
+ @project = params[:project]
@current_user = current_user
- @params = params
+ @params = params.dup
+ @target_type = @params[:target_type]
end
def execute
@@ -32,7 +36,27 @@ class NotesFinder
def target
return @target if defined?(@target)
- target_type = @params[:target_type]
+ if target_given?
+ use_explicit_target
+ else
+ find_target_by_type_and_ids
+ end
+ end
+
+ private
+
+ def target_given?
+ @params.key?(:target)
+ end
+
+ def use_explicit_target
+ @target = @params[:target]
+ @target_type = @target.class.name.underscore
+
+ @target
+ end
+
+ def find_target_by_type_and_ids
target_id = @params[:target_id]
target_iid = @params[:target_iid]
@@ -45,13 +69,11 @@ class NotesFinder
@project.commit(target_id)
end
else
- noteables_for_type_by_id(target_type, target_id, target_iid)
+ noteable_for_type_by_id(target_type, target_id, target_iid)
end
end
- private
-
- def noteables_for_type_by_id(type, id, iid)
+ def noteable_for_type_by_id(type, id, iid)
query = if id
{ id: id }
else
@@ -77,10 +99,6 @@ class NotesFinder
search(notes)
end
- def target_type
- @params[:target_type]
- end
-
# rubocop: disable CodeReuse/ActiveRecord
def notes_of_any_type
types = %w(commit issue merge_request snippet)
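A hedged sketch of NotesFinder's updated call pattern, using illustrative issue and project objects: the project now arrives through the params hash, and passing an explicit target derives target_type from the object's class:

    # look up by type and id, as the controllers do via finder_params
    NotesFinder.new(current_user, project: project, target_type: 'issue', target_id: issue.id).execute

    # pass the noteable directly, as IssuableActions#discussions now does
    NotesFinder.new(current_user, target: issue, project: project).execute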
diff --git a/app/helpers/application_settings_helper.rb b/app/helpers/application_settings_helper.rb
index 3847a35fbab..acbcf0ded17 100644
--- a/app/helpers/application_settings_helper.rb
+++ b/app/helpers/application_settings_helper.rb
@@ -160,6 +160,8 @@ module ApplicationSettingsHelper
:akismet_api_key,
:akismet_enabled,
:allow_local_requests_from_hooks_and_services,
+ :allow_local_requests_from_web_hooks_and_services,
+ :allow_local_requests_from_system_hooks,
:dns_rebinding_protection_enabled,
:archive_builds_in_human_readable,
:authorized_keys_enabled,
diff --git a/app/models/application_setting_implementation.rb b/app/models/application_setting_implementation.rb
index 6efd07a6008..b7a4d7aa803 100644
--- a/app/models/application_setting_implementation.rb
+++ b/app/models/application_setting_implementation.rb
@@ -21,7 +21,8 @@ module ApplicationSettingImplementation
{
after_sign_up_text: nil,
akismet_enabled: false,
- allow_local_requests_from_hooks_and_services: false,
+ allow_local_requests_from_web_hooks_and_services: false,
+ allow_local_requests_from_system_hooks: true,
dns_rebinding_protection_enabled: true,
authorized_keys_enabled: true, # TODO default to false if the instance is configured to use AuthorizedKeysCommand
container_registry_token_expire_delay: 5,
@@ -158,9 +159,20 @@ module ApplicationSettingImplementation
end
def outbound_local_requests_whitelist_raw=(values)
+ clear_memoization(:outbound_local_requests_whitelist_arrays)
+
self.outbound_local_requests_whitelist = domain_strings_to_array(values)
end
+ def add_to_outbound_local_requests_whitelist(values_array)
+ clear_memoization(:outbound_local_requests_whitelist_arrays)
+
+ self.outbound_local_requests_whitelist ||= []
+ self.outbound_local_requests_whitelist += values_array
+
+ self.outbound_local_requests_whitelist.uniq!
+ end
+
def outbound_local_requests_whitelist_arrays
strong_memoize(:outbound_local_requests_whitelist_arrays) do
next [[], []] unless self.outbound_local_requests_whitelist
diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb
index 07813e03f3a..ac88d9714ac 100644
--- a/app/models/ci/build.rb
+++ b/app/models/ci/build.rb
@@ -38,6 +38,7 @@ module Ci
has_one :deployment, as: :deployable, class_name: 'Deployment'
has_many :trace_sections, class_name: 'Ci::BuildTraceSection'
has_many :trace_chunks, class_name: 'Ci::BuildTraceChunk', foreign_key: :build_id
+ has_many :needs, class_name: 'Ci::BuildNeed', foreign_key: :build_id, inverse_of: :build
has_many :job_artifacts, class_name: 'Ci::JobArtifact', foreign_key: :job_id, dependent: :destroy, inverse_of: :job # rubocop:disable Cop/ActiveRecordDependent
has_many :job_variables, class_name: 'Ci::JobVariable', foreign_key: :job_id
@@ -50,6 +51,7 @@ module Ci
accepts_nested_attributes_for :runner_session
accepts_nested_attributes_for :job_variables
+ accepts_nested_attributes_for :needs
delegate :url, to: :runner_session, prefix: true, allow_nil: true
delegate :terminal_specification, to: :runner_session, allow_nil: true
@@ -713,11 +715,17 @@ module Ci
depended_jobs = depends_on_builds
- return depended_jobs unless options[:dependencies].present?
+ # restrict to the jobs named in `needs`
+ if Feature.enabled?(:ci_dag_support, project) && needs.exists?
+ depended_jobs = depended_jobs.where(name: needs.select(:name))
+ end
- depended_jobs.select do |job|
- options[:dependencies].include?(job.name)
+ # restrict to the jobs named in the `dependencies` option
+ if options[:dependencies].present?
+ depended_jobs = depended_jobs.where(name: options[:dependencies])
end
+
+ depended_jobs
end
def empty_dependencies?
diff --git a/app/models/ci/build_need.rb b/app/models/ci/build_need.rb
new file mode 100644
index 00000000000..6531dfd332f
--- /dev/null
+++ b/app/models/ci/build_need.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module Ci
+ class BuildNeed < ApplicationRecord
+ extend Gitlab::Ci::Model
+
+ belongs_to :build, class_name: "Ci::Build", foreign_key: :build_id, inverse_of: :needs
+
+ validates :build, presence: true
+ validates :name, presence: true, length: { maximum: 128 }
+
+ scope :scoped_build, -> { where('ci_builds.id=ci_build_needs.build_id') }
+ end
+end
diff --git a/app/models/ci/pipeline.rb b/app/models/ci/pipeline.rb
index 1c76f401690..3b28eb246db 100644
--- a/app/models/ci/pipeline.rb
+++ b/app/models/ci/pipeline.rb
@@ -504,8 +504,9 @@ module Ci
return [] unless config_processor
strong_memoize(:stage_seeds) do
- seeds = config_processor.stages_attributes.map do |attributes|
- Gitlab::Ci::Pipeline::Seed::Stage.new(self, attributes)
+ seeds = config_processor.stages_attributes.inject([]) do |previous_stages, attributes|
+ seed = Gitlab::Ci::Pipeline::Seed::Stage.new(self, attributes, previous_stages)
+ previous_stages + [seed]
end
seeds.select(&:included?)
@@ -611,8 +612,8 @@ module Ci
end
# rubocop: disable CodeReuse/ServiceClass
- def process!
- Ci::ProcessPipelineService.new(project, user).execute(self)
+ def process!(trigger_build_ids = nil)
+ Ci::ProcessPipelineService.new(project, user).execute(self, trigger_build_ids)
end
# rubocop: enable CodeReuse/ServiceClass
diff --git a/app/models/clusters/applications/cert_manager.rb b/app/models/clusters/applications/cert_manager.rb
index 7d5a6dec519..2fc1b67dfd2 100644
--- a/app/models/clusters/applications/cert_manager.rb
+++ b/app/models/clusters/applications/cert_manager.rb
@@ -24,12 +24,6 @@ module Clusters
'stable/cert-manager'
end
- # We will implement this in future MRs.
- # Need to reverse postinstall step
- def allowed_to_uninstall?
- false
- end
-
def install_command
Gitlab::Kubernetes::Helm::InstallCommand.new(
name: 'certmanager',
@@ -41,12 +35,42 @@ module Clusters
)
end
+ def uninstall_command
+ Gitlab::Kubernetes::Helm::DeleteCommand.new(
+ name: 'certmanager',
+ rbac: cluster.platform_kubernetes_rbac?,
+ files: files,
+ postdelete: post_delete_script
+ )
+ end
+
private
def post_install_script
["kubectl create -f /data/helm/certmanager/config/cluster_issuer.yaml"]
end
+ def post_delete_script
+ [
+ delete_private_key,
+ delete_crd('certificates.certmanager.k8s.io'),
+ delete_crd('clusterissuers.certmanager.k8s.io'),
+ delete_crd('issuers.certmanager.k8s.io')
+ ].compact
+ end
+
+ def private_key_name
+ @private_key_name ||= cluster_issuer_content.dig('spec', 'acme', 'privateKeySecretRef', 'name')
+ end
+
+ def delete_private_key
+ "kubectl delete secret -n #{Gitlab::Kubernetes::Helm::NAMESPACE} #{private_key_name} --ignore-not-found" if private_key_name.present?
+ end
+
+ def delete_crd(definition)
+ "kubectl delete crd #{definition} --ignore-not-found"
+ end
+
def cluster_issuer_file
{
'cluster_issuer.yaml': cluster_issuer_yaml_content
diff --git a/app/models/clusters/applications/helm.rb b/app/models/clusters/applications/helm.rb
index a83d06c4b00..3a175fec148 100644
--- a/app/models/clusters/applications/helm.rb
+++ b/app/models/clusters/applications/helm.rb
@@ -14,6 +14,7 @@ module Clusters
include ::Clusters::Concerns::ApplicationCore
include ::Clusters::Concerns::ApplicationStatus
+ include ::Gitlab::Utils::StrongMemoize
default_value_for :version, Gitlab::Kubernetes::Helm::HELM_VERSION
@@ -29,11 +30,22 @@ module Clusters
self.status = 'installable' if cluster&.platform_kubernetes_active?
end
- # We will implement this in future MRs.
- # Basically we need to check all other applications are not installed
- # first.
+ # It can only be uninstalled if there are no other applications installed
+ # or with intermittent installation statuses in the database.
def allowed_to_uninstall?
- false
+ strong_memoize(:allowed_to_uninstall) do
+ applications = nil
+
+ Clusters::Cluster::APPLICATIONS.each do |application_name, klass|
+ next if application_name == 'helm'
+
+ extra_apps = Clusters::Applications::Helm.where('EXISTS (?)', klass.select(1).where(cluster_id: cluster_id))
+
+ applications = applications.present? ? applications.or(extra_apps) : extra_apps
+ end
+
+ !applications.exists?
+ end
end
def install_command
@@ -44,6 +56,14 @@ module Clusters
)
end
+ def uninstall_command
+ Gitlab::Kubernetes::Helm::ResetCommand.new(
+ name: name,
+ files: files,
+ rbac: cluster.platform_kubernetes_rbac?
+ )
+ end
+
def has_ssl?
ca_key.present? && ca_cert.present?
end
diff --git a/app/models/clusters/applications/knative.rb b/app/models/clusters/applications/knative.rb
index 96f526e8a36..5eae23659ae 100644
--- a/app/models/clusters/applications/knative.rb
+++ b/app/models/clusters/applications/knative.rb
@@ -84,7 +84,7 @@ module Clusters
private
def delete_knative_services_and_metrics
- delete_knative_services + delete_knative_istio_metrics.to_a
+ delete_knative_services + delete_knative_istio_metrics
end
def delete_knative_services
@@ -117,11 +117,15 @@ module Clusters
end
def install_knative_metrics
- ["kubectl apply -f #{METRICS_CONFIG}"] if cluster.application_prometheus_available?
+ return [] unless cluster.application_prometheus_available?
+
+ ["kubectl apply -f #{METRICS_CONFIG}"]
end
def delete_knative_istio_metrics
- ["kubectl delete --ignore-not-found -f #{METRICS_CONFIG}"] if cluster.application_prometheus_available?
+ return [] unless cluster.application_prometheus_available?
+
+ ["kubectl delete --ignore-not-found -f #{METRICS_CONFIG}"]
end
def verify_cluster?
diff --git a/app/models/clusters/applications/prometheus.rb b/app/models/clusters/applications/prometheus.rb
index f5375d29f3a..5eb535cab58 100644
--- a/app/models/clusters/applications/prometheus.rb
+++ b/app/models/clusters/applications/prometheus.rb
@@ -64,7 +64,7 @@ module Clusters
name: name,
rbac: cluster.platform_kubernetes_rbac?,
files: files,
- predelete: delete_knative_istio_metrics.to_a
+ predelete: delete_knative_istio_metrics
)
end
@@ -104,11 +104,15 @@ module Clusters
end
def install_knative_metrics
- ["kubectl apply -f #{Clusters::Applications::Knative::METRICS_CONFIG}"] if cluster.application_knative_available?
+ return [] unless cluster.application_knative_available?
+
+ ["kubectl apply -f #{Clusters::Applications::Knative::METRICS_CONFIG}"]
end
def delete_knative_istio_metrics
- ["kubectl delete -f #{Clusters::Applications::Knative::METRICS_CONFIG}"] if cluster.application_knative_available?
+ return [] unless cluster.application_knative_available?
+
+ ["kubectl delete -f #{Clusters::Applications::Knative::METRICS_CONFIG}"]
end
end
end
diff --git a/app/models/commit_status.rb b/app/models/commit_status.rb
index be6f3e9c5b0..a88cac6b8e6 100644
--- a/app/models/commit_status.rb
+++ b/app/models/commit_status.rb
@@ -43,6 +43,16 @@ class CommitStatus < ApplicationRecord
scope :after_stage, -> (index) { where('stage_idx > ?', index) }
scope :processables, -> { where(type: %w[Ci::Build Ci::Bridge]) }
+ scope :with_needs, -> (names = nil) do
+ needs = Ci::BuildNeed.scoped_build.select(1)
+ needs = needs.where(name: names) if names
+ where('EXISTS (?)', needs).preload(:needs)
+ end
+
+ scope :without_needs, -> do
+ where('NOT EXISTS (?)', Ci::BuildNeed.scoped_build.select(1))
+ end
+
# We use `CommitStatusEnums.failure_reasons` here so that EE can more easily
# extend this `Hash` with new values.
enum_with_nil failure_reason: ::CommitStatusEnums.failure_reasons
@@ -116,7 +126,7 @@ class CommitStatus < ApplicationRecord
commit_status.run_after_commit do
if pipeline_id
if complete? || manual?
- PipelineProcessWorker.perform_async(pipeline_id)
+ PipelineProcessWorker.perform_async(pipeline_id, [id])
else
PipelineUpdateWorker.perform_async(pipeline_id)
end
diff --git a/app/models/concerns/ci/metadatable.rb b/app/models/concerns/ci/metadatable.rb
index 9eed9492b37..304cc71e9dc 100644
--- a/app/models/concerns/ci/metadatable.rb
+++ b/app/models/concerns/ci/metadatable.rb
@@ -29,6 +29,7 @@ module Ci
def degenerate!
self.class.transaction do
self.update!(options: nil, yaml_variables: nil)
+ self.needs.all.delete_all
self.metadata&.destroy
end
end
diff --git a/app/models/concerns/diff_positionable_note.rb b/app/models/concerns/diff_positionable_note.rb
index 2d09eff0111..195d9e107c5 100644
--- a/app/models/concerns/diff_positionable_note.rb
+++ b/app/models/concerns/diff_positionable_note.rb
@@ -10,6 +10,8 @@ module DiffPositionableNote
serialize :original_position, Gitlab::Diff::Position # rubocop:disable Cop/ActiveRecordSerialize
serialize :position, Gitlab::Diff::Position # rubocop:disable Cop/ActiveRecordSerialize
serialize :change_position, Gitlab::Diff::Position # rubocop:disable Cop/ActiveRecordSerialize
+
+ validate :diff_refs_match_commit, if: :for_commit?
end
%i(original_position position change_position).each do |meth|
@@ -71,4 +73,10 @@ module DiffPositionableNote
self.position = result[:position]
end
end
+
+ def diff_refs_match_commit
+ return if self.original_position.diff_refs == commit&.diff_refs
+
+ errors.add(:commit_id, 'does not match the diff refs')
+ end
end
diff --git a/app/models/concerns/group_descendant.rb b/app/models/concerns/group_descendant.rb
index cfffd845e43..ed14b73ac1b 100644
--- a/app/models/concerns/group_descendant.rb
+++ b/app/models/concerns/group_descendant.rb
@@ -42,7 +42,7 @@ module GroupDescendant
parent = child.parent
exception = ArgumentError.new <<~MSG
- parent: [GroupDescendant: #{parent.inspect}] was not preloaded for [#{child.inspect}]")
+ Parent was not preloaded for child when rendering group hierarchy.
This error is not user facing, but causes a +1 query.
MSG
extras = {
@@ -50,7 +50,7 @@ module GroupDescendant
child: child.inspect,
preloaded: preloaded.map(&:full_path)
}
- issue_url = 'https://gitlab.com/gitlab-org/gitlab-ce/issues/40785'
+ issue_url = 'https://gitlab.com/gitlab-org/gitlab-ce/issues/49404'
Gitlab::Sentry.track_exception(exception, issue_url: issue_url, extra: extras)
end
diff --git a/app/models/concerns/relative_positioning.rb b/app/models/concerns/relative_positioning.rb
index 9cd7b8d6258..6d3c7a7ed68 100644
--- a/app/models/concerns/relative_positioning.rb
+++ b/app/models/concerns/relative_positioning.rb
@@ -29,12 +29,8 @@ module RelativePositioning
MAX_POSITION = Gitlab::Database::MAX_INT_VALUE
IDEAL_DISTANCE = 500
- included do
- after_save :save_positionable_neighbours
- end
-
class_methods do
- def move_to_end(objects)
+ def move_nulls_to_end(objects)
objects = objects.reject(&:relative_position)
return if objects.empty?
@@ -43,7 +39,7 @@ module RelativePositioning
self.transaction do
objects.each do |object|
- relative_position = position_between(max_relative_position, MAX_POSITION)
+ relative_position = position_between(max_relative_position || START_POSITION, MAX_POSITION)
object.relative_position = relative_position
max_relative_position = relative_position
object.save(touch: false)
@@ -114,11 +110,12 @@ module RelativePositioning
return move_after(before) unless after
return move_before(after) unless before
- # If there is no place to insert an item we need to create one by moving the before item closer
- # to its predecessor. This process will recursively move all the predecessors until we have a place
+ # If there is no place to insert an item we need to create one by moving the item
+ # before this and all preceding items until there is a gap
+ before, after = after, before if after.relative_position < before.relative_position
if (after.relative_position - before.relative_position) < 2
- before.move_before
- @positionable_neighbours = [before] # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ after.move_sequence_before
+ before.reset
end
self.relative_position = self.class.position_between(before.relative_position, after.relative_position)
@@ -128,12 +125,8 @@ module RelativePositioning
pos_before = before.relative_position
pos_after = before.next_relative_position
- if before.shift_after?
- item_to_move = self.class.relative_positioning_query_base(self).find_by!(relative_position: pos_after)
- item_to_move.move_after
- @positionable_neighbours = [item_to_move] # rubocop:disable Gitlab/ModuleWithInstanceVariables
-
- pos_after = item_to_move.relative_position
+ if pos_after && (pos_after - pos_before) < 2
+ before.move_sequence_after
end
self.relative_position = self.class.position_between(pos_before, pos_after)
@@ -143,12 +136,8 @@ module RelativePositioning
pos_after = after.relative_position
pos_before = after.prev_relative_position
- if after.shift_before?
- item_to_move = self.class.relative_positioning_query_base(self).find_by!(relative_position: pos_before)
- item_to_move.move_before
- @positionable_neighbours = [item_to_move] # rubocop:disable Gitlab/ModuleWithInstanceVariables
-
- pos_before = item_to_move.relative_position
+ if pos_before && (pos_after - pos_before) < 2
+ after.move_sequence_before
end
self.relative_position = self.class.position_between(pos_before, pos_after)
@@ -162,36 +151,82 @@ module RelativePositioning
self.relative_position = self.class.position_between(min_relative_position || START_POSITION, MIN_POSITION)
end
- # Indicates if there is an item that should be shifted to free the place
- def shift_after?
- next_pos = next_relative_position
- next_pos && (next_pos - relative_position) == 1
+ # Moves the sequence before the current item to the middle of the next gap
+ # For example, we have 5 11 12 13 14 15 and the current item is 15
+ # This moves the sequence 11 12 13 14 to 8 9 10 11
+ def move_sequence_before
+ next_gap = find_next_gap_before
+ delta = optimum_delta_for_gap(next_gap)
+
+ move_sequence(next_gap[:start], relative_position, -delta)
end
- # Indicates if there is an item that should be shifted to free the place
- def shift_before?
- prev_pos = prev_relative_position
- prev_pos && (relative_position - prev_pos) == 1
+ # Moves the sequence after the current item to the middle of the next gap
+ # For example, we have 11 12 13 14 15 21 and the current item is 11
+ # This moves the sequence 12 13 14 15 to 15 16 17 18
+ def move_sequence_after
+ next_gap = find_next_gap_after
+ delta = optimum_delta_for_gap(next_gap)
+
+ move_sequence(relative_position, next_gap[:start], delta)
end
private
- # rubocop:disable Gitlab/ModuleWithInstanceVariables
- def save_positionable_neighbours
- return unless @positionable_neighbours
+ # Supposing that we have a sequence of items: 1 5 11 12 13 and the current item is 13
+ # This would return: `{ start: 11, end: 5 }`
+ def find_next_gap_before
+ items_with_next_pos = scoped_items
+ .select('relative_position AS pos, LEAD(relative_position) OVER (ORDER BY relative_position DESC) AS next_pos')
+ .where('relative_position <= ?', relative_position)
+ .order(relative_position: :desc)
+
+ find_next_gap(items_with_next_pos).tap do |gap|
+ gap[:end] ||= MIN_POSITION
+ end
+ end
+
+ # Supposing that we have a sequence of items: 13 14 15 20 24 and the current item is 13
+ # This would return: `{ start: 15, end: 20 }`
+ def find_next_gap_after
+ items_with_next_pos = scoped_items
+ .select('relative_position AS pos, LEAD(relative_position) OVER (ORDER BY relative_position ASC) AS next_pos')
+ .where('relative_position >= ?', relative_position)
+ .order(:relative_position)
- status = @positionable_neighbours.all? { |item| item.save(touch: false) }
- @positionable_neighbours = nil
+ find_next_gap(items_with_next_pos).tap do |gap|
+ gap[:end] ||= MAX_POSITION
+ end
+ end
+
+ def find_next_gap(items_with_next_pos)
+ gap = self.class.from(items_with_next_pos, :items_with_next_pos)
+ .where('ABS(pos - next_pos) > 1 OR next_pos IS NULL')
+ .limit(1)
+ .pluck(:pos, :next_pos)
+ .first
+
+ { start: gap[0], end: gap[1] }
+ end
- status
+ def optimum_delta_for_gap(gap)
+ delta = ((gap[:start] - gap[:end]) / 2.0).abs.ceil
+
+ [delta, IDEAL_DISTANCE].min
+ end
+
+ def move_sequence(start_pos, end_pos, delta)
+ scoped_items
+ .where.not(id: self.id)
+ .where('relative_position BETWEEN ? AND ?', start_pos, end_pos)
+ .update_all("relative_position = relative_position + #{delta}")
end
- # rubocop:enable Gitlab/ModuleWithInstanceVariables
def calculate_relative_position(calculation)
# When calculating across projects, this is much more efficient than
# MAX(relative_position) without the GROUP BY, due to index usage:
# https://gitlab.com/gitlab-org/gitlab-ce/issues/54276#note_119340977
- relation = self.class.relative_positioning_query_base(self)
+ relation = scoped_items
.order(Gitlab::Database.nulls_last_order('position', 'DESC'))
.group(self.class.relative_positioning_parent_column)
.limit(1)
@@ -203,4 +238,8 @@ module RelativePositioning
.first&.
last
end
+
+ def scoped_items
+ self.class.relative_positioning_query_base(self)
+ end
end
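A worked trace of the gap-shifting logic above, reusing the positions from the inline comments (illustration only):

    # positions: 5 11 12 13 14 15, current item at 15
    # find_next_gap_before  => { start: 11, end: 5 }          (first neighbouring pair more than 1 apart)
    # optimum_delta_for_gap => ((11 - 5) / 2.0).abs.ceil = 3  (capped at IDEAL_DISTANCE)
    # move_sequence(11, 15, -3) shifts 11 12 13 14 (the current item is excluded) to 8 9 10 11,
    # leaving room to position a new item before 15.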
diff --git a/app/models/concerns/update_project_statistics.rb b/app/models/concerns/update_project_statistics.rb
index 570a735973f..869b3490f3f 100644
--- a/app/models/concerns/update_project_statistics.rb
+++ b/app/models/concerns/update_project_statistics.rb
@@ -73,15 +73,10 @@ module UpdateProjectStatistics
def schedule_namespace_aggregation_worker
run_after_commit do
- next unless schedule_aggregation_worker?
+ next if project.nil?
Namespaces::ScheduleAggregationWorker.perform_async(project.namespace_id)
end
end
-
- def schedule_aggregation_worker?
- !project.nil? &&
- Feature.enabled?(:update_statistics_namespace, project.root_ancestor)
- end
end
end
diff --git a/app/models/container_repository.rb b/app/models/container_repository.rb
index facd81dde80..2a5ae7930e6 100644
--- a/app/models/container_repository.rb
+++ b/app/models/container_repository.rb
@@ -70,10 +70,14 @@ class ContainerRepository < ApplicationRecord
digests = tags.map { |tag| tag.digest }.to_set
digests.all? do |digest|
- client.delete_repository_tag(self.path, digest)
+ delete_tag_by_digest(digest)
end
end
+ def delete_tag_by_digest(digest)
+ client.delete_repository_tag(self.path, digest)
+ end
+
def self.build_from_path(path)
self.new(project: path.repository_project,
name: path.repository_name)
diff --git a/app/models/diff_note.rb b/app/models/diff_note.rb
index f75c32633b1..861185dc222 100644
--- a/app/models/diff_note.rb
+++ b/app/models/diff_note.rb
@@ -20,7 +20,6 @@ class DiffNote < Note
validates :noteable_type, inclusion: { in: -> (_note) { noteable_types } }
validate :positions_complete
validate :verify_supported
- validate :diff_refs_match_commit, if: :for_commit?
before_validation :set_line_code, if: :on_text?
after_save :keep_around_commits
@@ -154,12 +153,6 @@ class DiffNote < Note
errors.add(:position, "is invalid")
end
- def diff_refs_match_commit
- return if self.original_position.diff_refs == self.commit.diff_refs
-
- errors.add(:commit_id, 'does not match the diff refs')
- end
-
def keep_around_commits
shas = [
self.original_position.base_sha,
diff --git a/app/models/environment.rb b/app/models/environment.rb
index 392481ea0cc..513427ac2c5 100644
--- a/app/models/environment.rb
+++ b/app/models/environment.rb
@@ -204,7 +204,7 @@ class Environment < ApplicationRecord
public_path = project.public_path_for_source_path(path, commit_sha)
return unless public_path
- [external_url, public_path].join('/')
+ [external_url.delete_suffix('/'), public_path.delete_prefix('/')].join('/')
end
def expire_etag_cache
diff --git a/app/models/group.rb b/app/models/group.rb
index 74eb556b1b5..6c868b1d1f0 100644
--- a/app/models/group.rb
+++ b/app/models/group.rb
@@ -44,6 +44,8 @@ class Group < Namespace
has_many :cluster_groups, class_name: 'Clusters::Group'
has_many :clusters, through: :cluster_groups, class_name: 'Clusters::Cluster'
+ has_many :container_repositories, through: :projects
+
has_many :todos
accepts_nested_attributes_for :variables, allow_destroy: true
diff --git a/app/models/hooks/system_hook.rb b/app/models/hooks/system_hook.rb
index 90b4588a325..3d54d17e787 100644
--- a/app/models/hooks/system_hook.rb
+++ b/app/models/hooks/system_hook.rb
@@ -14,8 +14,10 @@ class SystemHook < WebHook
default_value_for :repository_update_events, true
default_value_for :merge_requests_events, false
+ validates :url, system_hook_url: true
+
# Allow urls pointing localhost and the local network
def allow_local_requests?
- true
+ Gitlab::CurrentSettings.allow_local_requests_from_system_hooks?
end
end
diff --git a/app/models/hooks/web_hook.rb b/app/models/hooks/web_hook.rb
index daf7ff4b771..16fc7fdbd48 100644
--- a/app/models/hooks/web_hook.rb
+++ b/app/models/hooks/web_hook.rb
@@ -15,8 +15,8 @@ class WebHook < ApplicationRecord
has_many :web_hook_logs, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
- validates :url, presence: true, public_url: { allow_localhost: lambda(&:allow_local_requests?),
- allow_local_network: lambda(&:allow_local_requests?) }
+ validates :url, presence: true
+ validates :url, public_url: true, unless: ->(hook) { hook.is_a?(SystemHook) }
validates :token, format: { without: /\n/ }
validates :push_events_branch_filter, branch_filter: true
@@ -35,6 +35,6 @@ class WebHook < ApplicationRecord
# Allow urls pointing localhost and the local network
def allow_local_requests?
- false
+ Gitlab::CurrentSettings.allow_local_requests_from_web_hooks_and_services?
end
end
diff --git a/app/models/merge_request.rb b/app/models/merge_request.rb
index 8ade91933a4..5e8a6a7d5e5 100644
--- a/app/models/merge_request.rb
+++ b/app/models/merge_request.rb
@@ -752,7 +752,7 @@ class MergeRequest < ApplicationRecord
end
def check_mergeability
- MergeRequests::MergeabilityCheckService.new(self).execute
+ MergeRequests::MergeabilityCheckService.new(self).execute(retry_lease: false)
end
# rubocop: enable CodeReuse/ServiceClass
diff --git a/app/models/namespace/aggregation_schedule.rb b/app/models/namespace/aggregation_schedule.rb
index 0bef352cf24..61a7eb4b576 100644
--- a/app/models/namespace/aggregation_schedule.rb
+++ b/app/models/namespace/aggregation_schedule.rb
@@ -6,21 +6,13 @@ class Namespace::AggregationSchedule < ApplicationRecord
self.primary_key = :namespace_id
- DEFAULT_LEASE_TIMEOUT = 3.hours
+ DEFAULT_LEASE_TIMEOUT = 1.5.hours.to_i
REDIS_SHARED_KEY = 'gitlab:update_namespace_statistics_delay'.freeze
belongs_to :namespace
after_create :schedule_root_storage_statistics
- def self.delay_timeout
- redis_timeout = Gitlab::Redis::SharedState.with do |redis|
- redis.get(REDIS_SHARED_KEY)
- end
-
- redis_timeout.nil? ? DEFAULT_LEASE_TIMEOUT : redis_timeout.to_i
- end
-
def schedule_root_storage_statistics
run_after_commit_or_now do
try_obtain_lease do
@@ -28,7 +20,7 @@ class Namespace::AggregationSchedule < ApplicationRecord
.perform_async(namespace_id)
Namespaces::RootStatisticsWorker
- .perform_in(self.class.delay_timeout, namespace_id)
+ .perform_in(DEFAULT_LEASE_TIMEOUT, namespace_id)
end
end
end
@@ -37,7 +29,7 @@ class Namespace::AggregationSchedule < ApplicationRecord
# Used by ExclusiveLeaseGuard
def lease_timeout
- self.class.delay_timeout
+ DEFAULT_LEASE_TIMEOUT
end
# Used by ExclusiveLeaseGuard
diff --git a/app/models/note.rb b/app/models/note.rb
index 3f182c1f099..a12d1eb7243 100644
--- a/app/models/note.rb
+++ b/app/models/note.rb
@@ -27,6 +27,10 @@ class Note < ApplicationRecord
def values
constants.map {|const| self.const_get(const)}
end
+
+ def value?(val)
+ values.include?(val)
+ end
end
end
diff --git a/app/models/project_statistics.rb b/app/models/project_statistics.rb
index 3802d258664..47999a3694e 100644
--- a/app/models/project_statistics.rb
+++ b/app/models/project_statistics.rb
@@ -93,13 +93,7 @@ class ProjectStatistics < ApplicationRecord
def schedule_namespace_aggregation_worker
run_after_commit do
- next unless schedule_aggregation_worker?
-
Namespaces::ScheduleAggregationWorker.perform_async(project.namespace_id)
end
end
-
- def schedule_aggregation_worker?
- Feature.enabled?(:update_statistics_namespace, project&.root_ancestor)
- end
end
diff --git a/app/policies/group_policy.rb b/app/policies/group_policy.rb
index 84b1873c05d..52c944491bf 100644
--- a/app/policies/group_policy.rb
+++ b/app/policies/group_policy.rb
@@ -68,6 +68,7 @@ class GroupPolicy < BasePolicy
rule { developer }.enable :admin_milestone
rule { reporter }.policy do
+ enable :read_container_image
enable :admin_label
enable :admin_list
enable :admin_issue
diff --git a/app/serializers/analytics_issue_entity.rb b/app/serializers/analytics_issue_entity.rb
index 29d4a6ae1d0..307ce14a921 100644
--- a/app/serializers/analytics_issue_entity.rb
+++ b/app/serializers/analytics_issue_entity.rb
@@ -26,6 +26,6 @@ class AnalyticsIssueEntity < Grape::Entity
private
def url_to(route, object)
- public_send("#{route}_url", object[:path], object[:name], object[:iid].to_s) # rubocop:disable GitlabSecurity/PublicSend
+ public_send("#{route}_url", object[:namespace_path], object[:project_path], object[:iid].to_s) # rubocop:disable GitlabSecurity/PublicSend
end
end
diff --git a/app/serializers/user_serializer.rb b/app/serializers/user_serializer.rb
index 2111e1b5667..d988caea92d 100644
--- a/app/serializers/user_serializer.rb
+++ b/app/serializers/user_serializer.rb
@@ -2,4 +2,21 @@
class UserSerializer < BaseSerializer
entity UserEntity
+
+ def represent(resource, opts = {}, entity = nil)
+ if params[:merge_request_iid]
+ merge_request = opts[:project].merge_requests.find_by_iid!(params[:merge_request_iid])
+ preload_max_member_access(merge_request.project, Array(resource))
+
+ super(resource, opts.merge(merge_request: merge_request), MergeRequestAssigneeEntity)
+ else
+ super
+ end
+ end
+
+ private
+
+ def preload_max_member_access(project, users)
+ project.team.max_member_access_for_user_ids(users.map(&:id))
+ end
end
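For illustration, a caller could exercise the new branch roughly like this (a sketch assuming BaseSerializer keeps the constructor hash accessible via `params`; the caller-side names are illustrative, not from this patch):

  # Default path: plain UserEntity.
  UserSerializer.new.represent(users)

  # With a merge_request_iid param, assignees are rendered through
  # MergeRequestAssigneeEntity and max member access is preloaded first.
  UserSerializer
    .new(merge_request_iid: merge_request.iid)
    .represent(users, project: project)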
diff --git a/app/services/application_settings/update_service.rb b/app/services/application_settings/update_service.rb
index 7eeaf8aade1..471df6e2d0c 100644
--- a/app/services/application_settings/update_service.rb
+++ b/app/services/application_settings/update_service.rb
@@ -15,6 +15,8 @@ module ApplicationSettings
update_terms(@params.delete(:terms))
+ add_to_outbound_local_requests_whitelist(@params.delete(:add_to_outbound_local_requests_whitelist))
+
if params.key?(:performance_bar_allowed_group_path)
params[:performance_bar_allowed_group_id] = performance_bar_allowed_group_id
end
@@ -32,6 +34,13 @@ module ApplicationSettings
params.key?(:usage_ping_enabled) || params.key?(:version_check_enabled)
end
+ def add_to_outbound_local_requests_whitelist(values)
+ values_array = Array(values).reject(&:empty?)
+ return if values_array.empty?
+
+ @application_setting.add_to_outbound_local_requests_whitelist(values_array)
+ end
+
def update_terms(terms)
return unless terms.present?
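A hedged usage sketch of the new parameter (the admin user and host value are assumptions for illustration): entries passed this way are appended to the existing outbound whitelist rather than replacing it.

  ApplicationSettings::UpdateService.new(
    Gitlab::CurrentSettings.current_application_settings,
    admin_user,
    add_to_outbound_local_requests_whitelist: ['prometheus.internal']
  ).execute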
diff --git a/app/services/auth/container_registry_authentication_service.rb b/app/services/auth/container_registry_authentication_service.rb
index 707caee482c..0a069320936 100644
--- a/app/services/auth/container_registry_authentication_service.rb
+++ b/app/services/auth/container_registry_authentication_service.rb
@@ -17,6 +17,14 @@ module Auth
end
def self.full_access_token(*names)
+ access_token(%w(*), names)
+ end
+
+ def self.pull_access_token(*names)
+ access_token(['pull'], names)
+ end
+
+ def self.access_token(actions, names)
names = names.flatten
registry = Gitlab.config.registry
token = JSONWebToken::RSAToken.new(registry.key)
@@ -25,7 +33,7 @@ module Auth
token.expire_time = token_expire_at
token[:access] = names.map do |name|
- { type: 'repository', name: name, actions: %w(*) }
+ { type: 'repository', name: name, actions: actions }
end
token.encoded
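Sketch of the resulting API, for illustration only (the repository path is an assumption):

  # Full read/write token, as before: access actions are %w(*).
  Auth::ContainerRegistryAuthenticationService.full_access_token('group/project')

  # New pull-only token: same JWT structure, but actions are limited to ['pull'].
  Auth::ContainerRegistryAuthenticationService.pull_access_token('group/project')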
diff --git a/app/services/ci/process_pipeline_service.rb b/app/services/ci/process_pipeline_service.rb
index 207cc5017d0..99d4ff9ecd1 100644
--- a/app/services/ci/process_pipeline_service.rb
+++ b/app/services/ci/process_pipeline_service.rb
@@ -4,35 +4,69 @@ module Ci
class ProcessPipelineService < BaseService
attr_reader :pipeline
- def execute(pipeline)
+ def execute(pipeline, trigger_build_ids = nil)
@pipeline = pipeline
update_retried
- new_builds =
- stage_indexes_of_created_processables.flat_map do |index|
- process_stage(index)
- end
+ success = process_stages_without_needs
+
+ # we evaluate dependent needs,
+ # only when another job has finished
+ success = process_builds_with_needs(trigger_build_ids) || success
@pipeline.update_status
- new_builds.any?
+ success
end
private
- def process_stage(index)
+ def process_stages_without_needs
+ stage_indexes_of_created_processables_without_needs.flat_map do |index|
+ process_stage_without_needs(index)
+ end.any?
+ end
+
+ def process_stage_without_needs(index)
current_status = status_for_prior_stages(index)
- return if HasStatus::BLOCKED_STATUS.include?(current_status)
+ return unless HasStatus::COMPLETED_STATUSES.include?(current_status)
- if HasStatus::COMPLETED_STATUSES.include?(current_status)
- created_processables_in_stage(index).select do |build|
- Gitlab::OptimisticLocking.retry_lock(build) do |subject|
- Ci::ProcessBuildService.new(project, @user)
- .execute(build, current_status)
- end
- end
+ created_processables_in_stage_without_needs(index).select do |build|
+ process_build(build, current_status)
+ end
+ end
+
+ def process_builds_with_needs(trigger_build_ids)
+ return false unless trigger_build_ids.present?
+ return false unless Feature.enabled?(:ci_dag_support, project)
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ trigger_build_names = pipeline.statuses
+ .where(id: trigger_build_ids)
+ .select(:name)
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ created_processables
+ .with_needs(trigger_build_names)
+ .find_each
+ .map(&method(:process_build_with_needs))
+ .any?
+ end
+
+ def process_build_with_needs(build)
+ current_status = status_for_build_needs(build.needs.map(&:name))
+
+ return unless HasStatus::COMPLETED_STATUSES.include?(current_status)
+
+ process_build(build, current_status)
+ end
+
+ def process_build(build, current_status)
+ Gitlab::OptimisticLocking.retry_lock(build) do |subject|
+ Ci::ProcessBuildService.new(project, @user)
+ .execute(subject, current_status)
end
end
@@ -43,17 +77,33 @@ module Ci
# rubocop: enable CodeReuse/ActiveRecord
# rubocop: disable CodeReuse/ActiveRecord
- def stage_indexes_of_created_processables
- created_processables.order(:stage_idx).pluck(Arel.sql('DISTINCT stage_idx'))
+ def status_for_build_needs(needs)
+ pipeline.builds.where(name: needs).latest.status || 'success'
end
# rubocop: enable CodeReuse/ActiveRecord
# rubocop: disable CodeReuse/ActiveRecord
- def created_processables_in_stage(index)
- created_processables.where(stage_idx: index)
+ def stage_indexes_of_created_processables_without_needs
+ created_processables_without_needs.order(:stage_idx)
+ .pluck(Arel.sql('DISTINCT stage_idx'))
end
# rubocop: enable CodeReuse/ActiveRecord
+ # rubocop: disable CodeReuse/ActiveRecord
+ def created_processables_in_stage_without_needs(index)
+ created_processables_without_needs
+ .where(stage_idx: index)
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def created_processables_without_needs
+ if Feature.enabled?(:ci_dag_support, project)
+ pipeline.processables.created.without_needs
+ else
+ pipeline.processables.created
+ end
+ end
+
def created_processables
pipeline.processables.created
end
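A rough sketch of how the DAG path gets exercised (the finished build and user are illustrative; the BuildProcessWorker added further down in this diff is what feeds the ids in practice):

  # Only builds whose `needs` name a finished job are considered here; everything
  # else still goes through the stage-ordered path.
  Ci::ProcessPipelineService
    .new(pipeline.project, user)
    .execute(pipeline, [finished_build.id])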
diff --git a/app/services/ci/retry_build_service.rb b/app/services/ci/retry_build_service.rb
index fab8a179843..338495ba030 100644
--- a/app/services/ci/retry_build_service.rb
+++ b/app/services/ci/retry_build_service.rb
@@ -5,7 +5,7 @@ module Ci
CLONE_ACCESSORS = %i[pipeline project ref tag options name
allow_failure stage stage_id stage_idx trigger_request
yaml_variables when environment coverage_regex
- description tag_list protected].freeze
+ description tag_list protected needs].freeze
def execute(build)
reprocess!(build).tap do |new_build|
diff --git a/app/services/merge_requests/mergeability_check_service.rb b/app/services/merge_requests/mergeability_check_service.rb
index 9fa50c9448f..962e2327b3e 100644
--- a/app/services/merge_requests/mergeability_check_service.rb
+++ b/app/services/merge_requests/mergeability_check_service.rb
@@ -3,6 +3,7 @@
module MergeRequests
class MergeabilityCheckService < ::BaseService
include Gitlab::Utils::StrongMemoize
+ include Gitlab::ExclusiveLeaseHelpers
delegate :project, to: :@merge_request
delegate :repository, to: :project
@@ -21,13 +22,35 @@ module MergeRequests
# where we need the current state of the merge ref in repository, the `recheck`
# argument is required.
#
+ # retry_lease - Concurrent calls wait for at least 10 seconds until the
+ # lease is granted (i.e. until the other process finishes running). Returns
+ # an error ServiceResponse if the lease is not granted during this time.
+ #
# Returns a ServiceResponse indicating merge_status is/became can_be_merged
# and the merge-ref is synced. Success in case of being/becoming mergeable,
# error otherwise.
- def execute(recheck: false)
+ def execute(recheck: false, retry_lease: true)
return ServiceResponse.error(message: 'Invalid argument') unless merge_request
return ServiceResponse.error(message: 'Unsupported operation') if Gitlab::Database.read_only?
+ return check_mergeability(recheck) unless merge_ref_auto_sync_lock_enabled?
+
+ in_write_lock(retry_lease: retry_lease) do |retried|
+ # When multiple calls are waiting for the same lock (retry_lease),
+ # it's possible that by the time the lease is granted the MR status was
+ # already updated for that object, so we reset the merge request if there was a lease retry.
+ merge_request.reset if retried
+
+ check_mergeability(recheck)
+ end
+ rescue FailedToObtainLockError => error
+ ServiceResponse.error(message: error.message)
+ end
+
+ private
+
+ attr_reader :merge_request
+ def check_mergeability(recheck)
recheck! if recheck
update_merge_status
@@ -46,9 +69,21 @@ module MergeRequests
ServiceResponse.success(payload: payload)
end
- private
+ # It's possible for this service to send concurrent requests to Gitaly in order
+ # to "git update-ref" the same ref. Therefore we handle a light exclusive
+ # lease here.
+ #
+ def in_write_lock(retry_lease:, &block)
+ lease_key = "mergeability_check:#{merge_request.id}"
- attr_reader :merge_request
+ lease_opts = {
+ ttl: 1.minute,
+ retries: retry_lease ? 10 : 0,
+ sleep_sec: retry_lease ? 1.second : 0
+ }
+
+ in_lock(lease_key, lease_opts, &block)
+ end
def payload
strong_memoize(:payload) do
@@ -116,5 +151,9 @@ module MergeRequests
def merge_ref_auto_sync_enabled?
Feature.enabled?(:merge_ref_auto_sync, project, default_enabled: true)
end
+
+ def merge_ref_auto_sync_lock_enabled?
+ Feature.enabled?(:merge_ref_auto_sync_lock, project, default_enabled: true)
+ end
end
end
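For illustration, the two calling styles introduced here look roughly like this (a sketch; error handling is up to the caller):

  service = MergeRequests::MergeabilityCheckService.new(merge_request)

  # Fire-and-forget callers (e.g. MergeRequest#check_mergeability above) do not
  # wait on the exclusive lease and get an error ServiceResponse if it is taken.
  service.execute(retry_lease: false)

  # Callers that need a fresh merge ref wait up to ~10 seconds for the lease.
  result = service.execute(recheck: true)
  result.success? # => true when the MR is/became mergeable and the merge ref is synced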
diff --git a/app/services/self_monitoring/project/create_service.rb b/app/services/self_monitoring/project/create_service.rb
index e5ef8c15456..8ffd22de127 100644
--- a/app/services/self_monitoring/project/create_service.rb
+++ b/app/services/self_monitoring/project/create_service.rb
@@ -14,6 +14,7 @@ module SelfMonitoring
steps :validate_admins,
:create_project,
:add_project_members,
+ :add_to_whitelist,
:add_prometheus_manual_configuration
def initialize
@@ -59,15 +60,29 @@ module SelfMonitoring
end
end
- def add_prometheus_manual_configuration
+ def add_to_whitelist
return success unless prometheus_enabled?
return success unless prometheus_listen_address.present?
- # TODO: Currently, adding the internal prometheus server as a manual configuration
- # is only possible if the setting to allow webhooks and services to connect
- # to local network is on.
- # https://gitlab.com/gitlab-org/gitlab-ce/issues/44496 will add
- # a whitelist that will allow connections to certain ips on the local network.
+ uri = parse_url(internal_prometheus_listen_address_uri)
+ return error(_('Prometheus listen_address is not a valid URI')) unless uri
+
+ result = ApplicationSettings::UpdateService.new(
+ Gitlab::CurrentSettings.current_application_settings,
+ project_owner,
+ outbound_local_requests_whitelist: [uri.normalized_host]
+ ).execute
+
+ if result
+ success
+ else
+ error(_('Could not add prometheus URL to whitelist'))
+ end
+ end
+
+ def add_prometheus_manual_configuration
+ return success unless prometheus_enabled?
+ return success unless prometheus_listen_address.present?
service = project.find_or_initialize_service('prometheus')
@@ -79,6 +94,11 @@ module SelfMonitoring
success
end
+ def parse_url(uri_string)
+ Addressable::URI.parse(uri_string)
+ rescue Addressable::URI::InvalidURIError, TypeError
+ end
+
def prometheus_enabled?
Gitlab.config.prometheus.enable
rescue Settingslogic::MissingSetting
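For reference, the host value that ends up in the whitelist comes from Addressable's normalized host; a minimal sketch (the listen address is an assumed example):

  uri = Addressable::URI.parse('http://0.0.0.0:9090')
  uri.normalized_host # => "0.0.0.0", the entry added to outbound_local_requests_whitelist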
diff --git a/app/services/web_hook_service.rb b/app/services/web_hook_service.rb
index 6d675c026bb..8c294218708 100644
--- a/app/services/web_hook_service.rb
+++ b/app/services/web_hook_service.rb
@@ -17,8 +17,10 @@ class WebHookService
@hook = hook
@data = data
@hook_name = hook_name.to_s
- @request_options = { timeout: Gitlab.config.gitlab.webhook_timeout }
- @request_options.merge!(allow_local_requests: true) if @hook.is_a?(SystemHook)
+ @request_options = {
+ timeout: Gitlab.config.gitlab.webhook_timeout,
+ allow_local_requests: hook.allow_local_requests?
+ }
end
def execute
diff --git a/app/validators/addressable_url_validator.rb b/app/validators/addressable_url_validator.rb
index 273e15ef925..bb445499cee 100644
--- a/app/validators/addressable_url_validator.rb
+++ b/app/validators/addressable_url_validator.rb
@@ -107,6 +107,6 @@ class AddressableUrlValidator < ActiveModel::EachValidator
# calls this validator.
#
# See https://gitlab.com/gitlab-org/gitlab-ee/issues/9833
- ApplicationSetting.current&.allow_local_requests_from_hooks_and_services?
+ ApplicationSetting.current&.allow_local_requests_from_web_hooks_and_services?
end
end
diff --git a/app/validators/system_hook_url_validator.rb b/app/validators/system_hook_url_validator.rb
new file mode 100644
index 00000000000..f4253006dad
--- /dev/null
+++ b/app/validators/system_hook_url_validator.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+# SystemHookUrlValidator
+#
+# Custom validator specific to SystemHook URLs. This validator works like AddressableUrlValidator but
+# it blocks URLs pointing to localhost or the local network depending on
+# ApplicationSetting.allow_local_requests_from_system_hooks
+#
+# Example:
+# class SystemHook < WebHook
+# validates :url, system_hook_url: true
+# end
+#
+class SystemHookUrlValidator < PublicUrlValidator
+ def self.allow_setting_local_requests?
+ ApplicationSetting.current&.allow_local_requests_from_system_hooks?
+ end
+end
diff --git a/app/views/admin/application_settings/_outbound.html.haml b/app/views/admin/application_settings/_outbound.html.haml
index 4fecdb59e1d..ad26f52aea7 100644
--- a/app/views/admin/application_settings/_outbound.html.haml
+++ b/app/views/admin/application_settings/_outbound.html.haml
@@ -4,9 +4,13 @@
%fieldset
.form-group
.form-check
- = f.check_box :allow_local_requests_from_hooks_and_services, class: 'form-check-input'
- = f.label :allow_local_requests_from_hooks_and_services, class: 'form-check-label' do
- Allow requests to the local network from hooks and services
+ = f.check_box :allow_local_requests_from_web_hooks_and_services, class: 'form-check-input'
+ = f.label :allow_local_requests_from_web_hooks_and_services, class: 'form-check-label' do
+ = _('Allow requests to the local network from web hooks and services')
+ .form-check
+ = f.check_box :allow_local_requests_from_system_hooks, class: 'form-check-input'
+ = f.label :allow_local_requests_from_system_hooks, class: 'form-check-label' do
+ = _('Allow requests to the local network from system hooks')
.form-group
= f.label :outbound_local_requests_whitelist_raw, class: 'label-bold' do
diff --git a/app/views/clusters/clusters/_banner.html.haml b/app/views/clusters/clusters/_banner.html.haml
index a5de67be96b..4b4278075a6 100644
--- a/app/views/clusters/clusters/_banner.html.haml
+++ b/app/views/clusters/clusters/_banner.html.haml
@@ -3,7 +3,8 @@
%p.js-error-reason
.hidden.js-cluster-creating.bs-callout.bs-callout-info{ role: 'alert' }
- = s_('ClusterIntegration|Kubernetes cluster is being created on Google Kubernetes Engine...')
+ %span.spinner.spinner-dark.spinner-sm{ 'aria-label': 'Loading' }
+ %span.prepend-left-4= s_('ClusterIntegration|Kubernetes cluster is being created on Google Kubernetes Engine...')
.hidden.row.js-cluster-api-unreachable.bs-callout.bs-callout-warning{ role: 'alert' }
.col-11
@@ -18,4 +19,4 @@
%button.js-close-banner.close.cluster-application-banner-close.h-100.m-0= "×"
.hidden.js-cluster-success.bs-callout.bs-callout-success{ role: 'alert' }
- = s_("ClusterIntegration|Kubernetes cluster was successfully created on Google Kubernetes Engine. Refresh the page to see Kubernetes cluster's details")
+ = s_("ClusterIntegration|Kubernetes cluster was successfully created on Google Kubernetes Engine.")
diff --git a/app/views/clusters/clusters/show.html.haml b/app/views/clusters/clusters/show.html.haml
index 4dfbb310142..913d4caa0bc 100644
--- a/app/views/clusters/clusters/show.html.haml
+++ b/app/views/clusters/clusters/show.html.haml
@@ -33,26 +33,29 @@
%section#cluster-integration
%h4= @cluster.name
= render 'banner'
- = render 'form'
-
- = render_if_exists 'projects/clusters/prometheus_graphs'
-
- .cluster-applications-table#js-cluster-applications
-
- %section.settings#js-cluster-details{ class: ('expanded' if expanded) }
- .settings-header
- %h4= s_('ClusterIntegration|Kubernetes cluster details')
- %button.btn.js-settings-toggle{ type: 'button' }
- = expanded ? _('Collapse') : _('Expand')
- %p= s_('ClusterIntegration|See and edit the details for your Kubernetes cluster')
- .settings-content
- = render 'clusters/platforms/kubernetes/form', cluster: @cluster, platform: @cluster.platform_kubernetes, update_cluster_url_path: clusterable.cluster_path(@cluster)
-
- %section.settings.no-animate#js-cluster-advanced-settings{ class: ('expanded' if expanded) }
- .settings-header
- %h4= _('Advanced settings')
- %button.btn.js-settings-toggle{ type: 'button' }
- = expanded ? _('Collapse') : _('Expand')
- %p= s_("ClusterIntegration|Advanced options on this Kubernetes cluster's integration")
- .settings-content#advanced-settings-section
- = render 'advanced_settings'
+
+ - unless @cluster.status_name.in? %i/scheduled creating/
+ = render 'form'
+
+ - unless @cluster.status_name.in? %i/scheduled creating/
+ = render_if_exists 'projects/clusters/prometheus_graphs'
+
+ .cluster-applications-table#js-cluster-applications
+
+ %section.settings#js-cluster-details{ class: ('expanded' if expanded) }
+ .settings-header
+ %h4= s_('ClusterIntegration|Kubernetes cluster details')
+ %button.btn.js-settings-toggle{ type: 'button' }
+ = expanded ? _('Collapse') : _('Expand')
+ %p= s_('ClusterIntegration|See and edit the details for your Kubernetes cluster')
+ .settings-content
+ = render 'clusters/platforms/kubernetes/form', cluster: @cluster, platform: @cluster.platform_kubernetes, update_cluster_url_path: clusterable.cluster_path(@cluster)
+
+ %section.settings.no-animate#js-cluster-advanced-settings{ class: ('expanded' if expanded) }
+ .settings-header
+ %h4= _('Advanced settings')
+ %button.btn.js-settings-toggle{ type: 'button' }
+ = expanded ? _('Collapse') : _('Expand')
+ %p= s_("ClusterIntegration|Advanced options on this Kubernetes cluster's integration")
+ .settings-content#advanced-settings-section
+ = render 'advanced_settings'
diff --git a/app/views/explore/projects/_projects.html.haml b/app/views/explore/projects/_projects.html.haml
index 67f2f897137..35b32662b8a 100644
--- a/app/views/explore/projects/_projects.html.haml
+++ b/app/views/explore/projects/_projects.html.haml
@@ -1 +1,2 @@
-= render 'shared/projects/list', projects: projects, user: current_user
+- is_explore_page = defined?(explore_page) && explore_page
+= render 'shared/projects/list', projects: projects, user: current_user, explore_page: is_explore_page
diff --git a/app/views/explore/projects/trending.html.haml b/app/views/explore/projects/trending.html.haml
index ed508fa2506..153c90e534e 100644
--- a/app/views/explore/projects/trending.html.haml
+++ b/app/views/explore/projects/trending.html.haml
@@ -10,4 +10,4 @@
= render 'explore/head'
= render 'explore/projects/nav' unless Feature.enabled?(:project_list_filter_bar) && current_user
-= render 'projects', projects: @projects
+= render 'projects', projects: @projects, explore_page: true
diff --git a/app/views/help/_shortcuts.html.haml b/app/views/help/_shortcuts.html.haml
index efb3815b257..46d7c367aa7 100644
--- a/app/views/help/_shortcuts.html.haml
+++ b/app/views/help/_shortcuts.html.haml
@@ -24,11 +24,11 @@
%td.shortcut
%kbd f
%td Focus Filter
- - if performance_bar_enabled?
- %tr
- %td.shortcut
- %kbd p b
- %td Show/hide the Performance Bar
+ %tr
+ %td.shortcut
+ %kbd p
+ %kbd b
+ %td Toggle the Performance Bar
%tr
%td.shortcut
%kbd ?
diff --git a/app/views/layouts/_head.html.haml b/app/views/layouts/_head.html.haml
index 20b844f9fd8..ac774803f95 100644
--- a/app/views/layouts/_head.html.haml
+++ b/app/views/layouts/_head.html.haml
@@ -78,4 +78,3 @@
= render 'layouts/google_analytics' if extra_config.has_key?('google_analytics_id')
= render 'layouts/piwik' if extra_config.has_key?('piwik_url') && extra_config.has_key?('piwik_site_id')
= render_if_exists 'layouts/snowplow'
- = render_if_exists 'layouts/pendo' if Feature.enabled?(:pendo_tracking) && !Rails.env.test?
diff --git a/app/views/projects/_flash_messages.html.haml b/app/views/projects/_flash_messages.html.haml
index d95045c9cce..f9222387e97 100644
--- a/app/views/projects/_flash_messages.html.haml
+++ b/app/views/projects/_flash_messages.html.haml
@@ -5,7 +5,7 @@
- if current_user && can?(current_user, :download_code, project)
= render 'shared/no_ssh'
= render 'shared/no_password'
- = render_if_exists 'shared/shared_runners_minutes_limit', project: project
- unless project.empty_repo?
= render 'shared/auto_devops_implicitly_enabled_banner', project: project
= render_if_exists 'projects/above_size_limit_warning', project: project
+ = render_if_exists 'shared/shared_runners_minutes_limit', project: project, classes: [container_class, ("limit-container-width" unless fluid_layout)]
diff --git a/app/views/projects/jobs/show.html.haml b/app/views/projects/jobs/show.html.haml
index c7fab87a593..a3688c17041 100644
--- a/app/views/projects/jobs/show.html.haml
+++ b/app/views/projects/jobs/show.html.haml
@@ -7,9 +7,10 @@
= stylesheet_link_tag 'page_bundles/xterm'
%div{ class: container_class }
- #js-job-vue-app{ data: { endpoint: project_job_path(@project, @build, format: :json),
+ #js-job-vue-app{ data: { endpoint: project_job_path(@project, @build, format: :json), project_path: @project.full_path,
deployment_help_url: help_page_path('user/project/clusters/index.html', anchor: 'troubleshooting-failed-deployment-jobs'),
runner_help_url: help_page_path('ci/runners/README.html', anchor: 'setting-maximum-job-timeout-for-a-runner'),
runner_settings_url: project_runners_path(@build.project, anchor: 'js-runners-settings'),
variables_settings_url: project_variables_path(@build.project, anchor: 'js-cicd-variables-settings'),
+ page_path: project_job_path(@project, @build), build_status: @build.status, build_stage: @build.stage, log_state: '',
build_options: javascript_build_options } }
diff --git a/app/views/projects/mirrors/_instructions.html.haml b/app/views/projects/mirrors/_instructions.html.haml
index 33e5a6e67c3..1a163cc4a54 100644
--- a/app/views/projects/mirrors/_instructions.html.haml
+++ b/app/views/projects/mirrors/_instructions.html.haml
@@ -2,7 +2,7 @@
%ul
%li
= _('The repository must be accessible over <code>http://</code>,
- <code>https://</code>, <code>ssh://</code> and <code>git://</code>.').html_safe
+ <code>https://</code>, <code>ssh://</code> or <code>git://</code>.').html_safe
%li= _('Include the username in the URL if required: <code>https://username@gitlab.company.com/group/project.git</code>.').html_safe
%li
- minutes = Gitlab.config.gitlab_shell.git_timeout / 60
diff --git a/app/views/projects/services/prometheus/_metrics.html.haml b/app/views/projects/services/prometheus/_metrics.html.haml
index a1d74b91002..3aefb3fdbb9 100644
--- a/app/views/projects/services/prometheus/_metrics.html.haml
+++ b/app/views/projects/services/prometheus/_metrics.html.haml
@@ -1,28 +1,34 @@
- project = local_assigns.fetch(:project)
-.card.js-panel-monitored-metrics{ data: { active_metrics: active_common_project_prometheus_metrics_path(project, :json), metrics_help_path: help_page_path('user/project/integrations/prometheus_library/index') } }
- .card-header
- = s_('PrometheusService|Common metrics')
- %span.badge.badge-pill.js-monitored-count 0
- .card-body
- .loading-metrics.js-loading-metrics
- %p.prepend-top-10.prepend-left-10
- = icon('spinner spin', class: 'metrics-load-spinner')
- = s_('PrometheusService|Finding and configuring metrics...')
- .empty-metrics.hidden.js-empty-metrics
- %p.text-tertiary.prepend-top-10.prepend-left-10
- = s_('PrometheusService|Waiting for your first deployment to an environment to find common metrics')
- %ul.list-unstyled.metrics-list.hidden.js-metrics-list
+.col-lg-3
+ %p
+ = s_('PrometheusService|Common metrics are automatically monitored based on a library of metrics from popular exporters.')
+ = link_to s_('PrometheusService|More information'), help_page_path('user/project/integrations/prometheus_library/index'), target: '_blank', rel: "noopener noreferrer"
-.card.hidden.js-panel-missing-env-vars
- .card-header
- = icon('caret-right lg fw', class: 'panel-toggle js-panel-toggle', 'aria-label' => 'Toggle panel')
- = s_('PrometheusService|Missing environment variable')
- %span.badge.badge-pill.js-env-var-count 0
- .card-body.hidden
- .flash-container
- .flash-notice
- .flash-text
- = s_("PrometheusService|To set up automatic monitoring, add the environment variable %{variable} to exporter's queries." % { variable: "<code>$CI_ENVIRONMENT_SLUG</code>" }).html_safe
- = link_to s_('PrometheusService|More information'), help_page_path('user/project/integrations/prometheus', anchor: 'metrics-and-labels')
- %ul.list-unstyled.metrics-list.js-missing-var-metrics-list
+.col-lg-9
+ .card.js-panel-monitored-metrics{ data: { active_metrics: active_common_project_prometheus_metrics_path(project, :json), metrics_help_path: help_page_path('user/project/integrations/prometheus_library/index') } }
+ .card-header
+ = s_('PrometheusService|Common metrics')
+ %span.badge.badge-pill.js-monitored-count 0
+ .card-body
+ .loading-metrics.js-loading-metrics
+ %p.prepend-top-10.prepend-left-10
+ = icon('spinner spin', class: 'metrics-load-spinner')
+ = s_('PrometheusService|Finding and configuring metrics...')
+ .empty-metrics.hidden.js-empty-metrics
+ %p.text-tertiary.prepend-top-10.prepend-left-10
+ = s_('PrometheusService|Waiting for your first deployment to an environment to find common metrics')
+ %ul.list-unstyled.metrics-list.hidden.js-metrics-list
+
+ .card.hidden.js-panel-missing-env-vars
+ .card-header
+ = icon('caret-right lg fw', class: 'panel-toggle js-panel-toggle', 'aria-label' => 'Toggle panel')
+ = s_('PrometheusService|Missing environment variable')
+ %span.badge.badge-pill.js-env-var-count 0
+ .card-body.hidden
+ .flash-container
+ .flash-notice
+ .flash-text
+ = s_("PrometheusService|To set up automatic monitoring, add the environment variable %{variable} to exporter's queries." % { variable: "<code>$CI_ENVIRONMENT_SLUG</code>" }).html_safe
+ = link_to s_('PrometheusService|More information'), help_page_path('user/project/integrations/prometheus', anchor: 'metrics-and-labels')
+ %ul.list-unstyled.metrics-list.js-missing-var-metrics-list
diff --git a/app/views/projects/services/prometheus/_show.html.haml b/app/views/projects/services/prometheus/_show.html.haml
index 6aafa85e99a..c719661d8e8 100644
--- a/app/views/projects/services/prometheus/_show.html.haml
+++ b/app/views/projects/services/prometheus/_show.html.haml
@@ -1,12 +1,9 @@
-.row.prepend-top-default.append-bottom-default.prometheus-metrics-monitoring.js-prometheus-metrics-monitoring
+.row
.col-lg-3
%h4.prepend-top-0
= s_('PrometheusService|Metrics')
- %p
- = s_('PrometheusService|Common metrics are automatically monitored based on a library of metrics from popular exporters.')
- = link_to s_('PrometheusService|More information'), help_page_path('user/project/integrations/prometheus_library/index'), target: '_blank', rel: "noopener noreferrer"
- .col-lg-9
- = render 'projects/services/prometheus/metrics', project: @project
+.row.append-bottom-default.prometheus-metrics-monitoring.js-prometheus-metrics-monitoring
+ = render 'projects/services/prometheus/metrics', project: @project
= render_if_exists 'projects/services/prometheus/external_alerts', project: @project
diff --git a/app/views/projects/settings/ci_cd/_form.html.haml b/app/views/projects/settings/ci_cd/_form.html.haml
index 2d108a1cba5..498a9744783 100644
--- a/app/views/projects/settings/ci_cd/_form.html.haml
+++ b/app/views/projects/settings/ci_cd/_form.html.haml
@@ -99,7 +99,7 @@
%code \(\d+.\d+\%\) covered
%li
pytest-cov (Python) -
- %code ^TOTAL\s+\d+\s+\d+\s+(\d+\%)$
+ %code ^TOTAL.+?(\d+\%)$
%li
phpunit --coverage-text --colors=never (PHP) -
%code ^\s*Lines:\s*\d+.\d+\%
diff --git a/app/views/shared/issuable/_search_bar.html.haml b/app/views/shared/issuable/_search_bar.html.haml
index e253413929a..c9458475aa5 100644
--- a/app/views/shared/issuable/_search_bar.html.haml
+++ b/app/views/shared/issuable/_search_bar.html.haml
@@ -1,6 +1,7 @@
- type = local_assigns.fetch(:type)
- board = local_assigns.fetch(:board, nil)
-- block_css_class = type != :boards_modal ? 'row-content-block second-block' : ''
+- is_not_boards_modal_or_productivity_analytics = type != :boards_modal && type != :productivity_analytics
+- block_css_class = is_not_boards_modal_or_productivity_analytics ? 'row-content-block second-block' : ''
- user_can_admin_list = board && can?(current_user, :admin_list, board.parent)
.issues-filters{ class: ("w-100" if type == :boards_modal) }
@@ -155,5 +156,5 @@
- if @project
#js-add-issues-btn.prepend-left-10{ data: { can_admin_list: can?(current_user, :admin_list, @project) } }
#js-toggle-focus-btn
- - elsif type != :boards_modal
+ - elsif is_not_boards_modal_or_productivity_analytics
= render 'shared/issuable/sort_dropdown'
diff --git a/app/views/shared/issuable/_sort_dropdown.html.haml b/app/views/shared/issuable/_sort_dropdown.html.haml
index df0523595f5..8260915c2ab 100644
--- a/app/views/shared/issuable/_sort_dropdown.html.haml
+++ b/app/views/shared/issuable/_sort_dropdown.html.haml
@@ -1,7 +1,7 @@
- sort_value = @sort
- sort_title = issuable_sort_option_title(sort_value)
- viewing_issues = controller.controller_name == 'issues' || controller.action_name == 'issues'
-- manual_sorting = viewing_issues && controller.controller_name != 'dashboard' && Feature.enabled?(:manual_sorting, default_enabled: true)
+- manual_sorting = viewing_issues && controller.controller_name != 'dashboard'
.dropdown.inline.prepend-left-10.issue-sort-dropdown
.btn-group{ role: 'group' }
diff --git a/app/views/shared/milestones/_top.html.haml b/app/views/shared/milestones/_top.html.haml
index 43503e1d08a..fd3317341f6 100644
--- a/app/views/shared/milestones/_top.html.haml
+++ b/app/views/shared/milestones/_top.html.haml
@@ -53,7 +53,7 @@
- close_msg = group ? 'You may close the milestone now.' : 'Navigate to the project to close the milestone.'
%span All issues for this milestone are closed. #{close_msg}
-= render_if_exists 'shared/milestones/burndown', milestone: @milestone, project: @project
+= render_if_exists 'shared/milestones/burndown', milestone: milestone, project: @project
- if is_dynamic_milestone
.table-holder
diff --git a/app/views/shared/notes/_hints.html.haml b/app/views/shared/notes/_hints.html.haml
index 72ede50dd8c..fae7d6526e8 100644
--- a/app/views/shared/notes/_hints.html.haml
+++ b/app/views/shared/notes/_hints.html.haml
@@ -1,13 +1,14 @@
- supports_quick_actions = local_assigns.fetch(:supports_quick_actions, false)
.comment-toolbar.clearfix
.toolbar-text
- - md_link_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer" tabindex="-1">'.html_safe % { url: help_page_path('user/markdown') }
- - actions_link_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer" tabindex="-1">'.html_safe % { url: help_page_path('user/project/quick_actions') }
- - link_end = '</a>'.html_safe
+ = link_to _('Markdown'), help_page_path('user/markdown'), target: '_blank', tabindex: -1
- if supports_quick_actions
- = s_('Editor|%{mdLinkStart}Markdown%{mdLinkEnd} and %{actionsLinkStart}quick actions%{actionsLinkEnd} are supported').html_safe % { mdLinkStart: md_link_start, mdLinkEnd: link_end, actionsLinkStart: actions_link_start, actionsLinkEnd: link_end }
+ and
+ = link_to _('quick actions'), help_page_path('user/project/quick_actions'), target: '_blank', tabindex: -1
+ are
- else
- = s_('Editor|%{mdLinkStart}Markdown is supported%{mdLinkEnd}').html_safe % { mdLinkStart: md_link_start, mdLinkEnd: link_end }
+ is
+ supported
%span.uploading-container
%span.uploading-progress-container.hide
diff --git a/app/views/shared/projects/_list.html.haml b/app/views/shared/projects/_list.html.haml
index 576ec3e1782..67cb1aa549c 100644
--- a/app/views/shared/projects/_list.html.haml
+++ b/app/views/shared/projects/_list.html.haml
@@ -21,6 +21,7 @@
- own_projects_current_user_empty_message_header = s_('UserProfile|You haven\'t created any personal projects.')
- own_projects_current_user_empty_message_description = s_('UserProfile|Your projects can be available publicly, internally, or privately, at your choice.')
- own_projects_visitor_empty_message = s_('UserProfile|This user doesn\'t have any personal projects')
+- explore_page_empty_message = s_('UserProfile|Explore public groups to find projects to contribute to.')
- primary_button_label = _('New project')
- primary_button_link = new_project_path
- secondary_button_label = _('Explore groups')
@@ -58,4 +59,4 @@
current_user_empty_message_description: own_projects_current_user_empty_message_description,
primary_button_label: primary_button_label,
primary_button_link: primary_button_link,
- visitor_empty_message: own_projects_visitor_empty_message }
+ visitor_empty_message: defined?(explore_page) && explore_page ? explore_page_empty_message : own_projects_visitor_empty_message }
diff --git a/app/views/u2f/_register.html.haml b/app/views/u2f/_register.html.haml
index f6724f72307..ef3835332a7 100644
--- a/app/views/u2f/_register.html.haml
+++ b/app/views/u2f/_register.html.haml
@@ -16,7 +16,7 @@
.col-md-4
%button#js-setup-u2f-device.btn.btn-info.btn-block{ disabled: true }= _("Set up new U2F device")
.col-md-8
- %p.text-warning= _("You need to register a two-factor authentication app before you can set up a U2F device.")
+ %p= _("You need to register a two-factor authentication app before you can set up a U2F device.")
%script#js-register-u2f-in-progress{ type: "text/template" }
%p= _("Trying to communicate with your device. Plug it in (if you haven't already) and press the button on the device now.")
diff --git a/app/workers/all_queues.yml b/app/workers/all_queues.yml
index 991a177018e..400becdd023 100644
--- a/app/workers/all_queues.yml
+++ b/app/workers/all_queues.yml
@@ -88,6 +88,7 @@
- pipeline_processing:ci_build_prepare
- pipeline_processing:build_queue
- pipeline_processing:build_success
+- pipeline_processing:build_process
- pipeline_processing:pipeline_process
- pipeline_processing:pipeline_success
- pipeline_processing:pipeline_update
diff --git a/app/workers/build_process_worker.rb b/app/workers/build_process_worker.rb
new file mode 100644
index 00000000000..9cd9519df1f
--- /dev/null
+++ b/app/workers/build_process_worker.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+class BuildProcessWorker
+ include ApplicationWorker
+ include PipelineQueue
+
+ queue_namespace :pipeline_processing
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def perform(build_id)
+ CommitStatus.find_by(id: build_id).try do |build|
+ build.pipeline.process!([build_id])
+ end
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+end
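Read together with the ProcessPipelineService and PipelineProcessWorker hunks, the intended flow is roughly the following (a sketch; how the worker gets enqueued is outside this diff):

  BuildProcessWorker.perform_async(finished_build.id)
  # -> pipeline.process!([finished_build.id])
  # -> Ci::ProcessPipelineService#execute(pipeline, [finished_build.id]),
  #    which re-processes only builds whose `needs` include the finished job.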
diff --git a/app/workers/namespaces/root_statistics_worker.rb b/app/workers/namespaces/root_statistics_worker.rb
index 48876825564..0c1ca5eb975 100644
--- a/app/workers/namespaces/root_statistics_worker.rb
+++ b/app/workers/namespaces/root_statistics_worker.rb
@@ -9,7 +9,7 @@ module Namespaces
def perform(namespace_id)
namespace = Namespace.find(namespace_id)
- return unless update_statistics_enabled_for?(namespace) && namespace.aggregation_scheduled?
+ return unless namespace.aggregation_scheduled?
Namespaces::StatisticsRefresherService.new.execute(namespace)
@@ -23,9 +23,5 @@ module Namespaces
def log_error(namespace_path, error_message)
Gitlab::SidekiqLogger.error("Namespace statistics can't be updated for #{namespace_path}: #{error_message}")
end
-
- def update_statistics_enabled_for?(namespace)
- Feature.enabled?(:update_statistics_namespace, namespace)
- end
end
end
diff --git a/app/workers/namespaces/schedule_aggregation_worker.rb b/app/workers/namespaces/schedule_aggregation_worker.rb
index a4594b84b13..983ce4bef4a 100644
--- a/app/workers/namespaces/schedule_aggregation_worker.rb
+++ b/app/workers/namespaces/schedule_aggregation_worker.rb
@@ -12,7 +12,7 @@ module Namespaces
namespace = Namespace.find(namespace_id)
root_ancestor = namespace.root_ancestor
- return unless update_statistics_enabled_for?(root_ancestor) && !root_ancestor.aggregation_scheduled?
+ return if root_ancestor.aggregation_scheduled?
Namespace::AggregationSchedule.safe_find_or_create_by!(namespace_id: root_ancestor.id)
rescue ActiveRecord::RecordNotFound
@@ -37,9 +37,5 @@ module Namespaces
def log_error(root_ancestor_id)
Gitlab::SidekiqLogger.error("Namespace can't be scheduled for aggregation: #{root_ancestor_id} does not exist")
end
-
- def update_statistics_enabled_for?(root_ancestor)
- Feature.enabled?(:update_statistics_namespace, root_ancestor)
- end
end
end
diff --git a/app/workers/pipeline_process_worker.rb b/app/workers/pipeline_process_worker.rb
index f2aa17acb51..96524d93f8d 100644
--- a/app/workers/pipeline_process_worker.rb
+++ b/app/workers/pipeline_process_worker.rb
@@ -7,9 +7,10 @@ class PipelineProcessWorker
queue_namespace :pipeline_processing
# rubocop: disable CodeReuse/ActiveRecord
- def perform(pipeline_id)
- Ci::Pipeline.find_by(id: pipeline_id)
- .try(:process!)
+ def perform(pipeline_id, build_ids = nil)
+ Ci::Pipeline.find_by(id: pipeline_id).try do |pipeline|
+ pipeline.process!(build_ids)
+ end
end
# rubocop: enable CodeReuse/ActiveRecord
end
diff --git a/bin/web_puma b/bin/web_puma
index 178fe84800d..29d72cd4a41 100755
--- a/bin/web_puma
+++ b/bin/web_puma
@@ -10,7 +10,7 @@ puma_config="$app_root/config/puma.rb"
spawn_puma()
{
- exec bundle exec puma --config "${puma_config}" "$@"
+ exec bundle exec puma --config "${puma_config}" --environment "$RAILS_ENV" "$@"
}
get_puma_pid()
diff --git a/changelogs/unreleased/17276-breakage-in-displaying-svg-in-the-same-repository.yml b/changelogs/unreleased/17276-breakage-in-displaying-svg-in-the-same-repository.yml
new file mode 100644
index 00000000000..93936d441e7
--- /dev/null
+++ b/changelogs/unreleased/17276-breakage-in-displaying-svg-in-the-same-repository.yml
@@ -0,0 +1,5 @@
+---
+title: Fix inline rendering of relative paths to SVGs from the current repository
+merge_request: 31352
+author:
+type: fixed
diff --git a/changelogs/unreleased/26866-api-endpoint-to-list-the-docker-images-tags-of-a-group.yml b/changelogs/unreleased/26866-api-endpoint-to-list-the-docker-images-tags-of-a-group.yml
new file mode 100644
index 00000000000..adbd7971a14
--- /dev/null
+++ b/changelogs/unreleased/26866-api-endpoint-to-list-the-docker-images-tags-of-a-group.yml
@@ -0,0 +1,6 @@
+---
+title: Add API endpoints to return container repositories and tags from the group
+ level
+merge_request: 30817
+author:
+type: added
diff --git a/changelogs/unreleased/50130-cluster-cluster-details-update-automatically-after-cluster-is-created.yml b/changelogs/unreleased/50130-cluster-cluster-details-update-automatically-after-cluster-is-created.yml
new file mode 100644
index 00000000000..dc718572cfb
--- /dev/null
+++ b/changelogs/unreleased/50130-cluster-cluster-details-update-automatically-after-cluster-is-created.yml
@@ -0,0 +1,5 @@
+---
+title: Update cluster page automatically when cluster is created
+merge_request: 27189
+author:
+type: changed
diff --git a/changelogs/unreleased/58256-incorrect-empty-state-message-displayed-on-explore-projects-tab.yml b/changelogs/unreleased/58256-incorrect-empty-state-message-displayed-on-explore-projects-tab.yml
new file mode 100644
index 00000000000..f719338b9cb
--- /dev/null
+++ b/changelogs/unreleased/58256-incorrect-empty-state-message-displayed-on-explore-projects-tab.yml
@@ -0,0 +1,5 @@
+---
+title: Resolve incorrect empty state message on the Explore projects tab
+merge_request: 25578
+author:
+type: fixed
diff --git a/changelogs/unreleased/59521-job-sidebar-has-a-blank-block.yml b/changelogs/unreleased/59521-job-sidebar-has-a-blank-block.yml
new file mode 100644
index 00000000000..4c93a108f2b
--- /dev/null
+++ b/changelogs/unreleased/59521-job-sidebar-has-a-blank-block.yml
@@ -0,0 +1,5 @@
+---
+title: Remove blank block from job sidebar
+merge_request: 30754
+author:
+type: fixed
diff --git a/changelogs/unreleased/60516-uninstall-tiller.yml b/changelogs/unreleased/60516-uninstall-tiller.yml
new file mode 100644
index 00000000000..db25e7b3338
--- /dev/null
+++ b/changelogs/unreleased/60516-uninstall-tiller.yml
@@ -0,0 +1,5 @@
+---
+title: Allow Helm to be uninstalled from the UI
+merge_request: 27359
+author:
+type: added
diff --git a/changelogs/unreleased/60664-kubernetes-applications-uninstall-cert-manager.yml b/changelogs/unreleased/60664-kubernetes-applications-uninstall-cert-manager.yml
new file mode 100644
index 00000000000..efc3ec241e2
--- /dev/null
+++ b/changelogs/unreleased/60664-kubernetes-applications-uninstall-cert-manager.yml
@@ -0,0 +1,5 @@
+---
+title: Allow Cert-Manager to be uninstalled
+merge_request: 31166
+author:
+type: added
diff --git a/changelogs/unreleased/61776-fixing-the-U2F-warning-message-text-colour.yml b/changelogs/unreleased/61776-fixing-the-U2F-warning-message-text-colour.yml
new file mode 100644
index 00000000000..988eb77db12
--- /dev/null
+++ b/changelogs/unreleased/61776-fixing-the-U2F-warning-message-text-colour.yml
@@ -0,0 +1,5 @@
+---
+title: Remove the warning style from the U2F device message in user settings > account
+merge_request: 30119
+author: matejlatin
+type: other
diff --git a/changelogs/unreleased/63571-fix-gc-profiler-data-being-wiped.yml b/changelogs/unreleased/63571-fix-gc-profiler-data-being-wiped.yml
new file mode 100644
index 00000000000..7943d9573f3
--- /dev/null
+++ b/changelogs/unreleased/63571-fix-gc-profiler-data-being-wiped.yml
@@ -0,0 +1,5 @@
+---
+title: Fix GC::Profiler metrics fetching
+merge_request: 31331
+author:
+type: fixed
diff --git a/changelogs/unreleased/64092-removes-update-statistics-namespace-feature-flag.yml b/changelogs/unreleased/64092-removes-update-statistics-namespace-feature-flag.yml
new file mode 100644
index 00000000000..272c830a914
--- /dev/null
+++ b/changelogs/unreleased/64092-removes-update-statistics-namespace-feature-flag.yml
@@ -0,0 +1,5 @@
+---
+title: Enables storage statistics for root namespaces on database
+merge_request: 31392
+author:
+type: other
diff --git a/changelogs/unreleased/64341-user-callout-deferred-link-support.yml b/changelogs/unreleased/64341-user-callout-deferred-link-support.yml
new file mode 100644
index 00000000000..05230ddc124
--- /dev/null
+++ b/changelogs/unreleased/64341-user-callout-deferred-link-support.yml
@@ -0,0 +1,5 @@
+---
+title: Add support for deferred links in persistent user callouts.
+merge_request: 30818
+author:
+type: added
diff --git a/changelogs/unreleased/64675-Dashboard-URL-legend-border.yml b/changelogs/unreleased/64675-Dashboard-URL-legend-border.yml
new file mode 100644
index 00000000000..f35261fcd6c
--- /dev/null
+++ b/changelogs/unreleased/64675-Dashboard-URL-legend-border.yml
@@ -0,0 +1,5 @@
+---
+title: Removed external dashboard legend border
+merge_request: 31407
+author:
+type: fixed
diff --git a/changelogs/unreleased/GL-12757.yml b/changelogs/unreleased/GL-12757.yml
new file mode 100644
index 00000000000..e58ecf9259f
--- /dev/null
+++ b/changelogs/unreleased/GL-12757.yml
@@ -0,0 +1,5 @@
+---
+title: Update the container scanning CI template to use v12 of the clair scanner.
+merge_request: 30809
+author:
+type: changed
diff --git a/changelogs/unreleased/double-slash-64592.yml b/changelogs/unreleased/double-slash-64592.yml
new file mode 100644
index 00000000000..e3b5b197ac5
--- /dev/null
+++ b/changelogs/unreleased/double-slash-64592.yml
@@ -0,0 +1,5 @@
+---
+title: Prevent double slash in review apps path
+merge_request: 31212
+author:
+type: fixed
diff --git a/changelogs/unreleased/fix-bin-web-puma-script-to-consider-rails-env.yml b/changelogs/unreleased/fix-bin-web-puma-script-to-consider-rails-env.yml
new file mode 100644
index 00000000000..a0564369b02
--- /dev/null
+++ b/changelogs/unreleased/fix-bin-web-puma-script-to-consider-rails-env.yml
@@ -0,0 +1,5 @@
+---
+title: Make `bin/web_puma` consider RAILS_ENV
+merge_request: 31378
+author:
+type: fixed
diff --git a/changelogs/unreleased/fix-i18n-updated-projects.yml b/changelogs/unreleased/fix-i18n-updated-projects.yml
deleted file mode 100644
index 408ee438480..00000000000
--- a/changelogs/unreleased/fix-i18n-updated-projects.yml
+++ /dev/null
@@ -1,5 +0,0 @@
----
-title: Properly translate term in projects list
-merge_request: 30958
-author:
-type: fixed
diff --git a/changelogs/unreleased/fix-name-vs-path-problem-for-cycle-analytics.yml b/changelogs/unreleased/fix-name-vs-path-problem-for-cycle-analytics.yml
new file mode 100644
index 00000000000..7d171c2cf5b
--- /dev/null
+++ b/changelogs/unreleased/fix-name-vs-path-problem-for-cycle-analytics.yml
@@ -0,0 +1,5 @@
+---
+title: Fix broken issue links and possible 500 error on cycle analytics page when project name and path are different
+merge_request: 31471
+author:
+type: fixed
diff --git a/changelogs/unreleased/georgekoltsov-55474-outbound-setting-system-hooks.yml b/changelogs/unreleased/georgekoltsov-55474-outbound-setting-system-hooks.yml
new file mode 100644
index 00000000000..fb1acb1e9f5
--- /dev/null
+++ b/changelogs/unreleased/georgekoltsov-55474-outbound-setting-system-hooks.yml
@@ -0,0 +1,5 @@
+---
+title: Add new outbound network requests application setting for system hooks
+merge_request: 31177
+author:
+type: added
diff --git a/changelogs/unreleased/implement-dag.yml b/changelogs/unreleased/implement-dag.yml
new file mode 100644
index 00000000000..72f3f9a510c
--- /dev/null
+++ b/changelogs/unreleased/implement-dag.yml
@@ -0,0 +1,5 @@
+---
+title: "Support creating DAGs in CI config through the `needs` key"
+merge_request: 31328
+author:
+type: added
diff --git a/changelogs/unreleased/jprovazn-fix-positioning.yml b/changelogs/unreleased/jprovazn-fix-positioning.yml
new file mode 100644
index 00000000000..5d703008bba
--- /dev/null
+++ b/changelogs/unreleased/jprovazn-fix-positioning.yml
@@ -0,0 +1,5 @@
+---
+title: Optimize relative re-positioning when moving issues.
+merge_request: 30938
+author:
+type: fixed
diff --git a/changelogs/unreleased/jramsay-fix-mirroring-help-text-typo.yml b/changelogs/unreleased/jramsay-fix-mirroring-help-text-typo.yml
new file mode 100644
index 00000000000..4c049dffc58
--- /dev/null
+++ b/changelogs/unreleased/jramsay-fix-mirroring-help-text-typo.yml
@@ -0,0 +1,5 @@
+---
+title: Fix mirroring help text
+merge_request: 31348
+author: jramsay
+type: other
diff --git a/changelogs/unreleased/jupyter-fixes-v1.yml b/changelogs/unreleased/jupyter-fixes-v1.yml
new file mode 100644
index 00000000000..7a34f273c90
--- /dev/null
+++ b/changelogs/unreleased/jupyter-fixes-v1.yml
@@ -0,0 +1,5 @@
+---
+title: Jupyter fixes
+merge_request: 31332
+author: Amit Rathi
+type: fixed
diff --git a/changelogs/unreleased/mh-editor-indents.yml b/changelogs/unreleased/mh-editor-indents.yml
deleted file mode 100644
index a282c0f505d..00000000000
--- a/changelogs/unreleased/mh-editor-indents.yml
+++ /dev/null
@@ -1,5 +0,0 @@
----
-title: Markdown editors now have indentation shortcuts and auto-indentation
-merge_request: 28914
-author:
-type: added
diff --git a/changelogs/unreleased/sh-disable-redis-peek.yml b/changelogs/unreleased/sh-disable-redis-peek.yml
new file mode 100644
index 00000000000..de86c0031c7
--- /dev/null
+++ b/changelogs/unreleased/sh-disable-redis-peek.yml
@@ -0,0 +1,5 @@
+---
+title: Only track Redis calls if Peek is enabled
+merge_request: 31438
+author:
+type: performance
diff --git a/changelogs/unreleased/sh-fix-special-role-error-500.yml b/changelogs/unreleased/sh-fix-special-role-error-500.yml
new file mode 100644
index 00000000000..9aed0710da3
--- /dev/null
+++ b/changelogs/unreleased/sh-fix-special-role-error-500.yml
@@ -0,0 +1,5 @@
+---
+title: Fix first-time contributor notes not rendering
+merge_request: 31340
+author:
+type: fixed
diff --git a/changelogs/unreleased/snowplow-ee-to-ce.yml b/changelogs/unreleased/snowplow-ee-to-ce.yml
new file mode 100644
index 00000000000..85c959f0510
--- /dev/null
+++ b/changelogs/unreleased/snowplow-ee-to-ce.yml
@@ -0,0 +1,5 @@
+---
+title: Moves snowplow tracking from ee to ce
+merge_request: 31160
+author: jejacks0n
+type: added
diff --git a/changelogs/unreleased/update-pipelines-minutes-expiry-banner-to-an-alert-component-type.yml b/changelogs/unreleased/update-pipelines-minutes-expiry-banner-to-an-alert-component-type.yml
new file mode 100644
index 00000000000..8c1a033dd29
--- /dev/null
+++ b/changelogs/unreleased/update-pipelines-minutes-expiry-banner-to-an-alert-component-type.yml
@@ -0,0 +1,5 @@
+---
+title: Enhance style of the shared runners limit
+merge_request: 31386
+author:
+type: other
diff --git a/config/gitlab.yml.example b/config/gitlab.yml.example
index dd53127ac2c..39b719a5978 100644
--- a/config/gitlab.yml.example
+++ b/config/gitlab.yml.example
@@ -427,6 +427,11 @@ production: &base
# If it is blank, it defaults to external_url.
node_name: ''
+ registry_replication:
+ # enabled: true
+ # primary_api_url: http://localhost:5000/ # internal address to the primary registry, used by GitLab to communicate directly with the primary registry API
+
+
#
# 2. GitLab CI settings
# ==========================
diff --git a/config/initializers/0_inflections.rb b/config/initializers/0_inflections.rb
index 4d1f4917275..d317825c1b8 100644
--- a/config/initializers/0_inflections.rb
+++ b/config/initializers/0_inflections.rb
@@ -19,6 +19,7 @@ ActiveSupport::Inflector.inflections do |inflect|
project_registry
file_registry
job_artifact_registry
+ container_repository_registry
vulnerability_feedback
vulnerabilities_feedback
group_view
diff --git a/config/initializers/1_settings.rb b/config/initializers/1_settings.rb
index 32fec7c3d22..659801f787d 100644
--- a/config/initializers/1_settings.rb
+++ b/config/initializers/1_settings.rb
@@ -296,6 +296,12 @@ Gitlab.ee do
Settings['geo'] ||= Settingslogic.new({})
# For backwards compatibility, default to gitlab_url and if so, ensure it ends with "/"
Settings.geo['node_name'] = Settings.geo['node_name'].presence || Settings.gitlab['url'].chomp('/').concat('/')
+
+ #
+ # Registry replication
+ #
+ Settings.geo['registry_replication'] ||= Settingslogic.new({})
+ Settings.geo.registry_replication['enabled'] ||= false
end
#
@@ -473,6 +479,9 @@ Gitlab.ee do
Settings.cron_jobs['geo_repository_verification_secondary_scheduler_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['geo_repository_verification_secondary_scheduler_worker']['cron'] ||= '*/1 * * * *'
Settings.cron_jobs['geo_repository_verification_secondary_scheduler_worker']['job_class'] ||= 'Geo::RepositoryVerification::Secondary::SchedulerWorker'
+ Settings.cron_jobs['geo_container_repository_sync_worker'] ||= Settingslogic.new({})
+ Settings.cron_jobs['geo_container_repository_sync_worker']['cron'] ||= '*/1 * * * *'
+ Settings.cron_jobs['geo_container_repository_sync_worker']['job_class'] ||= 'Geo::ContainerRepositorySyncDispatchWorker'
Settings.cron_jobs['historical_data_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['historical_data_worker']['cron'] ||= '0 12 * * *'
Settings.cron_jobs['historical_data_worker']['job_class'] = 'HistoricalDataWorker'
diff --git a/config/initializers/peek.rb b/config/initializers/peek.rb
index 4a2b7931f30..b6c7f1ff4fc 100644
--- a/config/initializers/peek.rb
+++ b/config/initializers/peek.rb
@@ -9,5 +9,10 @@ Peek.into Peek::Views::ActiveRecord
Peek.into Peek::Views::Gitaly
Peek.into Peek::Views::RedisDetailed
Peek.into Peek::Views::Rugged
-Peek.into Peek::Views::GC
+
+# `Peek::Views::GC` is currently disabled in production, as it runs with every request
+# even if PerformanceBar is inactive and clears `GC::Profiler` reports we need for metrics.
+# Check https://gitlab.com/gitlab-org/gitlab-ce/issues/65455
+Peek.into Peek::Views::GC if Rails.env.development?
+
Peek.into Peek::Views::Tracing if Labkit::Tracing.tracing_url_enabled?
diff --git a/config/routes/project.rb b/config/routes/project.rb
index 1f632765317..3113cb172f7 100644
--- a/config/routes/project.rb
+++ b/config/routes/project.rb
@@ -500,6 +500,10 @@ constraints(::Constraints::ProjectUrlConstrainer.new) do
get :realtime_changes
post :create_merge_request
get :discussions, format: :json
+
+ Gitlab.ee do
+ get 'designs(/*vueroute)', to: 'issues#show', format: false
+ end
end
collection do
diff --git a/db/migrate/20190612111404_add_geo_container_sync_capacity.rb b/db/migrate/20190612111404_add_geo_container_sync_capacity.rb
new file mode 100644
index 00000000000..d4cd569f460
--- /dev/null
+++ b/db/migrate/20190612111404_add_geo_container_sync_capacity.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+class AddGeoContainerSyncCapacity < ActiveRecord::Migration[5.1]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def change
+ change_table :geo_nodes do |t|
+ t.column :container_repositories_max_capacity, :integer, default: 10, null: false
+ end
+ end
+end
diff --git a/db/migrate/20190703043358_add_commit_id_to_draft_notes.rb b/db/migrate/20190703043358_add_commit_id_to_draft_notes.rb
new file mode 100644
index 00000000000..022400ce585
--- /dev/null
+++ b/db/migrate/20190703043358_add_commit_id_to_draft_notes.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+class AddCommitIdToDraftNotes < ActiveRecord::Migration[5.1]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def change
+ add_column :draft_notes, :commit_id, :binary
+ end
+end
diff --git a/db/migrate/20190726101050_rename_allow_local_requests_from_hooks_and_services_application_setting.rb b/db/migrate/20190726101050_rename_allow_local_requests_from_hooks_and_services_application_setting.rb
new file mode 100644
index 00000000000..ac65e8d745c
--- /dev/null
+++ b/db/migrate/20190726101050_rename_allow_local_requests_from_hooks_and_services_application_setting.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class RenameAllowLocalRequestsFromHooksAndServicesApplicationSetting < ActiveRecord::Migration[5.2]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ rename_column_concurrently :application_settings, :allow_local_requests_from_hooks_and_services, :allow_local_requests_from_web_hooks_and_services
+ end
+
+ def down
+ cleanup_concurrent_column_rename :application_settings, :allow_local_requests_from_web_hooks_and_services, :allow_local_requests_from_hooks_and_services
+ end
+end
diff --git a/db/migrate/20190726101133_add_allow_local_requests_from_system_hooks_to_application_settings.rb b/db/migrate/20190726101133_add_allow_local_requests_from_system_hooks_to_application_settings.rb
new file mode 100644
index 00000000000..95d4f956f93
--- /dev/null
+++ b/db/migrate/20190726101133_add_allow_local_requests_from_system_hooks_to_application_settings.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+class AddAllowLocalRequestsFromSystemHooksToApplicationSettings < ActiveRecord::Migration[5.2]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def up
+ add_column(:application_settings, :allow_local_requests_from_system_hooks,
+ :boolean,
+ default: true,
+ null: false)
+ end
+
+ def down
+ remove_column(:application_settings, :allow_local_requests_from_system_hooks)
+ end
+end
diff --git a/db/migrate/20190731084415_add_build_need.rb b/db/migrate/20190731084415_add_build_need.rb
new file mode 100644
index 00000000000..45b8abb480d
--- /dev/null
+++ b/db/migrate/20190731084415_add_build_need.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class AddBuildNeed < ActiveRecord::Migration[5.2]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def change
+ create_table :ci_build_needs, id: :serial do |t|
+ t.integer :build_id, null: false
+ t.text :name, null: false
+
+ t.index [:build_id, :name], unique: true
+ t.foreign_key :ci_builds, column: :build_id, on_delete: :cascade
+ end
+ end
+end
diff --git a/db/migrate/20190802012622_reorder_issues_project_id_relative_position_index.rb b/db/migrate/20190802012622_reorder_issues_project_id_relative_position_index.rb
new file mode 100644
index 00000000000..12088dd763f
--- /dev/null
+++ b/db/migrate/20190802012622_reorder_issues_project_id_relative_position_index.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+class ReorderIssuesProjectIdRelativePositionIndex < ActiveRecord::Migration[5.2]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ OLD_INDEX_NAME = 'index_issues_on_project_id_and_state_and_rel_position_and_id'
+ NEW_INDEX_NAME = 'index_issues_on_project_id_and_rel_position_and_state_and_id'
+
+ def up
+ add_concurrent_index :issues, [:project_id, :relative_position, :state, :id], order: { id: :desc }, name: NEW_INDEX_NAME
+
+ remove_concurrent_index_by_name :issues, OLD_INDEX_NAME
+ end
+
+ def down
+ add_concurrent_index :issues, [:project_id, :state, :relative_position, :id], order: { id: :desc }, name: OLD_INDEX_NAME
+
+ remove_concurrent_index_by_name :issues, NEW_INDEX_NAME
+ end
+end
diff --git a/db/post_migrate/20190801114109_cleanup_allow_local_requests_from_hooks_and_services_application_setting_rename.rb b/db/post_migrate/20190801114109_cleanup_allow_local_requests_from_hooks_and_services_application_setting_rename.rb
new file mode 100644
index 00000000000..127e44254ac
--- /dev/null
+++ b/db/post_migrate/20190801114109_cleanup_allow_local_requests_from_hooks_and_services_application_setting_rename.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class CleanupAllowLocalRequestsFromHooksAndServicesApplicationSettingRename < ActiveRecord::Migration[5.2]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ cleanup_concurrent_column_rename :application_settings, :allow_local_requests_from_hooks_and_services, :allow_local_requests_from_web_hooks_and_services
+ end
+
+ def down
+ rename_column_concurrently :application_settings, :allow_local_requests_from_web_hooks_and_services, :allow_local_requests_from_hooks_and_services
+ end
+end
diff --git a/db/schema.rb b/db/schema.rb
index 6f5fc6c65eb..a9b7c1930e3 100644
--- a/db/schema.rb
+++ b/db/schema.rb
@@ -10,7 +10,7 @@
#
# It's strongly recommended that you check this file into your version control system.
-ActiveRecord::Schema.define(version: 2019_07_29_090456) do
+ActiveRecord::Schema.define(version: 2019_08_02_012622) do
# These are extensions that must be enabled in order to support this database
enable_extension "pg_trgm"
@@ -183,7 +183,6 @@ ActiveRecord::Schema.define(version: 2019_07_29_090456) do
t.string "external_authorization_service_default_label"
t.boolean "pages_domain_verification_enabled", default: true, null: false
t.string "user_default_internal_regex"
- t.boolean "allow_local_requests_from_hooks_and_services", default: false, null: false
t.float "external_authorization_service_timeout", default: 0.5
t.text "external_auth_client_cert"
t.text "encrypted_external_auth_client_key"
@@ -230,6 +229,8 @@ ActiveRecord::Schema.define(version: 2019_07_29_090456) do
t.string "grafana_url", default: "/-/grafana", null: false
t.string "outbound_local_requests_whitelist", limit: 255, default: [], null: false, array: true
t.integer "raw_blob_request_limit", default: 300, null: false
+ t.boolean "allow_local_requests_from_web_hooks_and_services", default: false, null: false
+ t.boolean "allow_local_requests_from_system_hooks", default: true, null: false
t.index ["custom_project_templates_group_id"], name: "index_application_settings_on_custom_project_templates_group_id"
t.index ["file_template_project_id"], name: "index_application_settings_on_file_template_project_id"
t.index ["usage_stats_set_by_user_id"], name: "index_application_settings_on_usage_stats_set_by_user_id"
@@ -454,6 +455,12 @@ ActiveRecord::Schema.define(version: 2019_07_29_090456) do
t.index ["namespace_id"], name: "index_chat_teams_on_namespace_id", unique: true
end
+ create_table "ci_build_needs", id: :serial, force: :cascade do |t|
+ t.integer "build_id", null: false
+ t.text "name", null: false
+ t.index ["build_id", "name"], name: "index_ci_build_needs_on_build_id_and_name", unique: true
+ end
+
create_table "ci_build_trace_chunks", force: :cascade do |t|
t.integer "build_id", null: false
t.integer "chunk_index", null: false
@@ -1130,6 +1137,7 @@ ActiveRecord::Schema.define(version: 2019_07_29_090456) do
t.text "position"
t.text "original_position"
t.text "change_position"
+ t.binary "commit_id"
t.index ["author_id"], name: "index_draft_notes_on_author_id"
t.index ["discussion_id"], name: "index_draft_notes_on_discussion_id"
t.index ["merge_request_id"], name: "index_draft_notes_on_merge_request_id"
@@ -1435,6 +1443,7 @@ ActiveRecord::Schema.define(version: 2019_07_29_090456) do
t.integer "minimum_reverification_interval", default: 7, null: false
t.string "internal_url"
t.string "name", null: false
+ t.integer "container_repositories_max_capacity", default: 10, null: false
t.index ["access_key"], name: "index_geo_nodes_on_access_key"
t.index ["name"], name: "index_geo_nodes_on_name", unique: true
t.index ["primary"], name: "index_geo_nodes_on_primary"
@@ -1707,7 +1716,7 @@ ActiveRecord::Schema.define(version: 2019_07_29_090456) do
t.index ["project_id", "created_at", "id", "state"], name: "index_issues_on_project_id_and_created_at_and_id_and_state"
t.index ["project_id", "due_date", "id", "state"], name: "idx_issues_on_project_id_and_due_date_and_id_and_state_partial", where: "(due_date IS NOT NULL)"
t.index ["project_id", "iid"], name: "index_issues_on_project_id_and_iid", unique: true
- t.index ["project_id", "state", "relative_position", "id"], name: "index_issues_on_project_id_and_state_and_rel_position_and_id", order: { id: :desc }
+ t.index ["project_id", "relative_position", "state", "id"], name: "index_issues_on_project_id_and_rel_position_and_state_and_id", order: { id: :desc }
t.index ["project_id", "updated_at", "id", "state"], name: "index_issues_on_project_id_and_updated_at_and_id_and_state"
t.index ["relative_position"], name: "index_issues_on_relative_position"
t.index ["state"], name: "index_issues_on_state"
@@ -3635,6 +3644,7 @@ ActiveRecord::Schema.define(version: 2019_07_29_090456) do
add_foreign_key "boards", "namespaces", column: "group_id", on_delete: :cascade
add_foreign_key "boards", "projects", name: "fk_f15266b5f9", on_delete: :cascade
add_foreign_key "chat_teams", "namespaces", on_delete: :cascade
+ add_foreign_key "ci_build_needs", "ci_builds", column: "build_id", on_delete: :cascade
add_foreign_key "ci_build_trace_chunks", "ci_builds", column: "build_id", on_delete: :cascade
add_foreign_key "ci_build_trace_section_names", "projects", on_delete: :cascade
add_foreign_key "ci_build_trace_sections", "ci_build_trace_section_names", column: "section_name_id", name: "fk_264e112c66", on_delete: :cascade
diff --git a/doc/administration/auth/ldap-ee.md b/doc/administration/auth/ldap-ee.md
index 1b7af2d945a..34f3cfa353f 100644
--- a/doc/administration/auth/ldap-ee.md
+++ b/doc/administration/auth/ldap-ee.md
@@ -4,37 +4,27 @@ type: reference
# LDAP Additions in GitLab EE **(STARTER ONLY)**
-This is a continuation of the main [LDAP documentation](ldap.md), detailing LDAP
-features specific to GitLab Enterprise Edition Starter, Premium and Ultimate.
+This section documents LDAP features specific to GitLab Enterprise Edition
+[Starter](https://about.gitlab.com/pricing/#self-managed) and above.
-## Overview
-
-[LDAP](https://en.wikipedia.org/wiki/Lightweight_Directory_Access_Protocol)
-stands for **Lightweight Directory Access Protocol**, which
-is a standard application protocol for
-accessing and maintaining distributed directory information services
-over an Internet Protocol (IP) network.
-
-GitLab integrates with LDAP to support **user authentication**. This integration
-works with most LDAP-compliant directory servers, including Microsoft Active
-Directory, Apple Open Directory, Open LDAP, and 389 Server.
-**GitLab Enterprise Edition** includes enhanced integration, including group
-membership syncing.
+For documentation relevant to both Community Edition and Enterprise Edition,
+see the main [LDAP documentation](ldap.md).
## Use cases
-- User sync: Once a day, GitLab will update users against LDAP
+- User sync: Once a day, GitLab will update users against LDAP.
- Group sync: Once an hour, GitLab will update group membership
- based on LDAP group members
+ based on LDAP group members.
-## Multiple LDAP servers
+## Multiple LDAP servers **(STARTER ONLY)**
With GitLab Enterprise Edition Starter, you can configure multiple LDAP servers
that your GitLab instance will connect to.
-To add another LDAP server, you can start by duplicating the settings under
-[the main configuration](ldap.md#configuration) and edit them to match the
-additional LDAP server.
+To add another LDAP server:
+
+1. Duplicate the settings under [the main configuration](ldap.md#configuration).
+1. Edit them to match the additional LDAP server.
Be sure to choose a different provider ID made of letters a-z and numbers 0-9.
This ID will be stored in the database so that GitLab can remember which LDAP
@@ -47,19 +37,19 @@ users against LDAP.
The process will execute the following access checks:
-1. Ensure the user is still present in LDAP
-1. If the LDAP server is Active Directory, ensure the user is active (not
- blocked/disabled state). This will only be checked if
- `active_directory: true` is set in the LDAP configuration [^1]
+- Ensure the user is still present in LDAP.
+- If the LDAP server is Active Directory, ensure the user is active (not
+ blocked/disabled state). This will only be checked if
+ `active_directory: true` is set in the LDAP configuration. [^1]
The user will be set to `ldap_blocked` state in GitLab if the above conditions
fail. This means the user will not be able to login or push/pull code.
The process will also update the following user information:
-1. Email address
-1. If `sync_ssh_keys` is set, SSH public keys
-1. If Kerberos is enabled, Kerberos identity
+- Email address.
+- If `sync_ssh_keys` is set, SSH public keys.
+- If Kerberos is enabled, Kerberos identity.
NOTE: **Note:**
The LDAP sync process updates existing users while new users will
@@ -72,9 +62,6 @@ first time GitLab will trigger a sync for groups the user should be a member of.
That way they don't need to wait for the hourly sync to be granted
access to their groups and projects.
-In GitLab Premium, we can also add a GitLab group to sync with one or multiple LDAP groups or we can
-also add a filter. The filter must comply with the syntax defined in [RFC 2254](https://tools.ietf.org/search/rfc2254).
-
A group sync process will run every hour on the hour, and `group_base` must be set
in LDAP configuration for LDAP synchronizations based on group CN to work. This allows
GitLab group membership to be automatically updated based on LDAP group members.
@@ -120,13 +107,26 @@ following.
1. [Restart GitLab][restart] for the changes to take effect.
----
-
To take advantage of group sync, group owners or maintainers will need to create an
-LDAP group link in their group **Settings > LDAP Groups** page. Multiple LDAP
-groups and/or filters can be linked with a single GitLab group. When the link is
-created, an access level/role is specified (Guest, Reporter, Developer, Maintainer,
-or Owner).
+LDAP group link in their group **Settings > LDAP Groups** page.
+
+Multiple LDAP groups and [filters](#filters-premium-only) can be linked with
+a single GitLab group. When the link is created, an access level/role is
+specified (Guest, Reporter, Developer, Maintainer, or Owner).
+
+### Filters **(PREMIUM ONLY)**
+
+In GitLab Premium, you can add an LDAP user filter for group synchronization.
+Filters allow for complex logic without creating a special LDAP group.
+
+To sync GitLab group membership based on an LDAP filter:
+
+1. Open the **LDAP Synchronization** page for the GitLab group.
+1. Select **LDAP user filter** as the **Sync method**.
+1. Enter an LDAP user filter in the **LDAP user filter** field.
+
+The filter must comply with the
+syntax defined in [RFC 2254](https://tools.ietf.org/search/rfc2254).
## Administrator sync
@@ -190,10 +190,13 @@ group, as opposed to the full DN.
## Global group memberships lock
"Lock memberships to LDAP synchronization" setting allows instance administrators
-to lock down user abilities to invite new members to a group. When enabled following happens:
+to lock down user abilities to invite new members to a group.
+
+When enabled, the following applies:
-1. Only administrator can manage memberships of any group including access levels.
-1. Users are not allowed to share project with other groups or invite members to a project created in a group.
+- Only administrators can manage memberships of any group, including access levels.
+- Users are not allowed to share projects with other groups or invite members to
+  a project created in a group.
## Adjusting LDAP user sync schedule
@@ -333,10 +336,18 @@ administrative duties.
### Supported LDAP group types/attributes
-GitLab supports LDAP groups that use member attributes `member`, `submember`,
-`uniquemember`, `memberof` and `memberuid`. This means group sync supports, at
-least, LDAP groups with object class `groupOfNames`, `posixGroup`, and
-`groupOfUniqueName`. Other object classes should work fine as long as members
+GitLab supports LDAP groups that use member attributes:
+
+- `member`
+- `submember`
+- `uniquemember`
+- `memberof`
+- `memberuid`.
+
+This means group sync supports, at least, LDAP groups with object class:
+`groupOfNames`, `posixGroup`, and `groupOfUniqueName`.
+
+Other object classes should work fine as long as members
are defined as one of the mentioned attributes. This also means GitLab supports
Microsoft Active Directory, Apple Open Directory, Open LDAP, and 389 Server.
Other LDAP servers should work, too.
@@ -344,11 +355,12 @@ Other LDAP servers should work, too.
Active Directory also supports nested groups. Group sync will recursively
resolve membership if `active_directory: true` is set in the configuration file.
-> **Note:** Nested group membership will only be resolved if the nested group
- also falls within the configured `group_base`. For example, if GitLab sees a
- nested group with DN `cn=nested_group,ou=special_groups,dc=example,dc=com` but
- the configured `group_base` is `ou=groups,dc=example,dc=com`, `cn=nested_group`
- will be ignored.
+NOTE: **Note:**
+Nested group membership will only be resolved if the nested group
+also falls within the configured `group_base`. For example, if GitLab sees a
+nested group with DN `cn=nested_group,ou=special_groups,dc=example,dc=com` but
+the configured `group_base` is `ou=groups,dc=example,dc=com`, `cn=nested_group`
+will be ignored.
### Queries
@@ -403,7 +415,7 @@ main: # 'main' is the GitLab 'provider ID' of this LDAP server
[^1]: In Active Directory, a user is marked as disabled/blocked if the user
account control attribute (`userAccountControl:1.2.840.113556.1.4.803`)
- has bit 2 set. See https://ctogonewild.com/2009/09/03/bitmask-searches-in-ldap/
+ has bit 2 set. See <https://ctogonewild.com/2009/09/03/bitmask-searches-in-ldap/>
for more information.
### User DN has changed
@@ -423,10 +435,10 @@ things to check to debug the situation.
- Ensure LDAP configuration has a `group_base` specified. This configuration is
required for group sync to work properly.
- Ensure the correct LDAP group link is added to the GitLab group. Check group
- links by visiting the GitLab group, then **Settings dropdown -> LDAP groups**.
-- Check that the user has an LDAP identity
+ links by visiting the GitLab group, then **Settings dropdown > LDAP groups**.
+- Check that the user has an LDAP identity:
1. Sign in to GitLab as an administrator user.
- 1. Navigate to **Admin area -> Users**.
+ 1. Navigate to **Admin area > Users**.
1. Search for the user
1. Open the user, by clicking on their name. Do not click 'Edit'.
1. Navigate to the **Identities** tab. There should be an LDAP identity with
@@ -437,7 +449,7 @@ Often, the best way to learn more about why group sync is behaving a certain
way is to enable debug logging. There is verbose output that details every
step of the sync.
-1. Start a Rails console
+1. Start a Rails console:
```bash
# For Omnibus installations
@@ -446,6 +458,7 @@ step of the sync.
# For installations from source
sudo -u git -H bundle exec rails console production
```
+
1. Set the log level to debug (only for this session):
```ruby
@@ -540,8 +553,9 @@ and more DNs may be added, or existing entries modified, based on additional
LDAP group lookups. The very last occurrence of this entry should indicate
exactly which users GitLab believes should be added to the group.
-> **Note:** 10 is 'Guest', 20 is 'Reporter', 30 is 'Developer', 40 is 'Maintainer'
- and 50 is 'Owner'
+NOTE: **Note:**
+10 is 'Guest', 20 is 'Reporter', 30 is 'Developer', 40 is 'Maintainer'
+and 50 is 'Owner'.
```bash
Resolved 'my_group' group member access: {"uid=john0,ou=people,dc=example,dc=com"=>30,
diff --git a/doc/administration/auth/ldap.md b/doc/administration/auth/ldap.md
index be05a4d63a7..186bf4c4825 100644
--- a/doc/administration/auth/ldap.md
+++ b/doc/administration/auth/ldap.md
@@ -7,28 +7,44 @@ type: reference
# LDAP
GitLab integrates with LDAP to support user authentication.
-This integration works with most LDAP-compliant directory
-servers, including Microsoft Active Directory, Apple Open Directory, Open LDAP,
-and 389 Server. GitLab Enterprise Editions include enhanced integration,
+
+This integration works with most LDAP-compliant directory servers, including:
+
+- Microsoft Active Directory
+- Apple Open Directory
+- Open LDAP
+- 389 Server.
+
+GitLab Enterprise Editions (EE) include enhanced integration,
including group membership syncing as well as multiple LDAP servers support.
-## GitLab EE
+For more details about EE-specific LDAP features, see the
+[LDAP Enterprise Edition documentation](ldap-ee.md).
-The information on this page is relevant for both GitLab CE and EE. For more
-details about EE-specific LDAP features, see the
-[LDAP Enterprise Edition documentation](ldap-ee.md). **(STARTER ONLY)**
+NOTE: **Note:**
+The information on this page is relevant for both GitLab CE and EE.
+
+## Overview
+
+[LDAP](https://en.wikipedia.org/wiki/Lightweight_Directory_Access_Protocol)
+stands for **Lightweight Directory Access Protocol**, which is a standard
+application protocol for accessing and maintaining distributed directory
+information services over an Internet Protocol (IP) network.
## Security
-GitLab assumes that LDAP users are not able to change their LDAP 'mail', 'email'
-or 'userPrincipalName' attribute. An LDAP user who is allowed to change their
-email on the LDAP server can potentially
-[take over any account](#enabling-ldap-sign-in-for-existing-gitlab-users)
-on your GitLab server.
+GitLab assumes that LDAP users:
+
+- Are not able to change their LDAP `mail`, `email`, or `userPrincipalName` attribute.
+ An LDAP user who is allowed to change their email on the LDAP server can potentially
+ [take over any account](#enabling-ldap-sign-in-for-existing-gitlab-users)
+ on your GitLab server.
+- Have unique email addresses, otherwise it is possible for LDAP users with the same
+ email address to share the same GitLab account.
We recommend against using LDAP integration if your LDAP users are
-allowed to change their 'mail', 'email' or 'userPrincipalName' attribute on
-the LDAP server.
+allowed to change their `mail`, `email`, or `userPrincipalName` attribute on
+the LDAP server or share email addresses.
### User deletion
@@ -64,9 +80,12 @@ NOTE: **Note**:
In GitLab Enterprise Edition Starter, you can configure multiple LDAP servers
to connect to one GitLab server.
-For a complete guide on configuring LDAP with GitLab Community Edition, please check
-the admin guide [How to configure LDAP with GitLab CE](how_to_configure_ldap_gitlab_ce/index.md).
-For GitLab Enterprise Editions, see also [How to configure LDAP with GitLab EE](how_to_configure_ldap_gitlab_ee/index.md). **(STARTER ONLY)**
+For a complete guide on configuring LDAP with:
+
+- GitLab Community Edition, see
+ [How to configure LDAP with GitLab CE](how_to_configure_ldap_gitlab_ce/index.md).
+- Enterprise Editions, see
+ [How to configure LDAP with GitLab EE](how_to_configure_ldap_gitlab_ee/index.md). **(STARTER ONLY)**
To enable LDAP integration you need to add your LDAP server settings in
`/etc/gitlab/gitlab.rb` or `/home/git/gitlab/config/gitlab.yml` for Omnibus
@@ -384,7 +403,7 @@ production:
Tip: If you want to limit access to the nested members of an Active Directory
group, you can use the following syntax:
-```
+```text
(memberOf:1.2.840.113556.1.4.1941:=CN=My Group,DC=Example,DC=com)
```
@@ -402,13 +421,13 @@ The `user_filter` DN can contain special characters. For example:
- A comma:
- ```
+ ```text
OU=GitLab, Inc,DC=gitlab,DC=com
```
- Open and close brackets:
- ```
+ ```text
OU=Gitlab (Inc),DC=gitlab,DC=com
```
@@ -417,13 +436,13 @@ The `user_filter` DN can contain special characters. For example:
- Escape commas with `\2C`. For example:
- ```
+ ```text
OU=GitLab\2C Inc,DC=gitlab,DC=com
```
- Escape open and close brackets with `\28` and `\29`, respectively. For example:
- ```
+ ```text
OU=Gitlab \28Inc\29,DC=gitlab,DC=com
```
@@ -507,11 +526,11 @@ timeout), the login is rejected and a message will be logged to
### Debug LDAP user filter with ldapsearch
-This example uses ldapsearch and assumes you are using ActiveDirectory. The
+This example uses `ldapsearch` and assumes you are using Active Directory. The
following query returns the login names of the users that will be allowed to
log in to GitLab if you configure your own user_filter.
-```
+```sh
ldapsearch -H ldaps://$host:$port -D "$bind_dn" -y bind_dn_password.txt -b "$base" "$user_filter" sAMAccountName
```
diff --git a/doc/administration/geo/replication/index.md b/doc/administration/geo/replication/index.md
index b5789c87e47..dc49a95e008 100644
--- a/doc/administration/geo/replication/index.md
+++ b/doc/administration/geo/replication/index.md
@@ -245,35 +245,48 @@ This list of limitations only reflects the latest version of GitLab. If you are
- Object pools for forked project deduplication work only on the **primary** node, and are duplicated on the **secondary** node.
- [External merge request diffs](../../merge_request_diffs.md) will not be replicated if they are on-disk, and viewing merge requests will fail. However, external MR diffs in object storage **are** supported. The default configuration (in-database) does work.
-### Limitations on replication
-
-Only the following items are replicated to the **secondary** node:
-
-- All database content. For example, snippets, epics, issues, merge requests, groups, and project metadata.
-- Project repositories.
-- Project wiki repositories.
-- User uploads. For example, attachments to issues, merge requests, epics, and avatars.
-- CI job artifacts and traces.
+### Limitations on replication/verification
+
+The following table lists the GitLab features along with their replication
+and verification status on a **secondary** node.
+
+You can track the progress of the work to include the missing items in:
+
+- [ee-893](https://gitlab.com/groups/gitlab-org/-/epics/893).
+- [ee-1430](https://gitlab.com/groups/gitlab-org/-/epics/1430).
+
+| Feature | Replicated | Verified |
+|-----------|------------|----------|
+| All database content (e.g. snippets, epics, issues, merge requests, groups, and project metadata) | Yes | Yes |
+| Project repository | Yes | Yes |
+| Project wiki repository | Yes | Yes |
+| Project designs repository | No | No |
+| Uploads (e.g. attachments to issues, merge requests, epics, and avatars) | Yes | Yes, only on transfer, or manually (1) |
+| LFS Objects | Yes | Yes, only on transfer, or manually (1) |
+| CI job artifacts (other than traces) | Yes | No, only manually (1) |
+| Archived traces | Yes | Yes, only on transfer, or manually (1) |
+| Personal snippets | Yes | Yes |
+| Version-controlled personal snippets ([unsupported](https://gitlab.com/gitlab-org/gitlab-ce/issues/13426)) | No | No |
+| Project snippets | Yes | Yes |
+| Version-controlled project snippets ([unsupported](https://gitlab.com/gitlab-org/gitlab-ce/issues/13426)) | No | No |
+| Object pools for forked project deduplication | No | No |
+| [Server-side Git Hooks](../../custom_hooks.md) | No | No |
+| [Elasticsearch integration](../../../integration/elasticsearch.md) | No | No |
+| [GitLab Pages](../../pages/index.md) | No | No |
+| [Container Registry](../../container_registry.md) ([track progress](https://gitlab.com/gitlab-org/gitlab-ee/issues/2870)) | No | No |
+| [NPM Registry](../../npm_registry.md) | No | No |
+| [Maven Packages](../../maven_packages.md) | No | No |
+| [External merge request diffs](../../merge_request_diffs.md) | No, if they are on-disk | No |
+| Content in object storage ([track progress](https://gitlab.com/groups/gitlab-org/-/epics/1526)) | No | No |
+
+1. The integrity can be verified manually using [Integrity Check Rake Task](../../raketasks/check.md) on both nodes and comparing the output between them.
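+
+   For example, a manual check could look like the following. The task names are
+   taken from the linked Rake task documentation; verify them against your GitLab
+   version before running:
+
+   ```sh
+   # Run on both the primary and the secondary node, then compare the output.
+   sudo gitlab-rake gitlab:git:fsck
+   sudo gitlab-rake gitlab:lfs:check
+   sudo gitlab-rake gitlab:uploads:check
+   ```
+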
DANGER: **DANGER**
-Data not on this list is unavailable on the **secondary** node. Failing over without manually replicating data not on this list will cause the data to be **lost**.
-
-### Examples of data not replicated
-
-Take special note that these examples of GitLab features are both:
-
-- Commonly used.
-- **Not** replicated by Geo at present.
-
-Examples include:
-
-- [Elasticsearch integration](../../../integration/elasticsearch.md).
-- [Container Registry](../../container_registry.md). [Object Storage](object_storage.md) can mitigate this.
-- [GitLab Pages](../../pages/index.md).
-- [Mattermost integration](https://docs.gitlab.com/omnibus/gitlab-mattermost/).
-
-CAUTION: **Caution:**
-If you wish to use them on a **secondary** node, or to execute a failover successfully, you will need to replicate their data using some other means.
+Features not on this list, or with **No** in the **Replicated** column,
+are not replicated on the **secondary** node. Failing over without manually
+replicating data from those features will cause the data to be **lost**.
+If you wish to use those features on a **secondary** node, or to execute a failover
+successfully, you must replicate their data using some other means.
## Frequently Asked Questions
diff --git a/doc/administration/geo/replication/updating_the_geo_nodes.md b/doc/administration/geo/replication/updating_the_geo_nodes.md
index 550b3b07a95..39174780e24 100644
--- a/doc/administration/geo/replication/updating_the_geo_nodes.md
+++ b/doc/administration/geo/replication/updating_the_geo_nodes.md
@@ -10,10 +10,23 @@ all you need to do is update GitLab itself:
1. Log into each node (**primary** and **secondary** nodes).
1. [Update GitLab][update].
-1. [Update tracking database on **secondary** node](#update-tracking-database-on-secondary-node) when
- the tracking database is enabled.
1. [Test](#check-status-after-updating) **primary** and **secondary** nodes, and check version in each.
+### Check status after updating
+
+Now that the update process is complete, you may want to check whether
+everything is working correctly:
+
+1. Run the Geo Rake task on all nodes; everything should be green:
+
+ ```sh
+ sudo gitlab-rake gitlab:geo:check
+ ```
+
+1. Check the **primary** node's Geo dashboard for any errors.
+1. Test data replication by pushing code to the **primary** node and confirming it
+   is received by **secondary** nodes.
+
## Upgrading to GitLab 12.1
By default, GitLab 12.1 will attempt to automatically upgrade the embedded PostgreSQL server to 10.7 from 9.6. Please see [the omnibus documentation](https://docs.gitlab.com/omnibus/settings/database.html#upgrading-a-geo-instance) for the recommended procedure.
@@ -419,22 +432,7 @@ is prepended with the relevant node for better clarity:
sudo gitlab-ctl start
```
-## Check status after updating
-
-Now that the update process is complete, you may want to check whether
-everything is working correctly:
-
-1. Run the Geo raketask on all nodes, everything should be green:
-
- ```sh
- sudo gitlab-rake gitlab:geo:check
- ```
-
-1. Check the **primary** node's Geo dashboard for any errors.
-1. Test the data replication by pushing code to the **primary** node and see if it
- is received by **secondary** nodes.
-
-## Update tracking database on **secondary** node
+### Update tracking database on **secondary** node
After updating a **secondary** node, you might need to run migrations on
the tracking database. The tracking database was added in GitLab 9.1,
diff --git a/doc/administration/gitaly/index.md b/doc/administration/gitaly/index.md
index 0f547ef03bf..432056d48c7 100644
--- a/doc/administration/gitaly/index.md
+++ b/doc/administration/gitaly/index.md
@@ -469,7 +469,16 @@ One current feature of GitLab that still requires a shared directory (NFS) is
There is [work in progress](https://gitlab.com/gitlab-org/gitlab-pages/issues/196)
to eliminate the need for NFS to support GitLab Pages.
-## Troubleshooting
+## Troubleshooting Gitaly
+
+### Commits, pushes, and clones return a 401
+
+```text
+remote: GitLab: 401 Unauthorized
+```
+
+You will need to sync your `gitlab-secrets.json` file with your GitLab
+app nodes.
### `gitaly-debug`
@@ -510,3 +519,94 @@ implemented as calls to `gitaly-ruby`:
```
sum(rate(grpc_client_handled_total[5m])) by (grpc_method) > 0
```
+
+### Repository changes fail with a `401 Unauthorized` error
+
+If you're running Gitaly on its own server and notice that users can
+successfully clone and fetch repositories (via both SSH and HTTPS), but can't
+push to them or make changes to the repository in the web UI without getting a
+`401 Unauthorized` message, then it's possible Gitaly is failing to authenticate
+with the other nodes due to having the [wrong secrets file](#3-gitaly-server-configuration).
+
+Confirm the following are all true:
+
+- When any user performs a `git push` to any repository on this Gitaly node, it
+ fails with the following error (note the `401 Unauthorized`):
+
+ ```sh
+ remote: GitLab: 401 Unauthorized
+ To <REMOTE_URL>
+ ! [remote rejected] branch-name -> branch-name (pre-receive hook declined)
+ error: failed to push some refs to '<REMOTE_URL>'
+ ```
+
+- When any user adds or modifies a file from the repository using the GitLab
+  UI, it immediately fails with a red `401 Unauthorized` banner.
+- Creating a new project and [initializing it with a README](../../gitlab-basics/create-project.md#blank-projects)
+ successfully creates the project but doesn't create the README.
+- When [tailing the logs](https://docs.gitlab.com/omnibus/settings/logs.md#tail-logs-in-a-console-on-the-server) on an app node and reproducing the error, you get `401` errors
+ when reaching the `/api/v4/internal/allowed` endpoint:
+
+ ```sh
+ # api_json.log
+ {
+ "time": "2019-07-18T00:30:14.967Z",
+ "severity": "INFO",
+ "duration": 0.57,
+ "db": 0,
+ "view": 0.57,
+ "status": 401,
+ "method": "POST",
+ "path": "\/api\/v4\/internal\/allowed",
+ "params": [
+ {
+ "key": "action",
+ "value": "git-receive-pack"
+ },
+ {
+ "key": "changes",
+ "value": "REDACTED"
+ },
+ {
+ "key": "gl_repository",
+ "value": "REDACTED"
+ },
+ {
+ "key": "project",
+ "value": "\/path\/to\/project.git"
+ },
+ {
+ "key": "protocol",
+ "value": "web"
+ },
+ {
+ "key": "env",
+ "value": "{\"GIT_ALTERNATE_OBJECT_DIRECTORIES\":[],\"GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE\":[],\"GIT_OBJECT_DIRECTORY\":null,\"GIT_OBJECT_DIRECTORY_RELATIVE\":null}"
+ },
+ {
+ "key": "user_id",
+ "value": "2"
+ },
+ {
+ "key": "secret_token",
+ "value": "[FILTERED]"
+ }
+ ],
+ "host": "gitlab.example.com",
+ "ip": "REDACTED",
+ "ua": "Ruby",
+ "route": "\/api\/:version\/internal\/allowed",
+ "queue_duration": 4.24,
+ "gitaly_calls": 0,
+ "gitaly_duration": 0,
+ "correlation_id": "XPUZqTukaP3"
+ }
+
+ # nginx_access.log
+ [IP] - - [18/Jul/2019:00:30:14 +0000] "POST /api/v4/internal/allowed HTTP/1.1" 401 30 "" "Ruby"
+ ```
+
+To fix this problem, confirm that your [`gitlab-secrets.json` file](#3-gitaly-server-configuration)
+on the Gitaly node matches the one on all other nodes. If it doesn't match,
+update the secrets file on the Gitaly node to match the others, then
+[reconfigure the node](../restart_gitlab.md#omnibus-gitlab-reconfigure).
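+
+One way to compare the files is to checksum them on every node and confirm that
+the values match. The path below assumes an Omnibus installation:
+
+```sh
+# Run on the Gitaly node and on each GitLab app node, then compare the output.
+sudo sha256sum /etc/gitlab/gitlab-secrets.json
+```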
diff --git a/doc/administration/high_availability/README.md b/doc/administration/high_availability/README.md
index 42516d811a0..56665ba8b9a 100644
--- a/doc/administration/high_availability/README.md
+++ b/doc/administration/high_availability/README.md
@@ -172,14 +172,16 @@ environment that supports about 10,000 users. The specifications below are a
representation of the work so far. The specifications may be adjusted in the
future based on additional testing and iteration.
-- 3 PostgreSQL - 4 CPU, 8GB RAM per node
-- 1 PgBouncer - 2 CPU, 4GB RAM
-- 2 Redis - 2 CPU, 8GB RAM per node
-- 3 Consul/Sentinel - 2 CPU, 2GB RAM per node
-- 4 Sidekiq - 4 CPU, 8GB RAM per node
-- 5 GitLab application nodes - 20 CPU, 64GB RAM per node
-- 1 Gitaly - 20 CPU, 64GB RAM
-- 1 Monitoring node - 4 CPU, 8GB RAM
+NOTE: **Note:** The specifications here were performance tested against a specific coded workload. Your exact needs may be greater, depending on your workload. Your workload is influenced by factors including, but not limited to, how active your users are, how much automation you use, mirroring, and repository/change size.
+
+- 3 PostgreSQL - 4 CPU, 16GiB memory per node
+- 1 PgBouncer - 2 CPU, 4GiB memory
+- 2 Redis - 2 CPU, 8GiB memory per node
+- 3 Consul/Sentinel - 2 CPU, 2GiB memory per node
+- 4 Sidekiq - 4 CPU, 16GiB memory per node
+- 5 GitLab application nodes - 16 CPU, 64GiB memory per node
+- 1 Gitaly - 16 CPU, 64GiB memory
+- 1 Monitoring node - 2 CPU, 8GiB memory, 100GiB local storage
### Fully Distributed
diff --git a/doc/administration/index.md b/doc/administration/index.md
index 966acf008cc..f7f9d753e58 100644
--- a/doc/administration/index.md
+++ b/doc/administration/index.md
@@ -185,4 +185,6 @@ Learn how to install, configure, update, and maintain your GitLab instance.
- [Debugging tips](troubleshooting/debug.md): Tips to debug problems when things go wrong
- [Log system](logs.md): Where to look for logs.
- [Sidekiq Troubleshooting](troubleshooting/sidekiq.md): Debug when Sidekiq appears hung and is not processing jobs.
+- Useful [diagnostics tools](troubleshooting/diagnostics_tools.md) that are sometimes used by the GitLab
+ Support team.
- [Troubleshooting ElasticSearch](troubleshooting/elasticsearch.md): Tips to troubleshoot ElasticSearch.
diff --git a/doc/administration/operations/fast_ssh_key_lookup.md b/doc/administration/operations/fast_ssh_key_lookup.md
index ea69378b249..e787af798bc 100644
--- a/doc/administration/operations/fast_ssh_key_lookup.md
+++ b/doc/administration/operations/fast_ssh_key_lookup.md
@@ -71,10 +71,10 @@ sudo service sshd reload
Confirm that SSH is working by removing your user's SSH key in the UI, adding a
new one, and attempting to pull a repo.
-> **Note:** For Omnibus Docker, `AuthorizedKeysCommand` is setup by default in
+NOTE: **Note:** For Omnibus Docker, `AuthorizedKeysCommand` is set up by default in
GitLab 11.11 and later.
-> **Warning:** Do not disable writes until SSH is confirmed to be working
+CAUTION: **Caution:** Do not disable writes until SSH is confirmed to be working
perfectly, because the file will quickly become out-of-date.
In the case of lookup failures (which are common), the `authorized_keys`
diff --git a/doc/administration/plugins.md b/doc/administration/plugins.md
index 4302667caf5..4cf3c607dae 100644
--- a/doc/administration/plugins.md
+++ b/doc/administration/plugins.md
@@ -52,7 +52,37 @@ as appropriate. The plugins file list is updated for each event, there is no
need to restart GitLab to apply a new plugin.
If a plugin executes with non-zero exit code or GitLab fails to execute it, a
-message will be logged to `plugin.log`.
+message will be logged to:
+
+- `gitlab-rails/plugin.log` in an Omnibus installation.
+- `log/plugin.log` in a source installation.
+
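+For example, on an Omnibus installation you could watch for failing plugins with
+a command like the following (the path is assumed from the default Omnibus log
+directory):
+
+```sh
+sudo tail -f /var/log/gitlab/gitlab-rails/plugin.log
+```
+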
+## Creating plugins
+
+Below is an example that will only respond to the event `project_create` and
+will inform the admins of the GitLab instance that a new project has been created.
+
+```ruby
+#!/opt/gitlab/embedded/bin/ruby
+# By using the embedded Ruby version, we eliminate the possibility that our
+# chosen language is unavailable on the server.
+require 'json'
+require 'mail'
+
+# The incoming data is in JSON format, so we need to parse it first.
+ARGS = JSON.parse(STDIN.read)
+
+# We only want to trigger this plugin on the event project_create
+return unless ARGS['event_name'] == 'project_create'
+
+# Inform the admins of the GitLab instance that a new project has been created
+Mail.deliver do
+ from 'info@gitlab_instance.com'
+ to 'admin@gitlab_instance.com'
+ subject "new project " + ARGS['name']
+  body ARGS['owner_name'] + ' created project ' + ARGS['name']
+end
+```
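+
+To exercise a plugin like this without waiting for a real event, you can pipe a
+sample payload to it on standard input. The file name and field values below are
+only illustrative, and the script must be executable:
+
+```sh
+echo '{"event_name": "project_create", "name": "example", "owner_name": "Alice"}' | ./project_create_notifier.rb
+```
+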
## Validation
diff --git a/doc/administration/repository_storage_paths.md b/doc/administration/repository_storage_paths.md
index ad3a9b19c3c..b1a870210a8 100644
--- a/doc/administration/repository_storage_paths.md
+++ b/doc/administration/repository_storage_paths.md
@@ -57,10 +57,10 @@ storage2:
Now that you've read that big fat warning above, let's edit the configuration
files and add the full paths of the alternative repository storage paths. In
-the example below, we add two more mountpoints that are named `nfs` and `cephfs`
+the example below, we add two more mountpoints that are named `nfs_1` and `nfs_2`
respectively.
-NOTE: **Note:** This example uses NFS and CephFS. We do not recommend using EFS for storage as it may impact GitLab's performance. See the [relevant documentation](high_availability/nfs.md#avoid-using-awss-elastic-file-system-efs) for more details.
+NOTE: **Note:** This example uses NFS. We do not recommend using EFS for storage as it may impact GitLab's performance. See the [relevant documentation](high_availability/nfs.md#avoid-using-awss-elastic-file-system-efs) for more details.
**For installations from source**
@@ -73,10 +73,10 @@ NOTE: **Note:** This example uses NFS and CephFS. We do not recommend using EFS
storages: # You must have at least a 'default' storage path.
default:
path: /home/git/repositories
- nfs:
- path: /mnt/nfs/repositories
- cephfs:
- path: /mnt/cephfs/repositories
+ nfs_1:
+ path: /mnt/nfs1/repositories
+ nfs_2:
+ path: /mnt/nfs2/repositories
```
1. [Restart GitLab][restart-gitlab] for the changes to take effect.
@@ -96,8 +96,8 @@ working, you can remove the `repos_path` line.
```ruby
git_data_dirs({
"default" => { "path" => "/var/opt/gitlab/git-data" },
- "nfs" => { "path" => "/mnt/nfs/git-data" },
- "cephfs" => { "path" => "/mnt/cephfs/git-data" }
+ "nfs_1" => { "path" => "/mnt/nfs1/git-data" },
+ "nfs_2" => { "path" => "/mnt/nfs2/git-data" }
})
```
diff --git a/doc/administration/troubleshooting/diagnostics_tools.md b/doc/administration/troubleshooting/diagnostics_tools.md
new file mode 100644
index 00000000000..ab3b25f0e97
--- /dev/null
+++ b/doc/administration/troubleshooting/diagnostics_tools.md
@@ -0,0 +1,27 @@
+---
+type: reference
+---
+
+# Diagnostics tools
+
+These are some of the diagnostics tools the GitLab Support team uses during troubleshooting.
+They are listed here for transparency, and they may be useful for users with
+experience troubleshooting GitLab. If you are currently having an issue with GitLab, you
+may want to check your [support options](https://about.gitlab.com/support/) first,
+before attempting to use these tools.
+
+## gitlabsos
+
+The [gitlabsos](https://gitlab.com/gitlab-com/support/toolbox/gitlabsos/) utility
+provides a unified method of gathering info and logs from GitLab and the system it's
+running on.
+
+## strace-parser
+
+[strace-parser](https://gitlab.com/wchandler/strace-parser) is a small tool to analyze
+and summarize raw strace data.
+
+## Pritaly
+
+[Pritaly](https://gitlab.com/wchandler/pritaly) takes Gitaly logs and colorizes output
+or converts the logs to JSON.
diff --git a/doc/api/README.md b/doc/api/README.md
index 70540420544..6cd89e34921 100644
--- a/doc/api/README.md
+++ b/doc/api/README.md
@@ -695,6 +695,13 @@ The correct encoding for the query parameter would be:
There are many unofficial GitLab API Clients for most of the popular
programming languages. Visit the [GitLab website] for a complete list.
+## Rate limits
+
+For administrator documentation on rate limit settings, check out
+[Rate limits](../security/rate_limits.md). To find the settings that are
+specifically used by GitLab.com, see
+[GitLab.com-specific rate limits](../user/gitlab_com/index.md).
+
[GitLab website]: https://about.gitlab.com/applications/#api-clients "Clients using the GitLab API"
[lib-api-url]: https://gitlab.com/gitlab-org/gitlab-ce/tree/master/lib/api/api.rb
[ce-3749]: https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/3749
diff --git a/doc/api/container_registry.md b/doc/api/container_registry.md
index 174b93a4f7a..bf544f64178 100644
--- a/doc/api/container_registry.md
+++ b/doc/api/container_registry.md
@@ -6,6 +6,8 @@ This is the API docs of the [GitLab Container Registry](../user/project/containe
## List registry repositories
+### Within a project
+
Get a list of registry repositories in a project.
```
@@ -14,7 +16,8 @@ GET /projects/:id/registry/repositories
| Attribute | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
-| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user. |
+| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) accessible by the authenticated user. |
+| `tags` | boolean | no | If the parameter is included as `true`, each repository includes an array of `"tags"` in the response. |
```bash
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/registry/repositories"
@@ -28,6 +31,7 @@ Example response:
"id": 1,
"name": "",
"path": "group/project",
+ "project_id": 9,
"location": "gitlab.example.com:5000/group/project",
"created_at": "2019-01-10T13:38:57.391Z"
},
@@ -35,12 +39,77 @@ Example response:
"id": 2,
"name": "releases",
"path": "group/project/releases",
+ "project_id": 9,
"location": "gitlab.example.com:5000/group/project/releases",
"created_at": "2019-01-10T13:39:08.229Z"
}
]
```
+### Within a group
+
+Get a list of registry repositories in a group.
+
+```
+GET /groups/:id/registry/repositories
+```
+
+| Attribute | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `id` | integer/string | yes | The ID or [URL-encoded path of the group](README.md#namespaced-path-encoding) accessible by the authenticated user. |
+| `tags` | boolean | no | If the parameter is included as `true`, each repository includes an array of `"tags"` in the response. |
+
+```bash
+curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/groups/2/registry/repositories?tags=1"
+```
+
+Example response:
+
+```json
+[
+ {
+ "id": 1,
+ "name": "",
+ "path": "group/project",
+ "project_id": 9,
+ "location": "gitlab.example.com:5000/group/project",
+ "created_at": "2019-01-10T13:38:57.391Z",
+ "tags": [
+ {
+ "name": "0.0.1",
+ "path": "group/project:0.0.1",
+ "location": "gitlab.example.com:5000/group/project:0.0.1"
+ }
+ ]
+ },
+ {
+ "id": 2,
+ "name": "",
+ "path": "group/other_project",
+ "project_id": 11,
+ "location": "gitlab.example.com:5000/group/other_project",
+ "created_at": "2019-01-10T13:39:08.229Z",
+ "tags": [
+ {
+ "name": "0.0.1",
+ "path": "group/other_project:0.0.1",
+ "location": "gitlab.example.com:5000/group/other_project:0.0.1"
+ },
+ {
+ "name": "0.0.2",
+ "path": "group/other_project:0.0.2",
+ "location": "gitlab.example.com:5000/group/other_project:0.0.2"
+ },
+ {
+ "name": "latest",
+ "path": "group/other_project:latest",
+ "location": "gitlab.example.com:5000/group/other_project:latest"
+ }
+ ]
+ }
+]
+```
+
## Delete registry repository
Delete a repository in registry.
@@ -62,6 +131,8 @@ curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" "https://git
## List repository tags
+### Within a project
+
Get a list of tags for given registry repository.
```
@@ -70,7 +141,7 @@ GET /projects/:id/registry/repositories/:repository_id/tags
| Attribute | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
-| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user. |
+| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) accessible by the authenticated user. |
| `repository_id` | integer | yes | The ID of registry repository. |
```bash
@@ -104,7 +175,7 @@ GET /projects/:id/registry/repositories/:repository_id/tags/:tag_name
| Attribute | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
-| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user. |
+| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) accessible by the authenticated user. |
| `repository_id` | integer | yes | The ID of registry repository. |
| `tag_name` | string | yes | The name of tag. |
diff --git a/doc/api/jobs.md b/doc/api/jobs.md
index 0e45ee1a583..2a1c1b5f6f3 100644
--- a/doc/api/jobs.md
+++ b/doc/api/jobs.md
@@ -491,7 +491,7 @@ Parameters
Example request:
```sh
-curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/jobs/5/artifacts/some/release/file.pdf"
+curl --location --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/jobs/5/artifacts/some/release/file.pdf"
```
Possible response status codes:
@@ -526,7 +526,7 @@ Parameters:
Example request:
```sh
-curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/jobs/artifacts/master/raw/some/release/file.pdf?job=pdf"
+curl --location --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/jobs/artifacts/master/raw/some/release/file.pdf?job=pdf"
```
Possible response status codes:
@@ -551,7 +551,7 @@ GET /projects/:id/jobs/:job_id/trace
| job_id | integer | yes | ID of a job. |
```sh
-curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/jobs/8/trace"
+curl --location --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/jobs/8/trace"
```
Possible response status codes:
diff --git a/doc/api/services.md b/doc/api/services.md
index df15e6892b0..45b49d7eb92 100644
--- a/doc/api/services.md
+++ b/doc/api/services.md
@@ -972,22 +972,28 @@ Parameters:
| `channel` | string | false | Default channel to use if others are not configured |
| `notify_only_broken_pipelines` | boolean | false | Send notifications for broken pipelines |
| `notify_only_default_branch` | boolean | false | Send notifications only for the default branch |
-| `push_events` | boolean | false | Enable notifications for push events |
-| `issues_events` | boolean | false | Enable notifications for issue events |
+| `commit_events` | boolean | false | Enable notifications for commit events |
+| `confidential_issue_channel` | string | false | The name of the channel to receive confidential issues events notifications |
| `confidential_issues_events` | boolean | false | Enable notifications for confidential issue events |
+| `confidential_note_channel` | string | false | The name of the channel to receive confidential note events notifications |
+| `confidential_note_events` | boolean | false | Enable notifications for confidential note events |
+| `deployment_channel` | string | false | The name of the channel to receive deployment events notifications |
+| `deployment_events` | boolean | false | Enable notifications for deployment events |
+| `issue_channel` | string | false | The name of the channel to receive issues events notifications |
+| `issues_events` | boolean | false | Enable notifications for issue events |
+| `job_events` | boolean | false | Enable notifications for job events |
+| `merge_request_channel` | string | false | The name of the channel to receive merge request events notifications |
| `merge_requests_events` | boolean | false | Enable notifications for merge request events |
-| `tag_push_events` | boolean | false | Enable notifications for tag push events |
+| `note_channel` | string | false | The name of the channel to receive note events notifications |
| `note_events` | boolean | false | Enable notifications for note events |
+| `pipeline_channel` | string | false | The name of the channel to receive pipeline events notifications |
| `pipeline_events` | boolean | false | Enable notifications for pipeline events |
-| `wiki_page_events` | boolean | false | Enable notifications for wiki page events |
| `push_channel` | string | false | The name of the channel to receive push events notifications |
-| `issue_channel` | string | false | The name of the channel to receive issues events notifications |
-| `confidential_issue_channel` | string | false | The name of the channel to receive confidential issues events notifications |
-| `merge_request_channel` | string | false | The name of the channel to receive merge request events notifications |
-| `note_channel` | string | false | The name of the channel to receive note events notifications |
+| `push_events` | boolean | false | Enable notifications for push events |
| `tag_push_channel` | string | false | The name of the channel to receive tag push events notifications |
-| `pipeline_channel` | string | false | The name of the channel to receive pipeline events notifications |
+| `tag_push_events` | boolean | false | Enable notifications for tag push events |
| `wiki_page_channel` | string | false | The name of the channel to receive wiki page events notifications |
+| `wiki_page_events` | boolean | false | Enable notifications for wiki page events |
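+
+For example, a minimal call to configure the service might look like the following.
+The token, project ID, and webhook URL are placeholders, and the exact parameter
+names should be checked against the parameters documented above:
+
+```sh
+curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" \
+  "https://gitlab.example.com/api/v4/projects/5/services/slack?webhook=https://hooks.slack.com/services/<webhook_id>&notify_only_broken_pipelines=true"
+```
+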
### Delete Slack service
diff --git a/doc/api/settings.md b/doc/api/settings.md
index c3ac70f0579..83125aff264 100644
--- a/doc/api/settings.md
+++ b/doc/api/settings.md
@@ -64,7 +64,10 @@ Example response:
"performance_bar_allowed_group_id": 42,
"instance_statistics_visibility_private": false,
"user_show_add_ssh_key_message": true,
- "local_markdown_version": 0
+ "local_markdown_version": 0,
+ "allow_local_requests_from_hooks_and_services": true,
+ "allow_local_requests_from_web_hooks_and_services": true,
+ "allow_local_requests_from_system_hooks": false
}
```
@@ -138,7 +141,10 @@ Example response:
"user_show_add_ssh_key_message": true,
"file_template_project_id": 1,
"local_markdown_version": 0,
- "geo_node_allowed_ips": "0.0.0.0/0, ::/0"
+ "geo_node_allowed_ips": "0.0.0.0/0, ::/0",
+ "allow_local_requests_from_hooks_and_services": true,
+ "allow_local_requests_from_web_hooks_and_services": true,
+ "allow_local_requests_from_system_hooks": false
}
```
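+
+For example, the new local-request settings can be changed with a single `PUT`
+call (the token and host are placeholders):
+
+```sh
+curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" \
+  "https://gitlab.example.com/api/v4/application/settings?allow_local_requests_from_web_hooks_and_services=true&allow_local_requests_from_system_hooks=false"
+```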
@@ -177,7 +183,9 @@ are listed in the descriptions of the relevant settings.
| `akismet_api_key` | string | required by: `akismet_enabled` | API key for akismet spam protection. |
| `akismet_enabled` | boolean | no | (**If enabled, requires:** `akismet_api_key`) Enable or disable akismet spam protection. |
| `allow_group_owners_to_manage_ldap` | boolean | no | **(PREMIUM)** Set to `true` to allow group owners to manage LDAP |
-| `allow_local_requests_from_hooks_and_services` | boolean | no | Allow requests to the local network from hooks and services. |
+| `allow_local_requests_from_hooks_and_services` | boolean | no | (Deprecated: Use `allow_local_requests_from_web_hooks_and_services` instead) Allow requests to the local network from hooks and services. |
+| `allow_local_requests_from_web_hooks_and_services` | boolean | no | Allow requests to the local network from web hooks and services. |
+| `allow_local_requests_from_system_hooks` | boolean | no | Allow requests to the local network from system hooks. |
| `authorized_keys_enabled` | boolean | no | By default, we write to the `authorized_keys` file to support Git over SSH without additional configuration. GitLab can be optimized to authenticate SSH keys via the database file. Only disable this if you have configured your OpenSSH server to use the AuthorizedKeysCommand. |
| `auto_devops_domain` | string | no | Specify a domain to use by default for every project's Auto Review Apps and Auto Deploy stages. |
| `auto_devops_enabled` | boolean | no | Enable Auto DevOps for projects by default. It will automatically build, test, and deploy applications based on a predefined CI/CD configuration. |
diff --git a/doc/ci/ci_cd_for_external_repos/bitbucket_integration.md b/doc/ci/ci_cd_for_external_repos/bitbucket_integration.md
index b3110b435db..8fe11140cc7 100644
--- a/doc/ci/ci_cd_for_external_repos/bitbucket_integration.md
+++ b/doc/ci/ci_cd_for_external_repos/bitbucket_integration.md
@@ -14,9 +14,9 @@ To use GitLab CI/CD with a Bitbucket Cloud repository:
1. In GitLab create a **CI/CD for external repo**, select **Repo by URL** and
create the project.
- ![Create project](img/external_repository.png)
+ ![Create project](img/external_repository.png)
- GitLab will import the repository and enable [Pull Mirroring][pull-mirroring].
+ GitLab will import the repository and enable [Pull Mirroring][pull-mirroring].
1. In GitLab create a
[Personal Access Token](../../user/profile/personal_access_tokens.md)
@@ -26,19 +26,19 @@ To use GitLab CI/CD with a Bitbucket Cloud repository:
1. In Bitbucket, from **Settings > Webhooks**, create a new web hook to notify
GitLab of new commits.
- The web hook URL should be set to the GitLab API to trigger pull mirroring,
- using the Personal Access Token we just generated for authentication.
+ The web hook URL should be set to the GitLab API to trigger pull mirroring,
+ using the Personal Access Token we just generated for authentication.
- ```text
- https://gitlab.com/api/v4/projects/<NAMESPACE>%2F<PROJECT>/mirror/pull?private_token=<PERSONAL_ACCESS_TOKEN>
- ```
+ ```text
+ https://gitlab.com/api/v4/projects/<NAMESPACE>%2F<PROJECT>/mirror/pull?private_token=<PERSONAL_ACCESS_TOKEN>
+ ```
- The web hook Trigger should be set to 'Repository Push'.
+ The web hook Trigger should be set to 'Repository Push'.
- ![Bitbucket Cloud webhook](img/bitbucket_webhook.png)
+ ![Bitbucket Cloud webhook](img/bitbucket_webhook.png)
- After saving, test the web hook by pushing a change to your Bitbucket
- repository.
+ After saving, test the web hook by pushing a change to your Bitbucket
+ repository.
1. In Bitbucket, create an **App Password** from **Bitbucket Settings > App
Passwords** to authenticate the build status script setting commit build
@@ -49,104 +49,104 @@ To use GitLab CI/CD with a Bitbucket Cloud repository:
1. In GitLab, from **Settings > CI/CD > Environment variables**, add variables to allow
communication with Bitbucket via the Bitbucket API:
- `BITBUCKET_ACCESS_TOKEN`: the Bitbucket app password created above.
+ `BITBUCKET_ACCESS_TOKEN`: the Bitbucket app password created above.
- `BITBUCKET_USERNAME`: the username of the Bitbucket account.
+ `BITBUCKET_USERNAME`: the username of the Bitbucket account.
- `BITBUCKET_NAMESPACE`: set this if your GitLab and Bitbucket namespaces differ.
+ `BITBUCKET_NAMESPACE`: set this if your GitLab and Bitbucket namespaces differ.
- `BITBUCKET_REPOSITORY`: set this if your GitLab and Bitbucket project names differ.
+ `BITBUCKET_REPOSITORY`: set this if your GitLab and Bitbucket project names differ.
1. In Bitbucket, add a script to push the pipeline status to Bitbucket.
- > Note: changes made in GitLab will be overwritten by any changes made
- > upstream in Bitbucket.
-
- Create a file `build_status` and insert the script below and run
- `chmod +x build_status` in your terminal to make the script executable.
-
- ```bash
- #!/usr/bin/env bash
-
- # Push GitLab CI/CD build status to Bitbucket Cloud
-
- if [ -z "$BITBUCKET_ACCESS_TOKEN" ]; then
- echo "ERROR: BITBUCKET_ACCESS_TOKEN is not set"
- exit 1
- fi
- if [ -z "$BITBUCKET_USERNAME" ]; then
- echo "ERROR: BITBUCKET_USERNAME is not set"
- exit 1
- fi
- if [ -z "$BITBUCKET_NAMESPACE" ]; then
- echo "Setting BITBUCKET_NAMESPACE to $CI_PROJECT_NAMESPACE"
- BITBUCKET_NAMESPACE=$CI_PROJECT_NAMESPACE
- fi
- if [ -z "$BITBUCKET_REPOSITORY" ]; then
- echo "Setting BITBUCKET_REPOSITORY to $CI_PROJECT_NAME"
- BITBUCKET_REPOSITORY=$CI_PROJECT_NAME
- fi
-
- BITBUCKET_API_ROOT="https://api.bitbucket.org/2.0"
- BITBUCKET_STATUS_API="$BITBUCKET_API_ROOT/repositories/$BITBUCKET_NAMESPACE/$BITBUCKET_REPOSITORY/commit/$CI_COMMIT_SHA/statuses/build"
- BITBUCKET_KEY="ci/gitlab-ci/$CI_JOB_NAME"
-
- case "$BUILD_STATUS" in
- running)
- BITBUCKET_STATE="INPROGRESS"
- BITBUCKET_DESCRIPTION="The build is running!"
- ;;
- passed)
- BITBUCKET_STATE="SUCCESSFUL"
- BITBUCKET_DESCRIPTION="The build passed!"
- ;;
- failed)
- BITBUCKET_STATE="FAILED"
- BITBUCKET_DESCRIPTION="The build failed."
- ;;
- esac
-
- echo "Pushing status to $BITBUCKET_STATUS_API..."
- curl --request POST $BITBUCKET_STATUS_API \
- --user $BITBUCKET_USERNAME:$BITBUCKET_ACCESS_TOKEN \
- --header "Content-Type:application/json" \
- --silent \
- --data "{ \"state\": \"$BITBUCKET_STATE\", \"key\": \"$BITBUCKET_KEY\", \"description\":
- \"$BITBUCKET_DESCRIPTION\",\"url\": \"$CI_PROJECT_URL/-/jobs/$CI_JOB_ID\" }"
- ```
+ > Note: changes made in GitLab will be overwritten by any changes made
+ > upstream in Bitbucket.
+
+   Create a file `build_status`, insert the script below, and run
+   `chmod +x build_status` in your terminal to make the script executable.
+
+ ```bash
+ #!/usr/bin/env bash
+
+ # Push GitLab CI/CD build status to Bitbucket Cloud
+
+ if [ -z "$BITBUCKET_ACCESS_TOKEN" ]; then
+ echo "ERROR: BITBUCKET_ACCESS_TOKEN is not set"
+ exit 1
+ fi
+ if [ -z "$BITBUCKET_USERNAME" ]; then
+ echo "ERROR: BITBUCKET_USERNAME is not set"
+ exit 1
+ fi
+ if [ -z "$BITBUCKET_NAMESPACE" ]; then
+ echo "Setting BITBUCKET_NAMESPACE to $CI_PROJECT_NAMESPACE"
+ BITBUCKET_NAMESPACE=$CI_PROJECT_NAMESPACE
+ fi
+ if [ -z "$BITBUCKET_REPOSITORY" ]; then
+ echo "Setting BITBUCKET_REPOSITORY to $CI_PROJECT_NAME"
+ BITBUCKET_REPOSITORY=$CI_PROJECT_NAME
+ fi
+
+ BITBUCKET_API_ROOT="https://api.bitbucket.org/2.0"
+ BITBUCKET_STATUS_API="$BITBUCKET_API_ROOT/repositories/$BITBUCKET_NAMESPACE/$BITBUCKET_REPOSITORY/commit/$CI_COMMIT_SHA/statuses/build"
+ BITBUCKET_KEY="ci/gitlab-ci/$CI_JOB_NAME"
+
+ case "$BUILD_STATUS" in
+ running)
+ BITBUCKET_STATE="INPROGRESS"
+ BITBUCKET_DESCRIPTION="The build is running!"
+ ;;
+ passed)
+ BITBUCKET_STATE="SUCCESSFUL"
+ BITBUCKET_DESCRIPTION="The build passed!"
+ ;;
+ failed)
+ BITBUCKET_STATE="FAILED"
+ BITBUCKET_DESCRIPTION="The build failed."
+ ;;
+ esac
+
+ echo "Pushing status to $BITBUCKET_STATUS_API..."
+ curl --request POST $BITBUCKET_STATUS_API \
+ --user $BITBUCKET_USERNAME:$BITBUCKET_ACCESS_TOKEN \
+ --header "Content-Type:application/json" \
+ --silent \
+ --data "{ \"state\": \"$BITBUCKET_STATE\", \"key\": \"$BITBUCKET_KEY\", \"description\":
+ \"$BITBUCKET_DESCRIPTION\",\"url\": \"$CI_PROJECT_URL/-/jobs/$CI_JOB_ID\" }"
+ ```
1. Still in Bitbucket, create a `.gitlab-ci.yml` file to use the script to push
pipeline success and failures to Bitbucket.
- ```yaml
- stages:
- - test
- - ci_status
-
- unit-tests:
- script:
- - echo "Success. Add your tests!"
-
- success:
- stage: ci_status
- before_script:
- - ""
- after_script:
- - ""
- script:
- - BUILD_STATUS=passed BUILD_KEY=push ./build_status
- when: on_success
-
- failure:
- stage: ci_status
- before_script:
- - ""
- after_script:
- - ""
- script:
- - BUILD_STATUS=failed BUILD_KEY=push ./build_status
- when: on_failure
- ```
+ ```yaml
+ stages:
+ - test
+ - ci_status
+
+ unit-tests:
+ script:
+ - echo "Success. Add your tests!"
+
+ success:
+ stage: ci_status
+ before_script:
+ - ""
+ after_script:
+ - ""
+ script:
+ - BUILD_STATUS=passed BUILD_KEY=push ./build_status
+ when: on_success
+
+ failure:
+ stage: ci_status
+ before_script:
+ - ""
+ after_script:
+ - ""
+ script:
+ - BUILD_STATUS=failed BUILD_KEY=push ./build_status
+ when: on_failure
+ ```
GitLab is now configured to mirror changes from Bitbucket, run CI/CD pipelines
configured in `.gitlab-ci.yml` and push the status to Bitbucket.
diff --git a/doc/ci/docker/using_docker_build.md b/doc/ci/docker/using_docker_build.md
index 10a21edbd34..278a0d6e934 100644
--- a/doc/ci/docker/using_docker_build.md
+++ b/doc/ci/docker/using_docker_build.md
@@ -48,42 +48,42 @@ GitLab Runner then executes job scripts as the `gitlab-runner` user.
1. During GitLab Runner installation select `shell` as method of executing job scripts or use command:
- ```bash
- sudo gitlab-runner register -n \
- --url https://gitlab.com/ \
- --registration-token REGISTRATION_TOKEN \
- --executor shell \
- --description "My Runner"
- ```
+ ```bash
+ sudo gitlab-runner register -n \
+ --url https://gitlab.com/ \
+ --registration-token REGISTRATION_TOKEN \
+ --executor shell \
+ --description "My Runner"
+ ```
1. Install Docker Engine on server.
- For more information how to install Docker Engine on different systems
- checkout the [Supported installations](https://docs.docker.com/engine/installation/).
+   For more information about how to install Docker Engine on different systems,
+   check out the [Supported installations](https://docs.docker.com/engine/installation/).
1. Add `gitlab-runner` user to `docker` group:
- ```bash
- sudo usermod -aG docker gitlab-runner
- ```
+ ```bash
+ sudo usermod -aG docker gitlab-runner
+ ```
1. Verify that `gitlab-runner` has access to Docker:
- ```bash
- sudo -u gitlab-runner -H docker info
- ```
+ ```bash
+ sudo -u gitlab-runner -H docker info
+ ```
- You can now verify that everything works by adding `docker info` to `.gitlab-ci.yml`:
+ You can now verify that everything works by adding `docker info` to `.gitlab-ci.yml`:
- ```yaml
- before_script:
- - docker info
+ ```yaml
+ before_script:
+ - docker info
- build_image:
- script:
- - docker build -t my-docker-image .
- - docker run my-docker-image /script/to/run/tests
- ```
+ build_image:
+ script:
+ - docker build -t my-docker-image .
+ - docker run my-docker-image /script/to/run/tests
+ ```
1. You can now use `docker` command (and **install** `docker-compose` if needed).
@@ -107,83 +107,83 @@ In order to do that, follow the steps:
1. Register GitLab Runner from the command line to use `docker` and `privileged`
mode:
- ```bash
- sudo gitlab-runner register -n \
- --url https://gitlab.com/ \
- --registration-token REGISTRATION_TOKEN \
- --executor docker \
- --description "My Docker Runner" \
- --docker-image "docker:stable" \
- --docker-privileged
- ```
-
- The above command will register a new Runner to use the special
- `docker:stable` image which is provided by Docker. **Notice that it's using
- the `privileged` mode to start the build and service containers.** If you
- want to use [docker-in-docker] mode, you always have to use `privileged = true`
- in your Docker containers.
-
- DANGER: **Danger:**
- By enabling `--docker-privileged`, you are effectively disabling all of
- the security mechanisms of containers and exposing your host to privilege
- escalation which can lead to container breakout. For more information, check
- out the official Docker documentation on
- [Runtime privilege and Linux capabilities][docker-cap].
-
- The above command will create a `config.toml` entry similar to this:
-
- ```toml
- [[runners]]
- url = "https://gitlab.com/"
- token = TOKEN
- executor = "docker"
- [runners.docker]
- tls_verify = false
- image = "docker:stable"
- privileged = true
- disable_cache = false
- volumes = ["/cache"]
- [runners.cache]
- Insecure = false
- ```
+ ```bash
+ sudo gitlab-runner register -n \
+ --url https://gitlab.com/ \
+ --registration-token REGISTRATION_TOKEN \
+ --executor docker \
+ --description "My Docker Runner" \
+ --docker-image "docker:stable" \
+ --docker-privileged
+ ```
+
+ The above command will register a new Runner to use the special
+ `docker:stable` image which is provided by Docker. **Notice that it's using
+ the `privileged` mode to start the build and service containers.** If you
+ want to use [docker-in-docker] mode, you always have to use `privileged = true`
+ in your Docker containers.
+
+ DANGER: **Danger:**
+ By enabling `--docker-privileged`, you are effectively disabling all of
+ the security mechanisms of containers and exposing your host to privilege
+ escalation which can lead to container breakout. For more information, check
+ out the official Docker documentation on
+ [Runtime privilege and Linux capabilities][docker-cap].
+
+ The above command will create a `config.toml` entry similar to this:
+
+ ```toml
+ [[runners]]
+ url = "https://gitlab.com/"
+ token = TOKEN
+ executor = "docker"
+ [runners.docker]
+ tls_verify = false
+ image = "docker:stable"
+ privileged = true
+ disable_cache = false
+ volumes = ["/cache"]
+ [runners.cache]
+ Insecure = false
+ ```
1. You can now use `docker` in the build script (note the inclusion of the
`docker:dind` service):
- ```yaml
- image: docker:stable
-
- variables:
- # When using dind service we need to instruct docker, to talk with the
- # daemon started inside of the service. The daemon is available with
- # a network connection instead of the default /var/run/docker.sock socket.
- #
- # The 'docker' hostname is the alias of the service container as described at
- # https://docs.gitlab.com/ee/ci/docker/using_docker_images.html#accessing-the-services
- #
- # Note that if you're using the Kubernetes executor, the variable should be set to
- # tcp://localhost:2375/ because of how the Kubernetes executor connects services
- # to the job container
- # DOCKER_HOST: tcp://localhost:2375/
- #
- # For non-Kubernetes executors, we use tcp://docker:2375/
- DOCKER_HOST: tcp://docker:2375/
- # When using dind, it's wise to use the overlayfs driver for
- # improved performance.
- DOCKER_DRIVER: overlay2
-
- services:
- - docker:dind
-
- before_script:
- - docker info
-
- build:
- stage: build
- script:
- - docker build -t my-docker-image .
- - docker run my-docker-image /script/to/run/tests
- ```
+ ```yaml
+ image: docker:stable
+
+ variables:
+ # When using dind service we need to instruct docker, to talk with the
+ # daemon started inside of the service. The daemon is available with
+ # a network connection instead of the default /var/run/docker.sock socket.
+ #
+ # The 'docker' hostname is the alias of the service container as described at
+ # https://docs.gitlab.com/ee/ci/docker/using_docker_images.html#accessing-the-services
+ #
+ # Note that if you're using the Kubernetes executor, the variable should be set to
+ # tcp://localhost:2375/ because of how the Kubernetes executor connects services
+ # to the job container
+ # DOCKER_HOST: tcp://localhost:2375/
+ #
+ # For non-Kubernetes executors, we use tcp://docker:2375/
+ DOCKER_HOST: tcp://docker:2375/
+ # When using dind, it's wise to use the overlayfs driver for
+ # improved performance.
+ DOCKER_DRIVER: overlay2
+
+ services:
+ - docker:dind
+
+ before_script:
+ - docker info
+
+ build:
+ stage: build
+ script:
+ - docker build -t my-docker-image .
+ - docker run my-docker-image /script/to/run/tests
+ ```
Docker-in-Docker works well, and is the recommended configuration, but it is
not without its own challenges:
@@ -202,14 +202,14 @@ not without its own challenges:
and use it as your mount point (for a more thorough explanation, check [issue
#41227](https://gitlab.com/gitlab-org/gitlab-ce/issues/41227)):
- ```yaml
- variables:
- MOUNT_POINT: /builds/$CI_PROJECT_PATH/mnt
+ ```yaml
+ variables:
+ MOUNT_POINT: /builds/$CI_PROJECT_PATH/mnt
- script:
- - mkdir -p "$MOUNT_POINT"
- - docker run -v "$MOUNT_POINT:/mnt" my-docker-image
- ```
+ script:
+ - mkdir -p "$MOUNT_POINT"
+ - docker run -v "$MOUNT_POINT:/mnt" my-docker-image
+ ```
An example project using this approach can be found here: <https://gitlab.com/gitlab-examples/docker>.
@@ -230,54 +230,54 @@ In order to do that, follow the steps:
1. Register GitLab Runner from the command line to use `docker` and share `/var/run/docker.sock`:
- ```bash
- sudo gitlab-runner register -n \
- --url https://gitlab.com/ \
- --registration-token REGISTRATION_TOKEN \
- --executor docker \
- --description "My Docker Runner" \
- --docker-image "docker:stable" \
- --docker-volumes /var/run/docker.sock:/var/run/docker.sock
- ```
-
- The above command will register a new Runner to use the special
- `docker:stable` image which is provided by Docker. **Notice that it's using
- the Docker daemon of the Runner itself, and any containers spawned by docker
- commands will be siblings of the Runner rather than children of the runner.**
- This may have complications and limitations that are unsuitable for your workflow.
-
- The above command will create a `config.toml` entry similar to this:
-
- ```toml
- [[runners]]
- url = "https://gitlab.com/"
- token = REGISTRATION_TOKEN
- executor = "docker"
- [runners.docker]
- tls_verify = false
- image = "docker:stable"
- privileged = false
- disable_cache = false
- volumes = ["/var/run/docker.sock:/var/run/docker.sock", "/cache"]
- [runners.cache]
- Insecure = false
- ```
+ ```bash
+ sudo gitlab-runner register -n \
+ --url https://gitlab.com/ \
+ --registration-token REGISTRATION_TOKEN \
+ --executor docker \
+ --description "My Docker Runner" \
+ --docker-image "docker:stable" \
+ --docker-volumes /var/run/docker.sock:/var/run/docker.sock
+ ```
+
+ The above command will register a new Runner to use the special
+ `docker:stable` image which is provided by Docker. **Notice that it's using
+ the Docker daemon of the Runner itself, and any containers spawned by docker
+ commands will be siblings of the Runner rather than children of the runner.**
+ This may have complications and limitations that are unsuitable for your workflow.
+
+ The above command will create a `config.toml` entry similar to this:
+
+ ```toml
+ [[runners]]
+ url = "https://gitlab.com/"
+ token = REGISTRATION_TOKEN
+ executor = "docker"
+ [runners.docker]
+ tls_verify = false
+ image = "docker:stable"
+ privileged = false
+ disable_cache = false
+ volumes = ["/var/run/docker.sock:/var/run/docker.sock", "/cache"]
+ [runners.cache]
+ Insecure = false
+ ```
1. You can now use `docker` in the build script (note that you don't need to
include the `docker:dind` service as when using the Docker in Docker executor):
- ```yaml
- image: docker:stable
+ ```yaml
+ image: docker:stable
- before_script:
- - docker info
+ before_script:
+ - docker info
- build:
- stage: build
- script:
- - docker build -t my-docker-image .
- - docker run my-docker-image /script/to/run/tests
- ```
+ build:
+ stage: build
+ script:
+ - docker build -t my-docker-image .
+ - docker run my-docker-image /script/to/run/tests
+ ```
While the above method avoids using Docker in privileged mode, you should be
aware of the following implications:
@@ -293,9 +293,9 @@ aware of the following implications:
work as expected since volume mounting is done in the context of the host
machine, not the build container. For example:
- ```sh
- docker run --rm -t -i -v $(pwd)/src:/home/app/src test-image:latest run_app_tests
- ```
+ ```sh
+ docker run --rm -t -i -v $(pwd)/src:/home/app/src test-image:latest run_app_tests
+ ```
## Making docker-in-docker builds faster with Docker layer caching
@@ -366,23 +366,23 @@ which can be avoided if a different driver is used, for example `overlay2`.
1. Make sure a recent kernel is used, preferably `>= 4.2`.
1. Check whether the `overlay` module is loaded:
- ```sh
- sudo lsmod | grep overlay
- ```
+ ```sh
+ sudo lsmod | grep overlay
+ ```
- If you see no result, then it isn't loaded. To load it use:
+ If you see no result, then it isn't loaded. To load it use:
- ```sh
- sudo modprobe overlay
- ```
+ ```sh
+ sudo modprobe overlay
+ ```
- If everything went fine, you need to make sure module is loaded on reboot.
- On Ubuntu systems, this is done by editing `/etc/modules`. Just add the
- following line into it:
+   If everything went fine, you need to make sure the module is loaded on reboot.
+   On Ubuntu systems, this is done by editing `/etc/modules`. Just add the
+   following line to it:
- ```text
- overlay
- ```
+ ```text
+ overlay
+ ```
### Use driver per project
@@ -450,9 +450,9 @@ For all projects, mostly suitable for public ones:
your Docker images and has read/write access to the Registry. This is ephemeral,
so it's only valid for one job. You can use the following example as-is:
- ```sh
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
- ```
+ ```sh
+ docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
+ ```
For private and internal projects:
@@ -465,9 +465,9 @@ For private and internal projects:
Replace the `<username>` and `<access_token>` in the following example:
- ```sh
- docker login -u <username> -p <access_token> $CI_REGISTRY
- ```
+ ```sh
+ docker login -u <username> -p <access_token> $CI_REGISTRY
+ ```
- **Using the GitLab Deploy Token**: You can create and use a
[special deploy token](../../user/project/deploy_tokens/index.md#gitlab-deploy-token)
@@ -475,9 +475,9 @@ For private and internal projects:
Once created, you can use the special environment variables, and GitLab CI/CD
will fill them in for you. You can use the following example as-is:
- ```sh
- docker login -u $CI_DEPLOY_USER -p $CI_DEPLOY_PASSWORD $CI_REGISTRY
- ```
+ ```sh
+ docker login -u $CI_DEPLOY_USER -p $CI_DEPLOY_PASSWORD $CI_REGISTRY
+ ```
### Container Registry examples
diff --git a/doc/ci/docker/using_docker_images.md b/doc/ci/docker/using_docker_images.md
index f3896c5232c..d5056568dff 100644
--- a/doc/ci/docker/using_docker_images.md
+++ b/doc/ci/docker/using_docker_images.md
@@ -305,25 +305,25 @@ For example, the following two definitions are equal:
1. Using a string as an option to `image` and `services`:
- ```yaml
- image: "registry.example.com/my/image:latest"
+ ```yaml
+ image: "registry.example.com/my/image:latest"
- services:
- - postgresql:9.4
- - redis:latest
- ```
+ services:
+ - postgresql:9.4
+ - redis:latest
+ ```
1. Using a map as an option to `image` and `services`. The use of `image:name` is
required:
- ```yaml
- image:
- name: "registry.example.com/my/image:latest"
+ ```yaml
+ image:
+ name: "registry.example.com/my/image:latest"
- services:
- - name: postgresql:9.4
- - name: redis:latest
- ```
+ services:
+ - name: postgresql:9.4
+ - name: redis:latest
+ ```
### Available settings for `image`
@@ -526,6 +526,7 @@ it's provided as an environment variable. This is because GitLab Runnner uses **
runtime.
### Using statically-defined credentials
+
There are two approaches that you can take in order to access a
private registry. Both require setting the environment variable
`DOCKER_AUTH_CONFIG` with appropriate authentication info.
@@ -555,18 +556,18 @@ There are two ways to determine the value of `DOCKER_AUTH_CONFIG`:
- **First way -** Do a `docker login` on your local machine:
- ```bash
- docker login registry.example.com:5000 --username my_username --password my_password
- ```
+ ```bash
+ docker login registry.example.com:5000 --username my_username --password my_password
+ ```
- Then copy the content of `~/.docker/config.json`.
+ Then copy the content of `~/.docker/config.json`.
- If you don't need access to the registry from your computer, you
- can do a `docker logout`:
+ If you don't need access to the registry from your computer, you
+ can do a `docker logout`:
- ```bash
- docker logout registry.example.com:5000
- ```
+ ```bash
+ docker logout registry.example.com:5000
+ ```
- **Second way -** In some setups, it's possible that Docker client
will use the available system keystore to store the result of `docker
@@ -575,12 +576,12 @@ There are two ways to determine the value of `DOCKER_AUTH_CONFIG`:
`${username}:${password}` manually. Open a terminal and execute the
following command:
- ```bash
- echo -n "my_username:my_password" | base64
+ ```bash
+ echo -n "my_username:my_password" | base64
- # Example output to copy
- bXlfdXNlcm5hbWU6bXlfcGFzc3dvcmQ=
- ```
+ # Example output to copy
+ bXlfdXNlcm5hbWU6bXlfcGFzc3dvcmQ=
+ ```
#### Configuring a job
@@ -590,25 +591,25 @@ follow these steps:
1. Create a [variable](../variables/README.md#gitlab-cicd-environment-variables) `DOCKER_AUTH_CONFIG` with the content of the
Docker configuration file as the value:
- ```json
- {
- "auths": {
- "registry.example.com:5000": {
- "auth": "bXlfdXNlcm5hbWU6bXlfcGFzc3dvcmQ="
- }
- }
- }
- ```
+ ```json
+ {
+ "auths": {
+ "registry.example.com:5000": {
+ "auth": "bXlfdXNlcm5hbWU6bXlfcGFzc3dvcmQ="
+ }
+ }
+ }
+ ```
1. You can now use any private image from `registry.example.com:5000` defined in
`image` and/or `services` in your `.gitlab-ci.yml` file:
- ```yaml
- image: registry.example.com:5000/namespace/image:tag
- ```
+ ```yaml
+ image: registry.example.com:5000/namespace/image:tag
+ ```
- In the example above, GitLab Runner will look at `registry.example.com:5000` for the
- image `namespace/image:tag`.
+ In the example above, GitLab Runner will look at `registry.example.com:5000` for the
+ image `namespace/image:tag`.
You can add configuration for as many registries as you want, adding more
registries to the `"auths"` hash as described above.
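+For illustration only, a hypothetical `DOCKER_AUTH_CONFIG` value covering two registries
+(the second hostname and its base64 value are placeholders) could look like this:
+
+```json
+{
+  "auths": {
+    "registry.example.com:5000": {
+      "auth": "bXlfdXNlcm5hbWU6bXlfcGFzc3dvcmQ="
+    },
+    "registry.other.example.com:5000": {
+      "auth": "<base64 of username:password for the second registry>"
+    }
+  }
+}
+```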
@@ -637,10 +638,10 @@ To add `DOCKER_AUTH_CONFIG` to a Runner:
1. Modify the Runner's `config.toml` file as follows:
- ```toml
- [[runners]]
- environment = ["DOCKER_AUTH_CONFIG={\"auths\":{\"registry.example.com:5000\":{\"auth\":\"bXlfdXNlcm5hbWU6bXlfcGFzc3dvcmQ=\"}}}"]
- ```
+ ```toml
+ [[runners]]
+ environment = ["DOCKER_AUTH_CONFIG={\"auths\":{\"registry.example.com:5000\":{\"auth\":\"bXlfdXNlcm5hbWU6bXlfcGFzc3dvcmQ=\"}}}"]
+ ```
1. Restart the Runner service.
@@ -662,16 +663,17 @@ To configure credentials store, follow these steps:
Make sure helper program is available in GitLab Runner `$PATH`.
1. Make GitLab Runner use it. There are two ways to accomplish this. Either:
+
- Create a
[variable](../variables/README.md#gitlab-cicd-environment-variables)
`DOCKER_AUTH_CONFIG` with the content of the
- Docker configuration file as the value:
+ Docker configuration file as the value:
- ```json
- {
- "credsStore": "osxkeychain"
- }
- ```
+ ```json
+ {
+ "credsStore": "osxkeychain"
+ }
+ ```
- Or, if you are running self-hosted Runners, add the above JSON to
`${GITLAB_RUNNER_HOME}/.docker/config.json`. GitLab Runner will read this config file
@@ -693,17 +695,18 @@ To configure access for `aws_account_id.dkr.ecr.region.amazonaws.com`, follow th
1. Make sure `docker-credential-ecr-login` is available in GitLab Runner's `$PATH`.
1. Make GitLab Runner use it. There are two ways to accomplish this. Either:
+
- Create a [variable](../variables/README.md#gitlab-cicd-environment-variables)
`DOCKER_AUTH_CONFIG` with the content of the
- Docker configuration file as the value:
+ Docker configuration file as the value:
- ```json
- {
- "credHelpers": {
- "aws_account_id.dkr.ecr.region.amazonaws.com": "ecr-login"
- }
- }
- ```
+ ```json
+ {
+ "credHelpers": {
+ "aws_account_id.dkr.ecr.region.amazonaws.com": "ecr-login"
+ }
+ }
+ ```
- Or, if you are running self-hosted Runners,
add the above JSON to `${GITLAB_RUNNER_HOME}/.docker/config.json`.
@@ -713,12 +716,12 @@ To configure access for `aws_account_id.dkr.ecr.region.amazonaws.com`, follow th
1. You can now use any private image from `aws_account_id.dkr.ecr.region.amazonaws.com` defined in
`image` and/or `services` in your `.gitlab-ci.yml` file:
- ```yaml
- image: aws_account_id.dkr.ecr.region.amazonaws.com/private/image:latest
- ```
+ ```yaml
+ image: aws_account_id.dkr.ecr.region.amazonaws.com/private/image:latest
+ ```
- In the example above, GitLab Runner will look at `aws_account_id.dkr.ecr.region.amazonaws.com` for the
- image `private/image:latest`.
+ In the example above, GitLab Runner will look at `aws_account_id.dkr.ecr.region.amazonaws.com` for the
+ image `private/image:latest`.
You can add configuration for as many registries as you want, adding more
registries to the `"credHelpers"` hash as described above.
diff --git a/doc/ci/examples/artifactory_and_gitlab/index.md b/doc/ci/examples/artifactory_and_gitlab/index.md
index c9f700ed190..940c4711132 100644
--- a/doc/ci/examples/artifactory_and_gitlab/index.md
+++ b/doc/ci/examples/artifactory_and_gitlab/index.md
@@ -39,9 +39,10 @@ project:
1. Create a new project by selecting **Import project from ➔ Repo by URL**
1. Add the following URL:
- ```
- https://gitlab.com/gitlab-examples/maven/simple-maven-dep.git
- ```
+ ```
+ https://gitlab.com/gitlab-examples/maven/simple-maven-dep.git
+ ```
+
1. Click **Create project**
This application is nothing more than a basic class with a stub for a JUnit based test suite.
@@ -63,15 +64,15 @@ The application is ready to use, but you need some additional steps to deploy it
1. Copy the snippet in the `pom.xml` file for your project, just after the
`dependencies` section. The snippet should look like this:
- ```xml
- <distributionManagement>
- <repository>
- <id>central</id>
- <name>83d43b5afeb5-releases</name>
- <url>${env.MAVEN_REPO_URL}/libs-release-local</url>
- </repository>
- </distributionManagement>
- ```
+ ```xml
+ <distributionManagement>
+ <repository>
+ <id>central</id>
+ <name>83d43b5afeb5-releases</name>
+ <url>${env.MAVEN_REPO_URL}/libs-release-local</url>
+ </repository>
+ </distributionManagement>
+ ```
Another step you need to do before you can deploy the dependency to Artifactory
is to configure the authentication data. It is a simple task, but Maven requires
@@ -86,18 +87,18 @@ parameter in `.gitlab-ci.yml` to use the custom location instead of the default
1. Create a file called `settings.xml` in the `.m2` folder
1. Copy the following content into a `settings.xml` file:
- ```xml
- <settings xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.1.0 http://maven.apache.org/xsd/settings-1.1.0.xsd"
- xmlns="http://maven.apache.org/SETTINGS/1.1.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
- <servers>
- <server>
- <id>central</id>
- <username>${env.MAVEN_REPO_USER}</username>
- <password>${env.MAVEN_REPO_PASS}</password>
- </server>
- </servers>
- </settings>
- ```
+ ```xml
+ <settings xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.1.0 http://maven.apache.org/xsd/settings-1.1.0.xsd"
+ xmlns="http://maven.apache.org/SETTINGS/1.1.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+ <servers>
+ <server>
+ <id>central</id>
+ <username>${env.MAVEN_REPO_USER}</username>
+ <password>${env.MAVEN_REPO_PASS}</password>
+ </server>
+ </servers>
+ </settings>
+ ```
Username and password will be replaced by the correct values using variables.
@@ -187,9 +188,10 @@ We'll use again a Maven app that can be cloned from our example project:
1. Create a new project by selecting **Import project from ➔ Repo by URL**
1. Add the following URL:
- ```
- https://gitlab.com/gitlab-examples/maven/simple-maven-app.git
- ```
+ ```
+ https://gitlab.com/gitlab-examples/maven/simple-maven-app.git
+ ```
+
1. Click **Create project**
This one is a simple app as well. If you look at the `src/main/java/com/example/app/App.java`
@@ -204,13 +206,13 @@ Since Maven doesn't know how to resolve the dependency, you need to modify the c
1. Copy the snippet in the `dependencies` section of the `pom.xml` file.
The snippet should look like this:
- ```xml
- <dependency>
- <groupId>com.example.dep</groupId>
- <artifactId>simple-maven-dep</artifactId>
- <version>1.0</version>
- </dependency>
- ```
+ ```xml
+ <dependency>
+ <groupId>com.example.dep</groupId>
+ <artifactId>simple-maven-dep</artifactId>
+ <version>1.0</version>
+ </dependency>
+ ```
### Configure the Artifactory repository location
diff --git a/doc/ci/examples/test_phoenix_app_with_gitlab_ci_cd/index.md b/doc/ci/examples/test_phoenix_app_with_gitlab_ci_cd/index.md
index a5fed00972f..0e595e1a0be 100644
--- a/doc/ci/examples/test_phoenix_app_with_gitlab_ci_cd/index.md
+++ b/doc/ci/examples/test_phoenix_app_with_gitlab_ci_cd/index.md
@@ -188,28 +188,27 @@ when running our Phoenix in our `localhost`.
- Open `hello_gitlab_ci/config/test.exs` on your favorite code editor
- Go to **Configure your database** session and edit the block to include `System.get_env`:
- ```elixir
- # Configure your database
- config :hello_gitlab_ci, HelloGitlabCi.Repo,
- adapter: Ecto.Adapters.Postgres,
- username: System.get_env("POSTGRES_USER") || "postgres",
- password: System.get_env("POSTGRES_PASSWORD") || "postgres",
- database: System.get_env("POSTGRES_DB") || "hello_gitlab_ci_test",
- hostname: System.get_env("POSTGRES_HOST") || "localhost",
- pool: Ecto.Adapters.SQL.Sandbox
- ```
-
- We'll need these system variables later on.
+ ```elixir
+ # Configure your database
+ config :hello_gitlab_ci, HelloGitlabCi.Repo,
+ adapter: Ecto.Adapters.Postgres,
+ username: System.get_env("POSTGRES_USER") || "postgres",
+ password: System.get_env("POSTGRES_PASSWORD") || "postgres",
+ database: System.get_env("POSTGRES_DB") || "hello_gitlab_ci_test",
+ hostname: System.get_env("POSTGRES_HOST") || "localhost",
+ pool: Ecto.Adapters.SQL.Sandbox
+ ```
+
+ We'll need these system variables later on.
- Create an empty file named `.gitkeep` into `hello_gitlab_ci/priv/repo/migrations`
- As our project is still fresh, we don't have any data on our database, so, the `migrations`
-directory will be empty.
- Without `.gitkeep`, git will not upload this empty directory and we'll got an error when running our
-test on GitLab.
+  As our project is still fresh, we don't have any data in our database, so the `migrations`
+  directory will be empty.
+  Without `.gitkeep`, Git will not upload this empty directory and we'll get an error when running our
+  tests on GitLab.
- > **Note:**
- If we add a folder via the GitLab UI, GitLab itself will add the `.gitkeep` to that new dir.
+ > **Note:** If we add a folder via the GitLab UI, GitLab itself will add the `.gitkeep` to that new dir.
Now, let's run a local test and see if everything we did didn't break anything.
@@ -248,64 +247,64 @@ project.
- The fastest and easiest way to do this, is to click on **Set up CI** on project's main page:
- ![Set up CI](img/setup-ci.png)
+ ![Set up CI](img/setup-ci.png)
- On next screen, we can select a template ready to go. Click on **Apply a GitLab CI/CD Yaml
template** and select **Elixir**:
- ![Select template](img/select-template.png)
+ ![Select template](img/select-template.png)
- This template file tells GitLab CI/CD about what we wish to do every time a new commit is made.
- However, we have to adapt it to run a Phoenix app.
+   This template file tells GitLab CI/CD what we wish to do every time a new commit is made.
+   However, we have to adapt it to run a Phoenix app.
- The first line tells GitLab what Docker image will be used.
- Remember when we learn about Runners, the isolated virtual machine where GitLab CI/CD build and test
- our application? This virtual machine must have all dependencies to run our application. This is
- where a Docker image is needed. The correct image will provide the entire system for us.
+   Remember when we learned about Runners, the isolated virtual machine where GitLab CI/CD builds and tests
+   our application? This virtual machine must have all dependencies to run our application. This is
+   where a Docker image is needed. The correct image will provide the entire system for us.
- As a suggestion, you can use [trenpixster's elixir image][docker-image], which already has all
- dependencies for Phoenix installed, such as Elixir, Erlang, NodeJS and PostgreSQL:
+ As a suggestion, you can use [trenpixster's elixir image][docker-image], which already has all
+ dependencies for Phoenix installed, such as Elixir, Erlang, NodeJS and PostgreSQL:
- ```yml
- image: trenpixster/elixir:latest
- ```
+ ```yml
+ image: trenpixster/elixir:latest
+ ```
- At `services` session, we'll only use `postgres`, so we'll delete `mysql` and `redis` lines:
- ```yml
- services:
- - postgres:latest
- ```
+ ```yml
+ services:
+ - postgres:latest
+ ```
- Now, we'll create a new entry called `variables`, before `before_script` session:
- ```yml
- variables:
- POSTGRES_DB: hello_gitlab_ci_test
- POSTGRES_HOST: postgres
- POSTGRES_USER: postgres
- POSTGRES_PASSWORD: "postgres"
- MIX_ENV: "test"
- ```
+ ```yml
+ variables:
+ POSTGRES_DB: hello_gitlab_ci_test
+ POSTGRES_HOST: postgres
+ POSTGRES_USER: postgres
+ POSTGRES_PASSWORD: "postgres"
+ MIX_ENV: "test"
+ ```
- Here, we are setting up the values for GitLab CI/CD authenticate into PostgreSQL, as we did on
- `config/test.exs` earlier.
+   Here, we are setting up the values for GitLab CI/CD to authenticate with PostgreSQL, as we did in
+   `config/test.exs` earlier.
- In `before_script` session, we'll add some commands to prepare everything to the test:
- ```yml
- before_script:
- - apt-get update && apt-get -y install postgresql-client
- - mix local.hex --force
- - mix deps.get --only test
- - mix ecto.create
- - mix ecto.migrate
- ```
-
- It's important to install `postgresql-client` to let GitLab CI/CD access PostgreSQL and create our
- database with the login information provided earlier. More important is to respect the indentation,
- to avoid syntax errors when running the build.
+ ```yml
+ before_script:
+ - apt-get update && apt-get -y install postgresql-client
+ - mix local.hex --force
+ - mix deps.get --only test
+ - mix ecto.create
+ - mix ecto.migrate
+ ```
+
+   It's important to install `postgresql-client` to let GitLab CI/CD access PostgreSQL and create our
+   database with the login information provided earlier. It's even more important to respect the indentation,
+   to avoid syntax errors when running the build.
- And finally, we'll let `mix` session intact.
diff --git a/doc/ci/ssh_keys/README.md b/doc/ci/ssh_keys/README.md
index 69591ed605c..d9f022a7125 100644
--- a/doc/ci/ssh_keys/README.md
+++ b/doc/ci/ssh_keys/README.md
@@ -57,44 +57,44 @@ to access it. This is where an SSH key pair comes in handy.
1. Modify your `.gitlab-ci.yml` with a `before_script` action. In the following
example, a Debian based image is assumed. Edit to your needs:
- ```yaml
- before_script:
- ##
- ## Install ssh-agent if not already installed, it is required by Docker.
- ## (change apt-get to yum if you use an RPM-based image)
- ##
- - 'which ssh-agent || ( apt-get update -y && apt-get install openssh-client -y )'
-
- ##
- ## Run ssh-agent (inside the build environment)
- ##
- - eval $(ssh-agent -s)
-
- ##
- ## Add the SSH key stored in SSH_PRIVATE_KEY variable to the agent store
- ## We're using tr to fix line endings which makes ed25519 keys work
- ## without extra base64 encoding.
- ## https://gitlab.com/gitlab-examples/ssh-private-key/issues/1#note_48526556
- ##
- - echo "$SSH_PRIVATE_KEY" | tr -d '\r' | ssh-add - > /dev/null
-
- ##
- ## Create the SSH directory and give it the right permissions
- ##
- - mkdir -p ~/.ssh
- - chmod 700 ~/.ssh
-
- ##
- ## Optionally, if you will be using any Git commands, set the user name and
- ## and email.
- ##
- #- git config --global user.email "user@example.com"
- #- git config --global user.name "User name"
- ```
-
- NOTE: **Note:**
- The [`before_script`](../yaml/README.md#before_script-and-after_script) can be set globally
- or per-job.
+ ```yaml
+ before_script:
+ ##
+ ## Install ssh-agent if not already installed, it is required by Docker.
+ ## (change apt-get to yum if you use an RPM-based image)
+ ##
+ - 'which ssh-agent || ( apt-get update -y && apt-get install openssh-client -y )'
+
+ ##
+ ## Run ssh-agent (inside the build environment)
+ ##
+ - eval $(ssh-agent -s)
+
+ ##
+ ## Add the SSH key stored in SSH_PRIVATE_KEY variable to the agent store
+ ## We're using tr to fix line endings which makes ed25519 keys work
+ ## without extra base64 encoding.
+ ## https://gitlab.com/gitlab-examples/ssh-private-key/issues/1#note_48526556
+ ##
+ - echo "$SSH_PRIVATE_KEY" | tr -d '\r' | ssh-add - > /dev/null
+
+ ##
+ ## Create the SSH directory and give it the right permissions
+ ##
+ - mkdir -p ~/.ssh
+ - chmod 700 ~/.ssh
+
+ ##
+ ## Optionally, if you will be using any Git commands, set the user name and
+ ## and email.
+ ##
+ #- git config --global user.email "user@example.com"
+ #- git config --global user.name "User name"
+ ```
+
+ NOTE: **Note:**
+ The [`before_script`](../yaml/README.md#before_script-and-after_script) can be set globally
+ or per-job.
1. Make sure the private server's [SSH host keys are verified](#verifying-the-ssh-host-keys).
@@ -118,9 +118,9 @@ on, and use that key for all projects that are run on this machine.
1. Then from the terminal login as the `gitlab-runner` user:
- ```
- sudo su - gitlab-runner
- ```
+ ```
+ sudo su - gitlab-runner
+ ```
1. Generate the SSH key pair as described in the instructions to
[generate an SSH key](../../ssh/README.md#generating-a-new-ssh-key-pair).
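+   For reference, a minimal sketch of generating a key for this user (pick the key
+   type and options recommended in the linked instructions):
+
+   ```sh
+   # Run as the gitlab-runner user; the comment is only a label
+   ssh-keygen -t ed25519 -C "gitlab-runner"
+   ```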
diff --git a/doc/ci/variables/README.md b/doc/ci/variables/README.md
index 4d6ca8cff6d..c63b1e104ed 100644
--- a/doc/ci/variables/README.md
+++ b/doc/ci/variables/README.md
@@ -370,8 +370,11 @@ variables take precedence over those defined in `.gitlab-ci.yml`.
## Unsupported variables
There are cases where some variables cannot be used in the context of a
-`.gitlab-ci.yml` definition (for example under `script`). Read more
-about which variables are [not supported](where_variables_can_be_used.md).
+`.gitlab-ci.yml` definition (for example under `script`). Read more about which variables are [not supported](where_variables_can_be_used.md).
+
+## Where variables can be used
+
+See [where variables can be used](where_variables_can_be_used.md) for a description of where and how the different types of variables can be used.
## Advanced use
@@ -481,81 +484,86 @@ Below you can find supported syntax reference:
1. Equality matching using a string
- > Example: `$VARIABLE == "some value"`
+ Examples:
- > Example: `$VARIABLE != "some value"` (introduced in GitLab 11.11)
+ - `$VARIABLE == "some value"`
+ - `$VARIABLE != "some value"` (introduced in GitLab 11.11)
- You can use equality operator `==` or `!=` to compare a variable content to a
- string. We support both, double quotes and single quotes to define a string
- value, so both `$VARIABLE == "some value"` and `$VARIABLE == 'some value'`
- are supported. `"some value" == $VARIABLE` is correct too.
+   You can use the equality operators `==` and `!=` to compare a variable's content to a
+   string. Both double quotes and single quotes are supported for defining a string
+   value, so both `$VARIABLE == "some value"` and `$VARIABLE == 'some value'`
+   are supported. `"some value" == $VARIABLE` is correct too.
1. Checking for an undefined value
- > Example: `$VARIABLE == null`
+ Examples:
- > Example: `$VARIABLE != null` (introduced in GitLab 11.11)
+ - `$VARIABLE == null`
+ - `$VARIABLE != null` (introduced in GitLab 11.11)
- It sometimes happens that you want to check whether a variable is defined
- or not. To do that, you can compare a variable to `null` keyword, like
- `$VARIABLE == null`. This expression is going to evaluate to truth if
- variable is not defined when `==` is used, or to falsey if `!=` is used.
+   It sometimes happens that you want to check whether a variable is defined
+   or not. To do that, you can compare the variable to the `null` keyword, as in
+   `$VARIABLE == null`. If the variable is not defined, this expression evaluates
+   to true when `==` is used, and to false when `!=` is used.
1. Checking for an empty variable
- > Example: `$VARIABLE == ""`
+ Examples:
+
+ - `$VARIABLE == ""`
+ - `$VARIABLE != ""` (introduced in GitLab 11.11)
- > Example: `$VARIABLE != ""` (introduced in GitLab 11.11)
-
- If you want to check whether a variable is defined, but is empty, you can
- simply compare it against an empty string, like `$VAR == ''` or non-empty
- string `$VARIABLE != ""`.
+   If you want to check whether a variable is defined but empty, you can simply
+   compare it against an empty string, like `$VAR == ''`, or check that it is
+   non-empty with `$VARIABLE != ""`.
1. Comparing two variables
- > Example: `$VARIABLE_1 == $VARIABLE_2`
+ Examples:
- > Example: `$VARIABLE_1 != $VARIABLE_2` (introduced in GitLab 11.11)
+ - `$VARIABLE_1 == $VARIABLE_2`
+ - `$VARIABLE_1 != $VARIABLE_2` (introduced in GitLab 11.11)
- It is possible to compare two variables. This is going to compare values
- of these variables.
+   It is possible to compare two variables. This compares the values of the
+   two variables.
1. Variable presence check
- > Example: `$STAGING`
+ Example: `$STAGING`
- If you only want to create a job when there is some variable present,
- which means that it is defined and non-empty, you can simply use
- variable name as an expression, like `$STAGING`. If `$STAGING` variable
- is defined, and is non empty, expression will evaluate to truth.
- `$STAGING` value needs to a string, with length higher than zero.
- Variable that contains only whitespace characters is not an empty variable.
+   If you only want to create a job when a certain variable is present, meaning
+   that it is defined and non-empty, you can simply use the variable name as an
+   expression, like `$STAGING`. If the `$STAGING` variable is defined and
+   non-empty, the expression evaluates to true. The value of `$STAGING` needs to
+   be a string with a length greater than zero. A variable that contains only
+   whitespace characters is not considered empty.
1. Pattern matching (introduced in GitLab 11.0)
- > Example: `$VARIABLE =~ /^content.*/`
+ Examples:
- > Example: `$VARIABLE_1 !~ /^content.*/` (introduced in GitLab 11.11)
+ - `$VARIABLE =~ /^content.*/`
+ - `$VARIABLE_1 !~ /^content.*/` (introduced in GitLab 11.11)
- It is possible perform pattern matching against a variable and regular
- expression. Expression like this evaluates to truth if matches are found
- when using `=~`. It evaluates to truth if matches are not found when `!~` is used.
+   It is possible to perform pattern matching against a variable and a regular
+   expression. Such an expression evaluates to true if a match is found when
+   `=~` is used, and to true if no match is found when `!~` is used.
- Pattern matching is case-sensitive by default. Use `i` flag modifier, like
- `/pattern/i` to make a pattern case-insensitive.
+   Pattern matching is case-sensitive by default. Use the `i` flag modifier, like
+   `/pattern/i`, to make a pattern case-insensitive.
1. Conjunction / Disjunction ([introduced](https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/27925) in GitLab 12.0)
- > Example: `$VARIABLE1 =~ /^content.*/ && $VARIABLE2 == "something"`
-
- > Example: `$VARIABLE1 =~ /^content.*/ && $VARIABLE2 =~ /thing$/ && $VARIABLE3`
+ Examples:
- > Example: `$VARIABLE1 =~ /^content.*/ || $VARIABLE2 =~ /thing$/ && $VARIABLE3`
+ - `$VARIABLE1 =~ /^content.*/ && $VARIABLE2 == "something"`
+ - `$VARIABLE1 =~ /^content.*/ && $VARIABLE2 =~ /thing$/ && $VARIABLE3`
+ - `$VARIABLE1 =~ /^content.*/ || $VARIABLE2 =~ /thing$/ && $VARIABLE3`
- It is possible to join multiple conditions using `&&` or `||`. Any of the otherwise
- supported syntax may be used in a conjunctive or disjunctive statement.
- Precedence of operators follows standard Ruby 2.5 operation
- [precedence](https://ruby-doc.org/core-2.5.0/doc/syntax/precedence_rdoc.html).
+ It is possible to join multiple conditions using `&&` or `||`. Any of the otherwise
+ supported syntax may be used in a conjunctive or disjunctive statement.
+ Precedence of operators follows standard Ruby 2.5 operation
+ [precedence](https://ruby-doc.org/core-2.5.0/doc/syntax/precedence_rdoc.html).
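+Below is a sketch of how such expressions are typically used with `only: variables`
+in `.gitlab-ci.yml` (the job name, script, and variable names are illustrative):
+
+```yaml
+deploy-to-staging:
+  script: ./deploy.sh staging
+  only:
+    variables:
+      # Run when a staging release is requested, or when $STAGING is set and non-empty
+      - $RELEASE == "staging"
+      - $STAGING
+```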
## Debug tracing
diff --git a/doc/ci/yaml/README.md b/doc/ci/yaml/README.md
index 201047b70b9..a6051e87366 100644
--- a/doc/ci/yaml/README.md
+++ b/doc/ci/yaml/README.md
@@ -2468,20 +2468,20 @@ There are three possible values: `none`, `normal`, and `recursive`:
- `normal` means that only the top-level submodules will be included. It is
equivalent to:
- ```
- git submodule sync
- git submodule update --init
- ```
+ ```
+ git submodule sync
+ git submodule update --init
+ ```
- `recursive` means that all submodules (including submodules of submodules)
will be included. This feature needs Git v1.8.1 and later. When using a
GitLab Runner with an executor not based on Docker, make sure the Git version
meets that requirement. It is equivalent to:
- ```
- git submodule sync --recursive
- git submodule update --init --recursive
- ```
+ ```
+ git submodule sync --recursive
+ git submodule update --init --recursive
+ ```
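+As a quick sketch, the strategy is selected by setting the `GIT_SUBMODULE_STRATEGY`
+variable, for example globally in `.gitlab-ci.yml`:
+
+```yaml
+variables:
+  GIT_SUBMODULE_STRATEGY: recursive
+```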
Note that for this feature to work correctly, the submodules must be configured
(in `.gitmodules`) with either:
diff --git a/doc/development/README.md b/doc/development/README.md
index 99c88146be5..44283a3ab0c 100644
--- a/doc/development/README.md
+++ b/doc/development/README.md
@@ -150,6 +150,10 @@ description: 'Learn how to contribute to GitLab.'
- [Go Guidelines](go_guide/index.md)
+## Shell Scripting guides
+
+- [Shell scripting standards and style guidelines](shell_scripting_guide/index.md)
+
## Other GitLab Development Kit (GDK) guides
- [Run full Auto DevOps cycle in a GDK instance](https://gitlab.com/gitlab-org/gitlab-development-kit/blob/master/doc/howto/auto_devops.md)
diff --git a/doc/development/contributing/issue_workflow.md b/doc/development/contributing/issue_workflow.md
index 79c701d7abf..39f12e6886e 100644
--- a/doc/development/contributing/issue_workflow.md
+++ b/doc/development/contributing/issue_workflow.md
@@ -92,9 +92,6 @@ The following team labels are **true** teams per our [organization structure](ht
The descriptions on the [labels page](https://gitlab.com/gitlab-org/gitlab-ce/-/labels) explain what falls under the
responsibility of each team.
-Within those team labels, we also have the ~backend and ~frontend labels to
-indicate if an issue needs backend work, frontend work, or both.
-
Team labels are always capitalized so that they show up as the first label for
any issue.
@@ -107,15 +104,6 @@ The current stage labels can be found by [searching the labels list for `devops:
These labels are [scoped labels](../../user/project/labels.md#scoped-labels-premium)
and thus are mutually exclusive.
-They differ from the [Team labels](#team-labels) because teams may work on
-issues outside their stage.
-
-Normally there is a 1:1 relationship between Stage labels and Team labels, but
-any issue can be picked up by any team, depending on current priorities.
-So, an issue labeled ~"devops:create" may be scheduled by the ~Plan team, for
-example. In such cases, it's usual to include both team labels so each team can
-be aware of the progress.
-
The Stage labels are used to generate the [direction pages][direction-pages] automatically.
[devops-stages]: https://about.gitlab.com/direction/#devops-stages
@@ -130,9 +118,16 @@ The current group labels can be found by [searching the labels list for `group::
These labels are [scoped labels](../../user/project/labels.md#scoped-labels-premium)
and thus are mutually exclusive.
-Groups are nested beneath a particular stage, so only one stage label and one group label
-can be applied to a single issue. You can find the groups listed in the
-[Product Categories pages][product-categories].
+You can find the groups listed in the [Product Stages, Groups, and Categories][product-categories] page.
+
+We use the term group to map product requirements down from our product stages.
+Because a team needs a way to collect the work its members plan to be assigned, we use the `~group::` labels to do so.
+
+Normally there is a 1:1 relationship between Stage labels and Group labels. In the spirit of "Everyone can contribute",
+any issue can be picked up by any group, depending on current priorities. For example, an issue labeled ~"devops::create" may be picked up by the ~"group::access" group.
+
+We also use stage and group labels to help quantify our [throughput](https://about.gitlab.com/handbook/engineering/management/throughput).
+Please read [Stage and Group labels in Throughput](https://about.gitlab.com/handbook/engineering/management/throughput/#stage-and-group-labels-in-throughput) for more information on how the labels are used in this context.
[structure-groups]: https://about.gitlab.com/company/team/structure/#groups
[product-categories]: https://about.gitlab.com/handbook/product/categories/
diff --git a/doc/development/fe_guide/event_tracking.md b/doc/development/fe_guide/event_tracking.md
index 716f6ad7f92..1e6287d8f6d 100644
--- a/doc/development/fe_guide/event_tracking.md
+++ b/doc/development/fe_guide/event_tracking.md
@@ -1,79 +1,76 @@
# Event Tracking
-We use [Snowplow](https://github.com/snowplow/snowplow) for tracking custom events (available in GitLab [Enterprise Edition](https://about.gitlab.com/pricing/) only).
+We use a tracking interface that wraps up [Snowplow](https://github.com/snowplow/snowplow) for tracking custom events. Snowplow implements page tracking, but also exposes custom event tracking.
-## Generic tracking function
-
-In addition to Snowplow's built-in method for tracking page views, we use a generic tracking function which enables us to selectively apply listeners to events.
-
-The generic tracking function can be imported in EE-specific JS files as follows:
+The tracking interface can be imported in JS files as follows:
```javascript
-import { trackEvent } from `ee/stats`;
+import Tracking from `~/tracking`;
```
-This gives the user access to the `trackEvent` method, which takes the following parameters:
+## Tracking in HAML or Vue templates
-| parameter | type | description | required |
-| ---------------- | ------ | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------- |
-| `category` | string | Describes the page that you're capturing click events on. Unless infeasible, please use the Rails page attribute `document.body.dataset.page` by default. | true |
-| `eventName` | string | Describes the action the user is taking. The first word should always describe the action. For example, clicks should be `click` and activations should be `activate`. Use underscores to describe what was acted on. For example, activating a form field would be `activate_form_input`. Clicking on a dropdown is `click_dropdown`. | true |
-| `additionalData` | object | Additional data such as `label`, `property`, and `value` as described [in our Feature Instrumentation taxonomy](https://about.gitlab.com/handbook/product/feature-instrumentation/#taxonomy). | false |
+To avoid having to create a bunch of custom JavaScript event handlers, when working within HAML or Vue templates we can add `data-track-*` attributes to elements of interest. This way, all elements that have a `data-track-event` attribute automatically have event tracking bound.
-Read more about instrumentation and the taxonomy in the [Product Handbook](https://about.gitlab.com/handbook/product/feature-instrumentation).
-
-### Tracking in `.js` and `.vue` files
+Below is an example of `data-track-*` attributes assigned to a button in HAML:
-The most simple use case is to add tracking programmatically to an event of interest in Javascript.
+```haml
+%button.btn{ data: { track_event: "click_button", track_label: "template_preview", track_property: "my-template", track_value: "" } }
+```
-The following example demonstrates how to track a click on a button in Javascript by calling the `trackEvent` method explicitly:
+We can then set up tracking for large sections of a page, or for an entire page, by telling the Tracking interface to bind to it.
```javascript
-import { trackEvent } from `ee/stats`;
+import Tracking from '~/tracking';
-trackEvent('dashboard:projects:index', 'click_button', {
- label: 'create_from_template',
- property: 'template_preview',
- value: 'rails',
+// for the entire document
+new Tracking().bind();
+
+// for a container element
+document.addEventListener('DOMContentLoaded', () => {
+ new Tracking('my_category').bind(document.getElementById('my-container'));
});
+
```
-### Tracking in HAML templates
+When you instantiate a Tracking instance you can provide a category. If none is provided, `document.body.dataset.page` will be used. When you bind the Tracking instance you can provide an element. If no element is provided to bind to, the `document` is assumed.
-Sometimes we want to track clicks for multiple elements on a page. Creating event handlers for all elements could soon turn into a tedious task.
+Below is a list of supported `data-track-*` attributes:
-There's a more convenient solution to this problem. When working with HAML templates, we can add `data-track-*` attributes to elements of interest. This way, all elements that have both `data-track-label` and `data-track-event` attributes assigned get marked for event tracking. All we have to do is call the `bindTrackableContainer` method on a container which allows for better scoping.
+| attribute | required | description |
+|:----------------------|:---------|:------------|
+| `data-track-event` | true | Action the user is taking. Clicks should be `click` and activations should be `activate`, so for example, focusing a form field would be `activate_form_input`, and clicking a button would be `click_button`. |
+| `data-track-label` | false | The `label` as described [in our Feature Instrumentation taxonomy](https://about.gitlab.com/handbook/product/feature-instrumentation/#taxonomy) |
+| `data-track-property` | false | The `property` as described [in our Feature Instrumentation taxonomy](https://about.gitlab.com/handbook/product/feature-instrumentation/#taxonomy) |
+| `data-track-value` | false | The `value` as described [in our Feature Instrumentation taxonomy](https://about.gitlab.com/handbook/product/feature-instrumentation/#taxonomy). If omitted, this will be the element's `value` property or an empty string. For checkboxes, the default value will be the element's checked attribute or `false` when unchecked. |
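+For completeness, a hypothetical snippet showing the same attributes in a Vue (or plain HTML)
+template; the attribute values here are illustrative:
+
+```html
+<!-- Any element with data-track-event inside a bound container is tracked -->
+<button
+  class="btn"
+  data-track-event="click_button"
+  data-track-label="template_preview"
+  data-track-property="my-template"
+>
+  Preview
+</button>
+```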
-Below is an example of `data-track-*` attributes assigned to a button in HAML:
-```ruby
-%button.btn{ data: { track_label: "template_preview", track_property: "my-template", track_event: "click_button", track_value: "" } }
-```
-
-By calling `bindTrackableContainer('.my-container')`, click handlers get bound to all elements located in `.my-container` provided that they have the necessary `data-track-*` attributes assigned to them.
+## Tracking in raw JavaScript
-```javascript
-import Stats from 'ee/stats';
+Custom events can be tracked by directly calling the `Tracking.event` static function, which accepts the following arguments:
-document.addEventListener('DOMContentLoaded', () => {
- Stats.bindTrackableContainer('.my-container', 'category');
-});
-```
+| argument | type | default value | description |
+|:-----------|:-------|:---------------------------|:------------|
+| `category` | string | document.body.dataset.page | Page or subsection of a page that events are being captured within. |
+| `event` | string | 'generic' | Action the user is taking. Clicks should be `click` and activations should be `activate`, so for example, focusing a form field would be `activate_form_input`, and clicking a button would be `click_button`. |
+| `data` | object | {} | Additional data such as `label`, `property`, and `value` as described [in our Feature Instrumentation taxonomy](https://about.gitlab.com/handbook/product/feature-instrumentation/#taxonomy). These will be set as empty strings if you don't provide them. |
-The second parameter in `bindTrackableContainer` is optional. If omitted, the value of `document.body.dataset.page` will be used as category instead.
+Tracking can be added programmatically to an event of interest in JavaScript, and the following example demonstrates tracking a click on a button by calling `Tracking.event` manually.
-Below is a list of supported `data-track-*` attributes:
+```javascript
+import Tracking from `~/tracking`;
-| attribute | description | required |
-| --------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------- |
-| `data-track-label` | The `label` in `trackEvent` | true |
-| `data-track-event` | The `eventName` in `trackEvent` | true |
-| `data-track-property` | The `property` in `trackEvent`. If omitted, an empty string will be used as a default value. | false |
-| `data-track-value` | The `value` in `trackEvent`. If omitted, this will be `target.value` or empty string. For checkboxes, the default value being tracked will be the element's checked attribute if `data-track-value` is omitted. | false |
+document.getElementById('my_button').addEventListener('click', () => {
+ Tracking.event('dashboard:projects:index', 'click_button', {
+ label: 'create_from_template',
+ property: 'template_preview',
+ value: 'rails',
+ });
+})
+```
-Since Snowplow is an Enterprise Edition feature, it's necessary to create a CE backport when adding `data-track-*` attributes to HAML templates in most cases.
-## Testing
+## Toggling tracking on or off
Snowplow can be enabled by navigating to:
diff --git a/doc/development/testing_guide/end_to_end/index.md b/doc/development/testing_guide/end_to_end/index.md
index 2dc06ba10a5..882e2230636 100644
--- a/doc/development/testing_guide/end_to_end/index.md
+++ b/doc/development/testing_guide/end_to_end/index.md
@@ -65,28 +65,24 @@ Below you can read more about how to use it and how does it work.
Currently, we are using _multi-project pipeline_-like approach to run QA
pipelines.
-![QA on merge requests CI/CD architecture](../img/qa_on_merge_requests_cicd_architecture.png)
-
-<details>
-<summary>Show mermaid source</summary>
-<pre>
+```mermaid
graph LR
A1 -.->|1. Triggers an omnibus-gitlab pipeline and wait for it to be done| A2
- B2[<b>`Trigger-qa` stage</b><br />`Trigger:qa-test` job] -.->|2. Triggers a gitlab-qa pipeline and wait for it to be done| A3
+ B2[`Trigger-qa` stage<br>`Trigger:qa-test` job] -.->|2. Triggers a gitlab-qa pipeline and wait for it to be done| A3
-subgraph gitlab-ce/ee pipeline
- A1[<b>`test` stage</b><br />`package-and-qa` job]
+subgraph "gitlab-ce/ee pipeline"
+ A1[`test` stage<br>`package-and-qa` job]
end
-subgraph omnibus-gitlab pipeline
- A2[<b>`Trigger-docker` stage</b><br />`Trigger:gitlab-docker` job] -->|once done| B2
+subgraph "omnibus-gitlab pipeline"
+ A2[`Trigger-docker` stage<br>`Trigger:gitlab-docker` job] -->|once done| B2
end
-subgraph gitlab-qa pipeline
- A3>QA jobs run] -.->|3. Reports back the pipeline result to the `package-and-qa` job<br />and post the result on the original commit tested| A1
+subgraph "gitlab-qa pipeline"
+ A3>QA jobs run] -.->|3. Reports back the pipeline result to the `package-and-qa` job<br>and post the result on the original commit tested| A1
end
-</pre>
-</details>
+```
+
1. Developer triggers a manual action, that can be found in CE / EE merge
requests. This starts a chain of pipelines in multiple projects.
diff --git a/doc/development/testing_guide/end_to_end/quick_start_guide.md b/doc/development/testing_guide/end_to_end/quick_start_guide.md
index 14a169dcc1d..e1df8be8b6f 100644
--- a/doc/development/testing_guide/end_to_end/quick_start_guide.md
+++ b/doc/development/testing_guide/end_to_end/quick_start_guide.md
@@ -110,7 +110,7 @@ end
```
> Notice that the test itself is simple. The most challenging part is the creation of the application state, which will be covered later.
-
+>
> The exemplified test case's MVC is not enough for the change to be merged, but it helps to build up the test logic. The reason is that we do not want to use locators directly in the tests, and tests **must** use [Page Objects] before they can be merged. This way we better separate the responsibilities, where the Page Objects encapsulate elements and methods that allow us to interact with pages, while the spec files describe the test cases in more business-related language.
Below are the steps that the test covers:
@@ -211,7 +211,7 @@ A pre-condition for the entire test suite is defined in the `before :context` bl
> For our test suite, due to the need of the tests being completely independent of each other, we won't use the `before :context` block. The `before :context` block would make the tests dependent on each other because the first test changes the label of the issue, and the second one depends on the `'animal::fox'` label being set.
-> **Tip:** In case of a test suite with only one `it` block it's ok to use only the `before` block (see below) with all the test's pre-conditions.
+TIP: **Tip:** In case of a test suite with only one `it` block it's ok to use only the `before` block (see below) with all the test's pre-conditions.
#### `before`
@@ -274,11 +274,11 @@ end
In the `before` block we create all the application state needed for the tests to run. We do that by using the `Runtime::Browser.visit` method to go to the login page, by performing a `sign_in_using_credentials` from the `Login` Page Object, by fabricating resources via APIs (`issue`, and `Resource::Label`), and by using the `issue.visit!` to visit the issue page.
> A project is created in the background by creating the `issue` resource.
-
+>
> When creating the [Resources], notice that when calling the `fabricate_via_api` method, we pass some attribute:values, like `title`, and `labels` for the `issue` resource; and `project` and `title` for the `label` resource.
-
+>
> What's important to understand here is that by creating the application state mostly using the public APIs we save a lot of time in the test suite setup stage.
-
+>
> Soon we will cover the use of the already existing resources' methods and the creation of your own `fabricate_via_api` methods for resources where this is still not available, but first, let's optimize our implementation.
### 6. Optimization
@@ -362,7 +362,7 @@ First, in the [issue resource](https://gitlab.com/gitlab-org/gitlab-ee/blob/d358
Add the following `attribute :id` and `attribute :labels` right above the [`attribute :title`](https://gitlab.com/gitlab-org/gitlab-ee/blob/d3584e80b4236acdf393d815d604801573af72cc/qa/qa/resource/issue.rb#L15).
> This line is needed to allow for the issue fabrication, and for labels to be automatically added to the issue when fabricating it via API.
-
+>
> We add the attributes above the existing attribute to keep them alphabetically organized.
Then, let's initialize an instance variable for labels to allow an empty array as default value when such information is not passed during the resource fabrication, since this optional. [Between the attributes and the `fabricate!` method](https://gitlab.com/gitlab-org/gitlab-ee/blob/1a1f1408728f19b2aa15887cd20bddab7e70c8bd/qa/qa/resource/issue.rb#L18), add the following:
@@ -437,7 +437,7 @@ By defining the `resource_web_url(resource)` method, we override the one from th
By defining the `api_get_path` method, we **would** allow for the [`ApiFabricator`](https://gitlab.com/gitlab-org/gitlab-ee/blob/master/qa/qa/resource/api_fabricator.rb) module to know which path to use to get a single label, but since there's no path available for that in the public API, we raise a `NotImplementedError` instead.
-By defining the `api_post_path` method, we allow for the [`ApiFabricator `](https://gitlab.com/gitlab-org/gitlab-ee/blob/master/qa/qa/resource/api_fabricator.rb) module to know which path to use to create a new label in a specific project.
+By defining the `api_post_path` method, we allow for the [`ApiFabricator`](https://gitlab.com/gitlab-org/gitlab-ee/blob/master/qa/qa/resource/api_fabricator.rb) module to know which path to use to create a new label in a specific project.
By defining the `api_post_body` method, we allow for the [`ApiFabricator.api_post`](https://gitlab.com/gitlab-org/gitlab-ee/blob/a9177ca1812bac57e2b2fa4560e1d5dd8ffac38b/qa/qa/resource/api_fabricator.rb#L68) method to know which data to send when making the `POST` request.
@@ -580,7 +580,7 @@ filter_output = search_field_tag search_id, nil, class: "dropdown-input-field",
> `data-qa-*` data attributes and CSS classes starting with `qa-` are used solely for the purpose of QA and testing.
> By defining these, we add **testability** to the application.
-
+>
> When defining a data attribute like: `qa_selector: 'labels_block'`, it should match the element definition: `element :labels_block`. We use a [sanity test](https://gitlab.com/gitlab-org/gitlab-ce/tree/master/qa/qa/page#how-did-we-solve-fragile-tests-problem) to check that defined elements have their respective selectors in the specified views.
#### Updates in the `QA::Page::Base` class
@@ -599,8 +599,6 @@ This method receives an element (`name`) and the `keys` that it will send to tha
As you might remember, in the Issue Page Object we call this method like this: `send_keys_to_element(:dropdown_input_field, [label, :enter])`.
-___
-
With that, you should be able to start writing end-to-end tests yourself. *Congratulations!*
[Page Objects]: page_objects.md
diff --git a/doc/development/testing_guide/end_to_end/style_guide.md b/doc/development/testing_guide/end_to_end/style_guide.md
index 6a888142575..97560e616a1 100644
--- a/doc/development/testing_guide/end_to_end/style_guide.md
+++ b/doc/development/testing_guide/end_to_end/style_guide.md
@@ -45,7 +45,7 @@ Notice that in the above example, before clicking the `:operations_environments_
> We can create these methods as helpers to abstract multi-step navigation.
-### Element naming convention
+## Element naming convention
When adding new elements to a page, it's important that we have a uniform element naming convention.
@@ -67,7 +67,7 @@ We follow a simple formula roughly based on hungarian notation.
*Note: This list is a work in progress. This list will eventually be the end-all enumeration of all available types.
I.e., any element that does not end with something in this list is bad form.*
-#### Examples
+### Examples
**Good**
@@ -98,3 +98,47 @@ view '...' do
element :ssh_clone_url
end
```
+
+## Block argument naming
+
+To have a standard on how we call pages when using the `.perform` method, we use the name of the page object being called, all lowercase and separated by underscores if needed (see the good and bad examples below). This also applies to resources. We chose not to simply use `page` because that would shadow the Capybara DSL, potentially leading to confusion and bugs.
+
+### Examples
+
+**Good**
+
+```ruby
+# qa/specs/features/browser_ui/1_manage/project/add_project_member_spec.rb
+
+Page::Project::Settings::Members.perform do |members|
+ members.do_something
+end
+```
+
+```ruby
+# qa/specs/features/ee/browser_ui/3_create/merge_request/add_batch_comments_in_merge_request_spec.rb
+
+Resource::MergeRequest.fabricate! do |merge_request|
+ merge_request.do_something_else
+end
+```
+
+**Bad**
+
+```ruby
+# qa/specs/features/browser_ui/1_manage/project/add_project_member_spec.rb
+
+Page::Project::Settings::Members.perform do |project_settings_members_page|
+ project_settings_members_page.do_something
+end
+```
+
+```ruby
+# qa/specs/features/ee/browser_ui/3_create/merge_request/add_batch_comments_in_merge_request_spec.rb
+
+Resource::MergeRequest.fabricate! do |merge_request_page|
+ merge_request_page.do_something_else
+end
+```
+
+> Besides the advantage of having a standard in place, following it also results in shorter lines of code.
\ No newline at end of file
diff --git a/doc/development/testing_guide/img/qa_on_merge_requests_cicd_architecture.png b/doc/development/testing_guide/img/qa_on_merge_requests_cicd_architecture.png
deleted file mode 100644
index 5b93a05db96..00000000000
--- a/doc/development/testing_guide/img/qa_on_merge_requests_cicd_architecture.png
+++ /dev/null
Binary files differ
diff --git a/doc/development/testing_guide/img/review_apps_cicd_architecture.png b/doc/development/testing_guide/img/review_apps_cicd_architecture.png
deleted file mode 100644
index 1ee28d3db91..00000000000
--- a/doc/development/testing_guide/img/review_apps_cicd_architecture.png
+++ /dev/null
Binary files differ
diff --git a/doc/development/testing_guide/review_apps.md b/doc/development/testing_guide/review_apps.md
index 96761622cfe..7843fc4c874 100644
--- a/doc/development/testing_guide/review_apps.md
+++ b/doc/development/testing_guide/review_apps.md
@@ -8,38 +8,33 @@ Review Apps are automatically deployed by each pipeline, both in
### CI/CD architecture diagram
-![Review Apps CI/CD architecture](img/review_apps_cicd_architecture.png)
-
-<details>
-<summary>Show mermaid source</summary>
-<pre>
+```mermaid
graph TD
build-qa-image -.->|once the `prepare` stage is done| gitlab:assets:compile
review-build-cng -->|triggers a CNG-mirror pipeline and wait for it to be done| CNG-mirror
review-build-cng -.->|once the `test` stage is done| review-deploy
review-deploy -.->|once the `review` stage is done| review-qa-smoke
-subgraph 1. gitlab-ce/ee `prepare` stage
+subgraph "1. gitlab-ce/ee `prepare` stage"
build-qa-image
end
-subgraph 2. gitlab-ce/ee `test` stage
+subgraph "2. gitlab-ce/ee `test` stage"
gitlab:assets:compile -->|plays dependent job once done| review-build-cng
end
-subgraph 3. gitlab-ce/ee `review` stage
- review-deploy["review-deploy<br /><br />Helm deploys the Review App using the Cloud<br/>Native images built by the CNG-mirror pipeline.<br /><br />Cloud Native images are deployed to the `review-apps-ce` or `review-apps-ee`<br />Kubernetes (GKE) cluster, in the GCP `gitlab-review-apps` project."]
+subgraph "3. gitlab-ce/ee `review` stage"
+ review-deploy["review-deploy<br><br>Helm deploys the Review App using the Cloud<br/>Native images built by the CNG-mirror pipeline.<br><br>Cloud Native images are deployed to the `review-apps-ce` or `review-apps-ee`<br>Kubernetes (GKE) cluster, in the GCP `gitlab-review-apps` project."]
end
-subgraph 4. gitlab-ce/ee `qa` stage
- review-qa-smoke[review-qa-smoke<br /><br />gitlab-qa runs the smoke suite against the Review App.]
+subgraph "4. gitlab-ce/ee `qa` stage"
+ review-qa-smoke[review-qa-smoke<br><br>gitlab-qa runs the smoke suite against the Review App.]
end
-subgraph CNG-mirror pipeline
+subgraph "CNG-mirror pipeline"
CNG-mirror>Cloud Native images are built];
end
-</pre>
-</details>
+```
### Detailed explanation
@@ -115,6 +110,28 @@ On every [pipeline][gitlab-pipeline] in the `qa` stage, the
browser performance testing using a
[Sitespeed.io Container](../../user/project/merge_requests/browser_performance_testing.md).
+## Cluster configuration
+
+### Node pools
+
+Both `review-apps-ce` and `review-apps-ee` clusters are currently set up with
+two node pools:
+
+- a node pool of non-preemptible `n1-standard-2` (2 vCPU, 7.5 GB memory) nodes
+ dedicated to the `tiller` deployment (see below) with a single node.
+- a node pool of preemptible `n1-standard-2` (2 vCPU, 7.5 GB memory) nodes,
+ with a minimum of 1 node and a maximum of 250 nodes.
+
+### Helm/Tiller
+
+The `tiller` deployment (the Helm server) is deployed to a dedicated node pool
+that has the `app=helm` label and a specific
+[taint](https://kubernetes.io/docs/concepts/configuration/taint-and-toleration/)
+to prevent other pods from being scheduled on this node pool.
+
+This is to ensure Tiller isn't affected by "noisy" neighbors that could put
+their node under pressure.
+
## How to:
### Log into my Review App
@@ -241,15 +258,6 @@ thousands of unused Docker images.**
CNG-mirror project to store these Docker images so that we can just wipe out
the registry at some point, and use a new fresh, empty one.
-**How big are the Kubernetes clusters (`review-apps-ce` and `review-apps-ee`)?**
-
- > The clusters are currently set up with a single pool of preemptible nodes,
- with a minimum of 1 node and a maximum of 500 nodes.
-
-**What are the machine running on the cluster?**
-
- > We're currently using `n1-standard-1` (1 vCPU, 3.75 GB memory) machines.
-
**How do we secure this from abuse? Apps are open to the world so we need to
find a way to limit it to only us.**
diff --git a/doc/development/what_requires_downtime.md b/doc/development/what_requires_downtime.md
index 24edd05da2f..f0da1cc2ddc 100644
--- a/doc/development/what_requires_downtime.md
+++ b/doc/development/what_requires_downtime.md
@@ -140,7 +140,7 @@ done without requiring downtime. However, this does require that any application
changes are deployed _first_. Thus, changing the constraints of a column should
happen in a post-deployment migration.
NOTE: Avoid using `change_column` as it produces an inefficient query because it re-defines
-the whole column type. For example, to add a NOT NULL constraint, prefer `change_column_null `
+the whole column type. For example, to add a NOT NULL constraint, prefer `change_column_null`
## Changing Column Types
diff --git a/doc/gitlab-basics/create-project.md b/doc/gitlab-basics/create-project.md
index 1833b27a292..8bbaf5d1927 100644
--- a/doc/gitlab-basics/create-project.md
+++ b/doc/gitlab-basics/create-project.md
@@ -53,7 +53,7 @@ There are two types of project templates:
- [Built-in templates](#built-in-templates), sourced from the following groups:
- [`project-templates`](https://gitlab.com/gitlab-org/project-templates)
- [`pages`](https://gitlab.com/pages)
-- [Custom project templates](#custom-project-templates-premium-only), for custom templates
+- [Custom project templates](#custom-project-templates-premium), for custom templates
configured by GitLab administrators and users.
#### Built-in templates
@@ -78,7 +78,7 @@ You can improve the existing built-in templates or contribute new ones in the
[`project-templates`](https://gitlab.com/gitlab-org/project-templates) and
[`pages`](https://gitlab.com/pages) groups.
-#### Custom project templates **(PREMIUM ONLY)**
+#### Custom project templates **(PREMIUM)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab-ee/issues/6860) in
[GitLab Premium](https://about.gitlab.com/pricing) 11.2.
diff --git a/doc/install/installation.md b/doc/install/installation.md
index 46ee980841b..a12edef4e41 100644
--- a/doc/install/installation.md
+++ b/doc/install/installation.md
@@ -938,7 +938,7 @@ To use GitLab with Puma:
cd /home/git/gitlab
# Copy config file for the web server
- sudo -u git -H config/puma.rb.example config/puma.rb
+ sudo -u git -H cp config/puma.rb.example config/puma.rb
```
1. Edit the system `init.d` script to use `EXPERIMENTAL_PUMA=1` flag. If you have `/etc/default/gitlab`, then you should edit it instead.
diff --git a/doc/install/requirements.md b/doc/install/requirements.md
index 25ab608de3a..cfabc09646d 100644
--- a/doc/install/requirements.md
+++ b/doc/install/requirements.md
@@ -62,17 +62,19 @@ NOTE: **Note:** Since file system performance may affect GitLab's overall perfor
### CPU
+This is the recommended minimum hardware for a handful of example GitLab user base sizes. Your exact needs may be greater, depending on your workload. Your workload is influenced by factors including, but not limited to, how active your users are, how much automation you use, mirroring, and repo/change size.
+
- 1 core supports up to 100 users but the application can be a bit slower due to having all workers and background jobs running on the same core
-- **2 cores** is the **recommended** number of cores and supports up to 500 users
-- 4 cores supports up to 2,000 users
-- 8 cores supports up to 5,000 users
-- 16 cores supports up to 10,000 users
-- 32 cores supports up to 20,000 users
-- 64 cores supports up to 40,000 users
-- More users? Run it on [multiple application servers](https://about.gitlab.com/high-availability/)
+- **2 cores** is the **recommended** minimum number of cores and supports up to 100 users
+- 4 cores supports up to 500 users
+- 8 cores supports up to 1,000 users
+- 32 cores supports up to 5,000 users
+- More users? Run it in a high-availability configuration on [multiple application servers](https://about.gitlab.com/high-availability/)
### Memory
+This is the recommended minimum hardware for a handful of example GitLab user base sizes. Your exact needs may be greater, depending on your workload. Your workload is influenced by factors including, but not limited to, how active your users are, how much automation you use, mirroring, and repo/change size.
+
You need at least 8GB of addressable memory (RAM + swap) to install and use GitLab!
The operating system and any other running applications will also be using memory
so keep in mind that you need at least 4GB available before running GitLab. With
@@ -80,13 +82,11 @@ less memory GitLab will give strange errors during the reconfigure run and 500
errors during usage.
- 4GB RAM + 4GB swap supports up to 100 users but it will be very slow
-- **8GB RAM** is the **recommended** memory size for all installations and supports up to 100 users
-- 16GB RAM supports up to 2,000 users
-- 32GB RAM supports up to 4,000 users
-- 64GB RAM supports up to 8,000 users
-- 128GB RAM supports up to 16,000 users
-- 256GB RAM supports up to 32,000 users
-- More users? Run it on [multiple application servers](https://about.gitlab.com/high-availability/)
+- **8GB RAM** is the **recommended** minimum memory size for all installations and supports up to 100 users
+- 16GB RAM supports up to 500 users
+- 32GB RAM supports up to 1,000 users
+- 128GB RAM supports up to 5,000 users
+- More users? Run it in a high-availability configuration on [multiple application servers](https://about.gitlab.com/high-availability/)
We recommend having at least [2GB of swap on your server](https://askubuntu.com/a/505344/310789), even if you currently have
enough available RAM. Having swap will help reduce the chance of errors occurring
@@ -146,8 +146,8 @@ CREATE EXTENSION postgres_fdw;
## Unicorn Workers
-For most instances we recommend using: CPU cores + 1 = unicorn workers.
-So for a machine with 2 cores, 3 unicorn workers is ideal.
+For most instances we recommend using: (CPU cores * 1.5) + 1 = unicorn workers.
+For example, a node with 4 cores would have 7 unicorn workers.
For all machines that have 2GB and up we recommend a minimum of three unicorn workers.
If you have a 1GB machine we recommend to configure only two Unicorn workers to prevent excessive swapping.
diff --git a/doc/integration/elasticsearch.md b/doc/integration/elasticsearch.md
index 626bd259ed6..1c80fc543af 100644
--- a/doc/integration/elasticsearch.md
+++ b/doc/integration/elasticsearch.md
@@ -24,18 +24,21 @@ special searches:
## Installing Elasticsearch
Elasticsearch is _not_ included in the Omnibus packages. You will have to
-install it yourself whether you are using the Omnibus package or installed
-GitLab from source. Providing detailed information on installing Elasticsearch
-is out of the scope of this document.
+[install it yourself](https://www.elastic.co/guide/en/elasticsearch/reference/current/install-elasticsearch.html "Elasticsearch installation documentation")
+whether you are using the Omnibus package or installed GitLab from source.
+Providing detailed information on installing Elasticsearch is out of the scope
+of this document.
+
+NOTE: **Note:**
+Elasticsearch should be installed on a separate server, whether you install
+it yourself or by using the
+[Amazon Elasticsearch](http://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-gsg.html)
+service. Running Elasticsearch on the same server as GitLab is not recommended
+and it will likely cause performance degradation on the GitLab installation.
Once the data is added to the database or repository and [Elasticsearch is
enabled in the admin area](#enabling-elasticsearch) the search index will be
-updated automatically. Elasticsearch can be installed on the same machine as
-GitLab or on a separate server, or you can use the [Amazon Elasticsearch](http://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-gsg.html)
-service.
-
-You can follow the steps as described in the [official web site](https://www.elastic.co/guide/en/elasticsearch/reference/current/install-elasticsearch.html "Elasticsearch installation documentation") or
-use the packages that are available for your OS.
+updated automatically.
## Elasticsearch repository indexer (beta)
diff --git a/doc/raketasks/backup_restore.md b/doc/raketasks/backup_restore.md
index 0d86df04367..f8da09e5fe1 100644
--- a/doc/raketasks/backup_restore.md
+++ b/doc/raketasks/backup_restore.md
@@ -960,3 +960,22 @@ want to run the chown against your custom location instead of
[reconfigure GitLab]: ../administration/restart_gitlab.md#omnibus-gitlab-reconfigure
[restart GitLab]: ../administration/restart_gitlab.md#installations-from-source
+
+### Backup fails to complete with Gzip error
+
+While running the backup, you may receive a gzip error:
+
+```sh
+sudo /opt/gitlab/bin/gitlab-rake gitlab:backup:create
+Dumping ...
+...
+gzip: stdout: Input/output error
+
+Backup failed
+```
+
+If this happens, check the following:
+
+1. Confirm there is sufficient disk space for the gzip operation (a quick check is shown below).
+1. If NFS is being used, check if the mount option `timeo` is set. The default is `600`, and changing this to smaller values has resulted in this error.
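+
+One way to check both conditions from the GitLab server is shown below. The
+backup path `/var/opt/gitlab/backups` is the Omnibus default; adjust it if you
+have configured `gitlab_rails['backup_path']` differently.
+
+```sh
+# Check free space on the volume that holds the backups
+df -h /var/opt/gitlab/backups
+
+# If the backup path is on NFS, inspect the mount options and look for `timeo`
+mount | grep nfs
+```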
+
diff --git a/doc/security/README.md b/doc/security/README.md
index c48d5bc2065..5d498ac7602 100644
--- a/doc/security/README.md
+++ b/doc/security/README.md
@@ -7,7 +7,7 @@ type: index
- [Password length limits](password_length_limits.md)
- [Restrict SSH key technologies and minimum length](ssh_keys_restrictions.md)
-- [Rack attack](rack_attack.md)
+- [Rate limits](rate_limits.md)
- [Webhooks and insecure internal web services](webhooks.md)
- [Information exclusivity](information_exclusivity.md)
- [Reset your root password](reset_root_password.md)
diff --git a/doc/security/img/outbound_requests_section.png b/doc/security/img/outbound_requests_section.png
deleted file mode 100644
index f7783f34cdd..00000000000
--- a/doc/security/img/outbound_requests_section.png
+++ /dev/null
Binary files differ
diff --git a/doc/security/img/outbound_requests_section_v12_2.png b/doc/security/img/outbound_requests_section_v12_2.png
new file mode 100644
index 00000000000..4fd3c7d9fce
--- /dev/null
+++ b/doc/security/img/outbound_requests_section_v12_2.png
Binary files differ
diff --git a/doc/security/rack_attack.md b/doc/security/rack_attack.md
index 1e5678ec47c..c772f783f71 100644
--- a/doc/security/rack_attack.md
+++ b/doc/security/rack_attack.md
@@ -2,7 +2,9 @@
type: reference, howto
---
-# Rack Attack
+# Rack Attack initializer
+
+## Overview
[Rack Attack](https://github.com/kickstarter/rack-attack), also known as Rack::Attack, is a Ruby gem
that is meant to protect GitLab with the ability to customize throttling and
@@ -14,19 +16,72 @@ If you find throttling is not enough to protect you against abusive clients,
Rack Attack offers IP whitelisting, blacklisting, Fail2ban style filtering, and
tracking.
-**Note:** Starting with 11.2, Rack Attack is disabled by default. To continue
-using Rack Attack, please enable it by [configuring `gitlab.rb` as described in Settings](#settings).
+For more information on how to use these options see the [Rack Attack README](https://github.com/kickstarter/rack-attack/blob/master/README.md).
+
+NOTE: **Note:** See
+[User and IP rate limits](../user/admin_area/settings/user_and_ip_rate_limits.md)
+for simpler throttles that are configured in UI.
+
+NOTE: **Note:** Starting with 11.2, Rack Attack is disabled by default. If your
+instance is not exposed to the public internet, it is recommended that you leave
+Rack Attack disabled.
+
+## Behavior
+
+If set up as described in the [Settings](#settings) section below, two behaviors
+will be enabled:
+
+- Protected paths will be throttled
+- Failed authentications for Git and container registry requests will trigger a temporary IP ban
+
+### Protected paths throttle
+
+GitLab responds with HTTP status code 429 to POST requests at protected paths
+that exceed 10 requests per minute per IP address.
+
+By default, protected paths are:
+
+```ruby
+default['gitlab']['gitlab-rails']['rack_attack_protected_paths'] = [
+ '/users/password',
+ '/users/sign_in',
+ '/api/#{API::API.version}/session.json',
+ '/api/#{API::API.version}/session',
+ '/users',
+ '/users/confirmation',
+ '/unsubscribes/',
+ '/import/github/personal_access_token'
+]
+```
+
+This header is included in responses to blocked requests:
+
+```
+Retry-After: 60
+```
+
+For example, the following are limited to a maximum of 10 requests per minute:
+
+- user sign-in
+- user sign-up (if enabled)
+- user password reset
+
+After 10 requests, the client must wait a minute before it can try again.
+
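+As a rough way to observe the throttle from a client, you can repeatedly `POST`
+to one of the protected paths on a test instance. The host below is a
+placeholder; with the throttle enabled, the 11th request within a minute should
+be answered with HTTP status code 429 and the `Retry-After` header:
+
+```sh
+# Send 11 POST requests to the sign-in path and print only the status codes.
+# Earlier responses may vary (for example, 422 without a CSRF token), but the
+# final request should be throttled with a 429.
+for i in $(seq 1 11); do
+  curl -s -o /dev/null -w "%{http_code}\n" -X POST "https://gitlab.example.com/users/sign_in"
+done
+```
+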
+### Git and container registry failed authentication ban
+
+GitLab responds with HTTP status code 403 for 1 hour if 30 failed
+authentication requests were received in a 3-minute period from a single IP address.
-By default, user sign-in, user sign-up (if enabled), and user password reset is
-limited to 6 requests per minute. After trying for 6 times, the client will
-have to wait for the next minute to be able to try again.
+This applies only to Git requests and container registry (`/jwt/auth`) requests
+(combined).
-If you installed or upgraded GitLab by following the [official guides](../install/README.md),
-Rack Attack should be disabled by default. If your instance is not exposed to any incoming
-connections, it is recommended that you leave Rack Attack disabled.
+This limit is reset by requests that authenticate successfully. For example, 29
+failed authentication requests followed by 1 successful request, followed by 29
+more failed authentication requests would not trigger a ban.
-For more information on how to use these options check out
-[rack-attack README](https://github.com/kickstarter/rack-attack/blob/master/README.md).
+No response headers are provided.
## Settings
diff --git a/doc/security/rate_limits.md b/doc/security/rate_limits.md
new file mode 100644
index 00000000000..0e5bdcd9c79
--- /dev/null
+++ b/doc/security/rate_limits.md
@@ -0,0 +1,32 @@
+---
+type: reference, howto
+---
+
+# Rate limits
+
+NOTE: **Note:**
+For GitLab.com, please see
+[GitLab.com-specific rate limits](../user/gitlab_com/index.md#gitlabcom-specific-rate-limits).
+
+Rate limiting is a common technique used to improve the security and durability
+of a web application.
+
+For example, a simple script can make thousands of web requests per second.
+Whether the source is malicious, apathetic, or just a bug, your application and infrastructure
+may not be able to cope with the load. For more details, see
+[Denial-of-service attack](https://en.wikipedia.org/wiki/Denial-of-service_attack).
+Most cases can be mitigated by limiting the rate of requests from a single IP address.
+
+Most [brute-force attacks](https://en.wikipedia.org/wiki/Brute-force_attack) are
+similarly mitigated by a rate limit.
+
+## Admin Area settings
+
+See
+[User and IP rate limits](../user/admin_area/settings/user_and_ip_rate_limits.md).
+
+## Rack Attack initializer
+
+This method of rate limiting is cumbersome, but has some advantages. It allows
+throttling of specific paths, and is also integrated into Git and container
+registry requests. See [Rack Attack initializer](rack_attack.md).
diff --git a/doc/security/webhooks.md b/doc/security/webhooks.md
index 1194234a295..7ece9407ac0 100644
--- a/doc/security/webhooks.md
+++ b/doc/security/webhooks.md
@@ -34,15 +34,16 @@ to 127.0.0.1, ::1 and 0.0.0.0, as well as IPv4 10.0.0.0/8, 172.16.0.0/12,
192.168.0.0/16 and IPv6 site-local (ffc0::/10) addresses won't be allowed.
This behavior can be overridden by enabling the option *"Allow requests to the
-local network from hooks and services"* in the *"Outbound requests"* section
+local network from web hooks and services"* in the *"Outbound requests"* section
inside the Admin area under **Settings**
(`/admin/application_settings/network`):
-![Outbound requests admin settings](img/outbound_requests_section.png)
+![Outbound requests admin settings](img/outbound_requests_section_v12_2.png)
->**Note:**
-*System hooks* are exempt from this protection because they are set up by
-admins.
+NOTE: **Note:**
+*System hooks* are enabled to make requests to the local network by default since they are
+set up by administrators. However, you can turn this off by disabling the
+**Allow requests to the local network from system hooks** option.
<!-- ## Troubleshooting
diff --git a/doc/topics/autodevops/index.md b/doc/topics/autodevops/index.md
index 1687a3aee72..63a72f8f193 100644
--- a/doc/topics/autodevops/index.md
+++ b/doc/topics/autodevops/index.md
@@ -1069,6 +1069,12 @@ Container Registry. **Restarting a pod, scaling a service, or other actions whic
require on-going access to the registry may fail**. On-going secure access is
planned for a subsequent release.
+### Private registry support
+
+There is no documented way of using a private container registry with Auto DevOps.
+We strongly advise using the GitLab Container Registry with Auto DevOps in order to
+simplify configuration and prevent any unforeseen issues.
+
## Troubleshooting
- Auto Build and Auto Test may fail in detecting your language/framework. There
diff --git a/doc/topics/git/index.md b/doc/topics/git/index.md
index cdcd8215b23..6a539b526f3 100644
--- a/doc/topics/git/index.md
+++ b/doc/topics/git/index.md
@@ -48,6 +48,7 @@ The following are resources about version control concepts:
The following resources may help you become more efficient at using Git:
+- [Useful Git commands](useful_git_commands.md) collected by the GitLab support team.
- [Git Tips & Tricks](https://about.gitlab.com/2016/12/08/git-tips-and-tricks/)
- [Eight Tips to help you work better with Git](https://about.gitlab.com/2015/02/19/8-tips-to-help-you-work-better-with-git/)
@@ -71,6 +72,7 @@ The following are advanced topics for those who want to get the most out of Git:
- [Custom Git Hooks](../../administration/custom_hooks.md)
- [Git Attributes](../../user/project/git_attributes.md)
- Git Submodules: [Using Git submodules with GitLab CI](../../ci/git_submodules.md#using-git-submodules-with-gitlab-ci)
+- [Partial Clone](partial_clone.md)
## API
@@ -82,6 +84,8 @@ Git-related queries from GitLab.
The following relate to Git Large File Storage:
- [Getting Started with Git LFS](https://about.gitlab.com/2017/01/30/getting-started-with-git-lfs-tutorial/)
-- [GitLab Git LFS documentation](../../workflow/lfs/manage_large_binaries_with_git_lfs.md)
+- [Migrate an existing Git repo with Git LFS](migrate_to_git_lfs/index.md)
+- [GitLab Git LFS user documentation](../../workflow/lfs/manage_large_binaries_with_git_lfs.md)
+- [GitLab Git LFS admin documentation](../../workflow/lfs/lfs_administration.md)
- [Git-Annex to Git-LFS migration guide](../../workflow/lfs/migrate_from_git_annex_to_git_lfs.md)
- [Towards a production quality open source Git LFS server](https://about.gitlab.com/2015/08/13/towards-a-production-quality-open-source-git-lfs-server/)
diff --git a/doc/topics/git/migrate_to_git_lfs/index.md b/doc/topics/git/migrate_to_git_lfs/index.md
new file mode 100644
index 00000000000..c879e404997
--- /dev/null
+++ b/doc/topics/git/migrate_to_git_lfs/index.md
@@ -0,0 +1,174 @@
+---
+type: tutorial, concepts
+description: "How to migrate an existing Git repository to Git LFS with BFG."
+last_updated: 2019-07-11
+---
+
+# Migrate a Git repo into Git LFS with BFG
+
+Using Git LFS can help you to reduce the size of your Git
+repository and improve its performance.
+
+However, simply adding the
+large files that are already in your repository to Git LFS
+will not actually reduce the size of your repository, because
+the files are still referenced by previous commits.
+
+Using the method described in this document, you first migrate
+to Git LFS with [BFG](https://rtyley.github.io/bfg-repo-cleaner/)
+through a mirror repo, then clean up the repository's history,
+and lastly create LFS tracking rules to prevent new binary files
+from being added.
+
+This tutorial was inspired by the guide
+[Use BFG to migrate a repo to Git LFS](https://confluence.atlassian.com/bitbucket/use-bfg-to-migrate-a-repo-to-git-lfs-834233484.html).
+For more information on Git LFS, see the [references](#references)
+below.
+
+CAUTION: **Warning:**
+The method described in this guide rewrites Git history. Make
+sure to back up your repo before beginning and use it at your
+own risk.
+
+## Requirements
+
+Before beginning, make sure:
+
+- You have enough LFS storage for the files you want to convert.
+ Storage is required for the entire history of all files (a rough way to
+ estimate this is shown below).
+- All the team members you share the repository with have pushed all changes.
+ Branches based on the repository before applying this method cannot be merged.
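+
+A rough way to estimate the required LFS storage, assuming the same file
+extensions that are converted in the BFG step later in this guide, is to sum
+the size of all matching blobs across the full history from inside any complete
+clone of the repository:
+
+```sh
+# Sum the size (in MB) of every historical blob whose path matches the
+# extensions that will later be converted to LFS
+git rev-list --objects --all \
+  | grep -E '\.(png|mp4|jpg|gif)$' \
+  | awk '{print $1}' \
+  | git cat-file --batch-check='%(objectsize) %(objectname)' \
+  | awk '{total += $1} END {print total / 1024 / 1024 " MB"}'
+```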
+
+To follow this tutorial, you'll need:
+
+- Maintainer permissions to the existing Git repository
+ you'd like to migrate to LFS with access through the command line.
+- [Git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git)
+ and [Java Runtime Environment](https://www.java.com/en/download/manual.jsp)
+ (Java 7 or above) installed locally.
+- BFG installed locally:
+
+ ```bash
+ brew install bfg
+ ```
+
+- Git LFS installed locally:
+
+ ```bash
+ brew install git-lfs
+ ```
+
+NOTE: **Note:**
+This guide was tested on macOS Mojave.
+
+## Steps
+
+Consider an example upstream project, `git@gitlab.com:gitlab-tests/test-git-lfs-repo-migration.git`.
+
+1. Back up your repository:
+
+ Create a copy of your repository so that you can
+ recover it in case something goes wrong.
+
+1. Clone `--mirror` the repo:
+
+ Cloning with the mirror flag will create a bare repository.
+ This ensures you get all the branches within the repo.
+
+ It creates a directory called `<repo-name>.git`
+ (in our example, `test-git-lfs-repo-migration.git`),
+ mirroring the upstream project:
+
+ ```bash
+ git clone --mirror git@gitlab.com:gitlab-tests/test-git-lfs-repo-migration.git
+ ```
+
+1. Convert the Git history with BFG:
+
+ ```bash
+ bfg --convert-to-git-lfs "*.{png,mp4,jpg,gif}" --no-blob-protection test-git-lfs-repo-migration.git
+ ```
+
+ BFG scans the entire history, looking for any files with
+ those extensions, and converts them to LFS pointers.
+
+1. Clean up the repository:
+
+ ```bash
+ # cd path/to/mirror/repo:
+ cd test-git-lfs-repo-migration.git
+ # clean up the repo:
+ git reflog expire --expire=now --all && git gc --prune=now --aggressive
+ ```
+
+ You can also take a look at how to further [clean the repo](../../../user/project/repository/reducing_the_repo_size_using_git.md),
+ but it's not necessary for the purposes of this guide.
+
+1. Install Git LFS in the mirror repository:
+
+ ```bash
+ git lfs install
+ ```
+
+1. [Unprotect the default branch](../../../user/project/protected_branches.md),
+ so that we can force-push the rewritten repository:
+
+ 1. Navigate to your project's **Settings > Repository** and
+ expand **Protected Branches**.
+ 1. Scroll down to locate the protected branches and click
+ **Unprotect** on the default branch.
+
+1. Force-push to GitLab:
+
+ ```bash
+ git push --force
+ ```
+
+1. Track the files you want with LFS:
+
+ ```bash
+ # cd path/to/upstream/repo:
+ cd test-git-lfs-repo-migration
+ # You may need to reset your local copy with upstream's `master` after force-pushing from the mirror:
+ git reset --hard origin/master
+ # Track the files with LFS:
+ git lfs track "*.gif" "*.png" "*.jpg" "*.psd" "*.mp4" ".gitattributes" "img/"
+ ```
+
+ Now all the existing files you converted, as well as the new
+ ones you add, will be properly tracked with LFS.
+
+1. [Re-protect the default branch](../../../user/project/protected_branches.md):
+
+ 1. Navigate to your project's **Settings > Repository** and
+ expand **Protected Branches**.
+ 1. Select the default branch from the **Branch** dropdown menu,
+ and set up the
+ **Allowed to push** and **Allowed to merge** rules.
+ 1. Click **Protect**.
+
+<!-- ## Troubleshooting
+
+Include any troubleshooting steps that you can foresee. If you know beforehand what issues
+one might have when setting this up, or when something is changed, or on upgrading, it's
+important to describe those, too. Think of things that may go wrong and include them here.
+This is important to minimize requests for support, and to avoid doc comments with
+questions that you know someone might ask.
+
+Each scenario can be a third-level heading, e.g. `### Getting error message X`.
+If you have none to add when creating a doc, leave this section in place
+but commented out to help encourage others to add to it in the future. -->
+
+## References
+
+- [Getting Started with Git LFS](https://about.gitlab.com/2017/01/30/getting-started-with-git-lfs-tutorial/)
+- [Migrate from Git Annex to Git LFS](../../../workflow/lfs/migrate_from_git_annex_to_git_lfs.md)
+- [GitLab's Git LFS user documentation](../../../workflow/lfs/manage_large_binaries_with_git_lfs.md)
+- [GitLab's Git LFS administrator documentation](../../../workflow/lfs/lfs_administration.md)
+- Alternative method to [migrate an existing repo to Git LFS](https://github.com/git-lfs/git-lfs/wiki/Tutorial#migrating-existing-repository-data-to-lfs)
+
+<!--
+Test project:
+https://gitlab.com/gitlab-tests/test-git-lfs-repo-migration
+-->
diff --git a/doc/topics/git/partial_clone.md b/doc/topics/git/partial_clone.md
new file mode 100644
index 00000000000..f2951308ba1
--- /dev/null
+++ b/doc/topics/git/partial_clone.md
@@ -0,0 +1,147 @@
+# Partial Clone for Large Repositories
+
+CAUTION: **Alpha:**
+Partial Clone is an experimental feature, and will significantly increase
+Gitaly resource utilization when performing a partial clone, and decrease
+performance of subsequent fetch operations.
+
+As Git repositories become very large, usability decreases as performance
+decreases. One major challenge is cloning the repository, because Git will
+download the entire repository including every commit and every version of
+every object. This can be slow to transfer, and require large amounts of disk
+space.
+
+Historically, performing a **shallow clone**
+([`--depth`](https://www.git-scm.com/docs/git-clone#Documentation/git-clone.txt---depthltdepthgt))
+has been the only way to reduce the amount of data transferred when cloning
+a Git repository. This does not, however, allow filtering by sub-tree, which is
+important for monolithic repositories containing many projects, or by object
+size, which would prevent unnecessarily large objects from being downloaded.
+
+[Partial clone](https://github.com/git/git/blob/master/Documentation/technical/partial-clone.txt)
+is a performance optimization that "allows Git to function without having a
+complete copy of the repository. The goal of this work is to allow Git better
+handle extremely large repositories."
+
+Specifically, using partial clone, it should be possible for Git to natively
+support:
+
+- large objects, instead of using [Git LFS](https://git-lfs.github.com/)
+- enormous repositories
+
+Briefly, partial clone works by:
+
+- excluding objects from being transferred when cloning or fetching a
+ repository using a new `--filter` flag
+- downloading missing objects on demand
+
+Follow [Git for enormous repositories](https://gitlab.com/groups/gitlab-org/-/epics/773) for roadmap and updates.
+
+## Enabling partial clone
+
+GitLab 12.1 uses Git 2.21.0, which has an arbitrary file access security
+vulnerability when `uploadpack.allowFilter` is enabled, so this option should
+not be enabled in production environments.
+
+A feature flag is planned to enable `uploadpack.allowFilter` and
+`uploadpack.allowAnySHA1InWant` once the version of Git used by GitLab has been
+updated to Git 2.22.0.
+
+Follow [this issue](https://gitlab.com/gitlab-org/gitaly/issues/1553) for
+updates.
+
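+If you want to experiment before that work lands, the two options can be set
+manually on a plain Git server you control (not on a production GitLab
+instance, given the security caveat above). A minimal sketch, applied per
+repository on the server:
+
+```sh
+# Run inside the bare repository served by the Git server (test environments only)
+git config uploadpack.allowFilter true
+git config uploadpack.allowAnySHA1InWant true
+```
+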
+## Excluding objects by size
+
+Partial Clone allows large objects to be stored directly in the Git repository,
+and be excluded from clones as desired by the user. This eliminates the
+error-prone process of deciding which objects should be stored in LFS. Using
+partial clone, all files – large or small – may be treated the same.
+
+With the `uploadpack.allowFilter` and `uploadpack.allowAnySHA1InWant` options
+enabled on the Git server:
+
+```bash
+# clone the repo, excluding blobs larger than 1 megabyte
+git clone --filter=blob:limit=1m <url>
+
+# in the checkout step of the clone, and any subsequent operations
+# any blobs that are needed will be downloaded on demand
+git checkout feature-branch
+```
+
+## Excluding objects by path
+
+Partial Clone allows clones to be filtered by path using a format similar to a
+`.gitignore` file stored inside the repository.
+
+With the `uploadpack.allowFilter` and `uploadpack.allowAnySHA1InWant` options
+enabled on the Git server:
+
+1. **Create a filter spec.** For example, consider a monolithic repository with
+ many applications, each in a different subdirectory in the root. Create a file
+ `shiny-app/.filterspec` using the GitLab web interface:
+
+ ```.gitignore
+ # Only the paths listed in the file will be downloaded when performing a
+ # partial clone using `--filter=sparse:oid=shiny-app/.gitfilterspec`
+
+ # Explicitly include filterspec needed to configure sparse checkout with
+ # git config --local core.sparsecheckout true
+ # git show master:snazzy-app/.gitfilterspec >> .git/info/sparse-checkout
+ shiny-app/.gitfilterspec
+
+ # Shiny App
+ shiny-app/
+
+ # Dependencies
+ shimmery-app/
+ shared-component-a/
+ shared-component-b/
+ ```
+
+2. **Create a new Git repository and fetch.** Support for `--filter=sparse:oid`
+ using the clone command is incomplete, so we will emulate the clone command
+ by hand, using `git init` and `git fetch`. Follow
+ [gitaly#1769](https://gitlab.com/gitlab-org/gitaly/issues/1769) for updates.
+
+ ```bash
+ # Create a new directory for the Git repository
+ mkdir jumbo-repo && cd jumbo-repo
+
+ # Initialize a new Git repository
+ git init
+
+ # Add the remote
+ git remote add origin git@gitlab.com/example/jumbo-repo
+
+ # Enable partial clone support for the remote
+ git config --local extensions.partialClone origin
+
+ # Fetch the filtered set of objects using the filterspec stored on the
+ # server. WARNING: this step is slow!
+ git fetch --filter=sparse:oid=master:shiny-app/.gitfilterspec origin
+
+ # Optional: observe there are missing objects that we have not fetched
+ git rev-list --all --quiet --objects --missing=print | wc -l
+ ```
+
+ CAUTION: **IDE and Shell integrations:**
+ Git integrations with `bash`, `zsh`, etc. and editors that automatically
+ show Git status information often run `git fetch`, which will fetch the
+ entire repository. You may need to disable or reconfigure these
+ integrations.
+
+3. **Sparse checkout** must be enabled and configured to prevent objects from
+ other paths being downloaded automatically when checking out branches. Follow
+ [gitaly#1765](https://gitlab.com/gitlab-org/gitaly/issues/1765) for updates.
+
+ ```bash
+ # Enable sparse checkout
+ git config --local core.sparsecheckout true
+
+ # Configure sparse checkout
+ git show master:snazzy-app/.gitfilterspec >> .git/info/sparse-checkout
+
+ # Checkout master
+ git checkout master
+ ```
diff --git a/doc/topics/git/useful_git_commands.md b/doc/topics/git/useful_git_commands.md
new file mode 100644
index 00000000000..84406805350
--- /dev/null
+++ b/doc/topics/git/useful_git_commands.md
@@ -0,0 +1,210 @@
+---
+type: reference
+---
+
+# Useful Git commands
+
+Here are some useful Git commands collected by the GitLab support team. You may not
+need them often, but they can come in handy when needed.
+
+## Remotes
+
+### Add another URL to a remote, so both remotes get updated on each push
+
+```sh
+git remote set-url --add <remote_name> <remote_url>
+```
+
+## Staging and reverting changes
+
+### Remove last commit and leave the changes in unstaged
+
+```sh
+git reset --soft HEAD^
+```
+
+### Unstage a certain number of commits from HEAD
+
+To unstage 3 commits, for example, run:
+
+```sh
+git reset HEAD~3
+```
+
+### Unstage changes to a certain file from HEAD
+
+```sh
+git reset <filename>
+```
+
+### Revert a file to HEAD state and remove changes
+
+There are two options to revert uncommitted changes to a file:
+
+- `git checkout <filename>` (restores the file content from the index)
+- `git checkout HEAD -- <filename>` (restores the file content from the last commit)
+
+### Undo a previous commit by creating a new replacement commit
+
+```sh
+git revert <commit-sha>
+```
+
+### Create a new message for last commit
+
+```sh
+git commit --amend
+```
+
+### Add a file to the last commit
+
+```sh
+git add <filename>
+git commit --amend
+```
+
+Append `--no-edit` to the `commit` command if you do not want to edit the commit
+message.
+
+## Stashing
+
+### Stash changes
+
+```sh
+git stash save
+```
+
+The default behavior of `stash` is to save, so you can also use just:
+
+```sh
+git stash
+```
+
+### Unstash your changes
+
+```sh
+git stash apply
+```
+
+### Discard your stashed changes
+
+```sh
+git stash drop
+```
+
+### Apply and drop your stashed changes
+
+```sh
+git stash pop
+```
+
+## Refs and Log
+
+### Use reflog to show the log of reference changes to HEAD
+
+```sh
+git reflog
+```
+
+### Check the Git history of a file
+
+The basic command to check the Git history of a file:
+
+```sh
+git log <file>
+```
+
+If you get this error message:
+
+```text
+fatal: ambiguous argument <file_name>: unknown revision or path not in the working tree.
+Use '--' to separate paths from revisions, like this:
+```
+
+Use this to check the Git history of the file:
+
+```sh
+git log -- <file>
+```
+
+### Find the tags that contain a particular SHA
+
+```sh
+git tag --contains <sha>
+```
+
+### Check the content of each change to a file
+
+```sh
+gitk <file>
+```
+
+### Check the content of each change to a file, following it past file renames
+
+```sh
+gitk --follow <file>
+```
+
+## Debugging
+
+### Use a custom SSH key for a git command
+
+```text
+GIT_SSH_COMMAND="ssh -i ~/.ssh/gitlabadmin" git <command>
+```
+
+### Debug cloning
+
+With SSH:
+
+```text
+GIT_SSH_COMMAND="ssh -vvv" git clone <git@url>
+```
+
+With HTTPS:
+
+```text
+GIT_TRACE_PACKET=1 GIT_TRACE=2 GIT_CURL_VERBOSE=1 git clone <url>
+```
+
+## Rebasing
+
+### Rebase your branch onto master
+
+The `-i` flag stands for 'interactive':
+
+```sh
+git rebase -i master
+```
+
+### Continue the rebase if paused
+
+```sh
+git rebase --continue
+```
+
+### Use git rerere
+
+To _reuse_ recorded resolutions of previously resolved conflicts when they occur again:
+
+```sh
+git rerere
+```
+
+To enable `rerere` functionality:
+
+```sh
+git config --global rerere.enabled true
+```
+
+<!-- ## Troubleshooting
+
+Include any troubleshooting steps that you can foresee. If you know beforehand what issues
+one might have when setting this up, or when something is changed, or on upgrading, it's
+important to describe those, too. Think of things that may go wrong and include them here.
+This is important to minimize requests for support, and to avoid doc comments with
+questions that you know someone might ask.
+
+Each scenario can be a third-level heading, e.g. `### Getting error message X`.
+If you have none to add when creating a doc, leave this section in place
+but commented out to help encourage others to add to it in the future. -->
diff --git a/doc/user/admin_area/settings/img/user_and_ip_rate_limits.png b/doc/user/admin_area/settings/img/user_and_ip_rate_limits.png
new file mode 100644
index 00000000000..2bd85a2fd96
--- /dev/null
+++ b/doc/user/admin_area/settings/img/user_and_ip_rate_limits.png
Binary files differ
diff --git a/doc/user/admin_area/settings/index.md b/doc/user/admin_area/settings/index.md
index 5427d04cd7d..2a12614e325 100644
--- a/doc/user/admin_area/settings/index.md
+++ b/doc/user/admin_area/settings/index.md
@@ -18,6 +18,7 @@ include:
- [Third party offers](third_party_offers.md)
- [Usage statistics](usage_statistics.md)
- [Visibility and access controls](visibility_and_access_controls.md)
+- [User and IP rate limits](user_and_ip_rate_limits.md)
- [Custom templates repository](instance_template_repository.md) **(PREMIUM)**
NOTE: **Note:**
diff --git a/doc/user/admin_area/settings/user_and_ip_rate_limits.md b/doc/user/admin_area/settings/user_and_ip_rate_limits.md
new file mode 100644
index 00000000000..e3a495750f2
--- /dev/null
+++ b/doc/user/admin_area/settings/user_and_ip_rate_limits.md
@@ -0,0 +1,32 @@
+---
+type: reference
+---
+
+# User and IP rate limits
+
+Rate limiting is a common technique used to improve the security and durability
+of a web application. For more details, see
+[Rate limits](../../../security/rate_limits.md).
+
+The following limits can be enforced in **Admin Area > Network > User and
+IP rate limits**:
+
+- Unauthenticated requests
+- Authenticated API requests
+- Authenticated web requests
+
+These limits are disabled by default.
+
+![user-and-ip-rate-limits](img/user_and_ip_rate_limits.png)
+
+<!-- ## Troubleshooting
+
+Include any troubleshooting steps that you can foresee. If you know beforehand what issues
+one might have when setting this up, or when something is changed, or on upgrading, it's
+important to describe those, too. Think of things that may go wrong and include them here.
+This is important to minimize requests for support, and to avoid doc comments with
+questions that you know someone might ask.
+
+Each scenario can be a third-level heading, e.g. `### Getting error message X`.
+If you have none to add when creating a doc, leave this section in place
+but commented out to help encourage others to add to it in the future. -->
diff --git a/doc/user/application_security/dast/index.md b/doc/user/application_security/dast/index.md
index 88e2d1ef22b..fa84f995e58 100644
--- a/doc/user/application_security/dast/index.md
+++ b/doc/user/application_security/dast/index.md
@@ -7,6 +7,11 @@ type: reference, howto
> [Introduced](https://gitlab.com/gitlab-org/gitlab-ee/issues/4348)
in [GitLab Ultimate](https://about.gitlab.com/pricing/) 10.4.
+NOTE: **4 of the top 6 attacks were application based.**
+Download our whitepaper,
+["A Seismic Shift in Application Security"](https://about.gitlab.com/resources/whitepaper-seismic-shift-application-security/)
+to learn how to protect your organization.
+
Running [static checks](../sast/index.md) on your code is the first step to detect
vulnerabilities that can put the security of your code at risk. Yet, once
deployed, your application is exposed to a new category of possible attacks,
diff --git a/doc/user/application_security/index.md b/doc/user/application_security/index.md
index 31f0b5a050c..4dcb416c110 100644
--- a/doc/user/application_security/index.md
+++ b/doc/user/application_security/index.md
@@ -148,6 +148,38 @@ Clicking on this button will create a merge request to apply the solution onto t
![Create merge request from vulnerability](img/create_issue_with_list_hover.png)
+## Security approvals in merge requests **(ULTIMATE)**
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab-ee/issues/9928) in [GitLab Ultimate](https://about.gitlab.com/pricing) 12.2.
+
+Merge Request Approvals can be configured to require approval from a member
+of your security team when a vulnerability would be introduced by a merge request.
+
+The severity threshold is defined as `high`, `critical`, or `unknown`. When
+any vulnerabilities of these severities are present within a merge request, an
+approval will be required from the `Vulnerability-Check` approver group.
+
+### Enabling Security Approvals within a project
+
+To enable Security Approvals, a [project approval rule](../project/merge_requests/merge_request_approvals.md#multiple-approval-rules-premium)
+must be created with the case-sensitive name `Vulnerability-Check`. This approval
+group must be set with an "Approvals required" count greater than zero.
+
+Once this group has been added to your project, the approval rule will be enabled
+for all Merge Requests.
+
+Any code changes made will cause the count of approvals required to reset.
+
+An approval will be required when a security report:
+
+- Contains a new vulnerability of `high`, `critical`, or `unknown` severity.
+- Is not generated during pipeline execution.
+
+An approval will be optional when a security report:
+
+- Contains no new vulnerabilities.
+- Contains only new vulnerabilities of `low` or `medium` severity.
+
<!-- ## Troubleshooting
Include any troubleshooting steps that you can foresee. If you know beforehand what issues
diff --git a/doc/user/application_security/sast/analyzers.md b/doc/user/application_security/sast/analyzers.md
index 59835aeba01..cb533538047 100644
--- a/doc/user/application_security/sast/analyzers.md
+++ b/doc/user/application_security/sast/analyzers.md
@@ -29,6 +29,7 @@ SAST supports the following official analyzers:
- [Security Code Scan (.NET)](https://gitlab.com/gitlab-org/security-products/analyzers/security-code-scan)
- [TSLint (Typescript)](https://gitlab.com/gitlab-org/security-products/analyzers/tslint)
- [Sobelow (Elixir Phoenix)](https://gitlab.com/gitlab-org/security-products/analyzers/sobelow)
+- [PMD (Apex only)](https://gitlab.com/gitlab-org/security-products/analyzers/pmd-apex)
The analyzers are published as Docker images that SAST will use to launch
dedicated containers for each analysis.
@@ -116,24 +117,24 @@ custom analyzer can scan the source code.
## Analyzers Data
-| Property \ Tool | Bandit | Brakeman | ESLint security | Find Sec Bugs | Flawfinder | Go AST Scanner | NodeJsScan | Php CS Security Audit | Security code Scan (.NET) | TSLint Security | Sobelow |
-| --------------------------------------- | :------------------: | :------------------: | :------------------: | :------------------: | :------------------: | :------------------: | :------------------: | :---------------------: | :-------------------------: | :-------------: | :----------------: |
-| Severity | ✓ | 𐄂 | 𐄂 | ✓ | 𐄂 | ✓ | 𐄂 | ✓ | 𐄂 | ✓ | 𐄂 |
-| Title | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ |
-| Description | 𐄂 | 𐄂 | ✓ | ✓ | 𐄂 | 𐄂 | ✓ | 𐄂 | 𐄂 | ✓ | ✓ |
-| File | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ |
-| Start line | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ |
-| End line | ✓ | 𐄂 | ✓ | ✓ | 𐄂 | 𐄂 | 𐄂 | 𐄂 | 𐄂 | ✓ | 𐄂 |
-| Start column | 𐄂 | 𐄂 | ✓ | ✓ | ✓ | ✓ | 𐄂 | ✓ | ✓ | ✓ | 𐄂 |
-| End column | 𐄂 | 𐄂 | ✓ | ✓ | 𐄂 | 𐄂 | 𐄂 | 𐄂 | 𐄂 | ✓ | 𐄂 |
-| External id (e.g. CVE) | 𐄂 | ⚠ | 𐄂 | ⚠ | ✓ | 𐄂 | 𐄂 | 𐄂 | 𐄂 | 𐄂 | 𐄂 |
-| URLs | 𐄂 | ✓ | 𐄂 | ⚠ | 𐄂 | 𐄂 | 𐄂 | 𐄂 | 𐄂 | 𐄂 | 𐄂 |
-| Internal doc/explanation | ⚠ | ✓ | 𐄂 | ✓ | 𐄂 | 𐄂 | 𐄂 | 𐄂 | 𐄂 | 𐄂 | ✓ |
-| Solution | 𐄂 | 𐄂 | 𐄂 | ⚠ | ✓ | 𐄂 | 𐄂 | 𐄂 | 𐄂 | 𐄂 | 𐄂 |
-| Confidence | ✓ | ✓ | 𐄂 | ✓ | ✓ | ✓ | 𐄂 | 𐄂 | 𐄂 | 𐄂 | ✓ |
-| Affected item (e.g. class or package) | 𐄂 | ✓ | 𐄂 | ✓ | ✓ | 𐄂 | 𐄂 | 𐄂 | 𐄂 | 𐄂 | 𐄂 |
-| Source code extract | ✓ | ✓ | ✓ | 𐄂 | ✓ | ✓ | 𐄂 | 𐄂 | 𐄂 | 𐄂 | 𐄂 |
-| Internal ID | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | 𐄂 | ✓ | ✓ | ✓ | ✓ |
+| Property \ Tool | Apex | Bandit | Brakeman | ESLint security | Find Sec Bugs | Flawfinder | Go AST Scanner | NodeJsScan | Php CS Security Audit | Security code Scan (.NET) | TSLint Security | Sobelow |
+| --------------------------------------- | :------------------: | :------------------: | :------------------: | :------------------: | :------------------: | :------------------: | :------------------: | :------------------: | :---------------------: | :-------------------------: | :-------------: | :----------------: |
+| Severity | ✓ | ✓ | 𐄂 | 𐄂 | ✓ | 𐄂 | ✓ | 𐄂 | ✓ | 𐄂 | ✓ | 𐄂 |
+| Title | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ |
+| Description | ✓ | 𐄂 | 𐄂 | ✓ | ✓ | 𐄂 | 𐄂 | ✓ | 𐄂 | 𐄂 | ✓ | ✓ |
+| File | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ |
+| Start line | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ |
+| End line | ✓ | ✓ | 𐄂 | ✓ | ✓ | 𐄂 | 𐄂 | 𐄂 | 𐄂 | 𐄂 | ✓ | 𐄂 |
+| Start column | ✓ | 𐄂 | 𐄂 | ✓ | ✓ | ✓ | ✓ | 𐄂 | ✓ | ✓ | ✓ | 𐄂 |
+| End column | ✓ | 𐄂 | 𐄂 | ✓ | ✓ | 𐄂 | 𐄂 | 𐄂 | 𐄂 | 𐄂 | ✓ | 𐄂 |
+| External id (e.g. CVE) | 𐄂 | 𐄂 | ⚠ | 𐄂 | ⚠ | ✓ | 𐄂 | 𐄂 | 𐄂 | 𐄂 | 𐄂 | 𐄂 |
+| URLs | ✓ | 𐄂 | ✓ | 𐄂 | ⚠ | 𐄂 | 𐄂 | 𐄂 | 𐄂 | 𐄂 | 𐄂 | 𐄂 |
+| Internal doc/explanation | ✓ | ⚠ | ✓ | 𐄂 | ✓ | 𐄂 | 𐄂 | 𐄂 | 𐄂 | 𐄂 | 𐄂 | ✓ |
+| Solution | ✓ | 𐄂 | 𐄂 | 𐄂 | ⚠ | ✓ | 𐄂 | 𐄂 | 𐄂 | 𐄂 | 𐄂 | 𐄂 |
+| Confidence | 𐄂 | ✓ | ✓ | 𐄂 | ✓ | ✓ | ✓ | 𐄂 | 𐄂 | 𐄂 | 𐄂 | ✓ |
+| Affected item (e.g. class or package) | ✓ | 𐄂 | ✓ | 𐄂 | ✓ | ✓ | 𐄂 | 𐄂 | 𐄂 | 𐄂 | 𐄂 | 𐄂 |
+| Source code extract | 𐄂 | ✓ | ✓ | ✓ | 𐄂 | ✓ | ✓ | 𐄂 | 𐄂 | 𐄂 | 𐄂 | 𐄂 |
+| Internal ID | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | 𐄂 | ✓ | ✓ | ✓ | ✓ |
- ✓ => we have that data
- ⚠ => we have that data but it's partially reliable, or we need to extract it from unstructured content
diff --git a/doc/user/application_security/sast/index.md b/doc/user/application_security/sast/index.md
index aac881112ff..5149f628345 100644
--- a/doc/user/application_security/sast/index.md
+++ b/doc/user/application_security/sast/index.md
@@ -59,6 +59,7 @@ The following table shows which languages, package managers and frameworks are s
|-----------------------------------------------------------------------------|----------------------------------------------------------------------------------------|------------------------------|
| .NET | [Security Code Scan](https://security-code-scan.github.io) | 11.0 |
| Any | [Gitleaks](https://github.com/zricethezav/gitleaks) and [TruffleHog](https://github.com/dxa4481/truffleHog) | 11.9 |
+| Apex (Salesforce) | [PMD](https://pmd.github.io/pmd/index.html) | 12.1 |
| C/C++ | [Flawfinder](https://www.dwheeler.com/flawfinder/) | 10.7 |
| Elixir (Phoenix) | [Sobelow](https://github.com/nccgroup/sobelow) | 11.10 |
| Go | [Gosec](https://github.com/securego/gosec) | 10.7 |
diff --git a/doc/user/clusters/applications.md b/doc/user/clusters/applications.md
index 7c24f6db86f..c0106b2cb9e 100644
--- a/doc/user/clusters/applications.md
+++ b/doc/user/clusters/applications.md
@@ -252,7 +252,9 @@ The applications below can be uninstalled.
| Application | GitLab version | Notes |
| ----------- | -------------- | ----- |
+| Cert-Manager | 12.2+ | The associated private key will be deleted and cannot be restored. Deployed applications will continue to use HTTPS, but certificates will not be renewed. Before uninstalling, you may wish to [back up your configuration](https://docs.cert-manager.io/en/latest/tasks/backup-restore-crds.html) or [revoke your certificates](https://letsencrypt.org/docs/revoking/) |
| GitLab Runner | 12.2+ | Any running pipelines will be canceled. |
+| Helm | 12.2+ | The associated Tiller pod will be deleted and cannot be restored. |
| Ingress | 12.1+ | The associated load balancer and IP will be deleted and cannot be restored. Furthermore, it can only be uninstalled if JupyterHub is not installed. |
| JupyterHub | 12.1+ | All data not committed to GitLab will be deleted and cannot be restored. |
| Knative | 12.1+ | The associated IP will be deleted and cannot be restored. |
diff --git a/doc/user/gitlab_com/index.md b/doc/user/gitlab_com/index.md
index 7858c419e04..e6c27c33654 100644
--- a/doc/user/gitlab_com/index.md
+++ b/doc/user/gitlab_com/index.md
@@ -298,6 +298,79 @@ Web front-ends:
- `memory_limit_min` = 1024MiB
- `memory_limit_max` = 1280MiB
+## GitLab.com-specific rate limits
+
+NOTE: **Note:**
+See [Rate limits](../../security/rate_limits.md) for administrator
+documentation.
+
+IP blocks usually happen when GitLab.com receives unusual traffic from a single
+IP address that the system views as potentially malicious based on rate limit
+settings. After the unusual traffic ceases, the IP address will be automatically
+released depending on the type of block, as described below.
+
+If you receive a `403 Forbidden` error for all requests to GitLab.com, please
+check for any automated processes that may be triggering a block. For
+assistance, contact [GitLab Support](https://support.gitlab.com)
+with details, such as the affected IP address.
+
+### HAProxy API throttle
+
+GitLab.com responds with HTTP status code 429 to API requests that exceed 10 requests
+per second per IP address.
+
+The following example headers are included for all API requests:
+
+```
+RateLimit-Limit: 600
+RateLimit-Observed: 6
+RateLimit-Remaining: 594
+RateLimit-Reset: 1563325137
+RateLimit-ResetTime: Wed, 17 Jul 2019 00:58:57 GMT
+```
+
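+You can observe these headers yourself with a plain `curl` request (an illustrative
+command, not an official tool; the exact values you see will differ):
+
+```sh
+# Discard the response body and print only the response headers,
+# which include the RateLimit-* fields described above.
+curl --silent --output /dev/null --dump-header - \
+  "https://gitlab.com/api/v4/projects?per_page=1"
+```
+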
+Source:
+
+- Search for `rate_limit_http_rate_per_minute` and `rate_limit_sessions_per_second` in [GitLab.com's current HAProxy settings](https://gitlab.com/gitlab-cookbooks/gitlab-haproxy/blob/master/attributes/default.rb).
+
+### Rack Attack initializer
+
+#### Protected paths throttle
+
+GitLab.com responds with HTTP status code 429 to POST requests at protected paths
+that exceed 10 requests per **minute** per IP address.
+
+See the source below for which paths are protected. This includes user creation,
+user confirmation, user sign in, and password reset.
+
+This header is included in responses to blocked requests:
+
+```
+Retry-After: 60
+```
+
+Source:
+
+- Search for `rate_limit_requests_per_period`, `rate_limit_period`, and `rack_attack_protected_paths` in [GitLab.com's current Rails app settings](https://gitlab.com/gitlab-org/omnibus-gitlab/blob/master/files/gitlab-cookbooks/gitlab/attributes/default.rb).
+
+#### Git and container registry failed authentication ban
+
+GitLab.com responds with HTTP status code 403 for 1 hour if 30 failed authentication
+requests are received within a 3-minute period from a single IP address.
+
+This applies only to Git requests and container registry (`/jwt/auth`) requests
+(combined).
+
+This limit is reset by requests that authenticate successfully. For example, 29
+failed authentication requests followed by 1 successful request, followed by 29
+more failed authentication requests would not trigger a ban.
+
+No response headers are provided.
+
+### Admin Area settings
+
+GitLab.com does not currently use these settings.
+
## GitLab.com at scale
In addition to the GitLab Enterprise Edition Omnibus install, GitLab.com uses
diff --git a/doc/user/group/bulk_editing/index.md b/doc/user/group/bulk_editing/index.md
index 5b5f75c2dd9..c8715577eb2 100644
--- a/doc/user/group/bulk_editing/index.md
+++ b/doc/user/group/bulk_editing/index.md
@@ -5,22 +5,21 @@
> - [Introduced](https://gitlab.com/gitlab-org/gitlab-ee/issues/12719) for merge
requests in GitLab [GitLab Premium](https://about.gitlab.com/pricing/) 12.2.
-> NOTE: **Note:**
->
-> - A permission level of `Reporter` or higher is required in order to manage issues.
-> - A permission level of `Developer` or higher is required in order to manage merge requests.
-
Milestones can be updated simultaneously across multiple issues or merge requests by using the bulk editing feature.
![Bulk editing](img/bulk-editing.png)
+NOTE: **Note:**
+A permission level of `Reporter` or higher is required to manage issues, and
+a permission level of `Developer` or higher is required to manage merge requests.
+
To bulk update group issue or merge request milestones:
1. Navigate to the issues or merge requests list.
1. Click the **Edit issues** or **Edit merge requests** button.
- - This will open a sidebar on the right-hand side of your screen where an editable field
- for milestones will be displayed.
- - Checkboxes will also appear beside each issue or merge request.
+ - This will open a sidebar on the right-hand side of your screen where an editable field
+ for milestones will be displayed.
+ - Checkboxes will also appear beside each issue or merge request.
1. Check the checkbox beside each issue to be edited.
1. Select the desired milestone from the sidebar.
1. Click **Update all**.
diff --git a/doc/user/markdown.md b/doc/user/markdown.md
index 9380dcf2a32..edb1e904f2b 100644
--- a/doc/user/markdown.md
+++ b/doc/user/markdown.md
@@ -185,6 +185,49 @@ graph TD;
C-->D;
```
+#### Subgraphs
+
+NOTE: **Note:** GitLab 12.1 and later [requires quotes around subgraph
+titles that contain multiple words](https://github.com/knsv/mermaid/pull/845).
+
+Subgraphs can also be included:
+
+~~~
+```mermaid
+graph TB
+
+ SubGraph1 --> SubGraph1Flow
+ subgraph "SubGraph 1 Flow"
+ SubGraph1Flow(SubNode 1)
+ SubGraph1Flow -- Choice1 --> DoChoice1
+ SubGraph1Flow -- Choice2 --> DoChoice2
+ end
+
+ subgraph "Main Graph"
+ Node1[Node 1] --> Node2[Node 2]
+ Node2 --> SubGraph1[Jump to SubGraph1]
+ SubGraph1 --> FinalThing[Final Thing]
+end
+```
+~~~
+
+```mermaid
+graph TB
+
+ SubGraph1 --> SubGraph1Flow
+ subgraph "SubGraph 1 Flow"
+ SubGraph1Flow(SubNode 1)
+ SubGraph1Flow -- Choice1 --> DoChoice1
+ SubGraph1Flow -- Choice2 --> DoChoice2
+ end
+
+ subgraph "Main Graph"
+ Node1[Node 1] --> Node2[Node 2]
+ Node2 --> SubGraph1[Jump to SubGraph1]
+ SubGraph1 --> FinalThing[Final Thing]
+end
+```
+
### Emoji
> If this is not rendered correctly, [view it in GitLab itself](https://gitlab.com/gitlab-org/gitlab-ce/blob/master/doc/user/markdown.md#emoji).
diff --git a/doc/user/permissions.md b/doc/user/permissions.md
index e6822f0c52c..d92435ef724 100644
--- a/doc/user/permissions.md
+++ b/doc/user/permissions.md
@@ -64,7 +64,6 @@ The following table depicts the various user permission levels in a project.
| Lock issue threads | | ✓ | ✓ | ✓ | ✓ |
| Manage issue tracker | | ✓ | ✓ | ✓ | ✓ |
| Manage related issues **(STARTER)** | | ✓ | ✓ | ✓ | ✓ |
-| Create issue from vulnerability **(ULTIMATE)** | | ✓ | ✓ | ✓ | ✓ |
| Manage labels | | ✓ | ✓ | ✓ | ✓ |
| Create code snippets | | ✓ | ✓ | ✓ | ✓ |
| See a commit status | | ✓ | ✓ | ✓ | ✓ |
@@ -94,6 +93,8 @@ The following table depicts the various user permission levels in a project.
| Remove a container registry image | | | ✓ | ✓ | ✓ |
| Create/edit/delete project milestones | | | ✓ | ✓ | ✓ |
| Use security dashboard **(ULTIMATE)** | | | ✓ | ✓ | ✓ |
+| View dependency list **(ULTIMATE)** | | | ✓ | ✓ | ✓ |
+| Create issue from vulnerability **(ULTIMATE)** | | | ✓ | ✓ | ✓ |
| Dismiss vulnerability **(ULTIMATE)** | | | ✓ | ✓ | ✓ |
| Apply code change suggestions | | | ✓ | ✓ | ✓ |
| Create and edit wiki pages | | | ✓ | ✓ | ✓ |
@@ -211,7 +212,8 @@ group.
| Create project in group | | | ✓ | ✓ | ✓ |
| Create/edit/delete group milestones | | | ✓ | ✓ | ✓ |
| Enable/disable a dependency proxy **(PREMIUM)** | | | ✓ | ✓ | ✓ |
-| Create subgroup | | | | ✓ (1) | ✓ |
+| Use security dashboard **(ULTIMATE)** | | | ✓ | ✓ | ✓ |
+| Create subgroup | | | | ✓ (1) | ✓ |
| Edit group | | | | | ✓ |
| Manage group members | | | | | ✓ |
| Remove group | | | | | ✓ |
diff --git a/doc/user/project/img/key_value_labels.png b/doc/user/project/img/key_value_labels.png
deleted file mode 100644
index bec901f127f..00000000000
--- a/doc/user/project/img/key_value_labels.png
+++ /dev/null
Binary files differ
diff --git a/doc/user/project/img/labels_default.png b/doc/user/project/img/labels_default.png
deleted file mode 100644
index 221c5cf55a2..00000000000
--- a/doc/user/project/img/labels_default.png
+++ /dev/null
Binary files differ
diff --git a/doc/user/project/img/labels_default_v12_1.png b/doc/user/project/img/labels_default_v12_1.png
new file mode 100644
index 00000000000..4a0a6ba9ce0
--- /dev/null
+++ b/doc/user/project/img/labels_default_v12_1.png
Binary files differ
diff --git a/doc/user/project/img/labels_delete_v12_1.png b/doc/user/project/img/labels_delete_v12_1.png
new file mode 100644
index 00000000000..2e0ea934519
--- /dev/null
+++ b/doc/user/project/img/labels_delete_v12_1.png
Binary files differ
diff --git a/doc/user/project/img/labels_drag_priority_v12_1.gif b/doc/user/project/img/labels_drag_priority_v12_1.gif
new file mode 100644
index 00000000000..a568490da5f
--- /dev/null
+++ b/doc/user/project/img/labels_drag_priority_v12_1.gif
Binary files differ
diff --git a/doc/user/project/img/labels_epic_sidebar.png b/doc/user/project/img/labels_epic_sidebar.png
deleted file mode 100644
index f8162d89e9d..00000000000
--- a/doc/user/project/img/labels_epic_sidebar.png
+++ /dev/null
Binary files differ
diff --git a/doc/user/project/img/labels_epic_sidebar_v12_1.png b/doc/user/project/img/labels_epic_sidebar_v12_1.png
new file mode 100644
index 00000000000..7e05c6d1ce4
--- /dev/null
+++ b/doc/user/project/img/labels_epic_sidebar_v12_1.png
Binary files differ
diff --git a/doc/user/project/img/labels_generate_default.png b/doc/user/project/img/labels_generate_default.png
deleted file mode 100644
index 982a4df999c..00000000000
--- a/doc/user/project/img/labels_generate_default.png
+++ /dev/null
Binary files differ
diff --git a/doc/user/project/img/labels_generate_default_v12_1.png b/doc/user/project/img/labels_generate_default_v12_1.png
new file mode 100644
index 00000000000..48adc9a5699
--- /dev/null
+++ b/doc/user/project/img/labels_generate_default_v12_1.png
Binary files differ
diff --git a/doc/user/project/img/labels_group_issues.png b/doc/user/project/img/labels_group_issues.png
deleted file mode 100644
index cea1d304d31..00000000000
--- a/doc/user/project/img/labels_group_issues.png
+++ /dev/null
Binary files differ
diff --git a/doc/user/project/img/labels_group_issues_v12_1.png b/doc/user/project/img/labels_group_issues_v12_1.png
new file mode 100644
index 00000000000..bfe425f20ac
--- /dev/null
+++ b/doc/user/project/img/labels_group_issues_v12_1.png
Binary files differ
diff --git a/doc/user/project/img/labels_key_value_v12_1.png b/doc/user/project/img/labels_key_value_v12_1.png
new file mode 100644
index 00000000000..81d5c416c95
--- /dev/null
+++ b/doc/user/project/img/labels_key_value_v12_1.png
Binary files differ
diff --git a/doc/user/project/img/labels_list.png b/doc/user/project/img/labels_list.png
deleted file mode 100644
index 6940dde68bf..00000000000
--- a/doc/user/project/img/labels_list.png
+++ /dev/null
Binary files differ
diff --git a/doc/user/project/img/labels_list_v12_1.png b/doc/user/project/img/labels_list_v12_1.png
new file mode 100644
index 00000000000..c414ab42fbc
--- /dev/null
+++ b/doc/user/project/img/labels_list_v12_1.png
Binary files differ
diff --git a/doc/user/project/img/new_label_from_sidebar.gif b/doc/user/project/img/labels_new_label_from_sidebar.gif
index 572b29a86e1..572b29a86e1 100644
--- a/doc/user/project/img/new_label_from_sidebar.gif
+++ b/doc/user/project/img/labels_new_label_from_sidebar.gif
Binary files differ
diff --git a/doc/user/project/img/labels_prioritized.png b/doc/user/project/img/labels_prioritized.png
deleted file mode 100644
index 7ce2d08b38c..00000000000
--- a/doc/user/project/img/labels_prioritized.png
+++ /dev/null
Binary files differ
diff --git a/doc/user/project/img/labels_prioritized_v12_1.png b/doc/user/project/img/labels_prioritized_v12_1.png
new file mode 100644
index 00000000000..8ea69949d8e
--- /dev/null
+++ b/doc/user/project/img/labels_prioritized_v12_1.png
Binary files differ
diff --git a/doc/user/project/img/labels_promotion.png b/doc/user/project/img/labels_promotion.png
deleted file mode 100644
index 762a3773692..00000000000
--- a/doc/user/project/img/labels_promotion.png
+++ /dev/null
Binary files differ
diff --git a/doc/user/project/img/labels_promotion_v12_1.png b/doc/user/project/img/labels_promotion_v12_1.png
new file mode 100644
index 00000000000..238d50981ae
--- /dev/null
+++ b/doc/user/project/img/labels_promotion_v12_1.png
Binary files differ
diff --git a/doc/user/project/img/labels_subscriptions.png b/doc/user/project/img/labels_subscriptions.png
deleted file mode 100644
index f3c4235d051..00000000000
--- a/doc/user/project/img/labels_subscriptions.png
+++ /dev/null
Binary files differ
diff --git a/doc/user/project/img/labels_subscriptions_v12_1.png b/doc/user/project/img/labels_subscriptions_v12_1.png
new file mode 100644
index 00000000000..57c437ac8a4
--- /dev/null
+++ b/doc/user/project/img/labels_subscriptions_v12_1.png
Binary files differ
diff --git a/doc/user/project/integrations/jira_server_configuration.md b/doc/user/project/integrations/jira_server_configuration.md
index 5116cbfe9ac..1efd0ff3944 100644
--- a/doc/user/project/integrations/jira_server_configuration.md
+++ b/doc/user/project/integrations/jira_server_configuration.md
@@ -3,8 +3,8 @@
We need to create a user in Jira which will have access to all projects that
need to integrate with GitLab.
-As an example, we'll create a user named `gitlab` and add it to the `Jira-developers`
-group.
+As an example, we'll create a user named `gitlab` and add it to a new group
+named `gitlab-developers`.
NOTE: **Note**
It is important that the Jira user created for the integration is given 'write'
diff --git a/doc/user/project/integrations/prometheus.md b/doc/user/project/integrations/prometheus.md
index e609fe43507..44439b59e77 100644
--- a/doc/user/project/integrations/prometheus.md
+++ b/doc/user/project/integrations/prometheus.md
@@ -98,7 +98,10 @@ You can view the performance dashboard for an environment by [clicking on the mo
> [Introduced](https://gitlab.com/gitlab-org/gitlab-ee/merge_requests/3799) in [GitLab Premium](https://about.gitlab.com/pricing/) 10.6.
-Additional metrics can be monitored by adding them on the Prometheus integration page. Once saved, they will be displayed on the environment performance dashboard.
+Custom metrics can be monitored by adding them on the Prometheus integration page. Once saved, they will be displayed on the environment performance dashboard provided that either:
+
+- A [connected Kubernetes cluster](../clusters/index.md#adding-and-removing-clusters) with the environment scope of `*` is used and [Prometheus is installed on the cluster](#enabling-prometheus-integration), or
+- Prometheus is [manually configured](#manual-configuration-of-prometheus).
![Add New Metric](img/prometheus_add_metric.png)
diff --git a/doc/user/project/issues/related_issues.md b/doc/user/project/issues/related_issues.md
index 9c72fe33d0d..d7178506b64 100644
--- a/doc/user/project/issues/related_issues.md
+++ b/doc/user/project/issues/related_issues.md
@@ -19,7 +19,7 @@ Issues from a different project require additional information like the
group and the project name. For example:
- same project: `#44`
-- same group: `project#44 `
+- same group: `project#44`
- different group: `group/project#44`
Valid references will be added to a temporary list that you can review.
diff --git a/doc/user/project/labels.md b/doc/user/project/labels.md
index fdfeb4aa4cd..a00c1c980d2 100644
--- a/doc/user/project/labels.md
+++ b/doc/user/project/labels.md
@@ -2,43 +2,52 @@
## Overview
-Labels allow you to categorize issues or merge requests using descriptive titles like `bug`, `feature request`, or `docs`. Each label also has a customizable color. They allow you to quickly and dynamically filter and manage issues or merge requests you care about, and are visible throughout GitLab in most places where issues and merge requests are located.
+Labels allow you to categorize issues or merge requests using descriptive titles like
+`bug`, `feature request`, or `docs`. Each label also has a customizable color. They
+allow you to quickly and dynamically filter and manage issues or merge requests you
+care about, and are visible throughout GitLab in most places where issues and merge
+requests are located.
## Project labels and group labels
In GitLab, you can create project and group labels:
- **Project labels** can be assigned to issues or merge requests in that project only.
-- **Group labels** can be assigned to any issue or merge request of any project in that group or any subgroups of the group.
+- **Group labels** can be assigned to any issue or merge request in any project in
+ that group, or any subgroups of the group.
## Scoped labels **(PREMIUM)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab-ee/issues/9175) in [GitLab Premium](https://about.gitlab.com/pricing/) 11.10.
-Scoped labels allow teams to use the simple and familiar feature of labels to
+Scoped labels allow teams to use the simple and familiar label feature to
annotate their issues, merge requests, and epics to achieve custom fields and
custom workflow states by leveraging a special label title syntax.
-A scoped label is a kind of label defined only by a special double-colon syntax
+A scoped label is a kind of label defined by a special double-colon syntax
in the label’s title, using the format `key::value`. For example:
-![A sample scoped label](img/key_value_labels.png)
+![A sample scoped label](img/labels_key_value_v12_1.png)
An issue, epic, or merge request cannot have two scoped labels with the same key.
-For example, if an issue is already labeled `priority::3` and you apply the label `priority::2` to it,
-`priority::3` is automatically removed.
+For example, if an issue is already labeled `priority::3`, and then you apply the
+`priority::2` label to it, `priority::3` is automatically removed.
This functionality is demonstrated in a video titled [Use scoped labels in GitLab 11.10 for custom fields and custom workflows](https://www.youtube.com/watch?v=4BCBby6du3c).
### Labels with multiple colon pairs
-If labels have multiple instances of `::`, the longest path from left to right, until the last `::`, is considered the "key" or the "scope".
+If labels have multiple instances of `::`, the longest path from left to right, until
+the last `::`, is considered the "key" or the "scope".
-For example, `nested::key1::value1` and `nested::key1::value2` cannot both exist on the same issue. Adding the latter label will automatically remove the former due to the shared scope of `nested::key1`.
+For example, `nested::key1::value1` and `nested::key1::value2` cannot both exist on
+the same issue. Adding the latter label will automatically remove the former due to
+the shared scope of `nested::key1`.
-`nested::key1::value1` and `nested::key2::value1` can both exist on the same issue, as these are considered to use two different label scopes, `nested::key1` and `nested::key2`.
+`nested::key1::value1` and `nested::key2::value1` can both exist on the same issue,
+as these are considered to use two different label scopes, `nested::key1` and `nested::key2`.
-### Workflows with scoped labels **(PREMIUM)**
+### Workflows with scoped labels
Suppose you wanted a custom field in issues to track the platform operating system
that your features target, where each issue should only target one platform. You
@@ -58,101 +67,154 @@ be able to advance workflow states consistently in issues themselves.
## Creating labels
->**Note:**
+NOTE: **Note:**
A permission level of Reporter or higher is required to create labels.
### New project label
To create a **project label**, navigate to **Issues > Labels** in the project.
-This page only shows project labels in this project and group labels of this project's parent group.
+This page only shows the project labels in this project, and the group labels of the
+project's parent group.
-Click the **New label** button. Enter the title, an optional description, and the background color. Click **Create label** to create the label.
+Click the **New label** button. Enter the title, an optional description, and the
+background color. Click **Create label** to create the label.
-If a project has no labels, you can generate a default set of project labels from its empty label list page:
+If a project has no labels, you can generate a default set of project labels from
+its empty label list page:
-![Labels generate default](img/labels_generate_default.png)
+![Labels generate default](img/labels_generate_default_v12_1.png)
GitLab will add the following default labels to the project:
-![Labels default](img/labels_default.png)
+![Labels default](img/labels_default_v12_1.png)
### New group label
-To create a **group label**, follow similar steps from above to project labels. Navigate to **Issues > Labels** in the group and create it from there.
-This page only shows group labels in this group.
-Alternatively, you can create group labels also from Epic sidebar. Please note that the created label will belong to the immediate group to which epic belongs.
+To create a **group label**, navigate to **Issues > Labels** in the **group** and create
+it from there. This page only shows group labels in this group.
+
+Alternatively, you can create group labels from the Epic sidebar. Please note that the
+created label will belong to the immediate group to which the epic belongs. **(ULTIMATE)**
-![Create Labels from Epic](img/labels_epic_sidebar.png)
+![Create Labels from Epic](img/labels_epic_sidebar_v12_1.png)
Group labels appear in every label list page of the group's child projects.
-![Labels list](img/labels_list.png)
+![Labels list](img/labels_list_v12_1.png)
### New project label from sidebar
-From the sidebar of an issue or a merge request, you can create a new **project label** inline immediately, instead of navigating to the project label list page.
+From the sidebar of an issue or a merge request, you can create a new **project label**
+inline immediately, instead of navigating to the project label list page.
-![Labels inline](img/new_label_from_sidebar.gif)
+![Labels inline](img/labels_new_label_from_sidebar.gif)
## Editing labels
NOTE: **Note:**
A permission level of Reporter or higher is required to edit labels.
-You can update a label by navigating to **Issues > Labels** in the project or group and clicking the pencil icon.
+To update a label, navigate to **Issues > Labels** in the project or group
+and click the pencil icon. The title, description and color can be changed.
-You can delete a label by clicking the trash icon.
+To delete a label, click the three dots next to the `Subscribe` button, and select
+**Delete**.
+
+![Delete label](img/labels_delete_v12_1.png)
### Promoting project labels to group labels
-If you are expanding from a few projects to a larger number of projects within the same group, you may want to share the same label among multiple projects in the same group. If you previously created a project label and now want to make it available for other projects, you can promote it to a group label.
+If you are expanding from a few projects to a larger number of projects within the
+same group, you may want to share the same label among multiple projects in the same
+group. If you previously created a project label and now want to make it available
+for other projects, you can promote it to a group label.
-From the project label list page, you can promote a project label to a group label. This will merge all project labels across all projects in this group with the same name into a single group label. All issues and merge requests that previously were assigned one of these project labels will now be assigned the new group label. This action cannot be reversed and the changes are permanent.
+From the project label list page, you can promote a project label to a group label.
+This will merge all project labels with the same name into a single group label, across
+all projects in this group. All issues and merge requests that were previously
+assigned one of these project labels will now be assigned the new group label. This
+action cannot be reversed and the changes are permanent.
-![Labels promotion](img/labels_promotion.png)
+![Labels promotion](img/labels_promotion_v12_1.png)
## Assigning labels from the sidebar
-Every issue and merge request can be assigned any number of labels. The labels are visible on every issue and merge request page, in the sidebar. They are also visible in the issue board. From the sidebar, you can assign or unassign a label to the object (i.e. label or unlabel it). You can also perform this as a [quick action](quick_actions.md) in a comment.
+Every issue and merge request can be assigned any number of labels. The labels are
+visible on:
+
+- Every issue and merge request page, in the sidebar.
+- The issue board.
+
+From the sidebar, you can assign or unassign a label to the object (that is, label or
+unlabel it). You can also perform this as a [quick action](quick_actions.md)
+in a comment.
| View labels in sidebar | Assign labels from sidebar |
-|:---:|:---:|
+|:----------------------:|:--------------------------:|
| ![Labels sidebar](img/labels_sidebar.png) | ![Labels sidebar assign](img/labels_sidebar_assign.png) |
## Searching for project labels
-You can search for project labels by navigating from the left sidebar to your
-project's **Issues > Labels** and entering your query to the search bar on the
-top-right:
+To search for project labels, go to **Issues > Labels** in the left sidebar, and enter
+your search query in the **Filter** field.
![Labels project list search](img/labels_project_list_search.png)
-GitLab will consider the label title and description for the search.
+GitLab will check both the label titles and descriptions for the search.
+
+## Filtering by label
+
+The following can be filtered by labels:
-## Filtering issues, merge requests and epics by label
+- Issue lists
+- Merge Request lists
+- Epic lists **(ULTIMATE)**
+- Issue Boards
### Filtering in list pages
-From the project issue list page and the project merge request list page, you can [filter](../search/index.md#issues-and-merge-requests) by both group (including subgroup ancestors) labels and project labels.
+- From the project issue list page and the project merge request list page, you can
+ [filter](../search/index.md#issues-and-merge-requests) by:
+ - Group labels (including subgroup ancestors)
+ - Project labels
-From the group issue list page and the group merge request list page, you can [filter](../search/index.md#issues-and-merge-requests) by both group labels (including subgroup ancestors and subgroup descendants) and project labels.
+- From the group issue list page and the group merge request list page, you can
+ [filter](../search/index.md#issues-and-merge-requests) by:
+ - Group labels (including subgroup ancestors and subgroup descendants)
+ - Project labels
-From the group epic list page, you can [filter](../search/index.md#issues-and-merge-requests) by both current group labels as well as descendant group labels.
+- You can [filter](../search/index.md#issues-and-merge-requests) the group epic list
+ page by: **(ULTIMATE)**
+ - Current group labels
+ - Descendant group labels
-![Labels group issues](img/labels_group_issues.png)
+![Labels group issues](img/labels_group_issues_v12_1.png)
### Filtering in issue boards
-- From [project boards](issue_board.md), you can filter by both group labels and project labels in the [search and filter bar](../search/index.md#issue-boards).
-- From [group issue boards](issue_board.md#group-issue-boards-premium), you can filter by only group labels in the [search and filter bar](../search/index.md#issue-boards). **(PREMIUM)**
-- From [project boards](issue_board.md), you can filter by both group labels and project labels in the [issue board configuration](issue_board.md#configurable-issue-boards-starter). **(STARTER)**
-- From [group issue boards](issue_board.md#group-issue-boards-premium), you can filter by only group labels in the [issue board configuration](issue_board.md#configurable-issue-boards-starter). **(STARTER)**
+- From [project boards](issue_board.md), you can use the [search and filter bar](../search/index.md#issue-boards)
+ to filter by:
+ - Group labels
+ - Project labels
+
+- From [group issue boards](issue_board.md#group-issue-boards-premium), you can use the
+ [search and filter bar](../search/index.md#issue-boards) to filter by group labels only. **(PREMIUM)**
+
+- From [project boards](issue_board.md), in the [issue board configuration](issue_board.md#configurable-issue-boards-starter),
+ you can filter by: **(STARTER)**
+ - Group labels
+ - Project labels
+
+- From [group issue boards](issue_board.md#group-issue-boards-premium), in the [issue board configuration](issue_board.md#configurable-issue-boards-starter),
+ you can filter by group labels only. **(STARTER)**
## Subscribing to labels
-From the project label list page and the group label list page, you can subscribe to [notifications](../../workflow/notifications.md) of a given label, to alert you that the label has been assigned to an issue or merge request.
+From the project label list page and the group label list page, you can subscribe
+to [notifications](../../workflow/notifications.md) of a given label, to alert you
+that the label has been assigned to an issue or merge request.
-![Labels subscriptions](img/labels_subscriptions.png)
+![Labels subscriptions](img/labels_subscriptions_v12_1.png)
## Label priority
@@ -161,26 +223,43 @@ From the project label list page and the group label list page, you can subscrib
> - Introduced in GitLab 8.9.
> - Priority sorting is based on the highest priority label only. [This discussion](https://gitlab.com/gitlab-org/gitlab-ce/issues/18554) considers changing this.
-Labels can have relative priorities, which are used in the "Label priority" and "Priority" sort orders of the issue and merge request list pages.
+Labels can have relative priorities, which are used in the "Label priority" and
+"Priority" sort orders of the issue and merge request list pages.
+
+From the project label list page, star a label to indicate that it has a priority.
+
+![Labels prioritized](img/labels_prioritized_v12_1.png)
+
+Drag starred labels up and down the list to change their priority. Higher in the list
+means higher priority. Prioritization happens at the project level, only on the project
+label list page, and not on the group label list page.
-From the project label list page, star a label to indicate that it has a priority. Drag starred labels up and down to change their priority. Higher means higher priority. Prioritization happens at the project level, only on the project label list page, and not on the group label list page. However, both project and group labels can be prioritized on the project label list page since both types are displayed on the project label list page.
+However, both project and group labels can be prioritized on the project label list
+page, since both types are displayed there.
-![Labels prioritized](img/labels_prioritized.png)
+![Drag to change label priority](img/labels_drag_priority_v12_1.gif)
-On the project and group issue and merge request list pages, you can sort by `Label priority` and `Priority`, which account for objects (issues and merge requests) that have prioritized labels assigned to them.
+On the merge request and issue pages, for both groups and projects, you can sort by `Label priority`
+and `Priority`, which account for objects (issues and merge requests) that have prioritized
+labels assigned to them.
If you sort by `Label priority`, GitLab considers this sort comparison order:
- Object with a higher priority prioritized label.
- Object without a prioritized label.
-Ties are broken arbitrarily. (Note that we _only_ consider the highest prioritized label in an object, and not any of the lower prioritized labels. [This discussion](https://gitlab.com/gitlab-org/gitlab-ce/issues/18554) considers changing this.)
+Ties are broken arbitrarily. Note that we _only_ consider the highest prioritized label
+in an object, and not any of the lower prioritized labels. [This discussion](https://gitlab.com/gitlab-org/gitlab-ce/issues/18554)
+considers changing this.
![Labels sort label priority](img/labels_sort_label_priority.png)
If you sort by `Priority`, GitLab considers this sort comparison order:
-- Object's assigned [milestone](milestones/index.md)'s due date is sooner, provided the object has a milestone and the milestone has a due date. If this isn't the case, consider the object having a due date in the infinite future.
+- Due date of the assigned [milestone](milestones/index.md) is sooner, provided
+ the object has a milestone and the milestone has a due date. If this isn't the case,
+ consider the object having a due date in the infinite future.
- Object with a higher priority prioritized label.
- Object without a prioritized label.
diff --git a/doc/user/project/merge_requests/merge_request_approvals.md b/doc/user/project/merge_requests/merge_request_approvals.md
index 220795d6f15..656459b3b03 100644
--- a/doc/user/project/merge_requests/merge_request_approvals.md
+++ b/doc/user/project/merge_requests/merge_request_approvals.md
@@ -331,6 +331,16 @@ the dropdown) `approver` and select the user.
![Filter MRs by an approver](img/filter_approver_merge_requests.png)
+## Security approvals in merge requests **(ULTIMATE)**
+
+> Introduced in [GitLab Ultimate](https://about.gitlab.com/pricing) 12.2.
+
+Merge Request Approvals can be configured to require approval from a member
+of your security team when a vulnerability would be introduced by a merge request.
+
+For more information, see
+[Security approvals in merge requests](../../application_security/index.md#security-approvals-in-merge-requests-ultimate).
+
<!-- ## Troubleshooting
Include any troubleshooting steps that you can foresee. If you know beforehand what issues
diff --git a/doc/user/project/packages/npm_registry.md b/doc/user/project/packages/npm_registry.md
index ca0aa9965ef..e01bac6b26f 100644
--- a/doc/user/project/packages/npm_registry.md
+++ b/doc/user/project/packages/npm_registry.md
@@ -81,6 +81,10 @@ domain name.
You should now be able to download and upload NPM packages to your project.
+NOTE: **Note:**
+If you encounter an error message with [Yarn](https://yarnpkg.com/en/), see the
+[troubleshooting section](#troubleshooting).
+
## Uploading packages
Before you will be able to upload a package, you need to specify the registry
@@ -116,3 +120,29 @@ a given scope, you will receive a `403 Forbidden!` error.
If you upload a package with a same name and version twice, GitLab will show
both packages in the UI, but the GitLab NPM Registry will expose the most recent
one as it supports only one package per version for `npm install`.
+
+## Troubleshooting
+
+### Error running yarn with NPM registry
+
+If you are using [yarn](https://yarnpkg.com/en/) with the NPM registry, you may get
+an error message like:
+
+```sh
+yarn install v1.15.2
+warning package.json: No license field
+info No lockfile found.
+warning XXX: No license field
+[1/4] 🔍 Resolving packages...
+[2/4] 🚚 Fetching packages...
+error An unexpected error occurred: "https://gitlab.com/api/v4/projects/XXX/packages/npm/XXX/XXX/-/XXX/XXX-X.X.X.tgz: Request failed \"404 Not Found\"".
+info If you think this is a bug, please open a bug report with the information provided in "/Users/XXX/gitlab-migration/module-util/yarn-error.log".
+info Visit https://yarnpkg.com/en/docs/cli/install for documentation about this command
+```
+
+In this case, try adding this to your `.npmrc` file (and replace `<your_oauth_token>`
+with your OAuth or personal access token):
+
+```text
+//gitlab.com/api/v4/projects/:_authToken=<your_oauth_token>
+```
diff --git a/doc/user/project/pages/custom_domains_ssl_tls_certification/index.md b/doc/user/project/pages/custom_domains_ssl_tls_certification/index.md
index 54ecc42d2b9..6a9900d48f9 100644
--- a/doc/user/project/pages/custom_domains_ssl_tls_certification/index.md
+++ b/doc/user/project/pages/custom_domains_ssl_tls_certification/index.md
@@ -191,7 +191,7 @@ can use the following setup:
1. In Cloudflare, create a DNS `TXT` record to verify your domain.
1. In GitLab, verify your domain.
1. In Cloudflare, create a DNS `CNAME` record pointing `www` to `domain.com`.
-1. In Cloudflare, add a Page Rule pointing `www.domain,com` to `domain.com`:
+1. In Cloudflare, add a Page Rule pointing `www.domain.com` to `domain.com`:
- Navigate to your domain's dashboard and click **Page Rules**
on the top nav.
- Click **Create Page Rule**.
diff --git a/doc/user/project/pipelines/settings.md b/doc/user/project/pipelines/settings.md
index 45f27b3c811..456209c2180 100644
--- a/doc/user/project/pipelines/settings.md
+++ b/doc/user/project/pipelines/settings.md
@@ -76,6 +76,13 @@ Here are some valid examples:
- `my/path/.gitlab-ci.yml`
- `my/path/.my-custom-file.yml`
+The path can be customized at a project level. To customize the path:
+
+1. Go to the project's **Settings > CI / CD**.
+1. Expand the **General pipelines** section.
+1. Provide a value in the **Custom CI config path** field.
+1. Click **Save changes**.
+
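+If you prefer to script this change, the same setting can also be updated through
+the Projects API `ci_config_path` attribute. This is a minimal sketch; replace the
+project ID, token, and path with your own values:
+
+```sh
+# Update the custom CI config path for a single project via the Projects API.
+curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" \
+  "https://gitlab.com/api/v4/projects/<project_id>?ci_config_path=my/path/.gitlab-ci.yml"
+```
+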
## Test coverage parsing
If you use test coverage in your code, GitLab can capture its output in the
diff --git a/doc/user/project/quick_actions.md b/doc/user/project/quick_actions.md
index b8e0ef8d12f..437899dce1e 100644
--- a/doc/user/project/quick_actions.md
+++ b/doc/user/project/quick_actions.md
@@ -70,6 +70,19 @@ Many quick actions require a parameter, for example: username, milestone, and
label. [Autocomplete characters](autocomplete_characters.md) can make it easier
to enter a parameter, compared to selecting items from a list.
+## Quick actions parameters
+
+The easiest way to set parameters for quick actions is to use autocomplete. If
+you manually enter a parameter, it must be enclosed in double quotation marks
+(`"`), unless it contains only:
+
+- ASCII letters.
+- Numerals.
+- Underscore, hyphen, question mark, dot, and ampersand.
+
+Parameters are also case-sensitive. Autocomplete handles this, and the insertion
+of quotation marks, automatically.
+
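+For example, when entered manually, a label title containing spaces must be quoted,
+while a single-word label does not need quotes (assuming labels with these titles
+exist in your project):
+
+```
+/label ~"feature request" ~bug
+```
+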
## Quick actions for commit messages
The following quick actions are applicable for commit messages:
diff --git a/doc/workflow/lfs/manage_large_binaries_with_git_lfs.md b/doc/workflow/lfs/manage_large_binaries_with_git_lfs.md
index b6bba57049d..264372a512d 100644
--- a/doc/workflow/lfs/manage_large_binaries_with_git_lfs.md
+++ b/doc/workflow/lfs/manage_large_binaries_with_git_lfs.md
@@ -84,6 +84,10 @@ that are on the remote repository, eg. for a branch from origin:
git lfs fetch origin master
```
+### Migrate an existing repo to Git LFS
+
+Read the documentation on how to [migrate an existing Git repo with Git LFS](../../topics/git/migrate_to_git_lfs/index.md).
+
## File Locking
> [Introduced](https://gitlab.com/gitlab-org/gitlab-ce/issues/35856) in GitLab 10.5.
diff --git a/doc/workflow/shortcuts.md b/doc/workflow/shortcuts.md
index fd67ea8ce87..d61d7eafd18 100644
--- a/doc/workflow/shortcuts.md
+++ b/doc/workflow/shortcuts.md
@@ -35,11 +35,11 @@ You can see GitLab's keyboard shortcuts by using <kbd>shift</kbd> + <kbd>?</kbd>
| Keyboard Shortcut | Description |
| ----------------- | ----------- |
-| <kbd>g</kbd> + <kbd>a</kbd> | Go to the activity feed |
-| <kbd>g</kbd> + <kbd>p</kbd> | Go to projects |
-| <kbd>g</kbd> + <kbd>i</kbd> | Go to issues |
-| <kbd>g</kbd> + <kbd>m</kbd> | Go to merge requests |
-| <kbd>g</kbd> + <kbd>t</kbd> | Go to todos |
+| <kbd>Shift</kbd> + <kbd>a</kbd> | Go to the activity feed |
+| <kbd>Shift</kbd> + <kbd>p</kbd> | Go to projects |
+| <kbd>Shift</kbd> + <kbd>i</kbd> | Go to issues |
+| <kbd>Shift</kbd> + <kbd>m</kbd> | Go to merge requests |
+| <kbd>Shift</kbd> + <kbd>t</kbd> | Go to todos |
## Project
diff --git a/lib/api/api.rb b/lib/api/api.rb
index 223ae13bd2d..e500a93b31e 100644
--- a/lib/api/api.rb
+++ b/lib/api/api.rb
@@ -104,7 +104,6 @@ module API
mount ::API::BroadcastMessages
mount ::API::Commits
mount ::API::CommitStatuses
- mount ::API::ContainerRegistry
mount ::API::DeployKeys
mount ::API::Deployments
mount ::API::Environments
@@ -116,6 +115,7 @@ module API
mount ::API::GroupLabels
mount ::API::GroupMilestones
mount ::API::Groups
+ mount ::API::GroupContainerRepositories
mount ::API::GroupVariables
mount ::API::ImportGithub
mount ::API::Internal
@@ -138,6 +138,7 @@ module API
mount ::API::Pipelines
mount ::API::PipelineSchedules
mount ::API::ProjectClusters
+ mount ::API::ProjectContainerRepositories
mount ::API::ProjectEvents
mount ::API::ProjectExport
mount ::API::ProjectImport
diff --git a/lib/api/entities.rb b/lib/api/entities.rb
index 2d6dd18d4ea..2f5ce3d4003 100644
--- a/lib/api/entities.rb
+++ b/lib/api/entities.rb
@@ -1162,6 +1162,7 @@ module API
attributes = ::ApplicationSettingsHelper.visible_attributes
attributes.delete(:performance_bar_allowed_group_path)
attributes.delete(:performance_bar_enabled)
+ attributes.delete(:allow_local_requests_from_hooks_and_services)
attributes
end
@@ -1180,6 +1181,7 @@ module API
# support legacy names, can be removed in v5
expose :password_authentication_enabled_for_web, as: :password_authentication_enabled
expose :password_authentication_enabled_for_web, as: :signin_enabled
+ expose :allow_local_requests_from_web_hooks_and_services, as: :allow_local_requests_from_hooks_and_services
end
# deprecated old Release representation
diff --git a/lib/api/entities/container_registry.rb b/lib/api/entities/container_registry.rb
index 00833ca7480..6250f35c7cb 100644
--- a/lib/api/entities/container_registry.rb
+++ b/lib/api/entities/container_registry.rb
@@ -3,18 +3,20 @@
module API
module Entities
module ContainerRegistry
- class Repository < Grape::Entity
- expose :id
+ class Tag < Grape::Entity
expose :name
expose :path
expose :location
- expose :created_at
end
- class Tag < Grape::Entity
+ class Repository < Grape::Entity
+ expose :id
expose :name
expose :path
+ expose :project_id
expose :location
+ expose :created_at
+ expose :tags, using: Tag, if: -> (_, options) { options[:tags] }
end
class TagDetails < Tag
diff --git a/lib/api/group_container_repositories.rb b/lib/api/group_container_repositories.rb
new file mode 100644
index 00000000000..fd24662cc9a
--- /dev/null
+++ b/lib/api/group_container_repositories.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+module API
+ class GroupContainerRepositories < Grape::API
+ include PaginationParams
+
+ before { authorize_read_group_container_images! }
+
+ REPOSITORY_ENDPOINT_REQUIREMENTS = API::NAMESPACE_OR_PROJECT_REQUIREMENTS.merge(
+ tag_name: API::NO_SLASH_URL_PART_REGEX)
+
+ params do
+ requires :id, type: String, desc: "Group's ID or path"
+ end
+ resource :groups, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
+ desc 'Get a list of all repositories within a group' do
+ detail 'This feature was introduced in GitLab 12.2.'
+ success Entities::ContainerRegistry::Repository
+ end
+ params do
+ use :pagination
+ optional :tags, type: Boolean, default: false, desc: 'Determines if tags should be included'
+ end
+ get ':id/registry/repositories' do
+ repositories = ContainerRepositoriesFinder.new(
+ id: user_group.id, container_type: :group
+ ).execute
+
+ present paginate(repositories), with: Entities::ContainerRegistry::Repository, tags: params[:tags]
+ end
+ end
+
+ helpers do
+ def authorize_read_group_container_images!
+ authorize! :read_container_image, user_group
+ end
+ end
+ end
+end
diff --git a/lib/api/helpers/notes_helpers.rb b/lib/api/helpers/notes_helpers.rb
index b03ac7deb71..7124ac0c5c3 100644
--- a/lib/api/helpers/notes_helpers.rb
+++ b/lib/api/helpers/notes_helpers.rb
@@ -76,7 +76,7 @@ module API
def find_noteable(parent_type, parent_id, noteable_type, noteable_id)
params = params_by_noteable_type_and_id(noteable_type, noteable_id)
- noteable = NotesFinder.new(user_project, current_user, params).target
+ noteable = NotesFinder.new(current_user, params.merge(project: user_project)).target
noteable = nil unless can?(current_user, noteable_read_ability_name(noteable), noteable)
noteable || not_found!(noteable_type)
end
diff --git a/lib/api/container_registry.rb b/lib/api/project_container_repositories.rb
index 7dad20a822a..6d53abcc500 100644
--- a/lib/api/container_registry.rb
+++ b/lib/api/project_container_repositories.rb
@@ -1,10 +1,10 @@
# frozen_string_literal: true
module API
- class ContainerRegistry < Grape::API
+ class ProjectContainerRepositories < Grape::API
include PaginationParams
- REGISTRY_ENDPOINT_REQUIREMENTS = API::NAMESPACE_OR_PROJECT_REQUIREMENTS.merge(
+ REPOSITORY_ENDPOINT_REQUIREMENTS = API::NAMESPACE_OR_PROJECT_REQUIREMENTS.merge(
tag_name: API::NO_SLASH_URL_PART_REGEX)
before { error!('404 Not Found', 404) unless Feature.enabled?(:container_registry_api, user_project, default_enabled: true) }
@@ -20,11 +20,14 @@ module API
end
params do
use :pagination
+ optional :tags, type: Boolean, default: false, desc: 'Determines if tags should be included'
end
get ':id/registry/repositories' do
- repositories = user_project.container_repositories.ordered
+ repositories = ContainerRepositoriesFinder.new(
+ id: user_project.id, container_type: :project
+ ).execute
- present paginate(repositories), with: Entities::ContainerRegistry::Repository
+ present paginate(repositories), with: Entities::ContainerRegistry::Repository, tags: params[:tags]
end
desc 'Delete repository' do
@@ -33,7 +36,7 @@ module API
params do
requires :repository_id, type: Integer, desc: 'The ID of the repository'
end
- delete ':id/registry/repositories/:repository_id', requirements: REGISTRY_ENDPOINT_REQUIREMENTS do
+ delete ':id/registry/repositories/:repository_id', requirements: REPOSITORY_ENDPOINT_REQUIREMENTS do
authorize_admin_container_image!
DeleteContainerRepositoryWorker.perform_async(current_user.id, repository.id)
@@ -49,7 +52,7 @@ module API
requires :repository_id, type: Integer, desc: 'The ID of the repository'
use :pagination
end
- get ':id/registry/repositories/:repository_id/tags', requirements: REGISTRY_ENDPOINT_REQUIREMENTS do
+ get ':id/registry/repositories/:repository_id/tags', requirements: REPOSITORY_ENDPOINT_REQUIREMENTS do
authorize_read_container_image!
tags = Kaminari.paginate_array(repository.tags)
@@ -65,7 +68,7 @@ module API
optional :keep_n, type: Integer, desc: 'Keep n of latest tags with matching name'
optional :older_than, type: String, desc: 'Delete older than: 1h, 1d, 1month'
end
- delete ':id/registry/repositories/:repository_id/tags', requirements: REGISTRY_ENDPOINT_REQUIREMENTS do
+ delete ':id/registry/repositories/:repository_id/tags', requirements: REPOSITORY_ENDPOINT_REQUIREMENTS do
authorize_admin_container_image!
message = 'This request has already been made. You can run this at most once an hour for a given container repository'
@@ -85,7 +88,7 @@ module API
requires :repository_id, type: Integer, desc: 'The ID of the repository'
requires :tag_name, type: String, desc: 'The name of the tag'
end
- get ':id/registry/repositories/:repository_id/tags/:tag_name', requirements: REGISTRY_ENDPOINT_REQUIREMENTS do
+ get ':id/registry/repositories/:repository_id/tags/:tag_name', requirements: REPOSITORY_ENDPOINT_REQUIREMENTS do
authorize_read_container_image!
validate_tag!
@@ -99,7 +102,7 @@ module API
requires :repository_id, type: Integer, desc: 'The ID of the repository'
requires :tag_name, type: String, desc: 'The name of the tag'
end
- delete ':id/registry/repositories/:repository_id/tags/:tag_name', requirements: REGISTRY_ENDPOINT_REQUIREMENTS do
+ delete ':id/registry/repositories/:repository_id/tags/:tag_name', requirements: REPOSITORY_ENDPOINT_REQUIREMENTS do
authorize_destroy_container_image!
validate_tag!
diff --git a/lib/api/settings.rb b/lib/api/settings.rb
index aa9e879160d..196ef1fcdfa 100644
--- a/lib/api/settings.rb
+++ b/lib/api/settings.rb
@@ -124,6 +124,7 @@ module API
optional :usage_ping_enabled, type: Boolean, desc: 'Every week GitLab will report license usage back to GitLab, Inc.'
optional :instance_statistics_visibility_private, type: Boolean, desc: 'When set to `true` Instance statistics will only be available to admins'
optional :local_markdown_version, type: Integer, desc: "Local markdown version, increase this value when any cached markdown should be invalidated"
+ optional :allow_local_requests_from_hooks_and_services, type: Boolean, desc: 'Deprecated: Use :allow_local_requests_from_web_hooks_and_services instead. Allow requests to the local network from hooks and services.' # support legacy names, can be removed in v5
ApplicationSetting::SUPPORTED_KEY_TYPES.each do |type|
optional :"#{type}_key_restriction",
@@ -158,6 +159,11 @@ module API
attrs[:password_authentication_enabled_for_web] = attrs.delete(:password_authentication_enabled)
end
+ # support legacy names, can be removed in v5
+ if attrs.has_key?(:allow_local_requests_from_hooks_and_services)
+ attrs[:allow_local_requests_from_web_hooks_and_services] = attrs.delete(:allow_local_requests_from_hooks_and_services)
+ end
+
attrs = filter_attributes_using_license(attrs)
if ApplicationSettings::UpdateService.new(current_settings, current_user, attrs).execute
diff --git a/lib/feature.rb b/lib/feature.rb
index e28333aa58e..c70a6980f19 100644
--- a/lib/feature.rb
+++ b/lib/feature.rb
@@ -80,6 +80,13 @@ class Feature
get(key).disable_group(group)
end
+ def remove(key)
+ feature = get(key)
+ return unless persisted?(feature)
+
+ feature.remove
+ end
+
def flipper
if Gitlab::SafeRequestStore.active?
Gitlab::SafeRequestStore[:flipper] ||= build_flipper_instance
diff --git a/lib/gitlab/background_migration/migrate_legacy_artifacts.rb b/lib/gitlab/background_migration/migrate_legacy_artifacts.rb
index 5cd638083b0..4377ec2987c 100644
--- a/lib/gitlab/background_migration/migrate_legacy_artifacts.rb
+++ b/lib/gitlab/background_migration/migrate_legacy_artifacts.rb
@@ -39,10 +39,10 @@ module Gitlab
SELECT
project_id,
id,
- artifacts_expire_at,
+ artifacts_expire_at #{add_missing_db_timezone},
#{LEGACY_PATH_FILE_LOCATION},
- created_at,
- created_at,
+ created_at #{add_missing_db_timezone},
+ created_at #{add_missing_db_timezone},
artifacts_file,
artifacts_size,
COALESCE(artifacts_file_store, #{FILE_LOCAL_STORE}),
@@ -81,10 +81,10 @@ module Gitlab
SELECT
project_id,
id,
- artifacts_expire_at,
+ artifacts_expire_at #{add_missing_db_timezone},
#{LEGACY_PATH_FILE_LOCATION},
- created_at,
- created_at,
+ created_at #{add_missing_db_timezone},
+ created_at #{add_missing_db_timezone},
artifacts_metadata,
NULL,
COALESCE(artifacts_metadata_store, #{FILE_LOCAL_STORE}),
@@ -121,6 +121,12 @@ module Gitlab
AND artifacts_file <> ''
SQL
end
+
+ def add_missing_db_timezone
+ return '' unless Gitlab::Database.postgresql?
+
+ 'at time zone \'UTC\''
+ end
end
end
end
diff --git a/lib/gitlab/blob_helper.rb b/lib/gitlab/blob_helper.rb
index d3e15a79a8b..fc579ad8d2a 100644
--- a/lib/gitlab/blob_helper.rb
+++ b/lib/gitlab/blob_helper.rb
@@ -45,7 +45,7 @@ module Gitlab
end
def image?
- ['.png', '.jpg', '.jpeg', '.gif'].include?(extname.downcase)
+ ['.png', '.jpg', '.jpeg', '.gif', '.svg'].include?(extname.downcase)
end
# Internal: Lookup mime type for extension.
diff --git a/lib/gitlab/ci/ansi2html.rb b/lib/gitlab/ci/ansi2html.rb
index fc3223e7442..7e348763e81 100644
--- a/lib/gitlab/ci/ansi2html.rb
+++ b/lib/gitlab/ci/ansi2html.rb
@@ -194,16 +194,10 @@ module Gitlab
end
def handle_new_line
- css_classes = []
-
- if @sections.any?
- css_classes = %w[section line] + sections.map { |section| "s_#{section}" }
- end
-
write_in_tag %{<br/>}
- write_raw %{<span class="#{css_classes.join(' ')}"></span>} if css_classes.any?
+
+ close_open_tags if @sections.any? && @lineno_in_section == 0
@lineno_in_section += 1
- open_new_tag
end
def handle_section(scanner)
@@ -310,11 +304,24 @@ module Gitlab
if @sections.any?
css_classes << "section"
- css_classes << "js-section-header section-header" if @lineno_in_section == 0
+
+ css_classes << if @lineno_in_section == 0
+ "js-section-header section-header"
+ else
+ "line"
+ end
+
css_classes += sections.map { |section| "js-s-#{section}" }
end
- @out << %{<span class="#{css_classes.join(' ')}">}
+ close_open_tags
+
+ @out << if css_classes.any?
+ %{<span class="#{css_classes.join(' ')}">}
+ else
+ %{<span>}
+ end
+
@n_open_tags += 1
end
diff --git a/lib/gitlab/ci/config/entry/job.rb b/lib/gitlab/ci/config/entry/job.rb
index 5ab795359b8..2fd76bc3690 100644
--- a/lib/gitlab/ci/config/entry/job.rb
+++ b/lib/gitlab/ci/config/entry/job.rb
@@ -13,7 +13,7 @@ module Gitlab
ALLOWED_KEYS = %i[tags script only except type image services
allow_failure type stage when start_in artifacts cache
- dependencies before_script after_script variables
+ dependencies needs before_script after_script variables
environment coverage retry parallel extends].freeze
validations do
@@ -34,11 +34,22 @@ module Gitlab
message: 'should be on_success, on_failure, ' \
'always, manual or delayed' }
validates :dependencies, array_of_strings: true
+ validates :needs, array_of_strings: true
validates :extends, array_of_strings_or_string: true
end
validates :start_in, duration: { limit: '1 day' }, if: :delayed?
validates :start_in, absence: true, unless: :delayed?
+
+ validate do
+ next unless dependencies.present?
+ next unless needs.present?
+
+ missing_needs = dependencies - needs
+ if missing_needs.any?
+ errors.add(:dependencies, "the #{missing_needs.join(", ")} should be part of needs")
+ end
+ end
end
entry :before_script, Entry::Script,
@@ -95,10 +106,10 @@ module Gitlab
helpers :before_script, :script, :stage, :type, :after_script,
:cache, :image, :services, :only, :except, :variables,
:artifacts, :environment, :coverage, :retry,
- :parallel
+ :parallel, :needs
attributes :script, :tags, :allow_failure, :when, :dependencies,
- :retry, :parallel, :extends, :start_in
+ :needs, :retry, :parallel, :extends, :start_in
def self.matching?(name, config)
!name.to_s.start_with?('.') &&
@@ -178,7 +189,8 @@ module Gitlab
parallel: parallel_defined? ? parallel_value.to_i : nil,
artifacts: artifacts_value,
after_script: after_script_value,
- ignore: ignored? }
+ ignore: ignored?,
+ needs: needs_defined? ? needs_value : nil }
end
end
end
diff --git a/lib/gitlab/ci/config/normalizer.rb b/lib/gitlab/ci/config/normalizer.rb
index 99356226ef9..09f9bf5f69f 100644
--- a/lib/gitlab/ci/config/normalizer.rb
+++ b/lib/gitlab/ci/config/normalizer.rb
@@ -4,61 +4,63 @@ module Gitlab
module Ci
class Config
class Normalizer
+ include Gitlab::Utils::StrongMemoize
+
def initialize(jobs_config)
@jobs_config = jobs_config
end
def normalize_jobs
- extract_parallelized_jobs!
- return @jobs_config if @parallelized_jobs.empty?
+ return @jobs_config if parallelized_jobs.empty?
+
+ expand_parallelize_jobs do |job_name, config|
+ if config[:dependencies]
+ config[:dependencies] = expand_names(config[:dependencies])
+ end
- parallelized_config = parallelize_jobs
- parallelize_dependencies(parallelized_config)
+ if config[:needs]
+ config[:needs] = expand_names(config[:needs])
+ end
+
+ config
+ end
end
private
- def extract_parallelized_jobs!
- @parallelized_jobs = {}
+ def expand_names(job_names)
+ return unless job_names
- @jobs_config.each do |job_name, config|
- if config[:parallel]
- @parallelized_jobs[job_name] = self.class.parallelize_job_names(job_name, config[:parallel])
- end
+ job_names.flat_map do |job_name|
+ parallelized_jobs[job_name.to_sym] || job_name
end
-
- @parallelized_jobs
end
- def parallelize_jobs
- @jobs_config.each_with_object({}) do |(job_name, config), hash|
- if @parallelized_jobs.key?(job_name)
- @parallelized_jobs[job_name].each { |name, index| hash[name.to_sym] = config.merge(name: name, instance: index) }
- else
- hash[job_name] = config
- end
+ def parallelized_jobs
+ strong_memoize(:parallelized_jobs) do
+ @jobs_config.each_with_object({}) do |(job_name, config), hash|
+ next unless config[:parallel]
- hash
+ hash[job_name] = self.class.parallelize_job_names(job_name, config[:parallel])
+ end
end
end
- def parallelize_dependencies(parallelized_config)
- parallelized_job_names = @parallelized_jobs.keys.map(&:to_s)
- parallelized_config.each_with_object({}) do |(job_name, config), hash|
- if config[:dependencies] && (intersection = config[:dependencies] & parallelized_job_names).any?
- parallelized_deps = intersection.flat_map { |dep| @parallelized_jobs[dep.to_sym].map(&:first) }
- deps = config[:dependencies] - intersection + parallelized_deps
- hash[job_name] = config.merge(dependencies: deps)
+ def expand_parallelize_jobs
+ @jobs_config.each_with_object({}) do |(job_name, config), hash|
+ if parallelized_jobs.key?(job_name)
+ parallelized_jobs[job_name].each_with_index do |name, index|
+ hash[name.to_sym] =
+ yield(name, config.merge(name: name, instance: index + 1))
+ end
else
- hash[job_name] = config
+ hash[job_name] = yield(job_name, config)
end
-
- hash
end
end
def self.parallelize_job_names(name, total)
- Array.new(total) { |index| ["#{name} #{index + 1}/#{total}", index + 1] }
+ Array.new(total) { |index| "#{name} #{index + 1}/#{total}" }
end
end
end
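To illustrate the simplified return value of parallelize_job_names above, a pure-Ruby sketch of how a job with parallel: 3 is expanded and how references to it are rewritten:

name  = 'rspec'
total = 3
expanded = Array.new(total) { |index| "#{name} #{index + 1}/#{total}" }
# => ["rspec 1/3", "rspec 2/3", "rspec 3/3"]

# expand_names then swaps any `dependencies:`/`needs:` entry pointing at
# "rspec" for the expanded names, leaving non-parallel job names untouched.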
diff --git a/lib/gitlab/ci/pipeline/seed/build.rb b/lib/gitlab/ci/pipeline/seed/build.rb
index d8296940a04..ab0d4c38ab6 100644
--- a/lib/gitlab/ci/pipeline/seed/build.rb
+++ b/lib/gitlab/ci/pipeline/seed/build.rb
@@ -9,9 +9,10 @@ module Gitlab
delegate :dig, to: :@attributes
- def initialize(pipeline, attributes)
+ def initialize(pipeline, attributes, previous_stages)
@pipeline = pipeline
@attributes = attributes
+ @previous_stages = previous_stages
@only = Gitlab::Ci::Build::Policy
.fabricate(attributes.delete(:only))
@@ -19,10 +20,15 @@ module Gitlab
.fabricate(attributes.delete(:except))
end
+ def name
+ dig(:name)
+ end
+
def included?
strong_memoize(:inclusion) do
- @only.all? { |spec| spec.satisfied_by?(@pipeline, self) } &&
- @except.none? { |spec| spec.satisfied_by?(@pipeline, self) }
+ all_of_only? &&
+ none_of_except? &&
+ all_of_needs?
end
end
@@ -42,6 +48,25 @@ module Gitlab
@attributes.to_h.dig(:options, :trigger).present?
end
+ def all_of_only?
+ @only.all? { |spec| spec.satisfied_by?(@pipeline, self) }
+ end
+
+ def none_of_except?
+ @except.none? { |spec| spec.satisfied_by?(@pipeline, self) }
+ end
+
+ def all_of_needs?
+ return true unless Feature.enabled?(:ci_dag_support, @pipeline.project)
+ return true if dig(:needs_attributes).nil?
+
+ dig(:needs_attributes).all? do |need|
+ @previous_stages.any? do |stage|
+ stage.seeds_names.include?(need[:name])
+ end
+ end
+ end
+
def to_resource
strong_memoize(:resource) do
if bridge?
diff --git a/lib/gitlab/ci/pipeline/seed/stage.rb b/lib/gitlab/ci/pipeline/seed/stage.rb
index 9c15064756a..7c737027445 100644
--- a/lib/gitlab/ci/pipeline/seed/stage.rb
+++ b/lib/gitlab/ci/pipeline/seed/stage.rb
@@ -10,12 +10,13 @@ module Gitlab
delegate :size, to: :seeds
delegate :dig, to: :seeds
- def initialize(pipeline, attributes)
+ def initialize(pipeline, attributes, previous_stages)
@pipeline = pipeline
@attributes = attributes
+ @previous_stages = previous_stages
@builds = attributes.fetch(:builds).map do |attributes|
- Seed::Build.new(@pipeline, attributes)
+ Seed::Build.new(@pipeline, attributes, previous_stages)
end
end
@@ -32,6 +33,12 @@ module Gitlab
end
end
+ def seeds_names
+ strong_memoize(:seeds_names) do
+ seeds.map(&:name).to_set
+ end
+ end
+
def included?
seeds.any?
end
@@ -39,13 +46,7 @@ module Gitlab
def to_resource
strong_memoize(:stage) do
::Ci::Stage.new(attributes).tap do |stage|
- seeds.each do |seed|
- if seed.bridge?
- stage.bridges << seed.to_resource
- else
- stage.builds << seed.to_resource
- end
- end
+ stage.statuses = seeds.map(&:to_resource)
end
end
end
diff --git a/lib/gitlab/ci/templates/Security/Container-Scanning.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/Container-Scanning.gitlab-ci.yml
index 5ad624bb15f..c963d6ed1c4 100644
--- a/lib/gitlab/ci/templates/Security/Container-Scanning.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Security/Container-Scanning.gitlab-ci.yml
@@ -5,6 +5,7 @@ container_scanning:
image: docker:stable
variables:
DOCKER_DRIVER: overlay2
+ DOCKER_TLS_CERTDIR: ""
# Defining two new variables based on GitLab's CI/CD predefined variables
# https://docs.gitlab.com/ee/ci/variables/#predefined-environment-variables
CI_APPLICATION_REPOSITORY: $CI_REGISTRY_IMAGE/$CI_COMMIT_REF_SLUG
@@ -22,8 +23,8 @@ container_scanning:
DOCKER_SERVICE: docker
DOCKER_HOST: tcp://${DOCKER_SERVICE}:2375/
# https://hub.docker.com/r/arminc/clair-local-scan/tags
- CLAIR_LOCAL_SCAN_VERSION: v2.0.8_fe9b059d930314b54c78f75afe265955faf4fdc1
- CLAIR_EXECUTABLE_VERSION: v11
+ CLAIR_LOCAL_SCAN_VERSION: v2.0.8_0ed98e9ead65a51ba53f7cc53fa5e80c92169207
+ CLAIR_EXECUTABLE_VERSION: v12
## Disable the proxy for clair-local-scan, otherwise Container Scanning will
## fail when a proxy is used.
NO_PROXY: ${DOCKER_SERVICE},localhost
diff --git a/lib/gitlab/ci/yaml_processor.rb b/lib/gitlab/ci/yaml_processor.rb
index a0bbf3c23a2..998130e5bd0 100644
--- a/lib/gitlab/ci/yaml_processor.rb
+++ b/lib/gitlab/ci/yaml_processor.rb
@@ -40,6 +40,7 @@ module Gitlab
environment: job[:environment_name],
coverage_regex: job[:coverage],
yaml_variables: yaml_variables(name),
+ needs_attributes: job[:needs]&.map { |need| { name: need } },
options: {
image: job[:image],
services: job[:services],
@@ -108,6 +109,7 @@ module Gitlab
validate_job_stage!(name, job)
validate_job_dependencies!(name, job)
+ validate_job_needs!(name, job)
validate_job_environment!(name, job)
end
end
@@ -152,6 +154,22 @@ module Gitlab
end
end
+ def validate_job_needs!(name, job)
+ return unless job[:needs]
+
+ stage_index = @stages.index(job[:stage])
+
+ job[:needs].each do |need|
+ raise ValidationError, "#{name} job: undefined need: #{need}" unless @jobs[need.to_sym]
+
+ needs_stage_index = @stages.index(@jobs[need.to_sym][:stage])
+
+ unless needs_stage_index.present? && needs_stage_index < stage_index
+ raise ValidationError, "#{name} job: need #{need} is not defined in prior stages"
+ end
+ end
+ end
+
def validate_job_environment!(name, job)
return unless job[:environment]
return unless job[:environment].is_a?(Hash)
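A hedged sketch of the new needs validation in practice, assuming Gitlab::Ci::YamlProcessor.new is the entry point that runs these job validations (job names and scripts below are illustrative):

config = <<~YAML
  build:
    stage: build
    script: make

  test:
    stage: test
    script: make test
    needs: [build]      # OK: "build" exists and runs in a prior stage

  deploy:
    stage: deploy
    script: make deploy
    needs: [release]    # raises "deploy job: undefined need: release"
YAML

Gitlab::Ci::YamlProcessor.new(config)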
diff --git a/lib/gitlab/cycle_analytics/base_query.rb b/lib/gitlab/cycle_analytics/base_query.rb
index 9c98c0bfbf2..459bb5177b5 100644
--- a/lib/gitlab/cycle_analytics/base_query.rb
+++ b/lib/gitlab/cycle_analytics/base_query.rb
@@ -19,9 +19,10 @@ module Gitlab
.join(projects_table).on(issue_table[:project_id].eq(projects_table[:id]))
.join(routes_table).on(projects_table[:namespace_id].eq(routes_table[:source_id]))
.project(issue_table[:project_id].as("project_id"))
- .where(issue_table[:project_id].in(project_ids))
- .where(routes_table[:source_type].eq('Namespace'))
- .where(issue_table[:created_at].gteq(options[:from]))
+ .project(projects_table[:path].as("project_path"))
+ .project(routes_table[:path].as("namespace_path"))
+
+ query = limit_query(query, project_ids)
# Load merge_requests
@@ -30,6 +31,12 @@ module Gitlab
query
end
+ def limit_query(query, project_ids)
+ query.where(issue_table[:project_id].in(project_ids))
+ .where(routes_table[:source_type].eq('Namespace'))
+ .where(issue_table[:created_at].gteq(options[:from]))
+ end
+
def load_merge_requests(query)
query.join(mr_table, Arel::Nodes::OuterJoin)
.on(mr_table[:id].eq(mr_closing_issues_table[:merge_request_id]))
diff --git a/lib/gitlab/cycle_analytics/code_event_fetcher.rb b/lib/gitlab/cycle_analytics/code_event_fetcher.rb
index 1e4e9b9e02c..fcc282bf7a6 100644
--- a/lib/gitlab/cycle_analytics/code_event_fetcher.rb
+++ b/lib/gitlab/cycle_analytics/code_event_fetcher.rb
@@ -11,9 +11,7 @@ module Gitlab
mr_table[:id],
mr_table[:created_at],
mr_table[:state],
- mr_table[:author_id],
- projects_table[:name],
- routes_table[:path]]
+ mr_table[:author_id]]
@order = mr_table[:created_at]
super(*args)
diff --git a/lib/gitlab/cycle_analytics/issue_event_fetcher.rb b/lib/gitlab/cycle_analytics/issue_event_fetcher.rb
index 2d03e425a6a..6914cf24c19 100644
--- a/lib/gitlab/cycle_analytics/issue_event_fetcher.rb
+++ b/lib/gitlab/cycle_analytics/issue_event_fetcher.rb
@@ -10,9 +10,7 @@ module Gitlab
issue_table[:iid],
issue_table[:id],
issue_table[:created_at],
- issue_table[:author_id],
- projects_table[:name],
- routes_table[:path]]
+ issue_table[:author_id]]
super(*args)
end
diff --git a/lib/gitlab/cycle_analytics/issue_helper.rb b/lib/gitlab/cycle_analytics/issue_helper.rb
index 0fc4f1dd41a..295eca5edca 100644
--- a/lib/gitlab/cycle_analytics/issue_helper.rb
+++ b/lib/gitlab/cycle_analytics/issue_helper.rb
@@ -8,12 +8,19 @@ module Gitlab
.join(projects_table).on(issue_table[:project_id].eq(projects_table[:id]))
.join(routes_table).on(projects_table[:namespace_id].eq(routes_table[:source_id]))
.project(issue_table[:project_id].as("project_id"))
- .where(issue_table[:project_id].in(project_ids))
+ .project(projects_table[:path].as("project_path"))
+ .project(routes_table[:path].as("namespace_path"))
+
+ query = limit_query(query, project_ids)
+
+ query
+ end
+
+ def limit_query(query, project_ids)
+ query.where(issue_table[:project_id].in(project_ids))
.where(routes_table[:source_type].eq('Namespace'))
.where(issue_table[:created_at].gteq(options[:from]))
.where(issue_metrics_table[:first_added_to_board_at].not_eq(nil).or(issue_metrics_table[:first_associated_with_milestone_at].not_eq(nil)))
-
- query
end
end
end
diff --git a/lib/gitlab/cycle_analytics/plan_event_fetcher.rb b/lib/gitlab/cycle_analytics/plan_event_fetcher.rb
index 77cc358daa9..bad02e00a13 100644
--- a/lib/gitlab/cycle_analytics/plan_event_fetcher.rb
+++ b/lib/gitlab/cycle_analytics/plan_event_fetcher.rb
@@ -10,9 +10,7 @@ module Gitlab
issue_table[:iid],
issue_table[:id],
issue_table[:created_at],
- issue_table[:author_id],
- projects_table[:name],
- routes_table[:path]]
+ issue_table[:author_id]]
super(*args)
end
diff --git a/lib/gitlab/cycle_analytics/plan_helper.rb b/lib/gitlab/cycle_analytics/plan_helper.rb
index c3f742503a9..a63ae58ad21 100644
--- a/lib/gitlab/cycle_analytics/plan_helper.rb
+++ b/lib/gitlab/cycle_analytics/plan_helper.rb
@@ -8,14 +8,16 @@ module Gitlab
.join(projects_table).on(issue_table[:project_id].eq(projects_table[:id]))
.join(routes_table).on(projects_table[:namespace_id].eq(routes_table[:source_id]))
.project(issue_table[:project_id].as("project_id"))
+ .project(projects_table[:path].as("project_path"))
+ .project(routes_table[:path].as("namespace_path"))
.where(issue_table[:project_id].in(project_ids))
.where(routes_table[:source_type].eq('Namespace'))
- query = add_conditions_to_query(query)
+ query = limit_query(query)
query
end
- def add_conditions_to_query(query)
+ def limit_query(query)
query.where(issue_table[:created_at].gteq(options[:from]))
.where(issue_metrics_table[:first_added_to_board_at].not_eq(nil).or(issue_metrics_table[:first_associated_with_milestone_at].not_eq(nil)))
.where(issue_metrics_table[:first_mentioned_in_commit_at].not_eq(nil))
diff --git a/lib/gitlab/cycle_analytics/production_event_fetcher.rb b/lib/gitlab/cycle_analytics/production_event_fetcher.rb
index 404b2460814..8843ab2bcb9 100644
--- a/lib/gitlab/cycle_analytics/production_event_fetcher.rb
+++ b/lib/gitlab/cycle_analytics/production_event_fetcher.rb
@@ -11,7 +11,6 @@ module Gitlab
issue_table[:id],
issue_table[:created_at],
issue_table[:author_id],
- projects_table[:name],
routes_table[:path]]
super(*args)
diff --git a/lib/gitlab/cycle_analytics/review_event_fetcher.rb b/lib/gitlab/cycle_analytics/review_event_fetcher.rb
index 6acd12517fa..4b5d79097b7 100644
--- a/lib/gitlab/cycle_analytics/review_event_fetcher.rb
+++ b/lib/gitlab/cycle_analytics/review_event_fetcher.rb
@@ -11,9 +11,7 @@ module Gitlab
mr_table[:id],
mr_table[:created_at],
mr_table[:state],
- mr_table[:author_id],
- projects_table[:name],
- routes_table[:path]]
+ mr_table[:author_id]]
super(*args)
end
diff --git a/lib/gitlab/ee_compat_check.rb b/lib/gitlab/ee_compat_check.rb
index 01fd261404b..86e532766b1 100644
--- a/lib/gitlab/ee_compat_check.rb
+++ b/lib/gitlab/ee_compat_check.rb
@@ -7,7 +7,7 @@ module Gitlab
CANONICAL_CE_PROJECT_URL = 'https://gitlab.com/gitlab-org/gitlab-ce'.freeze
CANONICAL_EE_REPO_URL = 'https://gitlab.com/gitlab-org/gitlab-ee.git'.freeze
CHECK_DIR = Rails.root.join('ee_compat_check')
- IGNORED_FILES_REGEX = /VERSION|CHANGELOG\.md/i.freeze
+ IGNORED_FILES_REGEX = /VERSION|CHANGELOG\.md|doc\/.+/i.freeze
PLEASE_READ_THIS_BANNER = %Q{
============================================================
===================== PLEASE READ THIS =====================
diff --git a/lib/gitlab/exclusive_lease_helpers.rb b/lib/gitlab/exclusive_lease_helpers.rb
index 7961d4bbd6e..61eb030563d 100644
--- a/lib/gitlab/exclusive_lease_helpers.rb
+++ b/lib/gitlab/exclusive_lease_helpers.rb
@@ -15,17 +15,18 @@ module Gitlab
raise ArgumentError, 'Key needs to be specified' unless key
lease = Gitlab::ExclusiveLease.new(key, timeout: ttl)
+ retried = false
until uuid = lease.try_obtain
# Keep trying until we obtain the lease. To prevent hammering Redis too
# much we'll wait for a bit.
sleep(sleep_sec)
- break if (retries -= 1) < 0
+ (retries -= 1) < 0 ? break : retried ||= true
end
raise FailedToObtainLockError, 'Failed to obtain a lock' unless uuid
- yield
+ yield(retried)
ensure
Gitlab::ExclusiveLease.cancel(key, uuid)
end
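A minimal sketch of how callers can use the lease information now yielded above, assuming the surrounding helper is Gitlab::ExclusiveLeaseHelpers#in_lock (the lock key and block body are illustrative):

in_lock('merge:queue:42', ttl: 15.seconds, retries: 10, sleep_sec: 0.5) do |retried|
  # `retried` is true when at least one attempt to obtain the lease failed
  # before it was finally acquired, so the caller may want fresher state.
  reload_state if retried   # hypothetical helper
  perform_work              # hypothetical helper
end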
diff --git a/lib/gitlab/http_connection_adapter.rb b/lib/gitlab/http_connection_adapter.rb
index 41eab3658bc..84eb60f3a5d 100644
--- a/lib/gitlab/http_connection_adapter.rb
+++ b/lib/gitlab/http_connection_adapter.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
# This class is part of the Gitlab::HTTP wrapper. Depending on the value
-# of the global setting allow_local_requests_from_hooks_and_services this adapter
+# of the global setting allow_local_requests_from_web_hooks_and_services this adapter
# will allow/block connection to internal IPs and/or urls.
#
# This functionality can be overridden by providing the setting the option
@@ -38,7 +38,7 @@ module Gitlab
end
def allow_settings_local_requests?
- Gitlab::CurrentSettings.allow_local_requests_from_hooks_and_services?
+ Gitlab::CurrentSettings.allow_local_requests_from_web_hooks_and_services?
end
end
end
diff --git a/lib/gitlab/kubernetes/helm/delete_command.rb b/lib/gitlab/kubernetes/helm/delete_command.rb
index aeba4a54b6d..dcf22e7abb6 100644
--- a/lib/gitlab/kubernetes/helm/delete_command.rb
+++ b/lib/gitlab/kubernetes/helm/delete_command.rb
@@ -43,17 +43,6 @@ module Gitlab
command.shelljoin
end
-
- def optional_tls_flags
- return [] unless files.key?(:'ca.pem')
-
- [
- '--tls',
- '--tls-ca-cert', "#{files_dir}/ca.pem",
- '--tls-cert', "#{files_dir}/cert.pem",
- '--tls-key', "#{files_dir}/key.pem"
- ]
- end
end
end
end
diff --git a/lib/gitlab/kubernetes/helm/reset_command.rb b/lib/gitlab/kubernetes/helm/reset_command.rb
new file mode 100644
index 00000000000..37e1d8573ab
--- /dev/null
+++ b/lib/gitlab/kubernetes/helm/reset_command.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Kubernetes
+ module Helm
+ class ResetCommand
+ include BaseCommand
+ include ClientCommand
+
+ attr_reader :name, :files
+
+ def initialize(name:, rbac:, files:)
+ @name = name
+ @files = files
+ @rbac = rbac
+ end
+
+ def generate_script
+ super + [
+ reset_helm_command,
+ delete_tiller_replicaset
+ ].join("\n")
+ end
+
+ def rbac?
+ @rbac
+ end
+
+ def pod_name
+ "uninstall-#{name}"
+ end
+
+ private
+
+ # This method can be deleted once we upgrade Helm to > 12.13.0
+ # https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/27096#note_159695900
+ #
+ # The removal of this method is tracked here:
+ # https://gitlab.com/gitlab-org/gitlab-ce/issues/52791#note_199374155
+ def delete_tiller_replicaset
+ command = %w[kubectl delete replicaset -n gitlab-managed-apps -l name=tiller]
+
+ command.shelljoin
+ end
+
+ def reset_helm_command
+ command = %w[helm reset] + optional_tls_flags
+
+ command.shelljoin
+ end
+ end
+ end
+ end
+end
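For reference, a hedged sketch of how the new ResetCommand above could be built and turned into a pod script (argument values are illustrative; files would normally carry the optional TLS material):

command = Gitlab::Kubernetes::Helm::ResetCommand.new(
  name: 'helm',
  rbac:  true,
  files: {}                 # e.g. ca.pem/cert.pem/key.pem when TLS is used
)

command.pod_name            # => "uninstall-helm"
command.generate_script     # => client setup, then "helm reset", then
                            #    "kubectl delete replicaset -n gitlab-managed-apps -l name=tiller"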
diff --git a/lib/gitlab/kubernetes/kube_client.rb b/lib/gitlab/kubernetes/kube_client.rb
index 1350924cd76..64317225ec6 100644
--- a/lib/gitlab/kubernetes/kube_client.rb
+++ b/lib/gitlab/kubernetes/kube_client.rb
@@ -128,7 +128,7 @@ module Gitlab
private
def validate_url!
- return if Gitlab::CurrentSettings.allow_local_requests_from_hooks_and_services?
+ return if Gitlab::CurrentSettings.allow_local_requests_from_web_hooks_and_services?
Gitlab::UrlBlocker.validate!(api_prefix, allow_local_network: false)
end
diff --git a/lib/gitlab/metrics/samplers/influx_sampler.rb b/lib/gitlab/metrics/samplers/influx_sampler.rb
index 5138b37f83e..1eae0a7bf45 100644
--- a/lib/gitlab/metrics/samplers/influx_sampler.rb
+++ b/lib/gitlab/metrics/samplers/influx_sampler.rb
@@ -15,19 +15,14 @@ module Gitlab
@last_step = nil
@metrics = []
-
- @last_minor_gc = Delta.new(GC.stat[:minor_gc_count])
- @last_major_gc = Delta.new(GC.stat[:major_gc_count])
end
def sample
sample_memory_usage
sample_file_descriptors
- sample_gc
flush
ensure
- GC::Profiler.clear
@metrics.clear
end
@@ -43,23 +38,6 @@ module Gitlab
add_metric('file_descriptors', value: System.file_descriptor_count)
end
- def sample_gc
- time = GC::Profiler.total_time * 1000.0
- stats = GC.stat.merge(total_time: time)
-
- # We want the difference of GC runs compared to the last sample, not the
- # total amount since the process started.
- stats[:minor_gc_count] =
- @last_minor_gc.compared_with(stats[:minor_gc_count])
-
- stats[:major_gc_count] =
- @last_major_gc.compared_with(stats[:major_gc_count])
-
- stats[:count] = stats[:minor_gc_count] + stats[:major_gc_count]
-
- add_metric('gc_statistics', stats)
- end
-
def add_metric(series, values, tags = {})
prefix = sidekiq? ? 'sidekiq_' : 'rails_'
diff --git a/lib/gitlab/metrics/samplers/ruby_sampler.rb b/lib/gitlab/metrics/samplers/ruby_sampler.rb
index 1e200db0baf..3bfa3da35e0 100644
--- a/lib/gitlab/metrics/samplers/ruby_sampler.rb
+++ b/lib/gitlab/metrics/samplers/ruby_sampler.rb
@@ -6,7 +6,11 @@ module Gitlab
module Metrics
module Samplers
class RubySampler < BaseSampler
+ GC_REPORT_BUCKETS = [0.001, 0.002, 0.005, 0.01, 0.05, 0.1, 0.5].freeze
+
def initialize(interval)
+ GC::Profiler.clear
+
metrics[:process_start_time_seconds].set(labels, Time.now.to_i)
super
@@ -37,7 +41,7 @@ module Gitlab
process_resident_memory_bytes: ::Gitlab::Metrics.gauge(with_prefix(:process, :resident_memory_bytes), 'Memory used', labels),
process_start_time_seconds: ::Gitlab::Metrics.gauge(with_prefix(:process, :start_time_seconds), 'Process start time seconds'),
sampler_duration: ::Gitlab::Metrics.counter(with_prefix(:sampler, :duration_seconds_total), 'Sampler time', labels),
- total_time: ::Gitlab::Metrics.counter(with_prefix(:gc, :duration_seconds_total), 'Total GC time', labels)
+ gc_duration_seconds: ::Gitlab::Metrics.histogram(with_prefix(:gc, :duration_seconds), 'GC time', labels, GC_REPORT_BUCKETS)
}
GC.stat.keys.each do |key|
@@ -57,20 +61,27 @@ module Gitlab
sample_gc
metrics[:sampler_duration].increment(labels, System.monotonic_time - start_time)
- ensure
- GC::Profiler.clear
end
private
def sample_gc
- # Collect generic GC stats.
+ # Observe all GC samples
+ sample_gc_reports.each do |report|
+ metrics[:gc_duration_seconds].observe(labels, report[:GC_TIME])
+ end
+
+ # Collect generic GC stats
GC.stat.each do |key, value|
metrics[key].set(labels, value)
end
+ end
- # Collect the GC time since last sample in float seconds.
- metrics[:total_time].increment(labels, GC::Profiler.total_time)
+ def sample_gc_reports
+ GC::Profiler.enable
+ GC::Profiler.raw_data
+ ensure
+ GC::Profiler.clear
end
def set_memory_usage_metrics
diff --git a/lib/gitlab/octokit/middleware.rb b/lib/gitlab/octokit/middleware.rb
index 2f762957d1b..2dd7d08a58b 100644
--- a/lib/gitlab/octokit/middleware.rb
+++ b/lib/gitlab/octokit/middleware.rb
@@ -16,7 +16,7 @@ module Gitlab
private
def allow_local_requests?
- Gitlab::CurrentSettings.allow_local_requests_from_hooks_and_services?
+ Gitlab::CurrentSettings.allow_local_requests_from_web_hooks_and_services?
end
end
end
diff --git a/lib/gitlab/profiler.rb b/lib/gitlab/profiler.rb
index 615c0ec374c..ec7671f9a8b 100644
--- a/lib/gitlab/profiler.rb
+++ b/lib/gitlab/profiler.rb
@@ -21,6 +21,9 @@ module Gitlab
lib/gitlab/profiler.rb
lib/gitlab/correlation_id.rb
lib/gitlab/webpack/dev_server_middleware.rb
+ lib/gitlab/sidekiq_status/
+ lib/gitlab/sidekiq_logging/
+ lib/gitlab/sidekiq_middleware/
].freeze
# Takes a URL to profile (can be a fully-qualified URL, or an absolute path)
diff --git a/lib/gitlab/project_search_results.rb b/lib/gitlab/project_search_results.rb
index 0f3b97e2317..827f4f77f36 100644
--- a/lib/gitlab/project_search_results.rb
+++ b/lib/gitlab/project_search_results.rb
@@ -108,7 +108,7 @@ module Gitlab
# rubocop: disable CodeReuse/ActiveRecord
def notes_finder(type)
- NotesFinder.new(project, @current_user, search: query, target_type: type).execute.user.order('updated_at DESC')
+ NotesFinder.new(@current_user, search: query, target_type: type, project: project).execute.user.order('updated_at DESC')
end
# rubocop: enable CodeReuse/ActiveRecord
diff --git a/lib/peek/views/redis_detailed.rb b/lib/peek/views/redis_detailed.rb
index b95307deddb..f36f581d5e9 100644
--- a/lib/peek/views/redis_detailed.rb
+++ b/lib/peek/views/redis_detailed.rb
@@ -16,6 +16,7 @@ module Gitlab
private
def add_call_details(duration, args)
+ return unless peek_enabled?
# redis-rb passes an array (e.g. [:get, key])
return unless args.length == 1
@@ -26,6 +27,10 @@ module Gitlab
}
end
+ def peek_enabled?
+ Gitlab::SafeRequestStore.store[:peek_enabled]
+ end
+
def detail_store
::Gitlab::SafeRequestStore['redis_call_details'] ||= []
end
diff --git a/lib/peek/views/rugged.rb b/lib/peek/views/rugged.rb
index 6b9d3e7b1a3..18b3f422852 100644
--- a/lib/peek/views/rugged.rb
+++ b/lib/peek/views/rugged.rb
@@ -29,8 +29,12 @@ module Peek
def format_args(args)
args.map do |arg|
- # Needed to avoid infinite as_json calls
- if arg.is_a?(Gitlab::Git::Repository)
+ # ActiveSupport::JSON recursively calls as_json on all
+ # instance variables, and if that instance variable points to
+ # something that refers back to the same instance, we can wind
+ # up in an infinite loop. Currently this only seems to happen with
+ # Gitlab::Git::Repository and ::Repository.
+ if arg.instance_variables.present?
arg.to_s
else
arg
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index 709226e686c..8cf70014256 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -948,6 +948,12 @@ msgstr ""
msgid "Allow requests to the local network from hooks and services."
msgstr ""
+msgid "Allow requests to the local network from system hooks"
+msgstr ""
+
+msgid "Allow requests to the local network from web hooks and services"
+msgstr ""
+
msgid "Allow this key to push to repository as well? (Default only allows pull access.)"
msgstr ""
@@ -2647,7 +2653,7 @@ msgstr ""
msgid "ClusterIntegration|Kubernetes cluster name"
msgstr ""
-msgid "ClusterIntegration|Kubernetes cluster was successfully created on Google Kubernetes Engine. Refresh the page to see Kubernetes cluster's details"
+msgid "ClusterIntegration|Kubernetes cluster was successfully created on Google Kubernetes Engine."
msgstr ""
msgid "ClusterIntegration|Kubernetes clusters allow you to use review apps, deploy your applications, run your pipelines, and much more in an easy way."
@@ -2818,12 +2824,15 @@ msgstr ""
msgid "ClusterIntegration|The associated IP and all deployed services will be deleted and cannot be restored. Uninstalling Knative will also remove Istio from your cluster. This will not effect any other applications."
msgstr ""
-msgid "ClusterIntegration|The associated certifcate will be deleted and cannot be restored."
+msgid "ClusterIntegration|The associated Tiller pod will be deleted and cannot be restored."
msgstr ""
msgid "ClusterIntegration|The associated load balancer and IP will be deleted and cannot be restored."
msgstr ""
+msgid "ClusterIntegration|The associated private key will be deleted and cannot be restored."
+msgstr ""
+
msgid "ClusterIntegration|The endpoint is in the process of being assigned. Please check your Kubernetes cluster or Quotas on Google Kubernetes Engine if it takes a long time."
msgstr ""
@@ -3282,6 +3291,9 @@ msgstr ""
msgid "Copy token to clipboard"
msgstr ""
+msgid "Could not add prometheus URL to whitelist"
+msgstr ""
+
msgid "Could not authorize chat nickname. Try again!"
msgstr ""
@@ -4049,12 +4061,6 @@ msgstr ""
msgid "Edit public deploy key"
msgstr ""
-msgid "Editor|%{mdLinkStart}Markdown is supported%{mdLinkEnd}"
-msgstr ""
-
-msgid "Editor|%{mdLinkStart}Markdown%{mdLinkEnd} and %{actionsLinkStart}quick actions%{actionsLinkEnd} are supported"
-msgstr ""
-
msgid "Email"
msgstr ""
@@ -6505,12 +6511,18 @@ msgstr ""
msgid "Mark to do as done"
msgstr ""
+msgid "Markdown"
+msgstr ""
+
msgid "Markdown Help"
msgstr ""
msgid "Markdown enabled"
msgstr ""
+msgid "Markdown is supported"
+msgstr ""
+
msgid "Marked this %{noun} as Work In Progress."
msgstr ""
@@ -8732,6 +8744,9 @@ msgstr ""
msgid "ProjectsNew|Want to house several dependent projects under the same namespace? %{link_start}Create a group.%{link_end}"
msgstr ""
+msgid "Prometheus listen_address is not a valid URI"
+msgstr ""
+
msgid "PrometheusService|%{exporters} with %{metrics} were found"
msgstr ""
@@ -10969,7 +10984,7 @@ msgstr ""
msgid "The repository must be accessible over <code>http://</code>, <code>https://</code> or <code>git://</code>."
msgstr ""
-msgid "The repository must be accessible over <code>http://</code>, <code>https://</code>, <code>ssh://</code> and <code>git://</code>."
+msgid "The repository must be accessible over <code>http://</code>, <code>https://</code>, <code>ssh://</code> or <code>git://</code>."
msgstr ""
msgid "The review stage shows the time from creating the merge request to merging it. The data will automatically be added after you merge your first merge request."
@@ -13560,6 +13575,9 @@ msgstr ""
msgid "project avatar"
msgstr ""
+msgid "quick actions"
+msgstr ""
+
msgid "register"
msgstr ""
diff --git a/package.json b/package.json
index c97046b0e84..bf6000dc53d 100644
--- a/package.json
+++ b/package.json
@@ -38,7 +38,7 @@
"@babel/preset-env": "^7.4.4",
"@gitlab/csslab": "^1.9.0",
"@gitlab/svgs": "^1.67.0",
- "@gitlab/ui": "^5.11.1",
+ "@gitlab/ui": "5.12.1",
"apollo-cache-inmemory": "^1.5.1",
"apollo-client": "^2.5.1",
"apollo-link": "^1.2.11",
@@ -76,7 +76,6 @@
"diff": "^3.4.0",
"document-register-element": "1.13.1",
"dropzone": "^4.2.0",
- "echarts": "^4.2.0-rc.2",
"emoji-regex": "^7.0.3",
"emoji-unicode-version": "^0.2.1",
"exports-loader": "^0.7.0",
@@ -100,9 +99,9 @@
"mermaid": "^8.2.3",
"monaco-editor": "^0.15.6",
"monaco-editor-webpack-plugin": "^1.7.0",
- "mousetrap": "1.4.6",
+ "mousetrap": "^1.4.6",
"pdfjs-dist": "^2.0.943",
- "pikaday": "^1.6.1",
+ "pikaday": "^1.8.0",
"popper.js": "^1.14.7",
"prismjs": "^1.6.0",
"prosemirror-markdown": "^1.3.0",
diff --git a/qa/qa/specs/features/browser_ui/1_manage/login/login_via_oauth_spec.rb b/qa/qa/specs/features/browser_ui/1_manage/login/login_via_oauth_spec.rb
index a118176eb8a..db99488160b 100644
--- a/qa/qa/specs/features/browser_ui/1_manage/login/login_via_oauth_spec.rb
+++ b/qa/qa/specs/features/browser_ui/1_manage/login/login_via_oauth_spec.rb
@@ -1,7 +1,8 @@
# frozen_string_literal: true
module QA
- context 'Manage', :orchestrated, :oauth do
+ # Failure issue: https://gitlab.com/gitlab-org/quality/nightly/issues/121
+ context 'Manage', :orchestrated, :oauth, :quarantine do
describe 'OAuth login' do
it 'User logs in to GitLab with GitHub OAuth' do
Runtime::Browser.visit(:gitlab, Page::Main::Login)
diff --git a/qa/qa/specs/features/browser_ui/3_create/repository/push_over_http_file_size_spec.rb b/qa/qa/specs/features/browser_ui/3_create/repository/push_over_http_file_size_spec.rb
index 11fd570d131..2027a3c16aa 100644
--- a/qa/qa/specs/features/browser_ui/3_create/repository/push_over_http_file_size_spec.rb
+++ b/qa/qa/specs/features/browser_ui/3_create/repository/push_over_http_file_size_spec.rb
@@ -1,8 +1,7 @@
# frozen_string_literal: true
module QA
- # Failure issue: https://gitlab.com/gitlab-org/quality/nightly/issues/113
- context 'Create', :requires_admin, :quarantine do
+ context 'Create', :requires_admin do
describe 'push after setting the file size limit via admin/application_settings' do
before(:context) do
@project = Resource::Project.fabricate_via_api! do |p|
@@ -21,14 +20,21 @@ module QA
it 'push successful when the file size is under the limit' do
set_file_size_limit(5)
- push = push_new_file('oversize_file_1.bin', wait_for_push: true)
- expect(push.output).not_to have_content 'remote: fatal: pack exceeds maximum allowed size'
+
+ retry_on_fail do
+ push = push_new_file('oversize_file_1.bin', wait_for_push: true)
+
+ expect(push.output).not_to have_content 'remote: fatal: pack exceeds maximum allowed size'
+ end
end
it 'push fails when the file size is above the limit' do
set_file_size_limit(1)
- expect { push_new_file('oversize_file_2.bin', wait_for_push: false) }
- .to raise_error(QA::Git::Repository::RepositoryCommandError, /remote: fatal: pack exceeds maximum allowed size/)
+
+ retry_on_fail do
+ expect { push_new_file('oversize_file_2.bin', wait_for_push: false) }
+ .to raise_error(QA::Git::Repository::RepositoryCommandError, /remote: fatal: pack exceeds maximum allowed size/)
+ end
end
def set_file_size_limit(limit)
@@ -54,6 +60,22 @@ module QA
output
end
+
+ # Application settings are cached for up to a minute. So when we change
+ # the `receive_max_input_size` setting, the setting might not be applied
+ # for up to a minute. This caused the tests to intermittently fail.
+ # See https://gitlab.com/gitlab-org/quality/nightly/issues/113
+ #
+ # Instead of waiting a minute after changing the setting, we retry the
+ # attempt to push if it fails. Most of the time the setting is updated in
+ # under a minute, i.e., in fewer than 6 attempts with a 10 second sleep
+ # between attempts.
+ # See https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/30233#note_188616863
+ def retry_on_fail
+ Support::Retrier.retry_on_exception(max_attempts: 6, reload_page: nil, sleep_interval: 10) do
+ yield
+ end
+ end
end
end
end
diff --git a/rubocop/cop/rspec/top_level_describe_path.rb b/rubocop/cop/rspec/top_level_describe_path.rb
new file mode 100644
index 00000000000..61796e23af0
--- /dev/null
+++ b/rubocop/cop/rspec/top_level_describe_path.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'rubocop/rspec/top_level_describe'
+
+module RuboCop
+ module Cop
+ module RSpec
+ class TopLevelDescribePath < RuboCop::Cop::Cop
+ include RuboCop::RSpec::TopLevelDescribe
+
+ MESSAGE = 'A file with a top-level `describe` must end in _spec.rb.'
+ SHARED_EXAMPLES = %i[shared_examples shared_examples_for].freeze
+
+ def on_top_level_describe(node, args)
+ return if acceptable_file_path?(processed_source.buffer.name)
+ return if shared_example?(node)
+
+ add_offense(node, message: MESSAGE)
+ end
+
+ private
+
+ def acceptable_file_path?(path)
+ File.fnmatch?('*_spec.rb', path) || File.fnmatch?('*/frontend/fixtures/*', path)
+ end
+
+ def shared_example?(node)
+ node.ancestors.any? do |node|
+ node.respond_to?(:method_name) && SHARED_EXAMPLES.include?(node.method_name)
+ end
+ end
+ end
+ end
+ end
+end
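An illustrative offense/non-offense pair for the new cop above (file paths and class names are hypothetical):

# bad — app/services/foo_service.rb (not a *_spec.rb file)
describe FooService do
end

# good — spec/services/foo_service_spec.rb
describe FooService do
end

# exempt — a describe wrapped in shared_examples (see shared_example? above)
shared_examples 'a service' do
  describe '#execute' do
  end
end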
diff --git a/rubocop/rubocop.rb b/rubocop/rubocop.rb
index ba61a634d97..58a7ead6f13 100644
--- a/rubocop/rubocop.rb
+++ b/rubocop/rubocop.rb
@@ -32,6 +32,7 @@ require_relative 'cop/migration/update_large_table'
require_relative 'cop/project_path_helper'
require_relative 'cop/rspec/env_assignment'
require_relative 'cop/rspec/factories_in_migration_specs'
+require_relative 'cop/rspec/top_level_describe_path'
require_relative 'cop/qa/element_with_pattern'
require_relative 'cop/sidekiq_options_queue'
require_relative 'cop/destroy_all'
diff --git a/scripts/review_apps/review-apps.sh b/scripts/review_apps/review-apps.sh
index 408a0595f9e..4935c1342a3 100755
--- a/scripts/review_apps/review-apps.sh
+++ b/scripts/review_apps/review-apps.sh
@@ -340,31 +340,6 @@ function display_deployment_debug() {
fi
}
-function wait_for_review_app_to_be_accessible() {
- echoinfo "Waiting for the Review App at ${CI_ENVIRONMENT_URL} to be accessible..." true
-
- local interval=5
- local elapsed_seconds=0
- local max_seconds=$((2 * 60))
- while true; do
- local review_app_http_code
- review_app_http_code=$(curl --silent --output /dev/null --max-time 5 --write-out "%{http_code}" "${CI_ENVIRONMENT_URL}/users/sign_in")
- if [[ "${review_app_http_code}" -eq "200" ]] || [[ "${elapsed_seconds}" -gt "${max_seconds}" ]]; then
- break
- fi
-
- let "elapsed_seconds+=interval"
- sleep ${interval}
- done
-
- if [[ "${review_app_http_code}" -eq "200" ]]; then
- echoinfo "The Review App at ${CI_ENVIRONMENT_URL} is ready after ${elapsed_seconds} seconds!"
- else
- echoerr "The Review App at ${CI_ENVIRONMENT_URL} isn't ready after ${max_seconds} seconds of polling..."
- exit 1
- fi
-}
-
function add_license() {
if [ -z "${REVIEW_APPS_EE_LICENSE}" ]; then echo "License not found" && return; fi
diff --git a/spec/controllers/admin/clusters/applications_controller_spec.rb b/spec/controllers/admin/clusters/applications_controller_spec.rb
index cf202d88acc..9d6edcd80c0 100644
--- a/spec/controllers/admin/clusters/applications_controller_spec.rb
+++ b/spec/controllers/admin/clusters/applications_controller_spec.rb
@@ -84,7 +84,7 @@ describe Admin::Clusters::ApplicationsController do
patch :update, params: params
end
- let!(:application) { create(:clusters_applications_cert_managers, :installed, cluster: cluster) }
+ let!(:application) { create(:clusters_applications_cert_manager, :installed, cluster: cluster) }
let(:application_name) { application.name }
let(:params) { { application: application_name, id: cluster.id, email: "new-email@example.com" } }
diff --git a/spec/controllers/autocomplete_controller_spec.rb b/spec/controllers/autocomplete_controller_spec.rb
index eaa5d6cd073..6cdd61e7abd 100644
--- a/spec/controllers/autocomplete_controller_spec.rb
+++ b/spec/controllers/autocomplete_controller_spec.rb
@@ -222,6 +222,20 @@ describe AutocompleteController do
expect(response_user_ids).to contain_exactly(non_member.id)
end
end
+
+ context 'merge_request_iid parameter included' do
+ before do
+ sign_in(user)
+ end
+
+ it 'includes can_merge option to users' do
+ merge_request = create(:merge_request, source_project: project)
+
+ get(:users, params: { merge_request_iid: merge_request.iid, project_id: project.id })
+
+ expect(json_response.first).to have_key('can_merge')
+ end
+ end
end
context 'GET projects' do
diff --git a/spec/controllers/concerns/issuable_actions_spec.rb b/spec/controllers/concerns/issuable_actions_spec.rb
new file mode 100644
index 00000000000..7b0b4497f3f
--- /dev/null
+++ b/spec/controllers/concerns/issuable_actions_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe IssuableActions do
+ let(:project) { double('project') }
+ let(:user) { double('user') }
+ let(:issuable) { double('issuable') }
+ let(:finder_params_for_issuable) { {} }
+ let(:notes_result) { double('notes_result') }
+ let(:discussion_serializer) { double('discussion_serializer') }
+
+ let(:controller) do
+ klass = Class.new do
+ attr_reader :current_user, :project, :issuable
+
+ def self.before_action(action, params = nil)
+ end
+
+ include IssuableActions
+
+ def initialize(issuable, project, user, finder_params)
+ @issuable = issuable
+ @project = project
+ @current_user = user
+ @finder_params = finder_params
+ end
+
+ def finder_params_for_issuable
+ @finder_params
+ end
+
+ def params
+ {
+ notes_filter: 1
+ }
+ end
+
+ def prepare_notes_for_rendering(notes)
+ []
+ end
+
+ def render(options)
+ end
+ end
+
+ klass.new(issuable, project, user, finder_params_for_issuable)
+ end
+
+ describe '#discussions' do
+ before do
+ allow(user).to receive(:set_notes_filter)
+ allow(user).to receive(:user_preference)
+ allow(discussion_serializer).to receive(:represent)
+ end
+
+ it 'instantiates and calls NotesFinder as expected' do
+ expect(Discussion).to receive(:build_collection).and_return([])
+ expect(DiscussionSerializer).to receive(:new).and_return(discussion_serializer)
+ expect(NotesFinder).to receive(:new).with(user, finder_params_for_issuable).and_call_original
+
+ expect_any_instance_of(NotesFinder).to receive(:execute).and_return(notes_result)
+
+ expect(notes_result).to receive_messages(inc_relations_for_view: notes_result, includes: notes_result, fresh: notes_result)
+
+ controller.discussions
+ end
+ end
+end
diff --git a/spec/controllers/groups/clusters/applications_controller_spec.rb b/spec/controllers/groups/clusters/applications_controller_spec.rb
index 16a63536ea6..21533d1c89a 100644
--- a/spec/controllers/groups/clusters/applications_controller_spec.rb
+++ b/spec/controllers/groups/clusters/applications_controller_spec.rb
@@ -91,7 +91,7 @@ describe Groups::Clusters::ApplicationsController do
patch :update, params: params.merge(group_id: group)
end
- let!(:application) { create(:clusters_applications_cert_managers, :installed, cluster: cluster) }
+ let!(:application) { create(:clusters_applications_cert_manager, :installed, cluster: cluster) }
let(:application_name) { application.name }
let(:params) { { application: application_name, id: cluster.id, email: "new-email@example.com" } }
diff --git a/spec/controllers/help_controller_spec.rb b/spec/controllers/help_controller_spec.rb
index dbfacf4e42e..43c910da7a5 100644
--- a/spec/controllers/help_controller_spec.rb
+++ b/spec/controllers/help_controller_spec.rb
@@ -111,7 +111,7 @@ describe HelpController do
it 'renders the raw file' do
get :show,
params: {
- path: 'user/project/img/labels_default'
+ path: 'user/project/img/labels_default_v12_1'
},
format: :png
expect(response).to be_success
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index 32d14dce936..fab47aa4701 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -251,15 +251,13 @@ describe Projects::IssuesController do
end
end
- describe 'Redirect after sign in' do
+ # This spec runs as a request-style spec in order to invoke the
+ # Rails router. A controller-style spec matches the wrong route, and
+ # session['user_return_to'] becomes incorrect.
+ describe 'Redirect after sign in', type: :request do
context 'with an AJAX request' do
it 'does not store the visited URL' do
- get :show, params: {
- format: :json,
- namespace_id: project.namespace,
- project_id: project,
- id: issue.iid
- }, xhr: true
+ get project_issue_path(project, issue), xhr: true
expect(session['user_return_to']).to be_blank
end
@@ -267,14 +265,9 @@ describe Projects::IssuesController do
context 'without an AJAX request' do
it 'stores the visited URL' do
- get :show,
- params: {
- namespace_id: project.namespace.to_param,
- project_id: project,
- id: issue.iid
- }
+ get project_issue_path(project, issue)
- expect(session['user_return_to']).to eq("/#{project.namespace.to_param}/#{project.to_param}/issues/#{issue.iid}")
+ expect(session['user_return_to']).to eq(project_issue_path(project, issue))
end
end
end
@@ -1260,6 +1253,28 @@ describe Projects::IssuesController do
sign_in(user)
end
+ context do
+ it_behaves_like 'discussions provider' do
+ let!(:author) { create(:user) }
+ let!(:project) { create(:project) }
+
+ let!(:issue) { create(:issue, project: project, author: user) }
+
+ let!(:note_on_issue1) { create(:discussion_note_on_issue, noteable: issue, project: issue.project, author: create(:user)) }
+ let!(:note_on_issue2) { create(:discussion_note_on_issue, noteable: issue, project: issue.project, author: create(:user)) }
+
+ let(:requested_iid) { issue.iid }
+ let(:expected_discussion_count) { 3 }
+ let(:expected_discussion_ids) do
+ [
+ issue.notes.first.discussion_id,
+ note_on_issue1.discussion_id,
+ note_on_issue2.discussion_id
+ ]
+ end
+ end
+ end
+
it 'returns discussion json' do
get :discussions, params: { namespace_id: project.namespace, project_id: project, id: issue.iid }
diff --git a/spec/controllers/projects/jobs_controller_spec.rb b/spec/controllers/projects/jobs_controller_spec.rb
index 39ebf02dcf5..f076a5e769f 100644
--- a/spec/controllers/projects/jobs_controller_spec.rb
+++ b/spec/controllers/projects/jobs_controller_spec.rb
@@ -546,7 +546,7 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['id']).to eq job.id
expect(json_response['status']).to eq job.status
- expect(json_response['html']).to eq('<span class="">BUILD TRACE</span>')
+ expect(json_response['html']).to eq('<span>BUILD TRACE</span>')
end
end
diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb
index 57a432de3f5..b1dc6a65dd4 100644
--- a/spec/controllers/projects/merge_requests_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests_controller_spec.rb
@@ -1210,6 +1210,22 @@ describe Projects::MergeRequestsController do
end
end
end
+
+ context do
+ it_behaves_like 'discussions provider' do
+ let!(:author) { create(:user) }
+ let!(:project) { create(:project) }
+
+ let!(:merge_request) { create(:merge_request, source_project: project) }
+
+ let!(:mr_note1) { create(:discussion_note_on_merge_request, noteable: merge_request, project: project) }
+ let!(:mr_note2) { create(:discussion_note_on_merge_request, noteable: merge_request, project: project) }
+
+ let(:requested_iid) { merge_request.iid }
+ let(:expected_discussion_count) { 2 }
+ let(:expected_discussion_ids) { [mr_note1.discussion_id, mr_note2.discussion_id] }
+ end
+ end
end
describe 'GET edit' do
diff --git a/spec/controllers/projects/notes_controller_spec.rb b/spec/controllers/projects/notes_controller_spec.rb
index 98aea9056dc..9ab565dc2e8 100644
--- a/spec/controllers/projects/notes_controller_spec.rb
+++ b/spec/controllers/projects/notes_controller_spec.rb
@@ -43,7 +43,7 @@ describe Projects::NotesController do
request.headers['X-Last-Fetched-At'] = last_fetched_at
expect(NotesFinder).to receive(:new)
- .with(anything, anything, hash_including(last_fetched_at: last_fetched_at))
+ .with(anything, hash_including(last_fetched_at: last_fetched_at))
.and_call_original
get :index, params: request_params
diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb
index 13fad1b6dc9..232890b1bba 100644
--- a/spec/db/schema_spec.rb
+++ b/spec/db/schema_spec.rb
@@ -27,7 +27,7 @@ describe 'Database schema' do
cluster_providers_gcp: %w[gcp_project_id operation_id],
deploy_keys_projects: %w[deploy_key_id],
deployments: %w[deployable_id environment_id user_id],
- draft_notes: %w[discussion_id],
+ draft_notes: %w[discussion_id commit_id],
emails: %w[user_id],
events: %w[target_id],
epics: %w[updated_by_id last_edited_by_id start_date_sourcing_milestone_id due_date_sourcing_milestone_id],
diff --git a/spec/factories/ci/build_need.rb b/spec/factories/ci/build_need.rb
new file mode 100644
index 00000000000..568aff45a91
--- /dev/null
+++ b/spec/factories/ci/build_need.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ci_build_need, class: Ci::BuildNeed do
+ build factory: :ci_build
+ sequence(:name) { |n| "build_#{n}" }
+ end
+end
diff --git a/spec/factories/clusters/applications/helm.rb b/spec/factories/clusters/applications/helm.rb
index 7fe9ea6801a..89f7bc15217 100644
--- a/spec/factories/clusters/applications/helm.rb
+++ b/spec/factories/clusters/applications/helm.rb
@@ -4,6 +4,20 @@ FactoryBot.define do
factory :clusters_applications_helm, class: Clusters::Applications::Helm do
cluster factory: %i(cluster provided_by_gcp)
+ before(:create) do
+ allow(Gitlab::Kubernetes::Helm::Certificate).to receive(:generate_root)
+ .and_return(
+ double(
+ key_string: File.read(Rails.root.join('spec/fixtures/clusters/sample_key.key')),
+ cert_string: File.read(Rails.root.join('spec/fixtures/clusters/sample_cert.pem'))
+ )
+ )
+ end
+
+ after(:create) do
+ allow(Gitlab::Kubernetes::Helm::Certificate).to receive(:generate_root).and_call_original
+ end
+
trait :not_installable do
status(-2)
end
@@ -60,7 +74,7 @@ FactoryBot.define do
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
end
- factory :clusters_applications_cert_managers, class: Clusters::Applications::CertManager do
+ factory :clusters_applications_cert_manager, class: Clusters::Applications::CertManager do
email 'admin@example.com'
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
end
diff --git a/spec/factories/container_repositories.rb b/spec/factories/container_repositories.rb
index a9771200d6e..0b756220d68 100644
--- a/spec/factories/container_repositories.rb
+++ b/spec/factories/container_repositories.rb
@@ -2,7 +2,7 @@
FactoryBot.define do
factory :container_repository do
- name 'test_image'
+ sequence(:name) { |n| "test_image_#{n}" }
project
transient do
diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb
index c77605f3869..ddd87404003 100644
--- a/spec/features/admin/admin_settings_spec.rb
+++ b/spec/features/admin/admin_settings_spec.rb
@@ -338,14 +338,17 @@ describe 'Admin updates settings' do
visit network_admin_application_settings_path
page.within('.as-outbound') do
- check 'Allow requests to the local network from hooks and services'
+ check 'Allow requests to the local network from web hooks and services'
+ # Enabled by default
+ uncheck 'Allow requests to the local network from system hooks'
# Enabled by default
uncheck 'Enforce DNS rebinding attack protection'
click_button 'Save changes'
end
expect(page).to have_content "Application settings saved successfully"
- expect(current_settings.allow_local_requests_from_hooks_and_services).to be true
+ expect(current_settings.allow_local_requests_from_web_hooks_and_services).to be true
+ expect(current_settings.allow_local_requests_from_system_hooks).to be false
expect(current_settings.dns_rebinding_protection_enabled).to be false
end
end
diff --git a/spec/features/dashboard/shortcuts_spec.rb b/spec/features/dashboard/shortcuts_spec.rb
index 659bec177ab..3a47475da2b 100644
--- a/spec/features/dashboard/shortcuts_spec.rb
+++ b/spec/features/dashboard/shortcuts_spec.rb
@@ -51,7 +51,7 @@ describe 'Dashboard shortcuts', :js do
find('body').send_keys([:shift, 'P'])
find('.nothing-here-block')
- expect(page).to have_content('This user doesn\'t have any personal projects')
+ expect(page).to have_content('Explore public groups to find projects to contribute to.')
end
end
diff --git a/spec/features/groups/labels/user_sees_links_to_issuables.rb b/spec/features/groups/labels/user_sees_links_to_issuables_spec.rb
index e636f625b31..6199b566ebc 100644
--- a/spec/features/groups/labels/user_sees_links_to_issuables.rb
+++ b/spec/features/groups/labels/user_sees_links_to_issuables_spec.rb
@@ -11,7 +11,9 @@ describe 'Groups > Labels > User sees links to issuables' do
end
it 'shows links to MRs and issues' do
- expect(page).to have_link('view merge requests')
- expect(page).to have_link('view open issues')
+ page.within('.labels-container') do
+ expect(page).to have_link('Merge requests')
+ expect(page).to have_link('Issues')
+ end
end
end
diff --git a/spec/features/instance_statistics/instance_statistics.rb b/spec/features/instance_statistics/instance_statistics_spec.rb
index 40d0f1db207..40d0f1db207 100644
--- a/spec/features/instance_statistics/instance_statistics.rb
+++ b/spec/features/instance_statistics/instance_statistics_spec.rb
diff --git a/spec/features/projects/clusters/applications_spec.rb b/spec/features/projects/clusters/applications_spec.rb
index de97c8a8bc0..3d15095e2da 100644
--- a/spec/features/projects/clusters/applications_spec.rb
+++ b/spec/features/projects/clusters/applications_spec.rb
@@ -22,9 +22,8 @@ describe 'Clusters Applications', :js do
let(:cluster) { create(:cluster, :providing_by_gcp, projects: [project]) }
it 'user is unable to install applications' do
- page.within('.js-cluster-application-row-helm') do
- expect(page).to have_css('.js-cluster-application-install-button[disabled]', exact_text: 'Install')
- end
+ expect(page).not_to have_css('.js-cluster-application-row-helm')
+ expect(page).not_to have_css('.js-cluster-application-install-button')
end
end
@@ -63,7 +62,8 @@ describe 'Clusters Applications', :js do
Clusters::Cluster.last.application_helm.make_installed!
- expect(page).to have_css('.js-cluster-application-install-button[disabled]', exact_text: 'Installed')
+ expect(page).not_to have_css('.js-cluster-application-install-button')
+ expect(page).to have_css('.js-cluster-application-uninstall-button:not([disabled])', exact_text: 'Uninstall')
end
expect(page).to have_content('Helm Tiller was successfully installed on your Kubernetes cluster')
@@ -183,7 +183,7 @@ describe 'Clusters Applications', :js do
Clusters::Cluster.last.application_cert_manager.make_installed!
expect(email_form_value).to eq('new_email@example.org')
- expect(page).to have_css('.js-cluster-application-install-button', exact_text: 'Installed')
+ expect(page).to have_css('.js-cluster-application-uninstall-button', exact_text: 'Uninstall')
end
expect(page).to have_content('Cert-Manager was successfully installed on your Kubernetes cluster')
diff --git a/spec/features/projects/files/user_browses_a_tree_with_a_folder_containing_only_a_folder.rb b/spec/features/projects/files/user_browses_a_tree_with_a_folder_containing_only_a_folder_spec.rb
index 934de2fde8f..c19e46da913 100644
--- a/spec/features/projects/files/user_browses_a_tree_with_a_folder_containing_only_a_folder.rb
+++ b/spec/features/projects/files/user_browses_a_tree_with_a_folder_containing_only_a_folder_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
# This is a regression test for https://gitlab.com/gitlab-org/gitlab-ce/issues/37569
-describe 'Projects > Files > User browses a tree with a folder containing only a folder' do
+# Quarantine: https://gitlab.com/gitlab-org/gitlab-ce/issues/65329
+describe 'Projects > Files > User browses a tree with a folder containing only a folder', :quarantine do
let(:project) { create(:project, :empty_repo) }
let(:user) { project.owner }
diff --git a/spec/features/projects/labels/user_sees_links_to_issuables.rb b/spec/features/projects/labels/user_sees_links_to_issuables_spec.rb
index fd2151a1f8e..7a9b9e6eac2 100644
--- a/spec/features/projects/labels/user_sees_links_to_issuables.rb
+++ b/spec/features/projects/labels/user_sees_links_to_issuables_spec.rb
@@ -19,8 +19,10 @@ describe 'Projects > Labels > User sees links to issuables' do
let(:project) { create(:project, :public) }
it 'shows links to MRs and issues' do
- expect(page).to have_link('view merge requests')
- expect(page).to have_link('view open issues')
+ page.within('.labels-container') do
+ expect(page).to have_link('Merge requests')
+ expect(page).to have_link('Issues')
+ end
end
end
@@ -28,8 +30,10 @@ describe 'Projects > Labels > User sees links to issuables' do
let(:project) { create(:project, :public, issues_access_level: ProjectFeature::DISABLED) }
it 'shows links to MRs but not to issues' do
- expect(page).to have_link('view merge requests')
- expect(page).not_to have_link('view open issues')
+ page.within('.labels-container') do
+ expect(page).to have_link('Merge requests')
+ expect(page).not_to have_link('Issues')
+ end
end
end
@@ -37,8 +41,10 @@ describe 'Projects > Labels > User sees links to issuables' do
let(:project) { create(:project, :public, merge_requests_access_level: ProjectFeature::DISABLED) }
it 'shows links to issues but not to MRs' do
- expect(page).not_to have_link('view merge requests')
- expect(page).to have_link('view open issues')
+ page.within('.labels-container') do
+ expect(page).not_to have_link('Merge requests')
+ expect(page).to have_link('Issues')
+ end
end
end
end
@@ -51,8 +57,10 @@ describe 'Projects > Labels > User sees links to issuables' do
let(:project) { create(:project, :public, namespace: group) }
it 'shows links to MRs and issues' do
- expect(page).to have_link('view merge requests')
- expect(page).to have_link('view open issues')
+ page.within('.labels-container') do
+ expect(page).to have_link('Merge requests')
+ expect(page).to have_link('Issues')
+ end
end
end
@@ -60,8 +68,10 @@ describe 'Projects > Labels > User sees links to issuables' do
let(:project) { create(:project, :public, namespace: group, issues_access_level: ProjectFeature::DISABLED) }
it 'shows links to MRs and issues' do
- expect(page).to have_link('view merge requests')
- expect(page).to have_link('view open issues')
+ page.within('.labels-container') do
+ expect(page).to have_link('Merge requests')
+ expect(page).to have_link('Issues')
+ end
end
end
@@ -69,8 +79,10 @@ describe 'Projects > Labels > User sees links to issuables' do
let(:project) { create(:project, :public, namespace: group, merge_requests_access_level: ProjectFeature::DISABLED) }
it 'shows links to MRs and issues' do
- expect(page).to have_link('view merge requests')
- expect(page).to have_link('view open issues')
+ page.within('.labels-container') do
+ expect(page).to have_link('Merge requests')
+ expect(page).to have_link('Issues')
+ end
end
end
end
diff --git a/spec/features/projects/wiki/user_creates_wiki_page_spec.rb b/spec/features/projects/wiki/user_creates_wiki_page_spec.rb
index 1080976f7ce..cc6dbaa6eb8 100644
--- a/spec/features/projects/wiki/user_creates_wiki_page_spec.rb
+++ b/spec/features/projects/wiki/user_creates_wiki_page_spec.rb
@@ -134,15 +134,9 @@ describe "User creates wiki page" do
fill_in(:wiki_content, with: ascii_content)
- # This is the dumbest bug in the world:
- # When the #wiki_content textarea is filled in, JS captures the `Enter` keydown event in order to do
- # auto-indentation and manually inserts a newline. However, for whatever reason, when you try to click on the
- # submit button in Capybara, it will not trigger the `click` event if a \n or \r character has been manually
- # added to the textarea. It will, however, trigger ALL OTHER EVENTS, including `mouseover`/down/up, focus, and
- # blur. Just not `click`. But only when you manually insert \n or \r - if you manually insert any other sequence
- # then `click` is fired normally. And it's only Capybara. Browsers and JSDOM don't have this issue.
- # So that's why the next line performs the click via JS.
- page.execute_script("document.querySelector('.rspec-create-page-button').click()")
+ page.within(".wiki-form") do
+ click_button("Create page")
+ end
page.within ".md" do
expect(page).to have_selector(".katex", count: 3).and have_content("2+2 is 4")
diff --git a/spec/features/snippets/user_edits_snippet_spec.rb b/spec/features/snippets/user_edits_snippet_spec.rb
index 92e34a1f510..5ff12c37aff 100644
--- a/spec/features/snippets/user_edits_snippet_spec.rb
+++ b/spec/features/snippets/user_edits_snippet_spec.rb
@@ -34,7 +34,7 @@ describe 'User edits snippet', :js do
click_button('Save changes')
wait_for_requests
- link = find('a.no-attachment-icon img[alt="banana_sample"]')['src']
+ link = find('a.no-attachment-icon img:not(.lazy)[alt="banana_sample"]')['src']
expect(link).to match(%r{/uploads/-/system/personal_snippet/#{snippet.id}/\h{32}/banana_sample\.gif\z})
end
diff --git a/spec/features/snippets/user_sees_breadcrumb_links.rb b/spec/features/snippets/user_sees_breadcrumb_links.rb
deleted file mode 100644
index 5b10984ce1d..00000000000
--- a/spec/features/snippets/user_sees_breadcrumb_links.rb
+++ /dev/null
@@ -1,19 +0,0 @@
-# frozen_string_literal: true
-
-require 'rails_helper'
-
-describe 'New user snippet breadcrumbs' do
- let(:user) { create(:user) }
-
- before do
- sign_in(user)
- visit new_snippet_path
- end
-
- it 'display a link to user snippets and new user snippet pages' do
- page.within '.breadcrumbs' do
- expect(find_link('Snippets')[:href]).to end_with(dashboard_snippets_path)
- expect(find_link('New')[:href]).to end_with(new_snippet_path)
- end
- end
-end
diff --git a/spec/features/user_opens_link_to_comment.rb b/spec/features/user_opens_link_to_comment_spec.rb
index f1e07e55799..f1e07e55799 100644
--- a/spec/features/user_opens_link_to_comment.rb
+++ b/spec/features/user_opens_link_to_comment_spec.rb
diff --git a/spec/features/users/add_email_to_existing_account.rb b/spec/features/users/add_email_to_existing_account_spec.rb
index 42e352399a8..42e352399a8 100644
--- a/spec/features/users/add_email_to_existing_account.rb
+++ b/spec/features/users/add_email_to_existing_account_spec.rb
diff --git a/spec/finders/container_repositories_finder_spec.rb b/spec/finders/container_repositories_finder_spec.rb
new file mode 100644
index 00000000000..deec62d6598
--- /dev/null
+++ b/spec/finders/container_repositories_finder_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ContainerRepositoriesFinder do
+ let(:group) { create(:group) }
+ let(:project) { create(:project, group: group) }
+ let(:project_repository) { create(:container_repository, project: project) }
+
+ describe '#execute' do
+ let(:id) { nil }
+
+ subject { described_class.new(id: id, container_type: container_type).execute }
+
+ context 'when container_type is group' do
+ let(:other_project) { create(:project, group: group) }
+
+ let(:other_repository) do
+ create(:container_repository, name: 'test_repository2', project: other_project)
+ end
+
+ let(:container_type) { :group }
+ let(:id) { group.id }
+
+ it { is_expected.to match_array([project_repository, other_repository]) }
+ end
+
+ context 'when container_type is project' do
+ let(:container_type) { :project }
+ let(:id) { project.id }
+
+ it { is_expected.to match_array([project_repository]) }
+ end
+
+ context 'with invalid id' do
+ let(:container_type) { :project }
+ let(:id) { 123456789 }
+
+ it 'raises an error' do
+        expect { subject }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+ end
+end
diff --git a/spec/finders/notes_finder_spec.rb b/spec/finders/notes_finder_spec.rb
index 87bde4ca2f6..88906adfeeb 100644
--- a/spec/finders/notes_finder_spec.rb
+++ b/spec/finders/notes_finder_spec.rb
@@ -14,7 +14,7 @@ describe NotesFinder do
let!(:system_note) { create(:note_on_issue, project: project, system: true) }
it 'returns only user notes when using only_comments filter' do
- finder = described_class.new(project, user, notes_filter: UserPreference::NOTES_FILTERS[:only_comments])
+ finder = described_class.new(user, project: project, notes_filter: UserPreference::NOTES_FILTERS[:only_comments])
notes = finder.execute
@@ -22,7 +22,7 @@ describe NotesFinder do
end
it 'returns only system notes when using only_activity filters' do
- finder = described_class.new(project, user, notes_filter: UserPreference::NOTES_FILTERS[:only_activity])
+ finder = described_class.new(user, project: project, notes_filter: UserPreference::NOTES_FILTERS[:only_activity])
notes = finder.execute
@@ -30,7 +30,7 @@ describe NotesFinder do
end
it 'gets all notes' do
- finder = described_class.new(project, user, notes_filter: UserPreference::NOTES_FILTERS[:all_activity])
+ finder = described_class.new(user, project: project, notes_filter: UserPreference::NOTES_FILTERS[:all_activity])
notes = finder.execute
@@ -41,7 +41,7 @@ describe NotesFinder do
it 'finds notes on merge requests' do
create(:note_on_merge_request, project: project)
- notes = described_class.new(project, user).execute
+ notes = described_class.new(user, project: project).execute
expect(notes.count).to eq(1)
end
@@ -49,7 +49,7 @@ describe NotesFinder do
it 'finds notes on snippets' do
create(:note_on_project_snippet, project: project)
- notes = described_class.new(project, user).execute
+ notes = described_class.new(user, project: project).execute
expect(notes.count).to eq(1)
end
@@ -59,13 +59,13 @@ describe NotesFinder do
note = create(:note_on_commit, project: project)
params = { target_type: 'commit', target_id: note.noteable.id }
- notes = described_class.new(project, create(:user), params).execute
+ notes = described_class.new(create(:user), params).execute
expect(notes.count).to eq(0)
end
it 'succeeds when no notes found' do
- notes = described_class.new(project, create(:user)).execute
+ notes = described_class.new(create(:user), project: project).execute
expect(notes.count).to eq(0)
end
@@ -82,7 +82,7 @@ describe NotesFinder do
it 'publicly excludes notes on merge requests' do
create(:note_on_merge_request, project: project)
- notes = described_class.new(project, create(:user)).execute
+ notes = described_class.new(create(:user), project: project).execute
expect(notes.count).to eq(0)
end
@@ -90,7 +90,7 @@ describe NotesFinder do
it 'publicly excludes notes on issues' do
create(:note_on_issue, project: project)
- notes = described_class.new(project, create(:user)).execute
+ notes = described_class.new(create(:user), project: project).execute
expect(notes.count).to eq(0)
end
@@ -98,7 +98,7 @@ describe NotesFinder do
it 'publicly excludes notes on snippets' do
create(:note_on_project_snippet, project: project)
- notes = described_class.new(project, create(:user)).execute
+ notes = described_class.new(create(:user), project: project).execute
expect(notes.count).to eq(0)
end
@@ -110,7 +110,7 @@ describe NotesFinder do
let!(:note2) { create :note_on_commit, project: project }
it 'finds only notes for the selected type' do
- notes = described_class.new(project, user, target_type: 'issue').execute
+ notes = described_class.new(user, project: project, target_type: 'issue').execute
expect(notes).to eq([note1])
end
@@ -118,56 +118,51 @@ describe NotesFinder do
context 'for target' do
let(:project) { create(:project, :repository) }
- let(:note1) { create :note_on_commit, project: project }
- let(:note2) { create :note_on_commit, project: project }
+ let!(:note1) { create :note_on_commit, project: project }
+ let!(:note2) { create :note_on_commit, project: project }
let(:commit) { note1.noteable }
- let(:params) { { target_id: commit.id, target_type: 'commit', last_fetched_at: 1.hour.ago.to_i } }
-
- before do
- note1
- note2
- end
+ let(:params) { { project: project, target_id: commit.id, target_type: 'commit', last_fetched_at: 1.hour.ago.to_i } }
it 'finds all notes' do
- notes = described_class.new(project, user, params).execute
+ notes = described_class.new(user, params).execute
expect(notes.size).to eq(2)
end
it 'finds notes on merge requests' do
note = create(:note_on_merge_request, project: project)
- params = { target_type: 'merge_request', target_id: note.noteable.id }
+ params = { project: project, target_type: 'merge_request', target_id: note.noteable.id }
- notes = described_class.new(project, user, params).execute
+ notes = described_class.new(user, params).execute
expect(notes).to include(note)
end
it 'finds notes on snippets' do
note = create(:note_on_project_snippet, project: project)
- params = { target_type: 'snippet', target_id: note.noteable.id }
+ params = { project: project, target_type: 'snippet', target_id: note.noteable.id }
- notes = described_class.new(project, user, params).execute
+ notes = described_class.new(user, params).execute
expect(notes.count).to eq(1)
end
it 'finds notes on personal snippets' do
note = create(:note_on_personal_snippet)
- params = { target_type: 'personal_snippet', target_id: note.noteable_id }
+ params = { project: project, target_type: 'personal_snippet', target_id: note.noteable_id }
- notes = described_class.new(project, user, params).execute
+ notes = described_class.new(user, params).execute
expect(notes.count).to eq(1)
end
it 'raises an exception for an invalid target_type' do
params[:target_type] = 'invalid'
- expect { described_class.new(project, user, params).execute }.to raise_error("invalid target_type '#{params[:target_type]}'")
+ expect { described_class.new(user, params).execute }.to raise_error("invalid target_type '#{params[:target_type]}'")
end
it 'filters out old notes' do
note2.update_attribute(:updated_at, 2.hours.ago)
- notes = described_class.new(project, user, params).execute
+ notes = described_class.new(user, params).execute
expect(notes).to eq([note1])
end
@@ -175,25 +170,47 @@ describe NotesFinder do
let(:confidential_issue) { create(:issue, :confidential, project: project, author: user) }
let!(:confidential_note) { create(:note, noteable: confidential_issue, project: confidential_issue.project) }
- let(:params) { { target_id: confidential_issue.id, target_type: 'issue', last_fetched_at: 1.hour.ago.to_i } }
+ let(:params) { { project: confidential_issue.project, target_id: confidential_issue.id, target_type: 'issue', last_fetched_at: 1.hour.ago.to_i } }
it 'returns notes if user can see the issue' do
- expect(described_class.new(project, user, params).execute).to eq([confidential_note])
+ expect(described_class.new(user, params).execute).to eq([confidential_note])
end
it 'raises an error if user can not see the issue' do
user = create(:user)
- expect { described_class.new(project, user, params).execute }.to raise_error(ActiveRecord::RecordNotFound)
+ expect { described_class.new(user, params).execute }.to raise_error(ActiveRecord::RecordNotFound)
end
it 'raises an error for project members with guest role' do
user = create(:user)
project.add_guest(user)
- expect { described_class.new(project, user, params).execute }.to raise_error(ActiveRecord::RecordNotFound)
+ expect { described_class.new(user, params).execute }.to raise_error(ActiveRecord::RecordNotFound)
end
end
end
+
+ context 'for explicit target' do
+ let(:project) { create(:project, :repository) }
+ let!(:note1) { create :note_on_commit, project: project, created_at: 1.day.ago, updated_at: 2.hours.ago }
+ let!(:note2) { create :note_on_commit, project: project }
+ let(:commit) { note1.noteable }
+ let(:params) { { project: project, target: commit } }
+
+ it 'returns the expected notes' do
+ expect(described_class.new(user, params).execute).to eq([note1, note2])
+ end
+
+ it 'returns the expected notes when last_fetched_at is given' do
+ params = { project: project, target: commit, last_fetched_at: 1.hour.ago.to_i }
+ expect(described_class.new(user, params).execute).to eq([note2])
+ end
+
+ it 'fails when nil is provided' do
+ params = { project: project, target: nil }
+ expect { described_class.new(user, params).execute }.to raise_error(RuntimeError)
+ end
+ end
end
describe '.search' do
@@ -201,17 +218,17 @@ describe NotesFinder do
let(:note) { create(:note_on_issue, note: 'WoW', project: project) }
it 'returns notes with matching content' do
- expect(described_class.new(note.project, nil, search: note.note).execute).to eq([note])
+ expect(described_class.new(nil, project: note.project, search: note.note).execute).to eq([note])
end
it 'returns notes with matching content regardless of the casing' do
- expect(described_class.new(note.project, nil, search: 'WOW').execute).to eq([note])
+ expect(described_class.new(nil, project: note.project, search: 'WOW').execute).to eq([note])
end
it 'returns commit notes user can access' do
note = create(:note_on_commit, project: project)
- expect(described_class.new(note.project, create(:user), search: note.note).execute).to eq([note])
+ expect(described_class.new(create(:user), project: note.project, search: note.note).execute).to eq([note])
end
context "confidential issues" do
@@ -220,27 +237,27 @@ describe NotesFinder do
let(:confidential_note) { create(:note, note: "Random", noteable: confidential_issue, project: confidential_issue.project) }
it "returns notes with matching content if user can see the issue" do
- expect(described_class.new(confidential_note.project, user, search: confidential_note.note).execute).to eq([confidential_note])
+ expect(described_class.new(user, project: confidential_note.project, search: confidential_note.note).execute).to eq([confidential_note])
end
it "does not return notes with matching content if user can not see the issue" do
user = create(:user)
- expect(described_class.new(confidential_note.project, user, search: confidential_note.note).execute).to be_empty
+ expect(described_class.new(user, project: confidential_note.project, search: confidential_note.note).execute).to be_empty
end
it "does not return notes with matching content for project members with guest role" do
user = create(:user)
project.add_guest(user)
- expect(described_class.new(confidential_note.project, user, search: confidential_note.note).execute).to be_empty
+ expect(described_class.new(user, project: confidential_note.project, search: confidential_note.note).execute).to be_empty
end
it "does not return notes with matching content for unauthenticated users" do
- expect(described_class.new(confidential_note.project, nil, search: confidential_note.note).execute).to be_empty
+ expect(described_class.new(nil, project: confidential_note.project, search: confidential_note.note).execute).to be_empty
end
end
context 'inlines SQL filters on subqueries for performance' do
- let(:sql) { described_class.new(note.project, nil, search: note.note).execute.to_sql }
+ let(:sql) { described_class.new(nil, project: note.project, search: note.note).execute.to_sql }
let(:number_of_noteable_types) { 4 }
specify 'project_id check' do
@@ -254,11 +271,11 @@ describe NotesFinder do
end
describe '#target' do
- subject { described_class.new(project, user, params) }
+ subject { described_class.new(user, params) }
context 'for a issue target' do
let(:issue) { create(:issue, project: project) }
- let(:params) { { target_type: 'issue', target_id: issue.id } }
+ let(:params) { { project: project, target_type: 'issue', target_id: issue.id } }
it 'returns the issue' do
expect(subject.target).to eq(issue)
@@ -267,7 +284,7 @@ describe NotesFinder do
context 'for a merge request target' do
let(:merge_request) { create(:merge_request, source_project: project) }
- let(:params) { { target_type: 'merge_request', target_id: merge_request.id } }
+ let(:params) { { project: project, target_type: 'merge_request', target_id: merge_request.id } }
it 'returns the merge_request' do
expect(subject.target).to eq(merge_request)
@@ -276,7 +293,7 @@ describe NotesFinder do
context 'for a snippet target' do
let(:snippet) { create(:project_snippet, project: project) }
- let(:params) { { target_type: 'snippet', target_id: snippet.id } }
+ let(:params) { { project: project, target_type: 'snippet', target_id: snippet.id } }
it 'returns the snippet' do
expect(subject.target).to eq(snippet)
@@ -286,7 +303,7 @@ describe NotesFinder do
context 'for a commit target' do
let(:project) { create(:project, :repository) }
let(:commit) { project.commit }
- let(:params) { { target_type: 'commit', target_id: commit.id } }
+ let(:params) { { project: project, target_type: 'commit', target_id: commit.id } }
it 'returns the commit' do
expect(subject.target).to eq(commit)
@@ -298,24 +315,24 @@ describe NotesFinder do
let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
it 'finds issues by iid' do
- iid_params = { target_type: 'issue', target_iid: issue.iid }
- expect(described_class.new(project, user, iid_params).target).to eq(issue)
+ iid_params = { project: project, target_type: 'issue', target_iid: issue.iid }
+ expect(described_class.new(user, iid_params).target).to eq(issue)
end
it 'finds merge requests by iid' do
- iid_params = { target_type: 'merge_request', target_iid: merge_request.iid }
- expect(described_class.new(project, user, iid_params).target).to eq(merge_request)
+ iid_params = { project: project, target_type: 'merge_request', target_iid: merge_request.iid }
+ expect(described_class.new(user, iid_params).target).to eq(merge_request)
end
it 'returns nil if both target_id and target_iid are not given' do
- params_without_any_id = { target_type: 'issue' }
- expect(described_class.new(project, user, params_without_any_id).target).to be_nil
+ params_without_any_id = { project: project, target_type: 'issue' }
+ expect(described_class.new(user, params_without_any_id).target).to be_nil
end
it 'prioritizes target_id over target_iid' do
issue2 = create(:issue, project: project)
- iid_params = { target_type: 'issue', target_id: issue2.id, target_iid: issue.iid }
- expect(described_class.new(project, user, iid_params).target).to eq(issue2)
+ iid_params = { project: project, target_type: 'issue', target_id: issue2.id, target_iid: issue.iid }
+ expect(described_class.new(user, iid_params).target).to eq(issue2)
end
end
end
diff --git a/spec/fixtures/api/schemas/entities/discussion.json b/spec/fixtures/api/schemas/entities/discussion.json
new file mode 100644
index 00000000000..bcc1db79e83
--- /dev/null
+++ b/spec/fixtures/api/schemas/entities/discussion.json
@@ -0,0 +1,67 @@
+{
+ "type": "object",
+ "required" : [
+ "id",
+ "notes",
+ "individual_note"
+ ],
+ "properties" : {
+ "id": { "type": "string" },
+ "individual_note": { "type": "boolean" },
+ "notes": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties" : {
+ "id": { "type": "string" },
+ "type": { "type": ["string", "null"] },
+ "body": { "type": "string" },
+ "attachment": { "type": ["string", "null"]},
+ "award_emoji": { "type": "array" },
+ "author": {
+ "type": "object",
+ "properties": {
+ "name": { "type": "string" },
+ "username": { "type": "string" },
+ "id": { "type": "integer" },
+ "state": { "type": "string" },
+ "avatar_url": { "type": "uri" },
+ "web_url": { "type": "uri" },
+ "status_tooltip_html": { "type": ["string", "null"] },
+ "path": { "type": "string" }
+ },
+ "additionalProperties": false
+ },
+ "created_at": { "type": "date" },
+ "updated_at": { "type": "date" },
+ "system": { "type": "boolean" },
+ "noteable_id": { "type": "integer" },
+ "noteable_iid": { "type": "integer" },
+ "noteable_type": { "type": "string" },
+ "resolved": { "type": "boolean" },
+ "resolvable": { "type": "boolean" },
+ "resolved_by": { "type": ["string", "null"] },
+ "note": { "type": "string" },
+ "note_html": { "type": "string" },
+ "current_user": { "type": "object" },
+ "suggestions": { "type": "array" },
+ "discussion_id": { "type": "string" },
+ "emoji_awardable": { "type": "boolean" },
+ "report_abuse_path": { "type": "string" },
+ "noteable_note_url": { "type": "string" },
+ "resolve_path": { "type": "string" },
+ "resolve_with_issue_path": { "type": "string" },
+ "cached_markdown_version": { "type": "integer" },
+ "human_access": { "type": ["string", "null"] },
+ "toggle_award_path": { "type": "string" },
+ "path": { "type": "string" }
+ },
+ "required": [
+ "id", "attachment", "author", "created_at", "updated_at",
+ "system", "noteable_id", "noteable_type"
+ ],
+ "additionalProperties": false
+ }
+ }
+ }
+}
diff --git a/spec/fixtures/api/schemas/entities/discussions.json b/spec/fixtures/api/schemas/entities/discussions.json
new file mode 100644
index 00000000000..5a837429776
--- /dev/null
+++ b/spec/fixtures/api/schemas/entities/discussions.json
@@ -0,0 +1,4 @@
+{
+ "type": "array",
+ "items": { "$ref": "discussion.json" }
+}
diff --git a/spec/fixtures/api/schemas/registry/repository.json b/spec/fixtures/api/schemas/registry/repository.json
index e0fd4620c43..d0a068b65a7 100644
--- a/spec/fixtures/api/schemas/registry/repository.json
+++ b/spec/fixtures/api/schemas/registry/repository.json
@@ -17,6 +17,9 @@
"path": {
"type": "string"
},
+ "project_id": {
+ "type": "integer"
+ },
"location": {
"type": "string"
},
@@ -28,7 +31,8 @@
},
"destroy_path": {
"type": "string"
- }
+ },
+ "tags": { "$ref": "tags.json" }
},
"additionalProperties": false
}
diff --git a/spec/fixtures/clusters/sample_key.key b/spec/fixtures/clusters/sample_key.key
new file mode 100644
index 00000000000..4ddb20b0922
--- /dev/null
+++ b/spec/fixtures/clusters/sample_key.key
@@ -0,0 +1,9 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIBOgIBAAJBAMA5sXIBE0HwgIB40iNidN4PGWzOyLQK0bsdOBNgpEXkDlZBvnak
+OUgAPF+rME4PB0Yl415DabUI40T5UNmlwxcCAwEAAQJAZtY2pSwIFm3JAXIh0cZZ
+iXcAfiJ+YzuqinUOS+eW2sBCAEzjcARlU/o6sFQgtsOi4FOMczAd1Yx8UDMXMmrw
+2QIhAPBgVhJiTF09pdmeFWutCvTJDlFFAQNbrbo2X2x/9WF9AiEAzLgqMKeStSRu
+H9N16TuDrUoO8R+DPqriCwkKrSHaWyMCIFzMhE4inuKcSywBaLmiG4m3GQzs++Al
+A6PRG/PSTpQtAiBxtBg6zdf+JC3GH3zt/dA0/10tL4OF2wORfYQghRzyYQIhAL2l
+0ZQW+yLIZAGrdBFWYEAa52GZosncmzBNlsoTgwE4
+-----END RSA PRIVATE KEY----- \ No newline at end of file
diff --git a/spec/frontend/clusters/clusters_bundle_spec.js b/spec/frontend/clusters/clusters_bundle_spec.js
index 6de06a9e2d5..80816faa5fc 100644
--- a/spec/frontend/clusters/clusters_bundle_spec.js
+++ b/spec/frontend/clusters/clusters_bundle_spec.js
@@ -147,47 +147,80 @@ describe('Clusters', () => {
});
describe('updateContainer', () => {
+ const { location } = window;
+
+ beforeEach(() => {
+ delete window.location;
+ window.location = {
+ reload: jest.fn(),
+ hash: location.hash,
+ };
+ });
+
+ afterEach(() => {
+ window.location = location;
+ });
+
describe('when creating cluster', () => {
it('should show the creating container', () => {
cluster.updateContainer(null, 'creating');
expect(cluster.creatingContainer.classList.contains('hidden')).toBeFalsy();
-
expect(cluster.successContainer.classList.contains('hidden')).toBeTruthy();
-
expect(cluster.errorContainer.classList.contains('hidden')).toBeTruthy();
+ expect(window.location.reload).not.toHaveBeenCalled();
});
it('should continue to show `creating` banner with subsequent updates of the same status', () => {
+ cluster.updateContainer(null, 'creating');
cluster.updateContainer('creating', 'creating');
expect(cluster.creatingContainer.classList.contains('hidden')).toBeFalsy();
-
expect(cluster.successContainer.classList.contains('hidden')).toBeTruthy();
-
expect(cluster.errorContainer.classList.contains('hidden')).toBeTruthy();
+ expect(window.location.reload).not.toHaveBeenCalled();
});
});
describe('when cluster is created', () => {
- it('should show the success container and fresh the page', () => {
- cluster.updateContainer(null, 'created');
+ it('should hide the "creating" banner and refresh the page', () => {
+ jest.spyOn(cluster, 'setClusterNewlyCreated');
+ cluster.updateContainer(null, 'creating');
+ cluster.updateContainer('creating', 'created');
expect(cluster.creatingContainer.classList.contains('hidden')).toBeTruthy();
+ expect(cluster.successContainer.classList.contains('hidden')).toBeTruthy();
+ expect(cluster.errorContainer.classList.contains('hidden')).toBeTruthy();
+ expect(window.location.reload).toHaveBeenCalled();
+ expect(cluster.setClusterNewlyCreated).toHaveBeenCalledWith(true);
+ });
- expect(cluster.successContainer.classList.contains('hidden')).toBeFalsy();
+ it('when the page is refreshed, it should show the "success" banner', () => {
+ jest.spyOn(cluster, 'setClusterNewlyCreated');
+ jest.spyOn(cluster, 'isClusterNewlyCreated').mockReturnValue(true);
+
+ cluster.updateContainer(null, 'created');
+ cluster.updateContainer('created', 'created');
+ expect(cluster.creatingContainer.classList.contains('hidden')).toBeTruthy();
+ expect(cluster.successContainer.classList.contains('hidden')).toBeFalsy();
expect(cluster.errorContainer.classList.contains('hidden')).toBeTruthy();
+ expect(window.location.reload).not.toHaveBeenCalled();
+ expect(cluster.setClusterNewlyCreated).toHaveBeenCalledWith(false);
});
it('should not show a banner when status is already `created`', () => {
+ jest.spyOn(cluster, 'setClusterNewlyCreated');
+ jest.spyOn(cluster, 'isClusterNewlyCreated').mockReturnValue(false);
+
+ cluster.updateContainer(null, 'created');
cluster.updateContainer('created', 'created');
expect(cluster.creatingContainer.classList.contains('hidden')).toBeTruthy();
-
expect(cluster.successContainer.classList.contains('hidden')).toBeTruthy();
-
expect(cluster.errorContainer.classList.contains('hidden')).toBeTruthy();
+ expect(window.location.reload).not.toHaveBeenCalled();
+ expect(cluster.setClusterNewlyCreated).not.toHaveBeenCalled();
});
});
diff --git a/spec/frontend/helpers/indent_helper_spec.js b/spec/frontend/helpers/indent_helper_spec.js
deleted file mode 100644
index fca12f0d1ef..00000000000
--- a/spec/frontend/helpers/indent_helper_spec.js
+++ /dev/null
@@ -1,371 +0,0 @@
-import IndentHelper from '~/helpers/indent_helper';
-
-function createMockTextarea() {
- const el = document.createElement('textarea');
- el.setCursor = pos => el.setSelectionRange(pos, pos);
- el.setCursorToEnd = () => el.setCursor(el.value.length);
- el.selection = () => [el.selectionStart, el.selectionEnd];
- el.cursor = () => {
- const [start, end] = el.selection();
- return start === end ? start : undefined;
- };
- return el;
-}
-
-describe('indent_helper', () => {
- let element;
- let ih;
-
- beforeEach(() => {
- element = createMockTextarea();
- ih = new IndentHelper(element);
- });
-
- describe('indents', () => {
- describe('a single line', () => {
- it('when on an empty line; and cursor follows', () => {
- element.value = '';
- ih.indent();
- expect(element.value).toBe(' ');
- expect(element.cursor()).toBe(4);
- ih.indent();
- expect(element.value).toBe(' ');
- expect(element.cursor()).toBe(8);
- });
-
- it('when at the start of a line; and cursor stays at start', () => {
- element.value = 'foobar';
- element.setCursor(0);
- ih.indent();
- expect(element.value).toBe(' foobar');
- expect(element.cursor()).toBe(4);
- });
-
- it('when the cursor is in the middle; and cursor follows', () => {
- element.value = 'foobar';
- element.setCursor(3);
- ih.indent();
- expect(element.value).toBe(' foobar');
- expect(element.cursor()).toBe(7);
- });
- });
-
- describe('several lines', () => {
- it('when everything is selected; and everything remains selected', () => {
- element.value = 'foo\nbar\nbaz';
- element.setSelectionRange(0, 11);
- ih.indent();
- expect(element.value).toBe(' foo\n bar\n baz');
- expect(element.selection()).toEqual([0, 23]);
- });
-
- it('when all lines are partially selected; and the selection adapts', () => {
- element.value = 'foo\nbar\nbaz';
- element.setSelectionRange(2, 9);
- ih.indent();
- expect(element.value).toBe(' foo\n bar\n baz');
- expect(element.selection()).toEqual([6, 21]);
- });
-
- it('when some lines are entirely selected; and entire lines remain selected', () => {
- element.value = 'foo\nbar\nbaz';
- element.setSelectionRange(4, 11);
- ih.indent();
- expect(element.value).toBe('foo\n bar\n baz');
- expect(element.selection()).toEqual([4, 19]);
- });
-
- it('when some lines are partially selected; and the selection adapts', () => {
- element.value = 'foo\nbar\nbaz';
- element.setSelectionRange(5, 9);
- ih.indent();
- expect(element.value).toBe('foo\n bar\n baz');
- expect(element.selection()).toEqual([5 + 4, 9 + 2 * 4]);
- });
-
- it('having different indentation when some lines are entirely selected; and entire lines remain selected', () => {
- element.value = ' foo\nbar\n baz';
- element.setSelectionRange(8, 19);
- ih.indent();
- expect(element.value).toBe(' foo\n bar\n baz');
- expect(element.selection()).toEqual([8, 27]);
- });
-
- it('having different indentation when some lines are partially selected; and the selection adapts', () => {
- element.value = ' foo\nbar\n baz';
- element.setSelectionRange(9, 14);
- ih.indent();
- expect(element.value).toBe(' foo\n bar\n baz');
- expect(element.selection()).toEqual([13, 22]);
- });
- });
- });
-
- describe('unindents', () => {
- describe('a single line', () => {
- it('but does nothing if there is not indent', () => {
- element.value = 'foobar';
- element.setCursor(2);
- ih.unindent();
- expect(element.value).toBe('foobar');
- expect(element.cursor()).toBe(2);
- });
-
- it('but does nothing if there is a partial indent', () => {
- element.value = ' foobar';
- element.setCursor(1);
- ih.unindent();
- expect(element.value).toBe(' foobar');
- expect(element.cursor()).toBe(1);
- });
-
- it('when the cursor is in the line text; cursor follows', () => {
- element.value = ' foobar';
- element.setCursor(6);
- ih.unindent();
- expect(element.value).toBe('foobar');
- expect(element.cursor()).toBe(2);
- });
-
- it('when the cursor is in the indent; and cursor goes to start', () => {
- element.value = ' foobar';
- element.setCursor(2);
- ih.unindent();
- expect(element.value).toBe('foobar');
- expect(element.cursor()).toBe(0);
- });
-
- it('when the cursor is at line start; and cursor stays at start', () => {
- element.value = ' foobar';
- element.setCursor(0);
- ih.unindent();
- expect(element.value).toBe('foobar');
- expect(element.cursor()).toBe(0);
- });
-
- it('when a selection includes part of the indent and text', () => {
- element.value = ' foobar';
- element.setSelectionRange(2, 8);
- ih.unindent();
- expect(element.value).toBe('foobar');
- expect(element.selection()).toEqual([0, 4]);
- });
-
- it('when a selection includes part of the indent only', () => {
- element.value = ' foobar';
- element.setSelectionRange(0, 4);
- ih.unindent();
- expect(element.value).toBe('foobar');
- expect(element.cursor()).toBe(0);
-
- element.value = ' foobar';
- element.setSelectionRange(1, 3);
- ih.unindent();
- expect(element.value).toBe('foobar');
- expect(element.cursor()).toBe(0);
- });
- });
-
- describe('several lines', () => {
- it('when everything is selected', () => {
- element.value = ' foo\n bar\n baz';
- element.setSelectionRange(0, 27);
- ih.unindent();
- expect(element.value).toBe('foo\n bar\nbaz');
- expect(element.selection()).toEqual([0, 15]);
- });
-
- it('when all lines are partially selected', () => {
- element.value = ' foo\n bar\n baz';
- element.setSelectionRange(5, 26);
- ih.unindent();
- expect(element.value).toBe('foo\n bar\nbaz');
- expect(element.selection()).toEqual([1, 14]);
- });
-
- it('when all lines are entirely selected', () => {
- element.value = ' foo\n bar\n baz';
- element.setSelectionRange(8, 27);
- ih.unindent();
- expect(element.value).toBe(' foo\n bar\nbaz');
- expect(element.selection()).toEqual([8, 19]);
- });
-
- it('when some lines are entirely selected', () => {
- element.value = ' foo\n bar\n baz';
- element.setSelectionRange(8, 27);
- ih.unindent();
- expect(element.value).toBe(' foo\n bar\nbaz');
- expect(element.selection()).toEqual([8, 19]);
- });
-
- it('when some lines are partially selected', () => {
- element.value = ' foo\n bar\n baz';
- element.setSelectionRange(17, 26);
- ih.unindent();
- expect(element.value).toBe(' foo\n bar\nbaz');
- expect(element.selection()).toEqual([13, 18]);
- });
-
- it('when some lines are partially selected within their indents', () => {
- element.value = ' foo\n bar\n baz';
- element.setSelectionRange(10, 22);
- ih.unindent();
- expect(element.value).toBe(' foo\n bar\nbaz');
- expect(element.selection()).toEqual([8, 16]);
- });
- });
- });
-
- describe('newline', () => {
- describe('on a single line', () => {
- it('auto-indents the new line', () => {
- element.value = 'foo\n bar\n baz\n qux';
-
- element.setCursor(3);
- ih.newline();
- expect(element.value).toBe('foo\n\n bar\n baz\n qux');
- expect(element.cursor()).toBe(4);
-
- element.setCursor(9);
- ih.newline();
- expect(element.value).toBe('foo\n\n bar\n \n baz\n qux');
- expect(element.cursor()).toBe(11);
-
- element.setCursor(19);
- ih.newline();
- expect(element.value).toBe('foo\n\n bar\n \n baz\n \n qux');
- expect(element.cursor()).toBe(24);
-
- element.setCursor(36);
- ih.newline();
- expect(element.value).toBe('foo\n\n bar\n \n baz\n \n qux\n ');
- expect(element.cursor()).toBe(45);
- });
-
- it('splits a line and auto-indents', () => {
- element.value = ' foobar';
- element.setCursor(7);
- ih.newline();
- expect(element.value).toBe(' foo\n bar');
- expect(element.cursor()).toBe(12);
- });
-
- it('replaces selection with an indented newline', () => {
- element.value = ' foobarbaz';
- element.setSelectionRange(7, 10);
- ih.newline();
- expect(element.value).toBe(' foo\n baz');
- expect(element.cursor()).toBe(12);
- });
- });
-
- it('on several lines.replaces selection with indented newline', () => {
- element.value = ' foo\n bar\n baz';
- element.setSelectionRange(4, 17);
- ih.newline();
- expect(element.value).toBe(' fo\n az');
- expect(element.cursor()).toBe(7);
- });
- });
-
- describe('backspace', () => {
- let event;
-
- // This suite tests only the special indent-removing behaviour of the
- // backspace() method, since non-special cases are handled natively as a
- // backspace keypress.
-
- beforeEach(() => {
- event = { preventDefault: jest.fn() };
- });
-
- describe('on a single line', () => {
- it('does nothing special if in the line text', () => {
- element.value = ' foobar';
- element.setCursor(7);
- ih.backspace(event);
- expect(event.preventDefault).not.toHaveBeenCalled();
- });
-
- it('does nothing special if after a non-leading indent', () => {
- element.value = ' foo bar';
- element.setCursor(11);
- ih.backspace(event);
- expect(event.preventDefault).not.toHaveBeenCalled();
- });
-
- it('deletes one leading indent', () => {
- element.value = ' foo';
- element.setCursor(8);
- ih.backspace(event);
- expect(event.preventDefault).toHaveBeenCalled();
- expect(element.value).toBe(' foo');
- expect(element.cursor()).toBe(4);
- });
-
- it('does nothing if cursor is inside the leading indent', () => {
- element.value = ' foo';
- element.setCursor(4);
- ih.backspace(event);
- expect(event.preventDefault).not.toHaveBeenCalled();
- });
-
- it('does nothing if cursor is at the start of the line', () => {
- element.value = ' foo';
- element.setCursor(0);
- ih.backspace(event);
- expect(event.preventDefault).not.toHaveBeenCalled();
- });
-
- it('deletes one partial indent', () => {
- element.value = ' foo';
- element.setCursor(6);
- ih.backspace(event);
- expect(event.preventDefault).toHaveBeenCalled();
- expect(element.value).toBe(' foo');
- expect(element.cursor()).toBe(4);
- });
-
- it('deletes indents sequentially', () => {
- element.value = ' foo';
- element.setCursor(10);
- ih.backspace(event);
- ih.backspace(event);
- ih.backspace(event);
- expect(event.preventDefault).toHaveBeenCalled();
- expect(element.value).toBe('foo');
- expect(element.cursor()).toBe(0);
- });
- });
-
- describe('on several lines', () => {
- it('deletes indent only on its own line', () => {
- element.value = ' foo\n bar\n baz';
- element.setCursor(16);
- ih.backspace(event);
- expect(event.preventDefault).toHaveBeenCalled();
- expect(element.value).toBe(' foo\n bar\n baz');
- expect(element.cursor()).toBe(12);
- });
-
- it('has no special behaviour with any range selection', () => {
- const text = ' foo\n bar\n baz';
- for (let start = 0; start < text.length; start += 1) {
- for (let end = start + 1; end < text.length; end += 1) {
- element.value = text;
- element.setSelectionRange(start, end);
- ih.backspace(event);
- expect(event.preventDefault).not.toHaveBeenCalled();
-
- // Ensure that the backspace() method doesn't change state
- // In reality, these two statements won't hold because the browser
- // will natively process the backspace event.
- expect(element.value).toBe(text);
- expect(element.selection()).toEqual([start, end]);
- }
- }
- });
- });
- });
-});
diff --git a/spec/frontend/helpers/vue_test_utils_helper.js b/spec/frontend/helpers/vue_test_utils_helper.js
index 121e99c9783..68326e37ae7 100644
--- a/spec/frontend/helpers/vue_test_utils_helper.js
+++ b/spec/frontend/helpers/vue_test_utils_helper.js
@@ -1,5 +1,3 @@
-/* eslint-disable import/prefer-default-export */
-
const vNodeContainsText = (vnode, text) =>
(vnode.text && vnode.text.includes(text)) ||
(vnode.children && vnode.children.filter(child => vNodeContainsText(child, text)).length);
@@ -19,3 +17,19 @@ export const shallowWrapperContainsSlotText = (shallowWrapper, slotName, text) =
Boolean(
shallowWrapper.vm.$slots[slotName].filter(vnode => vNodeContainsText(vnode, text)).length,
);
+
+/**
+ * Returns a promise that waits for a mutation to be fired before resolving
+ * NOTE: There's no reject action here so it will hang if it waits for a mutation that won't happen.
+ * @param {Object} store - The Vue store that contains the mutations
+ * @param {String} expectedMutationType - The Mutation to wait for
+ */
+export const waitForMutation = (store, expectedMutationType) =>
+ new Promise(resolve => {
+ const unsubscribe = store.subscribe(mutation => {
+ if (mutation.type === expectedMutationType) {
+ unsubscribe();
+ resolve();
+ }
+ });
+ });
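+
+// Example usage (illustrative only; assumes a Vuex store and the
+// RECEIVE_JOB_SUCCESS mutation type used elsewhere in these specs):
+//   await waitForMutation(store, types.RECEIVE_JOB_SUCCESS);
+// Per the note above, the promise never settles if that mutation never fires.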
diff --git a/spec/javascripts/helpers/vue_test_utils_helper_spec.js b/spec/frontend/helpers/vue_test_utils_helper_spec.js
index 41714066da5..41714066da5 100644
--- a/spec/javascripts/helpers/vue_test_utils_helper_spec.js
+++ b/spec/frontend/helpers/vue_test_utils_helper_spec.js
diff --git a/spec/frontend/lib/utils/common_utils_spec.js b/spec/frontend/lib/utils/common_utils_spec.js
deleted file mode 100644
index e3d3b82d2f3..00000000000
--- a/spec/frontend/lib/utils/common_utils_spec.js
+++ /dev/null
@@ -1,180 +0,0 @@
-import * as cu from '~/lib/utils/common_utils';
-
-const CMD_ENTITY = '&#8984;';
-
-// Redefine `navigator.platform` because it's unsettable by default in JSDOM.
-let platform;
-Object.defineProperty(navigator, 'platform', {
- configurable: true,
- get: () => platform,
- set: val => {
- platform = val;
- },
-});
-
-describe('common_utils', () => {
- describe('platform leader key helpers', () => {
- const CTRL_EVENT = { ctrlKey: true };
- const META_EVENT = { metaKey: true };
- const BOTH_EVENT = { ctrlKey: true, metaKey: true };
-
- it('should return "ctrl" if navigator.platform is unset', () => {
- expect(cu.getPlatformLeaderKey()).toBe('ctrl');
- expect(cu.getPlatformLeaderKeyHTML()).toBe('Ctrl');
- expect(cu.isPlatformLeaderKey(CTRL_EVENT)).toBe(true);
- expect(cu.isPlatformLeaderKey(META_EVENT)).toBe(false);
- expect(cu.isPlatformLeaderKey(BOTH_EVENT)).toBe(true);
- });
-
- it('should return "meta" on MacOS', () => {
- navigator.platform = 'MacIntel';
- expect(cu.getPlatformLeaderKey()).toBe('meta');
- expect(cu.getPlatformLeaderKeyHTML()).toBe(CMD_ENTITY);
- expect(cu.isPlatformLeaderKey(CTRL_EVENT)).toBe(false);
- expect(cu.isPlatformLeaderKey(META_EVENT)).toBe(true);
- expect(cu.isPlatformLeaderKey(BOTH_EVENT)).toBe(true);
- });
-
- it('should return "ctrl" on Linux', () => {
- navigator.platform = 'Linux is great';
- expect(cu.getPlatformLeaderKey()).toBe('ctrl');
- expect(cu.getPlatformLeaderKeyHTML()).toBe('Ctrl');
- expect(cu.isPlatformLeaderKey(CTRL_EVENT)).toBe(true);
- expect(cu.isPlatformLeaderKey(META_EVENT)).toBe(false);
- expect(cu.isPlatformLeaderKey(BOTH_EVENT)).toBe(true);
- });
-
- it('should return "ctrl" on Windows', () => {
- navigator.platform = 'Win32';
- expect(cu.getPlatformLeaderKey()).toBe('ctrl');
- expect(cu.getPlatformLeaderKeyHTML()).toBe('Ctrl');
- expect(cu.isPlatformLeaderKey(CTRL_EVENT)).toBe(true);
- expect(cu.isPlatformLeaderKey(META_EVENT)).toBe(false);
- expect(cu.isPlatformLeaderKey(BOTH_EVENT)).toBe(true);
- });
- });
-
- describe('keystroke', () => {
- const CODE_BACKSPACE = 8;
- const CODE_TAB = 9;
- const CODE_ENTER = 13;
- const CODE_SPACE = 32;
- const CODE_4 = 52;
- const CODE_F = 70;
- const CODE_Z = 90;
-
- // Helper function that quickly creates KeyboardEvents
- const k = (code, modifiers = '') => ({
- keyCode: code,
- which: code,
- altKey: modifiers.includes('a'),
- ctrlKey: modifiers.includes('c'),
- metaKey: modifiers.includes('m'),
- shiftKey: modifiers.includes('s'),
- });
-
- const EV_F = k(CODE_F);
- const EV_ALT_F = k(CODE_F, 'a');
- const EV_CONTROL_F = k(CODE_F, 'c');
- const EV_META_F = k(CODE_F, 'm');
- const EV_SHIFT_F = k(CODE_F, 's');
- const EV_CONTROL_SHIFT_F = k(CODE_F, 'cs');
- const EV_ALL_F = k(CODE_F, 'scma');
- const EV_ENTER = k(CODE_ENTER);
- const EV_TAB = k(CODE_TAB);
- const EV_SPACE = k(CODE_SPACE);
- const EV_BACKSPACE = k(CODE_BACKSPACE);
- const EV_4 = k(CODE_4);
- const EV_$ = k(CODE_4, 's');
-
- const { keystroke } = cu;
-
- it('short-circuits with bad arguments', () => {
- expect(keystroke()).toBe(false);
- expect(keystroke({})).toBe(false);
- });
-
- it('handles keystrokes using key codes', () => {
- // Test a letter key with modifiers
- expect(keystroke(EV_F, CODE_F)).toBe(true);
- expect(keystroke(EV_F, CODE_F, '')).toBe(true);
- expect(keystroke(EV_ALT_F, CODE_F, 'a')).toBe(true);
- expect(keystroke(EV_CONTROL_F, CODE_F, 'c')).toBe(true);
- expect(keystroke(EV_META_F, CODE_F, 'm')).toBe(true);
- expect(keystroke(EV_SHIFT_F, CODE_F, 's')).toBe(true);
- expect(keystroke(EV_CONTROL_SHIFT_F, CODE_F, 'cs')).toBe(true);
- expect(keystroke(EV_ALL_F, CODE_F, 'acms')).toBe(true);
-
- // Test non-letter keys
- expect(keystroke(EV_TAB, CODE_TAB)).toBe(true);
- expect(keystroke(EV_ENTER, CODE_ENTER)).toBe(true);
- expect(keystroke(EV_SPACE, CODE_SPACE)).toBe(true);
- expect(keystroke(EV_BACKSPACE, CODE_BACKSPACE)).toBe(true);
-
- // Test a number/symbol key
- expect(keystroke(EV_4, CODE_4)).toBe(true);
- expect(keystroke(EV_$, CODE_4, 's')).toBe(true);
-
- // Test wrong input
- expect(keystroke(EV_F, CODE_Z)).toBe(false);
- expect(keystroke(EV_SHIFT_F, CODE_F)).toBe(false);
- expect(keystroke(EV_SHIFT_F, CODE_F, 'c')).toBe(false);
- });
-
- it('is case-insensitive', () => {
- expect(keystroke(EV_ALL_F, CODE_F, 'ACMS')).toBe(true);
- });
-
- it('handles bogus inputs', () => {
- expect(keystroke(EV_F, 'not a keystroke')).toBe(false);
- expect(keystroke(EV_F, null)).toBe(false);
- });
-
- it('handles exact modifier keys, in any order', () => {
- // Test permutations of modifiers
- expect(keystroke(EV_ALL_F, CODE_F, 'acms')).toBe(true);
- expect(keystroke(EV_ALL_F, CODE_F, 'smca')).toBe(true);
- expect(keystroke(EV_ALL_F, CODE_F, 'csma')).toBe(true);
- expect(keystroke(EV_CONTROL_SHIFT_F, CODE_F, 'cs')).toBe(true);
- expect(keystroke(EV_CONTROL_SHIFT_F, CODE_F, 'sc')).toBe(true);
-
- // Test wrong modifiers
- expect(keystroke(EV_ALL_F, CODE_F, 'smca')).toBe(true);
- expect(keystroke(EV_ALL_F, CODE_F)).toBe(false);
- expect(keystroke(EV_ALL_F, CODE_F, '')).toBe(false);
- expect(keystroke(EV_ALL_F, CODE_F, 'c')).toBe(false);
- expect(keystroke(EV_ALL_F, CODE_F, 'ca')).toBe(false);
- expect(keystroke(EV_ALL_F, CODE_F, 'ms')).toBe(false);
- expect(keystroke(EV_CONTROL_SHIFT_F, CODE_F, 'cs')).toBe(true);
- expect(keystroke(EV_CONTROL_SHIFT_F, CODE_F, 'c')).toBe(false);
- expect(keystroke(EV_CONTROL_SHIFT_F, CODE_F, 's')).toBe(false);
- expect(keystroke(EV_CONTROL_SHIFT_F, CODE_F, 'csa')).toBe(false);
- expect(keystroke(EV_CONTROL_SHIFT_F, CODE_F, 'm')).toBe(false);
- expect(keystroke(EV_SHIFT_F, CODE_F, 's')).toBe(true);
- expect(keystroke(EV_SHIFT_F, CODE_F, 'c')).toBe(false);
- expect(keystroke(EV_SHIFT_F, CODE_F, 'csm')).toBe(false);
- });
-
- it('handles the platform-dependent leader key', () => {
- navigator.platform = 'Win32';
- let EV_UNDO = k(CODE_Z, 'c');
- let EV_REDO = k(CODE_Z, 'cs');
- expect(keystroke(EV_UNDO, CODE_Z, 'l')).toBe(true);
- expect(keystroke(EV_UNDO, CODE_Z, 'c')).toBe(true);
- expect(keystroke(EV_UNDO, CODE_Z, 'm')).toBe(false);
- expect(keystroke(EV_REDO, CODE_Z, 'sl')).toBe(true);
- expect(keystroke(EV_REDO, CODE_Z, 'sc')).toBe(true);
- expect(keystroke(EV_REDO, CODE_Z, 'sm')).toBe(false);
-
- navigator.platform = 'MacIntel';
- EV_UNDO = k(CODE_Z, 'm');
- EV_REDO = k(CODE_Z, 'ms');
- expect(keystroke(EV_UNDO, CODE_Z, 'l')).toBe(true);
- expect(keystroke(EV_UNDO, CODE_Z, 'c')).toBe(false);
- expect(keystroke(EV_UNDO, CODE_Z, 'm')).toBe(true);
- expect(keystroke(EV_REDO, CODE_Z, 'sl')).toBe(true);
- expect(keystroke(EV_REDO, CODE_Z, 'sc')).toBe(false);
- expect(keystroke(EV_REDO, CODE_Z, 'sm')).toBe(true);
- });
- });
-});
diff --git a/spec/frontend/lib/utils/undo_stack_spec.js b/spec/frontend/lib/utils/undo_stack_spec.js
deleted file mode 100644
index 31ad0e77d6f..00000000000
--- a/spec/frontend/lib/utils/undo_stack_spec.js
+++ /dev/null
@@ -1,237 +0,0 @@
-import UndoStack from '~/lib/utils/undo_stack';
-
-import { isEqual } from 'underscore';
-
-describe('UndoStack', () => {
- let stack;
-
- beforeEach(() => {
- stack = new UndoStack();
- });
-
- afterEach(() => {
- // Make sure there's not pending saves
- const history = Array.from(stack.history);
- jest.runAllTimers();
- expect(stack.history).toEqual(history);
- });
-
- it('is blank on construction', () => {
- expect(stack.isEmpty()).toBe(true);
- expect(stack.history).toEqual([]);
- expect(stack.cursor).toBe(-1);
- expect(stack.canUndo()).toBe(false);
- expect(stack.canRedo()).toBe(false);
- });
-
- it('handles simple undo/redo behaviour', () => {
- stack.save(10);
- stack.save(11);
- stack.save(12);
-
- expect(stack.history).toEqual([10, 11, 12]);
- expect(stack.cursor).toBe(2);
- expect(stack.current()).toBe(12);
- expect(stack.isEmpty()).toBe(false);
- expect(stack.canUndo()).toBe(true);
- expect(stack.canRedo()).toBe(false);
-
- stack.undo();
- expect(stack.history).toEqual([10, 11, 12]);
- expect(stack.current()).toBe(11);
- expect(stack.canUndo()).toBe(true);
- expect(stack.canRedo()).toBe(true);
-
- stack.undo();
- expect(stack.current()).toBe(10);
- expect(stack.canUndo()).toBe(false);
- expect(stack.canRedo()).toBe(true);
-
- stack.redo();
- expect(stack.current()).toBe(11);
-
- stack.redo();
- expect(stack.current()).toBe(12);
- expect(stack.isEmpty()).toBe(false);
- expect(stack.canUndo()).toBe(true);
- expect(stack.canRedo()).toBe(false);
-
- // Saving should clear the redo stack
- stack.undo();
- stack.save(13);
- expect(stack.history).toEqual([10, 11, 13]);
- expect(stack.current()).toBe(13);
- });
-
- it('clear() should clear the undo history', () => {
- stack.save(0);
- stack.save(1);
- stack.save(2);
- stack.clear();
- expect(stack.history).toEqual([]);
- expect(stack.current()).toBeUndefined();
- });
-
- it('undo and redo are no-ops if unavailable', () => {
- stack.save(10);
- expect(stack.canRedo()).toBe(false);
- expect(stack.canUndo()).toBe(false);
-
- stack.save(11);
- expect(stack.canRedo()).toBe(false);
- expect(stack.canUndo()).toBe(true);
-
- expect(stack.redo()).toBeUndefined();
- expect(stack.history).toEqual([10, 11]);
- expect(stack.current()).toBe(11);
- expect(stack.canRedo()).toBe(false);
- expect(stack.canUndo()).toBe(true);
-
- expect(stack.undo()).toBe(10);
- expect(stack.undo()).toBeUndefined();
- expect(stack.history).toEqual([10, 11]);
- expect(stack.current()).toBe(10);
- expect(stack.canRedo()).toBe(true);
- expect(stack.canUndo()).toBe(false);
- });
-
- it('should not save a duplicate state', () => {
- stack.save(10);
- stack.save(11);
- stack.save(11);
- stack.save(10);
- stack.save(10);
-
- expect(stack.history).toEqual([10, 11, 10]);
- });
-
- it('uses the === operator to detect duplicates', () => {
- stack.save(10);
- stack.save(10);
- expect(stack.history).toEqual([10]);
-
- // eslint-disable-next-line eqeqeq
- expect(2 == '2' && '2' == 2).toBe(true);
- stack.clear();
- stack.save(2);
- stack.save(2);
- stack.save('2');
- stack.save('2');
- stack.save(2);
- expect(stack.history).toEqual([2, '2', 2]);
-
- const obj = {};
- stack.clear();
- stack.save(obj);
- stack.save(obj);
- stack.save({});
- stack.save({});
- expect(stack.history).toEqual([{}, {}, {}]);
- });
-
- it('should allow custom comparators', () => {
- stack.comparator = isEqual;
- const obj = {};
- stack.clear();
- stack.save(obj);
- stack.save(obj);
- stack.save({});
- stack.save({});
- expect(stack.history).toEqual([{}]);
- });
-
- it('should enforce a max number of undo states', () => {
- // Try 2000 saves. Only the last 1000 should be preserved.
- const sequence = Array(2000)
- .fill(0)
- .map((el, i) => i);
- sequence.forEach(stack.save.bind(stack));
- expect(stack.history.length).toBe(1000);
- expect(stack.history).toEqual(sequence.slice(1000));
- expect(stack.current()).toBe(1999);
- expect(stack.canUndo()).toBe(true);
- expect(stack.canRedo()).toBe(false);
-
- // Saving drops the oldest elements from the stack
- stack.save('end');
- expect(stack.history.length).toBe(1000);
- expect(stack.current()).toBe('end');
- expect(stack.history).toEqual([...sequence.slice(1001), 'end']);
-
- // If states were undone but the history is full, can still add.
- stack.undo();
- stack.undo();
- expect(stack.current()).toBe(1998);
- stack.save(3000);
- expect(stack.history.length).toBe(999);
- // should be [1001, 1002, ..., 1998, 3000]
- expect(stack.history).toEqual([...sequence.slice(1001, 1999), 3000]);
-
- // Try a different max length
- stack = new UndoStack(2);
- stack.save(0);
- expect(stack.history).toEqual([0]);
- stack.save(1);
- expect(stack.history).toEqual([0, 1]);
- stack.save(2);
- expect(stack.history).toEqual([1, 2]);
- });
-
- describe('scheduled saves', () => {
- it('should work', () => {
- // Schedules 1000 ms ahead by default
- stack.save(0);
- stack.scheduleSave(1);
- expect(stack.history).toEqual([0]);
- jest.advanceTimersByTime(999);
- expect(stack.history).toEqual([0]);
- jest.advanceTimersByTime(1);
- expect(stack.history).toEqual([0, 1]);
- });
-
- it('should have an adjustable delay', () => {
- stack.scheduleSave(2, 100);
- jest.advanceTimersByTime(100);
- expect(stack.history).toEqual([2]);
- });
-
- it('should cancel previous scheduled saves', () => {
- stack.scheduleSave(3);
- jest.advanceTimersByTime(100);
- stack.scheduleSave(4);
- jest.runAllTimers();
- expect(stack.history).toEqual([4]);
- });
-
- it('should be canceled by explicit saves', () => {
- stack.scheduleSave(5);
- stack.save(6);
- jest.runAllTimers();
- expect(stack.history).toEqual([6]);
- });
-
- it('should be canceled by undos and redos', () => {
- stack.save(1);
- stack.save(2);
- stack.scheduleSave(3);
- stack.undo();
- jest.runAllTimers();
- expect(stack.history).toEqual([1, 2]);
- expect(stack.current()).toBe(1);
-
- stack.scheduleSave(4);
- stack.redo();
- jest.runAllTimers();
- expect(stack.history).toEqual([1, 2]);
- expect(stack.current()).toBe(2);
- });
-
- it('should be persisted immediately with saveNow()', () => {
- stack.scheduleSave(7);
- stack.scheduleSave(8);
- stack.saveNow();
- jest.runAllTimers();
- expect(stack.history).toEqual([8]);
- });
- });
-});
diff --git a/spec/frontend/tracking_spec.js b/spec/frontend/tracking_spec.js
new file mode 100644
index 00000000000..7e462e9a6ce
--- /dev/null
+++ b/spec/frontend/tracking_spec.js
@@ -0,0 +1,123 @@
+import $ from 'jquery';
+import { setHTMLFixture } from './helpers/fixtures';
+
+import Tracking from '~/tracking';
+
+describe('Tracking', () => {
+ beforeEach(() => {
+ window.snowplow = window.snowplow || (() => {});
+ });
+
+ describe('.event', () => {
+ let snowplowSpy = null;
+
+ beforeEach(() => {
+ snowplowSpy = jest.spyOn(window, 'snowplow');
+ });
+
+ it('tracks to snowplow (our current tracking system)', () => {
+ Tracking.event('_category_', '_eventName_', { label: '_label_' });
+
+ expect(snowplowSpy).toHaveBeenCalledWith('trackStructEvent', '_category_', '_eventName_', {
+ label: '_label_',
+ property: '',
+ value: '',
+ });
+ });
+
+ it('skips tracking if snowplow is unavailable', () => {
+ window.snowplow = false;
+ Tracking.event('_category_', '_eventName_');
+
+ expect(snowplowSpy).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('tracking interface events', () => {
+ let eventSpy = null;
+ let subject = null;
+
+ beforeEach(() => {
+ eventSpy = jest.spyOn(Tracking, 'event');
+ subject = new Tracking('_category_');
+ setHTMLFixture(`
+ <input data-track-event="click_input1" data-track-label="_label_" value="_value_"/>
+ <input data-track-event="click_input2" data-track-value="_value_override_" value="_value_"/>
+ <input type="checkbox" data-track-event="toggle_checkbox" value="_value_" checked/>
+ <input class="dropdown" data-track-event="toggle_dropdown"/>
+ <div class="js-projects-list-holder"></div>
+ `);
+ });
+
+ it('binds to clicks on elements matching [data-track-event]', () => {
+ subject.bind(document);
+ $('[data-track-event="click_input1"]').click();
+
+ expect(eventSpy).toHaveBeenCalledWith('_category_', 'click_input1', {
+ label: '_label_',
+ value: '_value_',
+ property: '',
+ });
+ });
+
+ it('allows value override with the data-track-value attribute', () => {
+ subject.bind(document);
+ $('[data-track-event="click_input2"]').click();
+
+ expect(eventSpy).toHaveBeenCalledWith('_category_', 'click_input2', {
+ label: '',
+ value: '_value_override_',
+ property: '',
+ });
+ });
+
+ it('handles checkbox values correctly', () => {
+ subject.bind(document);
+ const $checkbox = $('[data-track-event="toggle_checkbox"]');
+
+ $checkbox.click(); // unchecking
+
+ expect(eventSpy).toHaveBeenCalledWith('_category_', 'toggle_checkbox', {
+ label: '',
+ property: '',
+ value: false,
+ });
+
+ $checkbox.click(); // checking
+
+ expect(eventSpy).toHaveBeenCalledWith('_category_', 'toggle_checkbox', {
+ label: '',
+ property: '',
+ value: '_value_',
+ });
+ });
+
+ it('handles bootstrap dropdowns', () => {
+ new Tracking('_category_').bind(document);
+ const $dropdown = $('[data-track-event="toggle_dropdown"]');
+
+ $dropdown.trigger('show.bs.dropdown'); // showing
+
+ expect(eventSpy).toHaveBeenCalledWith('_category_', 'toggle_dropdown_show', {
+ label: '',
+ property: '',
+ value: '',
+ });
+
+ $dropdown.trigger('hide.bs.dropdown'); // hiding
+
+ expect(eventSpy).toHaveBeenCalledWith('_category_', 'toggle_dropdown_hide', {
+ label: '',
+ property: '',
+ value: '',
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/diffs/mock_data/diff_file.js b/spec/javascripts/diffs/mock_data/diff_file.js
index 27428197c1c..531686efff1 100644
--- a/spec/javascripts/diffs/mock_data/diff_file.js
+++ b/spec/javascripts/diffs/mock_data/diff_file.js
@@ -1,3 +1,5 @@
+// Copied to ee/spec/frontend/diffs/mock_data/diff_file.js
+
export default {
submodule: false,
submodule_link: null,
diff --git a/spec/javascripts/helpers/vue_test_utils_helper.js b/spec/javascripts/helpers/vue_test_utils_helper.js
index 68326e37ae7..5b749b11246 100644
--- a/spec/javascripts/helpers/vue_test_utils_helper.js
+++ b/spec/javascripts/helpers/vue_test_utils_helper.js
@@ -1,35 +1,5 @@
-const vNodeContainsText = (vnode, text) =>
- (vnode.text && vnode.text.includes(text)) ||
- (vnode.children && vnode.children.filter(child => vNodeContainsText(child, text)).length);
+// No new code should be added to this file. Instead, modify the
+// file this one re-exports from. For more detail about why, see:
+// https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/31349
-/**
- * Determines whether a `shallowMount` Wrapper contains text
- * within one of it's slots. This will also work on Wrappers
- * acquired with `find()`, but only if it's parent Wrapper
- * was shallowMounted.
- * NOTE: Prefer checking the rendered output of a component
- * wherever possible using something like `text()` instead.
- * @param {Wrapper} shallowWrapper - Vue test utils wrapper (shallowMounted)
- * @param {String} slotName
- * @param {String} text
- */
-export const shallowWrapperContainsSlotText = (shallowWrapper, slotName, text) =>
- Boolean(
- shallowWrapper.vm.$slots[slotName].filter(vnode => vNodeContainsText(vnode, text)).length,
- );
-
-/**
- * Returns a promise that waits for a mutation to be fired before resolving
- * NOTE: There's no reject action here so it will hang if it waits for a mutation that won't happen.
- * @param {Object} store - The Vue store that contains the mutations
- * @param {String} expectedMutationType - The Mutation to wait for
- */
-export const waitForMutation = (store, expectedMutationType) =>
- new Promise(resolve => {
- const unsubscribe = store.subscribe(mutation => {
- if (mutation.type === expectedMutationType) {
- unsubscribe();
- resolve();
- }
- });
- });
+export * from '../../frontend/helpers/vue_test_utils_helper';
diff --git a/spec/javascripts/jobs/components/job_app_spec.js b/spec/javascripts/jobs/components/job_app_spec.js
index c58d59b4b16..d3c1cf831bb 100644
--- a/spec/javascripts/jobs/components/job_app_spec.js
+++ b/spec/javascripts/jobs/components/job_app_spec.js
@@ -3,7 +3,9 @@ import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import jobApp from '~/jobs/components/job_app.vue';
import createStore from '~/jobs/store';
+import * as types from '~/jobs/store/mutation_types';
import { mountComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
+import { waitForMutation } from 'spec/helpers/vue_test_utils_helper';
import { resetStore } from '../store/helpers';
import job from '../mock_data';
@@ -22,10 +24,21 @@ describe('Job App ', () => {
variablesSettingsUrl: 'settings/ci-cd/variables',
terminalPath: 'jobs/123/terminal',
pagePath: `${gl.TEST_HOST}jobs/123`,
+ projectPath: 'user-name/project-name',
logState:
'eyJvZmZzZXQiOjE3NDUxLCJuX29wZW5fdGFncyI6MCwiZmdfY29sb3IiOm51bGwsImJnX2NvbG9yIjpudWxsLCJzdHlsZV9tYXNrIjowfQ%3D%3D',
};
+ const waitForJobReceived = () => waitForMutation(store, types.RECEIVE_JOB_SUCCESS);
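+  // Stubs the job and trace endpoints, mounts the component, and resolves once the job data has been received.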
+ const setupAndMount = ({ jobData = {}, traceData = {} } = {}) => {
+ mock.onGet(props.endpoint).replyOnce(200, { ...job, ...jobData });
+ mock.onGet(`${props.pagePath}/trace.json`).reply(200, traceData);
+
+ vm = mountComponentWithStore(Component, { props, store });
+
+ return waitForJobReceived();
+ };
+
beforeEach(() => {
mock = new MockAdapter(axios);
store = createStore();
@@ -39,103 +52,81 @@ describe('Job App ', () => {
describe('while loading', () => {
beforeEach(() => {
- mock.onGet(props.endpoint).reply(200, job, {});
- mock.onGet(`${props.pagePath}/trace.json`).reply(200, {});
- vm = mountComponentWithStore(Component, { props, store });
+ setupAndMount();
});
- it('renders loading icon', done => {
+ it('renders loading icon', () => {
expect(vm.$el.querySelector('.js-job-loading')).not.toBeNull();
expect(vm.$el.querySelector('.js-job-sidebar')).toBeNull();
expect(vm.$el.querySelector('.js-job-content')).toBeNull();
-
- setTimeout(() => {
- done();
- }, 0);
});
});
describe('with successful request', () => {
- beforeEach(() => {
- mock.onGet(`${props.pagePath}/trace.json`).replyOnce(200, {});
- });
-
describe('Header section', () => {
describe('job callout message', () => {
it('should not render the reason when reason is absent', done => {
- mock.onGet(props.endpoint).replyOnce(200, job);
- vm = mountComponentWithStore(Component, { props, store });
-
- setTimeout(() => {
- expect(vm.shouldRenderCalloutMessage).toBe(false);
-
- done();
- }, 0);
+ setupAndMount()
+ .then(() => {
+ expect(vm.shouldRenderCalloutMessage).toBe(false);
+ })
+ .then(done)
+ .catch(done.fail);
});
it('should render the reason when reason is present', done => {
- mock.onGet(props.endpoint).replyOnce(
- 200,
- Object.assign({}, job, {
- callout_message: 'There is an unknown failure, please try again',
- }),
- );
-
- vm = mountComponentWithStore(Component, { props, store });
- setTimeout(() => {
- expect(vm.shouldRenderCalloutMessage).toBe(true);
- done();
- }, 0);
+ setupAndMount({
+ jobData: {
+              callout_message: 'There is an unknown failure, please try again',
+ },
+ })
+ .then(() => {
+ expect(vm.shouldRenderCalloutMessage).toBe(true);
+ })
+ .then(done)
+ .catch(done.fail);
});
});
describe('triggered job', () => {
- beforeEach(() => {
+ beforeEach(done => {
const aYearAgo = new Date();
aYearAgo.setFullYear(aYearAgo.getFullYear() - 1);
- mock
- .onGet(props.endpoint)
- .replyOnce(200, Object.assign({}, job, { started: aYearAgo.toISOString() }));
- vm = mountComponentWithStore(Component, { props, store });
+ setupAndMount({ jobData: { started: aYearAgo.toISOString() } })
+ .then(done)
+ .catch(done.fail);
});
- it('should render provided job information', done => {
- setTimeout(() => {
- expect(
- vm.$el
- .querySelector('.header-main-content')
- .textContent.replace(/\s+/g, ' ')
- .trim(),
- ).toContain('passed Job #4757 triggered 1 year ago by Root');
- done();
- }, 0);
+ it('should render provided job information', () => {
+ expect(
+ vm.$el
+ .querySelector('.header-main-content')
+ .textContent.replace(/\s+/g, ' ')
+ .trim(),
+ ).toContain('passed Job #4757 triggered 1 year ago by Root');
});
- it('should render new issue link', done => {
- setTimeout(() => {
- expect(vm.$el.querySelector('.js-new-issue').getAttribute('href')).toEqual(
- job.new_issue_path,
- );
- done();
- }, 0);
+ it('should render new issue link', () => {
+ expect(vm.$el.querySelector('.js-new-issue').getAttribute('href')).toEqual(
+ job.new_issue_path,
+ );
});
});
describe('created job', () => {
it('should render created key', done => {
- mock.onGet(props.endpoint).replyOnce(200, job);
- vm = mountComponentWithStore(Component, { props, store });
-
- setTimeout(() => {
- expect(
- vm.$el
- .querySelector('.header-main-content')
- .textContent.replace(/\s+/g, ' ')
- .trim(),
- ).toContain('passed Job #4757 created 3 weeks ago by Root');
- done();
- }, 0);
+ setupAndMount()
+ .then(() => {
+ expect(
+ vm.$el
+ .querySelector('.header-main-content')
+ .textContent.replace(/\s+/g, ' ')
+ .trim(),
+ ).toContain('passed Job #4757 created 3 weeks ago by Root');
+ })
+ .then(done)
+ .catch(done.fail);
});
});
});
@@ -143,9 +134,8 @@ describe('Job App ', () => {
describe('stuck block', () => {
      describe('without active runners available', () => {
it('renders stuck block when there are no runners', done => {
- mock.onGet(props.endpoint).replyOnce(
- 200,
- Object.assign({}, job, {
+ setupAndMount({
+ jobData: {
status: {
group: 'pending',
icon: 'status_pending',
@@ -159,23 +149,23 @@ describe('Job App ', () => {
online: false,
},
tags: [],
- }),
- );
- vm = mountComponentWithStore(Component, { props, store });
-
- setTimeout(() => {
- expect(vm.$el.querySelector('.js-job-stuck')).not.toBeNull();
- expect(vm.$el.querySelector('.js-job-stuck .js-stuck-no-active-runner')).not.toBeNull();
- done();
- }, 0);
+ },
+ })
+ .then(() => {
+ expect(vm.$el.querySelector('.js-job-stuck')).not.toBeNull();
+ expect(
+ vm.$el.querySelector('.js-job-stuck .js-stuck-no-active-runner'),
+ ).not.toBeNull();
+ })
+ .then(done)
+ .catch(done.fail);
});
});
describe('when available runners can not run specified tag', () => {
it('renders tags in stuck block when there are no runners', done => {
- mock.onGet(props.endpoint).replyOnce(
- 200,
- Object.assign({}, job, {
+ setupAndMount({
+ jobData: {
status: {
group: 'pending',
icon: 'status_pending',
@@ -188,27 +178,21 @@ describe('Job App ', () => {
available: false,
online: false,
},
- }),
- );
-
- vm = mountComponentWithStore(Component, {
- props,
- store,
- });
-
- setTimeout(() => {
- expect(vm.$el.querySelector('.js-job-stuck').textContent).toContain(job.tags[0]);
- expect(vm.$el.querySelector('.js-job-stuck .js-stuck-with-tags')).not.toBeNull();
- done();
- }, 0);
+ },
+ })
+ .then(() => {
+ expect(vm.$el.querySelector('.js-job-stuck').textContent).toContain(job.tags[0]);
+ expect(vm.$el.querySelector('.js-job-stuck .js-stuck-with-tags')).not.toBeNull();
+ })
+ .then(done)
+ .catch(done.fail);
});
});
describe('when runners are offline and build has tags', () => {
it('renders message about job being stuck because of no runners with the specified tags', done => {
- mock.onGet(props.endpoint).replyOnce(
- 200,
- Object.assign({}, job, {
+ setupAndMount({
+ jobData: {
status: {
group: 'pending',
icon: 'status_pending',
@@ -221,48 +205,35 @@ describe('Job App ', () => {
available: true,
online: true,
},
- }),
- );
-
- vm = mountComponentWithStore(Component, {
- props,
- store,
- });
-
- setTimeout(() => {
- expect(vm.$el.querySelector('.js-job-stuck').textContent).toContain(job.tags[0]);
- expect(vm.$el.querySelector('.js-job-stuck .js-stuck-with-tags')).not.toBeNull();
- done();
- }, 0);
+ },
+ })
+ .then(() => {
+ expect(vm.$el.querySelector('.js-job-stuck').textContent).toContain(job.tags[0]);
+ expect(vm.$el.querySelector('.js-job-stuck .js-stuck-with-tags')).not.toBeNull();
+ })
+ .then(done)
+ .catch(done.fail);
});
});
      it('does not render stuck block when runners are available', done => {
- mock.onGet(props.endpoint).replyOnce(
- 200,
- Object.assign({}, job, {
+ setupAndMount({
+ jobData: {
runners: { available: true },
- }),
- );
-
- vm = mountComponentWithStore(Component, {
- props,
- store,
- });
-
- setTimeout(() => {
- expect(vm.$el.querySelector('.js-job-stuck')).toBeNull();
-
- done();
- }, 0);
+ },
+ })
+ .then(() => {
+ expect(vm.$el.querySelector('.js-job-stuck')).toBeNull();
+ })
+ .then(done)
+ .catch(done.fail);
});
});
describe('unmet prerequisites block', () => {
it('renders unmet prerequisites block when there is an unmet prerequisites failure', done => {
- mock.onGet(props.endpoint).replyOnce(
- 200,
- Object.assign({}, job, {
+ setupAndMount({
+ jobData: {
status: {
group: 'failed',
icon: 'status_failed',
@@ -282,104 +253,81 @@ describe('Job App ', () => {
available: true,
},
tags: [],
- }),
- );
- vm = mountComponentWithStore(Component, { props, store });
-
- setTimeout(() => {
- expect(vm.$el.querySelector('.js-job-failed')).not.toBeNull();
- done();
- }, 0);
+ },
+ })
+ .then(() => {
+ expect(vm.$el.querySelector('.js-job-failed')).not.toBeNull();
+ })
+ .then(done)
+ .catch(done.fail);
});
});
describe('environments block', () => {
it('renders environment block when job has environment', done => {
- mock.onGet(props.endpoint).replyOnce(
- 200,
- Object.assign({}, job, {
+ setupAndMount({
+ jobData: {
deployment_status: {
environment: {
environment_path: '/path',
name: 'foo',
},
},
- }),
- );
-
- vm = mountComponentWithStore(Component, {
- props,
- store,
- });
-
- setTimeout(() => {
- expect(vm.$el.querySelector('.js-job-environment')).not.toBeNull();
-
- done();
- }, 0);
+ },
+ })
+ .then(() => {
+ expect(vm.$el.querySelector('.js-job-environment')).not.toBeNull();
+ })
+ .then(done)
+ .catch(done.fail);
});
      it('does not render environment block when job has no environment', done => {
- mock.onGet(props.endpoint).replyOnce(200, job);
-
- vm = mountComponentWithStore(Component, {
- props,
- store,
- });
-
- setTimeout(() => {
- expect(vm.$el.querySelector('.js-job-environment')).toBeNull();
- done();
- }, 0);
+ setupAndMount()
+ .then(() => {
+ expect(vm.$el.querySelector('.js-job-environment')).toBeNull();
+ })
+ .then(done)
+ .catch(done.fail);
});
});
describe('erased block', () => {
it('renders erased block when `erased` is true', done => {
- mock.onGet(props.endpoint).replyOnce(
- 200,
- Object.assign({}, job, {
+ setupAndMount({
+ jobData: {
erased_by: {
username: 'root',
web_url: 'gitlab.com/root',
},
erased_at: '2016-11-07T11:11:16.525Z',
- }),
- );
-
- vm = mountComponentWithStore(Component, {
- props,
- store,
- });
-
- setTimeout(() => {
- expect(vm.$el.querySelector('.js-job-erased-block')).not.toBeNull();
-
- done();
- }, 0);
+ },
+ })
+ .then(() => {
+ expect(vm.$el.querySelector('.js-job-erased-block')).not.toBeNull();
+ })
+ .then(done)
+ .catch(done.fail);
});
it('does not render erased block when `erased` is false', done => {
- mock.onGet(props.endpoint).replyOnce(200, Object.assign({}, job, { erased_at: null }));
-
- vm = mountComponentWithStore(Component, {
- props,
- store,
- });
-
- setTimeout(() => {
- expect(vm.$el.querySelector('.js-job-erased-block')).toBeNull();
-
- done();
- }, 0);
+ setupAndMount({
+ jobData: {
+ erased_at: null,
+ },
+ })
+ .then(() => {
+ expect(vm.$el.querySelector('.js-job-erased-block')).toBeNull();
+ })
+ .then(done)
+ .catch(done.fail);
});
});
describe('empty states block', () => {
it('renders empty state when job does not have trace and is not running', done => {
- mock.onGet(props.endpoint).replyOnce(
- 200,
- Object.assign({}, job, {
+ setupAndMount({
+ jobData: {
has_trace: false,
status: {
group: 'pending',
@@ -399,25 +347,18 @@ describe('Job App ', () => {
path: '/path',
},
},
- }),
- );
-
- vm = mountComponentWithStore(Component, {
- props,
- store,
- });
-
- setTimeout(() => {
- expect(vm.$el.querySelector('.js-job-empty-state')).not.toBeNull();
-
- done();
- }, 0);
+ },
+ })
+ .then(() => {
+ expect(vm.$el.querySelector('.js-job-empty-state')).not.toBeNull();
+ })
+ .then(done)
+ .catch(done.fail);
});
it('does not render empty state when job does not have trace but it is running', done => {
- mock.onGet(props.endpoint).replyOnce(
- 200,
- Object.assign({}, job, {
+ setupAndMount({
+ jobData: {
has_trace: false,
status: {
group: 'running',
@@ -426,34 +367,23 @@ describe('Job App ', () => {
text: 'running',
details_path: 'path',
},
- }),
- );
-
- vm = mountComponentWithStore(Component, {
- props,
- store,
- });
-
- setTimeout(() => {
- expect(vm.$el.querySelector('.js-job-empty-state')).toBeNull();
-
- done();
- }, 0);
+ },
+ })
+ .then(() => {
+ expect(vm.$el.querySelector('.js-job-empty-state')).toBeNull();
+ })
+ .then(done)
+ .catch(done.fail);
});
it('does not render empty state when job has trace but it is not running', done => {
- mock.onGet(props.endpoint).replyOnce(200, Object.assign({}, job, { has_trace: true }));
-
- vm = mountComponentWithStore(Component, {
- props,
- store,
- });
-
- setTimeout(() => {
- expect(vm.$el.querySelector('.js-job-empty-state')).toBeNull();
-
- done();
- }, 0);
+ setupAndMount({ jobData: { has_trace: true } })
+ .then(() => {
+ expect(vm.$el.querySelector('.js-job-empty-state')).toBeNull();
+ })
+ .then(done)
+ .catch(done.fail);
});
it('displays remaining time for a delayed job', done => {
@@ -461,120 +391,114 @@ describe('Job App ', () => {
spyOn(Date, 'now').and.callFake(
() => new Date(delayedJobFixture.scheduled_at).getTime() - oneHourInMilliseconds,
);
- mock.onGet(props.endpoint).replyOnce(200, { ...delayedJobFixture });
+ setupAndMount({ jobData: delayedJobFixture })
+ .then(() => {
+ expect(vm.$el.querySelector('.js-job-empty-state')).not.toBeNull();
- vm = mountComponentWithStore(Component, {
- props,
- store,
- });
-
- store.subscribeAction(action => {
- if (action.type !== 'receiveJobSuccess') {
- return;
- }
+ const title = vm.$el.querySelector('.js-job-empty-state-title');
- Vue.nextTick()
- .then(() => {
- expect(vm.$el.querySelector('.js-job-empty-state')).not.toBeNull();
-
- const title = vm.$el.querySelector('.js-job-empty-state-title');
+ expect(title).toContainText('01:00:00');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
- expect(title).toContainText('01:00:00');
- done();
- })
- .catch(done.fail);
- });
+ describe('sidebar', () => {
+ it('has no blank blocks', done => {
+ setupAndMount({
+ jobData: {
+ duration: null,
+ finished_at: null,
+ erased_at: null,
+ queued: null,
+ runner: null,
+ coverage: null,
+ tags: [],
+ cancel_path: null,
+ },
+ })
+ .then(() => {
+ vm.$el.querySelectorAll('.blocks-container > *').forEach(block => {
+ expect(block.textContent.trim()).not.toBe('');
+ });
+ })
+ .then(done)
+ .catch(done.fail);
});
});
});
describe('archived job', () => {
- beforeEach(() => {
- mock.onGet(props.endpoint).reply(200, Object.assign({}, job, { archived: true }), {});
- vm = mountComponentWithStore(Component, {
- props,
- store,
- });
+ beforeEach(done => {
+ setupAndMount({ jobData: { archived: true } })
+ .then(done)
+ .catch(done.fail);
});
- it('renders warning about job being archived', done => {
- setTimeout(() => {
- expect(vm.$el.querySelector('.js-archived-job ')).not.toBeNull();
- done();
- }, 0);
+ it('renders warning about job being archived', () => {
+ expect(vm.$el.querySelector('.js-archived-job ')).not.toBeNull();
});
});
describe('non-archived job', () => {
- beforeEach(() => {
- mock.onGet(props.endpoint).reply(200, job, {});
- vm = mountComponentWithStore(Component, {
- props,
- store,
- });
+ beforeEach(done => {
+ setupAndMount()
+ .then(done)
+ .catch(done.fail);
});
- it('does not warning about job being archived', done => {
- setTimeout(() => {
- expect(vm.$el.querySelector('.js-archived-job ')).toBeNull();
- done();
- }, 0);
+      it('does not render warning about job being archived', () => {
+ expect(vm.$el.querySelector('.js-archived-job ')).toBeNull();
});
});
describe('trace output', () => {
- beforeEach(() => {
- mock.onGet(props.endpoint).reply(200, job, {});
- });
-
describe('with append flag', () => {
it('appends the log content to the existing one', done => {
- mock.onGet(`${props.pagePath}/trace.json`).reply(200, {
- html: '<span>More<span>',
- status: 'running',
- state: 'newstate',
- append: true,
- complete: true,
- });
-
- vm = mountComponentWithStore(Component, {
- props,
- store,
- });
-
- vm.$store.state.trace = 'Update';
-
- setTimeout(() => {
- expect(vm.$el.querySelector('.js-build-trace').textContent.trim()).toContain('Update');
-
- done();
- }, 0);
+ setupAndMount({
+ traceData: {
+ html: '<span>More<span>',
+ status: 'running',
+ state: 'newstate',
+ append: true,
+ complete: true,
+ },
+ })
+ .then(() => {
+ vm.$store.state.trace = 'Update';
+
+ return vm.$nextTick();
+ })
+ .then(() => {
+ expect(vm.$el.querySelector('.js-build-trace').textContent.trim()).toContain('Update');
+ })
+ .then(done)
+ .catch(done.fail);
});
});
describe('without append flag', () => {
it('replaces the trace', done => {
- mock.onGet(`${props.pagePath}/trace.json`).reply(200, {
- html: '<span>Different<span>',
- status: 'running',
- append: false,
- complete: true,
- });
-
- vm = mountComponentWithStore(Component, {
- props,
- store,
- });
- vm.$store.state.trace = 'Update';
-
- setTimeout(() => {
- expect(vm.$el.querySelector('.js-build-trace').textContent.trim()).not.toContain(
- 'Update',
- );
+ setupAndMount({
+ traceData: {
+ html: '<span>Different<span>',
+ status: 'running',
+ append: false,
+ complete: true,
+ },
+ })
+ .then(() => {
+ expect(vm.$el.querySelector('.js-build-trace').textContent.trim()).not.toContain(
+ 'Update',
+ );
- expect(vm.$el.querySelector('.js-build-trace').textContent.trim()).toContain('Different');
- done();
- }, 0);
+ expect(vm.$el.querySelector('.js-build-trace').textContent.trim()).toContain(
+ 'Different',
+ );
+ })
+ .then(done)
+ .catch(done.fail);
});
});
@@ -590,83 +514,76 @@ describe('Job App ', () => {
complete: true,
});
- vm = mountComponentWithStore(Component, {
- props,
- store,
- });
-
- setTimeout(() => {
- expect(vm.$el.querySelector('.js-truncated-info').textContent.trim()).toContain(
- '50 bytes',
- );
- done();
- }, 0);
+ setupAndMount({
+ traceData: {
+ html: '<span>Update</span>',
+ status: 'success',
+ append: false,
+ size: 50,
+ total: 100,
+ complete: true,
+ },
+ })
+ .then(() => {
+ expect(vm.$el.querySelector('.js-truncated-info').textContent.trim()).toContain(
+ '50 bytes',
+ );
+ })
+ .then(done)
+ .catch(done.fail);
});
});
    describe('when size is equal to total', () => {
it('does not show the truncated information', done => {
- mock.onGet(`${props.pagePath}/trace.json`).reply(200, {
- html: '<span>Update</span>',
- status: 'success',
- append: false,
- size: 100,
- total: 100,
- complete: true,
- });
-
- vm = mountComponentWithStore(Component, {
- props,
- store,
- });
-
- setTimeout(() => {
- expect(vm.$el.querySelector('.js-truncated-info').textContent.trim()).not.toContain(
- '50 bytes',
- );
- done();
- }, 0);
+ setupAndMount({
+ traceData: {
+ html: '<span>Update</span>',
+ status: 'success',
+ append: false,
+ size: 100,
+ total: 100,
+ complete: true,
+ },
+ })
+ .then(() => {
+ expect(vm.$el.querySelector('.js-truncated-info').textContent.trim()).not.toContain(
+ '50 bytes',
+ );
+ })
+ .then(done)
+ .catch(done.fail);
});
});
});
describe('trace controls', () => {
- beforeEach(() => {
- mock.onGet(`${props.pagePath}/trace.json`).reply(200, {
- html: '<span>Update</span>',
- status: 'success',
- append: false,
- size: 50,
- total: 100,
- complete: true,
- });
-
- vm = mountComponentWithStore(Component, {
- props,
- store,
- });
+ beforeEach(done => {
+ setupAndMount({
+ traceData: {
+ html: '<span>Update</span>',
+ status: 'success',
+ append: false,
+ size: 50,
+ total: 100,
+ complete: true,
+ },
+ })
+ .then(done)
+ .catch(done.fail);
});
- it('should render scroll buttons', done => {
- setTimeout(() => {
- expect(vm.$el.querySelector('.js-scroll-top')).not.toBeNull();
- expect(vm.$el.querySelector('.js-scroll-bottom')).not.toBeNull();
- done();
- }, 0);
+ it('should render scroll buttons', () => {
+ expect(vm.$el.querySelector('.js-scroll-top')).not.toBeNull();
+ expect(vm.$el.querySelector('.js-scroll-bottom')).not.toBeNull();
});
- it('should render link to raw ouput', done => {
- setTimeout(() => {
- expect(vm.$el.querySelector('.js-raw-link-controller')).not.toBeNull();
- done();
- }, 0);
+      it('should render link to raw output', () => {
+ expect(vm.$el.querySelector('.js-raw-link-controller')).not.toBeNull();
});
- it('should render link to erase job', done => {
- setTimeout(() => {
- expect(vm.$el.querySelector('.js-erase-link')).not.toBeNull();
- done();
- }, 0);
+ it('should render link to erase job', () => {
+ expect(vm.$el.querySelector('.js-erase-link')).not.toBeNull();
});
});
});
diff --git a/spec/javascripts/notes/mock_data.js b/spec/javascripts/notes/mock_data.js
index 1df5cf9ef68..5f81a168498 100644
--- a/spec/javascripts/notes/mock_data.js
+++ b/spec/javascripts/notes/mock_data.js
@@ -1,3 +1,5 @@
+// Copied to ee/spec/frontend/notes/mock_data.js
+
export const notesDataMock = {
discussionsPath: '/gitlab-org/gitlab-ce/issues/26/discussions.json',
lastFetchedAt: 1501862675,
diff --git a/spec/javascripts/persistent_user_callout_spec.js b/spec/javascripts/persistent_user_callout_spec.js
index 2fdfff3db03..d15758be5d2 100644
--- a/spec/javascripts/persistent_user_callout_spec.js
+++ b/spec/javascripts/persistent_user_callout_spec.js
@@ -22,6 +22,24 @@ describe('PersistentUserCallout', () => {
return fixture;
}
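+  // Fixture with data-defer-links="true": clicks on its links should be deferred until the callout is dismissed.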
+ function createDeferredLinkFixture() {
+ const fixture = document.createElement('div');
+ fixture.innerHTML = `
+ <div
+ class="container"
+ data-dismiss-endpoint="${dismissEndpoint}"
+ data-feature-id="${featureName}"
+ data-defer-links="true"
+ >
+ <button type="button" class="js-close"></button>
+ <a href="/somewhere-pleasant" target="_blank" class="deferred-link">A link</a>
+ <a href="/somewhere-else" target="_blank" class="normal-link">Another link</a>
+ </div>
+ `;
+
+ return fixture;
+ }
+
describe('dismiss', () => {
let button;
let mockAxios;
@@ -74,6 +92,75 @@ describe('PersistentUserCallout', () => {
});
});
+ describe('deferred links', () => {
+ let button;
+ let deferredLink;
+ let normalLink;
+ let mockAxios;
+ let persistentUserCallout;
+ let windowSpy;
+
+ beforeEach(() => {
+ const fixture = createDeferredLinkFixture();
+ const container = fixture.querySelector('.container');
+ button = fixture.querySelector('.js-close');
+ deferredLink = fixture.querySelector('.deferred-link');
+ normalLink = fixture.querySelector('.normal-link');
+ mockAxios = new MockAdapter(axios);
+ persistentUserCallout = new PersistentUserCallout(container);
+ spyOn(persistentUserCallout.container, 'remove');
+ windowSpy = spyOn(window, 'open').and.callFake(() => {});
+ });
+
+ afterEach(() => {
+ mockAxios.restore();
+ });
+
+ it('defers loading of a link until callout is dismissed', done => {
+ const { href, target } = deferredLink;
+ mockAxios.onPost(dismissEndpoint).replyOnce(200);
+
+ deferredLink.click();
+
+ setTimeoutPromise()
+ .then(() => {
+ expect(windowSpy).toHaveBeenCalledWith(href, target);
+ expect(persistentUserCallout.container.remove).toHaveBeenCalled();
+ expect(mockAxios.history.post[0].data).toBe(
+ JSON.stringify({ feature_name: featureName }),
+ );
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('does not dismiss callout on non-deferred links', done => {
+ normalLink.click();
+
+ setTimeoutPromise()
+ .then(() => {
+ expect(windowSpy).not.toHaveBeenCalled();
+ expect(persistentUserCallout.container.remove).not.toHaveBeenCalled();
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('does not follow link when notification is closed', done => {
+ mockAxios.onPost(dismissEndpoint).replyOnce(200);
+
+ button.click();
+
+ setTimeoutPromise()
+ .then(() => {
+ expect(windowSpy).not.toHaveBeenCalled();
+ expect(persistentUserCallout.container.remove).toHaveBeenCalled();
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
describe('factory', () => {
it('returns an instance of PersistentUserCallout with the provided container property', () => {
const fixture = createFixture();
diff --git a/spec/lib/feature_spec.rb b/spec/lib/feature_spec.rb
index 185abacf8e7..3d59b1f35a9 100644
--- a/spec/lib/feature_spec.rb
+++ b/spec/lib/feature_spec.rb
@@ -254,6 +254,22 @@ describe Feature do
end
end
+ describe '.remove' do
+ context 'for a non-persisted feature' do
+ it 'returns nil' do
+ expect(described_class.remove(:non_persisted_feature_flag)).to be_nil
+ end
+ end
+
+ context 'for a persisted feature' do
+ it 'returns true' do
+ described_class.enable(:persisted_feature_flag)
+
+ expect(described_class.remove(:persisted_feature_flag)).to be_truthy
+ end
+ end
+ end
+
describe Feature::Target do
describe '#targets' do
let(:project) { create(:project) }
diff --git a/spec/lib/gitlab/ci/ansi2html_spec.rb b/spec/lib/gitlab/ci/ansi2html_spec.rb
index 651fdaaabca..eaf06ed8992 100644
--- a/spec/lib/gitlab/ci/ansi2html_spec.rb
+++ b/spec/lib/gitlab/ci/ansi2html_spec.rb
@@ -6,11 +6,11 @@ describe Gitlab::Ci::Ansi2html do
subject { described_class }
it "prints non-ansi as-is" do
- expect(convert_html("Hello")).to eq('<span class="">Hello</span>')
+ expect(convert_html("Hello")).to eq('<span>Hello</span>')
end
it "strips non-color-changing control sequences" do
- expect(convert_html("Hello \e[2Kworld")).to eq('<span class="">Hello world</span>')
+ expect(convert_html("Hello \e[2Kworld")).to eq('<span>Hello world</span>')
end
it "prints simply red" do
@@ -34,7 +34,7 @@ describe Gitlab::Ci::Ansi2html do
end
it "resets colors after red on blue" do
- expect(convert_html("\e[31;44mHello\e[0m world")).to eq('<span class="term-fg-red term-bg-blue">Hello</span><span class=""> world</span>')
+ expect(convert_html("\e[31;44mHello\e[0m world")).to eq('<span class="term-fg-red term-bg-blue">Hello</span><span> world</span>')
end
it "performs color change from red/blue to yellow/blue" do
@@ -46,11 +46,11 @@ describe Gitlab::Ci::Ansi2html do
end
it "performs color change from red/blue to reset to yellow/green" do
- expect(convert_html("\e[31;44mHello\e[0m \e[33;42mworld")).to eq('<span class="term-fg-red term-bg-blue">Hello</span><span class=""> </span><span class="term-fg-yellow term-bg-green">world</span>')
+ expect(convert_html("\e[31;44mHello\e[0m \e[33;42mworld")).to eq('<span class="term-fg-red term-bg-blue">Hello</span><span> </span><span class="term-fg-yellow term-bg-green">world</span>')
end
it "ignores unsupported codes" do
- expect(convert_html("\e[51mHello\e[0m")).to eq('<span class="">Hello</span>')
+ expect(convert_html("\e[51mHello\e[0m")).to eq('<span>Hello</span>')
end
it "prints light red" do
@@ -74,8 +74,8 @@ describe Gitlab::Ci::Ansi2html do
end
it "resets bold text" do
- expect(convert_html("\e[1mHello\e[21m world")).to eq('<span class="term-bold">Hello</span><span class=""> world</span>')
- expect(convert_html("\e[1mHello\e[22m world")).to eq('<span class="term-bold">Hello</span><span class=""> world</span>')
+ expect(convert_html("\e[1mHello\e[21m world")).to eq('<span class="term-bold">Hello</span><span> world</span>')
+ expect(convert_html("\e[1mHello\e[22m world")).to eq('<span class="term-bold">Hello</span><span> world</span>')
end
it "prints italic text" do
@@ -83,7 +83,7 @@ describe Gitlab::Ci::Ansi2html do
end
it "resets italic text" do
- expect(convert_html("\e[3mHello\e[23m world")).to eq('<span class="term-italic">Hello</span><span class=""> world</span>')
+ expect(convert_html("\e[3mHello\e[23m world")).to eq('<span class="term-italic">Hello</span><span> world</span>')
end
it "prints underlined text" do
@@ -91,7 +91,7 @@ describe Gitlab::Ci::Ansi2html do
end
it "resets underlined text" do
- expect(convert_html("\e[4mHello\e[24m world")).to eq('<span class="term-underline">Hello</span><span class=""> world</span>')
+ expect(convert_html("\e[4mHello\e[24m world")).to eq('<span class="term-underline">Hello</span><span> world</span>')
end
it "prints concealed text" do
@@ -99,7 +99,7 @@ describe Gitlab::Ci::Ansi2html do
end
it "resets concealed text" do
- expect(convert_html("\e[8mHello\e[28m world")).to eq('<span class="term-conceal">Hello</span><span class=""> world</span>')
+ expect(convert_html("\e[8mHello\e[28m world")).to eq('<span class="term-conceal">Hello</span><span> world</span>')
end
it "prints crossed-out text" do
@@ -107,7 +107,7 @@ describe Gitlab::Ci::Ansi2html do
end
it "resets crossed-out text" do
- expect(convert_html("\e[9mHello\e[29m world")).to eq('<span class="term-cross">Hello</span><span class=""> world</span>')
+ expect(convert_html("\e[9mHello\e[29m world")).to eq('<span class="term-cross">Hello</span><span> world</span>')
end
it "can print 256 xterm fg colors" do
@@ -139,15 +139,15 @@ describe Gitlab::Ci::Ansi2html do
end
it "prints &lt;" do
- expect(convert_html("<")).to eq('<span class="">&lt;</span>')
+ expect(convert_html("<")).to eq('<span>&lt;</span>')
end
it "replaces newlines with line break tags" do
- expect(convert_html("\n")).to eq('<span class=""><br/><span class=""></span></span>')
+ expect(convert_html("\n")).to eq('<span><br/></span>')
end
it "groups carriage returns with newlines" do
- expect(convert_html("\r\n")).to eq('<span class=""><br/><span class=""></span></span>')
+ expect(convert_html("\r\n")).to eq('<span><br/></span>')
end
describe "incremental update" do
@@ -184,7 +184,7 @@ describe Gitlab::Ci::Ansi2html do
context "with partial sequence" do
let(:pre_text) { "Hello\e" }
- let(:pre_html) { "<span class=\"\">Hello</span>" }
+ let(:pre_html) { "<span>Hello</span>" }
let(:text) { "[1m World" }
let(:html) { "<span class=\"term-bold\"> World</span>" }
@@ -193,9 +193,9 @@ describe Gitlab::Ci::Ansi2html do
context 'with new line' do
let(:pre_text) { "Hello\r" }
- let(:pre_html) { "<span class=\"\">Hello\r</span>" }
+ let(:pre_html) { "<span>Hello\r</span>" }
let(:text) { "\nWorld" }
- let(:html) { "<span class=\"\"><br/><span class=\"\">World</span></span>" }
+ let(:html) { "<span><br/>World</span>" }
it_behaves_like 'stateable converter'
end
@@ -222,7 +222,7 @@ describe Gitlab::Ci::Ansi2html do
text = "#{section_start}Some text#{section_end}"
class_name_start = section_start.gsub("\033[0K", '').gsub('<', '&lt;')
class_name_end = section_end.gsub("\033[0K", '').gsub('<', '&lt;')
- html = %{<span class="">#{class_name_start}Some text#{class_name_end}</span>}
+ html = %{<span>#{class_name_start}Some text#{class_name_end}</span>}
expect(convert_html(text)).to eq(html)
end
@@ -232,12 +232,11 @@ describe Gitlab::Ci::Ansi2html do
let(:text) { "#{section_start}Some text#{section_end}" }
it 'prints light red' do
- text = "#{section_start}\e[91mHello\e[0m\n#{section_end}"
+ text = "#{section_start}\e[91mHello\e[0m\nLine 1\nLine 2\nLine 3\n#{section_end}"
header = %{<span class="term-fg-l-red section js-section-header section-header js-s-#{class_name(section_name)}">Hello</span>}
line_break = %{<span class="section js-section-header section-header js-s-#{class_name(section_name)}"><br/></span>}
- line = %{<span class="section line s_#{class_name(section_name)}"></span>}
- empty_line = %{<span class="section js-s-#{class_name(section_name)}"></span>}
- html = "#{section_start_html}#{header}#{line_break}#{line}#{empty_line}#{section_end_html}"
+ output_line = %{<span class="section line js-s-#{class_name(section_name)}">Line 1<br/>Line 2<br/>Line 3<br/></span>}
+ html = "#{section_start_html}#{header}#{line_break}#{output_line}#{section_end_html}"
expect(convert_html(text)).to eq(html)
end
diff --git a/spec/lib/gitlab/ci/config/entry/job_spec.rb b/spec/lib/gitlab/ci/config/entry/job_spec.rb
index d5861d5dd07..800ef122203 100644
--- a/spec/lib/gitlab/ci/config/entry/job_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/job_spec.rb
@@ -86,6 +86,22 @@ describe Gitlab::Ci::Config::Entry::Job do
it { expect(entry).to be_valid }
end
end
+
+ context 'when has needs' do
+ let(:config) do
+ { script: 'echo', needs: ['another-job'] }
+ end
+
+ it { expect(entry).to be_valid }
+
+ context 'when has dependencies' do
+ let(:config) do
+ { script: 'echo', dependencies: ['another-job'], needs: ['another-job'] }
+ end
+
+ it { expect(entry).to be_valid }
+ end
+ end
end
context 'when entry value is not correct' do
@@ -223,6 +239,43 @@ describe Gitlab::Ci::Config::Entry::Job do
expect(entry.errors).to include 'job start in must be blank'
end
end
+
+ context 'when has dependencies' do
+        context 'that are not an array of strings' do
+ let(:config) do
+ { script: 'echo', dependencies: 'build-job' }
+ end
+
+ it 'returns error about invalid type' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include 'job dependencies should be an array of strings'
+ end
+ end
+ end
+
+ context 'when has needs' do
+        context 'that are not an array of strings' do
+ let(:config) do
+ { script: 'echo', needs: 'build-job' }
+ end
+
+ it 'returns error about invalid type' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include 'job needs should be an array of strings'
+ end
+ end
+
+        context 'when it has dependencies that are not a subset of needs' do
+ let(:config) do
+ { script: 'echo', dependencies: ['another-job'], needs: ['build-job'] }
+ end
+
+ it 'returns error about invalid data' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include 'job dependencies the another-job should be part of needs'
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/config/normalizer_spec.rb b/spec/lib/gitlab/ci/config/normalizer_spec.rb
index cd880177170..6b766cc37bf 100644
--- a/spec/lib/gitlab/ci/config/normalizer_spec.rb
+++ b/spec/lib/gitlab/ci/config/normalizer_spec.rb
@@ -49,37 +49,44 @@ describe Gitlab::Ci::Config::Normalizer do
end
end
- context 'when jobs depend on parallelized jobs' do
- let(:config) { { job_name => job_config, other_job: { script: 'echo 1', dependencies: [job_name.to_s] } } }
-
- it 'parallelizes dependencies' do
- job_names = ["rspec 1/5", "rspec 2/5", "rspec 3/5", "rspec 4/5", "rspec 5/5"]
-
- expect(subject[:other_job][:dependencies]).to include(*job_names)
+ %i[dependencies needs].each do |context|
+ context "when job has #{context} on parallelized jobs" do
+ let(:config) do
+ {
+ job_name => job_config,
+ other_job: { script: 'echo 1', context => [job_name.to_s] }
+ }
+ end
+
+ it "parallelizes #{context}" do
+ job_names = ["rspec 1/5", "rspec 2/5", "rspec 3/5", "rspec 4/5", "rspec 5/5"]
+
+ expect(subject[:other_job][context]).to include(*job_names)
+ end
+
+ it "does not include original job name in #{context}" do
+ expect(subject[:other_job][context]).not_to include(job_name)
+ end
end
- it 'does not include original job name in dependencies' do
- expect(subject[:other_job][:dependencies]).not_to include(job_name)
- end
- end
+ context "when there are #{context} which are both parallelized and not" do
+ let(:config) do
+ {
+ job_name => job_config,
+ other_job: { script: 'echo 1' },
+ final_job: { script: 'echo 1', context => [job_name.to_s, "other_job"] }
+ }
+ end
- context 'when there are dependencies which are both parallelized and not' do
- let(:config) do
- {
- job_name => job_config,
- other_job: { script: 'echo 1' },
- final_job: { script: 'echo 1', dependencies: [job_name.to_s, "other_job"] }
- }
- end
-
- it 'parallelizes dependencies' do
- job_names = ["rspec 1/5", "rspec 2/5", "rspec 3/5", "rspec 4/5", "rspec 5/5"]
+ it "parallelizes #{context}" do
+ job_names = ["rspec 1/5", "rspec 2/5", "rspec 3/5", "rspec 4/5", "rspec 5/5"]
- expect(subject[:final_job][:dependencies]).to include(*job_names)
- end
+ expect(subject[:final_job][context]).to include(*job_names)
+ end
- it 'includes the regular job in dependencies' do
- expect(subject[:final_job][:dependencies]).to include('other_job')
+ it "includes the regular job in #{context}" do
+ expect(subject[:final_job][context]).to include('other_job')
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
index 417a2d119ff..9bccd5be4fe 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
@@ -38,8 +38,8 @@ describe Gitlab::Ci::Pipeline::Chain::Populate do
it 'populates pipeline with stages' do
expect(pipeline.stages).to be_one
expect(pipeline.stages.first).not_to be_persisted
- expect(pipeline.stages.first.builds).to be_one
- expect(pipeline.stages.first.builds.first).not_to be_persisted
+ expect(pipeline.stages.first.statuses).to be_one
+ expect(pipeline.stages.first.statuses.first).not_to be_persisted
end
it 'correctly assigns user' do
@@ -191,8 +191,8 @@ describe Gitlab::Ci::Pipeline::Chain::Populate do
step.perform!
expect(pipeline.stages.size).to eq 1
- expect(pipeline.stages.first.builds.size).to eq 1
- expect(pipeline.stages.first.builds.first.name).to eq 'rspec'
+ expect(pipeline.stages.first.statuses.size).to eq 1
+ expect(pipeline.stages.first.statuses.first.name).to eq 'rspec'
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
index 46ea0d7554b..762025f9bd9 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
@@ -6,8 +6,9 @@ describe Gitlab::Ci::Pipeline::Seed::Build do
let(:project) { create(:project, :repository) }
let(:pipeline) { create(:ci_empty_pipeline, project: project) }
let(:attributes) { { name: 'rspec', ref: 'master' } }
+ let(:previous_stages) { [] }
- let(:seed_build) { described_class.new(pipeline, attributes) }
+ let(:seed_build) { described_class.new(pipeline, attributes, previous_stages) }
describe '#attributes' do
subject { seed_build.attributes }
@@ -381,4 +382,39 @@ describe Gitlab::Ci::Pipeline::Seed::Build do
end
end
end
+
+ describe 'applying needs: dependency' do
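+    # The build seed should only be included when the needed job is seeded in a prior stage.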
+ subject { seed_build }
+
+ let(:attributes) do
+ {
+ name: 'rspec',
+ needs_attributes: [{
+ name: 'build'
+ }]
+ }
+ end
+
+ context 'when build job is not present in prior stages' do
+ it { is_expected.not_to be_included }
+ end
+
+ context 'when build job is part of prior stages' do
+ let(:stage_attributes) do
+ {
+ name: 'build',
+ index: 0,
+ builds: [{ name: 'build' }]
+ }
+ end
+
+ let(:stage_seed) do
+ Gitlab::Ci::Pipeline::Seed::Stage.new(pipeline, stage_attributes, [])
+ end
+
+ let(:previous_stages) { [stage_seed] }
+
+ it { is_expected.to be_included }
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/pipeline/seed/stage_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/stage_spec.rb
index ad864d0d56e..6fba9f37d91 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/stage_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/stage_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
describe Gitlab::Ci::Pipeline::Seed::Stage do
let(:project) { create(:project, :repository) }
let(:pipeline) { create(:ci_empty_pipeline, project: project) }
+ let(:previous_stages) { [] }
let(:attributes) do
{ name: 'test',
@@ -15,7 +16,7 @@ describe Gitlab::Ci::Pipeline::Seed::Stage do
end
subject do
- described_class.new(pipeline, attributes)
+ described_class.new(pipeline, attributes, previous_stages)
end
describe '#size' do
@@ -109,6 +110,17 @@ describe Gitlab::Ci::Pipeline::Seed::Stage do
end
end
+ describe '#seeds_names' do
+ it 'returns all job names' do
+ expect(subject.seeds_names).to contain_exactly(
+ 'rspec', 'spinach')
+ end
+
+ it 'returns a set' do
+ expect(subject.seeds_names).to be_a(Set)
+ end
+ end
+
describe '#to_resource' do
it 'builds a valid stage object with all builds' do
subject.to_resource.save!
diff --git a/spec/lib/gitlab/ci/trace/stream_spec.rb b/spec/lib/gitlab/ci/trace/stream_spec.rb
index 35250632e86..af519f4bae6 100644
--- a/spec/lib/gitlab/ci/trace/stream_spec.rb
+++ b/spec/lib/gitlab/ci/trace/stream_spec.rb
@@ -65,9 +65,9 @@ describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
result = stream.html
expect(result).to eq(
- "<span class=\"\">ヾ(´༎ຶД༎ຶ`)ノ<br/><span class=\"\"></span></span>"\
- "<span class=\"term-fg-green\">許功蓋</span><span class=\"\"><br/>"\
- "<span class=\"\"></span></span>")
+ "<span>ヾ(´༎ຶД༎ຶ`)ノ<br/></span>"\
+ "<span class=\"term-fg-green\">許功蓋</span>"\
+ "<span><br/></span>")
expect(result.encoding).to eq(Encoding.default_external)
end
end
@@ -253,7 +253,7 @@ describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
it 'returns html content with state' do
result = stream.html_with_state
- expect(result.html).to eq("<span class=\"\">1234</span>")
+ expect(result.html).to eq("<span>1234</span>")
end
context 'follow-up state' do
@@ -269,7 +269,7 @@ describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
result = stream.html_with_state(last_result.state)
expect(result.append).to be_truthy
- expect(result.html).to eq("<span class=\"\">5678</span>")
+ expect(result.html).to eq("<span>5678</span>")
end
end
end
@@ -305,13 +305,11 @@ describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
describe '#html' do
shared_examples_for 'htmls' do
it "returns html" do
- expect(stream.html).to eq(
- "<span class=\"\">12<br/><span class=\"\">34<br/>"\
- "<span class=\"\">56</span></span></span>")
+ expect(stream.html).to eq("<span>12<br/>34<br/>56</span>")
end
it "returns html for last line only" do
- expect(stream.html(last_lines: 1)).to eq("<span class=\"\">56</span>")
+ expect(stream.html(last_lines: 1)).to eq("<span>56</span>")
end
end
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index d3676ee03bf..4ffa1fc9fd8 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -1112,6 +1112,86 @@ module Gitlab
end
end
+ describe "Needs" do
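+        # Needs may only reference jobs from prior stages, and any explicit dependencies must be a subset of needs.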
+ let(:needs) { }
+ let(:dependencies) { }
+
+ let(:config) do
+ {
+ build1: { stage: 'build', script: 'test' },
+ build2: { stage: 'build', script: 'test' },
+ test1: { stage: 'test', script: 'test', needs: needs, dependencies: dependencies },
+ test2: { stage: 'test', script: 'test' },
+ deploy: { stage: 'test', script: 'test' }
+ }
+ end
+
+ subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
+
+ context 'no needs' do
+ it { expect { subject }.not_to raise_error }
+ end
+
+ context 'needs to builds' do
+ let(:needs) { %w(build1 build2) }
+
+ it "does create jobs with valid specification" do
+ expect(subject.builds.size).to eq(5)
+ expect(subject.builds[0]).to eq(
+ stage: "build",
+ stage_idx: 0,
+ name: "build1",
+ options: {
+ script: ["test"]
+ },
+ when: "on_success",
+ allow_failure: false,
+ yaml_variables: []
+ )
+ expect(subject.builds[2]).to eq(
+ stage: "test",
+ stage_idx: 1,
+ name: "test1",
+ options: {
+ script: ["test"]
+ },
+ needs_attributes: [
+ { name: "build1" },
+ { name: "build2" }
+ ],
+ when: "on_success",
+ allow_failure: false,
+ yaml_variables: []
+ )
+ end
+ end
+
+ context 'needs to builds defined as symbols' do
+ let(:needs) { [:build1, :build2] }
+
+ it { expect { subject }.not_to raise_error }
+ end
+
+ context 'undefined need' do
+ let(:needs) { ['undefined'] }
+
+ it { expect { subject }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'test1 job: undefined need: undefined') }
+ end
+
+ context 'needs to deploy' do
+ let(:needs) { ['deploy'] }
+
+ it { expect { subject }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'test1 job: need deploy is not defined in prior stages') }
+ end
+
+ context 'needs and dependencies that are mismatching' do
+ let(:needs) { %w(build1) }
+ let(:dependencies) { %w(build2) }
+
+ it { expect { subject }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'jobs:test1 dependencies the build2 should be part of needs') }
+ end
+ end
+
describe "Hidden jobs" do
let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config) }
subject { config_processor.stage_builds_attributes("test") }
diff --git a/spec/lib/gitlab/exclusive_lease_helpers_spec.rb b/spec/lib/gitlab/exclusive_lease_helpers_spec.rb
index 5107e1efbbd..c3b706fc538 100644
--- a/spec/lib/gitlab/exclusive_lease_helpers_spec.rb
+++ b/spec/lib/gitlab/exclusive_lease_helpers_spec.rb
@@ -25,13 +25,13 @@ describe Gitlab::ExclusiveLeaseHelpers, :clean_gitlab_redis_shared_state do
end
it 'calls the given block' do
- expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_control.once
+ expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_with_args(false)
end
it 'calls the given block continuously' do
- expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_control.once
- expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_control.once
- expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_control.once
+ expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_with_args(false)
+ expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_with_args(false)
+ expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_with_args(false)
end
it 'cancels the exclusive lease after the block' do
@@ -68,6 +68,15 @@ describe Gitlab::ExclusiveLeaseHelpers, :clean_gitlab_redis_shared_state do
expect { subject }.to raise_error('Failed to obtain a lock')
end
+
+ context 'when lease is granted after retry' do
+ it 'yields block with true' do
+ expect(lease).to receive(:try_obtain).exactly(3).times { nil }
+ expect(lease).to receive(:try_obtain).once { unique_key }
+
+ expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_with_args(true)
+ end
+ end
end
context 'when sleep second is specified' do
diff --git a/spec/lib/gitlab/http_spec.rb b/spec/lib/gitlab/http_spec.rb
index 158f77cab2c..d3f9be845dd 100644
--- a/spec/lib/gitlab/http_spec.rb
+++ b/spec/lib/gitlab/http_spec.rb
@@ -23,14 +23,14 @@ describe Gitlab::HTTP do
end
end
- describe 'allow_local_requests_from_hooks_and_services is' do
+ describe 'allow_local_requests_from_web_hooks_and_services is' do
before do
WebMock.stub_request(:get, /.*/).to_return(status: 200, body: 'Success')
end
context 'disabled' do
before do
- allow(Gitlab::CurrentSettings).to receive(:allow_local_requests_from_hooks_and_services?).and_return(false)
+ allow(Gitlab::CurrentSettings).to receive(:allow_local_requests_from_web_hooks_and_services?).and_return(false)
end
it 'deny requests to localhost' do
@@ -52,7 +52,7 @@ describe Gitlab::HTTP do
context 'enabled' do
before do
- allow(Gitlab::CurrentSettings).to receive(:allow_local_requests_from_hooks_and_services?).and_return(true)
+ allow(Gitlab::CurrentSettings).to receive(:allow_local_requests_from_web_hooks_and_services?).and_return(true)
end
it 'allow requests to localhost' do
diff --git a/spec/lib/gitlab/kubernetes/helm/reset_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/reset_command_spec.rb
new file mode 100644
index 00000000000..d49d4779735
--- /dev/null
+++ b/spec/lib/gitlab/kubernetes/helm/reset_command_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Kubernetes::Helm::ResetCommand do
+ let(:rbac) { true }
+ let(:name) { 'helm' }
+ let(:files) { {} }
+ let(:reset_command) { described_class.new(name: name, rbac: rbac, files: files) }
+
+ subject { reset_command }
+
+ it_behaves_like 'helm commands' do
+ let(:commands) do
+ <<~EOS
+ helm reset
+ kubectl delete replicaset -n gitlab-managed-apps -l name\\=tiller
+ EOS
+ end
+ end
+
+ context 'when there is a ca.pem file' do
+ let(:files) { { 'ca.pem': 'some file content' } }
+
+ it_behaves_like 'helm commands' do
+ let(:commands) do
+ <<~EOS1.squish + "\n" + <<~EOS2
+ helm reset
+ --tls
+ --tls-ca-cert /data/helm/helm/config/ca.pem
+ --tls-cert /data/helm/helm/config/cert.pem
+ --tls-key /data/helm/helm/config/key.pem
+ EOS1
+ kubectl delete replicaset -n gitlab-managed-apps -l name\\=tiller
+ EOS2
+ end
+ end
+ end
+
+ describe '#pod_resource' do
+ subject { reset_command.pod_resource }
+
+ context 'rbac is enabled' do
+ let(:rbac) { true }
+
+ it 'generates a pod that uses the tiller serviceAccountName' do
+ expect(subject.spec.serviceAccountName).to eq('tiller')
+ end
+ end
+
+ context 'rbac is not enabled' do
+ let(:rbac) { false }
+
+ it 'generates a pod that uses the default serviceAccountName' do
+        expect(subject.spec.serviceAccountName).to be_nil
+ end
+ end
+ end
+
+ describe '#pod_name' do
+ subject { reset_command.pod_name }
+
+ it { is_expected.to eq('uninstall-helm') }
+ end
+end
diff --git a/spec/lib/gitlab/kubernetes/kube_client_spec.rb b/spec/lib/gitlab/kubernetes/kube_client_spec.rb
index 97ebb5f1554..f49d4e23e39 100644
--- a/spec/lib/gitlab/kubernetes/kube_client_spec.rb
+++ b/spec/lib/gitlab/kubernetes/kube_client_spec.rb
@@ -58,7 +58,7 @@ describe Gitlab::Kubernetes::KubeClient do
context 'when local requests are allowed' do
before do
- stub_application_setting(allow_local_requests_from_hooks_and_services: true)
+ stub_application_setting(allow_local_requests_from_web_hooks_and_services: true)
end
it 'allows local addresses' do
diff --git a/spec/lib/gitlab/metrics/samplers/influx_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/influx_sampler_spec.rb
index 81954fcf8c5..2923048f742 100644
--- a/spec/lib/gitlab/metrics/samplers/influx_sampler_spec.rb
+++ b/spec/lib/gitlab/metrics/samplers/influx_sampler_spec.rb
@@ -17,18 +17,10 @@ describe Gitlab::Metrics::Samplers::InfluxSampler do
it 'samples various statistics' do
expect(sampler).to receive(:sample_memory_usage)
expect(sampler).to receive(:sample_file_descriptors)
- expect(sampler).to receive(:sample_gc)
expect(sampler).to receive(:flush)
sampler.sample
end
-
- it 'clears any GC profiles' do
- expect(sampler).to receive(:flush)
- expect(GC::Profiler).to receive(:clear)
-
- sampler.sample
- end
end
describe '#flush' do
@@ -67,18 +59,6 @@ describe Gitlab::Metrics::Samplers::InfluxSampler do
end
end
- describe '#sample_gc' do
- it 'adds a metric containing garbage collection statistics' do
- expect(GC::Profiler).to receive(:total_time).and_return(0.24)
-
- expect(sampler).to receive(:add_metric)
- .with(/gc_statistics/, an_instance_of(Hash))
- .and_call_original
-
- sampler.sample_gc
- end
- end
-
describe '#add_metric' do
it 'prefixes the series name for a Rails process' do
expect(sampler).to receive(:sidekiq?).and_return(false)
diff --git a/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb
index 4d93b70e6e3..5005a5d9ebc 100644
--- a/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb
+++ b/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb
@@ -59,17 +59,29 @@ describe Gitlab::Metrics::Samplers::RubySampler do
end
it 'clears any GC profiles' do
- expect(GC::Profiler).to receive(:clear)
+ expect(GC::Profiler).to receive(:clear).at_least(:once)
sampler.sample
end
end
describe '#sample_gc' do
- it 'adds a metric containing garbage collection time statistics' do
- expect(GC::Profiler).to receive(:total_time).and_return(0.24)
+ let!(:sampler) { described_class.new(5) }
- expect(sampler.metrics[:total_time]).to receive(:increment).with({}, 0.24)
+ let(:gc_reports) { [{ GC_TIME: 0.1 }, { GC_TIME: 0.2 }, { GC_TIME: 0.3 }] }
+
+ it 're-enables GC::Profiler if needed' do
+ expect(GC::Profiler).to receive(:enable)
+
+ sampler.sample
+ end
+
+ it 'observes GC cycles time' do
+ expect(sampler).to receive(:sample_gc_reports).and_return(gc_reports)
+
+ expect(sampler.metrics[:gc_duration_seconds]).to receive(:observe).with({}, 0.1).ordered
+ expect(sampler.metrics[:gc_duration_seconds]).to receive(:observe).with({}, 0.2).ordered
+ expect(sampler.metrics[:gc_duration_seconds]).to receive(:observe).with({}, 0.3).ordered
sampler.sample
end
diff --git a/spec/lib/gitlab/octokit/middleware_spec.rb b/spec/lib/gitlab/octokit/middleware_spec.rb
index 7f2b523f5b7..43f6d13f7ba 100644
--- a/spec/lib/gitlab/octokit/middleware_spec.rb
+++ b/spec/lib/gitlab/octokit/middleware_spec.rb
@@ -30,7 +30,7 @@ describe Gitlab::Octokit::Middleware do
context 'when localhost requests are not allowed' do
before do
- stub_application_setting(allow_local_requests_from_hooks_and_services: false)
+ stub_application_setting(allow_local_requests_from_web_hooks_and_services: false)
end
it_behaves_like 'Local URL'
@@ -38,7 +38,7 @@ describe Gitlab::Octokit::Middleware do
context 'when localhost requests are allowed' do
before do
- stub_application_setting(allow_local_requests_from_hooks_and_services: true)
+ stub_application_setting(allow_local_requests_from_web_hooks_and_services: true)
end
it_behaves_like 'Public URL'
@@ -50,7 +50,7 @@ describe Gitlab::Octokit::Middleware do
context 'when local network requests are not allowed' do
before do
- stub_application_setting(allow_local_requests_from_hooks_and_services: false)
+ stub_application_setting(allow_local_requests_from_web_hooks_and_services: false)
end
it_behaves_like 'Local URL'
@@ -58,7 +58,7 @@ describe Gitlab::Octokit::Middleware do
context 'when local network requests are allowed' do
before do
- stub_application_setting(allow_local_requests_from_hooks_and_services: true)
+ stub_application_setting(allow_local_requests_from_web_hooks_and_services: true)
end
it_behaves_like 'Public URL'
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index 6d0a2098b2e..297c4f0b683 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -24,7 +24,7 @@ describe Gitlab::UsageData do
create(:cluster, :group, :disabled)
create(:clusters_applications_helm, :installed, cluster: gcp_cluster)
create(:clusters_applications_ingress, :installed, cluster: gcp_cluster)
- create(:clusters_applications_cert_managers, :installed, cluster: gcp_cluster)
+ create(:clusters_applications_cert_manager, :installed, cluster: gcp_cluster)
create(:clusters_applications_prometheus, :installed, cluster: gcp_cluster)
create(:clusters_applications_runner, :installed, cluster: gcp_cluster)
create(:clusters_applications_knative, :installed, cluster: gcp_cluster)
diff --git a/spec/lib/peek/views/rugged_spec.rb b/spec/lib/peek/views/rugged_spec.rb
index 8bf996fc6bc..d07d6b51a1f 100644
--- a/spec/lib/peek/views/rugged_spec.rb
+++ b/spec/lib/peek/views/rugged_spec.rb
@@ -24,7 +24,7 @@ describe Peek::Views::Rugged, :request_store do
args: [project.repository.raw, 'HEAD'],
duration: 0.123)
::Gitlab::RuggedInstrumentation.add_call_details(feature: :rugged_test2,
- args: [project.repository.raw, 'refs/heads/master'],
+ args: [project.repository, 'refs/heads/master'],
duration: 0.456)
results = subject.results
@@ -32,7 +32,11 @@ describe Peek::Views::Rugged, :request_store do
expect(results[:duration]).to eq('1234.00ms')
expect(results[:details].count).to eq(2)
- expect(results[:details][0][:args]).to eq([project.repository.raw.to_s, "refs/heads/master"])
- expect(results[:details][1][:args]).to eq([project.repository.raw.to_s, "HEAD"])
+ expected = [
+ [project.repository.raw.to_s, "HEAD"],
+ [project.repository.to_s, "refs/heads/master"]
+ ]
+
+ expect(results[:details].map { |data| data[:args] }).to match_array(expected)
end
end
diff --git a/spec/models/ci/build_need_spec.rb b/spec/models/ci/build_need_spec.rb
new file mode 100644
index 00000000000..450dd550a8f
--- /dev/null
+++ b/spec/models/ci/build_need_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::BuildNeed, model: true do
+ let(:build_need) { build(:ci_build_need) }
+
+ it { is_expected.to belong_to(:build) }
+
+ it { is_expected.to validate_presence_of(:build) }
+ it { is_expected.to validate_presence_of(:name) }
+ it { is_expected.to validate_length_of(:name).is_at_most(128) }
+end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 17c7c05324a..b7e005e3883 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -19,7 +19,8 @@ describe Ci::Build do
it { is_expected.to belong_to(:runner) }
it { is_expected.to belong_to(:trigger_request) }
it { is_expected.to belong_to(:erased_by) }
- it { is_expected.to have_many(:trace_sections)}
+ it { is_expected.to have_many(:trace_sections) }
+ it { is_expected.to have_many(:needs) }
it { is_expected.to have_one(:deployment) }
it { is_expected.to have_one(:runner_session) }
it { is_expected.to have_many(:job_variables) }
@@ -182,6 +183,47 @@ describe Ci::Build do
end
end
+ describe '.with_needs' do
+ let!(:build) { create(:ci_build) }
+ let!(:build_b) { create(:ci_build) }
+ let!(:build_need_a) { create(:ci_build_need, build: build) }
+ let!(:build_need_b) { create(:ci_build_need, build: build_b) }
+
+ context 'when passing build name' do
+ subject { described_class.with_needs(build_need_a.name) }
+
+ it { is_expected.to contain_exactly(build) }
+ end
+
+ context 'when not passing any build name' do
+ subject { described_class.with_needs }
+
+ it { is_expected.to contain_exactly(build, build_b) }
+ end
+
+ context 'when not matching build name' do
+ subject { described_class.with_needs('undefined') }
+
+ it { is_expected.to be_empty }
+ end
+ end
+
+ describe '.without_needs' do
+ let!(:build) { create(:ci_build) }
+
+ subject { described_class.without_needs }
+
+ context 'when no build_need is created' do
+ it { is_expected.to contain_exactly(build) }
+ end
+
+ context 'when a build_need is created' do
+ let!(:need_a) { create(:ci_build_need, build: build) }
+
+ it { is_expected.to be_empty }
+ end
+ end
+
describe '#enqueue' do
let(:build) { create(:ci_build, :created) }
@@ -595,6 +637,59 @@ describe Ci::Build do
expect(staging.depends_on_builds.map(&:id))
.to contain_exactly(build.id, retried_rspec.id, rubocop_test.id)
end
+
+ describe '#dependencies' do
+ let(:dependencies) { }
+ let(:needs) { }
+
+ let!(:final) do
+ create(:ci_build,
+ pipeline: pipeline, name: 'final',
+ stage_idx: 3, stage: 'deploy', options: {
+ dependencies: dependencies
+ }
+ )
+ end
+
+ before do
+ needs.to_a.each do |need|
+ create(:ci_build_need, build: final, name: need)
+ end
+ end
+
+ subject { final.dependencies }
+
+ context 'when dependencies are defined' do
+ let(:dependencies) { %w(rspec staging) }
+
+ it { is_expected.to contain_exactly(rspec_test, staging) }
+ end
+
+ context 'when needs are defined' do
+ let(:needs) { %w(build rspec staging) }
+
+ it { is_expected.to contain_exactly(build, rspec_test, staging) }
+
+ context 'when ci_dag_support is disabled' do
+ before do
+ stub_feature_flags(ci_dag_support: false)
+ end
+
+ it { is_expected.to contain_exactly(build, rspec_test, rubocop_test, staging) }
+ end
+ end
+
+ context 'when needs and dependencies are defined' do
+ let(:dependencies) { %w(rspec staging) }
+ let(:needs) { %w(build rspec staging) }
+
+ it { is_expected.to contain_exactly(rspec_test, staging) }
+ end
+
+ context 'when neither dependencies nor needs are defined' do
+ it { is_expected.to contain_exactly(build, rspec_test, rubocop_test, staging) }
+ end
+ end
end
describe '#triggered_by?' do
@@ -3614,6 +3709,7 @@ describe Ci::Build do
before do
build.ensure_metadata
+ build.needs.create!(name: 'another-job')
end
it 'drops metadata' do
@@ -3621,6 +3717,7 @@ describe Ci::Build do
expect(build.reload).to be_degenerated
expect(build.metadata).to be_nil
+ expect(build.needs).to be_empty
end
end
diff --git a/spec/models/clusters/applications/cert_manager_spec.rb b/spec/models/clusters/applications/cert_manager_spec.rb
index 6e1f80cadc2..93050e80b07 100644
--- a/spec/models/clusters/applications/cert_manager_spec.rb
+++ b/spec/models/clusters/applications/cert_manager_spec.rb
@@ -3,17 +3,17 @@
require 'rails_helper'
describe Clusters::Applications::CertManager do
- let(:cert_manager) { create(:clusters_applications_cert_managers) }
+ let(:cert_manager) { create(:clusters_applications_cert_manager) }
- include_examples 'cluster application core specs', :clusters_applications_cert_managers
- include_examples 'cluster application status specs', :clusters_applications_cert_managers
- include_examples 'cluster application version specs', :clusters_applications_cert_managers
+ include_examples 'cluster application core specs', :clusters_applications_cert_manager
+ include_examples 'cluster application status specs', :clusters_applications_cert_manager
+ include_examples 'cluster application version specs', :clusters_applications_cert_manager
include_examples 'cluster application initial status specs'
describe '#can_uninstall?' do
subject { cert_manager.can_uninstall? }
- it { is_expected.to be_falsey }
+ it { is_expected.to be_truthy }
end
describe '#install_command' do
@@ -72,7 +72,7 @@ describe Clusters::Applications::CertManager do
end
context 'application failed to install previously' do
- let(:cert_manager) { create(:clusters_applications_cert_managers, :errored, version: '0.0.1') }
+ let(:cert_manager) { create(:clusters_applications_cert_manager, :errored, version: '0.0.1') }
it 'is initialized with the locked version' do
expect(subject.version).to eq('v0.5.2')
@@ -80,6 +80,44 @@ describe Clusters::Applications::CertManager do
end
end
+ describe '#uninstall_command' do
+ subject { cert_manager.uninstall_command }
+
+ it { is_expected.to be_an_instance_of(Gitlab::Kubernetes::Helm::DeleteCommand) }
+
+ it 'is initialized with cert_manager arguments' do
+ expect(subject.name).to eq('certmanager')
+ expect(subject).to be_rbac
+ expect(subject.files).to eq(cert_manager.files)
+ end
+
+ it 'specifies a post delete command to remove custom resource definitions' do
+ expect(subject.postdelete).to eq([
+ "kubectl delete secret -n gitlab-managed-apps letsencrypt-prod --ignore-not-found",
+ 'kubectl delete crd certificates.certmanager.k8s.io --ignore-not-found',
+ 'kubectl delete crd clusterissuers.certmanager.k8s.io --ignore-not-found',
+ 'kubectl delete crd issuers.certmanager.k8s.io --ignore-not-found'
+ ])
+ end
+
+ context 'secret key name is not found' do
+ before do
+ allow(File).to receive(:read).and_call_original
+ expect(File).to receive(:read)
+ .with(Rails.root.join('vendor', 'cert_manager', 'cluster_issuer.yaml'))
+ .and_return('key: value')
+ end
+
+ it 'does not try to delete the secret' do
+ expect(subject.postdelete).to eq([
+ 'kubectl delete crd certificates.certmanager.k8s.io --ignore-not-found',
+ 'kubectl delete crd clusterissuers.certmanager.k8s.io --ignore-not-found',
+ 'kubectl delete crd issuers.certmanager.k8s.io --ignore-not-found'
+ ])
+ end
+ end
+ end
+
describe '#files' do
let(:application) { cert_manager }
let(:values) { subject[:'values.yaml'] }
diff --git a/spec/models/clusters/applications/helm_spec.rb b/spec/models/clusters/applications/helm_spec.rb
index 6ea6c110d62..d4f8b552088 100644
--- a/spec/models/clusters/applications/helm_spec.rb
+++ b/spec/models/clusters/applications/helm_spec.rb
@@ -19,11 +19,27 @@ describe Clusters::Applications::Helm do
end
describe '#can_uninstall?' do
- let(:helm) { create(:clusters_applications_helm) }
+ context "with other existing applications" do
+ Clusters::Cluster::APPLICATIONS.keys.each do |application_name|
+ next if application_name == 'helm'
+
+ it do
+ cluster_application = create("clusters_applications_#{application_name}".to_sym)
+
+ helm = cluster_application.cluster.application_helm
- subject { helm.can_uninstall? }
+ expect(helm.allowed_to_uninstall?).to be_falsy
+ end
+ end
+ end
- it { is_expected.to be_falsey }
+ context "without other existing applications" do
+ subject { helm.can_uninstall? }
+
+ let(:helm) { create(:clusters_applications_helm) }
+
+ it { is_expected.to be_truthy }
+ end
end
describe '#issue_client_cert' do
@@ -73,4 +89,41 @@ describe Clusters::Applications::Helm do
end
end
end
+
+ describe '#uninstall_command' do
+ let(:helm) { create(:clusters_applications_helm) }
+
+ subject { helm.uninstall_command }
+
+ it { is_expected.to be_an_instance_of(Gitlab::Kubernetes::Helm::ResetCommand) }
+
+ it 'has name' do
+ expect(subject.name).to eq('helm')
+ end
+
+ it 'has cert files' do
+ expect(subject.files[:'ca.pem']).to be_present
+ expect(subject.files[:'ca.pem']).to eq(helm.ca_cert)
+
+ expect(subject.files[:'cert.pem']).to be_present
+ expect(subject.files[:'key.pem']).to be_present
+
+ cert = OpenSSL::X509::Certificate.new(subject.files[:'cert.pem'])
+ expect(cert.not_after).to be > 999.years.from_now
+ end
+
+ describe 'rbac' do
+ context 'rbac cluster' do
+ it { expect(subject).to be_rbac }
+ end
+
+ context 'non rbac cluster' do
+ before do
+ helm.cluster.platform_kubernetes.abac!
+ end
+
+ it { expect(subject).not_to be_rbac }
+ end
+ end
+ end
end
diff --git a/spec/models/clusters/applications/knative_spec.rb b/spec/models/clusters/applications/knative_spec.rb
index 342ed907854..334f10526cb 100644
--- a/spec/models/clusters/applications/knative_spec.rb
+++ b/spec/models/clusters/applications/knative_spec.rb
@@ -91,7 +91,7 @@ describe Clusters::Applications::Knative do
end
it 'does not install metrics for prometheus' do
- expect(subject.postinstall).to be_nil
+ expect(subject.postinstall).to be_empty
end
context 'with prometheus installed' do
@@ -101,7 +101,7 @@ describe Clusters::Applications::Knative do
subject { knative.install_command }
it 'installs metrics' do
- expect(subject.postinstall).not_to be_nil
+ expect(subject.postinstall).not_to be_empty
expect(subject.postinstall.length).to be(1)
expect(subject.postinstall[0]).to eql("kubectl apply -f #{Clusters::Applications::Knative::METRICS_CONFIG}")
end
diff --git a/spec/models/clusters/applications/prometheus_spec.rb b/spec/models/clusters/applications/prometheus_spec.rb
index 26267c64112..d9f31c46f59 100644
--- a/spec/models/clusters/applications/prometheus_spec.rb
+++ b/spec/models/clusters/applications/prometheus_spec.rb
@@ -142,7 +142,7 @@ describe Clusters::Applications::Prometheus do
end
it 'does not install knative metrics' do
- expect(subject.postinstall).to be_nil
+ expect(subject.postinstall).to be_empty
end
context 'with knative installed' do
diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb
index 761695bfe91..96212d0c864 100644
--- a/spec/models/clusters/cluster_spec.rb
+++ b/spec/models/clusters/cluster_spec.rb
@@ -451,7 +451,7 @@ describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
context 'when applications are created' do
let!(:helm) { create(:clusters_applications_helm, cluster: cluster) }
let!(:ingress) { create(:clusters_applications_ingress, cluster: cluster) }
- let!(:cert_manager) { create(:clusters_applications_cert_managers, cluster: cluster) }
+ let!(:cert_manager) { create(:clusters_applications_cert_manager, cluster: cluster) }
let!(:prometheus) { create(:clusters_applications_prometheus, cluster: cluster) }
let!(:runner) { create(:clusters_applications_runner, cluster: cluster) }
let!(:jupyter) { create(:clusters_applications_jupyter, cluster: cluster) }
diff --git a/spec/models/clusters/platforms/kubernetes_spec.rb b/spec/models/clusters/platforms/kubernetes_spec.rb
index 471769e4aab..5811016ea4d 100644
--- a/spec/models/clusters/platforms/kubernetes_spec.rb
+++ b/spec/models/clusters/platforms/kubernetes_spec.rb
@@ -106,7 +106,7 @@ describe Clusters::Platforms::Kubernetes do
before do
allow(ApplicationSetting)
.to receive(:current)
- .and_return(ApplicationSetting.build_from_defaults(allow_local_requests_from_hooks_and_services: true))
+ .and_return(ApplicationSetting.build_from_defaults(allow_local_requests_from_web_hooks_and_services: true))
end
it { expect(kubernetes.save).to be_truthy }
diff --git a/spec/models/commit_spec.rb b/spec/models/commit_spec.rb
index e76186fb280..7b35c2ffd36 100644
--- a/spec/models/commit_spec.rb
+++ b/spec/models/commit_spec.rb
@@ -556,6 +556,7 @@ eos
it 'returns the URI type at the given path' do
expect(commit.uri_type('files/html')).to be(:tree)
expect(commit.uri_type('files/images/logo-black.png')).to be(:raw)
+ expect(commit.uri_type('files/images/wm.svg')).to be(:raw)
expect(project.commit('video').uri_type('files/videos/intro.mp4')).to be(:raw)
expect(commit.uri_type('files/js/application.js')).to be(:blob)
end
diff --git a/spec/models/diff_note_spec.rb b/spec/models/diff_note_spec.rb
index d9e1fe4b165..8d7dafc523d 100644
--- a/spec/models/diff_note_spec.rb
+++ b/spec/models/diff_note_spec.rb
@@ -34,6 +34,10 @@ describe DiffNote do
subject { create(:diff_note_on_merge_request, project: project, position: position, noteable: merge_request) }
+ describe 'validations' do
+ it_behaves_like 'a valid diff positionable note', :diff_note_on_commit
+ end
+
describe "#position=" do
context "when provided a string" do
it "sets the position" do
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index ce0681c4331..d2e0bed721e 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
describe Environment, :use_clean_rails_memory_store_caching do
include ReactiveCachingHelpers
+ using RSpec::Parameterized::TableSyntax
let(:project) { create(:project, :stubbed_repository) }
subject(:environment) { create(:environment, project: project) }
@@ -782,12 +783,9 @@ describe Environment, :use_clean_rails_memory_store_caching do
let(:source_path) { 'source/file.html' }
let(:sha) { RepoHelpers.sample_commit.id }
- before do
- environment.external_url = 'http://example.com'
- end
-
context 'when the public path is not known' do
before do
+ environment.external_url = 'http://example.com'
allow(project).to receive(:public_path_for_source_path).with(source_path, sha).and_return(nil)
end
@@ -797,12 +795,23 @@ describe Environment, :use_clean_rails_memory_store_caching do
end
context 'when the public path is known' do
- before do
- allow(project).to receive(:public_path_for_source_path).with(source_path, sha).and_return('file.html')
- end
-
- it 'returns the full external URL' do
- expect(environment.external_url_for(source_path, sha)).to eq('http://example.com/file.html')
+ where(:external_url, :public_path, :full_url) do
+ 'http://example.com' | 'file.html' | 'http://example.com/file.html'
+ 'http://example.com/' | 'file.html' | 'http://example.com/file.html'
+ 'http://example.com' | '/file.html' | 'http://example.com/file.html'
+ 'http://example.com/' | '/file.html' | 'http://example.com/file.html'
+ 'http://example.com/subpath' | 'public/file.html' | 'http://example.com/subpath/public/file.html'
+ 'http://example.com/subpath/' | 'public/file.html' | 'http://example.com/subpath/public/file.html'
+ 'http://example.com/subpath' | '/public/file.html' | 'http://example.com/subpath/public/file.html'
+ 'http://example.com/subpath/' | '/public/file.html' | 'http://example.com/subpath/public/file.html'
+ end
+ with_them do
+ it 'returns the full external URL' do
+ environment.external_url = external_url
+ allow(project).to receive(:public_path_for_source_path).with(source_path, sha).and_return(public_path)
+
+ expect(environment.external_url_for(source_path, sha)).to eq(full_url)
+ end
end
end
end
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 7e9bbf5a407..1c41ceb7deb 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -23,6 +23,7 @@ describe Group do
it { is_expected.to have_many(:badges).class_name('GroupBadge') }
it { is_expected.to have_many(:cluster_groups).class_name('Clusters::Group') }
it { is_expected.to have_many(:clusters).class_name('Clusters::Cluster') }
+ it { is_expected.to have_many(:container_repositories) }
describe '#members & #requesters' do
let(:requester) { create(:user) }
diff --git a/spec/models/lfs_download_object_spec.rb b/spec/models/lfs_download_object_spec.rb
index effd8b08124..8b53effe98f 100644
--- a/spec/models/lfs_download_object_spec.rb
+++ b/spec/models/lfs_download_object_spec.rb
@@ -50,7 +50,7 @@ describe LfsDownloadObject do
before do
allow(ApplicationSetting)
.to receive(:current)
- .and_return(ApplicationSetting.build_from_defaults(allow_local_requests_from_hooks_and_services: setting))
+ .and_return(ApplicationSetting.build_from_defaults(allow_local_requests_from_web_hooks_and_services: setting))
end
context 'are allowed' do
diff --git a/spec/models/namespace/aggregation_schedule_spec.rb b/spec/models/namespace/aggregation_schedule_spec.rb
index 0f1283717e0..38bf8089411 100644
--- a/spec/models/namespace/aggregation_schedule_spec.rb
+++ b/spec/models/namespace/aggregation_schedule_spec.rb
@@ -7,24 +7,6 @@ RSpec.describe Namespace::AggregationSchedule, :clean_gitlab_redis_shared_state,
it { is_expected.to belong_to :namespace }
- describe '.delay_timeout' do
- context 'when timeout is set on redis' do
- it 'uses personalized timeout' do
- Gitlab::Redis::SharedState.with do |redis|
- redis.set(described_class::REDIS_SHARED_KEY, 1.hour)
- end
-
- expect(described_class.delay_timeout).to eq(1.hour)
- end
- end
-
- context 'when timeout is not set on redis' do
- it 'uses default timeout' do
- expect(described_class.delay_timeout).to eq(3.hours)
- end
- end
- end
-
describe '#schedule_root_storage_statistics' do
let(:namespace) { create(:namespace) }
let(:aggregation_schedule) { namespace.build_aggregation_schedule }
@@ -87,21 +69,5 @@ RSpec.describe Namespace::AggregationSchedule, :clean_gitlab_redis_shared_state,
aggregation_schedule.schedule_root_storage_statistics
end
end
-
- context 'with a personalized lease timeout' do
- before do
- Gitlab::Redis::SharedState.with do |redis|
- redis.set(described_class::REDIS_SHARED_KEY, 1.hour)
- end
- end
-
- it 'uses a personalized time' do
- expect(Namespaces::RootStatisticsWorker)
- .to receive(:perform_in)
- .with(1.hour, aggregation_schedule.namespace_id)
-
- aggregation_schedule.save!
- end
- end
end
end
diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb
index 03003e3dd7d..bfd0e5f0558 100644
--- a/spec/models/note_spec.rb
+++ b/spec/models/note_spec.rb
@@ -913,6 +913,22 @@ describe Note do
end
end
+ describe '#special_role=' do
+ let(:role) { Note::SpecialRole::FIRST_TIME_CONTRIBUTOR }
+
+ it 'assigns role' do
+ subject.special_role = role
+
+ expect(subject.special_role).to eq(role)
+ end
+
+ it 'does not assign unknown role' do
+ expect { subject.special_role = :bogus }.to raise_error(/Role is undefined/)
+
+ expect(subject.special_role).to be_nil
+ end
+ end
+
describe '#parent' do
it 'returns project for project notes' do
project = create(:project)
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 32fb0c0ff73..157103123ad 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -2950,6 +2950,16 @@ describe Project do
expect(project.public_path_for_source_path('file.html', sha)).to be_nil
end
end
+
+ it 'returns a public path with a leading slash unmodified' do
+ route_map = Gitlab::RouteMap.new(<<-MAP.strip_heredoc)
+ - source: 'source/file.html'
+ public: '/public/file'
+ MAP
+ allow(project).to receive(:route_map_for).with(sha).and_return(route_map)
+
+ expect(project.public_path_for_source_path('source/file.html', sha)).to eq('/public/file')
+ end
end
context 'when there is no route map' do
diff --git a/spec/models/project_statistics_spec.rb b/spec/models/project_statistics_spec.rb
index db3e4902c64..a164ed9bbea 100644
--- a/spec/models/project_statistics_spec.rb
+++ b/spec/models/project_statistics_spec.rb
@@ -140,18 +140,7 @@ describe ProjectStatistics do
let(:namespace) { create(:group) }
let(:project) { create(:project, namespace: namespace) }
- context 'when the feature flag is off' do
- it 'does not schedule the aggregation worker' do
- stub_feature_flags(update_statistics_namespace: false, namespace: namespace)
-
- expect(Namespaces::ScheduleAggregationWorker)
- .not_to receive(:perform_async)
-
- statistics.refresh!(only: [:lfs_objects_size])
- end
- end
-
- context 'when the feature flag is on' do
+ context 'when arguments are passed' do
it 'schedules the aggregation worker' do
expect(Namespaces::ScheduleAggregationWorker)
.to receive(:perform_async)
diff --git a/spec/requests/api/group_container_repositories_spec.rb b/spec/requests/api/group_container_repositories_spec.rb
new file mode 100644
index 00000000000..0a41e455d01
--- /dev/null
+++ b/spec/requests/api/group_container_repositories_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe API::GroupContainerRepositories do
+ set(:group) { create(:group, :private) }
+ set(:project) { create(:project, :private, group: group) }
+ let(:reporter) { create(:user) }
+ let(:guest) { create(:user) }
+
+ let(:root_repository) { create(:container_repository, :root, project: project) }
+ let(:test_repository) { create(:container_repository, project: project) }
+
+ let(:users) do
+ {
+ anonymous: nil,
+ guest: guest,
+ reporter: reporter
+ }
+ end
+
+ let(:api_user) { reporter }
+
+ before do
+ group.add_reporter(reporter)
+ group.add_guest(guest)
+
+ stub_feature_flags(container_registry_api: true)
+ stub_container_registry_config(enabled: true)
+
+ root_repository
+ test_repository
+ end
+
+ describe 'GET /groups/:id/registry/repositories' do
+ let(:url) { "/groups/#{group.id}/registry/repositories" }
+
+ subject { get api(url, api_user) }
+
+ it_behaves_like 'rejected container repository access', :guest, :forbidden
+ it_behaves_like 'rejected container repository access', :anonymous, :not_found
+
+ it_behaves_like 'returns repositories for allowed users', :reporter, 'group' do
+ let(:object) { group }
+ end
+
+ context 'with invalid group id' do
+ let(:url) { '/groups/123412341234/registry/repositories' }
+
+ it 'returns not found' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index 7a6f1cd548c..15d6db42760 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -1571,7 +1571,7 @@ describe API::MergeRequests do
end
end
- describe "GET /projects/:id/merge_requests/:merge_request_iid/merge_ref" do
+ describe "GET /projects/:id/merge_requests/:merge_request_iid/merge_ref", :clean_gitlab_redis_shared_state do
before do
merge_request.mark_as_unchecked!
end
diff --git a/spec/requests/api/container_registry_spec.rb b/spec/requests/api/project_container_repositories_spec.rb
index b64f3ea1081..f1dc4e6f0b2 100644
--- a/spec/requests/api/container_registry_spec.rb
+++ b/spec/requests/api/project_container_repositories_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe API::ContainerRegistry do
+describe API::ProjectContainerRepositories do
include ExclusiveLeaseHelpers
set(:project) { create(:project, :private) }
@@ -12,6 +12,16 @@ describe API::ContainerRegistry do
let(:root_repository) { create(:container_repository, :root, project: project) }
let(:test_repository) { create(:container_repository, project: project) }
+ let(:users) do
+ {
+ anonymous: nil,
+ developer: developer,
+ guest: guest,
+ maintainer: maintainer,
+ reporter: reporter
+ }
+ end
+
let(:api_user) { maintainer }
before do
@@ -27,57 +37,24 @@ describe API::ContainerRegistry do
test_repository
end
- shared_examples 'being disallowed' do |param|
- context "for #{param}" do
- let(:api_user) { public_send(param) }
-
- it 'returns access denied' do
- subject
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
-
- context "for anonymous" do
- let(:api_user) { nil }
-
- it 'returns not found' do
- subject
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
-
describe 'GET /projects/:id/registry/repositories' do
- subject { get api("/projects/#{project.id}/registry/repositories", api_user) }
-
- it_behaves_like 'being disallowed', :guest
-
- context 'for reporter' do
- let(:api_user) { reporter }
-
- it 'returns a list of repositories' do
- subject
+ let(:url) { "/projects/#{project.id}/registry/repositories" }
- expect(json_response.length).to eq(2)
- expect(json_response.map { |repository| repository['id'] }).to contain_exactly(
- root_repository.id, test_repository.id)
- end
+ subject { get api(url, api_user) }
- it 'returns a matching schema' do
- subject
+ it_behaves_like 'rejected container repository access', :guest, :forbidden
+ it_behaves_like 'rejected container repository access', :anonymous, :not_found
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to match_response_schema('registry/repositories')
- end
+ it_behaves_like 'returns repositories for allowed users', :reporter, 'project' do
+ let(:object) { project }
end
end
describe 'DELETE /projects/:id/registry/repositories/:repository_id' do
subject { delete api("/projects/#{project.id}/registry/repositories/#{root_repository.id}", api_user) }
- it_behaves_like 'being disallowed', :developer
+ it_behaves_like 'rejected container repository access', :developer, :forbidden
+ it_behaves_like 'rejected container repository access', :anonymous, :not_found
context 'for maintainer' do
let(:api_user) { maintainer }
@@ -96,7 +73,8 @@ describe API::ContainerRegistry do
describe 'GET /projects/:id/registry/repositories/:repository_id/tags' do
subject { get api("/projects/#{project.id}/registry/repositories/#{root_repository.id}/tags", api_user) }
- it_behaves_like 'being disallowed', :guest
+ it_behaves_like 'rejected container repository access', :guest, :forbidden
+ it_behaves_like 'rejected container repository access', :anonymous, :not_found
context 'for reporter' do
let(:api_user) { reporter }
@@ -124,10 +102,13 @@ describe API::ContainerRegistry do
describe 'DELETE /projects/:id/registry/repositories/:repository_id/tags' do
subject { delete api("/projects/#{project.id}/registry/repositories/#{root_repository.id}/tags", api_user), params: params }
- it_behaves_like 'being disallowed', :developer do
+ context 'disallowed' do
let(:params) do
{ name_regex: 'v10.*' }
end
+
+ it_behaves_like 'rejected container repository access', :developer, :forbidden
+ it_behaves_like 'rejected container repository access', :anonymous, :not_found
end
context 'for maintainer' do
@@ -191,7 +172,8 @@ describe API::ContainerRegistry do
describe 'GET /projects/:id/registry/repositories/:repository_id/tags/:tag_name' do
subject { get api("/projects/#{project.id}/registry/repositories/#{root_repository.id}/tags/rootA", api_user) }
- it_behaves_like 'being disallowed', :guest
+ it_behaves_like 'rejected container repository access', :guest, :forbidden
+ it_behaves_like 'rejected container repository access', :anonymous, :not_found
context 'for reporter' do
let(:api_user) { reporter }
@@ -222,7 +204,8 @@ describe API::ContainerRegistry do
describe 'DELETE /projects/:id/registry/repositories/:repository_id/tags/:tag_name' do
subject { delete api("/projects/#{project.id}/registry/repositories/#{root_repository.id}/tags/rootA", api_user) }
- it_behaves_like 'being disallowed', :reporter
+ it_behaves_like 'rejected container repository access', :reporter, :forbidden
+ it_behaves_like 'rejected container repository access', :anonymous, :not_found
context 'for developer' do
let(:api_user) { developer }
diff --git a/spec/requests/api/settings_spec.rb b/spec/requests/api/settings_spec.rb
index 8a60980fe80..184c00a356a 100644
--- a/spec/requests/api/settings_spec.rb
+++ b/spec/requests/api/settings_spec.rb
@@ -25,6 +25,9 @@ describe API::Settings, 'Settings' do
expect(json_response['ed25519_key_restriction']).to eq(0)
expect(json_response['performance_bar_allowed_group_id']).to be_nil
expect(json_response['instance_statistics_visibility_private']).to be(false)
+ expect(json_response['allow_local_requests_from_hooks_and_services']).to be(false)
+ expect(json_response['allow_local_requests_from_web_hooks_and_services']).to be(false)
+ expect(json_response['allow_local_requests_from_system_hooks']).to be(true)
expect(json_response).not_to have_key('performance_bar_allowed_group_path')
expect(json_response).not_to have_key('performance_bar_enabled')
end
@@ -67,7 +70,9 @@ describe API::Settings, 'Settings' do
instance_statistics_visibility_private: true,
diff_max_patch_bytes: 150_000,
default_branch_protection: ::Gitlab::Access::PROTECTION_DEV_CAN_MERGE,
- local_markdown_version: 3
+ local_markdown_version: 3,
+ allow_local_requests_from_web_hooks_and_services: true,
+ allow_local_requests_from_system_hooks: false
}
expect(response).to have_gitlab_http_status(200)
@@ -95,6 +100,8 @@ describe API::Settings, 'Settings' do
expect(json_response['diff_max_patch_bytes']).to eq(150_000)
expect(json_response['default_branch_protection']).to eq(Gitlab::Access::PROTECTION_DEV_CAN_MERGE)
expect(json_response['local_markdown_version']).to eq(3)
+ expect(json_response['allow_local_requests_from_web_hooks_and_services']).to eq(true)
+ expect(json_response['allow_local_requests_from_system_hooks']).to eq(false)
end
end
@@ -117,6 +124,14 @@ describe API::Settings, 'Settings' do
expect(json_response['performance_bar_allowed_group_id']).to be_nil
end
+ it 'supports legacy allow_local_requests_from_hooks_and_services' do
+ put api("/application/settings", admin),
+ params: { allow_local_requests_from_hooks_and_services: true }
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response['allow_local_requests_from_hooks_and_services']).to eq(true)
+ end
+
context 'external policy classification settings' do
let(:settings) do
{
diff --git a/spec/rubocop/cop/rspec/top_level_describe_path_spec.rb b/spec/rubocop/cop/rspec/top_level_describe_path_spec.rb
new file mode 100644
index 00000000000..258144d4000
--- /dev/null
+++ b/spec/rubocop/cop/rspec/top_level_describe_path_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+require 'rubocop'
+require 'rubocop/rspec/support'
+
+require_relative '../../../../rubocop/cop/rspec/top_level_describe_path'
+
+describe RuboCop::Cop::RSpec::TopLevelDescribePath do
+ include RuboCop::RSpec::ExpectOffense
+ include CopHelper
+
+ subject(:cop) { described_class.new }
+
+ context 'when the file ends in _spec.rb' do
+ it 'registers no offenses' do
+ expect_no_offenses(<<~SOURCE.strip_indent, 'spec/foo_spec.rb')
+ describe 'Foo' do
+ end
+ SOURCE
+ end
+ end
+
+ context 'when the file is a frontend fixture' do
+ it 'registers no offenses' do
+ expect_no_offenses(<<~SOURCE.strip_indent, 'spec/frontend/fixtures/foo.rb')
+ describe 'Foo' do
+ end
+ SOURCE
+ end
+ end
+
+ context 'when the describe is in a shared example' do
+ context 'with shared_examples' do
+ it 'registers no offenses' do
+ expect_no_offenses(<<~SOURCE.strip_indent, 'spec/foo.rb')
+ shared_examples 'Foo' do
+ describe '#bar' do
+ end
+ end
+ SOURCE
+ end
+ end
+
+ context 'with shared_examples_for' do
+ it 'registers no offenses' do
+ expect_no_offenses(<<~SOURCE.strip_indent, 'spec/foo.rb')
+ shared_examples_for 'Foo' do
+ describe '#bar' do
+ end
+ end
+ SOURCE
+ end
+ end
+ end
+
+ context 'when the describe is at the top level' do
+ it 'marks the describe as offending' do
+ expect_offense(<<~SOURCE.strip_indent, 'spec/foo.rb')
+ describe 'Foo' do
+ ^^^^^^^^^^^^^^ #{described_class::MESSAGE}
+ end
+ SOURCE
+ end
+ end
+end
diff --git a/spec/serializers/analytics_issue_entity_spec.rb b/spec/serializers/analytics_issue_entity_spec.rb
index dd5e43a4b62..c5b03bdd8c1 100644
--- a/spec/serializers/analytics_issue_entity_spec.rb
+++ b/spec/serializers/analytics_issue_entity_spec.rb
@@ -10,12 +10,12 @@ describe AnalyticsIssueEntity do
id: "1",
created_at: "2016-11-12 15:04:02.948604",
author: user,
- name: project.name,
- path: project.namespace
+ project_path: project.path,
+ namespace_path: project.namespace.route.path
}
end
- let(:project) { create(:project) }
+ let(:project) { create(:project, name: 'my project') }
let(:request) { EntityRequest.new(entity: :merge_request) }
let(:entity) do
diff --git a/spec/serializers/analytics_issue_serializer_spec.rb b/spec/serializers/analytics_issue_serializer_spec.rb
index c9ffe1c5dad..9cb2ce13d12 100644
--- a/spec/serializers/analytics_issue_serializer_spec.rb
+++ b/spec/serializers/analytics_issue_serializer_spec.rb
@@ -8,7 +8,7 @@ describe AnalyticsIssueSerializer do
end
let(:user) { create(:user) }
- let(:project) { create(:project) }
+ let(:project) { create(:project, name: 'my project') }
let(:resource) do
{
total_time: "172802.724419",
@@ -17,8 +17,8 @@ describe AnalyticsIssueSerializer do
id: "1",
created_at: "2016-11-12 15:04:02.948604",
author: user,
- name: project.name,
- path: project.namespace
+ project_path: project.path,
+ namespace_path: project.namespace.route.path
}
end
diff --git a/spec/serializers/analytics_merge_request_serializer_spec.rb b/spec/serializers/analytics_merge_request_serializer_spec.rb
index 123d7d795ce..a864051b2a3 100644
--- a/spec/serializers/analytics_merge_request_serializer_spec.rb
+++ b/spec/serializers/analytics_merge_request_serializer_spec.rb
@@ -8,7 +8,7 @@ describe AnalyticsMergeRequestSerializer do
end
let(:user) { create(:user) }
- let(:project) { create(:project) }
+ let(:project) { create(:project, name: 'my project') }
let(:resource) do
{
total_time: "172802.724419",
@@ -18,8 +18,8 @@ describe AnalyticsMergeRequestSerializer do
state: 'open',
created_at: "2016-11-12 15:04:02.948604",
author: user,
- name: project.name,
- path: project.namespace
+ project_path: project.path,
+ namespace_path: project.namespace.route.path
}
end
diff --git a/spec/serializers/cluster_application_entity_spec.rb b/spec/serializers/cluster_application_entity_spec.rb
index f38a18fcf59..76ecca06522 100644
--- a/spec/serializers/cluster_application_entity_spec.rb
+++ b/spec/serializers/cluster_application_entity_spec.rb
@@ -22,7 +22,7 @@ describe ClusterApplicationEntity do
end
it 'has can_uninstall' do
- expect(subject[:can_uninstall]).to be_falsey
+ expect(subject[:can_uninstall]).to be_truthy
end
context 'non-helm application' do
diff --git a/spec/serializers/user_serializer_spec.rb b/spec/serializers/user_serializer_spec.rb
new file mode 100644
index 00000000000..2e4a8c644fe
--- /dev/null
+++ b/spec/serializers/user_serializer_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe UserSerializer do
+ let(:user1) { create(:user) }
+ let(:user2) { create(:user) }
+
+ context 'serializer with merge request context' do
+ let(:merge_request) { create(:merge_request) }
+ let(:project) { merge_request.project }
+ let(:serializer) { described_class.new(merge_request_iid: merge_request.iid) }
+
+ before do
+ allow(project).to(
+ receive_message_chain(:merge_requests, :find_by_iid!)
+ .with(merge_request.iid).and_return(merge_request)
+ )
+
+ project.add_maintainer(user1)
+ end
+
+ it 'returns a user with can_merge option' do
+ serialized_user1, serialized_user2 = serializer.represent([user1, user2], project: project).as_json
+
+ expect(serialized_user1).to include("id" => user1.id, "can_merge" => true)
+ expect(serialized_user2).to include("id" => user2.id, "can_merge" => false)
+ end
+ end
+end
diff --git a/spec/services/application_settings/update_service_spec.rb b/spec/services/application_settings/update_service_spec.rb
index 33cd1f37ff6..adb5219d691 100644
--- a/spec/services/application_settings/update_service_spec.rb
+++ b/spec/services/application_settings/update_service_spec.rb
@@ -62,6 +62,54 @@ describe ApplicationSettings::UpdateService do
end
end
+ describe 'updating outbound_local_requests_whitelist' do
+ context 'when params is blank' do
+ let(:params) { {} }
+
+ it 'does not add to whitelist' do
+ expect { subject.execute }.not_to change {
+ application_settings.outbound_local_requests_whitelist
+ }
+ end
+ end
+
+ context 'when param add_to_outbound_local_requests_whitelist contains values' do
+ before do
+ application_settings.outbound_local_requests_whitelist = ['127.0.0.1']
+ end
+
+ let(:params) { { add_to_outbound_local_requests_whitelist: ['example.com', ''] } }
+
+ it 'adds to whitelist' do
+ expect { subject.execute }.to change {
+ application_settings.outbound_local_requests_whitelist
+ }
+
+ expect(application_settings.outbound_local_requests_whitelist).to contain_exactly(
+ '127.0.0.1', 'example.com'
+ )
+ end
+ end
+
+ context 'when param outbound_local_requests_whitelist_raw is passed' do
+ before do
+ application_settings.outbound_local_requests_whitelist = ['127.0.0.1']
+ end
+
+ let(:params) { { outbound_local_requests_whitelist_raw: 'example.com;gitlab.com' } }
+
+ it 'overwrites the existing whitelist' do
+ expect { subject.execute }.to change {
+ application_settings.outbound_local_requests_whitelist
+ }
+
+ expect(application_settings.outbound_local_requests_whitelist).to contain_exactly(
+ 'example.com', 'gitlab.com'
+ )
+ end
+ end
+ end
+
describe 'performance bar settings' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/services/auth/container_registry_authentication_service_spec.rb b/spec/services/auth/container_registry_authentication_service_spec.rb
index 4f4776bbb27..3ca389ba25b 100644
--- a/spec/services/auth/container_registry_authentication_service_spec.rb
+++ b/spec/services/auth/container_registry_authentication_service_spec.rb
@@ -145,6 +145,19 @@ describe Auth::ContainerRegistryAuthenticationService do
it_behaves_like 'not a container repository factory'
end
+ describe '#pull_access_token' do
+ let(:project) { create(:project) }
+ let(:token) { described_class.pull_access_token(project.full_path) }
+
+ subject { { token: token } }
+
+ it_behaves_like 'an accessible' do
+ let(:actions) { ['pull'] }
+ end
+
+ it_behaves_like 'not a container repository factory'
+ end
+
context 'user authorization' do
let(:current_user) { create(:user) }
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index d9b61dfe503..7e2f311a065 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -1099,6 +1099,62 @@ describe Ci::CreatePipelineService do
end
end
end
+
+ context 'when needs is used' do
+ let(:pipeline) { execute_service }
+
+ let(:config) do
+ {
+ build_a: {
+ stage: "build",
+ script: "ls",
+ only: %w[master]
+ },
+ test_a: {
+ stage: "test",
+ script: "ls",
+ only: %w[master feature tags],
+ needs: %w[build_a]
+ },
+ deploy: {
+ stage: "deploy",
+ script: "ls",
+ only: %w[tags]
+ }
+ }
+ end
+
+ before do
+ stub_ci_pipeline_yaml_file(YAML.dump(config))
+ end
+
+ context 'when pipeline on master is created' do
+ let(:ref_name) { 'refs/heads/master' }
+
+ it 'creates a pipeline with build_a and test_a' do
+ expect(pipeline).to be_persisted
+ expect(pipeline.builds.map(&:name)).to contain_exactly("build_a", "test_a")
+ end
+ end
+
+ context 'when pipeline on feature is created' do
+ let(:ref_name) { 'refs/heads/feature' }
+
+ it 'does not create a pipeline as test_a depends on build_a' do
+ expect(pipeline).not_to be_persisted
+ expect(pipeline.builds).to be_empty
+ end
+ end
+
+ context 'when pipeline on v1.0.0 is created' do
+ let(:ref_name) { 'refs/tags/v1.0.0' }
+
+ it 'does create a pipeline with only the deploy job' do
+ expect(pipeline).to be_persisted
+ expect(pipeline.builds.map(&:name)).to contain_exactly("deploy")
+ end
+ end
+ end
end
describe '#execute!' do
diff --git a/spec/services/ci/process_pipeline_service_spec.rb b/spec/services/ci/process_pipeline_service_spec.rb
index cadb519ccee..1b28d2d4d02 100644
--- a/spec/services/ci/process_pipeline_service_spec.rb
+++ b/spec/services/ci/process_pipeline_service_spec.rb
@@ -700,6 +700,138 @@ describe Ci::ProcessPipelineService, '#execute' do
end
end
+ context 'when pipeline with needs is created' do
+ let!(:linux_build) { create_build('linux:build', stage: 'build', stage_idx: 0) }
+ let!(:mac_build) { create_build('mac:build', stage: 'build', stage_idx: 0) }
+ let!(:linux_rspec) { create_build('linux:rspec', stage: 'test', stage_idx: 1) }
+ let!(:linux_rubocop) { create_build('linux:rubocop', stage: 'test', stage_idx: 1) }
+ let!(:mac_rspec) { create_build('mac:rspec', stage: 'test', stage_idx: 1) }
+ let!(:mac_rubocop) { create_build('mac:rubocop', stage: 'test', stage_idx: 1) }
+ let!(:deploy) { create_build('deploy', stage: 'deploy', stage_idx: 2) }
+
+ let!(:linux_rspec_on_build) { create(:ci_build_need, build: linux_rspec, name: 'linux:build') }
+ let!(:linux_rubocop_on_build) { create(:ci_build_need, build: linux_rubocop, name: 'linux:build') }
+
+ let!(:mac_rspec_on_build) { create(:ci_build_need, build: mac_rspec, name: 'mac:build') }
+ let!(:mac_rubocop_on_build) { create(:ci_build_need, build: mac_rubocop, name: 'mac:build') }
+
+ it 'runs builds out of order when linux:* finishes first' do
+ expect(process_pipeline).to be_truthy
+
+ expect(stages).to eq(%w(pending created created))
+ expect(builds.pending).to contain_exactly(linux_build, mac_build)
+
+ # we follow the single path of linux
+ linux_build.reset.success!
+
+ expect(stages).to eq(%w(running pending created))
+ expect(builds.success).to contain_exactly(linux_build)
+ expect(builds.pending).to contain_exactly(mac_build, linux_rspec, linux_rubocop)
+
+ linux_rspec.reset.success!
+
+ expect(stages).to eq(%w(running running created))
+ expect(builds.success).to contain_exactly(linux_build, linux_rspec)
+ expect(builds.pending).to contain_exactly(mac_build, linux_rubocop)
+
+ linux_rubocop.reset.success!
+
+ expect(stages).to eq(%w(running running created))
+ expect(builds.success).to contain_exactly(linux_build, linux_rspec, linux_rubocop)
+ expect(builds.pending).to contain_exactly(mac_build)
+
+ mac_build.reset.success!
+ mac_rspec.reset.success!
+ mac_rubocop.reset.success!
+
+ expect(stages).to eq(%w(success success pending))
+ expect(builds.success).to contain_exactly(
+ linux_build, linux_rspec, linux_rubocop, mac_build, mac_rspec, mac_rubocop)
+ expect(builds.pending).to contain_exactly(deploy)
+ end
+
+ context 'when feature ci_dag_support is disabled' do
+ before do
+ stub_feature_flags(ci_dag_support: false)
+ end
+
+ it 'follows stages when linux:build finishes first' do
+ expect(process_pipeline).to be_truthy
+
+ expect(stages).to eq(%w(pending created created))
+ expect(builds.pending).to contain_exactly(linux_build, mac_build)
+
+ # we follow the single path of linux
+ linux_build.reset.success!
+
+ expect(stages).to eq(%w(running created created))
+ expect(builds.success).to contain_exactly(linux_build)
+ expect(builds.pending).to contain_exactly(mac_build)
+
+ mac_build.reset.success!
+
+ expect(stages).to eq(%w(success pending created))
+ expect(builds.success).to contain_exactly(linux_build, mac_build)
+ expect(builds.pending).to contain_exactly(
+ linux_rspec, linux_rubocop, mac_rspec, mac_rubocop)
+
+ linux_rspec.reset.success!
+ linux_rubocop.reset.success!
+ mac_rspec.reset.success!
+ mac_rubocop.reset.success!
+
+ expect(stages).to eq(%w(success success pending))
+ expect(builds.success).to contain_exactly(
+ linux_build, linux_rspec, linux_rubocop, mac_build, mac_rspec, mac_rubocop)
+ expect(builds.pending).to contain_exactly(deploy)
+ end
+ end
+
+ context 'when one of the jobs is run on a failure' do
+ let!(:linux_notify) { create_build('linux:notify', stage: 'deploy', stage_idx: 2, when: 'on_failure') }
+
+ let!(:linux_notify_on_build) { create(:ci_build_need, build: linux_notify, name: 'linux:build') }
+
+ context 'when another job in build phase fails first' do
+ context 'when ci_dag_support is enabled' do
+ it 'does skip linux:notify' do
+ expect(process_pipeline).to be_truthy
+
+ mac_build.reset.drop!
+ linux_build.reset.success!
+
+ expect(linux_notify.reset).to be_skipped
+ end
+ end
+
+ context 'when ci_dag_support is disabled' do
+ before do
+ stub_feature_flags(ci_dag_support: false)
+ end
+
+ it 'does run linux:notify' do
+ expect(process_pipeline).to be_truthy
+
+ mac_build.reset.drop!
+ linux_build.reset.success!
+
+ expect(linux_notify.reset).to be_pending
+ end
+ end
+ end
+
+ context 'when linux:build job fails first' do
+ it 'does run linux:notify' do
+ expect(process_pipeline).to be_truthy
+
+ linux_build.reset.drop!
+
+ expect(linux_notify.reset).to be_pending
+ end
+ end
+ end
+ end
+
def process_pipeline
described_class.new(pipeline.project, user).execute(pipeline)
end
@@ -712,6 +844,10 @@ describe Ci::ProcessPipelineService, '#execute' do
all_builds.where.not(status: [:created, :skipped])
end
+ def stages
+ pipeline.reset.stages.map(&:status)
+ end
+
def builds_names
builds.pluck(:name)
end
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index 915288cd916..fe7c6fe4700 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -67,6 +67,7 @@ describe Ci::RetryBuildService do
end
create(:ci_job_variable, job: build)
+ create(:ci_build_need, build: build)
build.reload
end
diff --git a/spec/services/merge_requests/mergeability_check_service_spec.rb b/spec/services/merge_requests/mergeability_check_service_spec.rb
index 6e827f2ea5b..a864da0a6fb 100644
--- a/spec/services/merge_requests/mergeability_check_service_spec.rb
+++ b/spec/services/merge_requests/mergeability_check_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe MergeRequests::MergeabilityCheckService do
+describe MergeRequests::MergeabilityCheckService, :clean_gitlab_redis_shared_state do
shared_examples_for 'unmergeable merge request' do
it 'updates or keeps merge status as cannot_be_merged' do
subject
@@ -60,24 +60,69 @@ describe MergeRequests::MergeabilityCheckService do
subject { described_class.new(merge_request).execute }
+ def execute_within_threads(amount:, retry_lease: true)
+ threads = []
+
+ amount.times do
+ # Let's use a different object for each thread to get closer
+ # to a real world scenario.
+ mr = MergeRequest.find(merge_request.id)
+
+ threads << Thread.new do
+ described_class.new(mr).execute(retry_lease: retry_lease)
+ end
+ end
+
+ threads.each(&:join)
+ threads
+ end
+
before do
project.add_developer(merge_request.author)
end
it_behaves_like 'mergeable merge request'
- context 'when multiple calls to the service' do
- it 'returns success' do
- subject
- result = subject
+ context 'when lock is disabled' do
+ before do
+ stub_feature_flags(merge_ref_auto_sync_lock: false)
+ end
- expect(result).to be_a(ServiceResponse)
- expect(result.success?).to be(true)
+ it_behaves_like 'mergeable merge request'
+ end
+
+ context 'when concurrent calls' do
+ it 'waits first lock and returns "cached" result in subsequent calls' do
+ threads = execute_within_threads(amount: 3)
+ results = threads.map { |t| t.value.status }
+
+ expect(results).to contain_exactly(:success, :success, :success)
+ end
+
+ it 'writes the merge-ref once' do
+ service = instance_double(MergeRequests::MergeToRefService)
+
+ expect(MergeRequests::MergeToRefService).to receive(:new).once { service }
+ expect(service).to receive(:execute).once.and_return(success: true)
+
+ execute_within_threads(amount: 3)
end
- it 'second call does not change the merge-ref' do
- expect { subject }.to change(merge_request, :merge_ref_head).from(nil)
- expect { subject }.not_to change(merge_request.merge_ref_head, :id)
+ it 'resets one merge request upon execution' do
+ expect_any_instance_of(MergeRequest).to receive(:reset).once
+
+ execute_within_threads(amount: 2)
+ end
+
+ context 'when retry_lease flag is false' do
+ it 'the first call succeeds, subsequent concurrent calls get a lock error response' do
+ threads = execute_within_threads(amount: 3, retry_lease: false)
+ results = threads.map { |t| [t.value.status, t.value.message] }
+
+ expect(results).to contain_exactly([:error, 'Failed to obtain a lock'],
+ [:error, 'Failed to obtain a lock'],
+ [:success, nil])
+ end
end
end
@@ -102,8 +147,7 @@ describe MergeRequests::MergeabilityCheckService do
context 'when broken' do
before do
- allow(merge_request).to receive(:broken?) { true }
- allow(project.repository).to receive(:can_be_merged?) { false }
+ expect(merge_request).to receive(:broken?) { true }
end
it_behaves_like 'unmergeable merge request'
@@ -117,10 +161,13 @@ describe MergeRequests::MergeabilityCheckService do
end
end
- context 'when it has conflicts' do
- before do
- allow(merge_request).to receive(:broken?) { false }
- allow(project.repository).to receive(:can_be_merged?) { false }
+ context 'when it cannot be merged on git' do
+ let(:merge_request) do
+ create(:merge_request,
+ merge_status: :unchecked,
+ source_branch: 'conflict-resolvable',
+ source_project: project,
+ target_branch: 'conflict-start')
end
it_behaves_like 'unmergeable merge request'
diff --git a/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb b/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
index 75d534c59bf..970e82e7107 100644
--- a/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
+++ b/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
@@ -17,7 +17,7 @@ describe Projects::LfsPointers::LfsDownloadService do
before do
ApplicationSetting.create_from_defaults
- stub_application_setting(allow_local_requests_from_hooks_and_services: local_request_setting)
+ stub_application_setting(allow_local_requests_from_web_hooks_and_services: local_request_setting)
allow(project).to receive(:lfs_enabled?).and_return(true)
end
diff --git a/spec/services/self_monitoring/project/create_service_spec.rb b/spec/services/self_monitoring/project/create_service_spec.rb
index d11e27c6d52..7d4faba526b 100644
--- a/spec/services/self_monitoring/project/create_service_spec.rb
+++ b/spec/services/self_monitoring/project/create_service_spec.rb
@@ -37,7 +37,7 @@ describe SelfMonitoring::Project::CreateService do
allow(ApplicationSetting)
.to receive(:current)
.and_return(
- ApplicationSetting.build_from_defaults(allow_local_requests_from_hooks_and_services: true)
+ ApplicationSetting.build_from_defaults(allow_local_requests_from_web_hooks_and_services: true)
)
end
@@ -90,19 +90,17 @@ describe SelfMonitoring::Project::CreateService do
)
end
- # This should pass when https://gitlab.com/gitlab-org/gitlab-ce/issues/44496
- # is complete and the prometheus listen address is added to the whitelist.
- # context 'when local requests from hooks and services are not allowed' do
- # before do
- # allow(ApplicationSetting)
- # .to receive(:current)
- # .and_return(
- # ApplicationSetting.build_from_defaults(allow_local_requests_from_hooks_and_services: false)
- # )
- # end
-
- # it_behaves_like 'has prometheus service', 'http://localhost:9090'
- # end
+ context 'when local requests from hooks and services are not allowed' do
+ before do
+ allow(ApplicationSetting)
+ .to receive(:current)
+ .and_return(
+ ApplicationSetting.build_from_defaults(allow_local_requests_from_web_hooks_and_services: false)
+ )
+ end
+
+ it_behaves_like 'has prometheus service', 'http://localhost:9090'
+ end
context 'with non default prometheus address' do
before do
diff --git a/spec/services/web_hook_service_spec.rb b/spec/services/web_hook_service_spec.rb
index 37bafc0c002..50167a2e059 100644
--- a/spec/services/web_hook_service_spec.rb
+++ b/spec/services/web_hook_service_spec.rb
@@ -19,17 +19,37 @@ describe WebHookService do
let(:service_instance) { described_class.new(project_hook, data, :push_hooks) }
describe '#initialize' do
- it 'allow_local_requests is true if hook is a SystemHook' do
- instance = described_class.new(build(:system_hook), data, :system_hook)
- expect(instance.request_options[:allow_local_requests]).to be_truthy
+ before do
+ stub_application_setting(setting_name => setting)
end
- it 'allow_local_requests is false if hook is not a SystemHook' do
- %i(project_hook service_hook web_hook_log).each do |hook|
- instance = described_class.new(build(hook), data, hook)
- expect(instance.request_options[:allow_local_requests]).to be_falsey
+ shared_examples_for 'respects outbound network setting' do
+ context 'when local requests are allowed' do
+ let(:setting) { true }
+
+ it { expect(hook.request_options[:allow_local_requests]).to be_truthy }
+ end
+
+ context 'when local requests are not allowed' do
+ let(:setting) { false }
+
+ it { expect(hook.request_options[:allow_local_requests]).to be_falsey }
end
end
+
+ context 'when SystemHook' do
+ let(:setting_name) { :allow_local_requests_from_system_hooks }
+ let(:hook) { described_class.new(build(:system_hook), data, :system_hook) }
+
+ include_examples 'respects outbound network setting'
+ end
+
+ context 'when ProjectHook' do
+ let(:setting_name) { :allow_local_requests_from_web_hooks_and_services }
+ let(:hook) { described_class.new(build(:project_hook), data, :project_hook) }
+
+ include_examples 'respects outbound network setting'
+ end
end
describe '#execute' do
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index 6994b6687fc..bcc133790d1 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -148,9 +148,9 @@ RSpec.configure do |config|
Gitlab::ThreadMemoryCache.cache_backend.clear
end
- config.around(:example, :quarantine) do
+ config.around(:example, :quarantine) do |example|
# Skip tests in quarantine unless we explicitly focus on them.
- skip('In quarantine') unless config.inclusion_filter[:quarantine]
+ example.run if config.inclusion_filter[:quarantine]
end
config.before(:example, :request_store) do
diff --git a/spec/support/shared_contexts/policies/group_policy_shared_context.rb b/spec/support/shared_contexts/policies/group_policy_shared_context.rb
index c11725c63d2..fd24c443288 100644
--- a/spec/support/shared_contexts/policies/group_policy_shared_context.rb
+++ b/spec/support/shared_contexts/policies/group_policy_shared_context.rb
@@ -16,7 +16,7 @@ RSpec.shared_context 'GroupPolicy context' do
read_group_merge_requests
]
end
- let(:reporter_permissions) { [:admin_label] }
+ let(:reporter_permissions) { %i[admin_label read_container_image] }
let(:developer_permissions) { [:admin_milestone] }
let(:maintainer_permissions) do
%i[
diff --git a/spec/support/shared_examples/application_setting_examples.rb b/spec/support/shared_examples/application_setting_examples.rb
index 0e6a8059376..a43d2a75082 100644
--- a/spec/support/shared_examples/application_setting_examples.rb
+++ b/spec/support/shared_examples/application_setting_examples.rb
@@ -63,6 +63,19 @@ RSpec.shared_examples 'application settings examples' do
context 'outbound_local_requests_whitelist' do
it_behaves_like 'string of domains', :outbound_local_requests_whitelist
+
+ it 'clears outbound_local_requests_whitelist_arrays memoization' do
+ setting.outbound_local_requests_whitelist_raw = 'example.com'
+
+ expect(setting.outbound_local_requests_whitelist_arrays).to contain_exactly(
+ [], ['example.com']
+ )
+
+ setting.outbound_local_requests_whitelist_raw = 'gitlab.com'
+ expect(setting.outbound_local_requests_whitelist_arrays).to contain_exactly(
+ [], ['gitlab.com']
+ )
+ end
end
context 'outbound_local_requests_whitelist_arrays' do
@@ -78,7 +91,54 @@ RSpec.shared_examples 'application settings examples' do
]
domain_whitelist = ['www.example.com', 'example.com', 'subdomain.example.com']
- expect(setting.outbound_local_requests_whitelist_arrays).to contain_exactly(ip_whitelist, domain_whitelist)
+ expect(setting.outbound_local_requests_whitelist_arrays).to contain_exactly(
+ ip_whitelist, domain_whitelist
+ )
+ end
+ end
+
+ context 'add_to_outbound_local_requests_whitelist' do
+ it 'adds entry to outbound_local_requests_whitelist' do
+ setting.outbound_local_requests_whitelist = ['example.com']
+
+ setting.add_to_outbound_local_requests_whitelist(
+ ['example.com', '127.0.0.1', 'gitlab.com']
+ )
+
+ expect(setting.outbound_local_requests_whitelist).to contain_exactly(
+ 'example.com',
+ '127.0.0.1',
+ 'gitlab.com'
+ )
+ end
+
+ it 'clears outbound_local_requests_whitelist_arrays memoization' do
+ setting.outbound_local_requests_whitelist = ['example.com']
+
+ expect(setting.outbound_local_requests_whitelist_arrays).to contain_exactly(
+ [],
+ ['example.com']
+ )
+
+ setting.add_to_outbound_local_requests_whitelist(
+ ['example.com', 'gitlab.com']
+ )
+
+ expect(setting.outbound_local_requests_whitelist_arrays).to contain_exactly(
+ [],
+ ['example.com', 'gitlab.com']
+ )
+ end
+
+ it 'does not raise error with nil' do
+ setting.outbound_local_requests_whitelist = nil
+
+ setting.add_to_outbound_local_requests_whitelist(['gitlab.com'])
+
+ expect(setting.outbound_local_requests_whitelist).to contain_exactly('gitlab.com')
+ expect(setting.outbound_local_requests_whitelist_arrays).to contain_exactly(
+ [], ['gitlab.com']
+ )
end
it 'does not raise error with nil' do
diff --git a/spec/support/shared_examples/ci_trace_shared_examples.rb b/spec/support/shared_examples/ci_trace_shared_examples.rb
index 6fd4b14d51d..e2b4b50d41d 100644
--- a/spec/support/shared_examples/ci_trace_shared_examples.rb
+++ b/spec/support/shared_examples/ci_trace_shared_examples.rb
@@ -7,11 +7,11 @@ shared_examples_for 'common trace features' do
end
it "returns formatted html" do
- expect(trace.html).to eq("<span class=\"\">12<br/><span class=\"\">34</span></span>")
+ expect(trace.html).to eq("<span>12<br/>34</span>")
end
it "returns last line of formatted html" do
- expect(trace.html(last_lines: 1)).to eq("<span class=\"\">34</span>")
+ expect(trace.html(last_lines: 1)).to eq("<span>34</span>")
end
end
diff --git a/spec/support/shared_examples/container_repositories_shared_examples.rb b/spec/support/shared_examples/container_repositories_shared_examples.rb
new file mode 100644
index 00000000000..946b130fca2
--- /dev/null
+++ b/spec/support/shared_examples/container_repositories_shared_examples.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+shared_examples 'rejected container repository access' do |user_type, status|
+ context "for #{user_type}" do
+ let(:api_user) { users[user_type] }
+
+ it "returns #{status}" do
+ subject
+
+ expect(response).to have_gitlab_http_status(status)
+ end
+ end
+end
+
+shared_examples 'returns repositories for allowed users' do |user_type, scope|
+ context "for #{user_type}" do
+ it 'returns a list of repositories' do
+ subject
+
+ expect(json_response.length).to eq(2)
+ expect(json_response.map { |repository| repository['id'] }).to contain_exactly(
+ root_repository.id, test_repository.id)
+ expect(response.body).not_to include('tags')
+ end
+
+ it 'returns a matching schema' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('registry/repositories')
+ end
+
+ context 'with tags param' do
+ let(:url) { "/#{scope}s/#{object.id}/registry/repositories?tags=true" }
+
+ before do
+ stub_container_registry_tags(repository: root_repository.path, tags: %w(rootA latest), with_manifest: true)
+ stub_container_registry_tags(repository: test_repository.path, tags: %w(rootA latest), with_manifest: true)
+ end
+
+ it 'returns a list of repositories and their tags' do
+ subject
+
+ expect(json_response.length).to eq(2)
+ expect(json_response.map { |repository| repository['id'] }).to contain_exactly(
+ root_repository.id, test_repository.id)
+ expect(response.body).to include('tags')
+ end
+
+ it 'returns a matching schema' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('registry/repositories')
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/discussions_provider_shared_examples.rb b/spec/support/shared_examples/discussions_provider_shared_examples.rb
new file mode 100644
index 00000000000..77cf1ac3f51
--- /dev/null
+++ b/spec/support/shared_examples/discussions_provider_shared_examples.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+shared_examples 'discussions provider' do
+ it 'returns the expected discussions' do
+ get :discussions, params: { namespace_id: project.namespace, project_id: project, id: requested_iid }
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to match_response_schema('entities/discussions')
+
+ expect(json_response.size).to eq(expected_discussion_count)
+ expect(json_response.pluck('id')).to eq(expected_discussion_ids)
+ end
+end
diff --git a/spec/support/shared_examples/models/diff_positionable_note_shared_examples.rb b/spec/support/shared_examples/models/diff_positionable_note_shared_examples.rb
new file mode 100644
index 00000000000..8b298c5c974
--- /dev/null
+++ b/spec/support/shared_examples/models/diff_positionable_note_shared_examples.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+shared_examples_for 'a valid diff positionable note' do |factory_on_commit|
+ context 'for commit' do
+ let(:project) { create(:project, :repository) }
+ let(:commit) { project.commit(sample_commit.id) }
+ let(:commit_id) { commit.id }
+ let(:diff_refs) { commit.diff_refs }
+
+ let(:position) do
+ Gitlab::Diff::Position.new(
+ old_path: "files/ruby/popen.rb",
+ new_path: "files/ruby/popen.rb",
+ old_line: nil,
+ new_line: 14,
+ diff_refs: diff_refs
+ )
+ end
+
+ subject { build(factory_on_commit, commit_id: commit_id, position: position) }
+
+ context 'position diff refs matches commit diff refs' do
+ it 'is valid' do
+ expect(subject).to be_valid
+ expect(subject.errors).not_to have_key(:commit_id)
+ end
+ end
+
+ context 'position diff refs does not match commit diff refs' do
+ let(:diff_refs) do
+ Gitlab::Diff::DiffRefs.new(
+ base_sha: "not_existing_sha",
+ head_sha: "existing_sha"
+ )
+ end
+
+ it 'is invalid' do
+ expect(subject).to be_invalid
+ expect(subject.errors).to have_key(:commit_id)
+ end
+ end
+
+ context 'commit does not exist' do
+ let(:commit_id) { 'non-existing' }
+
+ it 'is invalid' do
+ expect(subject).to be_invalid
+ expect(subject.errors).to have_key(:commit_id)
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/models/update_project_statistics_shared_examples.rb b/spec/support/shared_examples/models/update_project_statistics_shared_examples.rb
index aad63982e7a..e03435cafe8 100644
--- a/spec/support/shared_examples/models/update_project_statistics_shared_examples.rb
+++ b/spec/support/shared_examples/models/update_project_statistics_shared_examples.rb
@@ -32,19 +32,6 @@ shared_examples_for 'UpdateProjectStatistics' do
subject.save!
end
-
- context 'when feature flag is disabled for the namespace' do
- it 'does not schedules a namespace statistics worker' do
- namespace = subject.project.root_ancestor
-
- stub_feature_flags(update_statistics_namespace: false, namespace: namespace)
-
- expect(Namespaces::ScheduleAggregationWorker)
- .not_to receive(:perform_async)
-
- subject.save!
- end
- end
end
context 'when updating' do
@@ -87,20 +74,6 @@ shared_examples_for 'UpdateProjectStatistics' do
subject.save!
end.not_to exceed_query_limit(control_count)
end
-
- context 'when the feature flag is disabled for the namespace' do
- it 'does not schedule a namespace statistics worker' do
- namespace = subject.project.root_ancestor
-
- stub_feature_flags(update_statistics_namespace: false, namespace: namespace)
-
- expect(Namespaces::ScheduleAggregationWorker)
- .not_to receive(:perform_async)
-
- subject.write_attribute(statistic_attribute, read_attribute + delta)
- subject.save!
- end
- end
end
context 'when destroying' do
@@ -144,18 +117,5 @@ shared_examples_for 'UpdateProjectStatistics' do
project.destroy!
end
end
-
- context 'when feature flag is disabled for the namespace' do
- it 'does not schedule a namespace statistics worker' do
- namespace = subject.project.root_ancestor
-
- stub_feature_flags(update_statistics_namespace: false, namespace: namespace)
-
- expect(Namespaces::ScheduleAggregationWorker)
- .not_to receive(:perform_async)
-
- subject.destroy!
- end
- end
end
end
diff --git a/spec/support/shared_examples/relative_positioning_shared_examples.rb b/spec/support/shared_examples/relative_positioning_shared_examples.rb
index 5ee62644c54..9837ba806db 100644
--- a/spec/support/shared_examples/relative_positioning_shared_examples.rb
+++ b/spec/support/shared_examples/relative_positioning_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-RSpec.shared_examples "a class that supports relative positioning" do
+RSpec.shared_examples 'a class that supports relative positioning' do
let(:item1) { create(factory, default_params) }
let(:item2) { create(factory, default_params) }
let(:new_item) { create(factory, default_params) }
@@ -9,24 +9,38 @@ RSpec.shared_examples "a class that supports relative positioning" do
create(factory, params.merge(default_params))
end
- describe '.move_to_end' do
- it 'moves the object to the end' do
- item1.update(relative_position: 5)
- item2.update(relative_position: 15)
+ def create_items_with_positions(positions)
+ positions.map do |position|
+ create_item(relative_position: position)
+ end
+ end
- described_class.move_to_end([item1, item2])
+ describe '.move_nulls_to_end' do
+ it 'moves items with null relative_position to the end' do
+ skip("#{item1} has a default relative position") if item1.relative_position
+ skip("#{item2} has a default relative position") if item2.relative_position
+
+ described_class.move_nulls_to_end([item1, item2])
expect(item2.prev_relative_position).to eq item1.relative_position
expect(item1.prev_relative_position).to eq nil
expect(item2.next_relative_position).to eq nil
end
+ it 'moves the item near the start position when there are no existing positions' do
+ skip("#{item1} has a default relative position") if item1.relative_position
+
+ described_class.move_nulls_to_end([item1])
+
+ expect(item1.relative_position).to eq(described_class::START_POSITION + described_class::IDEAL_DISTANCE)
+ end
+
it 'does not perform any moves if all items have their relative_position set' do
item1.update!(relative_position: 1)
expect(item1).not_to receive(:save)
- described_class.move_to_end([item1])
+ described_class.move_nulls_to_end([item1])
end
end
@@ -96,46 +110,6 @@ RSpec.shared_examples "a class that supports relative positioning" do
end
end
- describe '#shift_after?' do
- before do
- [item1, item2].each do |item1|
- item1.move_to_end && item1.save
- end
- end
-
- it 'returns true' do
- item1.update(relative_position: item2.relative_position - 1)
-
- expect(item1.shift_after?).to be_truthy
- end
-
- it 'returns false' do
- item1.update(relative_position: item2.relative_position - 2)
-
- expect(item1.shift_after?).to be_falsey
- end
- end
-
- describe '#shift_before?' do
- before do
- [item1, item2].each do |item1|
- item1.move_to_end && item1.save
- end
- end
-
- it 'returns true' do
- item1.update(relative_position: item2.relative_position + 1)
-
- expect(item1.shift_before?).to be_truthy
- end
-
- it 'returns false' do
- item1.update(relative_position: item2.relative_position + 2)
-
- expect(item1.shift_before?).to be_falsey
- end
- end
-
describe '#move_between' do
before do
[item1, item2].each do |item1|
@@ -249,5 +223,61 @@ RSpec.shared_examples "a class that supports relative positioning" do
expect(new_item.relative_position).to be(100)
end
+
+ it 'avoids N+1 queries when rebalancing other items' do
+ items = create_items_with_positions([100, 101, 102])
+
+ count = ActiveRecord::QueryRecorder.new do
+ new_item.move_between(items[-2], items[-1])
+ end
+
+ items = create_items_with_positions([150, 151, 152, 153, 154])
+
+ expect { new_item.move_between(items[-2], items[-1]) }.not_to exceed_query_limit(count)
+ end
+ end
+
+ describe '#move_sequence_before' do
+ it 'moves the whole sequence of items to the middle of the nearest gap' do
+ items = create_items_with_positions([90, 100, 101, 102])
+
+ items.last.move_sequence_before
+ items.last.save!
+
+ positions = items.map { |item| item.reload.relative_position }
+ expect(positions).to eq([90, 95, 96, 102])
+ end
+
+ it 'finds a gap if there are unused positions' do
+ items = create_items_with_positions([100, 101, 102])
+
+ items.last.move_sequence_before
+ items.last.save!
+
+ positions = items.map { |item| item.reload.relative_position }
+ expect(positions).to eq([50, 51, 102])
+ end
+ end
+
+ describe '#move_sequence_after' do
+ it 'moves the whole sequence of items to the middle of the nearest gap' do
+ items = create_items_with_positions([100, 101, 102, 110])
+
+ items.first.move_sequence_after
+ items.first.save!
+
+ positions = items.map { |item| item.reload.relative_position }
+ expect(positions).to eq([100, 105, 106, 110])
+ end
+
+ it 'finds a gap if there are unused positions' do
+ items = create_items_with_positions([100, 101, 102])
+
+ items.first.move_sequence_after
+ items.first.save!
+
+ positions = items.map { |item| item.reload.relative_position }
+ expect(positions).to eq([100, 601, 602])
+ end
end
end
diff --git a/spec/support/shared_examples/url_validator_examples.rb b/spec/support/shared_examples/url_validator_examples.rb
index 16fceddb605..c5a775fefb6 100644
--- a/spec/support/shared_examples/url_validator_examples.rb
+++ b/spec/support/shared_examples/url_validator_examples.rb
@@ -1,12 +1,12 @@
# frozen_string_literal: true
RSpec.shared_examples 'url validator examples' do |schemes|
- let(:validator) { described_class.new(attributes: [:link_url], **options) }
- let!(:badge) { build(:badge, link_url: 'http://www.example.com') }
+ describe '#validate' do
+ let(:validator) { described_class.new(attributes: [:link_url], **options) }
+ let(:badge) { build(:badge, link_url: 'http://www.example.com') }
- subject { validator.validate(badge) }
+ subject { validator.validate(badge) }
- describe '#validate' do
context 'with no options' do
let(:options) { {} }
@@ -42,3 +42,52 @@ RSpec.shared_examples 'url validator examples' do |schemes|
end
end
end
+
+RSpec.shared_examples 'public url validator examples' do |setting|
+ let(:validator) { described_class.new(attributes: [:link_url]) }
+ let(:badge) { build(:badge, link_url: 'http://www.example.com') }
+
+ subject { validator.validate(badge) }
+
+ context 'by default' do
+ it 'blocks urls pointing to localhost' do
+ badge.link_url = 'https://127.0.0.1'
+
+ subject
+
+ expect(badge.errors).to be_present
+ end
+
+ it 'blocks urls pointing to the local network' do
+ badge.link_url = 'https://192.168.1.1'
+
+ subject
+
+ expect(badge.errors).to be_present
+ end
+ end
+
+ context 'when local requests are allowed' do
+ let!(:settings) { create(:application_setting) }
+
+ before do
+ stub_application_setting(setting)
+ end
+
+ it 'does not block urls pointing to localhost' do
+ badge.link_url = 'https://127.0.0.1'
+
+ subject
+
+ expect(badge.errors).not_to be_present
+ end
+
+ it 'does not block urls pointing to the local network' do
+ badge.link_url = 'https://192.168.1.1'
+
+ subject
+
+ expect(badge.errors).not_to be_present
+ end
+ end
+end
diff --git a/spec/tasks/gitlab/mail_google_schema_whitelisting.rb b/spec/tasks/gitlab/mail_google_schema_whitelisting.rb
deleted file mode 100644
index 8d1cff7a261..00000000000
--- a/spec/tasks/gitlab/mail_google_schema_whitelisting.rb
+++ /dev/null
@@ -1,27 +0,0 @@
-require 'spec_helper'
-require 'rake'
-
-describe 'gitlab:mail_google_schema_whitelisting rake task' do
- before :all do
- Rake.application.rake_require "tasks/gitlab/helpers"
- Rake.application.rake_require "tasks/gitlab/mail_google_schema_whitelisting"
- # empty task as env is already loaded
- Rake::Task.define_task :environment
- end
-
- describe 'call' do
- before do
- # avoid writing task output to spec progress
- allow($stdout).to receive :write
- end
-
- let :run_rake_task do
- Rake::Task["gitlab:mail_google_schema_whitelisting"].reenable
- Rake.application.invoke_task "gitlab:mail_google_schema_whitelisting"
- end
-
- it 'runs the task without errors' do
- expect { run_rake_task }.not_to raise_error
- end
- end
-end
diff --git a/spec/validators/public_url_validator_spec.rb b/spec/validators/public_url_validator_spec.rb
index f6364fb1dd5..3cbf1002730 100644
--- a/spec/validators/public_url_validator_spec.rb
+++ b/spec/validators/public_url_validator_spec.rb
@@ -2,27 +2,5 @@ require 'spec_helper'
describe PublicUrlValidator do
include_examples 'url validator examples', AddressableUrlValidator::DEFAULT_OPTIONS[:schemes]
-
- context 'by default' do
- let(:validator) { described_class.new(attributes: [:link_url]) }
- let!(:badge) { build(:badge, link_url: 'http://www.example.com') }
-
- subject { validator.validate(badge) }
-
- it 'blocks urls pointing to localhost' do
- badge.link_url = 'https://127.0.0.1'
-
- subject
-
- expect(badge.errors).to be_present
- end
-
- it 'blocks urls pointing to the local network' do
- badge.link_url = 'https://192.168.1.1'
-
- subject
-
- expect(badge.errors).to be_present
- end
- end
+ include_examples 'public url validator examples', allow_local_requests_from_web_hooks_and_services: true
end
diff --git a/spec/validators/system_hook_url_validator_spec.rb b/spec/validators/system_hook_url_validator_spec.rb
new file mode 100644
index 00000000000..02384bbd1ce
--- /dev/null
+++ b/spec/validators/system_hook_url_validator_spec.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe SystemHookUrlValidator do
+ include_examples 'url validator examples', AddressableUrlValidator::DEFAULT_OPTIONS[:schemes]
+ include_examples 'public url validator examples', allow_local_requests_from_system_hooks: true
+end
diff --git a/spec/views/dashboard/projects/_blank_state_admin_welcome.haml.rb b/spec/views/dashboard/projects/_blank_state_admin_welcome.haml_spec.rb
index 2f58eec86dc..2f58eec86dc 100644
--- a/spec/views/dashboard/projects/_blank_state_admin_welcome.haml.rb
+++ b/spec/views/dashboard/projects/_blank_state_admin_welcome.haml_spec.rb
diff --git a/spec/views/dashboard/projects/_nav.html.haml.rb b/spec/views/dashboard/projects/_nav.html.haml_spec.rb
index f6a8ca13040..cbdd3c0acc3 100644
--- a/spec/views/dashboard/projects/_nav.html.haml.rb
+++ b/spec/views/dashboard/projects/_nav.html.haml_spec.rb
@@ -4,7 +4,7 @@ describe 'dashboard/projects/_nav.html.haml' do
it 'highlights All tab by default' do
render
- expect(rendered).to have_css('li.active a', text: 'All')
+ expect(rendered).to have_css('a.active', text: 'All')
end
it 'highlights Personal tab personal param is present' do
@@ -12,6 +12,6 @@ describe 'dashboard/projects/_nav.html.haml' do
render
- expect(rendered).to have_css('li.active a', text: 'Personal')
+ expect(rendered).to have_css('a.active', text: 'Personal')
end
end
diff --git a/spec/views/projects/deployments/_confirm_rollback_modal_spec.html.rb b/spec/views/projects/deployments/_confirm_rollback_modal_spec.html_spec.rb
index 54ec4f32856..54ec4f32856 100644
--- a/spec/views/projects/deployments/_confirm_rollback_modal_spec.html.rb
+++ b/spec/views/projects/deployments/_confirm_rollback_modal_spec.html_spec.rb
diff --git a/spec/views/shared/_label_row.html.haml.rb b/spec/views/shared/_label_row.html.haml_spec.rb
index a58d5efc1e3..4cce13aa37c 100644
--- a/spec/views/shared/_label_row.html.haml.rb
+++ b/spec/views/shared/_label_row.html.haml_spec.rb
@@ -7,9 +7,20 @@ describe 'shared/_label_row.html.haml' do
}
label_types.each do |label_type, label_factory|
- let!(:label) { create(label_factory) }
+ let!(:label) do
+ label_record = create(label_factory)
+ label_record.present(issuable_subject: label_record.subject)
+ end
context "for a #{label_type}" do
+ before do
+ if label.project_label?
+ @project = label.project
+ else
+ @group = label.group
+ end
+ end
+
it 'has a non-linked label title' do
render 'shared/label_row', label: label
diff --git a/spec/views/shared/milestones/_issuable.html.haml.rb b/spec/views/shared/milestones/_issuable.html.haml_spec.rb
index 0a3f877cae0..0a3f877cae0 100644
--- a/spec/views/shared/milestones/_issuable.html.haml.rb
+++ b/spec/views/shared/milestones/_issuable.html.haml_spec.rb
diff --git a/spec/views/shared/milestones/_issuables.html.haml.rb b/spec/views/shared/milestones/_issuables.html.haml_spec.rb
index cbbb984935f..24b55338db3 100644
--- a/spec/views/shared/milestones/_issuables.html.haml.rb
+++ b/spec/views/shared/milestones/_issuables.html.haml_spec.rb
@@ -6,7 +6,7 @@ describe 'shared/milestones/_issuables.html.haml' do
before do
allow(view).to receive_messages(title: nil, id: nil, show_project_name: nil,
show_full_project_name: nil, dom_class: '',
- issuables: double(size: issuables_size).as_null_object)
+ issuables: double(length: issuables_size).as_null_object)
stub_template 'shared/milestones/_issuable.html.haml' => ''
end
diff --git a/spec/views/shared/milestones/_top.html.haml.rb b/spec/views/shared/milestones/_top.html.haml_spec.rb
index 516d81c87ac..f2ee8be5857 100644
--- a/spec/views/shared/milestones/_top.html.haml.rb
+++ b/spec/views/shared/milestones/_top.html.haml_spec.rb
@@ -7,6 +7,7 @@ describe 'shared/milestones/_top.html.haml' do
before do
allow(milestone).to receive(:milestones) { [] }
+ allow(milestone).to receive(:milestone) { milestone }
end
it 'renders a deprecation message for a legacy milestone' do
diff --git a/spec/workers/build_process_worker_spec.rb b/spec/workers/build_process_worker_spec.rb
new file mode 100644
index 00000000000..d9a02ece142
--- /dev/null
+++ b/spec/workers/build_process_worker_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe BuildProcessWorker do
+ describe '#perform' do
+ context 'when build exists' do
+ let(:pipeline) { create(:ci_pipeline) }
+ let(:build) { create(:ci_build, pipeline: pipeline) }
+
+ it 'processes build' do
+ expect_any_instance_of(Ci::Pipeline).to receive(:process!)
+ .with([build.id])
+
+ described_class.new.perform(build.id)
+ end
+ end
+
+ context 'when build does not exist' do
+ it 'does not raise exception' do
+ expect { described_class.new.perform(123) }
+ .not_to raise_error
+ end
+ end
+ end
+end
diff --git a/spec/workers/namespaces/root_statistics_worker_spec.rb b/spec/workers/namespaces/root_statistics_worker_spec.rb
index 8dd74b96d49..6bbdfe03ceb 100644
--- a/spec/workers/namespaces/root_statistics_worker_spec.rb
+++ b/spec/workers/namespaces/root_statistics_worker_spec.rb
@@ -74,15 +74,4 @@ describe Namespaces::RootStatisticsWorker, '#perform' do
worker.perform(group.id)
end
end
-
- context 'when update_statistics_namespace is off' do
- it 'does not create a new one' do
- stub_feature_flags(update_statistics_namespace: false, namespace: group)
-
- expect_any_instance_of(Namespaces::StatisticsRefresherService)
- .not_to receive(:execute)
-
- worker.perform(group.id)
- end
- end
end
diff --git a/spec/workers/namespaces/schedule_aggregation_worker_spec.rb b/spec/workers/namespaces/schedule_aggregation_worker_spec.rb
index d4a49a3f53a..be722f451e0 100644
--- a/spec/workers/namespaces/schedule_aggregation_worker_spec.rb
+++ b/spec/workers/namespaces/schedule_aggregation_worker_spec.rb
@@ -31,16 +31,6 @@ describe Namespaces::ScheduleAggregationWorker, '#perform', :clean_gitlab_redis_
expect(group.aggregation_schedule).to be_present
end
end
-
- context 'when update_statistics_namespace is off' do
- it 'does not create a new one' do
- stub_feature_flags(update_statistics_namespace: false, namespace: group)
-
- expect do
- worker.perform(group.id)
- end.not_to change(Namespace::AggregationSchedule, :count)
- end
- end
end
context 'when group is not the root ancestor' do
diff --git a/spec/workers/pipeline_process_worker_spec.rb b/spec/workers/pipeline_process_worker_spec.rb
index d33cf72e51e..ac677e3b555 100644
--- a/spec/workers/pipeline_process_worker_spec.rb
+++ b/spec/workers/pipeline_process_worker_spec.rb
@@ -12,6 +12,17 @@ describe PipelineProcessWorker do
described_class.new.perform(pipeline.id)
end
+
+ context 'when build_ids are passed' do
+ let(:build) { create(:ci_build, pipeline: pipeline, name: 'my-build') }
+
+ it 'processes pipeline with a list of builds' do
+ expect_any_instance_of(Ci::Pipeline).to receive(:process!)
+ .with([build.id])
+
+ described_class.new.perform(pipeline.id, [build.id])
+ end
+ end
end
context 'when pipeline does not exist' do
diff --git a/vendor/jupyter/values.yaml b/vendor/jupyter/values.yaml
index 2aadd3dbe1e..852c6c3f2b0 100644
--- a/vendor/jupyter/values.yaml
+++ b/vendor/jupyter/values.yaml
@@ -57,3 +57,7 @@ ingress:
annotations:
kubernetes.io/ingress.class: "nginx"
kubernetes.io/tls-acme: "true"
+
+proxy:
+ service:
+ type: ClusterIP
diff --git a/yarn.lock b/yarn.lock
index 6643b011dab..d8193af1310 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -996,17 +996,17 @@
resolved "https://registry.yarnpkg.com/@gitlab/svgs/-/svgs-1.67.0.tgz#c7b94eca13b99fd3aaa737fb6dcc0abc41d3c579"
integrity sha512-hJOmWEs6RkjzyKkb1vc9wwKGZIBIP0coHkxu/KgOoxhBVudpGk4CH7xJ6UuB2TKpb0SEh5CC1CzRZfBYaFhsaA==
-"@gitlab/ui@^5.11.1":
- version "5.11.1"
- resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-5.11.1.tgz#10ee8a4410eb249032142f85f6180b6c465c48d7"
- integrity sha512-bxIB3//aaYZIT6fpDKhIW60gvVvOCbw6inqC8xffQmklFYFKgcZjEIBu3RH5oJ6t3zFxeelcPQG7+t2F+p3bIg==
+"@gitlab/ui@5.12.1":
+ version "5.12.1"
+ resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-5.12.1.tgz#70035747cec96a729e012924ab2d3e3b6067a558"
+ integrity sha512-W4rvZj2Fab1UpXR0Wyi7wSvj+5Ko+TWHibC/q/FSRHMsbeSLq77lljd7rQWeXXNMBvEKwr4NqSmckWsjaSOLfw==
dependencies:
"@babel/standalone" "^7.0.0"
"@gitlab/vue-toasted" "^1.2.1"
bootstrap "4.3.1"
bootstrap-vue "2.0.0-rc.27"
copy-to-clipboard "^3.0.8"
- echarts "^4.2.0-rc.2"
+ echarts "^4.2.1"
highlight.js "^9.13.1"
js-beautify "^1.8.8"
lodash "^4.17.14"
@@ -4309,12 +4309,12 @@ ecc-jsbn@~0.1.1:
jsbn "~0.1.0"
safer-buffer "^2.1.0"
-echarts@^4.2.0-rc.2:
- version "4.2.0-rc.2"
- resolved "https://registry.yarnpkg.com/echarts/-/echarts-4.2.0-rc.2.tgz#6a98397aafa81b65cbf0bc15d9afdbfb244df91e"
- integrity sha512-5Y4Kyi4eNsRM9Cnl7Q8C6PFVjznBJv1VIiMm/VSQ9zyqeo+ce1695GqUd9v4zfVx+Ow1gnwMJX67h0FNvarScw==
+echarts@^4.2.1:
+ version "4.2.1"
+ resolved "https://registry.yarnpkg.com/echarts/-/echarts-4.2.1.tgz#9a8ea3b03354f86f824d97625c334cf16965ef03"
+ integrity sha512-pw4xScRPsLegD/cqEcoXRKeA2SD4+s+Kyo0Na166NamOWhzNl2yI5RZ2rE97tBlAopNmhyMeBVpAeD5qb+ee1A==
dependencies:
- zrender "4.0.5"
+ zrender "4.0.7"
editions@^1.3.3:
version "1.3.4"
@@ -8354,7 +8354,7 @@ moment-mini@^2.22.1:
resolved "https://registry.yarnpkg.com/moment-mini/-/moment-mini-2.22.1.tgz#bc32d73e43a4505070be6b53494b17623183420d"
integrity sha512-OUCkHOz7ehtNMYuZjNciXUfwTuz8vmF1MTbAy59ebf+ZBYZO5/tZKuChVWCX+uDo+4idJBpGltNfV8st+HwsGw==
-moment@2.x, moment@^2.10.2:
+moment@^2.10.2:
version "2.24.0"
resolved "https://registry.yarnpkg.com/moment/-/moment-2.24.0.tgz#0d055d53f5052aa653c9f6eb68bb5d12bf5c2b5b"
integrity sha512-bV7f+6l2QigeBBZSM/6yTNq4P2fNpSWj/0e7jQcy87A8e7o2nAfP/34/2ky5Vw4B9S446EtIhodAzkFCcR4dQg==
@@ -8371,7 +8371,7 @@ monaco-editor@^0.15.6:
resolved "https://registry.yarnpkg.com/monaco-editor/-/monaco-editor-0.15.6.tgz#d63b3b06f86f803464f003b252627c3eb4a09483"
integrity sha512-JoU9V9k6KqT9R9Tiw1RTU8ohZ+Xnf9DMg6Ktqqw5hILumwmq7xqa/KLXw513uTUsWbhtnHoSJYYR++u3pkyxJg==
-mousetrap@1.4.6:
+mousetrap@^1.4.6:
version "1.4.6"
resolved "https://registry.yarnpkg.com/mousetrap/-/mousetrap-1.4.6.tgz#eaca72e22e56d5b769b7555873b688c3332e390a"
integrity sha1-6spy4i5W1bdpt1VYc7aIwzMuOQo=
@@ -9308,12 +9308,10 @@ pify@^4.0.0, pify@^4.0.1:
resolved "https://registry.yarnpkg.com/pify/-/pify-4.0.1.tgz#4b2cd25c50d598735c50292224fd8c6df41e3231"
integrity sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==
-pikaday@^1.6.1:
- version "1.6.1"
- resolved "https://registry.yarnpkg.com/pikaday/-/pikaday-1.6.1.tgz#b91bcb9b8539cedd8d6d08e4e7465e12095671b0"
- integrity sha512-B+pxVcSGuzLblMe4dnhCF3dnI2zkyj5GAqanGX9cVcOk90fp2ULo1OZFUPRXQXUE5tmcimnk1tPOFs8tUHQetQ==
- optionalDependencies:
- moment "2.x"
+pikaday@^1.8.0:
+ version "1.8.0"
+ resolved "https://registry.yarnpkg.com/pikaday/-/pikaday-1.8.0.tgz#ce930e257042e852e6aadee1115e01554b2d71c5"
+ integrity sha512-SgGxMYX0NHj9oQnMaSyAipr2gOrbB4Lfs/TJTb6H6hRHs39/5c5VZi73Q8hr53+vWjdn6HzkWcj8Vtl3c9ziaA==
pinkie-promise@^2.0.0:
version "2.0.1"
@@ -13217,7 +13215,7 @@ zen-observable@^0.8.0:
resolved "https://registry.yarnpkg.com/zen-observable/-/zen-observable-0.8.11.tgz#d3415885eeeb42ee5abb9821c95bb518fcd6d199"
integrity sha512-N3xXQVr4L61rZvGMpWe8XoCGX8vhU35dPyQ4fm5CY/KDlG0F75un14hjbckPXTDuKUY6V0dqR2giT6xN8Y4GEQ==
-zrender@4.0.5:
- version "4.0.5"
- resolved "https://registry.yarnpkg.com/zrender/-/zrender-4.0.5.tgz#6e8f738971ce2cd624aac82b2156729b1c0e5a82"
- integrity sha512-SintgipGEJPT9Sz2ABRoE4ZD7Yzy7oR7j7KP6H+C9FlbHWnLUfGVK7E8UV27pGwlxAMB0EsnrqhXx5XjAfv/KA==
+zrender@4.0.7:
+ version "4.0.7"
+ resolved "https://registry.yarnpkg.com/zrender/-/zrender-4.0.7.tgz#15ae960822f5efed410995d37e5107fe3de10e6d"
+ integrity sha512-TNloHe0ums6zxbHfnaCryM61J4IWDajZwNq6dHk9vfWhhysO/OeFvvR0drBs/nbXha2YxSzfQj2FiCd6RVBe+Q==