From 00fa950a34b1c94617110b150b8b2517d5241249 Mon Sep 17 00:00:00 2001 From: GitLab Bot Date: Fri, 13 Mar 2020 18:09:39 +0000 Subject: Add latest changes from gitlab-org/gitlab@master --- GITLAB_WORKHORSE_VERSION | 2 +- app/controllers/search_controller.rb | 11 ++ app/views/search/_category.html.haml | 1 - .../unreleased/199220-hide-snippet-blob-search.yml | 5 + ...8887-optimize-project-counters-service-desk.yml | 5 + changelogs/unreleased/37256-bump-wh-version.yml | 5 + ...x_on_id_and_service_desk_enabled_to_projects.rb | 18 +++ db/schema.rb | 3 +- doc/administration/gitaly/index.md | 20 ++- doc/api/search.md | 2 + doc/api/vulnerability_findings.md | 5 +- doc/ci/jenkins/index.md | 4 +- doc/development/github_importer.md | 30 ++-- doc/development/licensing.md | 1 + doc/development/performance.md | 7 +- .../img/guide_waf_ingress_installation.png | Bin 54834 -> 0 bytes .../img/guide_waf_ingress_installation_v12_9.png | Bin 0 -> 24830 bytes .../img/guide_waf_ingress_save_changes_v12_9.png | Bin 0 -> 37120 bytes .../web_application_firewall/quick_start_guide.md | 15 +- .../container_scanning/index.md | 3 + .../dependency_scanning/index.md | 3 + doc/user/application_security/sast/index.md | 3 + doc/user/clusters/applications.md | 14 +- doc/user/gitlab_com/index.md | 2 +- ...rometheus_dashboard_edit_metric_link_v_12_9.png | Bin 0 -> 29178 bytes doc/user/project/integrations/prometheus.md | 8 + lib/api/helpers/file_upload_helpers.rb | 5 +- lib/api/project_import.rb | 26 +--- locale/gitlab.pot | 3 - spec/controllers/search_controller_spec.rb | 8 + .../projects/snippets/create_snippet_spec.rb | 2 + spec/features/snippets/search_snippets_spec.rb | 41 ------ spec/models/concerns/bulk_insert_safe_spec.rb | 22 ++- spec/requests/api/project_import_spec.rb | 162 +++++++++++---------- 34 files changed, 237 insertions(+), 199 deletions(-) create mode 100644 changelogs/unreleased/199220-hide-snippet-blob-search.yml create mode 100644 changelogs/unreleased/208887-optimize-project-counters-service-desk.yml create mode 100644 changelogs/unreleased/37256-bump-wh-version.yml create mode 100644 db/migrate/20200312163407_add_index_on_id_and_service_desk_enabled_to_projects.rb delete mode 100644 doc/topics/web_application_firewall/img/guide_waf_ingress_installation.png create mode 100644 doc/topics/web_application_firewall/img/guide_waf_ingress_installation_v12_9.png create mode 100644 doc/topics/web_application_firewall/img/guide_waf_ingress_save_changes_v12_9.png create mode 100644 doc/user/project/integrations/img/prometheus_dashboard_edit_metric_link_v_12_9.png diff --git a/GITLAB_WORKHORSE_VERSION b/GITLAB_WORKHORSE_VERSION index 93fc83f8c43..f062572ef7b 100644 --- a/GITLAB_WORKHORSE_VERSION +++ b/GITLAB_WORKHORSE_VERSION @@ -1 +1 @@ -8.24.0 +8.25.0 diff --git a/app/controllers/search_controller.rb b/app/controllers/search_controller.rb index 04d2b3068da..d1e15a72350 100644 --- a/app/controllers/search_controller.rb +++ b/app/controllers/search_controller.rb @@ -5,6 +5,7 @@ class SearchController < ApplicationController include SearchHelper include RendersCommits + before_action :override_snippet_scope, only: :show around_action :allow_gitaly_ref_name_caching skip_before_action :authenticate_user! 
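For context on the hunk above and the one that follows: the new filter simply rewrites an unsupported search scope before `show` runs. A minimal, self-contained sketch of the same pattern (the controller below is hypothetical and not taken from this patch):

```ruby
# Hypothetical standalone controller illustrating the scope-override pattern
# introduced in this commit: a before_action rewrites an unsupported search
# scope so the action only ever sees supported scopes.
require 'action_controller'

class DemoSearchController < ActionController::Base
  before_action :override_snippet_scope, only: :show

  def show
    render plain: "searching with scope=#{params[:scope]}"
  end

  private

  # Mirrors the filter added in app/controllers/search_controller.rb:
  # force snippet searches onto the title-only scope.
  def override_snippet_scope
    params[:scope] = 'snippet_titles' if params[:snippets] == 'true'
  end
end
```

With a filter like this in place, a request such as `/search?snippets=true&scope=snippet_blobs` is served as a `snippet_titles` search, which is what the controller spec added later in this patch verifies.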
@@ -103,4 +104,14 @@ class SearchController < ApplicationController Gitlab::UsageDataCounters::SearchCounter.increment_navbar_searches_count end + + # Disallow web snippet_blobs search as we migrate snippet + # from database-backed storage to git repository-based, + # and searching across multiple git repositories is not feasible. + # + # TODO: after 13.0 refactor this into Search::SnippetService + # See https://gitlab.com/gitlab-org/gitlab/issues/208882 + def override_snippet_scope + params[:scope] = 'snippet_titles' if params[:snippets] == 'true' + end end diff --git a/app/views/search/_category.html.haml b/app/views/search/_category.html.haml index 255a62d0d06..6ad155eb715 100644 --- a/app/views/search/_category.html.haml +++ b/app/views/search/_category.html.haml @@ -24,7 +24,6 @@ = users - elsif @show_snippets - = search_filter_link 'snippet_blobs', _("Snippet Contents"), search: { snippets: true, group_id: nil, project_id: nil } = search_filter_link 'snippet_titles', _("Titles and Filenames"), search: { snippets: true, group_id: nil, project_id: nil } - else = search_filter_link 'projects', _("Projects"), data: { qa_selector: 'projects_tab' } diff --git a/changelogs/unreleased/199220-hide-snippet-blob-search.yml b/changelogs/unreleased/199220-hide-snippet-blob-search.yml new file mode 100644 index 00000000000..083d8edaeb3 --- /dev/null +++ b/changelogs/unreleased/199220-hide-snippet-blob-search.yml @@ -0,0 +1,5 @@ +--- +title: Remove and deprecate snippet content search +merge_request: 26359 +author: +type: removed diff --git a/changelogs/unreleased/208887-optimize-project-counters-service-desk.yml b/changelogs/unreleased/208887-optimize-project-counters-service-desk.yml new file mode 100644 index 00000000000..5c422ecb959 --- /dev/null +++ b/changelogs/unreleased/208887-optimize-project-counters-service-desk.yml @@ -0,0 +1,5 @@ +--- +title: Optimize Project related count service desk enabled +merge_request: 27115 +author: +type: performance diff --git a/changelogs/unreleased/37256-bump-wh-version.yml b/changelogs/unreleased/37256-bump-wh-version.yml new file mode 100644 index 00000000000..6f3392e5ac8 --- /dev/null +++ b/changelogs/unreleased/37256-bump-wh-version.yml @@ -0,0 +1,5 @@ +--- +title: Enable Workhorse upload acceleration for Project Import uploads via API +merge_request: 26914 +author: +type: performance diff --git a/db/migrate/20200312163407_add_index_on_id_and_service_desk_enabled_to_projects.rb b/db/migrate/20200312163407_add_index_on_id_and_service_desk_enabled_to_projects.rb new file mode 100644 index 00000000000..903bd12ddf4 --- /dev/null +++ b/db/migrate/20200312163407_add_index_on_id_and_service_desk_enabled_to_projects.rb @@ -0,0 +1,18 @@ +# frozen_string_literal: true + +class AddIndexOnIdAndServiceDeskEnabledToProjects < ActiveRecord::Migration[6.0] + include Gitlab::Database::MigrationHelpers + + DOWNTIME = false + INDEX_NAME = 'index_projects_on_id_service_desk_enabled' + + disable_ddl_transaction! + + def up + add_concurrent_index :projects, :id, where: 'service_desk_enabled = true', name: INDEX_NAME + end + + def down + remove_concurrent_index_by_name :projects, INDEX_NAME + end +end diff --git a/db/schema.rb b/db/schema.rb index a9dcfa50d33..f57b0638e05 100644 --- a/db/schema.rb +++ b/db/schema.rb @@ -10,7 +10,7 @@ # # It's strongly recommended that you check this file into your version control system. 
-ActiveRecord::Schema.define(version: 2020_03_11_165635) do +ActiveRecord::Schema.define(version: 2020_03_12_163407) do # These are extensions that must be enabled in order to support this database enable_extension "pg_trgm" @@ -3491,6 +3491,7 @@ ActiveRecord::Schema.define(version: 2020_03_11_165635) do t.index ["id", "repository_storage", "last_repository_updated_at"], name: "idx_projects_on_repository_storage_last_repository_updated_at" t.index ["id"], name: "index_on_id_partial_with_legacy_storage", where: "((storage_version < 2) OR (storage_version IS NULL))" t.index ["id"], name: "index_projects_on_id_partial_for_visibility", unique: true, where: "(visibility_level = ANY (ARRAY[10, 20]))" + t.index ["id"], name: "index_projects_on_id_service_desk_enabled", where: "(service_desk_enabled = true)" t.index ["id"], name: "index_projects_on_mirror_and_mirror_trigger_builds_both_true", where: "((mirror IS TRUE) AND (mirror_trigger_builds IS TRUE))" t.index ["last_activity_at", "id"], name: "index_projects_api_last_activity_at_id_desc", order: { id: :desc } t.index ["last_activity_at", "id"], name: "index_projects_api_vis20_last_activity_at", where: "(visibility_level = 20)" diff --git a/doc/administration/gitaly/index.md b/doc/administration/gitaly/index.md index 1bd027ac0c9..4630b6d8259 100644 --- a/doc/administration/gitaly/index.md +++ b/doc/administration/gitaly/index.md @@ -163,17 +163,21 @@ Git operations in GitLab will result in an API error. unicorn['enable'] = false sidekiq['enable'] = false gitlab_workhorse['enable'] = false + grafana['enable'] = false + + # If you run a separate monitoring node you can disable these services + alertmanager['enable'] = false + prometheus['enable'] = false + + # If you don't run a separate monitoring node you can + # enable Prometheus access & disable these extra services + # This makes Prometheus listen on all interfaces. You must use firewalls to restrict access to this address/port. + # prometheus['listen_address'] = '0.0.0.0:9090' + # prometheus['monitor_kubernetes'] = false # If you don't want to run monitoring services uncomment the following (not recommended) - # alertmanager['enable'] = false # gitlab_exporter['enable'] = false - # grafana['enable'] = false # node_exporter['enable'] = false - # prometheus['enable'] = false - - # Enable prometheus monitoring - comment out if you disable monitoring services above. - # This makes Prometheus listen on all interfaces. You must use firewalls to restrict access to this address/port. - prometheus['listen_address'] = '0.0.0.0:9090' # Prevent database connections during 'gitlab-ctl reconfigure' gitlab_rails['rake_cache_clear'] = false @@ -861,7 +865,7 @@ default level is `WARN`. You can run a gRPC trace with: ```shell -GRPC_TRACE=all GRPC_VERBOSITY=DEBUG sudo gitlab-rake gitlab:gitaly:check +sudo GRPC_TRACE=all GRPC_VERBOSITY=DEBUG gitlab-rake gitlab:gitaly:check ``` ### Observing `gitaly-ruby` traffic diff --git a/doc/api/search.md b/doc/api/search.md index 640a98117e0..9ca9cc5c2a4 100644 --- a/doc/api/search.md +++ b/doc/api/search.md @@ -255,6 +255,8 @@ Example response: ### Scope: snippet_blobs +This scope will be disabled after GitLab 13.0. 
+ ```shell curl --header "PRIVATE-TOKEN: " https://gitlab.example.com/api/v4/search?scope=snippet_blobs&search=test ``` diff --git a/doc/api/vulnerability_findings.md b/doc/api/vulnerability_findings.md index d1d4966f0f0..f5e607d6829 100644 --- a/doc/api/vulnerability_findings.md +++ b/doc/api/vulnerability_findings.md @@ -6,7 +6,7 @@ NOTE: **Note:** This API resource is renamed from Vulnerabilities to Vulnerability Findings because the Vulnerabilities are reserved for serving the upcoming [Standalone Vulnerability objects](https://gitlab.com/gitlab-org/gitlab/issues/13561). To fix any broken integrations with the former Vulnerabilities API, change the `vulnerabilities` URL part to be -`vulnerability_findings`. +`vulnerability_findings`. Every API call to vulnerability findings must be [authenticated](README.md#authentication). @@ -46,6 +46,9 @@ GET /projects/:id/vulnerability_findings?confidence=unknown,experimental GET /projects/:id/vulnerability_findings?pipeline_id=42 ``` +CAUTION: **Deprecation:** +Beginning with GitLab 12.9, the `undefined` severity level is deprecated and the `undefined` confidence level isn't reported for new vulnerabilities. + | Attribute | Type | Required | Description | | ------------- | -------------- | -------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) which the authenticated user is a member of. | diff --git a/doc/ci/jenkins/index.md b/doc/ci/jenkins/index.md index f3e9efae7d4..55a82606897 100644 --- a/doc/ci/jenkins/index.md +++ b/doc/ci/jenkins/index.md @@ -81,7 +81,9 @@ There are some high level differences between the products worth mentioning: container images to set up your build environment. For example, set up one pipeline that builds your build environment itself and publish that to the container registry. Then, have your pipelines use this instead of each building their own environment, which will be slower and may be less consistent. We have extensive docs on [how to use the Container Registry](../../user/packages/container_registry/index.md). -- Totally stuck and not sure where to turn for advice? The [GitLab community forum](https://forum.gitlab.com/) can be a great resource. +- A central utilities repository can be a great place to put assorted scheduled jobs + or other manual jobs that function like utilities. Jenkins installations tend to + have a few of these. ## Agents vs. Runners diff --git a/doc/development/github_importer.md b/doc/development/github_importer.md index 6b8c083d55f..5d37d2f119f 100644 --- a/doc/development/github_importer.md +++ b/doc/development/github_importer.md @@ -9,7 +9,7 @@ importer and a parallel importer. The Rake task `import:github` uses the sequential importer, while everything else uses the parallel importer. The difference between these two importers is quite simple: the sequential importer does all work in a single thread, making it more useful for debugging purposes -or Rake tasks. The parallel importer on the other hand uses Sidekiq. +or Rake tasks. The parallel importer, on the other hand, uses Sidekiq. ## Requirements @@ -31,9 +31,9 @@ The importer's codebase is broken up into the following directories: ## Architecture overview -When a GitHub project is imported we schedule and execute a job for the -`RepositoryImportworker` worker as all other importers. 
However, unlike other -importers we don't immediately perform the work necessary. Instead work is +When a GitHub project is imported, we schedule and execute a job for the +`RepositoryImportWorker` worker as all other importers. However, unlike other +importers, we don't immediately perform the work necessary. Instead work is divided into separate stages, with each stage consisting out of a set of Sidekiq jobs that are executed. Between every stage a job is scheduled that periodically checks if all work of the current stage is completed, advancing the import @@ -65,9 +65,9 @@ This worker will import all pull requests. For every pull request a job for the ### 5. Stage::ImportIssuesAndDiffNotesWorker -This worker will import all issues and pull request comments. For every issue we +This worker will import all issues and pull request comments. For every issue, we schedule a job for the `Gitlab::GithubImport::ImportIssueWorker` worker. For -pull request comments we instead schedule jobs for the +pull request comments, we instead schedule jobs for the `Gitlab::GithubImport::DiffNoteImporter` worker. This worker processes both issues and diff notes in parallel so we don't need to @@ -82,7 +82,7 @@ project. ### 6. Stage::ImportNotesWorker This worker imports regular comments for both issues and pull requests. For -every comment we schedule a job for the +every comment, we schedule a job for the `Gitlab::GithubImport::ImportNoteWorker` worker. Regular comments have to be imported at the end since the GitHub API used @@ -116,14 +116,14 @@ schedule the worker of the next stage. To reduce the number of `AdvanceStageWorker` jobs scheduled this worker will briefly wait for jobs to complete before deciding what the next action should -be. For small projects this may slow down the import process a bit, but it will +be. For small projects, this may slow down the import process a bit, but it will also reduce pressure on the system as a whole. ## Refreshing import JIDs GitLab includes a worker called `StuckImportJobsWorker` that will periodically run and mark project imports as failed if they have been running for more than -15 hours. For GitHub projects this poses a bit of a problem: importing large +15 hours. For GitHub projects, this poses a bit of a problem: importing large projects could take several hours depending on how often we hit the GitHub rate limit (more on this below), but we don't want `StuckImportJobsWorker` to mark our import as failed because of this. @@ -137,7 +137,7 @@ long we're still performing work. ## GitHub rate limit -GitHub has a rate limit of 5 000 API calls per hour. The number of requests +GitHub has a rate limit of 5,000 API calls per hour. The number of requests necessary to import a project is largely dominated by the number of unique users involved in a project (e.g. issue authors). Other data such as issue pages and comments typically only requires a few dozen requests to import. This is @@ -176,11 +176,11 @@ There are two types of lookups we cache: in our GitLab database. The expiration time of these keys is 24 hours. When retrieving the cache of a -positive lookups we refresh the TTL automatically. The TTL of false lookups is +positive lookup, we refresh the TTL automatically. The TTL of false lookups is never refreshed. -Because of this caching layer it's possible newly registered GitLab accounts -won't be linked to their corresponding GitHub accounts. 
This however will sort +Because of this caching layer, it's possible newly registered GitLab accounts +won't be linked to their corresponding GitHub accounts. This, however, will sort itself out once the cached keys expire. The user cache lookup is shared across projects. This means that the more @@ -194,12 +194,12 @@ The code for this resides in: ## Mapping labels and milestones To reduce pressure on the database we do not query it when setting labels and -milestones on issues and merge requests. Instead we cache this data when we +milestones on issues and merge requests. Instead, we cache this data when we import labels and milestones, then we reuse this cache when assigning them to issues/merge requests. Similar to the user lookups these cache keys are expired automatically after 24 hours of not being used. -Unlike the user lookup caches these label and milestone caches are scoped to the +Unlike the user lookup caches, these label and milestone caches are scoped to the project that is being imported. The code for this resides in: diff --git a/doc/development/licensing.md b/doc/development/licensing.md index a716db6b407..c7676cc2596 100644 --- a/doc/development/licensing.md +++ b/doc/development/licensing.md @@ -57,6 +57,7 @@ Libraries with the following licenses are acceptable for use: - [Creative Commons Zero (CC0)][CC0]: A public domain dedication, recommended as a way to disclaim copyright on your work to the maximum extent possible. - [Unlicense][UNLICENSE]: Another public domain dedication. - [OWFa 1.0][OWFa1]: An open-source license and patent grant designed for specifications. +- [JSON License](https://www.json.org/license.html): Equivalent to the MIT license plus the statement, "The Software shall be used for Good, not Evil." ## Unacceptable Licenses diff --git a/doc/development/performance.md b/doc/development/performance.md index 5697f41c3dc..72eb85c623b 100644 --- a/doc/development/performance.md +++ b/doc/development/performance.md @@ -448,9 +448,12 @@ SOME_CONSTANT = 'bar' You might want millions of project rows in your local database, for example, in order to compare relative query performance, or to reproduce a bug. You could -do this by hand with SQL commands, but since you have ActiveRecord models, you -might find using these gems more convenient: +do this by hand with SQL commands or using [Mass Inserting Rails +Models](mass_insert.md) functionality. 
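To make the ActiveRecord route above concrete, here is a rough sketch (the model and its columns are placeholders and not part of this patch; adapt them to the table you are measuring):

```ruby
# Sketch only: seed a large number of rows for local performance testing.
# `MyModel` and its attribute hash are hypothetical; real tables will need
# whatever NOT NULL columns your schema requires.
now = Time.current

1_000_000.times.each_slice(10_000) do |batch|
  rows = batch.map do |i|
    { title: "perf test row #{i}", created_at: now, updated_at: now }
  end

  # insert_all issues one multi-row INSERT per batch and skips validations
  # and callbacks, which is what keeps a run like this tractable.
  MyModel.insert_all(rows)
end
```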
+ +Assuming you are working with ActiveRecord models, you might also find these links helpful: + +- [Insert records in batches](insert_into_tables_in_batches.md) - [BulkInsert gem](https://github.com/jamis/bulk_insert) - [ActiveRecord::PgGenerateSeries gem](https://github.com/ryu39/active_record-pg_generate_series) diff --git a/doc/topics/web_application_firewall/img/guide_waf_ingress_installation.png b/doc/topics/web_application_firewall/img/guide_waf_ingress_installation.png deleted file mode 100644 index a150fa4e46f..00000000000 Binary files a/doc/topics/web_application_firewall/img/guide_waf_ingress_installation.png and /dev/null differ diff --git a/doc/topics/web_application_firewall/img/guide_waf_ingress_installation_v12_9.png b/doc/topics/web_application_firewall/img/guide_waf_ingress_installation_v12_9.png new file mode 100644 index 00000000000..5c4718b0487 Binary files /dev/null and b/doc/topics/web_application_firewall/img/guide_waf_ingress_installation_v12_9.png differ diff --git a/doc/topics/web_application_firewall/img/guide_waf_ingress_save_changes_v12_9.png b/doc/topics/web_application_firewall/img/guide_waf_ingress_save_changes_v12_9.png new file mode 100644 index 00000000000..df1223f12ec Binary files /dev/null and b/doc/topics/web_application_firewall/img/guide_waf_ingress_save_changes_v12_9.png differ diff --git a/doc/topics/web_application_firewall/quick_start_guide.md b/doc/topics/web_application_firewall/quick_start_guide.md index 5942a5ac996..417e1973b87 100644 --- a/doc/topics/web_application_firewall/quick_start_guide.md +++ b/doc/topics/web_application_firewall/quick_start_guide.md @@ -14,16 +14,6 @@ need to ensure your own [Runners are configured](../../ci/runners/README.md) and **Note**: GitLab's Web Application Firewall is deployed with [Ingress](../../user/clusters/applications.md#Ingress), so it will be available to your applications no matter how you deploy them to Kubernetes. -## Enable or disable ModSecurity - -ModSecurity is enabled by default on GitLab.com. You can toggle the feature flag to false by running the following command in the Rails console: - -```ruby -Feature.disable(:ingress_modsecurity) -``` - -Once disabled, you must uninstall and reinstall your Ingress application for the changes to take effect. See the [Feature Flag](../../user/project/operations/feature_flags.md) documentation for more information. - ## Configuring your Google account Before creating and connecting your Kubernetes cluster to your GitLab project, @@ -112,10 +102,9 @@ Once it is installed, the other applications that rely on it will each have thei For this guide, we need to install Ingress. Ingress provides load balancing, SSL termination, and name-based virtual hosting, using NGINX behind -the scenes. Make sure that the **Enable Web Application Firewall** button is checked -before installing. +the scenes. Make sure to switch the **Enable Web Application Firewall** toggle to the enabled position before installing. -![Cluster applications](./img/guide_waf_ingress_installation.png) +![Cluster applications](./img/guide_waf_ingress_installation_v12_9.png) After Ingress is installed, wait a few seconds and copy the IP address that is displayed in order to add in your base **Domain** at the top of the page. 
For diff --git a/doc/user/application_security/container_scanning/index.md b/doc/user/application_security/container_scanning/index.md index e51cda3c300..1d4a2187dc6 100644 --- a/doc/user/application_security/container_scanning/index.md +++ b/doc/user/application_security/container_scanning/index.md @@ -347,6 +347,9 @@ it highlighted: } ``` +CAUTION: **Deprecation:** +Beginning with GitLab 12.9, container scanning no longer reports `undefined` severity and confidence levels. + Here is the description of the report file structure nodes and their meaning. All fields are mandatory to be present in the report JSON unless stated otherwise. Presence of optional fields depends on the underlying analyzers being used. diff --git a/doc/user/application_security/dependency_scanning/index.md b/doc/user/application_security/dependency_scanning/index.md index b711a652a2f..1a0a7a7711f 100644 --- a/doc/user/application_security/dependency_scanning/index.md +++ b/doc/user/application_security/dependency_scanning/index.md @@ -355,6 +355,9 @@ it highlighted: } ``` +CAUTION: **Deprecation:** +Beginning with GitLab 12.9, dependency scanning no longer reports `undefined` severity and confidence levels. + Here is the description of the report file structure nodes and their meaning. All fields are mandatory to be present in the report JSON unless stated otherwise. Presence of optional fields depends on the underlying analyzers being used. diff --git a/doc/user/application_security/sast/index.md b/doc/user/application_security/sast/index.md index 9fbbd594cb2..42889a86e4c 100644 --- a/doc/user/application_security/sast/index.md +++ b/doc/user/application_security/sast/index.md @@ -413,6 +413,9 @@ it highlighted: } ``` +CAUTION: **Deprecation:** +Beginning with GitLab 12.9, SAST no longer reports `undefined` severity and confidence levels. + Here is the description of the report file structure nodes and their meaning. All fields are mandatory in the report JSON unless stated otherwise. Presence of optional fields depends on the underlying analyzers being used. diff --git a/doc/user/clusters/applications.md b/doc/user/clusters/applications.md index 900ba62d40d..192941ff6e1 100644 --- a/doc/user/clusters/applications.md +++ b/doc/user/clusters/applications.md @@ -279,21 +279,23 @@ This feature: kubectl logs -n gitlab-managed-apps $(kubectl get pod -n gitlab-managed-apps -l app=nginx-ingress,component=controller --no-headers=true -o custom-columns=:metadata.name) modsecurity-log -f ``` -To enable ModSecurity, check the **Enable Web Application Firewall** checkbox -when installing your [Ingress application](#ingress). +To enable the WAF, switch the **Enable Web Application Firewall** toggle to the enabled position when installing or updating the [Ingress application](#ingress). If this is your first time using GitLab's WAF, we recommend you follow the [quick start guide](../../topics/web_application_firewall/quick_start_guide.md). There is a small performance overhead by enabling ModSecurity. If this is considered significant for your application, you can disable ModSecurity's -rule engine for your deployed application by setting -[the deployment variable](../../topics/autodevops/index.md) +rule engine for your deployed application in any of the following ways: + +1. Setting [the deployment variable](../../topics/autodevops/index.md) `AUTO_DEVOPS_MODSECURITY_SEC_RULE_ENGINE` to `Off`. This will prevent ModSecurity from processing any requests for the given application or environment. 
-To permanently disable it, you must [uninstall](#uninstalling-applications) and -reinstall your Ingress application for the changes to take effect. +1. Switching the **Enable Web Application Firewall** toggle to the disabled position and applying the change through the **Save changes** button. This will reinstall +Ingress with the updated configuration. + +![Disabling WAF](../../topics/web_application_firewall/img/guide_waf_ingress_save_changes_v12_9.png) ### JupyterHub diff --git a/doc/user/gitlab_com/index.md b/doc/user/gitlab_com/index.md index 8c6da131c6d..361efdcba73 100644 --- a/doc/user/gitlab_com/index.md +++ b/doc/user/gitlab_com/index.md @@ -585,7 +585,7 @@ Service discovery: - [`gitlab-cookbooks` / `gitlab_consul` · GitLab](https://gitlab.com/gitlab-cookbooks/gitlab_consul) -### Haproxy +### HAProxy High Performance TCP/HTTP Load Balancer: diff --git a/doc/user/project/integrations/img/prometheus_dashboard_edit_metric_link_v_12_9.png b/doc/user/project/integrations/img/prometheus_dashboard_edit_metric_link_v_12_9.png new file mode 100644 index 00000000000..b66b1a9f39b Binary files /dev/null and b/doc/user/project/integrations/img/prometheus_dashboard_edit_metric_link_v_12_9.png differ diff --git a/doc/user/project/integrations/prometheus.md b/doc/user/project/integrations/prometheus.md index 4976ca75bf7..c64077c00f1 100644 --- a/doc/user/project/integrations/prometheus.md +++ b/doc/user/project/integrations/prometheus.md @@ -172,6 +172,14 @@ There are 2 methods to specify a variable in a query or dashboard: 1. Variables can be specified using the [Liquid template format](https://help.shopify.com/en/themes/liquid/basics), for example `{{ci_environment_slug}}` ([added](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/20793) in GitLab 12.6). 1. You can also enclose it in quotation marks with curly braces with a leading percent, for example `"%{ci_environment_slug}"`. This method is deprecated though and support will be [removed in the next major release](https://gitlab.com/gitlab-org/gitlab/issues/37990). +#### Editing additional metrics from the dashboard + +> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/208976) in GitLab 12.9. + +You can edit existing additional custom metrics by clicking the **{ellipsis_v}** **More actions** dropdown and selecting **Edit metric**. + +![Edit metric](img/prometheus_dashboard_edit_metric_link_v_12_9.png) + ### Defining custom dashboards per project > [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/59974) in GitLab 12.1. diff --git a/lib/api/helpers/file_upload_helpers.rb b/lib/api/helpers/file_upload_helpers.rb index c5fb291a2b7..dd551ec2976 100644 --- a/lib/api/helpers/file_upload_helpers.rb +++ b/lib/api/helpers/file_upload_helpers.rb @@ -4,11 +4,12 @@ module API module Helpers module FileUploadHelpers def file_is_valid? - params[:file] && params[:file]['tempfile'].respond_to?(:read) + filename = params[:file]&.original_filename + filename && ImportExportUploader::EXTENSION_WHITELIST.include?(File.extname(filename).delete('.')) end def validate_file! - render_api_error!('Uploaded file is invalid', 400) unless file_is_valid? + render_api_error!({ error: _('You need to upload a GitLab project export archive (ending in .gz).') }, 422) unless file_is_valid? 
end end end diff --git a/lib/api/project_import.rb b/lib/api/project_import.rb index d2247aaee34..ffa9dd13754 100644 --- a/lib/api/project_import.rb +++ b/lib/api/project_import.rb @@ -21,10 +21,6 @@ module API def rate_limiter ::Gitlab::ApplicationRateLimiter end - - def with_workhorse_upload_acceleration? - request.headers[Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER].present? - end end before do @@ -46,11 +42,7 @@ module API params do requires :path, type: String, desc: 'The new project path and name' - # TODO: remove rubocop disable - https://gitlab.com/gitlab-org/gitlab/issues/14960 - # and mark WH fields as required (instead of optional) after the WH version including - # https://gitlab.com/gitlab-org/gitlab-workhorse/-/merge_requests/459 - # is deployed and GITLAB_WORKHORSE_VERSION is updated accordingly. - requires :file, types: [::API::Validations::Types::WorkhorseFile, File], desc: 'The project export file to be imported' # rubocop:disable Scalability/FileUploads + requires :file, type: ::API::Validations::Types::WorkhorseFile, desc: 'The project export file to be imported' optional :name, type: String, desc: 'The name of the project to be imported. Defaults to the path of the project if not provided.' optional :namespace, type: String, desc: "The ID or name of the namespace that the project will be imported into. Defaults to the current user's namespace." optional :overwrite, type: Boolean, default: false, desc: 'If there is a project in the same namespace and with the same name overwrite it' @@ -75,7 +67,7 @@ module API success Entities::ProjectImportStatus end post 'import' do - require_gitlab_workhorse! if with_workhorse_upload_acceleration? + require_gitlab_workhorse! key = "project_import".to_sym @@ -87,27 +79,19 @@ module API Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-foss/issues/42437') + validate_file! + namespace = if import_params[:namespace] find_namespace!(import_params[:namespace]) else current_user.namespace end - # TODO: remove the condition after the WH version including - # https://gitlab.com/gitlab-org/gitlab-workhorse/-/merge_requests/459 - # is deployed and GITLAB_WORKHORSE_VERSION is updated accordingly. - file = if with_workhorse_upload_acceleration? - import_params[:file] || bad_request!('Unable to process project import file') - else - validate_file! - import_params[:file]['tempfile'] - end - project_params = { path: import_params[:path], namespace_id: namespace.id, name: import_params[:name], - file: file, + file: import_params[:file], overwrite: import_params[:overwrite] } diff --git a/locale/gitlab.pot b/locale/gitlab.pot index 601703b8f22..0336e834d06 100644 --- a/locale/gitlab.pot +++ b/locale/gitlab.pot @@ -18284,9 +18284,6 @@ msgstr "" msgid "Smartcard authentication failed: client certificate header is missing." 
msgstr "" -msgid "Snippet Contents" -msgstr "" - msgid "Snippets" msgstr "" diff --git a/spec/controllers/search_controller_spec.rb b/spec/controllers/search_controller_spec.rb index 19786417d76..1fe313452fe 100644 --- a/spec/controllers/search_controller_spec.rb +++ b/spec/controllers/search_controller_spec.rb @@ -140,6 +140,14 @@ describe SearchController do end end + context 'snippet search' do + it 'forces title search' do + get :show, params: { scope: 'snippet_blobs', snippets: 'true', search: 'foo' } + + expect(assigns[:scope]).to eq('snippet_titles') + end + end + it 'finds issue comments' do project = create(:project, :public) note = create(:note_on_issue, project: project) diff --git a/spec/features/projects/snippets/create_snippet_spec.rb b/spec/features/projects/snippets/create_snippet_spec.rb index 28d24073b79..b55a42e07a9 100644 --- a/spec/features/projects/snippets/create_snippet_spec.rb +++ b/spec/features/projects/snippets/create_snippet_spec.rb @@ -32,6 +32,8 @@ shared_examples_for 'snippet editor' do visit project_snippets_path(project) + # Wait for the SVG to ensure the button location doesn't shift + within('.empty-state') { find('img.js-lazy-loaded') } click_on('New snippet') wait_for_requests end diff --git a/spec/features/snippets/search_snippets_spec.rb b/spec/features/snippets/search_snippets_spec.rb index dce790e5708..691716d3576 100644 --- a/spec/features/snippets/search_snippets_spec.rb +++ b/spec/features/snippets/search_snippets_spec.rb @@ -16,45 +16,4 @@ describe 'Search Snippets' do expect(page).to have_link(public_snippet.title) expect(page).to have_link(private_snippet.title) end - - it 'User searches for snippet contents' do - create(:personal_snippet, - :public, - title: 'Many lined snippet', - content: <<-CONTENT.strip_heredoc - |line one - |line two - |line three - |line four - |line five - |line six - |line seven - |line eight - |line nine - |line ten - |line eleven - |line twelve - |line thirteen - |line fourteen - CONTENT - ) - - sign_in create(:user) - visit dashboard_snippets_path - submit_search('line seven') - - expect(page).to have_content('line seven') - - # 3 lines before the matched line should be visible - expect(page).to have_content('line six') - expect(page).to have_content('line five') - expect(page).to have_content('line four') - expect(page).not_to have_content('line three') - - # 3 lines after the matched line should be visible - expect(page).to have_content('line eight') - expect(page).to have_content('line nine') - expect(page).to have_content('line ten') - expect(page).not_to have_content('line eleven') - end end diff --git a/spec/models/concerns/bulk_insert_safe_spec.rb b/spec/models/concerns/bulk_insert_safe_spec.rb index 4969327132a..0cc355ea694 100644 --- a/spec/models/concerns/bulk_insert_safe_spec.rb +++ b/spec/models/concerns/bulk_insert_safe_spec.rb @@ -5,13 +5,16 @@ require 'spec_helper' describe BulkInsertSafe do class BulkInsertItem < ApplicationRecord include BulkInsertSafe + include ShaAttribute - validates :name, :enum_value, :secret_value, presence: true + validates :name, :enum_value, :secret_value, :sha_value, presence: true ENUM_VALUES = { case_1: 1 }.freeze + sha_attribute :sha_value + enum enum_value: ENUM_VALUES attr_encrypted :secret_value, @@ -44,6 +47,7 @@ describe BulkInsertSafe do t.integer :enum_value, null: false t.text :encrypted_secret_value, null: false t.string :encrypted_secret_value_iv, null: false + t.binary :sha_value, null: false, limit: 20 end end @@ -61,7 +65,8 @@ describe BulkInsertSafe do 
BulkInsertItem.new( name: "item-#{n}", enum_value: 'case_1', - secret_value: "my-secret" + secret_value: 'my-secret', + sha_value: '2fd4e1c67a2d28fced849ee1bb76e7391b93eb12' ) end end @@ -71,7 +76,8 @@ describe BulkInsertSafe do BulkInsertItem.new( name: nil, # requires `name` to be set enum_value: 'case_1', - secret_value: "my-secret" + secret_value: 'my-secret', + sha_value: '2fd4e1c67a2d28fced849ee1bb76e7391b93eb12' ) end end @@ -112,6 +118,16 @@ describe BulkInsertSafe do BulkInsertItem.bulk_insert!(items, batch_size: 5) end + it 'items can be properly fetched from database' do + items = build_valid_items_for_bulk_insertion + + BulkInsertItem.bulk_insert!(items) + + attribute_names = BulkInsertItem.attribute_names - %w[id] + expect(BulkInsertItem.last(items.size).pluck(*attribute_names)).to eq( + items.pluck(*attribute_names)) + end + it 'rolls back the transaction when any item is invalid' do # second batch is bad all_items = build_valid_items_for_bulk_insertion + build_invalid_items_for_bulk_insertion diff --git a/spec/requests/api/project_import_spec.rb b/spec/requests/api/project_import_spec.rb index f4aa8b2e19b..563acd0ece4 100644 --- a/spec/requests/api/project_import_spec.rb +++ b/spec/requests/api/project_import_spec.rb @@ -5,7 +5,6 @@ require 'spec_helper' describe API::ProjectImport do include WorkhorseHelpers - let(:export_path) { "#{Dir.tmpdir}/project_export_spec" } let(:user) { create(:user) } let(:file) { File.join('spec', 'features', 'projects', 'import_export', 'test_project_export.tar.gz') } let(:namespace) { create(:group) } @@ -14,29 +13,39 @@ describe API::ProjectImport do let(:workhorse_headers) { { 'GitLab-Workhorse' => '1.0', Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => workhorse_token } } before do - allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path) - stub_uploads_object_storage(FileUploader) - namespace.add_owner(user) end - after do - FileUtils.rm_rf(export_path, secure: true) - end - describe 'POST /projects/import' do + subject { upload_archive(file_upload, workhorse_headers, params) } + + let(:file_upload) { fixture_file_upload(file) } + + let(:params) do + { + path: 'test-import', + 'file.size' => file_upload.size + } + end + + before do + allow(ImportExportUploader).to receive(:workhorse_upload_path).and_return('/') + end + it 'schedules an import using a namespace' do stub_import(namespace) + params[:namespace] = namespace.id - post api('/projects/import', user), params: { path: 'test-import', file: fixture_file_upload(file), namespace: namespace.id } + subject expect(response).to have_gitlab_http_status(:created) end it 'schedules an import using the namespace path' do stub_import(namespace) + params[:namespace] = namespace.full_path - post api('/projects/import', user), params: { path: 'test-import', file: fixture_file_upload(file), namespace: namespace.full_path } + subject expect(response).to have_gitlab_http_status(:created) end @@ -46,24 +55,30 @@ describe API::ProjectImport do it 'schedules an import using a namespace and a different name' do stub_import(namespace) + params[:name] = expected_name + params[:namespace] = namespace.id - post api('/projects/import', user), params: { path: 'test-import', file: fixture_file_upload(file), namespace: namespace.id, name: expected_name } + subject expect(response).to have_gitlab_http_status(:created) end it 'schedules an import using the namespace path and a different name' do stub_import(namespace) + params[:name] = expected_name + params[:namespace] = 
namespace.full_path - post api('/projects/import', user), params: { path: 'test-import', file: fixture_file_upload(file), namespace: namespace.full_path, name: expected_name } + subject expect(response).to have_gitlab_http_status(:created) end it 'sets name correctly' do stub_import(namespace) + params[:name] = expected_name + params[:namespace] = namespace.full_path - post api('/projects/import', user), params: { path: 'test-import', file: fixture_file_upload(file), namespace: namespace.full_path, name: expected_name } + subject project = Project.find(json_response['id']) expect(project.name).to eq(expected_name) @@ -71,8 +86,11 @@ describe API::ProjectImport do it 'sets name correctly with an overwrite' do stub_import(namespace) + params[:name] = 'new project name' + params[:namespace] = namespace.full_path + params[:overwrite] = true - post api('/projects/import', user), params: { path: 'test-import', file: fixture_file_upload(file), namespace: namespace.full_path, name: 'new project name', overwrite: true } + subject project = Project.find(json_response['id']) expect(project.name).to eq('new project name') @@ -80,8 +98,10 @@ describe API::ProjectImport do it 'schedules an import using the path and name explicitly set to nil' do stub_import(namespace) + params[:name] = nil + params[:namespace] = namespace.full_path - post api('/projects/import', user), params: { path: 'test-import', file: fixture_file_upload(file), namespace: namespace.full_path, name: nil } + subject project = Project.find(json_response['id']) expect(project.name).to eq('test-import') @@ -90,8 +110,9 @@ describe API::ProjectImport do it 'schedules an import at the user namespace level' do stub_import(user.namespace) + params[:path] = 'test-import2' - post api('/projects/import', user), params: { path: 'test-import2', file: fixture_file_upload(file) } + subject expect(response).to have_gitlab_http_status(:created) end @@ -100,7 +121,10 @@ describe API::ProjectImport do expect_any_instance_of(ProjectImportState).not_to receive(:schedule) expect(::Projects::CreateService).not_to receive(:new) - post api('/projects/import', user), params: { namespace: 'nonexistent', path: 'test-import2', file: fixture_file_upload(file) } + params[:namespace] = 'nonexistent' + params[:path] = 'test-import2' + + subject expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq('404 Namespace Not Found') @@ -109,37 +133,40 @@ describe API::ProjectImport do it 'does not schedule an import if the user has no permission to the namespace' do expect_any_instance_of(ProjectImportState).not_to receive(:schedule) - post(api('/projects/import', create(:user)), - params: { - path: 'test-import3', - file: fixture_file_upload(file), - namespace: namespace.full_path - }) + new_namespace = create(:group) + params[:path] = 'test-import3' + params[:namespace] = new_namespace.full_path + + subject expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq('404 Namespace Not Found') end - it 'does not schedule an import if the user uploads no valid file' do - expect_any_instance_of(ProjectImportState).not_to receive(:schedule) + context 'if user uploads no valid file' do + let(:file) { 'README.md' } - post api('/projects/import', user), params: { path: 'test-import3', file: './random/test' } + it 'does not schedule an import if the user uploads no valid file' do + expect_any_instance_of(ProjectImportState).not_to receive(:schedule) + + params[:path] = 'test-import3' + + subject - 
expect(response).to have_gitlab_http_status(:bad_request) - expect(json_response['error']).to eq('file is invalid') + expect(response).to have_gitlab_http_status(:unprocessable_entity) + expect(json_response['message']['error']).to eq('You need to upload a GitLab project export archive (ending in .gz).') + end end it 'stores params that can be overridden' do stub_import(namespace) override_params = { 'description' => 'Hello world' } - post api('/projects/import', user), - params: { - path: 'test-import', - file: fixture_file_upload(file), - namespace: namespace.id, - override_params: override_params - } + params[:namespace] = namespace.id + params[:override_params] = override_params + + subject + import_project = Project.find(json_response['id']) expect(import_project.import_data.data['override_params']).to eq(override_params) @@ -149,33 +176,14 @@ describe API::ProjectImport do stub_import(namespace) override_params = { 'not_allowed' => 'Hello world' } - post api('/projects/import', user), - params: { - path: 'test-import', - file: fixture_file_upload(file), - namespace: namespace.id, - override_params: override_params - } - import_project = Project.find(json_response['id']) - - expect(import_project.import_data.data['override_params']).to be_empty - end + params[:namespace] = namespace.id + params[:override_params] = override_params - it 'correctly overrides params during the import', :sidekiq_might_not_need_inline do - override_params = { 'description' => 'Hello world' } + subject - perform_enqueued_jobs do - post api('/projects/import', user), - params: { - path: 'test-import', - file: fixture_file_upload(file), - namespace: namespace.id, - override_params: override_params - } - end import_project = Project.find(json_response['id']) - expect(import_project.description).to eq('Hello world') + expect(import_project.import_data.data['override_params']).to be_empty end context 'when target path already exists in namespace' do @@ -184,7 +192,9 @@ describe API::ProjectImport do it 'does not schedule an import' do expect_any_instance_of(ProjectImportState).not_to receive(:schedule) - post api('/projects/import', user), params: { path: existing_project.path, file: fixture_file_upload(file) } + params[:path] = existing_project.path + + subject expect(response).to have_gitlab_http_status(:bad_request) expect(json_response['message']).to eq('Name has already been taken') @@ -194,7 +204,10 @@ describe API::ProjectImport do it 'schedules an import' do stub_import(user.namespace) - post api('/projects/import', user), params: { path: existing_project.path, file: fixture_file_upload(file), overwrite: true } + params[:path] = existing_project.path + params[:overwrite] = true + + subject expect(response).to have_gitlab_http_status(:created) end @@ -207,16 +220,16 @@ describe API::ProjectImport do end it 'prevents users from importing projects' do - post api('/projects/import', user), params: { path: 'test-import', file: fixture_file_upload(file), namespace: namespace.id } + params[:namespace] = namespace.id + + subject expect(response).to have_gitlab_http_status(:too_many_requests) expect(json_response['message']['error']).to eq('This endpoint has been requested too many times. 
Try again later.') end end - context 'with direct upload enabled' do - subject { upload_archive(file_upload, workhorse_headers, params) } - + context 'when using remote storage' do let(:file_name) { 'project_export.tar.gz' } let!(:fog_connection) do @@ -232,21 +245,11 @@ describe API::ProjectImport do let(:file_upload) { fog_to_uploaded_file(tmp_object) } - let(:params) do - { - path: 'test-import-project', - namespace: namespace.id, - 'file.remote_id' => file_name, - 'file.size' => file_upload.size - } - end - - before do - allow(ImportExportUploader).to receive(:workhorse_upload_path).and_return('/') - end + it 'schedules an import' do + stub_import(namespace) + params[:namespace] = namespace.id - it 'accepts the request and stores the file' do - expect { subject }.to change { Project.count }.by(1) + subject expect(response).to have_gitlab_http_status(:created) end @@ -257,7 +260,7 @@ describe API::ProjectImport do api("/projects/import", user), method: :post, file_key: :file, - params: params.merge(file: file_upload), + params: params.merge(file: file), headers: headers, send_rewritten_field: true ) @@ -301,6 +304,7 @@ describe API::ProjectImport do expect(response).to have_gitlab_http_status(:ok) expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE) + expect(json_response['TempPath']).to eq(ImportExportUploader.workhorse_local_upload_path) end it 'rejects requests that bypassed gitlab-workhorse' do -- cgit v1.2.1
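As a closing illustration of the stricter upload validation introduced in `lib/api/helpers/file_upload_helpers.rb` above, here is a standalone sketch of the extension check (illustrative only; the real helper reads the list from `ImportExportUploader::EXTENSION_WHITELIST`, whose exact contents are not shown in this patch):

```ruby
# Standalone sketch of the new filename check (not the actual helper):
# accept an upload only when its extension is on a whitelist.
EXTENSION_WHITELIST = %w[gz].freeze # stand-in for ImportExportUploader::EXTENSION_WHITELIST

def file_is_valid?(filename)
  return false if filename.nil?

  # File.extname('project_export.tar.gz') returns '.gz'; drop the dot before comparing.
  EXTENSION_WHITELIST.include?(File.extname(filename).delete('.'))
end

puts file_is_valid?('project_export.tar.gz') # => true
puts file_is_valid?('README.md')             # => false
```

This is also why the updated request spec above expects a 422 response with the new error message when a `README.md` file is uploaded instead of an export archive.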