diff options
author | GitLab Bot <gitlab-bot@gitlab.com> | 2020-04-08 03:09:31 +0000 |
---|---|---|
committer | GitLab Bot <gitlab-bot@gitlab.com> | 2020-04-08 03:09:31 +0000 |
commit | e2ee1eec50aa8df8543d7ecc585ec0ba5ee544ac (patch) | |
tree | 7998650d27ada12ee7d06a21cbb3b5e89f298378 | |
parent | 060c842402c00f830a810702600cbe39dfa6cf62 (diff) | |
download | gitlab-ce-e2ee1eec50aa8df8543d7ecc585ec0ba5ee544ac.tar.gz |
Add latest changes from gitlab-org/gitlab@master
87 files changed, 1876 insertions, 1507 deletions
diff --git a/app/graphql/gitlab_schema.rb b/app/graphql/gitlab_schema.rb index b81996cf378..3c996978b6d 100644 --- a/app/graphql/gitlab_schema.rb +++ b/app/graphql/gitlab_schema.rb @@ -10,11 +10,12 @@ class GitlabSchema < GraphQL::Schema DEFAULT_MAX_DEPTH = 15 AUTHENTICATED_MAX_DEPTH = 20 + use GraphQL::Pagination::Connections use BatchLoader::GraphQL use Gitlab::Graphql::Authorize use Gitlab::Graphql::Present use Gitlab::Graphql::CallsGitaly - use Gitlab::Graphql::Connections + use Gitlab::Graphql::Pagination::Connections use Gitlab::Graphql::GenericTracing use Gitlab::Graphql::Timeout, max_seconds: Gitlab.config.gitlab.graphql_timeout diff --git a/changelogs/unreleased/drop-unused-indexes.yml b/changelogs/unreleased/drop-unused-indexes.yml new file mode 100644 index 00000000000..2ba426cc53b --- /dev/null +++ b/changelogs/unreleased/drop-unused-indexes.yml @@ -0,0 +1,5 @@ +--- +title: Remove unused index for vulnerability severity levels +merge_request: 29023 +author: +type: other diff --git a/db/post_migrate/20200407094005_drop_vulnerability_severity_index.rb b/db/post_migrate/20200407094005_drop_vulnerability_severity_index.rb new file mode 100644 index 00000000000..14e6ce32097 --- /dev/null +++ b/db/post_migrate/20200407094005_drop_vulnerability_severity_index.rb @@ -0,0 +1,18 @@ +# frozen_string_literal: true + +class DropVulnerabilitySeverityIndex < ActiveRecord::Migration[6.0] + include Gitlab::Database::MigrationHelpers + + disable_ddl_transaction! 
+ + DOWNTIME = false + INDEX_NAME = 'undefined_vulnerabilities' + + def up + remove_concurrent_index_by_name :vulnerability_occurrences, INDEX_NAME + end + + def down + add_concurrent_index(:vulnerability_occurrences, :id, where: 'severity = 0', name: INDEX_NAME) + end +end diff --git a/db/post_migrate/20200407094923_drop_vulnerabilities_severity_index.rb b/db/post_migrate/20200407094923_drop_vulnerabilities_severity_index.rb new file mode 100644 index 00000000000..90ec7dc7ec2 --- /dev/null +++ b/db/post_migrate/20200407094923_drop_vulnerabilities_severity_index.rb @@ -0,0 +1,18 @@ +# frozen_string_literal: true + +class DropVulnerabilitiesSeverityIndex < ActiveRecord::Migration[6.0] + include Gitlab::Database::MigrationHelpers + + disable_ddl_transaction! + + DOWNTIME = false + INDEX_NAME = 'undefined_vulnerability' + + def up + remove_concurrent_index_by_name :vulnerabilities, INDEX_NAME + end + + def down + add_concurrent_index(:vulnerabilities, :id, where: 'severity = 0', name: INDEX_NAME) + end +end diff --git a/db/structure.sql b/db/structure.sql index 9c99d11d8a8..6f3c271a0db 100644 --- a/db/structure.sql +++ b/db/structure.sql @@ -10391,10 +10391,6 @@ CREATE INDEX tmp_build_stage_position_index ON public.ci_builds USING btree (sta CREATE INDEX tmp_idx_on_user_id_where_bio_is_filled ON public.users USING btree (id) WHERE ((COALESCE(bio, ''::character varying))::text IS DISTINCT FROM ''::text); -CREATE INDEX undefined_vulnerabilities ON public.vulnerability_occurrences USING btree (id) WHERE (severity = 0); - -CREATE INDEX undefined_vulnerability ON public.vulnerabilities USING btree (id) WHERE (severity = 0); - CREATE UNIQUE INDEX users_security_dashboard_projects_unique_index ON public.users_security_dashboard_projects USING btree (project_id, user_id); CREATE UNIQUE INDEX vulnerability_feedback_unique_idx ON public.vulnerability_feedback USING btree (project_id, category, feedback_type, project_fingerprint); @@ -13012,5 +13008,7 @@ COPY "schema_migrations" 
(version) FROM STDIN; 20200403184110 20200403185127 20200403185422 +20200407094005 +20200407094923 \. diff --git a/doc/.vale/gitlab/Substitutions.yml b/doc/.vale/gitlab/Substitutions.yml index dd14bdd9b96..44b96d1a5e3 100644 --- a/doc/.vale/gitlab/Substitutions.yml +++ b/doc/.vale/gitlab/Substitutions.yml @@ -14,4 +14,3 @@ swap: postgres: PostgreSQL self hosted: self-managed self-hosted: self-managed - diff --git a/doc/administration/auth/how_to_configure_ldap_gitlab_ee/index.md b/doc/administration/auth/how_to_configure_ldap_gitlab_ee/index.md index 3d790fa659e..46bc079971d 100644 --- a/doc/administration/auth/how_to_configure_ldap_gitlab_ee/index.md +++ b/doc/administration/auth/how_to_configure_ldap_gitlab_ee/index.md @@ -44,7 +44,7 @@ _The reporter permission allows support staff to manage issues, labels, and revi > Guest permissions allows people ops staff to review and lodge new issues while allowing no read or write access to project code or [confidential issues](../../../user/project/issues/confidential_issues.md#permissions-and-access-to-confidential-issues) created by other users. -See the [permission list](../../../user/permissions.md) for complementary info. +See the [permission list](../../../user/permissions.md) for complementary information. #### Group permissions - example diff --git a/doc/administration/high_availability/README.md b/doc/administration/high_availability/README.md index 4734df324e0..71d483128b9 100644 --- a/doc/administration/high_availability/README.md +++ b/doc/administration/high_availability/README.md @@ -273,7 +273,7 @@ On different cloud vendors a best effort like for like can be used. [^8]: The architectures were built and tested with the [Intel Xeon E5 v3 (Haswell)](https://cloud.google.com/compute/docs/cpu-platforms) CPU platform on GCP. On different hardware you may find that adjustments, either lower - or higher, are required for your CPU or Node counts accordingly. 
For more info a + or higher, are required for your CPU or Node counts accordingly. For more information, a [Sysbench](https://github.com/akopytov/sysbench) benchmark of the CPU can be found [here](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Reference-Architectures/GCP-CPU-Benchmarks). diff --git a/doc/administration/troubleshooting/diagnostics_tools.md b/doc/administration/troubleshooting/diagnostics_tools.md index 97b367dc353..5912981ba6e 100644 --- a/doc/administration/troubleshooting/diagnostics_tools.md +++ b/doc/administration/troubleshooting/diagnostics_tools.md @@ -13,7 +13,7 @@ before attempting to use these tools. ## gitlabsos The [gitlabsos](https://gitlab.com/gitlab-com/support/toolbox/gitlabsos/) utility -provides a unified method of gathering info and logs from GitLab and the system it's +provides a unified method of gathering information and logs from GitLab and the system it's running on. ## strace-parser diff --git a/doc/administration/troubleshooting/gitlab_rails_cheat_sheet.md b/doc/administration/troubleshooting/gitlab_rails_cheat_sheet.md index 0471c7b7ffd..ba2224e3fc7 100644 --- a/doc/administration/troubleshooting/gitlab_rails_cheat_sheet.md +++ b/doc/administration/troubleshooting/gitlab_rails_cheat_sheet.md @@ -751,7 +751,7 @@ This content has been moved to the [Troubleshooting Sidekiq docs](./sidekiq.md). ## LFS -### Get info about LFS objects and associated project +### Get information about LFS objects and associated project ```ruby o=LfsObject.find_by(oid: "<oid>") diff --git a/doc/administration/troubleshooting/kubernetes_cheat_sheet.md b/doc/administration/troubleshooting/kubernetes_cheat_sheet.md index 0a4e65add4f..30ef3da3a99 100644 --- a/doc/administration/troubleshooting/kubernetes_cheat_sheet.md +++ b/doc/administration/troubleshooting/kubernetes_cheat_sheet.md @@ -168,7 +168,7 @@ and they will assist you with any issues you are having. 
kubectl exec -it <task-runner-pod-name> -- /srv/gitlab/bin/rails dbconsole -p ``` -- How to get info about Helm installation status: +- How to get information about Helm installation status: ```shell helm status name-of-installation @@ -199,7 +199,7 @@ and they will assist you with any issues you are having. helm upgrade <release name> <chart path> -f gitlab.yaml ``` -- How to get the manifest for a release. It can be useful because it contains the info about +- How to get the manifest for a release. It can be useful because it contains the information about all Kubernetes resources and dependent charts: ```shell diff --git a/doc/api/graphql/index.md b/doc/api/graphql/index.md index dec2dfea3cf..dc691185d28 100644 --- a/doc/api/graphql/index.md +++ b/doc/api/graphql/index.md @@ -72,7 +72,7 @@ Root-level queries are defined in GitLab supports batching queries into a single request using [apollo-link-batch-http](https://www.apollographql.com/docs/link/links/batch-http/). More -info about multiplexed queries is also available for +information about multiplexed queries is also available for [graphql-ruby](https://graphql-ruby.org/queries/multiplex.html) the library GitLab uses on the backend. diff --git a/doc/api/oauth2.md b/doc/api/oauth2.md index 1cbff10b122..959773b217d 100644 --- a/doc/api/oauth2.md +++ b/doc/api/oauth2.md @@ -102,7 +102,7 @@ CAUTION: **Important:** Avoid using this flow for applications that store data outside of the GitLab instance. If you do, make sure to verify `application id` associated with the access token before granting access to the data -(see [`/oauth/token/info`](#retrieving-the-token-info)). +(see [`/oauth/token/info`](#retrieving-the-token-information)). Unlike the web flow, the client receives an `access token` immediately as a result of the authorization request. 
The flow does not use the client secret @@ -213,7 +213,7 @@ or you can put the token to the Authorization header: curl --header "Authorization: Bearer OAUTH-TOKEN" https://gitlab.example.com/api/v4/user ``` -## Retrieving the Token Info +## Retrieving the token information To verify the details of a token, use the `token/info` endpoint provided by the Doorkeeper gem. For more information, see [`/oauth/token/info`](https://github.com/doorkeeper-gem/doorkeeper/wiki/API-endpoint-descriptions-and-examples#get----oauthtokeninfo). diff --git a/doc/api/repository_files.md b/doc/api/repository_files.md index 845a1187a94..e6425c3fe17 100644 --- a/doc/api/repository_files.md +++ b/doc/api/repository_files.md @@ -82,7 +82,7 @@ X-Gitlab-Size: 1476 ## Get file blame from repository -Allows you to receive blame information. Each blame range contains lines and corresponding commit info. +Allows you to receive blame information. Each blame range contains lines and corresponding commit information. ```plaintext GET /projects/:id/repository/files/:file_path/blame diff --git a/doc/api/v3_to_v4.md b/doc/api/v3_to_v4.md index 07fa7d6d420..76e5dd9abb6 100644 --- a/doc/api/v3_to_v4.md +++ b/doc/api/v3_to_v4.md @@ -67,7 +67,7 @@ Below are the changes made between V3 and V4. - Remove `GET /groups/owned`. 
Use `GET /groups?owned=true` instead [!9505](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/9505) - Return 202 with JSON body on async removals on V4 API (`DELETE /projects/:id/repository/merged_branches` and `DELETE /projects/:id`) [!9449](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/9449) - `GET /projects/:id/milestones?iid[]=x&iid[]=y` array filter has been renamed to `iids` [!9096](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/9096) -- Return basic info about pipeline in `GET /projects/:id/pipelines` [!8875](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/8875) +- Return basic information about pipeline in `GET /projects/:id/pipelines` [!8875](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/8875) - Renamed all `build` references to `job` [!9463](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/9463) - Drop `GET /projects/:id/repository/commits/:sha/jobs` [!9463](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/9463) - Rename Build Triggers to be Pipeline Triggers API [!9713](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/9713) diff --git a/doc/ci/docker/using_docker_images.md b/doc/ci/docker/using_docker_images.md index b371a74bd71..90e5c77063e 100644 --- a/doc/ci/docker/using_docker_images.md +++ b/doc/ci/docker/using_docker_images.md @@ -532,7 +532,7 @@ runtime. There are two approaches that you can take in order to access a private registry. Both require setting the environment variable -`DOCKER_AUTH_CONFIG` with appropriate authentication info. +`DOCKER_AUTH_CONFIG` with appropriate authentication information. 1. Per-job: To configure one job to access a private registry, add `DOCKER_AUTH_CONFIG` as a job variable. 
diff --git a/doc/ci/variables/README.md b/doc/ci/variables/README.md index 81b9cd8bece..afa2e247679 100644 --- a/doc/ci/variables/README.md +++ b/doc/ci/variables/README.md @@ -453,7 +453,7 @@ pass CI variables to the running application by prefixing the key of the variable with `K8S_SECRET_`. These [prefixed -variables](../../topics/autodevops/index.md#application-secret-variables) will +variables](../../topics/autodevops/customize.md#application-secret-variables) will then be available as environment variables on the running application container. diff --git a/doc/development/README.md b/doc/development/README.md index e55989e312a..0620c96ba21 100644 --- a/doc/development/README.md +++ b/doc/development/README.md @@ -222,3 +222,4 @@ Complementary reads: - [Run full Auto DevOps cycle in a GDK instance](https://gitlab.com/gitlab-org/gitlab-development-kit/blob/master/doc/howto/auto_devops.md) - [Using GitLab Runner with GDK](https://gitlab.com/gitlab-org/gitlab-development-kit/blob/master/doc/howto/runner.md) +- [Using the Web IDE terminal with GDK](https://gitlab.com/gitlab-org/gitlab-development-kit/-/blob/master/doc/howto/web_ide_terminal_gdk_setup.md) diff --git a/doc/development/api_graphql_styleguide.md b/doc/development/api_graphql_styleguide.md index 7fd158aec04..6d224db1a02 100644 --- a/doc/development/api_graphql_styleguide.md +++ b/doc/development/api_graphql_styleguide.md @@ -150,7 +150,7 @@ query($project_path: ID!) { ``` This would return the first 2 pipelines of a project and related -pagination info., ordered by descending ID. The returned data would +pagination information, ordered by descending ID. 
The returned data would look like this: ```json diff --git a/doc/development/documentation/site_architecture/global_nav.md b/doc/development/documentation/site_architecture/global_nav.md index ae822e91360..12190e2cb9e 100644 --- a/doc/development/documentation/site_architecture/global_nav.md +++ b/doc/development/documentation/site_architecture/global_nav.md @@ -272,7 +272,7 @@ and the following syntax rules. - For `index.html` files, use the clean (canonical) URL: `path/to/`. - For EE-only docs, use the same relative path, but add the attribute `ee_only: true` below the `doc_url` or `category_url`, as explained above. This displays - an "info" icon on the nav to make the user aware that the feature is + an "information" icon on the nav to make the user aware that the feature is EE-only. DANGER: **Important!** diff --git a/doc/development/documentation/styleguide.md b/doc/development/documentation/styleguide.md index 57c6105372f..deb86569879 100644 --- a/doc/development/documentation/styleguide.md +++ b/doc/development/documentation/styleguide.md @@ -583,7 +583,7 @@ For other punctuation rules, please refer to the - [Avoid using symbols and special chars](https://gitlab.com/gitlab-org/gitlab-docs/issues/84) in headers. Whenever possible, they should be plain and short text. - Avoid adding things that show ephemeral statuses. For example, if a feature is - considered beta or experimental, put this info in a note, not in the heading. + considered beta or experimental, put this information in a note, not in the heading. - When introducing a new document, be careful for the headings to be grammatically and syntactically correct. Mention an [assigned technical writer (TW)](https://about.gitlab.com/handbook/product/categories/) for review. diff --git a/doc/development/emails.md b/doc/development/emails.md index 133434523ec..a84895eef5b 100644 --- a/doc/development/emails.md +++ b/doc/development/emails.md @@ -17,7 +17,7 @@ dummy data. 
The previews live in [`app/mailers/previews`][previews] and can be viewed at [`/rails/mailers`](http://localhost:3000/rails/mailers). -See the [Rails guides](https://guides.rubyonrails.org/action_mailer_basics.html#previewing-emails) for more info. +See the [Rails guides](https://guides.rubyonrails.org/action_mailer_basics.html#previewing-emails) for more information. [previews]: https://gitlab.com/gitlab-org/gitlab-foss/tree/master/app/mailers/previews diff --git a/doc/development/fe_guide/style/scss.md b/doc/development/fe_guide/style/scss.md index 83dd7990951..6c0247ad00c 100644 --- a/doc/development/fe_guide/style/scss.md +++ b/doc/development/fe_guide/style/scss.md @@ -264,7 +264,7 @@ Note that this won't fix every problem, but it should fix a majority. ### Ignoring issues If you want a line or set of lines to be ignored by the linter, you can use -`// scss-lint:disable RuleName` ([more info](https://github.com/sds/scss-lint#disabling-linters-via-source)): +`// scss-lint:disable RuleName` ([more information](https://github.com/sds/scss-lint#disabling-linters-via-source)): ```scss // This lint rule is disabled because it is supported only in Chrome/Safari diff --git a/doc/development/feature_flags/controls.md b/doc/development/feature_flags/controls.md index f17ac55a809..21d6d48d446 100644 --- a/doc/development/feature_flags/controls.md +++ b/doc/development/feature_flags/controls.md @@ -98,6 +98,7 @@ Slack: This will enable the feature for GitLab.com, with `new_navigation_bar` being the name of the feature. +This command does *not* enable the feature for 25% of the total users. Instead, when the feature is checked with `enabled?`, it will return `true` 25% of the time. If you are not certain what percentages to use, simply use the following steps: @@ -136,9 +137,8 @@ you run these 2 commands: /chatops run feature set some_feature 25 ``` -Then `some_feature` will be enabled for 25% of the time the users are interacting with -`gitlab-org/gitlab`. 
Note that the the feature is not enabled to 25% -of the users, rather a simple randomization is made each time the `enabled?` is checked. +Then `some_feature` will be enabled for both 25% of users and all users interacting with +`gitlab-org/gitlab`. NOTE: **Note:** **Percentage of time** rollout is not a good idea if what you want is to make sure a feature diff --git a/doc/development/integrations/secure.md b/doc/development/integrations/secure.md index 8d5944c5de5..30d6c2ae754 100644 --- a/doc/development/integrations/secure.md +++ b/doc/development/integrations/secure.md @@ -80,7 +80,7 @@ See [GitLab CI/CD predefined variables](../../ci/variables/predefined_variables. Also, scanning jobs should be skipped when the corresponding variable prefixed with `_DISABLED` is present. See `DEPENDENCY_SCANNING_DISABLED`, `CONTAINER_SCANNING_DISABLED`, `SAST_DISABLED`, and `DAST_DISABLED` -in [Auto DevOps documentation](../../topics/autodevops/index.md#disable-jobs). +in [Auto DevOps documentation](../../topics/autodevops/customize.md#disable-jobs). Finally, SAST and Dependency Scanning job definitions should use `CI_PROJECT_REPOSITORY_LANGUAGES` (comma-separated list of values) diff --git a/doc/integration/saml.md b/doc/integration/saml.md index 47ffe333c0f..5cb57baf353 100644 --- a/doc/integration/saml.md +++ b/doc/integration/saml.md @@ -370,14 +370,14 @@ You may also bypass the auto signin feature by browsing to >**Note:** This setting is only available on GitLab 8.6 and above. This setting should only be used to map attributes that are part of the -OmniAuth info hash schema. +OmniAuth `info` hash schema. `attribute_statements` is used to map Attribute Names in a SAMLResponse to entries -in the OmniAuth [info hash](https://github.com/omniauth/omniauth/wiki/Auth-Hash-Schema#schema-10-and-later). +in the OmniAuth [`info` hash](https://github.com/omniauth/omniauth/wiki/Auth-Hash-Schema#schema-10-and-later). 
For example, if your SAMLResponse contains an Attribute called 'EmailAddress', specify `{ email: ['EmailAddress'] }` to map the Attribute to the -corresponding key in the info hash. URI-named Attributes are also supported, e.g. +corresponding key in the `info` hash. URI-named Attributes are also supported, e.g. `{ email: ['http://schemas.xmlsoap.org/ws/2005/05/identity/claims/emailaddress'] }`. This setting allows you tell GitLab where to look for certain attributes required diff --git a/doc/integration/shibboleth.md b/doc/integration/shibboleth.md index 8c456e7d3d1..a4a71c655a2 100644 --- a/doc/integration/shibboleth.md +++ b/doc/integration/shibboleth.md @@ -8,7 +8,7 @@ In order to enable Shibboleth support in GitLab we need to use Apache instead of To enable the Shibboleth OmniAuth provider you must configure Apache Shibboleth module. The installation and configuration of the module itself is out of the scope of this document. -Check <https://wiki.shibboleth.net/confluence/display/SP3/Apache> for more info. +Check <https://wiki.shibboleth.net/confluence/display/SP3/Apache> for more information. You can find Apache config in [GitLab Recipes](https://gitlab.com/gitlab-org/gitlab-recipes/tree/master/web-server/apache). diff --git a/doc/security/crime_vulnerability.md b/doc/security/crime_vulnerability.md index 23ee60ab930..93edbc69eb0 100644 --- a/doc/security/crime_vulnerability.md +++ b/doc/security/crime_vulnerability.md @@ -4,12 +4,11 @@ type: reference # How we manage the TLS protocol CRIME vulnerability -> CRIME ("Compression Ratio Info-leak Made Easy") is a security exploit against +[CRIME](https://en.wikipedia.org/w/index.php?title=CRIME&oldid=692423806) is a security exploit against secret web cookies over connections using the HTTPS and SPDY protocols that also use data compression. 
When used to recover the content of secret authentication cookies, it allows an attacker to perform session hijacking on an authenticated web session, allowing the launching of further attacks. -([CRIME](https://en.wikipedia.org/w/index.php?title=CRIME&oldid=692423806)) ## Description diff --git a/doc/topics/application_development_platform/index.md b/doc/topics/application_development_platform/index.md index df585aae7e8..06cae63cbe3 100644 --- a/doc/topics/application_development_platform/index.md +++ b/doc/topics/application_development_platform/index.md @@ -37,7 +37,7 @@ with various cloud providers. In order to provide modern DevOps workflows, our Application Development Platform will rely on [Auto DevOps](../autodevops/index.md) to provide those workflows. Auto DevOps works with any Kubernetes cluster; you're not limited to running on GitLab's infrastructure. Additionally, Auto DevOps offers -an incremental consumption path. Because it is [composable](../autodevops/index.md#using-components-of-auto-devops), +an incremental consumption path. Because it is [composable](../autodevops/customize.md#using-components-of-auto-devops), you can use as much or as little of the default pipeline as you'd like, and deeply customize without having to integrate a completely different platform. ### Security diff --git a/doc/topics/autodevops/customize.md b/doc/topics/autodevops/customize.md new file mode 100644 index 00000000000..45f28af2192 --- /dev/null +++ b/doc/topics/autodevops/customize.md @@ -0,0 +1,600 @@ +# Customizing Auto DevOps + +While Auto DevOps provides great defaults to get you started, you can customize +almost everything to fit your needs; from custom [buildpacks](#custom-buildpacks), +to [`Dockerfile`s](#custom-dockerfile), [Helm charts](#custom-helm-chart), or +even copying the complete [CI/CD configuration](#customizing-gitlab-ciyml) +into your project to enable staging and canary deployments, and more. 
+ +## Custom buildpacks + +If the automatic buildpack detection fails for your project, or if you want to +use a custom buildpack, you can override the buildpack(s) using a project variable +or a `.buildpacks` file in your project: + +- **Project variable** - Create a project variable `BUILDPACK_URL` with the URL + of the buildpack to use. +- **`.buildpacks` file** - Add a file in your project's repo called `.buildpacks` + and add the URL of the buildpack to use on a line in the file. If you want to + use multiple buildpacks, you can enter them in, one on each line. + +### Multiple buildpacks + +Using multiple buildpacks isn't fully supported by Auto DevOps because, when using the `.buildpacks` +file, Auto Test will not work. + +The buildpack [heroku-buildpack-multi](https://github.com/heroku/heroku-buildpack-multi/), +which is used under the hood to parse the `.buildpacks` file, doesn't provide the necessary commands +`bin/test-compile` and `bin/test`. + +If your goal is to use only a single custom buildpack, you should provide the project variable +`BUILDPACK_URL` instead. + +## Custom `Dockerfile` + +If your project has a `Dockerfile` in the root of the project repo, Auto DevOps +will build a Docker image based on the Dockerfile rather than using buildpacks. +This can be much faster and result in smaller images, especially if your +Dockerfile is based on [Alpine](https://hub.docker.com/_/alpine/). + +## Passing arguments to `docker build` + +Arguments can be passed to the `docker build` command using the +`AUTO_DEVOPS_BUILD_IMAGE_EXTRA_ARGS` project variable. + +For example, to build a Docker image based on based on the `ruby:alpine` +instead of the default `ruby:latest`: + +1. Set `AUTO_DEVOPS_BUILD_IMAGE_EXTRA_ARGS` to `--build-arg=RUBY_VERSION=alpine`. +1. Add the following to a custom `Dockerfile`: + + ```dockerfile + ARG RUBY_VERSION=latest + FROM ruby:$RUBY_VERSION + + # ... 
put your stuff here + ``` + +NOTE: **Note:** +Passing in complex values (newlines and spaces, for example) will likely +cause escaping issues due to the way this argument is used in Auto DevOps. +Consider using Base64 encoding of such values to avoid this problem. + +CAUTION: **Warning:** +Avoid passing secrets as Docker build arguments if possible, as they may be +persisted in your image. See +[this discussion](https://github.com/moby/moby/issues/13490) for details. + +## Passing secrets to `docker build` + +> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/25514) in GitLab 12.3, but available in versions 11.9 and above. + +CI environment variables can be passed as [build +secrets](https://docs.docker.com/develop/develop-images/build_enhancements/#new-docker-build-secret-information) to the `docker build` command by listing them comma separated by name in the +`AUTO_DEVOPS_BUILD_IMAGE_FORWARDED_CI_VARIABLES` variable. For example, in order to forward the variables `CI_COMMIT_SHA` and `CI_ENVIRONMENT_NAME`, one would set `AUTO_DEVOPS_BUILD_IMAGE_FORWARDED_CI_VARIABLES` to `CI_COMMIT_SHA,CI_ENVIRONMENT_NAME`. + +Unlike build arguments, these are not persisted by Docker in the final image +(though you can still persist them yourself, so **be careful**). + +In projects: + +- Without a `Dockerfile`, these are available automatically as environment + variables. +- With a `Dockerfile`, the following is required: + + 1. Activate the experimental `Dockerfile` syntax by adding the following + to the top of the file: + + ```dockerfile + # syntax = docker/dockerfile:experimental + ``` + + 1. To make secrets available in any `RUN $COMMAND` in the `Dockerfile`, mount + the secret file and source it prior to running `$COMMAND`: + + ```dockerfile + RUN --mount=type=secret,id=auto-devops-build-secrets . 
/run/secrets/auto-devops-build-secrets && $COMMAND + ``` + +NOTE: **Note:** +When `AUTO_DEVOPS_BUILD_IMAGE_FORWARDED_CI_VARIABLES` is set, Auto DevOps +enables the experimental [Docker BuildKit](https://docs.docker.com/develop/develop-images/build_enhancements/) +feature to use the `--secret` flag. + +## Custom Helm Chart + +Auto DevOps uses [Helm](https://helm.sh/) to deploy your application to Kubernetes. +You can override the Helm chart used by bundling up a chart into your project +repo or by specifying a project variable: + +- **Bundled chart** - If your project has a `./chart` directory with a `Chart.yaml` + file in it, Auto DevOps will detect the chart and use it instead of the [default + one](https://gitlab.com/gitlab-org/charts/auto-deploy-app). + This can be a great way to control exactly how your application is deployed. +- **Project variable** - Create a [project variable](../../ci/variables/README.md#gitlab-cicd-environment-variables) + `AUTO_DEVOPS_CHART` with the URL of a custom chart to use or create two project variables `AUTO_DEVOPS_CHART_REPOSITORY` with the URL of a custom chart repository and `AUTO_DEVOPS_CHART` with the path to the chart. + +## Customize values for Helm Chart + +> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/30628) in GitLab 12.6, `.gitlab/auto-deploy-values.yaml` will be used by default for Helm upgrades. + +You can override the default values in the `values.yaml` file in the [default Helm chart](https://gitlab.com/gitlab-org/charts/auto-deploy-app). +This can be achieved by either: + +- Adding a file named `.gitlab/auto-deploy-values.yaml` to your repository. It will + be automatically used if found. +- Adding a file with a different name or path to the repository, and set the + `HELM_UPGRADE_VALUES_FILE` [environment variable](#environment-variables) with the path and name. 
+ +NOTE: **Note:** +For GitLab 12.5 and earlier, the `HELM_UPGRADE_EXTRA_ARGS` environment variable can be used to override the default chart values. +To do so, set `HELM_UPGRADE_EXTRA_ARGS` to `--values my-values.yaml`. + +## Custom Helm chart per environment + +You can specify the use of a custom Helm chart per environment by scoping the environment variable +to the desired environment. See [Limiting environment scopes of variables](../../ci/variables/README.md#limiting-environment-scopes-of-environment-variables). + +## Customizing `.gitlab-ci.yml` + +Auto DevOps is completely customizable because the [Auto DevOps template](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml): + +- Is just an implementation of a [`.gitlab-ci.yml`](../../ci/yaml/README.md) file. +- Uses only features available to any implementation of `.gitlab-ci.yml`. + +If you want to modify the CI/CD pipeline used by Auto DevOps, you can [`include` +the template](../../ci/yaml/README.md#includetemplate) and customize as +needed. To do this, add a `.gitlab-ci.yml` file to the root of your repository +containing the following: + +```yml +include: + - template: Auto-DevOps.gitlab-ci.yml +``` + +Then add any extra changes you want. Your additions will be merged with the +[Auto DevOps template](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml) using the behaviour described for +[`include`](../../ci/yaml/README.md#include). + +It is also possible to copy and paste the contents of the [Auto DevOps template](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml) +into your project and edit this as needed. You may prefer to do it +that way if you want to specifically remove any part of it. + +## Customizing the Kubernetes namespace + +> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/27630) in GitLab 12.6. 
+ +For **non**-GitLab-managed clusters, the namespace can be customized using +`.gitlab-ci.yml` by specifying +[`environment:kubernetes:namespace`](../../ci/environments.md#configuring-kubernetes-deployments). +For example, the following configuration overrides the namespace used for +`production` deployments: + +```yaml +include: + - template: Auto-DevOps.gitlab-ci.yml + +production: + environment: + kubernetes: + namespace: production +``` + +When deploying to a custom namespace with Auto DevOps, the service account +provided with the cluster needs at least the `edit` role within the namespace. + +- If the service account can create namespaces, then the namespace can be created on-demand. +- Otherwise, the namespace must exist prior to deployment. + +## Using components of Auto DevOps + +If you only require a subset of the features offered by Auto DevOps, you can include +individual Auto DevOps jobs into your own `.gitlab-ci.yml`. Each component job relies +on a stage that should be defined in the `.gitlab-ci.yml` that includes the template. + +For example, to make use of [Auto Build](stages.md#auto-build), you can add the following to +your `.gitlab-ci.yml`: + +```yaml +stages: + - build + +include: + - template: Jobs/Build.gitlab-ci.yml +``` + +Consult the [Auto DevOps template](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml) for information on available jobs. + +## PostgreSQL database support + +In order to support applications that require a database, +[PostgreSQL](https://www.postgresql.org/) is provisioned by default. The credentials to access +the database are preconfigured, but can be customized by setting the associated +[variables](#environment-variables). 
These credentials can be used for defining a +`DATABASE_URL` of the format: + +```yaml +postgres://user:password@postgres-host:postgres-port/postgres-database +``` + +### Upgrading PostgresSQL + +CAUTION: **Deprecation** +The variable `AUTO_DEVOPS_POSTGRES_CHANNEL` that controls default provisioned +PostgreSQL currently defaults to `1`. This is scheduled to change to `2` in +[GitLab 13.0](https://gitlab.com/gitlab-org/gitlab/-/issues/210499). + +The version of the chart used to provision PostgreSQL: + +- Is 0.7.1 in GitLab 12.8 and earlier. +- Can be set to from 0.7.1 to 8.2.1 in GitLab 12.9 and later. + +GitLab encourages users to [migrate their database](upgrading_postgresql.md) +to the newer PostgreSQL. + +To use the new PostgreSQL: + +- New projects can set the `AUTO_DEVOPS_POSTGRES_CHANNEL` variable to `2`. +- Old projects can be upgraded by following the guide to + [upgrading PostgresSQL](upgrading_postgresql.md). + +### Using external PostgreSQL database providers + +While Auto DevOps provides out-of-the-box support for a PostgreSQL container for +production environments, for some use-cases it may not be sufficiently secure or +resilient and you may wish to use an external managed provider for PostgreSQL. +For example, AWS Relational Database Service. + +You will need to define environment-scoped variables for `POSTGRES_ENABLED` and `DATABASE_URL` in your project's CI/CD settings. + +To achieve this: + +1. Disable the built-in PostgreSQL installation for the required environments using + scoped [environment variables](../../ci/environments.md#scoping-environments-with-specs). + For this use case, it's likely that only `production` will need to be added to this + list as the builtin PostgreSQL setup for Review Apps and staging will be sufficient + as a high availability setup is not required. + + ![Auto Metrics](img/disable_postgres.png) + +1. Define the `DATABASE_URL` CI variable as a scoped environment variable that will be + available to your application. 
This should be a URL in the following format: + + ```yaml + postgres://user:password@postgres-host:postgres-port/postgres-database + ``` + +You will need to ensure that your Kubernetes cluster has network access to wherever +PostgreSQL is hosted. + +## Environment variables + +The following variables can be used for setting up the Auto DevOps domain, +providing a custom Helm chart, or scaling your application. PostgreSQL can +also be customized, and you can easily use a [custom buildpack](#custom-buildpacks). + +### Build and deployment + +The following table lists variables related to building and deploying +applications. + +| **Variable** | **Description** | +|-----------------------------------------|------------------------------------| +| `ADDITIONAL_HOSTS` | Fully qualified domain names specified as a comma-separated list that are added to the Ingress hosts. | +| `<ENVIRONMENT>_ADDITIONAL_HOSTS` | For a specific environment, the fully qualified domain names specified as a comma-separated list that are added to the Ingress hosts. This takes precedence over `ADDITIONAL_HOSTS`. | +| `AUTO_DEVOPS_BUILD_IMAGE_CNB_ENABLED` | When set to a non-empty value and no `Dockerfile` is present, Auto Build builds your application using Cloud Native Buildpacks instead of Herokuish. [More details](stages.md#auto-build-using-cloud-native-buildpacks-beta). | +| `AUTO_DEVOPS_BUILD_IMAGE_EXTRA_ARGS` | Extra arguments to be passed to the `docker build` command. Note that using quotes will not prevent word splitting. [More details](#passing-arguments-to-docker-build). | +| `AUTO_DEVOPS_BUILD_IMAGE_FORWARDED_CI_VARIABLES` | A [comma-separated list of CI variable names](#passing-secrets-to-docker-build) to be passed to the `docker build` command as secrets. | +| `AUTO_DEVOPS_CHART` | Helm Chart used to deploy your apps. Defaults to the one [provided by GitLab](https://gitlab.com/gitlab-org/charts/auto-deploy-app). 
| +| `AUTO_DEVOPS_CHART_REPOSITORY` | Helm Chart repository used to search for charts. Defaults to `https://charts.gitlab.io`. | +| `AUTO_DEVOPS_CHART_REPOSITORY_NAME` | From GitLab 11.11, used to set the name of the Helm repository. Defaults to `gitlab`. | +| `AUTO_DEVOPS_CHART_REPOSITORY_USERNAME` | From GitLab 11.11, used to set a username to connect to the Helm repository. Defaults to no credentials. Also set `AUTO_DEVOPS_CHART_REPOSITORY_PASSWORD`. | +| `AUTO_DEVOPS_CHART_REPOSITORY_PASSWORD` | From GitLab 11.11, used to set a password to connect to the Helm repository. Defaults to no credentials. Also set `AUTO_DEVOPS_CHART_REPOSITORY_USERNAME`. | +| `AUTO_DEVOPS_MODSECURITY_SEC_RULE_ENGINE` | From GitLab 12.5, used in combination with [Modsecurity feature flag](../../user/clusters/applications.md#web-application-firewall-modsecurity) to toggle [Modsecurity's `SecRuleEngine`](https://github.com/SpiderLabs/ModSecurity/wiki/Reference-Manual-(v2.x)#SecRuleEngine) behavior. Defaults to `DetectionOnly`. | +| `BUILDPACK_URL` | Buildpack's full URL. Can point to either Git repositories or a tarball URL. For Git repositories, it is possible to point to a specific `ref`. For example `https://github.com/heroku/heroku-buildpack-ruby.git#v142`. | +| `CANARY_ENABLED` | From GitLab 11.0, used to define a [deploy policy for canary environments](#deploy-policy-for-canary-environments-premium). | +| `CANARY_PRODUCTION_REPLICAS` | Number of canary replicas to deploy for [Canary Deployments](../../user/project/canary_deployments.md) in the production environment. Takes precedence over `CANARY_REPLICAS`. Defaults to 1. | +| `CANARY_REPLICAS` | Number of canary replicas to deploy for [Canary Deployments](../../user/project/canary_deployments.md). Defaults to 1. | +| `HELM_RELEASE_NAME` | From GitLab 12.1, allows the `helm` release name to be overridden. Can be used to assign unique release names when deploying multiple projects to a single namespace. 
| +| `HELM_UPGRADE_VALUES_FILE` | From GitLab 12.6, allows the `helm upgrade` values file to be overridden. Defaults to `.gitlab/auto-deploy-values.yaml`. | +| `HELM_UPGRADE_EXTRA_ARGS` | From GitLab 11.11, allows extra arguments in `helm` commands when deploying the application. Note that using quotes will not prevent word splitting. **Tip:** you can use this variable to [customize the Auto Deploy Helm chart](#custom-helm-chart) by applying custom override values with `--values my-values.yaml`. | +| `INCREMENTAL_ROLLOUT_MODE` | From GitLab 11.4, if present, can be used to enable an [incremental rollout](#incremental-rollout-to-production-premium) of your application for the production environment. Set to `manual` for manual deployment jobs or `timed` for automatic rollout deployments with a 5 minute delay each one. | +| `K8S_SECRET_*` | From GitLab 11.7, any variable prefixed with [`K8S_SECRET_`](#application-secret-variables) will be made available by Auto DevOps as environment variables to the deployed application. | +| `KUBE_INGRESS_BASE_DOMAIN` | From GitLab 11.8, can be used to set a domain per cluster. See [cluster domains](../../user/project/clusters/index.md#base-domain) for more information. | +| `PRODUCTION_REPLICAS` | Number of replicas to deploy in the production environment. Takes precedence over `REPLICAS` and defaults to 1. For zero downtime upgrades, set to 2 or greater. | +| `REPLICAS` | Number of replicas to deploy. Defaults to 1. | +| `ROLLOUT_RESOURCE_TYPE` | From GitLab 11.9, allows specification of the resource type being deployed when using a custom Helm chart. Default value is `deployment`. | +| `ROLLOUT_STATUS_DISABLED` | From GitLab 12.0, used to disable rollout status check because it doesn't support all resource types, for example, `cronjob`. | +| `STAGING_ENABLED` | From GitLab 10.8, used to define a [deploy policy for staging and production environments](#deploy-policy-for-staging-and-production-environments). 
| + +TIP: **Tip:** +Set up the replica variables using a +[project variable](../../ci/variables/README.md#gitlab-cicd-environment-variables) +and scale your application by just redeploying it! + +CAUTION: **Caution:** +You should *not* scale your application using Kubernetes directly. This can +cause confusion with Helm not detecting the change, and subsequent deploys with +Auto DevOps can undo your changes. + +### Database + +The following table lists variables related to the database. + +| **Variable** | **Description** | +|-----------------------------------------|------------------------------------| +| `DB_INITIALIZE` | From GitLab 11.4, used to specify the command to run to initialize the application's PostgreSQL database. Runs inside the application pod. | +| `DB_MIGRATE` | From GitLab 11.4, used to specify the command to run to migrate the application's PostgreSQL database. Runs inside the application pod. | +| `POSTGRES_ENABLED` | Whether PostgreSQL is enabled. Defaults to `"true"`. Set to `false` to disable the automatic deployment of PostgreSQL. | +| `POSTGRES_USER` | The PostgreSQL user. Defaults to `user`. Set it to use a custom username. | +| `POSTGRES_PASSWORD` | The PostgreSQL password. Defaults to `testing-password`. Set it to use a custom password. | +| `POSTGRES_DB` | The PostgreSQL database name. Defaults to the value of [`$CI_ENVIRONMENT_SLUG`](../../ci/variables/README.md#predefined-environment-variables). Set it to use a custom database name. | +| `POSTGRES_VERSION` | Tag for the [`postgres` Docker image](https://hub.docker.com/_/postgres) to use. Defaults to `9.6.2`. | + +### Security tools + +The following table lists variables related to security tools. + +| **Variable** | **Description** | +|-----------------------------------------|------------------------------------| +| `SAST_CONFIDENCE_LEVEL` | Minimum confidence level of security issues you want to be reported; `1` for Low, `2` for Medium, `3` for High. Defaults to `3`. 
| + +### Disable jobs + +The following table lists variables used to disable jobs. + +| **Variable** | **Description** | +|-----------------------------------------|------------------------------------| +| `CODE_QUALITY_DISABLED` | From GitLab 11.0, used to disable the `codequality` job. If the variable is present, the job will not be created. | +| `CONTAINER_SCANNING_DISABLED` | From GitLab 11.0, used to disable the `sast:container` job. If the variable is present, the job will not be created. | +| `DAST_DISABLED` | From GitLab 11.0, used to disable the `dast` job. If the variable is present, the job will not be created. | +| `DEPENDENCY_SCANNING_DISABLED` | From GitLab 11.0, used to disable the `dependency_scanning` job. If the variable is present, the job will not be created. | +| `LICENSE_MANAGEMENT_DISABLED` | From GitLab 11.0, used to disable the `license_management` job. If the variable is present, the job will not be created. | +| `PERFORMANCE_DISABLED` | From GitLab 11.0, used to disable the `performance` job. If the variable is present, the job will not be created. | +| `REVIEW_DISABLED` | From GitLab 11.0, used to disable the `review` and the manual `review:stop` job. If the variable is present, these jobs will not be created. | +| `SAST_DISABLED` | From GitLab 11.0, used to disable the `sast` job. If the variable is present, the job will not be created. | +| `TEST_DISABLED` | From GitLab 11.0, used to disable the `test` job. If the variable is present, the job will not be created. | + +### Application secret variables + +> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/49056) in GitLab 11.7. + +Some applications need to define secret variables that are +accessible by the deployed application. Auto DevOps detects variables where the key starts with +`K8S_SECRET_` and make these prefixed variables available to the +deployed application, as environment variables. + +To configure your application variables: + +1. 
Go to your project's **Settings > CI/CD**, then expand the section + called **Variables**. + +1. Create a CI Variable, ensuring the key is prefixed with + `K8S_SECRET_`. For example, you can create a variable with key + `K8S_SECRET_RAILS_MASTER_KEY`. + +1. Run an Auto Devops pipeline either by manually creating a new + pipeline or by pushing a code change to GitLab. + +Auto DevOps pipelines will take your application secret variables to +populate a Kubernetes secret. This secret is unique per environment. +When deploying your application, the secret is loaded as environment +variables in the container running the application. Following the +example above, you can see the secret below containing the +`RAILS_MASTER_KEY` variable. + +```shell +$ kubectl get secret production-secret -n minimal-ruby-app-54 -o yaml +apiVersion: v1 +data: + RAILS_MASTER_KEY: MTIzNC10ZXN0 +kind: Secret +metadata: + creationTimestamp: 2018-12-20T01:48:26Z + name: production-secret + namespace: minimal-ruby-app-54 + resourceVersion: "429422" + selfLink: /api/v1/namespaces/minimal-ruby-app-54/secrets/production-secret + uid: 57ac2bfd-03f9-11e9-b812-42010a9400e4 +type: Opaque +``` + +Environment variables are generally considered immutable in a Kubernetes +pod. Therefore, if you update an application secret without changing any +code then manually create a new pipeline, you will find that any running +application pods will not have the updated secrets. In this case, you +can either push a code update to GitLab to force the Kubernetes +Deployment to recreate pods or manually delete running pods to +cause Kubernetes to create new pods with updated secrets. + +NOTE: **Note:** +Variables with multiline values are not currently supported due to +limitations with the current Auto DevOps scripting environment. + +### Advanced replica variables setup + +Apart from the two replica-related variables for production mentioned above, +you can also use others for different environments. 
+ +There's a very specific mapping between Kubernetes' label named `track`, +GitLab CI/CD environment names, and the replicas environment variable. +The general rule is: `TRACK_ENV_REPLICAS`. Where: + +- `TRACK`: The capitalized value of the `track` + [Kubernetes label](https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/) + in the Helm Chart app definition. If not set, it will not be taken into account + to the variable name. +- `ENV`: The capitalized environment name of the deploy job that is set in + `.gitlab-ci.yml`. + +That way, you can define your own `TRACK_ENV_REPLICAS` variables with which +you will be able to scale the pod's replicas easily. + +In the example below, the environment's name is `qa` and it deploys the track +`foo` which would result in looking for the `FOO_QA_REPLICAS` environment +variable: + +```yaml +QA testing: + stage: deploy + environment: + name: qa + script: + - deploy foo +``` + +The track `foo` being referenced would also need to be defined in the +application's Helm chart, like: + +```yaml +replicaCount: 1 +image: + repository: gitlab.example.com/group/project + tag: stable + pullPolicy: Always + secrets: + - name: gitlab-registry +application: + track: foo + tier: web +service: + enabled: true + name: web + type: ClusterIP + url: http://my.host.com/ + externalPort: 5000 + internalPort: 5000 +``` + +### Deploy policy for staging and production environments + +> [Introduced](https://gitlab.com/gitlab-org/gitlab-ci-yml/-/merge_requests/160) in GitLab 10.8. + +TIP: **Tip:** +You can also set this inside your [project's settings](index.md#deployment-strategy). + +The normal behavior of Auto DevOps is to use Continuous Deployment, pushing +automatically to the `production` environment every time a new pipeline is run +on the default branch. However, there are cases where you might want to use a +staging environment and deploy to production manually. 
For this scenario, the +`STAGING_ENABLED` environment variable was introduced. + +If `STAGING_ENABLED` is defined in your project (e.g., set `STAGING_ENABLED` to +`1` as a CI/CD variable), then the application will be automatically deployed +to a `staging` environment, and a `production_manual` job will be created for +you when you're ready to manually deploy to production. + +### Deploy policy for canary environments **(PREMIUM)** + +> [Introduced](https://gitlab.com/gitlab-org/gitlab-ci-yml/-/merge_requests/171) in GitLab 11.0. + +A [canary environment](../../user/project/canary_deployments.md) can be used +before any changes are deployed to production. + +If `CANARY_ENABLED` is defined in your project (e.g., set `CANARY_ENABLED` to +`1` as a CI/CD variable) then two manual jobs will be created: + +- `canary` which will deploy the application to the canary environment +- `production_manual` which is to be used by you when you're ready to manually + deploy to production. + +### Incremental rollout to production **(PREMIUM)** + +> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/5415) in GitLab 10.8. + +TIP: **Tip:** +You can also set this inside your [project's settings](index.md#deployment-strategy). + +When you have a new version of your app to deploy in production, you may want +to use an incremental rollout to replace just a few pods with the latest code. +This will allow you to first check how the app is behaving, and later manually +increasing the rollout up to 100%. + +If `INCREMENTAL_ROLLOUT_MODE` is set to `manual` in your project, then instead +of the standard `production` job, 4 different +[manual jobs](../../ci/pipelines/index.md#manual-actions-from-pipeline-graphs) +will be created: + +1. `rollout 10%` +1. `rollout 25%` +1. `rollout 50%` +1. `rollout 100%` + +The percentage is based on the `REPLICAS` variable and defines the number of +pods you want to have for your deployment. 
If you say `10`, and then you run +the `10%` rollout job, there will be `1` new pod + `9` old ones. + +To start a job, click on the play icon next to the job's name. You are not +required to go from `10%` to `100%`, you can jump to whatever job you want. +You can also scale down by running a lower percentage job, just before hitting +`100%`. Once you get to `100%`, you cannot scale down, and you'd have to roll +back by redeploying the old version using the +[rollback button](../../ci/environments.md#retrying-and-rolling-back) in the +environment page. + +Below, you can see how the pipeline will look if the rollout or staging +variables are defined. + +Without `INCREMENTAL_ROLLOUT_MODE` and without `STAGING_ENABLED`: + +![Staging and rollout disabled](img/rollout_staging_disabled.png) + +Without `INCREMENTAL_ROLLOUT_MODE` and with `STAGING_ENABLED`: + +![Staging enabled](img/staging_enabled.png) + +With `INCREMENTAL_ROLLOUT_MODE` set to `manual` and without `STAGING_ENABLED`: + +![Rollout enabled](img/rollout_enabled.png) + +With `INCREMENTAL_ROLLOUT_MODE` set to `manual` and with `STAGING_ENABLED` + +![Rollout and staging enabled](img/rollout_staging_enabled.png) + +CAUTION: **Caution:** +Before GitLab 11.4 this feature was enabled by the presence of the +`INCREMENTAL_ROLLOUT_ENABLED` environment variable. +This configuration is deprecated and will be removed in the future. + +### Timed incremental rollout to production **(PREMIUM)** + +> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/7545) in GitLab 11.4. + +TIP: **Tip:** +You can also set this inside your [project's settings](index.md#deployment-strategy). + +This configuration is based on +[incremental rollout to production](#incremental-rollout-to-production-premium). + +Everything behaves the same way, except: + +- It's enabled by setting the `INCREMENTAL_ROLLOUT_MODE` variable to `timed`. 
- Instead of the standard `production` job, the following jobs are created with a 5-minute delay between each: +
For an overview on the creation of Auto DevOps, read more [in this blog post](https://about.gitlab.com/blog/2017/06/29/whats-next-for-gitlab-ci/). @@ -300,12 +300,12 @@ The available options are: - **Continuous deployment to production**: Enables [Auto Deploy](stages.md#auto-deploy) with `master` branch directly deployed to production. - **Continuous deployment to production using timed incremental rollout**: Sets the - [`INCREMENTAL_ROLLOUT_MODE`](#timed-incremental-rollout-to-production-premium) variable + [`INCREMENTAL_ROLLOUT_MODE`](customize.md#timed-incremental-rollout-to-production-premium) variable to `timed`, and production deployment will be executed with a 5 minute delay between each increment in rollout. - **Automatic deployment to staging, manual deployment to production**: Sets the - [`STAGING_ENABLED`](#deploy-policy-for-staging-and-production-environments) and - [`INCREMENTAL_ROLLOUT_MODE`](#incremental-rollout-to-production-premium) variables + [`STAGING_ENABLED`](customize.md#deploy-policy-for-staging-and-production-environments) and + [`INCREMENTAL_ROLLOUT_MODE`](customize.md#incremental-rollout-to-production-premium) variables to `1` and `manual`. This means: - `master` branch is directly deployed to staging. @@ -336,8 +336,8 @@ be configured. | Cluster name | Cluster environment scope | `KUBE_INGRESS_BASE_DOMAIN` variable value | Variable environment scope | Notes | |--------------|---------------------------|-------------------------------------------|----------------------------|---| | review | `review/*` | `review.example.com` | `review/*` | The review cluster which will run all [Review Apps](../../ci/review_apps/index.md). `*` is a wildcard, which means it will be used by every environment name starting with `review/`. | -| staging | `staging` | `staging.example.com` | `staging` | (Optional) The staging cluster which will run the deployments of the staging environments. 
You need to [enable it first](#deploy-policy-for-staging-and-production-environments). | -| production | `production` | `example.com` | `production` | The production cluster which will run the deployments of the production environment. You can use [incremental rollouts](#incremental-rollout-to-production-premium). | +| staging | `staging` | `staging.example.com` | `staging` | (Optional) The staging cluster which will run the deployments of the staging environments. You need to [enable it first](customize.md#deploy-policy-for-staging-and-production-environments). | +| production | `production` | `example.com` | `production` | The production cluster which will run the deployments of the production environment. You can use [incremental rollouts](customize.md#incremental-rollout-to-production-premium). | To add a different cluster for each environment: @@ -358,607 +358,6 @@ and verifying that your app is deployed as a review app in the Kubernetes cluster with the `review/*` environment scope. Similarly, you can check the other environments. -## Customizing - -While Auto DevOps provides great defaults to get you started, you can customize -almost everything to fit your needs; from custom [buildpacks](#custom-buildpacks), -to [`Dockerfile`s](#custom-dockerfile), [Helm charts](#custom-helm-chart), or -even copying the complete [CI/CD configuration](#customizing-gitlab-ciyml) -into your project to enable staging and canary deployments, and more. - -### Custom buildpacks - -If the automatic buildpack detection fails for your project, or if you want to -use a custom buildpack, you can override the buildpack(s) using a project variable -or a `.buildpacks` file in your project: - -- **Project variable** - Create a project variable `BUILDPACK_URL` with the URL - of the buildpack to use. -- **`.buildpacks` file** - Add a file in your project's repo called `.buildpacks` - and add the URL of the buildpack to use on a line in the file. 
If you want to - use multiple buildpacks, you can enter them in, one on each line. - -#### Multiple buildpacks - -Using multiple buildpacks isn't fully supported by Auto DevOps because, when using the `.buildpacks` -file, Auto Test will not work. - -The buildpack [heroku-buildpack-multi](https://github.com/heroku/heroku-buildpack-multi/), -which is used under the hood to parse the `.buildpacks` file, doesn't provide the necessary commands -`bin/test-compile` and `bin/test`. - -If your goal is to use only a single custom buildpack, you should provide the project variable -`BUILDPACK_URL` instead. - -### Custom `Dockerfile` - -If your project has a `Dockerfile` in the root of the project repo, Auto DevOps -will build a Docker image based on the Dockerfile rather than using buildpacks. -This can be much faster and result in smaller images, especially if your -Dockerfile is based on [Alpine](https://hub.docker.com/_/alpine/). - -### Passing arguments to `docker build` - -Arguments can be passed to the `docker build` command using the -`AUTO_DEVOPS_BUILD_IMAGE_EXTRA_ARGS` project variable. - -For example, to build a Docker image based on based on the `ruby:alpine` -instead of the default `ruby:latest`: - -1. Set `AUTO_DEVOPS_BUILD_IMAGE_EXTRA_ARGS` to `--build-arg=RUBY_VERSION=alpine`. -1. Add the following to a custom `Dockerfile`: - - ```dockerfile - ARG RUBY_VERSION=latest - FROM ruby:$RUBY_VERSION - - # ... put your stuff here - ``` - -NOTE: **Note:** -Passing in complex values (newlines and spaces, for example) will likely -cause escaping issues due to the way this argument is used in Auto DevOps. -Consider using Base64 encoding of such values to avoid this problem. - -CAUTION: **Warning:** -Avoid passing secrets as Docker build arguments if possible, as they may be -persisted in your image. See -[this discussion](https://github.com/moby/moby/issues/13490) for details. 
- -### Passing secrets to `docker build` - -> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/25514) in GitLab 12.3, but available in versions 11.9 and above. - -CI environment variables can be passed as [build -secrets](https://docs.docker.com/develop/develop-images/build_enhancements/#new-docker-build-secret-information) to the `docker build` command by listing them comma separated by name in the -`AUTO_DEVOPS_BUILD_IMAGE_FORWARDED_CI_VARIABLES` variable. For example, in order to forward the variables `CI_COMMIT_SHA` and `CI_ENVIRONMENT_NAME`, one would set `AUTO_DEVOPS_BUILD_IMAGE_FORWARDED_CI_VARIABLES` to `CI_COMMIT_SHA,CI_ENVIRONMENT_NAME`. - -Unlike build arguments, these are not persisted by Docker in the final image -(though you can still persist them yourself, so **be careful**). - -In projects: - -- Without a `Dockerfile`, these are available automatically as environment - variables. -- With a `Dockerfile`, the following is required: - - 1. Activate the experimental `Dockerfile` syntax by adding the following - to the top of the file: - - ```dockerfile - # syntax = docker/dockerfile:experimental - ``` - - 1. To make secrets available in any `RUN $COMMAND` in the `Dockerfile`, mount - the secret file and source it prior to running `$COMMAND`: - - ```dockerfile - RUN --mount=type=secret,id=auto-devops-build-secrets . /run/secrets/auto-devops-build-secrets && $COMMAND - ``` - -NOTE: **Note:** -When `AUTO_DEVOPS_BUILD_IMAGE_FORWARDED_CI_VARIABLES` is set, Auto DevOps -enables the experimental [Docker BuildKit](https://docs.docker.com/develop/develop-images/build_enhancements/) -feature to use the `--secret` flag. - -### Custom Helm Chart - -Auto DevOps uses [Helm](https://helm.sh/) to deploy your application to Kubernetes. 
-You can override the Helm chart used by bundling up a chart into your project -repo or by specifying a project variable: - -- **Bundled chart** - If your project has a `./chart` directory with a `Chart.yaml` - file in it, Auto DevOps will detect the chart and use it instead of the [default - one](https://gitlab.com/gitlab-org/charts/auto-deploy-app). - This can be a great way to control exactly how your application is deployed. -- **Project variable** - Create a [project variable](../../ci/variables/README.md#gitlab-cicd-environment-variables) - `AUTO_DEVOPS_CHART` with the URL of a custom chart to use or create two project variables `AUTO_DEVOPS_CHART_REPOSITORY` with the URL of a custom chart repository and `AUTO_DEVOPS_CHART` with the path to the chart. - -### Customize values for Helm Chart - -> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/30628) in GitLab 12.6, `.gitlab/auto-deploy-values.yaml` will be used by default for Helm upgrades. - -You can override the default values in the `values.yaml` file in the [default Helm chart](https://gitlab.com/gitlab-org/charts/auto-deploy-app). -This can be achieved by either: - -- Adding a file named `.gitlab/auto-deploy-values.yaml` to your repository. It will - be automatically used if found. -- Adding a file with a different name or path to the repository, and set the - `HELM_UPGRADE_VALUES_FILE` [environment variable](#environment-variables) with the path and name. - -NOTE: **Note:** -For GitLab 12.5 and earlier, the `HELM_UPGRADE_EXTRA_ARGS` environment variable can be used to override the default chart values. -To do so, set `HELM_UPGRADE_EXTRA_ARGS` to `--values my-values.yaml`. - -### Custom Helm chart per environment - -You can specify the use of a custom Helm chart per environment by scoping the environment variable -to the desired environment. See [Limiting environment scopes of variables](../../ci/variables/README.md#limiting-environment-scopes-of-environment-variables). 
- -### Customizing `.gitlab-ci.yml` - -Auto DevOps is completely customizable because the [Auto DevOps template](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml): - -- Is just an implementation of a [`.gitlab-ci.yml`](../../ci/yaml/README.md) file. -- Uses only features available to any implementation of `.gitlab-ci.yml`. - -If you want to modify the CI/CD pipeline used by Auto DevOps, you can [`include` -the template](../../ci/yaml/README.md#includetemplate) and customize as -needed. To do this, add a `.gitlab-ci.yml` file to the root of your repository -containing the following: - -```yml -include: - - template: Auto-DevOps.gitlab-ci.yml -``` - -Then add any extra changes you want. Your additions will be merged with the -[Auto DevOps template](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml) using the behaviour described for -[`include`](../../ci/yaml/README.md#include). - -It is also possible to copy and paste the contents of the [Auto DevOps template](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml) -into your project and edit this as needed. You may prefer to do it -that way if you want to specifically remove any part of it. - -### Customizing the Kubernetes namespace - -> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/27630) in GitLab 12.6. - -For **non**-GitLab-managed clusters, the namespace can be customized using -`.gitlab-ci.yml` by specifying -[`environment:kubernetes:namespace`](../../ci/environments.md#configuring-kubernetes-deployments). 
-For example, the following configuration overrides the namespace used for -`production` deployments: - -```yaml -include: - - template: Auto-DevOps.gitlab-ci.yml - -production: - environment: - kubernetes: - namespace: production -``` - -When deploying to a custom namespace with Auto DevOps, the service account -provided with the cluster needs at least the `edit` role within the namespace. - -- If the service account can create namespaces, then the namespace can be created on-demand. -- Otherwise, the namespace must exist prior to deployment. - -### Using components of Auto DevOps - -If you only require a subset of the features offered by Auto DevOps, you can include -individual Auto DevOps jobs into your own `.gitlab-ci.yml`. Each component job relies -on a stage that should be defined in the `.gitlab-ci.yml` that includes the template. - -For example, to make use of [Auto Build](stages.md#auto-build), you can add the following to -your `.gitlab-ci.yml`: - -```yaml -stages: - - build - -include: - - template: Jobs/Build.gitlab-ci.yml -``` - -Consult the [Auto DevOps template](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml) for information on available jobs. - -### PostgreSQL database support - -In order to support applications that require a database, -[PostgreSQL](https://www.postgresql.org/) is provisioned by default. The credentials to access -the database are preconfigured, but can be customized by setting the associated -[variables](#environment-variables). These credentials can be used for defining a -`DATABASE_URL` of the format: - -```yaml -postgres://user:password@postgres-host:postgres-port/postgres-database -``` - -#### Upgrading PostgresSQL - -CAUTION: **Deprecation** -The variable `AUTO_DEVOPS_POSTGRES_CHANNEL` that controls default provisioned -PostgreSQL currently defaults to `1`. This is scheduled to change to `2` in -[GitLab 13.0](https://gitlab.com/gitlab-org/gitlab/-/issues/210499). 
- -The version of the chart used to provision PostgreSQL: - -- Is 0.7.1 in GitLab 12.8 and earlier. -- Can be set to from 0.7.1 to 8.2.1 in GitLab 12.9 and later. - -GitLab encourages users to [migrate their database](upgrading_postgresql.md) -to the newer PostgreSQL. - -To use the new PostgreSQL: - -- New projects can set the `AUTO_DEVOPS_POSTGRES_CHANNEL` variable to `2`. -- Old projects can be upgraded by following the guide to - [upgrading PostgresSQL](upgrading_postgresql.md). - -#### Using external PostgreSQL database providers - -While Auto DevOps provides out-of-the-box support for a PostgreSQL container for -production environments, for some use-cases it may not be sufficiently secure or -resilient and you may wish to use an external managed provider for PostgreSQL. -For example, AWS Relational Database Service. - -You will need to define environment-scoped variables for `POSTGRES_ENABLED` and `DATABASE_URL` in your project's CI/CD settings. - -To achieve this: - -1. Disable the built-in PostgreSQL installation for the required environments using - scoped [environment variables](../../ci/environments.md#scoping-environments-with-specs). - For this use case, it's likely that only `production` will need to be added to this - list as the builtin PostgreSQL setup for Review Apps and staging will be sufficient - as a high availability setup is not required. - - ![Auto Metrics](img/disable_postgres.png) - -1. Define the `DATABASE_URL` CI variable as a scoped environment variable that will be - available to your application. This should be a URL in the following format: - - ```yaml - postgres://user:password@postgres-host:postgres-port/postgres-database - ``` - -You will need to ensure that your Kubernetes cluster has network access to wherever -PostgreSQL is hosted. - -### Environment variables - -The following variables can be used for setting up the Auto DevOps domain, -providing a custom Helm chart, or scaling your application. 
PostgreSQL can -also be customized, and you can easily use a [custom buildpack](#custom-buildpacks). - -#### Build and deployment - -The following table lists variables related to building and deploying -applications. - -| **Variable** | **Description** | -|-----------------------------------------|------------------------------------| -| `ADDITIONAL_HOSTS` | Fully qualified domain names specified as a comma-separated list that are added to the Ingress hosts. | -| `<ENVIRONMENT>_ADDITIONAL_HOSTS` | For a specific environment, the fully qualified domain names specified as a comma-separated list that are added to the Ingress hosts. This takes precedence over `ADDITIONAL_HOSTS`. | -| `AUTO_DEVOPS_BUILD_IMAGE_CNB_ENABLED` | When set to a non-empty value and no `Dockerfile` is present, Auto Build builds your application using Cloud Native Buildpacks instead of Herokuish. [More details](stages.md#auto-build-using-cloud-native-buildpacks-beta). | -| `AUTO_DEVOPS_BUILD_IMAGE_EXTRA_ARGS` | Extra arguments to be passed to the `docker build` command. Note that using quotes will not prevent word splitting. [More details](#passing-arguments-to-docker-build). | -| `AUTO_DEVOPS_BUILD_IMAGE_FORWARDED_CI_VARIABLES` | A [comma-separated list of CI variable names](#passing-secrets-to-docker-build) to be passed to the `docker build` command as secrets. | -| `AUTO_DEVOPS_CHART` | Helm Chart used to deploy your apps. Defaults to the one [provided by GitLab](https://gitlab.com/gitlab-org/charts/auto-deploy-app). | -| `AUTO_DEVOPS_CHART_REPOSITORY` | Helm Chart repository used to search for charts. Defaults to `https://charts.gitlab.io`. | -| `AUTO_DEVOPS_CHART_REPOSITORY_NAME` | From GitLab 11.11, used to set the name of the Helm repository. Defaults to `gitlab`. | -| `AUTO_DEVOPS_CHART_REPOSITORY_USERNAME` | From GitLab 11.11, used to set a username to connect to the Helm repository. Defaults to no credentials. Also set `AUTO_DEVOPS_CHART_REPOSITORY_PASSWORD`. 
| -| `AUTO_DEVOPS_CHART_REPOSITORY_PASSWORD` | From GitLab 11.11, used to set a password to connect to the Helm repository. Defaults to no credentials. Also set `AUTO_DEVOPS_CHART_REPOSITORY_USERNAME`. | -| `AUTO_DEVOPS_MODSECURITY_SEC_RULE_ENGINE` | From GitLab 12.5, used in combination with [Modsecurity feature flag](../../user/clusters/applications.md#web-application-firewall-modsecurity) to toggle [Modsecurity's `SecRuleEngine`](https://github.com/SpiderLabs/ModSecurity/wiki/Reference-Manual-(v2.x)#SecRuleEngine) behavior. Defaults to `DetectionOnly`. | -| `BUILDPACK_URL` | Buildpack's full URL. Can point to either Git repositories or a tarball URL. For Git repositories, it is possible to point to a specific `ref`. For example `https://github.com/heroku/heroku-buildpack-ruby.git#v142`. | -| `CANARY_ENABLED` | From GitLab 11.0, used to define a [deploy policy for canary environments](#deploy-policy-for-canary-environments-premium). | -| `CANARY_PRODUCTION_REPLICAS` | Number of canary replicas to deploy for [Canary Deployments](../../user/project/canary_deployments.md) in the production environment. Takes precedence over `CANARY_REPLICAS`. Defaults to 1. | -| `CANARY_REPLICAS` | Number of canary replicas to deploy for [Canary Deployments](../../user/project/canary_deployments.md). Defaults to 1. | -| `HELM_RELEASE_NAME` | From GitLab 12.1, allows the `helm` release name to be overridden. Can be used to assign unique release names when deploying multiple projects to a single namespace. | -| `HELM_UPGRADE_VALUES_FILE` | From GitLab 12.6, allows the `helm upgrade` values file to be overridden. Defaults to `.gitlab/auto-deploy-values.yaml`. | -| `HELM_UPGRADE_EXTRA_ARGS` | From GitLab 11.11, allows extra arguments in `helm` commands when deploying the application. Note that using quotes will not prevent word splitting. 
**Tip:** you can use this variable to [customize the Auto Deploy Helm chart](#custom-helm-chart) by applying custom override values with `--values my-values.yaml`. | -| `INCREMENTAL_ROLLOUT_MODE` | From GitLab 11.4, if present, can be used to enable an [incremental rollout](#incremental-rollout-to-production-premium) of your application for the production environment. Set to `manual` for manual deployment jobs or `timed` for automatic rollout deployments with a 5 minute delay each one. | -| `K8S_SECRET_*` | From GitLab 11.7, any variable prefixed with [`K8S_SECRET_`](#application-secret-variables) will be made available by Auto DevOps as environment variables to the deployed application. | -| `KUBE_INGRESS_BASE_DOMAIN` | From GitLab 11.8, can be used to set a domain per cluster. See [cluster domains](../../user/project/clusters/index.md#base-domain) for more information. | -| `PRODUCTION_REPLICAS` | Number of replicas to deploy in the production environment. Takes precedence over `REPLICAS` and defaults to 1. For zero downtime upgrades, set to 2 or greater. | -| `REPLICAS` | Number of replicas to deploy. Defaults to 1. | -| `ROLLOUT_RESOURCE_TYPE` | From GitLab 11.9, allows specification of the resource type being deployed when using a custom Helm chart. Default value is `deployment`. | -| `ROLLOUT_STATUS_DISABLED` | From GitLab 12.0, used to disable rollout status check because it doesn't support all resource types, for example, `cronjob`. | -| `STAGING_ENABLED` | From GitLab 10.8, used to define a [deploy policy for staging and production environments](#deploy-policy-for-staging-and-production-environments). | - -TIP: **Tip:** -Set up the replica variables using a -[project variable](../../ci/variables/README.md#gitlab-cicd-environment-variables) -and scale your application by just redeploying it! - -CAUTION: **Caution:** -You should *not* scale your application using Kubernetes directly. 
This can -cause confusion with Helm not detecting the change, and subsequent deploys with -Auto DevOps can undo your changes. - -#### Database - -The following table lists variables related to the database. - -| **Variable** | **Description** | -|-----------------------------------------|------------------------------------| -| `DB_INITIALIZE` | From GitLab 11.4, used to specify the command to run to initialize the application's PostgreSQL database. Runs inside the application pod. | -| `DB_MIGRATE` | From GitLab 11.4, used to specify the command to run to migrate the application's PostgreSQL database. Runs inside the application pod. | -| `POSTGRES_ENABLED` | Whether PostgreSQL is enabled. Defaults to `"true"`. Set to `false` to disable the automatic deployment of PostgreSQL. | -| `POSTGRES_USER` | The PostgreSQL user. Defaults to `user`. Set it to use a custom username. | -| `POSTGRES_PASSWORD` | The PostgreSQL password. Defaults to `testing-password`. Set it to use a custom password. | -| `POSTGRES_DB` | The PostgreSQL database name. Defaults to the value of [`$CI_ENVIRONMENT_SLUG`](../../ci/variables/README.md#predefined-environment-variables). Set it to use a custom database name. | -| `POSTGRES_VERSION` | Tag for the [`postgres` Docker image](https://hub.docker.com/_/postgres) to use. Defaults to `9.6.2`. | - -#### Security tools - -The following table lists variables related to security tools. - -| **Variable** | **Description** | -|-----------------------------------------|------------------------------------| -| `SAST_CONFIDENCE_LEVEL` | Minimum confidence level of security issues you want to be reported; `1` for Low, `2` for Medium, `3` for High. Defaults to `3`. | - -#### Disable jobs - -The following table lists variables used to disable jobs. - -| **Variable** | **Description** | -|-----------------------------------------|------------------------------------| -| `CODE_QUALITY_DISABLED` | From GitLab 11.0, used to disable the `codequality` job. 
If the variable is present, the job will not be created. | -| `CONTAINER_SCANNING_DISABLED` | From GitLab 11.0, used to disable the `sast:container` job. If the variable is present, the job will not be created. | -| `DAST_DISABLED` | From GitLab 11.0, used to disable the `dast` job. If the variable is present, the job will not be created. | -| `DEPENDENCY_SCANNING_DISABLED` | From GitLab 11.0, used to disable the `dependency_scanning` job. If the variable is present, the job will not be created. | -| `LICENSE_MANAGEMENT_DISABLED` | From GitLab 11.0, used to disable the `license_management` job. If the variable is present, the job will not be created. | -| `PERFORMANCE_DISABLED` | From GitLab 11.0, used to disable the `performance` job. If the variable is present, the job will not be created. | -| `REVIEW_DISABLED` | From GitLab 11.0, used to disable the `review` and the manual `review:stop` job. If the variable is present, these jobs will not be created. | -| `SAST_DISABLED` | From GitLab 11.0, used to disable the `sast` job. If the variable is present, the job will not be created. | -| `TEST_DISABLED` | From GitLab 11.0, used to disable the `test` job. If the variable is present, the job will not be created. | - -#### Application secret variables - -> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/49056) in GitLab 11.7. - -Some applications need to define secret variables that are -accessible by the deployed application. Auto DevOps detects variables where the key starts with -`K8S_SECRET_` and make these prefixed variables available to the -deployed application, as environment variables. - -To configure your application variables: - -1. Go to your project's **Settings > CI/CD**, then expand the section - called **Variables**. - -1. Create a CI Variable, ensuring the key is prefixed with - `K8S_SECRET_`. For example, you can create a variable with key - `K8S_SECRET_RAILS_MASTER_KEY`. - -1. 
Run an Auto Devops pipeline either by manually creating a new - pipeline or by pushing a code change to GitLab. - -Auto DevOps pipelines will take your application secret variables to -populate a Kubernetes secret. This secret is unique per environment. -When deploying your application, the secret is loaded as environment -variables in the container running the application. Following the -example above, you can see the secret below containing the -`RAILS_MASTER_KEY` variable. - -```shell -$ kubectl get secret production-secret -n minimal-ruby-app-54 -o yaml -apiVersion: v1 -data: - RAILS_MASTER_KEY: MTIzNC10ZXN0 -kind: Secret -metadata: - creationTimestamp: 2018-12-20T01:48:26Z - name: production-secret - namespace: minimal-ruby-app-54 - resourceVersion: "429422" - selfLink: /api/v1/namespaces/minimal-ruby-app-54/secrets/production-secret - uid: 57ac2bfd-03f9-11e9-b812-42010a9400e4 -type: Opaque -``` - -Environment variables are generally considered immutable in a Kubernetes -pod. Therefore, if you update an application secret without changing any -code then manually create a new pipeline, you will find that any running -application pods will not have the updated secrets. In this case, you -can either push a code update to GitLab to force the Kubernetes -Deployment to recreate pods or manually delete running pods to -cause Kubernetes to create new pods with updated secrets. - -NOTE: **Note:** -Variables with multiline values are not currently supported due to -limitations with the current Auto DevOps scripting environment. - -#### Advanced replica variables setup - -Apart from the two replica-related variables for production mentioned above, -you can also use others for different environments. - -There's a very specific mapping between Kubernetes' label named `track`, -GitLab CI/CD environment names, and the replicas environment variable. -The general rule is: `TRACK_ENV_REPLICAS`. 
Where: - -- `TRACK`: The capitalized value of the `track` - [Kubernetes label](https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/) - in the Helm Chart app definition. If not set, it will not be taken into account - to the variable name. -- `ENV`: The capitalized environment name of the deploy job that is set in - `.gitlab-ci.yml`. - -That way, you can define your own `TRACK_ENV_REPLICAS` variables with which -you will be able to scale the pod's replicas easily. - -In the example below, the environment's name is `qa` and it deploys the track -`foo` which would result in looking for the `FOO_QA_REPLICAS` environment -variable: - -```yaml -QA testing: - stage: deploy - environment: - name: qa - script: - - deploy foo -``` - -The track `foo` being referenced would also need to be defined in the -application's Helm chart, like: - -```yaml -replicaCount: 1 -image: - repository: gitlab.example.com/group/project - tag: stable - pullPolicy: Always - secrets: - - name: gitlab-registry -application: - track: foo - tier: web -service: - enabled: true - name: web - type: ClusterIP - url: http://my.host.com/ - externalPort: 5000 - internalPort: 5000 -``` - -#### Deploy policy for staging and production environments - -> [Introduced](https://gitlab.com/gitlab-org/gitlab-ci-yml/-/merge_requests/160) in GitLab 10.8. - -TIP: **Tip:** -You can also set this inside your [project's settings](#deployment-strategy). - -The normal behavior of Auto DevOps is to use Continuous Deployment, pushing -automatically to the `production` environment every time a new pipeline is run -on the default branch. However, there are cases where you might want to use a -staging environment and deploy to production manually. For this scenario, the -`STAGING_ENABLED` environment variable was introduced. 
-
-If `STAGING_ENABLED` is defined in your project (e.g., set `STAGING_ENABLED` to
-`1` as a CI/CD variable), then the application will be automatically deployed
-to a `staging` environment, and a `production_manual` job will be created for
-you when you're ready to manually deploy to production.
-
-#### Deploy policy for canary environments **(PREMIUM)**
-
-> [Introduced](https://gitlab.com/gitlab-org/gitlab-ci-yml/-/merge_requests/171) in GitLab 11.0.
-
-A [canary environment](../../user/project/canary_deployments.md) can be used
-before any changes are deployed to production.
-
-If `CANARY_ENABLED` is defined in your project (e.g., set `CANARY_ENABLED` to
-`1` as a CI/CD variable) then two manual jobs will be created:
-
-- `canary` which will deploy the application to the canary environment
-- `production_manual` which is to be used by you when you're ready to manually
-  deploy to production.
-
-#### Incremental rollout to production **(PREMIUM)**
-
-> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/5415) in GitLab 10.8.
-
-TIP: **Tip:**
-You can also set this inside your [project's settings](#deployment-strategy).
-
-When you have a new version of your app to deploy in production, you may want
-to use an incremental rollout to replace just a few pods with the latest code.
-This will allow you to first check how the app is behaving, and later manually
-increase the rollout up to 100%.
-
-If `INCREMENTAL_ROLLOUT_MODE` is set to `manual` in your project, then instead
-of the standard `production` job, 4 different
-[manual jobs](../../ci/pipelines/index.md#manual-actions-from-pipeline-graphs)
-will be created:
-
-1. `rollout 10%`
-1. `rollout 25%`
-1. `rollout 50%`
-1. `rollout 100%`
-
-The percentage is based on the `REPLICAS` variable and defines the number of
-pods you want to have for your deployment. If you say `10`, and then you run
-the `10%` rollout job, there will be `1` new pod + `9` old ones.
- -To start a job, click on the play icon next to the job's name. You are not -required to go from `10%` to `100%`, you can jump to whatever job you want. -You can also scale down by running a lower percentage job, just before hitting -`100%`. Once you get to `100%`, you cannot scale down, and you'd have to roll -back by redeploying the old version using the -[rollback button](../../ci/environments.md#retrying-and-rolling-back) in the -environment page. - -Below, you can see how the pipeline will look if the rollout or staging -variables are defined. - -Without `INCREMENTAL_ROLLOUT_MODE` and without `STAGING_ENABLED`: - -![Staging and rollout disabled](img/rollout_staging_disabled.png) - -Without `INCREMENTAL_ROLLOUT_MODE` and with `STAGING_ENABLED`: - -![Staging enabled](img/staging_enabled.png) - -With `INCREMENTAL_ROLLOUT_MODE` set to `manual` and without `STAGING_ENABLED`: - -![Rollout enabled](img/rollout_enabled.png) - -With `INCREMENTAL_ROLLOUT_MODE` set to `manual` and with `STAGING_ENABLED` - -![Rollout and staging enabled](img/rollout_staging_enabled.png) - -CAUTION: **Caution:** -Before GitLab 11.4 this feature was enabled by the presence of the -`INCREMENTAL_ROLLOUT_ENABLED` environment variable. -This configuration is deprecated and will be removed in the future. - -#### Timed incremental rollout to production **(PREMIUM)** - -> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/7545) in GitLab 11.4. - -TIP: **Tip:** -You can also set this inside your [project's settings](#deployment-strategy). - -This configuration is based on -[incremental rollout to production](#incremental-rollout-to-production-premium). - -Everything behaves the same way, except: - -- It's enabled by setting the `INCREMENTAL_ROLLOUT_MODE` variable to `timed`. -- Instead of the standard `production` job, the following jobs are created with a 5 minute delay between each : - 1. `timed rollout 10%` - 1. `timed rollout 25%` - 1. `timed rollout 50%` - 1. 
`timed rollout 100%` - -### Auto DevOps banner - -The following Auto DevOps banner will show for maintainers+ on new projects when Auto DevOps is not -enabled: - -![Auto DevOps banner](img/autodevops_banner_v12_6.png) - -The banner can be disabled for: - -- A user when they dismiss it themselves. -- A project by explicitly [disabling Auto DevOps](#enablingdisabling-auto-devops). -- An entire GitLab instance: - - By an administrator running the following in a Rails console: - - ```ruby - Feature.get(:auto_devops_banner_disabled).enable - ``` - - - Through the REST API with an admin access token: - - ```shell - curl --data "value=true" --header "PRIVATE-TOKEN: <personal_access_token>" https://gitlab.example.com/api/v4/features/auto_devops_banner_disabled - ``` - ## Currently supported languages Note that not all buildpacks support Auto Test yet, as it's a relatively new @@ -1024,7 +423,7 @@ spec: key files the buildpack is looking for. For example, for ruby apps, you must have a `Gemfile` to be properly detected, even though it is possible to write a Ruby app without a `Gemfile`. Try specifying a [custom - buildpack](#custom-buildpacks). + buildpack](customize.md#custom-buildpacks). - Auto Test may fail because of a mismatch between testing frameworks. In this case, you may need to customize your `.gitlab-ci.yml` with your test commands. - Auto Deploy will fail if GitLab can not create a Kubernetes namespace and diff --git a/doc/topics/autodevops/quick_start_guide.md b/doc/topics/autodevops/quick_start_guide.md index ba6e627cd8b..493e3b727a8 100644 --- a/doc/topics/autodevops/quick_start_guide.md +++ b/doc/topics/autodevops/quick_start_guide.md @@ -207,7 +207,7 @@ deployment and clicking a square will take you to the pod's logs page. 
TIP: **Tip:** There is only one pod hosting the application at the moment, but you can add -more pods by defining the [`REPLICAS` variable](index.md#environment-variables) +more pods by defining the [`REPLICAS` variable](customize.md#environment-variables) under **Settings > CI/CD > Environment variables**. ### Working with branches @@ -278,8 +278,8 @@ and customized to fit your workflow. Here are some helpful resources for further 1. [Auto DevOps](index.md) 1. [Multiple Kubernetes clusters](index.md#using-multiple-kubernetes-clusters-premium) **(PREMIUM)** -1. [Incremental rollout to production](index.md#incremental-rollout-to-production-premium) **(PREMIUM)** -1. [Disable jobs you don't need with environment variables](index.md#environment-variables) +1. [Incremental rollout to production](customize.md#incremental-rollout-to-production-premium) **(PREMIUM)** +1. [Disable jobs you don't need with environment variables](customize.md#environment-variables) 1. [Use a static IP for your cluster](../../user/clusters/applications.md#using-a-static-ip) -1. [Use your own buildpacks to build your application](index.md#custom-buildpacks) +1. [Use your own buildpacks to build your application](customize.md#custom-buildpacks) 1. [Prometheus monitoring](../../user/project/integrations/prometheus.md) diff --git a/doc/topics/autodevops/stages.md b/doc/topics/autodevops/stages.md index 434a30021d5..265a117ced1 100644 --- a/doc/topics/autodevops/stages.md +++ b/doc/topics/autodevops/stages.md @@ -19,7 +19,7 @@ If a project's repository contains a `Dockerfile` at its root, Auto Build will u If you are also using Auto Review Apps and Auto Deploy and choose to provide your own `Dockerfile`, make sure you expose your application to port `5000` as this is the port assumed by the -[default Helm chart](https://gitlab.com/gitlab-org/charts/auto-deploy-app). 
Alternatively you can override the default values by [customizing the Auto Deploy Helm chart](index.md#custom-helm-chart) +[default Helm chart](https://gitlab.com/gitlab-org/charts/auto-deploy-app). Alternatively you can override the default values by [customizing the Auto Deploy Helm chart](customize.md#custom-helm-chart) ### Auto Build using Heroku buildpacks @@ -78,7 +78,7 @@ Auto Test automatically runs the appropriate tests for your application using buildpacks](https://devcenter.heroku.com/articles/buildpacks) by analyzing your project to detect the language and framework. Several languages and frameworks are detected automatically, but if your language is not detected, -you may succeed with a [custom buildpack](index.md#custom-buildpacks). Check the +you may succeed with a [custom buildpack](customize.md#custom-buildpacks). Check the [currently supported languages](index.md#currently-supported-languages). Auto Test uses tests you already have in your application. If there are no @@ -182,7 +182,7 @@ be deleted. Review apps are deployed using the [auto-deploy-app](https://gitlab.com/gitlab-org/charts/auto-deploy-app) chart with -Helm, which can be [customized](index.md#custom-helm-chart). The app will be deployed into the [Kubernetes +Helm, which can be [customized](customize.md#custom-helm-chart). The app will be deployed into the [Kubernetes namespace](../../user/project/clusters/index.md#deployment-variables) for the environment. @@ -265,8 +265,8 @@ Auto Deploy doesn't include deployments to staging or canary by default, but the [Auto DevOps template](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml) contains job definitions for these tasks if you want to enable them. -You can make use of [environment variables](index.md#environment-variables) to automatically -scale your pod replicas and to apply custom arguments to the Auto DevOps `helm upgrade` commands. 
This is an easy way to [customize the Auto Deploy Helm chart](index.md#custom-helm-chart). +You can make use of [environment variables](customize.md#environment-variables) to automatically +scale your pod replicas and to apply custom arguments to the Auto DevOps `helm upgrade` commands. This is an easy way to [customize the Auto Deploy Helm chart](customize.md#custom-helm-chart). Apps are deployed using the [auto-deploy-app](https://gitlab.com/gitlab-org/charts/auto-deploy-app) chart with @@ -315,7 +315,7 @@ including support for `Deployment` in the `extensions/v1beta1` version. To use Auto Deploy on a Kubernetes 1.16+ cluster, you must: -1. Set the following in the [`.gitlab/auto-deploy-values.yaml` file](index.md#customize-values-for-helm-chart): +1. Set the following in the [`.gitlab/auto-deploy-values.yaml` file](customize.md#customize-values-for-helm-chart): ```yml deploymentApiVersion: apps/v1 @@ -391,7 +391,7 @@ need to: Once you have configured your worker to respond to health checks, run a Sidekiq worker for your Rails application. You can enable workers by setting the -following in the [`.gitlab/auto-deploy-values.yaml` file](index.md#customize-values-for-helm-chart): +following in the [`.gitlab/auto-deploy-values.yaml` file](customize.md#customize-values-for-helm-chart): ```yml workers: diff --git a/doc/topics/autodevops/upgrading_postgresql.md b/doc/topics/autodevops/upgrading_postgresql.md index b2bad21673a..ccb009905eb 100644 --- a/doc/topics/autodevops/upgrading_postgresql.md +++ b/doc/topics/autodevops/upgrading_postgresql.md @@ -1,6 +1,6 @@ # Upgrading PostgreSQL for Auto DevOps -Auto DevOps provides an [in-cluster PostgreSQL database](index.md#postgresql-database-support) +Auto DevOps provides an [in-cluster PostgreSQL database](customize.md#postgresql-database-support) for your application. 
The version of the chart used to provision PostgreSQL: diff --git a/doc/topics/git/numerous_undo_possibilities_in_git/index.md b/doc/topics/git/numerous_undo_possibilities_in_git/index.md index 9c40041414c..9d175b2edb1 100644 --- a/doc/topics/git/numerous_undo_possibilities_in_git/index.md +++ b/doc/topics/git/numerous_undo_possibilities_in_git/index.md @@ -14,7 +14,7 @@ In this tutorial, we will show you different ways of undoing your work in Git, f we will assume you have a basic working knowledge of. Check GitLab's [Git documentation](../index.md) for reference. -Also, we will only provide some general info of the commands, which is enough +Also, we will only provide some general information of the commands, which is enough to get you started for the easy cases/examples, but for anything more advanced please refer to the [Git book](https://git-scm.com/book/en/v2). diff --git a/doc/user/admin_area/settings/continuous_integration.md b/doc/user/admin_area/settings/continuous_integration.md index ebb044fa9d0..4a401210928 100644 --- a/doc/user/admin_area/settings/continuous_integration.md +++ b/doc/user/admin_area/settings/continuous_integration.md @@ -116,7 +116,7 @@ Once saved, you can see the build quota in the group admin view. The quota can also be viewed in the project admin view if shared Runners are enabled. -![Project admin info](img/admin_project_quota_view.png) +![Project admin information](img/admin_project_quota_view.png) You can see an overview of the pipeline minutes quota of all projects of a group in the **Usage Quotas** page available to the group page settings list. diff --git a/doc/user/application_security/container_scanning/index.md b/doc/user/application_security/container_scanning/index.md index e2af69c19e9..5b755916fd8 100644 --- a/doc/user/application_security/container_scanning/index.md +++ b/doc/user/application_security/container_scanning/index.md @@ -367,8 +367,8 @@ the report JSON unless stated otherwise. 
Presence of optional fields depends on | `vulnerabilities[].message` | A short text that describes the vulnerability, it may include occurrence's specific information. Optional. | | `vulnerabilities[].description` | A long text that describes the vulnerability. Optional. | | `vulnerabilities[].cve` | A fingerprint string value that represents a concrete occurrence of the vulnerability. It's used to determine whether two vulnerability occurrences are same or different. May not be 100% accurate. **This is NOT a [CVE](https://cve.mitre.org/)**. | -| `vulnerabilities[].severity` | How much the vulnerability impacts the software. Possible values: `Undefined` (an analyzer has not provided this info), `Info`, `Unknown`, `Low`, `Medium`, `High`, `Critical`. **Note:** Our current container scanning tool based on [klar](https://github.com/optiopay/klar) only provides the following levels: `Unknown`, `Low`, `Medium`, `High`, `Critical`. | -| `vulnerabilities[].confidence` | How reliable the vulnerability's assessment is. Possible values: `Undefined` (an analyzer has not provided this info), `Ignore`, `Unknown`, `Experimental`, `Low`, `Medium`, `High`, `Confirmed`. **Note:** Our current container scanning tool based on [klar](https://github.com/optiopay/klar) does not provide a confidence level, so this value is currently hardcoded to `Unknown`. | +| `vulnerabilities[].severity` | How much the vulnerability impacts the software. Possible values: `Undefined` (an analyzer has not provided this information), `Info`, `Unknown`, `Low`, `Medium`, `High`, `Critical`. **Note:** Our current container scanning tool based on [klar](https://github.com/optiopay/klar) only provides the following levels: `Unknown`, `Low`, `Medium`, `High`, `Critical`. | +| `vulnerabilities[].confidence` | How reliable the vulnerability's assessment is. Possible values: `Undefined` (an analyzer has not provided this information), `Ignore`, `Unknown`, `Experimental`, `Low`, `Medium`, `High`, `Confirmed`. 
**Note:** Our current container scanning tool based on [klar](https://github.com/optiopay/klar) does not provide a confidence level, so this value is currently hardcoded to `Unknown`. | | `vulnerabilities[].solution` | Explanation of how to fix the vulnerability. Optional. | | `vulnerabilities[].scanner` | A node that describes the analyzer used to find this vulnerability. | | `vulnerabilities[].scanner.id` | Id of the scanner as a snake_case string. | diff --git a/doc/user/application_security/dependency_scanning/index.md b/doc/user/application_security/dependency_scanning/index.md index faf885b6552..a178751da7d 100644 --- a/doc/user/application_security/dependency_scanning/index.md +++ b/doc/user/application_security/dependency_scanning/index.md @@ -365,8 +365,8 @@ the report JSON unless stated otherwise. Presence of optional fields depends on | `vulnerabilities[].message` | A short text that describes the vulnerability, it may include occurrence's specific information. Optional. | | `vulnerabilities[].description` | A long text that describes the vulnerability. Optional. | | `vulnerabilities[].cve` | A fingerprint string value that represents a concrete occurrence of the vulnerability. It's used to determine whether two vulnerability occurrences are same or different. May not be 100% accurate. **This is NOT a [CVE](https://cve.mitre.org/)**. | -| `vulnerabilities[].severity` | How much the vulnerability impacts the software. Possible values: `Undefined` (an analyzer has not provided this info), `Info`, `Unknown`, `Low`, `Medium`, `High`, `Critical`. | -| `vulnerabilities[].confidence` | How reliable the vulnerability's assessment is. Possible values: `Undefined` (an analyzer has not provided this info), `Ignore`, `Unknown`, `Experimental`, `Low`, `Medium`, `High`, `Confirmed`. | +| `vulnerabilities[].severity` | How much the vulnerability impacts the software. 
Possible values: `Undefined` (an analyzer has not provided this information), `Info`, `Unknown`, `Low`, `Medium`, `High`, `Critical`. | +| `vulnerabilities[].confidence` | How reliable the vulnerability's assessment is. Possible values: `Undefined` (an analyzer has not provided this information), `Ignore`, `Unknown`, `Experimental`, `Low`, `Medium`, `High`, `Confirmed`. | | `vulnerabilities[].solution` | Explanation of how to fix the vulnerability. Optional. | | `vulnerabilities[].scanner` | A node that describes the analyzer used to find this vulnerability. | | `vulnerabilities[].scanner.id` | Id of the scanner as a snake_case string. | diff --git a/doc/user/application_security/offline_deployments/index.md b/doc/user/application_security/offline_deployments/index.md index 4511b4e80d6..db309357530 100644 --- a/doc/user/application_security/offline_deployments/index.md +++ b/doc/user/application_security/offline_deployments/index.md @@ -73,7 +73,7 @@ hosted within your network. ## Specific scanner instructions Each individual scanner may be slightly different than the steps described -above. You can find more info at each of the pages below: +above. You can find more information at each of the pages below: - [Container scanning offline directions](../container_scanning/index.md#running-container-scanning-in-an-offline-environment) - [SAST offline directions](../sast/index.md#gitlab-sast-in-an-offline-environment) diff --git a/doc/user/application_security/sast/index.md b/doc/user/application_security/sast/index.md index 0930ee6610b..252abc9295f 100644 --- a/doc/user/application_security/sast/index.md +++ b/doc/user/application_security/sast/index.md @@ -443,8 +443,8 @@ the report JSON unless stated otherwise. Presence of optional fields depends on | `vulnerabilities[].message` | A short text that describes the vulnerability, it may include the occurrence's specific information. Optional. 
| | `vulnerabilities[].description` | A long text that describes the vulnerability. Optional. | | `vulnerabilities[].cve` | A fingerprint string value that represents a concrete occurrence of the vulnerability. Is used to determine whether two vulnerability occurrences are same or different. May not be 100% accurate. **This is NOT a [CVE](https://cve.mitre.org/)**. | -| `vulnerabilities[].severity` | How much the vulnerability impacts the software. Possible values: `Undefined` (an analyzer has not provided this info), `Info`, `Unknown`, `Low`, `Medium`, `High`, `Critical`. | -| `vulnerabilities[].confidence` | How reliable the vulnerability's assessment is. Possible values: `Undefined` (an analyzer has not provided this info), `Ignore`, `Unknown`, `Experimental`, `Low`, `Medium`, `High`, `Confirmed`. | +| `vulnerabilities[].severity` | How much the vulnerability impacts the software. Possible values: `Undefined` (an analyzer has not provided this information), `Info`, `Unknown`, `Low`, `Medium`, `High`, `Critical`. | +| `vulnerabilities[].confidence` | How reliable the vulnerability's assessment is. Possible values: `Undefined` (an analyzer has not provided this information), `Ignore`, `Unknown`, `Experimental`, `Low`, `Medium`, `High`, `Confirmed`. | | `vulnerabilities[].solution` | Explanation of how to fix the vulnerability. Optional. | | `vulnerabilities[].scanner` | A node that describes the analyzer used to find this vulnerability. | | `vulnerabilities[].scanner.id` | Id of the scanner as a snake_case string. | diff --git a/doc/user/application_security/security_dashboard/index.md b/doc/user/application_security/security_dashboard/index.md index 4ce6a9403c5..93a325e6777 100644 --- a/doc/user/application_security/security_dashboard/index.md +++ b/doc/user/application_security/security_dashboard/index.md @@ -156,7 +156,7 @@ That way, reports are created even if no code change happens. 
## Security scans using Auto DevOps When using [Auto DevOps](../../../topics/autodevops/index.md), use -[special environment variables](../../../topics/autodevops/index.md#environment-variables) +[special environment variables](../../../topics/autodevops/customize.md#environment-variables) to configure daily security scans. <!-- ## Troubleshooting diff --git a/doc/user/group/index.md b/doc/user/group/index.md index c506da05999..fdcc4105620 100644 --- a/doc/user/group/index.md +++ b/doc/user/group/index.md @@ -99,7 +99,7 @@ To create a new Group, either: Add the following information: -![new group info](img/create_new_group_info.png) +![new group information](img/create_new_group_info.png) 1. The **Group name** will automatically populate the URL. Optionally, you can change it. This is the name that displays in group views. diff --git a/doc/user/index.md b/doc/user/index.md index ac19c5a0de7..cc521c2a767 100644 --- a/doc/user/index.md +++ b/doc/user/index.md @@ -100,7 +100,7 @@ directly from GitLab. No third-party integrations needed. There is a lot you can customize and configure to enjoy the best of GitLab. -- [Settings](profile/index.md): Manage your user settings to change your personal info, +- [Settings](profile/index.md): Manage your user settings to change your personal information, personal access tokens, authorized applications, etc. - [Authentication](../topics/authentication/index.md): Read through the authentication methods available in GitLab. 
diff --git a/doc/user/markdown.md b/doc/user/markdown.md index ca5347ca613..a1f83a47015 100644 --- a/doc/user/markdown.md +++ b/doc/user/markdown.md @@ -110,7 +110,7 @@ changing how standard Markdown is used: | [emphasis](#emphasis) | [multiple underscores in words](#multiple-underscores-in-words-and-mid-word-emphasis) | [headers](#headers) | [linkable Header IDs](#header-ids-and-links) | | [images](#images) | [embedded videos](#videos) and [audio](#audio) | -| [linebreaks](#line-breaks) | [more linebreak control](#newlines) | +| [line breaks](#line-breaks) | [more line break control](#newlines) | | [links](#links) | [automatically linking URLs](#url-auto-linking) | ## New GFM Markdown extensions @@ -256,7 +256,7 @@ when rendered within GitLab, may appear different depending on the OS and browse Most emoji are natively supported on macOS, Windows, iOS, Android and will fallback to image-based emoji where there is lack of support. NOTE: **Note:** On Linux, you can download [Noto Color Emoji](https://www.google.com/get/noto/help/emoji/) -to get full native emoji support. Ubuntu 18.04 (like many modern Linux distros) has +to get full native emoji support. Ubuntu 18.04 (like many modern Linux distributions) has this font installed by default. ### Front matter @@ -1148,7 +1148,7 @@ GFM adheres to the Markdown specification in how [paragraphs and line breaks are A paragraph is one or more consecutive lines of text, separated by one or more blank lines (two newlines at the end of the first paragraph), as [explained above](#line-breaks). -If you need more control over line-breaks or soft returns, you can add a single line-break +If you need more control over line breaks or soft returns, you can add a single line break by ending a line with a backslash, or two or more spaces. Two newlines in a row will create a new paragraph, with a blank line in between: @@ -1163,21 +1163,6 @@ Another line, this time ending with a backslash.\ A new line due to the previous backslash. 
``` -<!-- (Do *NOT* remove the two ending whitespaces in the third line) --> -<!-- (They are needed for the Markdown text to render correctly) --> - -First paragraph. -Another line in the same paragraph. -A third line in the same paragraph, but this time ending with two spaces. -A new line directly under the first paragraph. - -<!-- (Do *NOT* remove the two ending whitespaces in the second line) --> -<!-- (They are needed for the Markdown text to render correctly on docs.gitlab.com, the backslash works fine inside GitLab itself) --> - -Second paragraph. -Another line, this time ending with a backslash. -A new line due to the previous backslash. - ### Links There are two ways to create links, inline-style and reference-style: @@ -1406,7 +1391,7 @@ Example: | header 1 | header 2 | header 3 | | --- | ------ |---------:| | cell 1 | cell 2 | cell 3 | -| cell 4 | cell 5 is longer | cell 6 is much longer than the others, but that's ok. It will eventually wrap the text when the cell is too large for the display size. | +| cell 4 | cell 5 is longer | cell 6 is much longer than the others, but that's okay. It will eventually wrap the text when the cell is too large for the display size. | | cell 7 | | cell <br> 9 | Additionally, you can choose the alignment of text within columns by adding colons (`:`) diff --git a/doc/user/packages/conan_repository/index.md b/doc/user/packages/conan_repository/index.md index aa81b3d056e..ffbd8a848b5 100644 --- a/doc/user/packages/conan_repository/index.md +++ b/doc/user/packages/conan_repository/index.md @@ -246,9 +246,9 @@ conan search Hello/0.1@my-group+my-project/beta --all --remote=gitlab The scope of your search will include all projects you have permission to access, this includes your private projects as well as all public projects. 
-## Fetching Conan package info from the GitLab Package Registry +## Fetching Conan package information from the GitLab Package Registry -The `conan info` command will return info about a given package: +The `conan info` command will return information about a given package: ```shell conan info Hello/0.1@my-group+my-project/beta @@ -261,7 +261,7 @@ The GitLab Conan repository supports the following Conan CLI commands: - `conan upload`: Upload your recipe and package files to the GitLab Package Registry. - `conan install`: Install a conan package from the GitLab Package Registry, this includes using the `conanfile.txt` file. - `conan search`: Search the GitLab Package Registry for public packages, and private packages you have permission to view. -- `conan info`: View the info on a given package from the GitLab Package Registry. +- `conan info`: View the information on a given package from the GitLab Package Registry. - `conan remove`: Delete the package from the GitLab Package Registry. ## Using GitLab CI with Conan packages diff --git a/doc/user/project/clusters/add_eks_clusters.md b/doc/user/project/clusters/add_eks_clusters.md index 6bde461dfa6..82b3cb49075 100644 --- a/doc/user/project/clusters/add_eks_clusters.md +++ b/doc/user/project/clusters/add_eks_clusters.md @@ -279,7 +279,7 @@ If a default Storage Class doesn't already exist and is desired, follow Amazon's to create one. Alternatively, disable PostgreSQL by setting the project variable -[`POSTGRES_ENABLED`](../../../topics/autodevops/#environment-variables) to `false`. +[`POSTGRES_ENABLED`](../../../topics/autodevops/customize.md#environment-variables) to `false`. ### Deploy the app to EKS diff --git a/doc/user/project/integrations/irker.md b/doc/user/project/integrations/irker.md index f7e129d1e7f..db5397ee7d5 100644 --- a/doc/user/project/integrations/irker.md +++ b/doc/user/project/integrations/irker.md @@ -4,7 +4,7 @@ GitLab provides a way to push update messages to an Irker server. 
When configured, pushes to a project will trigger the service to send data directly to the Irker server. -See the project homepage for further info: <https://gitlab.com/esr/irker> +See the project homepage for further information: <https://gitlab.com/esr/irker> ## Needed setup diff --git a/doc/user/project/integrations/prometheus.md b/doc/user/project/integrations/prometheus.md index 6d848f73cb6..bbed9ae4686 100644 --- a/doc/user/project/integrations/prometheus.md +++ b/doc/user/project/integrations/prometheus.md @@ -775,7 +775,7 @@ GitLab unfurls the link as an embedded metrics panel: ![Embedded Metrics Rendered](img/embedded_metrics_rendered_v12_8.png) You can also embed a single chart. To get a link to a chart, click the -**{ellipsis_v}** **More info** menu in the upper right corner of the chart, +**{ellipsis_v}** **More actions** menu in the upper right corner of the chart, and select **Copy link to chart**, as shown in this example: ![Copy Link To Chart](img/copy_link_to_chart_v12_10.png) diff --git a/doc/user/project/merge_requests/reviewing_and_managing_merge_requests.md b/doc/user/project/merge_requests/reviewing_and_managing_merge_requests.md index cde6ffaa48c..bb5aadfa9b9 100644 --- a/doc/user/project/merge_requests/reviewing_and_managing_merge_requests.md +++ b/doc/user/project/merge_requests/reviewing_and_managing_merge_requests.md @@ -116,7 +116,7 @@ environment. Deployments that are ongoing will be shown, as well as the deploying/deployed state for environments. If it's the first time the branch is deployed, the link will return a `404` error until done. During the deployment, the stop button will -be disabled. If the pipeline fails to deploy, the deployment info will be hidden. +be disabled. If the pipeline fails to deploy, the deployment information will be hidden. 
![Merge request pipeline](img/merge_request_pipeline.png) diff --git a/doc/user/project/web_ide/index.md b/doc/user/project/web_ide/index.md index 23b72e33aae..c98448ff904 100644 --- a/doc/user/project/web_ide/index.md +++ b/doc/user/project/web_ide/index.md @@ -270,7 +270,7 @@ terminal: - The `webide-file-sync` executable must start **after** the project directory is available. This is why we need to add `sleep 5` to the `command`. See [this issue](https://gitlab.com/gitlab-org/webide-file-sync/issues/7) for - more info. + more information. - `$CI_PROJECT_DIR` is a [predefined environment variable](../../../ci/variables/predefined_variables.md) for GitLab Runners. This is where your project's repository will be. diff --git a/lib/gitlab/graphql/connections.rb b/lib/gitlab/graphql/connections.rb deleted file mode 100644 index 08d5cd0b72e..00000000000 --- a/lib/gitlab/graphql/connections.rb +++ /dev/null @@ -1,22 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module Graphql - module Connections - def self.use(_schema) - GraphQL::Relay::BaseConnection.register_connection_implementation( - ActiveRecord::Relation, - Gitlab::Graphql::Connections::Keyset::Connection - ) - GraphQL::Relay::BaseConnection.register_connection_implementation( - Gitlab::Graphql::FilterableArray, - Gitlab::Graphql::Connections::FilterableArrayConnection - ) - GraphQL::Relay::BaseConnection.register_connection_implementation( - Gitlab::Graphql::ExternallyPaginatedArray, - Gitlab::Graphql::Connections::ExternallyPaginatedArrayConnection - ) - end - end - end -end diff --git a/lib/gitlab/graphql/pagination/connections.rb b/lib/gitlab/graphql/pagination/connections.rb new file mode 100644 index 00000000000..febdc938317 --- /dev/null +++ b/lib/gitlab/graphql/pagination/connections.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +module Gitlab + module Graphql + module Pagination + module Connections + def self.use(schema) + schema.connections.add( + ActiveRecord::Relation, + 
Gitlab::Graphql::Pagination::Keyset::Connection) + + schema.connections.add( + Gitlab::Graphql::FilterableArray, + Gitlab::Graphql::Pagination::FilterableArrayConnection) + + schema.connections.add( + Gitlab::Graphql::ExternallyPaginatedArray, + Gitlab::Graphql::Pagination::ExternallyPaginatedArrayConnection) + end + end + end + end +end diff --git a/lib/gitlab/graphql/connections/externally_paginated_array_connection.rb b/lib/gitlab/graphql/pagination/externally_paginated_array_connection.rb index f0861260691..1f01dd07571 100644 --- a/lib/gitlab/graphql/connections/externally_paginated_array_connection.rb +++ b/lib/gitlab/graphql/pagination/externally_paginated_array_connection.rb @@ -3,20 +3,14 @@ # Make a customized connection type module Gitlab module Graphql - module Connections - class ExternallyPaginatedArrayConnection < GraphQL::Relay::ArrayConnection - # As the pagination happens externally - # we just return all the nodes here. - def sliced_nodes - @nodes - end - + module Pagination + class ExternallyPaginatedArrayConnection < GraphQL::Pagination::ArrayConnection def start_cursor - nodes.previous_cursor + items.previous_cursor end def end_cursor - nodes.next_cursor + items.next_cursor end def next_page? diff --git a/lib/gitlab/graphql/connections/filterable_array_connection.rb b/lib/gitlab/graphql/pagination/filterable_array_connection.rb index 800f2c949c6..4a76cd5fb00 100644 --- a/lib/gitlab/graphql/connections/filterable_array_connection.rb +++ b/lib/gitlab/graphql/pagination/filterable_array_connection.rb @@ -2,14 +2,14 @@ module Gitlab module Graphql - module Connections + module Pagination # FilterableArrayConnection is useful especially for lazy-loaded values. # It allows us to call a callback only on the slice of array being # rendered in the "after loaded" phase. For example we can check # permissions only on a small subset of items. 
- class FilterableArrayConnection < GraphQL::Relay::ArrayConnection - def paged_nodes - @filtered_nodes ||= nodes.filter_callback.call(super) + class FilterableArrayConnection < GraphQL::Pagination::ArrayConnection + def nodes + @nodes ||= items.filter_callback.call(super) end end end diff --git a/lib/gitlab/graphql/connections/keyset/conditions/base_condition.rb b/lib/gitlab/graphql/pagination/keyset/conditions/base_condition.rb index 26c9d77a8df..afea7c602be 100644 --- a/lib/gitlab/graphql/connections/keyset/conditions/base_condition.rb +++ b/lib/gitlab/graphql/pagination/keyset/conditions/base_condition.rb @@ -2,7 +2,7 @@ module Gitlab module Graphql - module Connections + module Pagination module Keyset module Conditions class BaseCondition diff --git a/lib/gitlab/graphql/connections/keyset/conditions/not_null_condition.rb b/lib/gitlab/graphql/pagination/keyset/conditions/not_null_condition.rb index 3239d27c0cd..3164598b7b9 100644 --- a/lib/gitlab/graphql/connections/keyset/conditions/not_null_condition.rb +++ b/lib/gitlab/graphql/pagination/keyset/conditions/not_null_condition.rb @@ -2,7 +2,7 @@ module Gitlab module Graphql - module Connections + module Pagination module Keyset module Conditions class NotNullCondition < BaseCondition diff --git a/lib/gitlab/graphql/connections/keyset/conditions/null_condition.rb b/lib/gitlab/graphql/pagination/keyset/conditions/null_condition.rb index 18ea0692e2c..fa25181d663 100644 --- a/lib/gitlab/graphql/connections/keyset/conditions/null_condition.rb +++ b/lib/gitlab/graphql/pagination/keyset/conditions/null_condition.rb @@ -2,7 +2,7 @@ module Gitlab module Graphql - module Connections + module Pagination module Keyset module Conditions class NullCondition < BaseCondition diff --git a/lib/gitlab/graphql/connections/keyset/connection.rb b/lib/gitlab/graphql/pagination/keyset/connection.rb index 5de075f2f7a..5466924a794 100644 --- a/lib/gitlab/graphql/connections/keyset/connection.rb +++ 
b/lib/gitlab/graphql/pagination/keyset/connection.rb @@ -27,21 +27,21 @@ # module Gitlab module Graphql - module Connections + module Pagination module Keyset - class Connection < GraphQL::Relay::BaseConnection + class Connection < GraphQL::Pagination::ActiveRecordRelationConnection include Gitlab::Utils::StrongMemoize - def cursor_from_node(node) + def cursor_for(node) encoded_json_from_ordering(node) end def sliced_nodes @sliced_nodes ||= begin - OrderInfo.validate_ordering(ordered_nodes, order_list) + OrderInfo.validate_ordering(ordered_items, order_list) - sliced = ordered_nodes + sliced = ordered_items sliced = slice_nodes(sliced, before, :before) if before.present? sliced = slice_nodes(sliced, after, :after) if after.present? @@ -49,12 +49,12 @@ module Gitlab end end - def paged_nodes + def nodes # These are the nodes that will be loaded into memory for rendering # So we're ok loading them into memory here as that's bound to happen # anyway. Having them ready means we can modify the result while # rendering the fields. - @paged_nodes ||= load_paged_nodes.to_a + @nodes ||= load_paged_nodes.to_a end private @@ -85,31 +85,31 @@ module Gitlab @limit_value ||= [first, last, max_page_size].compact.min end - def ordered_nodes - strong_memoize(:order_nodes) do - unless nodes.primary_key.present? + def ordered_items + strong_memoize(:ordered_items) do + unless items.primary_key.present? 
raise ArgumentError.new('Relation must have a primary key') end - list = OrderInfo.build_order_list(nodes) + list = OrderInfo.build_order_list(items) # ensure there is a primary key ordering - if list&.last&.attribute_name != nodes.primary_key - nodes.order(arel_table[nodes.primary_key].desc) # rubocop: disable CodeReuse/ActiveRecord + if list&.last&.attribute_name != items.primary_key + items.order(arel_table[items.primary_key].desc) # rubocop: disable CodeReuse/ActiveRecord else - nodes + items end end end def order_list strong_memoize(:order_list) do - OrderInfo.build_order_list(ordered_nodes) + OrderInfo.build_order_list(ordered_items) end end def arel_table - nodes.arel_table + items.arel_table end # Storing the current order values in the cursor allows us to diff --git a/lib/gitlab/graphql/connections/keyset/order_info.rb b/lib/gitlab/graphql/pagination/keyset/order_info.rb index 7f61bf937b4..876d6114f3c 100644 --- a/lib/gitlab/graphql/connections/keyset/order_info.rb +++ b/lib/gitlab/graphql/pagination/keyset/order_info.rb @@ -2,7 +2,7 @@ module Gitlab module Graphql - module Connections + module Pagination module Keyset class OrderInfo attr_reader :attribute_name, :sort_direction, :named_function diff --git a/lib/gitlab/graphql/connections/keyset/query_builder.rb b/lib/gitlab/graphql/pagination/keyset/query_builder.rb index fe85898f638..331981ce723 100644 --- a/lib/gitlab/graphql/connections/keyset/query_builder.rb +++ b/lib/gitlab/graphql/pagination/keyset/query_builder.rb @@ -2,7 +2,7 @@ module Gitlab module Graphql - module Connections + module Pagination module Keyset class QueryBuilder def initialize(arel_table, order_list, decoded_cursor, before_or_after) diff --git a/lib/gitlab/import_export/group/tree_restorer.rb b/lib/gitlab/import_export/group/tree_restorer.rb index f6ebd83bfaa..323e6727a9f 100644 --- a/lib/gitlab/import_export/group/tree_restorer.rb +++ b/lib/gitlab/import_export/group/tree_restorer.rb @@ -20,6 +20,7 @@ module Gitlab def restore 
@group_attributes = relation_reader.consume_attributes(nil) @group_members = relation_reader.consume_relation(nil, 'members') + .map(&:first) # We need to remove `name` and `path` as we did consume it in previous pass @group_attributes.delete('name') diff --git a/lib/gitlab/import_export/json/legacy_reader.rb b/lib/gitlab/import_export/json/legacy_reader.rb index 57579fe9def..12d6458aedc 100644 --- a/lib/gitlab/import_export/json/legacy_reader.rb +++ b/lib/gitlab/import_export/json/legacy_reader.rb @@ -53,6 +53,7 @@ module Gitlab def initialize(relation_names:, allowed_path:) @relation_names = relation_names.map(&:to_s) + @consumed_relations = Set.new # This is legacy reader, to be used in transition # period before `.ndjson`, @@ -81,17 +82,19 @@ module Gitlab raise ArgumentError, "Invalid #{importable_name} passed to `consume_relation`. Use #{@allowed_path} instead." end - value = relations.delete(key) + Enumerator.new do |documents| + next unless @consumed_relations.add?("#{importable_path}/#{key}") - return value unless block_given? - return if value.nil? + value = relations.delete(key) + next if value.nil? - if value.is_a?(Array) - value.each.with_index do |item, idx| - yield(item, idx) + if value.is_a?(Array) + value.each.with_index do |item, idx| + documents << [item, idx] + end + else + documents << [value, 0] end - else - yield(value, 0) end end diff --git a/lib/gitlab/import_export/json/ndjson_reader.rb b/lib/gitlab/import_export/json/ndjson_reader.rb new file mode 100644 index 00000000000..e9b05afc7d4 --- /dev/null +++ b/lib/gitlab/import_export/json/ndjson_reader.rb @@ -0,0 +1,61 @@ +# frozen_string_literal: true + +module Gitlab + module ImportExport + module JSON + class NdjsonReader + MAX_JSON_DOCUMENT_SIZE = 50.megabytes + + attr_reader :dir_path + + def initialize(dir_path) + @dir_path = dir_path + @consumed_relations = Set.new + end + + def exist? + Dir.exist?(@dir_path) + end + + # This can be removed once legacy_reader is deprecated. 
+ def legacy? + false + end + + def consume_attributes(importable_path) + # This reads from `tree/project.json` + path = file_path("#{importable_path}.json") + data = File.read(path, MAX_JSON_DOCUMENT_SIZE) + json_decode(data) + end + + def consume_relation(importable_path, key) + Enumerator.new do |documents| + next unless @consumed_relations.add?("#{importable_path}/#{key}") + + # This reads from `tree/project/merge_requests.ndjson` + path = file_path(importable_path, "#{key}.ndjson") + next unless File.exist?(path) + + File.foreach(path, MAX_JSON_DOCUMENT_SIZE).with_index do |line, line_num| + documents << [json_decode(line), line_num] + end + end + end + + private + + def json_decode(string) + ActiveSupport::JSON.decode(string) + rescue ActiveSupport::JSON.parse_error => e + Gitlab::ErrorTracking.log_exception(e) + raise Gitlab::ImportExport::Error, 'Incorrect JSON format' + end + + def file_path(*path) + File.join(dir_path, *path) + end + end + end + end +end diff --git a/lib/gitlab/import_export/project/tree_restorer.rb b/lib/gitlab/import_export/project/tree_restorer.rb index 99e57d9decd..ad3720b56be 100644 --- a/lib/gitlab/import_export/project/tree_restorer.rb +++ b/lib/gitlab/import_export/project/tree_restorer.rb @@ -17,8 +17,13 @@ module Gitlab end def restore + unless relation_reader + raise Gitlab::ImportExport::Error, 'invalid import format' + end + @project_attributes = relation_reader.consume_attributes(importable_path) @project_members = relation_reader.consume_relation(importable_path, 'project_members') + .map(&:first) if relation_tree_restorer.restore import_failure_service.with_retry(action: 'set_latest_merge_request_diff_ids!') do @@ -38,14 +43,27 @@ module Gitlab def relation_reader strong_memoize(:relation_reader) do - ImportExport::JSON::LegacyReader::File.new( - File.join(shared.export_path, 'project.json'), - relation_names: reader.project_relation_names, - allowed_path: importable_path - ) + [ndjson_relation_reader, 
legacy_relation_reader] + .compact.find(&:exist?) end end + def ndjson_relation_reader + return unless Feature.enabled?(:project_import_ndjson, project.namespace) + + ImportExport::JSON::NdjsonReader.new( + File.join(shared.export_path, 'tree') + ) + end + + def legacy_relation_reader + ImportExport::JSON::LegacyReader::File.new( + File.join(shared.export_path, 'project.json'), + relation_names: reader.project_relation_names, + allowed_path: importable_path + ) + end + def relation_tree_restorer @relation_tree_restorer ||= RelationTreeRestorer.new( user: @user, diff --git a/lib/gitlab/import_export/relation_tree_restorer.rb b/lib/gitlab/import_export/relation_tree_restorer.rb index 78ed365cea0..056945d0294 100644 --- a/lib/gitlab/import_export/relation_tree_restorer.rb +++ b/lib/gitlab/import_export/relation_tree_restorer.rb @@ -67,7 +67,7 @@ module Gitlab end def process_relation!(relation_key, relation_definition) - @relation_reader.consume_relation(@importable_path, relation_key) do |data_hash, relation_index| + @relation_reader.consume_relation(@importable_path, relation_key).each do |data_hash, relation_index| process_relation_item!(relation_key, relation_definition, relation_index, data_hash) end end diff --git a/spec/fixtures/lib/gitlab/import_export/complex/tree.tar.gz b/spec/fixtures/lib/gitlab/import_export/complex/tree.tar.gz Binary files differnew file mode 100644 index 00000000000..feb1a70a89e --- /dev/null +++ b/spec/fixtures/lib/gitlab/import_export/complex/tree.tar.gz diff --git a/spec/fixtures/lib/gitlab/import_export/group/tree.tar.gz b/spec/fixtures/lib/gitlab/import_export/group/tree.tar.gz Binary files differnew file mode 100644 index 00000000000..0788ca18fb3 --- /dev/null +++ b/spec/fixtures/lib/gitlab/import_export/group/tree.tar.gz diff --git a/spec/fixtures/lib/gitlab/import_export/invalid_json/tree.tar.gz b/spec/fixtures/lib/gitlab/import_export/invalid_json/tree.tar.gz Binary files differnew file mode 100644 index 00000000000..6524ed5042c 
--- /dev/null +++ b/spec/fixtures/lib/gitlab/import_export/invalid_json/tree.tar.gz diff --git a/spec/fixtures/lib/gitlab/import_export/light/project.json b/spec/fixtures/lib/gitlab/import_export/light/project.json index eaed8565a18..326a2cef9ff 100644 --- a/spec/fixtures/lib/gitlab/import_export/light/project.json +++ b/spec/fixtures/lib/gitlab/import_export/light/project.json @@ -186,5 +186,23 @@ } ], "snippets": [], - "hooks": [] + "hooks": [], + "custom_attributes": [ + { + "id": 201, + "project_id": 5, + "created_at": "2016-06-14T15:01:51.315Z", + "updated_at": "2016-06-14T15:01:51.315Z", + "key": "color", + "value": "red" + }, + { + "id": 202, + "project_id": 5, + "created_at": "2016-06-14T15:01:51.315Z", + "updated_at": "2016-06-14T15:01:51.315Z", + "key": "size", + "value": "small" + } + ] } diff --git a/spec/fixtures/lib/gitlab/import_export/light/tree.tar.gz b/spec/fixtures/lib/gitlab/import_export/light/tree.tar.gz Binary files differnew file mode 100644 index 00000000000..eac19c23b44 --- /dev/null +++ b/spec/fixtures/lib/gitlab/import_export/light/tree.tar.gz diff --git a/spec/fixtures/lib/gitlab/import_export/milestone-iid/tree.tar.gz b/spec/fixtures/lib/gitlab/import_export/milestone-iid/tree.tar.gz Binary files differnew file mode 100644 index 00000000000..726afa0bfa4 --- /dev/null +++ b/spec/fixtures/lib/gitlab/import_export/milestone-iid/tree.tar.gz diff --git a/spec/fixtures/lib/gitlab/import_export/multi_pipeline_ref_one_external_pr/tree.tar.gz b/spec/fixtures/lib/gitlab/import_export/multi_pipeline_ref_one_external_pr/tree.tar.gz Binary files differnew file mode 100644 index 00000000000..13f3d3c6791 --- /dev/null +++ b/spec/fixtures/lib/gitlab/import_export/multi_pipeline_ref_one_external_pr/tree.tar.gz diff --git a/spec/fixtures/lib/gitlab/import_export/with_invalid_records/tree.tar.gz b/spec/fixtures/lib/gitlab/import_export/with_invalid_records/tree.tar.gz Binary files differnew file mode 100644 index 00000000000..24c51e72d7d --- /dev/null 
+++ b/spec/fixtures/lib/gitlab/import_export/with_invalid_records/tree.tar.gz diff --git a/spec/graphql/gitlab_schema_spec.rb b/spec/graphql/gitlab_schema_spec.rb index 50b210cdbca..0f21a55f7e9 100644 --- a/spec/graphql/gitlab_schema_spec.rb +++ b/spec/graphql/gitlab_schema_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' describe GitlabSchema do - let_it_be(:implementations) { GraphQL::Relay::BaseConnection::CONNECTION_IMPLEMENTATIONS } + let_it_be(:connections) { GitlabSchema.connections.all_wrappers } let(:user) { build :user } it 'uses batch loading' do @@ -34,22 +34,22 @@ describe GitlabSchema do expect(described_class.query).to eq(::Types::QueryType) end - it 'paginates active record relations using `Connections::Keyset::Connection`' do - connection = implementations[ActiveRecord::Relation.name] + it 'paginates active record relations using `Pagination::Keyset::Connection`' do + connection = connections[ActiveRecord::Relation] - expect(connection).to eq(Gitlab::Graphql::Connections::Keyset::Connection) + expect(connection).to eq(Gitlab::Graphql::Pagination::Keyset::Connection) end - it 'paginates ExternallyPaginatedArray using `Connections::ExternallyPaginatedArrayConnection`' do - connection = implementations[Gitlab::Graphql::ExternallyPaginatedArray.name] + it 'paginates ExternallyPaginatedArray using `Pagination::ExternallyPaginatedArrayConnection`' do + connection = connections[Gitlab::Graphql::ExternallyPaginatedArray] - expect(connection).to eq(Gitlab::Graphql::Connections::ExternallyPaginatedArrayConnection) + expect(connection).to eq(Gitlab::Graphql::Pagination::ExternallyPaginatedArrayConnection) end - it 'paginates FilterableArray using `Connections::FilterableArrayConnection`' do - connection = implementations[Gitlab::Graphql::FilterableArray.name] + it 'paginates FilterableArray using `Pagination::FilterableArrayConnection`' do + connection = connections[Gitlab::Graphql::FilterableArray] - expect(connection).to 
eq(Gitlab::Graphql::Connections::FilterableArrayConnection) + expect(connection).to eq(Gitlab::Graphql::Pagination::FilterableArrayConnection) end describe '.execute' do diff --git a/spec/lib/gitlab/graphql/connections/externally_paginated_array_connection_spec.rb b/spec/lib/gitlab/graphql/pagination/externally_paginated_array_connection_spec.rb index 83c94ed6260..85a5b1dacc7 100644 --- a/spec/lib/gitlab/graphql/connections/externally_paginated_array_connection_spec.rb +++ b/spec/lib/gitlab/graphql/pagination/externally_paginated_array_connection_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -describe Gitlab::Graphql::Connections::ExternallyPaginatedArrayConnection do +describe Gitlab::Graphql::Pagination::ExternallyPaginatedArrayConnection do let(:prev_cursor) { 1 } let(:next_cursor) { 6 } let(:values) { [2, 3, 4, 5] } @@ -10,21 +10,13 @@ describe Gitlab::Graphql::Connections::ExternallyPaginatedArrayConnection do let(:arguments) { {} } subject(:connection) do - described_class.new(all_nodes, arguments) + described_class.new(all_nodes, { max_page_size: values.size }.merge(arguments)) end - describe '#sliced_nodes' do - let(:sliced_nodes) { connection.sliced_nodes } + describe '#nodes' do + let(:paged_nodes) { connection.nodes } - it 'returns all the nodes' do - expect(connection.sliced_nodes).to eq(values) - end - end - - describe '#paged_nodes' do - let(:paged_nodes) { connection.send(:paged_nodes) } - - it_behaves_like "connection with paged nodes" do + it_behaves_like 'connection with paged nodes' do let(:paged_nodes_size) { values.size } end end diff --git a/spec/lib/gitlab/graphql/connections/filterable_array_connection_spec.rb b/spec/lib/gitlab/graphql/pagination/filterable_array_connection_spec.rb index b2f0862be62..c82e3ad3019 100644 --- a/spec/lib/gitlab/graphql/connections/filterable_array_connection_spec.rb +++ b/spec/lib/gitlab/graphql/pagination/filterable_array_connection_spec.rb @@ -2,19 +2,19 @@ require 'spec_helper' -describe 
Gitlab::Graphql::Connections::FilterableArrayConnection do +describe Gitlab::Graphql::Pagination::FilterableArrayConnection do let(:callback) { proc { |nodes| nodes } } let(:all_nodes) { Gitlab::Graphql::FilterableArray.new(callback, 1, 2, 3, 4, 5) } let(:arguments) { {} } subject(:connection) do - described_class.new(all_nodes, arguments, max_page_size: 3) + described_class.new(all_nodes, { max_page_size: 3 }.merge(arguments)) end - describe '#paged_nodes' do - let(:paged_nodes) { subject.paged_nodes } + describe '#nodes' do + let(:paged_nodes) { subject.nodes } - it_behaves_like "connection with paged nodes" do + it_behaves_like 'connection with paged nodes' do let(:paged_nodes_size) { 3 } end @@ -22,7 +22,7 @@ describe Gitlab::Graphql::Connections::FilterableArrayConnection do let(:callback) { proc { |nodes| nodes[1..-1] } } it 'does not return filtered elements' do - expect(subject.paged_nodes).to contain_exactly(all_nodes[1], all_nodes[2]) + expect(subject.nodes).to contain_exactly(all_nodes[1], all_nodes[2]) end end end diff --git a/spec/lib/gitlab/graphql/connections/keyset/conditions/not_null_condition_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/conditions/not_null_condition_spec.rb index 26fc5344871..33dc6bdea97 100644 --- a/spec/lib/gitlab/graphql/connections/keyset/conditions/not_null_condition_spec.rb +++ b/spec/lib/gitlab/graphql/pagination/keyset/conditions/not_null_condition_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -describe Gitlab::Graphql::Connections::Keyset::Conditions::NotNullCondition do +describe Gitlab::Graphql::Pagination::Keyset::Conditions::NotNullCondition do describe '#build' do let(:operators) { ['>', '>'] } let(:before_or_after) { :after } @@ -75,7 +75,7 @@ describe Gitlab::Graphql::Connections::Keyset::Conditions::NotNullCondition do context 'when ordering by LOWER' do let(:arel_table) { Project.arel_table } let(:relation) { Project.order(arel_table['name'].lower.asc).order(:id) } - let(:order_list) { 
Gitlab::Graphql::Connections::Keyset::OrderInfo.build_order_list(relation) } + let(:order_list) { Gitlab::Graphql::Pagination::Keyset::OrderInfo.build_order_list(relation) } let(:values) { ['Test', 500] } context 'when :after' do diff --git a/spec/lib/gitlab/graphql/connections/keyset/conditions/null_condition_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/conditions/null_condition_spec.rb index be0a21b2438..1c74f2fb0ab 100644 --- a/spec/lib/gitlab/graphql/connections/keyset/conditions/null_condition_spec.rb +++ b/spec/lib/gitlab/graphql/pagination/keyset/conditions/null_condition_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -describe Gitlab::Graphql::Connections::Keyset::Conditions::NullCondition do +describe Gitlab::Graphql::Pagination::Keyset::Conditions::NullCondition do describe '#build' do let(:values) { [nil, 500] } let(:operators) { [nil, '>'] } @@ -58,7 +58,7 @@ describe Gitlab::Graphql::Connections::Keyset::Conditions::NullCondition do context 'when ordering by LOWER' do let(:arel_table) { Project.arel_table } let(:relation) { Project.order(arel_table['name'].lower.asc).order(:id) } - let(:order_list) { Gitlab::Graphql::Connections::Keyset::OrderInfo.build_order_list(relation) } + let(:order_list) { Gitlab::Graphql::Pagination::Keyset::OrderInfo.build_order_list(relation) } context 'when :after' do it 'generates sql' do diff --git a/spec/lib/gitlab/graphql/connections/keyset/connection_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb index c193ab2b50f..fdacecbaca6 100644 --- a/spec/lib/gitlab/graphql/connections/keyset/connection_spec.rb +++ b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb @@ -2,25 +2,28 @@ require 'spec_helper' -describe Gitlab::Graphql::Connections::Keyset::Connection do +describe Gitlab::Graphql::Pagination::Keyset::Connection do let(:nodes) { Project.all.order(id: :asc) } let(:arguments) { {} } + let(:query_type) { GraphQL::ObjectType.new } + let(:schema) { GraphQL::Schema.define(query: 
query_type, mutation: nil)} + let(:context) { GraphQL::Query::Context.new(query: OpenStruct.new(schema: schema), values: nil, object: nil) } subject(:connection) do - described_class.new(nodes, arguments, max_page_size: 3) + described_class.new(nodes, { context: context, max_page_size: 3 }.merge(arguments)) end def encoded_cursor(node) - described_class.new(nodes, {}).cursor_from_node(node) + described_class.new(nodes, { context: context }).cursor_for(node) end def decoded_cursor(cursor) JSON.parse(Base64Bp.urlsafe_decode64(cursor)) end - describe '#cursor_from_nodes' do + describe '#cursor_for' do let(:project) { create(:project) } - let(:cursor) { connection.cursor_from_node(project) } + let(:cursor) { connection.cursor_for(project) } it 'returns an encoded ID' do expect(decoded_cursor(cursor)).to eq('id' => project.id.to_s) @@ -264,11 +267,11 @@ describe Gitlab::Graphql::Connections::Keyset::Connection do end end - describe '#paged_nodes' do + describe '#nodes' do let_it_be(:all_nodes) { create_list(:project, 5) } - let(:paged_nodes) { subject.paged_nodes } + let(:paged_nodes) { subject.nodes } - it_behaves_like "connection with paged nodes" do + it_behaves_like 'connection with paged nodes' do let(:paged_nodes_size) { 3 } end diff --git a/spec/lib/gitlab/graphql/connections/keyset/order_info_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/order_info_spec.rb index eb823fc0122..cb5656d7c00 100644 --- a/spec/lib/gitlab/graphql/connections/keyset/order_info_spec.rb +++ b/spec/lib/gitlab/graphql/pagination/keyset/order_info_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -describe Gitlab::Graphql::Connections::Keyset::OrderInfo do +describe Gitlab::Graphql::Pagination::Keyset::OrderInfo do describe '#build_order_list' do let(:order_list) { described_class.build_order_list(relation) } diff --git a/spec/lib/gitlab/graphql/connections/keyset/query_builder_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/query_builder_spec.rb index b46ce4bf023..7fd5b889cf9 100644 
--- a/spec/lib/gitlab/graphql/connections/keyset/query_builder_spec.rb +++ b/spec/lib/gitlab/graphql/pagination/keyset/query_builder_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -describe Gitlab::Graphql::Connections::Keyset::QueryBuilder do +describe Gitlab::Graphql::Pagination::Keyset::QueryBuilder do context 'when number of ordering fields is 0' do it 'raises an error' do expect { described_class.new(Issue.arel_table, [], {}, :after) } @@ -12,7 +12,7 @@ describe Gitlab::Graphql::Connections::Keyset::QueryBuilder do describe '#conditions' do let(:relation) { Issue.order(relative_position: :desc).order(:id) } - let(:order_list) { Gitlab::Graphql::Connections::Keyset::OrderInfo.build_order_list(relation) } + let(:order_list) { Gitlab::Graphql::Pagination::Keyset::OrderInfo.build_order_list(relation) } let(:arel_table) { Issue.arel_table } let(:builder) { described_class.new(arel_table, order_list, decoded_cursor, before_or_after) } let(:before_or_after) { :after } diff --git a/spec/lib/gitlab/import_export/json/legacy_reader/shared_example.rb b/spec/lib/gitlab/import_export/json/legacy_reader/shared_example.rb index 297a5946703..3e9bd3fe741 100644 --- a/spec/lib/gitlab/import_export/json/legacy_reader/shared_example.rb +++ b/spec/lib/gitlab/import_export/json/legacy_reader/shared_example.rb @@ -15,7 +15,6 @@ RSpec.shared_examples 'import/export json legacy reader' do subject { legacy_reader.consume_attributes("project") } context 'no excluded attributes' do - let(:excluded_attributes) { [] } let(:relation_names) { [] } it 'returns the whole tree from parsed JSON' do @@ -42,60 +41,53 @@ RSpec.shared_examples 'import/export json legacy reader' do describe '#consume_relation' do context 'when valid path is passed' do - let(:key) { 'description' } + let(:key) { 'labels' } - context 'block not given' do - it 'returns value of the key' do - expect(legacy_reader).to receive(:relations).and_return({ key => 'test value' }) - expect(legacy_reader.consume_relation("project", 
key)).to eq('test value') - end - end + subject { legacy_reader.consume_relation("project", key) } - context 'key has been consumed' do - before do - legacy_reader.consume_relation("project", key) + context 'key has not been consumed' do + it 'returns an Enumerator' do + expect(subject).to be_an_instance_of(Enumerator) end - it 'does not yield' do - expect do |blk| - legacy_reader.consume_relation("project", key, &blk) - end.not_to yield_control - end - end + context 'value is nil' do + before do + expect(legacy_reader).to receive(:relations).and_return({ key => nil }) + end - context 'value is nil' do - before do - expect(legacy_reader).to receive(:relations).and_return({ key => nil }) + it 'yields nothing to the Enumerator' do + expect(subject.to_a).to eq([]) + end end - it 'does not yield' do - expect do |blk| - legacy_reader.consume_relation("project", key, &blk) - end.not_to yield_control - end - end + context 'value is an array' do + before do + expect(legacy_reader).to receive(:relations).and_return({ key => %w[label1 label2] }) + end - context 'value is not array' do - before do - expect(legacy_reader).to receive(:relations).and_return({ key => 'value' }) + it 'yields every relation value to the Enumerator' do + expect(subject.to_a).to eq([['label1', 0], ['label2', 1]]) + end end - it 'yield the value with index 0' do - expect do |blk| - legacy_reader.consume_relation("project", key, &blk) - end.to yield_with_args('value', 0) + context 'value is not array' do + before do + expect(legacy_reader).to receive(:relations).and_return({ key => 'non-array value' }) + end + + it 'yields the value with index 0 to the Enumerator' do + expect(subject.to_a).to eq([['non-array value', 0]]) + end end end - context 'value is an array' do + context 'key has been consumed' do before do - expect(legacy_reader).to receive(:relations).and_return({ key => %w[item1 item2 item3] }) + legacy_reader.consume_relation("project", key).first end - it 'yield each array element with 
index' do - expect do |blk| - legacy_reader.consume_relation("project", key, &blk) - end.to yield_successive_args(['item1', 0], ['item2', 1], ['item3', 2]) + it 'yields nothing to the Enumerator' do + expect(subject.to_a).to eq([]) end end end diff --git a/spec/lib/gitlab/import_export/json/ndjson_reader_spec.rb b/spec/lib/gitlab/import_export/json/ndjson_reader_spec.rb new file mode 100644 index 00000000000..40b784fdb87 --- /dev/null +++ b/spec/lib/gitlab/import_export/json/ndjson_reader_spec.rb @@ -0,0 +1,113 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Gitlab::ImportExport::JSON::NdjsonReader do + include ImportExport::CommonUtil + + let(:fixture) { 'spec/fixtures/lib/gitlab/import_export/light/tree' } + let(:root_tree) { JSON.parse(File.read(File.join(fixture, 'project.json'))) } + let(:ndjson_reader) { described_class.new(dir_path) } + let(:importable_path) { 'project' } + + before :all do + extract_archive('spec/fixtures/lib/gitlab/import_export/light', 'tree.tar.gz') + end + + after :all do + cleanup_artifacts_from_extract_archive('light') + end + + describe '#exist?' do + subject { ndjson_reader.exist? } + + context 'given valid dir_path' do + let(:dir_path) { fixture } + + it { is_expected.to be true } + end + + context 'given invalid dir_path' do + let(:dir_path) { 'invalid-dir-path' } + + it { is_expected.to be false } + end + end + + describe '#legacy?' do + let(:dir_path) { fixture } + + subject { ndjson_reader.legacy? 
} + + it { is_expected.to be false } + end + + describe '#consume_attributes' do + let(:dir_path) { fixture } + + subject { ndjson_reader.consume_attributes(importable_path) } + + it 'returns the whole root tree from parsed JSON' do + expect(subject).to eq(root_tree) + end + end + + describe '#consume_relation' do + let(:dir_path) { fixture } + + subject { ndjson_reader.consume_relation(importable_path, key) } + + context 'given any key' do + let(:key) { 'any-key' } + + it 'returns an Enumerator' do + expect(subject).to be_an_instance_of(Enumerator) + end + end + + context 'key has been consumed' do + let(:key) { 'issues' } + + before do + ndjson_reader.consume_relation(importable_path, key).first + end + + it 'yields nothing to the Enumerator' do + expect(subject.to_a).to eq([]) + end + end + + context 'key has not been consumed' do + context 'relation file does not exist' do + let(:key) { 'non-exist-relation-file-name' } + + before do + relation_file_path = File.join(dir_path, importable_path, "#{key}.ndjson") + expect(File).to receive(:exist?).with(relation_file_path).and_return(false) + end + + it 'yields nothing to the Enumerator' do + expect(subject.to_a).to eq([]) + end + end + + context 'relation file is empty' do + let(:key) { 'empty' } + + it 'yields nothing to the Enumerator' do + expect(subject.to_a).to eq([]) + end + end + + context 'relation file contains multiple lines' do + let(:key) { 'custom_attributes' } + let(:attr_1) { JSON.parse('{"id":201,"project_id":5,"created_at":"2016-06-14T15:01:51.315Z","updated_at":"2016-06-14T15:01:51.315Z","key":"color","value":"red"}') } + let(:attr_2) { JSON.parse('{"id":202,"project_id":5,"created_at":"2016-06-14T15:01:51.315Z","updated_at":"2016-06-14T15:01:51.315Z","key":"size","value":"small"}') } + + it 'yields every relation value to the Enumerator' do + expect(subject.to_a).to eq([[attr_1, 0], [attr_2, 1]]) + end + end + end + end +end diff --git a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb 
b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb index 247d455fecc..96aed774cfc 100644 --- a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb +++ b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb @@ -11,76 +11,83 @@ describe Gitlab::ImportExport::Project::TreeRestorer do let(:shared) { project.import_export_shared } - describe 'restore project tree' do - before_all do - # Using an admin for import, so we can check assignment of existing members - @user = create(:admin) - @existing_members = [ - create(:user, email: 'bernard_willms@gitlabexample.com'), - create(:user, email: 'saul_will@gitlabexample.com') - ] + RSpec.shared_examples 'project tree restorer work properly' do |reader| + describe 'restore project tree' do + before_all do + # Using an admin for import, so we can check assignment of existing members + @user = create(:admin) + @existing_members = [ + create(:user, email: 'bernard_willms@gitlabexample.com'), + create(:user, email: 'saul_will@gitlabexample.com') + ] - RSpec::Mocks.with_temporary_scope do - @project = create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project') - @shared = @project.import_export_shared + RSpec::Mocks.with_temporary_scope do + @project = create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project') + @shared = @project.import_export_shared - setup_import_export_config('complex') + setup_import_export_config('complex') + setup_reader(reader) - allow_any_instance_of(Repository).to receive(:fetch_source_branch!).and_return(true) - allow_any_instance_of(Gitlab::Git::Repository).to receive(:branch_exists?).and_return(false) + allow_any_instance_of(Repository).to receive(:fetch_source_branch!).and_return(true) + allow_any_instance_of(Gitlab::Git::Repository).to receive(:branch_exists?).and_return(false) - expect_any_instance_of(Gitlab::Git::Repository).to receive(:create_branch).with('feature', 'DCBA') - 
allow_any_instance_of(Gitlab::Git::Repository).to receive(:create_branch) + expect_any_instance_of(Gitlab::Git::Repository).to receive(:create_branch).with('feature', 'DCBA') + allow_any_instance_of(Gitlab::Git::Repository).to receive(:create_branch) - project_tree_restorer = described_class.new(user: @user, shared: @shared, project: @project) + project_tree_restorer = described_class.new(user: @user, shared: @shared, project: @project) - @restored_project_json = project_tree_restorer.restore + @restored_project_json = project_tree_restorer.restore + end end - end - context 'JSON' do - it 'restores models based on JSON' do - expect(@restored_project_json).to be_truthy + after(:context) do + cleanup_artifacts_from_extract_archive('complex') end - it 'restore correct project features' do - project = Project.find_by_path('project') + context 'JSON' do + it 'restores models based on JSON' do + expect(@restored_project_json).to be_truthy + end - expect(project.project_feature.issues_access_level).to eq(ProjectFeature::PRIVATE) - expect(project.project_feature.builds_access_level).to eq(ProjectFeature::PRIVATE) - expect(project.project_feature.snippets_access_level).to eq(ProjectFeature::PRIVATE) - expect(project.project_feature.wiki_access_level).to eq(ProjectFeature::PRIVATE) - expect(project.project_feature.merge_requests_access_level).to eq(ProjectFeature::PRIVATE) - end + it 'restore correct project features' do + project = Project.find_by_path('project') - it 'has the project description' do - expect(Project.find_by_path('project').description).to eq('Nisi et repellendus ut enim quo accusamus vel magnam.') - end + expect(project.project_feature.issues_access_level).to eq(ProjectFeature::PRIVATE) + expect(project.project_feature.builds_access_level).to eq(ProjectFeature::PRIVATE) + expect(project.project_feature.snippets_access_level).to eq(ProjectFeature::PRIVATE) + expect(project.project_feature.wiki_access_level).to eq(ProjectFeature::PRIVATE) + 
expect(project.project_feature.merge_requests_access_level).to eq(ProjectFeature::PRIVATE) + end - it 'has the same label associated to two issues' do - expect(ProjectLabel.find_by_title('test2').issues.count).to eq(2) - end + it 'has the project description' do + expect(Project.find_by_path('project').description).to eq('Nisi et repellendus ut enim quo accusamus vel magnam.') + end - it 'has milestones associated to two separate issues' do - expect(Milestone.find_by_description('test milestone').issues.count).to eq(2) - end + it 'has the same label associated to two issues' do + expect(ProjectLabel.find_by_title('test2').issues.count).to eq(2) + end + + it 'has milestones associated to two separate issues' do + expect(Milestone.find_by_description('test milestone').issues.count).to eq(2) + end - context 'when importing a project with cached_markdown_version and note_html' do - context 'for an Issue' do - it 'does not import note_html' do - note_content = 'Quo reprehenderit aliquam qui dicta impedit cupiditate eligendi' - issue_note = Issue.find_by(description: 'Aliquam enim illo et possimus.').notes.select { |n| n.note.match(/#{note_content}/)}.first + context 'when importing a project with cached_markdown_version and note_html' do + context 'for an Issue' do + it 'does not import note_html' do + note_content = 'Quo reprehenderit aliquam qui dicta impedit cupiditate eligendi' + issue_note = Issue.find_by(description: 'Aliquam enim illo et possimus.').notes.select { |n| n.note.match(/#{note_content}/)}.first - expect(issue_note.note_html).to match(/#{note_content}/) + expect(issue_note.note_html).to match(/#{note_content}/) + end end - end - context 'for a Merge Request' do - it 'does not import note_html' do - note_content = 'Sit voluptatibus eveniet architecto quidem' - merge_request_note = match_mr1_note(note_content) + context 'for a Merge Request' do + it 'does not import note_html' do + note_content = 'Sit voluptatibus eveniet architecto quidem' + 
merge_request_note = match_mr1_note(note_content) - expect(merge_request_note.note_html).to match(/#{note_content}/) + expect(merge_request_note.note_html).to match(/#{note_content}/) + end end context 'merge request system note metadata' do @@ -103,33 +110,32 @@ describe Gitlab::ImportExport::Project::TreeRestorer do end end end - end - it 'creates a valid pipeline note' do - expect(Ci::Pipeline.find_by_sha('sha-notes').notes).not_to be_empty - end + it 'creates a valid pipeline note' do + expect(Ci::Pipeline.find_by_sha('sha-notes').notes).not_to be_empty + end - it 'pipeline has the correct user ID' do - expect(Ci::Pipeline.find_by_sha('sha-notes').user_id).to eq(@user.id) - end + it 'pipeline has the correct user ID' do + expect(Ci::Pipeline.find_by_sha('sha-notes').user_id).to eq(@user.id) + end - it 'restores pipelines with missing ref' do - expect(Ci::Pipeline.where(ref: nil)).not_to be_empty - end + it 'restores pipelines with missing ref' do + expect(Ci::Pipeline.where(ref: nil)).not_to be_empty + end - it 'restores pipeline for merge request' do - pipeline = Ci::Pipeline.find_by_sha('048721d90c449b244b7b4c53a9186b04330174ec') + it 'restores pipeline for merge request' do + pipeline = Ci::Pipeline.find_by_sha('048721d90c449b244b7b4c53a9186b04330174ec') - expect(pipeline).to be_valid - expect(pipeline.tag).to be_falsey - expect(pipeline.source).to eq('merge_request_event') - expect(pipeline.merge_request.id).to be > 0 - expect(pipeline.merge_request.target_branch).to eq('feature') - expect(pipeline.merge_request.source_branch).to eq('feature_conflict') - end + expect(pipeline).to be_valid + expect(pipeline.tag).to be_falsey + expect(pipeline.source).to eq('merge_request_event') + expect(pipeline.merge_request.id).to be > 0 + expect(pipeline.merge_request.target_branch).to eq('feature') + expect(pipeline.merge_request.source_branch).to eq('feature_conflict') + end - it 'restores pipelines based on ascending id order' do - expected_ordered_shas = %w[ + it 
'restores pipelines based on ascending id order' do + expected_ordered_shas = %w[ 2ea1f3dec713d940208fb5ce4a38765ecb5d3f73 ce84140e8b878ce6e7c4d298c7202ff38170e3ac 048721d90c449b244b7b4c53a9186b04330174ec @@ -137,732 +143,749 @@ describe Gitlab::ImportExport::Project::TreeRestorer do 5f923865dde3436854e9ceb9cdb7815618d4e849 d2d430676773caa88cdaf7c55944073b2fd5561a 2ea1f3dec713d940208fb5ce4a38765ecb5d3f73 - ] + ] - project = Project.find_by_path('project') + project = Project.find_by_path('project') - project.ci_pipelines.order(:id).each_with_index do |pipeline, i| - expect(pipeline['sha']).to eq expected_ordered_shas[i] + project.ci_pipelines.order(:id).each_with_index do |pipeline, i| + expect(pipeline['sha']).to eq expected_ordered_shas[i] + end end - end - it 'preserves updated_at on issues' do - issue = Issue.find_by(description: 'Aliquam enim illo et possimus.') + it 'preserves updated_at on issues' do + issue = Issue.find_by(description: 'Aliquam enim illo et possimus.') - expect(issue.reload.updated_at.to_s).to eq('2016-06-14 15:02:47 UTC') - end + expect(issue.reload.updated_at.to_s).to eq('2016-06-14 15:02:47 UTC') + end - it 'has multiple issue assignees' do - expect(Issue.find_by(title: 'Voluptatem').assignees).to contain_exactly(@user, *@existing_members) - expect(Issue.find_by(title: 'Issue without assignees').assignees).to be_empty - end + it 'has multiple issue assignees' do + expect(Issue.find_by(title: 'Voluptatem').assignees).to contain_exactly(@user, *@existing_members) + expect(Issue.find_by(title: 'Issue without assignees').assignees).to be_empty + end - it 'restores timelogs for issues' do - timelog = Issue.find_by(title: 'issue_with_timelogs').timelogs.last + it 'restores timelogs for issues' do + timelog = Issue.find_by(title: 'issue_with_timelogs').timelogs.last - aggregate_failures do - expect(timelog.time_spent).to eq(72000) - expect(timelog.spent_at).to eq("2019-12-27T00:00:00.000Z") + aggregate_failures do + 
expect(timelog.time_spent).to eq(72000) + expect(timelog.spent_at).to eq("2019-12-27T00:00:00.000Z") + end end - end - it 'contains the merge access levels on a protected branch' do - expect(ProtectedBranch.first.merge_access_levels).not_to be_empty - end + it 'contains the merge access levels on a protected branch' do + expect(ProtectedBranch.first.merge_access_levels).not_to be_empty + end - it 'contains the push access levels on a protected branch' do - expect(ProtectedBranch.first.push_access_levels).not_to be_empty - end + it 'contains the push access levels on a protected branch' do + expect(ProtectedBranch.first.push_access_levels).not_to be_empty + end - it 'contains the create access levels on a protected tag' do - expect(ProtectedTag.first.create_access_levels).not_to be_empty - end + it 'contains the create access levels on a protected tag' do + expect(ProtectedTag.first.create_access_levels).not_to be_empty + end - it 'restores issue resource label events' do - expect(Issue.find_by(title: 'Voluptatem').resource_label_events).not_to be_empty - end + it 'restores issue resource label events' do + expect(Issue.find_by(title: 'Voluptatem').resource_label_events).not_to be_empty + end - it 'restores merge requests resource label events' do - expect(MergeRequest.find_by(title: 'MR1').resource_label_events).not_to be_empty - end + it 'restores merge requests resource label events' do + expect(MergeRequest.find_by(title: 'MR1').resource_label_events).not_to be_empty + end - it 'restores suggestion' do - note = Note.find_by("note LIKE 'Saepe asperiores exercitationem non dignissimos laborum reiciendis et ipsum%'") + it 'restores suggestion' do + note = Note.find_by("note LIKE 'Saepe asperiores exercitationem non dignissimos laborum reiciendis et ipsum%'") - expect(note.suggestions.count).to eq(1) - expect(note.suggestions.first.from_content).to eq("Original line\n") - end + expect(note.suggestions.count).to eq(1) + expect(note.suggestions.first.from_content).to 
eq("Original line\n") + end - context 'event at forth level of the tree' do - let(:event) { Event.find_by(action: 6) } + context 'event at forth level of the tree' do + let(:event) { Event.find_by(action: 6) } - it 'restores the event' do - expect(event).not_to be_nil - end + it 'restores the event' do + expect(event).not_to be_nil + end - it 'has the action' do - expect(event.action).not_to be_nil - end + it 'has the action' do + expect(event.action).not_to be_nil + end - it 'event belongs to note, belongs to merge request, belongs to a project' do - expect(event.note.noteable.project).not_to be_nil + it 'event belongs to note, belongs to merge request, belongs to a project' do + expect(event.note.noteable.project).not_to be_nil + end end - end - it 'has the correct data for merge request diff files' do - expect(MergeRequestDiffFile.where.not(diff: nil).count).to eq(55) - end + it 'has the correct data for merge request diff files' do + expect(MergeRequestDiffFile.where.not(diff: nil).count).to eq(55) + end - it 'has the correct data for merge request diff commits' do - expect(MergeRequestDiffCommit.count).to eq(77) - end + it 'has the correct data for merge request diff commits' do + expect(MergeRequestDiffCommit.count).to eq(77) + end - it 'has the correct data for merge request latest_merge_request_diff' do - MergeRequest.find_each do |merge_request| - expect(merge_request.latest_merge_request_diff_id).to eq(merge_request.merge_request_diffs.maximum(:id)) + it 'has the correct data for merge request latest_merge_request_diff' do + MergeRequest.find_each do |merge_request| + expect(merge_request.latest_merge_request_diff_id).to eq(merge_request.merge_request_diffs.maximum(:id)) + end end - end - it 'has labels associated to label links, associated to issues' do - expect(Label.first.label_links.first.target).not_to be_nil - end + it 'has labels associated to label links, associated to issues' do + expect(Label.first.label_links.first.target).not_to be_nil + end - 
it 'has project labels' do - expect(ProjectLabel.count).to eq(3) - end + it 'has project labels' do + expect(ProjectLabel.count).to eq(3) + end - it 'has no group labels' do - expect(GroupLabel.count).to eq(0) - end + it 'has no group labels' do + expect(GroupLabel.count).to eq(0) + end - it 'has issue boards' do - expect(Project.find_by_path('project').boards.count).to eq(1) - end + it 'has issue boards' do + expect(Project.find_by_path('project').boards.count).to eq(1) + end - it 'has lists associated with the issue board' do - expect(Project.find_by_path('project').boards.find_by_name('TestBoardABC').lists.count).to eq(3) - end + it 'has lists associated with the issue board' do + expect(Project.find_by_path('project').boards.find_by_name('TestBoardABC').lists.count).to eq(3) + end - it 'has a project feature' do - expect(@project.project_feature).not_to be_nil - end + it 'has a project feature' do + expect(@project.project_feature).not_to be_nil + end - it 'has custom attributes' do - expect(@project.custom_attributes.count).to eq(2) - end + it 'has custom attributes' do + expect(@project.custom_attributes.count).to eq(2) + end - it 'has badges' do - expect(@project.project_badges.count).to eq(2) - end + it 'has badges' do + expect(@project.project_badges.count).to eq(2) + end - it 'has snippets' do - expect(@project.snippets.count).to eq(1) - end + it 'has snippets' do + expect(@project.snippets.count).to eq(1) + end - it 'has award emoji for a snippet' do - award_emoji = @project.snippets.first.award_emoji + it 'has award emoji for a snippet' do + award_emoji = @project.snippets.first.award_emoji - expect(award_emoji.map(&:name)).to contain_exactly('thumbsup', 'coffee') - end + expect(award_emoji.map(&:name)).to contain_exactly('thumbsup', 'coffee') + end - it 'snippet has notes' do - expect(@project.snippets.first.notes.count).to eq(1) - end + it 'snippet has notes' do + expect(@project.snippets.first.notes.count).to eq(1) + end - it 'snippet has award 
emojis on notes' do - award_emoji = @project.snippets.first.notes.first.award_emoji.first + it 'snippet has award emojis on notes' do + award_emoji = @project.snippets.first.notes.first.award_emoji.first - expect(award_emoji.name).to eq('thumbsup') - end + expect(award_emoji.name).to eq('thumbsup') + end - it 'restores `ci_cd_settings` : `group_runners_enabled` setting' do - expect(@project.ci_cd_settings.group_runners_enabled?).to eq(false) - end + it 'restores `ci_cd_settings` : `group_runners_enabled` setting' do + expect(@project.ci_cd_settings.group_runners_enabled?).to eq(false) + end - it 'restores `auto_devops`' do - expect(@project.auto_devops_enabled?).to eq(true) - expect(@project.auto_devops.deploy_strategy).to eq('continuous') - end + it 'restores `auto_devops`' do + expect(@project.auto_devops_enabled?).to eq(true) + expect(@project.auto_devops.deploy_strategy).to eq('continuous') + end - it 'restores the correct service' do - expect(CustomIssueTrackerService.first).not_to be_nil - end + it 'restores the correct service' do + expect(CustomIssueTrackerService.first).not_to be_nil + end - it 'restores zoom meetings' do - meetings = @project.issues.first.zoom_meetings + it 'restores zoom meetings' do + meetings = @project.issues.first.zoom_meetings - expect(meetings.count).to eq(1) - expect(meetings.first.url).to eq('https://zoom.us/j/123456789') - end + expect(meetings.count).to eq(1) + expect(meetings.first.url).to eq('https://zoom.us/j/123456789') + end - it 'restores sentry issues' do - sentry_issue = @project.issues.first.sentry_issue + it 'restores sentry issues' do + sentry_issue = @project.issues.first.sentry_issue - expect(sentry_issue.sentry_issue_identifier).to eq(1234567891) - end + expect(sentry_issue.sentry_issue_identifier).to eq(1234567891) + end - it 'has award emoji for an issue' do - award_emoji = @project.issues.first.award_emoji.first + it 'has award emoji for an issue' do + award_emoji = @project.issues.first.award_emoji.first - 
expect(award_emoji.name).to eq('musical_keyboard') - end + expect(award_emoji.name).to eq('musical_keyboard') + end - it 'has award emoji for a note in an issue' do - award_emoji = @project.issues.first.notes.first.award_emoji.first + it 'has award emoji for a note in an issue' do + award_emoji = @project.issues.first.notes.first.award_emoji.first - expect(award_emoji.name).to eq('clapper') - end + expect(award_emoji.name).to eq('clapper') + end - it 'restores container_expiration_policy' do - policy = Project.find_by_path('project').container_expiration_policy + it 'restores container_expiration_policy' do + policy = Project.find_by_path('project').container_expiration_policy - aggregate_failures do - expect(policy).to be_an_instance_of(ContainerExpirationPolicy) - expect(policy).to be_persisted - expect(policy.cadence).to eq('3month') + aggregate_failures do + expect(policy).to be_an_instance_of(ContainerExpirationPolicy) + expect(policy).to be_persisted + expect(policy.cadence).to eq('3month') + end end - end - it 'restores error_tracking_setting' do - setting = @project.error_tracking_setting + it 'restores error_tracking_setting' do + setting = @project.error_tracking_setting - aggregate_failures do - expect(setting.api_url).to eq("https://gitlab.example.com/api/0/projects/sentry-org/sentry-project") - expect(setting.project_name).to eq("Sentry Project") - expect(setting.organization_name).to eq("Sentry Org") + aggregate_failures do + expect(setting.api_url).to eq("https://gitlab.example.com/api/0/projects/sentry-org/sentry-project") + expect(setting.project_name).to eq("Sentry Project") + expect(setting.organization_name).to eq("Sentry Org") + end end - end - it 'restores external pull requests' do - external_pr = @project.external_pull_requests.last + it 'restores external pull requests' do + external_pr = @project.external_pull_requests.last - aggregate_failures do - expect(external_pr.pull_request_iid).to eq(4) - expect(external_pr.source_branch).to 
eq("feature") - expect(external_pr.target_branch).to eq("master") - expect(external_pr.status).to eq("open") + aggregate_failures do + expect(external_pr.pull_request_iid).to eq(4) + expect(external_pr.source_branch).to eq("feature") + expect(external_pr.target_branch).to eq("master") + expect(external_pr.status).to eq("open") + end end - end - it 'restores pipeline schedules' do - pipeline_schedule = @project.pipeline_schedules.last + it 'restores pipeline schedules' do + pipeline_schedule = @project.pipeline_schedules.last - aggregate_failures do - expect(pipeline_schedule.description).to eq('Schedule Description') - expect(pipeline_schedule.ref).to eq('master') - expect(pipeline_schedule.cron).to eq('0 4 * * 0') - expect(pipeline_schedule.cron_timezone).to eq('UTC') - expect(pipeline_schedule.active).to eq(true) + aggregate_failures do + expect(pipeline_schedule.description).to eq('Schedule Description') + expect(pipeline_schedule.ref).to eq('master') + expect(pipeline_schedule.cron).to eq('0 4 * * 0') + expect(pipeline_schedule.cron_timezone).to eq('UTC') + expect(pipeline_schedule.active).to eq(true) + end end - end - it 'restores releases with links' do - release = @project.releases.last - link = release.links.last + it 'restores releases with links' do + release = @project.releases.last + link = release.links.last - aggregate_failures do - expect(release.tag).to eq('release-1.1') - expect(release.description).to eq('Some release notes') - expect(release.name).to eq('release-1.1') - expect(release.sha).to eq('901de3a8bd5573f4a049b1457d28bc1592ba6bf9') - expect(release.released_at).to eq('2019-12-26T10:17:14.615Z') + aggregate_failures do + expect(release.tag).to eq('release-1.1') + expect(release.description).to eq('Some release notes') + expect(release.name).to eq('release-1.1') + expect(release.sha).to eq('901de3a8bd5573f4a049b1457d28bc1592ba6bf9') + expect(release.released_at).to eq('2019-12-26T10:17:14.615Z') - expect(link.url).to 
eq('http://localhost/namespace6/project6/-/jobs/140463678/artifacts/download') - expect(link.name).to eq('release-1.1.dmg') + expect(link.url).to eq('http://localhost/namespace6/project6/-/jobs/140463678/artifacts/download') + expect(link.name).to eq('release-1.1.dmg') + end end - end - context 'Merge requests' do - it 'always has the new project as a target' do - expect(MergeRequest.find_by_title('MR1').target_project).to eq(@project) - end + context 'Merge requests' do + it 'always has the new project as a target' do + expect(MergeRequest.find_by_title('MR1').target_project).to eq(@project) + end - it 'has the same source project as originally if source/target are the same' do - expect(MergeRequest.find_by_title('MR1').source_project).to eq(@project) - end + it 'has the same source project as originally if source/target are the same' do + expect(MergeRequest.find_by_title('MR1').source_project).to eq(@project) + end - it 'has the new project as target if source/target differ' do - expect(MergeRequest.find_by_title('MR2').target_project).to eq(@project) - end + it 'has the new project as target if source/target differ' do + expect(MergeRequest.find_by_title('MR2').target_project).to eq(@project) + end - it 'has no source if source/target differ' do - expect(MergeRequest.find_by_title('MR2').source_project_id).to be_nil - end + it 'has no source if source/target differ' do + expect(MergeRequest.find_by_title('MR2').source_project_id).to be_nil + end - it 'has award emoji' do - award_emoji = MergeRequest.find_by_title('MR1').award_emoji + it 'has award emoji' do + award_emoji = MergeRequest.find_by_title('MR1').award_emoji - expect(award_emoji.map(&:name)).to contain_exactly('thumbsup', 'drum') - end + expect(award_emoji.map(&:name)).to contain_exactly('thumbsup', 'drum') + end - context 'notes' do - it 'has award emoji' do - merge_request_note = match_mr1_note('Sit voluptatibus eveniet architecto quidem') - award_emoji = merge_request_note.award_emoji.first + 
context 'notes' do + it 'has award emoji' do + merge_request_note = match_mr1_note('Sit voluptatibus eveniet architecto quidem') + award_emoji = merge_request_note.award_emoji.first - expect(award_emoji.name).to eq('tada') + expect(award_emoji.name).to eq('tada') + end end end - end - context 'tokens are regenerated' do - it 'has new CI trigger tokens' do - expect(Ci::Trigger.where(token: %w[cdbfasdf44a5958c83654733449e585 33a66349b5ad01fc00174af87804e40])) - .to be_empty - end + context 'tokens are regenerated' do + it 'has new CI trigger tokens' do + expect(Ci::Trigger.where(token: %w[cdbfasdf44a5958c83654733449e585 33a66349b5ad01fc00174af87804e40])) + .to be_empty + end - it 'has a new CI build token' do - expect(Ci::Build.where(token: 'abcd')).to be_empty + it 'has a new CI build token' do + expect(Ci::Build.where(token: 'abcd')).to be_empty + end end - end - context 'has restored the correct number of records' do - it 'has the correct number of merge requests' do - expect(@project.merge_requests.size).to eq(9) - end + context 'has restored the correct number of records' do + it 'has the correct number of merge requests' do + expect(@project.merge_requests.size).to eq(9) + end - it 'only restores valid triggers' do - expect(@project.triggers.size).to eq(1) - end + it 'only restores valid triggers' do + expect(@project.triggers.size).to eq(1) + end - it 'has the correct number of pipelines and statuses' do - expect(@project.ci_pipelines.size).to eq(7) + it 'has the correct number of pipelines and statuses' do + expect(@project.ci_pipelines.size).to eq(7) - @project.ci_pipelines.order(:id).zip([2, 0, 2, 2, 2, 2, 0]) - .each do |(pipeline, expected_status_size)| - expect(pipeline.statuses.size).to eq(expected_status_size) + @project.ci_pipelines.order(:id).zip([2, 0, 2, 2, 2, 2, 0]) + .each do |(pipeline, expected_status_size)| + expect(pipeline.statuses.size).to eq(expected_status_size) + end end end - end - context 'when restoring hierarchy of pipeline, stages 
and jobs' do - it 'restores pipelines' do - expect(Ci::Pipeline.all.count).to be 7 - end + context 'when restoring hierarchy of pipeline, stages and jobs' do + it 'restores pipelines' do + expect(Ci::Pipeline.all.count).to be 7 + end - it 'restores pipeline stages' do - expect(Ci::Stage.all.count).to be 6 - end + it 'restores pipeline stages' do + expect(Ci::Stage.all.count).to be 6 + end - it 'correctly restores association between stage and a pipeline' do - expect(Ci::Stage.all).to all(have_attributes(pipeline_id: a_value > 0)) - end + it 'correctly restores association between stage and a pipeline' do + expect(Ci::Stage.all).to all(have_attributes(pipeline_id: a_value > 0)) + end - it 'restores statuses' do - expect(CommitStatus.all.count).to be 10 - end + it 'restores statuses' do + expect(CommitStatus.all.count).to be 10 + end - it 'correctly restores association between a stage and a job' do - expect(CommitStatus.all).to all(have_attributes(stage_id: a_value > 0)) - end + it 'correctly restores association between a stage and a job' do + expect(CommitStatus.all).to all(have_attributes(stage_id: a_value > 0)) + end - it 'correctly restores association between a pipeline and a job' do - expect(CommitStatus.all).to all(have_attributes(pipeline_id: a_value > 0)) - end + it 'correctly restores association between a pipeline and a job' do + expect(CommitStatus.all).to all(have_attributes(pipeline_id: a_value > 0)) + end - it 'restores a Hash for CommitStatus options' do - expect(CommitStatus.all.map(&:options).compact).to all(be_a(Hash)) - end + it 'restores a Hash for CommitStatus options' do + expect(CommitStatus.all.map(&:options).compact).to all(be_a(Hash)) + end - it 'restores external pull request for the restored pipeline' do - pipeline_with_external_pr = @project.ci_pipelines.find_by(source: 'external_pull_request_event') + it 'restores external pull request for the restored pipeline' do + pipeline_with_external_pr = @project.ci_pipelines.find_by(source: 
'external_pull_request_event') - expect(pipeline_with_external_pr.external_pull_request).to be_persisted - end + expect(pipeline_with_external_pr.external_pull_request).to be_persisted + end - it 'has no import failures' do - expect(@project.import_failures.size).to eq 0 + it 'has no import failures' do + expect(@project.import_failures.size).to eq 0 + end end end end - end - shared_examples 'restores group correctly' do |**results| - it 'has group label' do - expect(project.group.labels.size).to eq(results.fetch(:labels, 0)) - expect(project.group.labels.where(type: "GroupLabel").where.not(project_id: nil).count).to eq(0) - end + shared_examples 'restores group correctly' do |**results| + it 'has group label' do + expect(project.group.labels.size).to eq(results.fetch(:labels, 0)) + expect(project.group.labels.where(type: "GroupLabel").where.not(project_id: nil).count).to eq(0) + end - it 'has group milestone' do - expect(project.group.milestones.size).to eq(results.fetch(:milestones, 0)) - end + it 'has group milestone' do + expect(project.group.milestones.size).to eq(results.fetch(:milestones, 0)) + end - it 'has the correct visibility level' do - # INTERNAL in the `project.json`, group's is PRIVATE - expect(project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE) + it 'has the correct visibility level' do + # INTERNAL in the `project.json`, group's is PRIVATE + expect(project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE) + end end - end - context 'project.json file access check' do - let(:user) { create(:user) } - let!(:project) { create(:project, :builds_disabled, :issues_disabled, name: 'project', path: 'project') } - let(:project_tree_restorer) do - described_class.new(user: user, shared: shared, project: project) - end - let(:restored_project_json) { project_tree_restorer.restore } + context 'project.json file access check' do + let(:user) { create(:user) } + let!(:project) { create(:project, :builds_disabled, :issues_disabled, name: 
'project', path: 'project') } + let(:project_tree_restorer) do + described_class.new(user: user, shared: shared, project: project) + end + let(:restored_project_json) { project_tree_restorer.restore } - it 'does not read a symlink' do - Dir.mktmpdir do |tmpdir| - setup_symlink(tmpdir, 'project.json') - allow(shared).to receive(:export_path).and_call_original + it 'does not read a symlink' do + Dir.mktmpdir do |tmpdir| + setup_symlink(tmpdir, 'project.json') + allow(shared).to receive(:export_path).and_call_original - expect(project_tree_restorer.restore).to eq(false) - expect(shared.errors).to include('Incorrect JSON format') + expect(project_tree_restorer.restore).to eq(false) + expect(shared.errors).to include('invalid import format') + end end end - end - context 'Light JSON' do - let(:user) { create(:user) } - let!(:project) { create(:project, :builds_disabled, :issues_disabled, name: 'project', path: 'project') } - let(:project_tree_restorer) { described_class.new(user: user, shared: shared, project: project) } - let(:restored_project_json) { project_tree_restorer.restore } + context 'Light JSON' do + let(:user) { create(:user) } + let!(:project) { create(:project, :builds_disabled, :issues_disabled, name: 'project', path: 'project') } + let(:project_tree_restorer) { described_class.new(user: user, shared: shared, project: project) } + let(:restored_project_json) { project_tree_restorer.restore } - context 'with a simple project' do - before do - setup_import_export_config('light') - expect(restored_project_json).to eq(true) - end + context 'with a simple project' do + before do + setup_import_export_config('light') + setup_reader(reader) + + expect(restored_project_json).to eq(true) + end + + after do + cleanup_artifacts_from_extract_archive('light') + end + + it 'issue system note metadata restored successfully' do + note_content = 'created merge request !1 to address this issue' + note = project.issues.first.notes.select { |n| 
n.note.match(/#{note_content}/)}.first + + expect(note.noteable_type).to eq('Issue') + expect(note.system).to eq(true) + expect(note.system_note_metadata.action).to eq('merge') + expect(note.system_note_metadata.commit_count).to be_nil + end + + context 'when there is an existing build with build token' do + before do + create(:ci_build, token: 'abcd') + end - it_behaves_like 'restores project successfully', - issues: 1, - labels: 2, - label_with_priorities: 'A project label', - milestones: 1, - first_issue_labels: 1, - services: 1 - - it 'issue system note metadata restored successfully' do - note_content = 'created merge request !1 to address this issue' - note = project.issues.first.notes.select { |n| n.note.match(/#{note_content}/)}.first - - expect(note.noteable_type).to eq('Issue') - expect(note.system).to eq(true) - expect(note.system_note_metadata.action).to eq('merge') - expect(note.system_note_metadata.commit_count).to be_nil + it_behaves_like 'restores project successfully', + issues: 1, + labels: 2, + label_with_priorities: 'A project label', + milestones: 1, + first_issue_labels: 1, + services: 1 + end + + context 'when there is an existing build with build token' do + before do + create(:ci_build, token: 'abcd') + end + + it_behaves_like 'restores project successfully', + issues: 1, + labels: 2, + label_with_priorities: 'A project label', + milestones: 1, + first_issue_labels: 1 + end end - context 'when there is an existing build with build token' do + context 'multiple pipelines reference the same external pull request' do before do - create(:ci_build, token: 'abcd') + setup_import_export_config('multi_pipeline_ref_one_external_pr') + setup_reader(reader) + + expect(restored_project_json).to eq(true) + end + + after do + cleanup_artifacts_from_extract_archive('multi_pipeline_ref_one_external_pr') end it_behaves_like 'restores project successfully', - issues: 1, - labels: 2, - label_with_priorities: 'A project label', - milestones: 1, - 
first_issue_labels: 1 + issues: 0, + labels: 0, + milestones: 0, + ci_pipelines: 2, + external_pull_requests: 1, + import_failures: 0 + + it 'restores external pull request for the restored pipelines' do + external_pr = project.external_pull_requests.first + + project.ci_pipelines.each do |pipeline_with_external_pr| + expect(pipeline_with_external_pr.external_pull_request).to be_persisted + expect(pipeline_with_external_pr.external_pull_request).to eq(external_pr) + end + end end - end - context 'multiple pipelines reference the same external pull request' do - before do - setup_import_export_config('multi_pipeline_ref_one_external_pr') - expect(restored_project_json).to eq(true) - end + context 'when post import action throw non-retriable exception' do + let(:exception) { StandardError.new('post_import_error') } + + before do + setup_import_export_config('light') + setup_reader(reader) - it_behaves_like 'restores project successfully', - issues: 0, - labels: 0, - milestones: 0, - ci_pipelines: 2, - external_pull_requests: 1, - import_failures: 0 + expect(project) + .to receive(:merge_requests) + .and_raise(exception) + end - it 'restores external pull request for the restored pipelines' do - external_pr = project.external_pull_requests.first + after do + cleanup_artifacts_from_extract_archive('light') + end - project.ci_pipelines.each do |pipeline_with_external_pr| - expect(pipeline_with_external_pr.external_pull_request).to be_persisted - expect(pipeline_with_external_pr.external_pull_request).to eq(external_pr) + it 'report post import error' do + expect(restored_project_json).to eq(false) + expect(shared.errors).to include('post_import_error') end end - end - context 'when post import action throw non-retriable exception' do - let(:exception) { StandardError.new('post_import_error') } + context 'when post import action throw retriable exception one time' do + let(:exception) { GRPC::DeadlineExceeded.new } - before do - setup_import_export_config('light') - 
expect(project) - .to receive(:merge_requests) - .and_raise(exception) - end + before do + setup_import_export_config('light') + setup_reader(reader) - it 'report post import error' do - expect(restored_project_json).to eq(false) - expect(shared.errors).to include('post_import_error') - end - end + expect(project) + .to receive(:merge_requests) + .and_raise(exception) + expect(project) + .to receive(:merge_requests) + .and_call_original + expect(restored_project_json).to eq(true) + end - context 'when post import action throw retriable exception one time' do - let(:exception) { GRPC::DeadlineExceeded.new } + after do + cleanup_artifacts_from_extract_archive('light') + end - before do - setup_import_export_config('light') - expect(project) - .to receive(:merge_requests) - .and_raise(exception) - expect(project) - .to receive(:merge_requests) - .and_call_original - expect(restored_project_json).to eq(true) - end + it_behaves_like 'restores project successfully', + issues: 1, + labels: 2, + label_with_priorities: 'A project label', + milestones: 1, + first_issue_labels: 1, + services: 1, + import_failures: 1 - it_behaves_like 'restores project successfully', - issues: 1, - labels: 2, - label_with_priorities: 'A project label', - milestones: 1, - first_issue_labels: 1, - services: 1, - import_failures: 1 - - it 'records the failures in the database' do - import_failure = ImportFailure.last - - expect(import_failure.project_id).to eq(project.id) - expect(import_failure.relation_key).to be_nil - expect(import_failure.relation_index).to be_nil - expect(import_failure.exception_class).to eq('GRPC::DeadlineExceeded') - expect(import_failure.exception_message).to be_present - expect(import_failure.correlation_id_value).not_to be_empty - expect(import_failure.created_at).to be_present - end - end + it 'records the failures in the database' do + import_failure = ImportFailure.last - context 'when the project has overridden params in import data' do - before do - 
setup_import_export_config('light') + expect(import_failure.project_id).to eq(project.id) + expect(import_failure.relation_key).to be_nil + expect(import_failure.relation_index).to be_nil + expect(import_failure.exception_class).to eq('GRPC::DeadlineExceeded') + expect(import_failure.exception_message).to be_present + expect(import_failure.correlation_id_value).not_to be_empty + expect(import_failure.created_at).to be_present + end end - it 'handles string versions of visibility_level' do - # Project needs to be in a group for visibility level comparison - # to happen - group = create(:group) - project.group = group + context 'when the project has overridden params in import data' do + before do + setup_import_export_config('light') + setup_reader(reader) + end - project.create_import_data(data: { override_params: { visibility_level: Gitlab::VisibilityLevel::INTERNAL.to_s } }) + after do + cleanup_artifacts_from_extract_archive('light') + end - expect(restored_project_json).to eq(true) - expect(project.visibility_level).to eq(Gitlab::VisibilityLevel::INTERNAL) - end + it 'handles string versions of visibility_level' do + # Project needs to be in a group for visibility level comparison + # to happen + group = create(:group) + project.group = group - it 'overwrites the params stored in the JSON' do - project.create_import_data(data: { override_params: { description: "Overridden" } }) + project.create_import_data(data: { override_params: { visibility_level: Gitlab::VisibilityLevel::INTERNAL.to_s } }) - expect(restored_project_json).to eq(true) - expect(project.description).to eq("Overridden") - end + expect(restored_project_json).to eq(true) + expect(project.visibility_level).to eq(Gitlab::VisibilityLevel::INTERNAL) + end - it 'does not allow setting params that are excluded from import_export settings' do - project.create_import_data(data: { override_params: { lfs_enabled: true } }) + it 'overwrites the params stored in the JSON' do + project.create_import_data(data: 
{ override_params: { description: "Overridden" } }) - expect(restored_project_json).to eq(true) - expect(project.lfs_enabled).to be_falsey - end + expect(restored_project_json).to eq(true) + expect(project.description).to eq("Overridden") + end - it 'overrides project feature access levels' do - access_level_keys = project.project_feature.attributes.keys.select { |a| a =~ /_access_level/ } + it 'does not allow setting params that are excluded from import_export settings' do + project.create_import_data(data: { override_params: { lfs_enabled: true } }) - # `pages_access_level` is not included, since it is not available in the public API - # and has a dependency on project's visibility level - # see ProjectFeature model - access_level_keys.delete('pages_access_level') + expect(restored_project_json).to eq(true) + expect(project.lfs_enabled).to be_falsey + end + + it 'overrides project feature access levels' do + access_level_keys = project.project_feature.attributes.keys.select { |a| a =~ /_access_level/ } + + # `pages_access_level` is not included, since it is not available in the public API + # and has a dependency on project's visibility level + # see ProjectFeature model + access_level_keys.delete('pages_access_level') - disabled_access_levels = Hash[access_level_keys.collect { |item| [item, 'disabled'] }] + disabled_access_levels = Hash[access_level_keys.collect { |item| [item, 'disabled'] }] - project.create_import_data(data: { override_params: disabled_access_levels }) + project.create_import_data(data: { override_params: disabled_access_levels }) - expect(restored_project_json).to eq(true) + expect(restored_project_json).to eq(true) - aggregate_failures do - access_level_keys.each do |key| - expect(project.public_send(key)).to eq(ProjectFeature::DISABLED) + aggregate_failures do + access_level_keys.each do |key| + expect(project.public_send(key)).to eq(ProjectFeature::DISABLED) + end end end end - end - context 'with a project that has a group' do - 
let!(:project) do - create(:project, - :builds_disabled, - :issues_disabled, - name: 'project', - path: 'project', - group: create(:group, visibility_level: Gitlab::VisibilityLevel::PRIVATE)) - end + context 'with a project that has a group' do + let!(:project) do + create(:project, + :builds_disabled, + :issues_disabled, + name: 'project', + path: 'project', + group: create(:group, visibility_level: Gitlab::VisibilityLevel::PRIVATE)) + end - before do - setup_import_export_config('group') - expect(restored_project_json).to eq(true) - end + before do + setup_import_export_config('group') + setup_reader(reader) - it_behaves_like 'restores project successfully', - issues: 3, - labels: 2, - label_with_priorities: 'A project label', - milestones: 2, - first_issue_labels: 1 - - it_behaves_like 'restores group correctly', - labels: 0, - milestones: 0, - first_issue_labels: 1 - - it 'restores issue states' do - expect(project.issues.with_state(:closed).count).to eq(1) - expect(project.issues.with_state(:opened).count).to eq(2) - end - end + expect(restored_project_json).to eq(true) + end - context 'with existing group models' do - let!(:project) do - create(:project, - :builds_disabled, - :issues_disabled, - name: 'project', - path: 'project', - group: create(:group)) - end + after do + cleanup_artifacts_from_extract_archive('group') + end - before do - setup_import_export_config('light') - end + it_behaves_like 'restores project successfully', + issues: 3, + labels: 2, + label_with_priorities: 'A project label', + milestones: 2, + first_issue_labels: 1 - it 'does not import any templated services' do - expect(restored_project_json).to eq(true) + it_behaves_like 'restores group correctly', + labels: 0, + milestones: 0, + first_issue_labels: 1 - expect(project.services.where(template: true).count).to eq(0) + it 'restores issue states' do + expect(project.issues.with_state(:closed).count).to eq(1) + expect(project.issues.with_state(:opened).count).to eq(2) + end end - it 
'does not import any instance services' do - expect(restored_project_json).to eq(true) + context 'with existing group models' do + let!(:project) do + create(:project, + :builds_disabled, + :issues_disabled, + name: 'project', + path: 'project', + group: create(:group)) + end - expect(project.services.where(instance: true).count).to eq(0) - end + before do + setup_import_export_config('light') + setup_reader(reader) + end - it 'imports labels' do - create(:group_label, name: 'Another label', group: project.group) + after do + cleanup_artifacts_from_extract_archive('light') + end - expect_any_instance_of(Gitlab::ImportExport::Shared).not_to receive(:error) + it 'does not import any templated services' do + expect(restored_project_json).to eq(true) - expect(restored_project_json).to eq(true) - expect(project.labels.count).to eq(1) - end + expect(project.services.where(template: true).count).to eq(0) + end - it 'imports milestones' do - create(:milestone, name: 'A milestone', group: project.group) + it 'does not import any instance services' do + expect(restored_project_json).to eq(true) - expect_any_instance_of(Gitlab::ImportExport::Shared).not_to receive(:error) + expect(project.services.where(instance: true).count).to eq(0) + end - expect(restored_project_json).to eq(true) - expect(project.group.milestones.count).to eq(1) - expect(project.milestones.count).to eq(0) - end - end + it 'imports labels' do + create(:group_label, name: 'Another label', group: project.group) - context 'with clashing milestones on IID' do - let!(:project) do - create(:project, - :builds_disabled, - :issues_disabled, - name: 'project', - path: 'project', - group: create(:group)) - end + expect_any_instance_of(Gitlab::ImportExport::Shared).not_to receive(:error) - before do - setup_import_export_config('milestone-iid') - end + expect(restored_project_json).to eq(true) + expect(project.labels.count).to eq(1) + end - it 'preserves the project milestone IID' do - 
expect_any_instance_of(Gitlab::ImportExport::Shared).not_to receive(:error) + it 'imports milestones' do + create(:milestone, name: 'A milestone', group: project.group) - expect(restored_project_json).to eq(true) - expect(project.milestones.count).to eq(2) - expect(Milestone.find_by_title('Another milestone').iid).to eq(1) - expect(Milestone.find_by_title('Group-level milestone').iid).to eq(2) - end - end + expect_any_instance_of(Gitlab::ImportExport::Shared).not_to receive(:error) - context 'with external authorization classification labels' do - before do - setup_import_export_config('light') + expect(restored_project_json).to eq(true) + expect(project.group.milestones.count).to eq(1) + expect(project.milestones.count).to eq(0) + end end - it 'converts empty external classification authorization labels to nil' do - project.create_import_data(data: { override_params: { external_authorization_classification_label: "" } }) + context 'with clashing milestones on IID' do + let!(:project) do + create(:project, + :builds_disabled, + :issues_disabled, + name: 'project', + path: 'project', + group: create(:group)) + end - expect(restored_project_json).to eq(true) - expect(project.external_authorization_classification_label).to be_nil - end + before do + setup_import_export_config('milestone-iid') + setup_reader(reader) + end - it 'preserves valid external classification authorization labels' do - project.create_import_data(data: { override_params: { external_authorization_classification_label: "foobar" } }) + after do + cleanup_artifacts_from_extract_archive('milestone-iid') + end - expect(restored_project_json).to eq(true) - expect(project.external_authorization_classification_label).to eq("foobar") - end - end - end + it 'preserves the project milestone IID' do + expect_any_instance_of(Gitlab::ImportExport::Shared).not_to receive(:error) - context 'Minimal JSON' do - let(:project) { create(:project) } - let(:user) { create(:user) } - let(:tree_hash) { { 
'visibility_level' => visibility } } - let(:restorer) do - described_class.new(user: user, shared: shared, project: project) - end + expect(restored_project_json).to eq(true) + expect(project.milestones.count).to eq(2) + expect(Milestone.find_by_title('Another milestone').iid).to eq(1) + expect(Milestone.find_by_title('Group-level milestone').iid).to eq(2) + end + end - before do - allow_any_instance_of(Gitlab::ImportExport::JSON::LegacyReader::File).to receive(:valid?).and_return(true) - allow_any_instance_of(Gitlab::ImportExport::JSON::LegacyReader::File).to receive(:tree_hash) { tree_hash } - end + context 'with external authorization classification labels' do + before do + setup_import_export_config('light') + setup_reader(reader) + end - context 'no group visibility' do - let(:visibility) { Gitlab::VisibilityLevel::PRIVATE } + after do + cleanup_artifacts_from_extract_archive('light') + end - it 'uses the project visibility' do - expect(restorer.restore).to eq(true) - expect(restorer.project.visibility_level).to eq(visibility) - end - end + it 'converts empty external classification authorization labels to nil' do + project.create_import_data(data: { override_params: { external_authorization_classification_label: "" } }) - context 'with restricted internal visibility' do - describe 'internal project' do - let(:visibility) { Gitlab::VisibilityLevel::INTERNAL } + expect(restored_project_json).to eq(true) + expect(project.external_authorization_classification_label).to be_nil + end - it 'uses private visibility' do - stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::INTERNAL]) + it 'preserves valid external classification authorization labels' do + project.create_import_data(data: { override_params: { external_authorization_classification_label: "foobar" } }) - expect(restorer.restore).to eq(true) - expect(restorer.project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE) + expect(restored_project_json).to eq(true) + 
expect(project.external_authorization_classification_label).to eq("foobar") end end end - context 'with group visibility' do - before do - group = create(:group, visibility_level: group_visibility) - - project.update(group: group) + context 'Minimal JSON' do + let(:project) { create(:project) } + let(:user) { create(:user) } + let(:tree_hash) { { 'visibility_level' => visibility } } + let(:restorer) do + described_class.new(user: user, shared: shared, project: project) end - context 'private group visibility' do - let(:group_visibility) { Gitlab::VisibilityLevel::PRIVATE } - let(:visibility) { Gitlab::VisibilityLevel::PUBLIC } - - it 'uses the group visibility' do - expect(restorer.restore).to eq(true) - expect(restorer.project.visibility_level).to eq(group_visibility) - end + before do + allow_any_instance_of(Gitlab::ImportExport::JSON::LegacyReader::File).to receive(:exist?).and_return(true) + allow_any_instance_of(Gitlab::ImportExport::JSON::NdjsonReader).to receive(:exist?).and_return(false) + allow_any_instance_of(Gitlab::ImportExport::JSON::LegacyReader::File).to receive(:tree_hash) { tree_hash } end - context 'public group visibility' do - let(:group_visibility) { Gitlab::VisibilityLevel::PUBLIC } + context 'no group visibility' do let(:visibility) { Gitlab::VisibilityLevel::PRIVATE } it 'uses the project visibility' do @@ -871,17 +894,11 @@ describe Gitlab::ImportExport::Project::TreeRestorer do end end - context 'internal group visibility' do - let(:group_visibility) { Gitlab::VisibilityLevel::INTERNAL } - let(:visibility) { Gitlab::VisibilityLevel::PUBLIC } - - it 'uses the group visibility' do - expect(restorer.restore).to eq(true) - expect(restorer.project.visibility_level).to eq(group_visibility) - end + context 'with restricted internal visibility' do + describe 'internal project' do + let(:visibility) { Gitlab::VisibilityLevel::INTERNAL } - context 'with restricted internal visibility' do - it 'sets private visibility' do + it 'uses private 
visibility' do stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::INTERNAL]) expect(restorer.restore).to eq(true) @@ -889,43 +906,116 @@ describe Gitlab::ImportExport::Project::TreeRestorer do end end end - end - end - context 'JSON with invalid records' do - subject(:restored_project_json) { project_tree_restorer.restore } + context 'with group visibility' do + before do + group = create(:group, visibility_level: group_visibility) + + project.update(group: group) + end - let(:user) { create(:user) } - let!(:project) { create(:project, :builds_disabled, :issues_disabled, name: 'project', path: 'project') } - let(:project_tree_restorer) { described_class.new(user: user, shared: shared, project: project) } + context 'private group visibility' do + let(:group_visibility) { Gitlab::VisibilityLevel::PRIVATE } + let(:visibility) { Gitlab::VisibilityLevel::PUBLIC } - before do - setup_import_export_config('with_invalid_records') + it 'uses the group visibility' do + expect(restorer.restore).to eq(true) + expect(restorer.project.visibility_level).to eq(group_visibility) + end + end + + context 'public group visibility' do + let(:group_visibility) { Gitlab::VisibilityLevel::PUBLIC } + let(:visibility) { Gitlab::VisibilityLevel::PRIVATE } + + it 'uses the project visibility' do + expect(restorer.restore).to eq(true) + expect(restorer.project.visibility_level).to eq(visibility) + end + end - subject + context 'internal group visibility' do + let(:group_visibility) { Gitlab::VisibilityLevel::INTERNAL } + let(:visibility) { Gitlab::VisibilityLevel::PUBLIC } + + it 'uses the group visibility' do + expect(restorer.restore).to eq(true) + expect(restorer.project.visibility_level).to eq(group_visibility) + end + + context 'with restricted internal visibility' do + it 'sets private visibility' do + stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::INTERNAL]) + + expect(restorer.restore).to eq(true) + 
expect(restorer.project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE) + end + end + end + end end - context 'when failures occur because a relation fails to be processed' do - it_behaves_like 'restores project successfully', - issues: 0, - labels: 0, - label_with_priorities: nil, - milestones: 1, - first_issue_labels: 0, - services: 0, - import_failures: 1 - - it 'records the failures in the database' do - import_failure = ImportFailure.last - - expect(import_failure.project_id).to eq(project.id) - expect(import_failure.relation_key).to eq('milestones') - expect(import_failure.relation_index).to be_present - expect(import_failure.exception_class).to eq('ActiveRecord::RecordInvalid') - expect(import_failure.exception_message).to be_present - expect(import_failure.correlation_id_value).not_to be_empty - expect(import_failure.created_at).to be_present + context 'JSON with invalid records' do + subject(:restored_project_json) { project_tree_restorer.restore } + + let(:user) { create(:user) } + let!(:project) { create(:project, :builds_disabled, :issues_disabled, name: 'project', path: 'project') } + let(:project_tree_restorer) { described_class.new(user: user, shared: shared, project: project) } + + before do + setup_import_export_config('with_invalid_records') + setup_reader(reader) + + subject + end + + after do + cleanup_artifacts_from_extract_archive('with_invalid_records') end + + context 'when failures occur because a relation fails to be processed' do + it_behaves_like 'restores project successfully', + issues: 0, + labels: 0, + label_with_priorities: nil, + milestones: 1, + first_issue_labels: 0, + services: 0, + import_failures: 1 + + it 'records the failures in the database' do + import_failure = ImportFailure.last + + expect(import_failure.project_id).to eq(project.id) + expect(import_failure.relation_key).to eq('milestones') + expect(import_failure.relation_index).to be_present + expect(import_failure.exception_class).to 
eq('ActiveRecord::RecordInvalid') + expect(import_failure.exception_message).to be_present + expect(import_failure.correlation_id_value).not_to be_empty + expect(import_failure.created_at).to be_present + end + end + end + end + + context 'enable ndjson import' do + before_all do + # Test suite `restore project tree` run `project_tree_restorer.restore` in `before_all`. + # `Enable all features by default for testing` happens in `before(:each)` + # So it requires manually enable feature flag to allow ndjson_reader + Feature.enable(:project_import_ndjson) + end + + it_behaves_like 'project tree restorer work properly', :legacy_reader + + it_behaves_like 'project tree restorer work properly', :ndjson_reader + end + + context 'disable ndjson import' do + before do + stub_feature_flags(project_import_ndjson: false) end + + it_behaves_like 'project tree restorer work properly', :legacy_reader end end diff --git a/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb index 52e1efa70e0..0b58a75220d 100644 --- a/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb +++ b/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb @@ -14,7 +14,7 @@ describe Gitlab::ImportExport::RelationTreeRestorer do let(:user) { create(:user) } let(:shared) { Gitlab::ImportExport::Shared.new(importable) } - let(:attributes) { {} } + let(:attributes) { relation_reader.consume_attributes(importable_name) } let(:members_mapper) do Gitlab::ImportExport::MembersMapper.new(exported_members: {}, user: user, importable: importable) @@ -30,7 +30,7 @@ describe Gitlab::ImportExport::RelationTreeRestorer do relation_factory: relation_factory, reader: reader, importable: importable, - importable_path: nil, + importable_path: importable_path, importable_attributes: attributes ) end @@ -94,21 +94,24 @@ describe Gitlab::ImportExport::RelationTreeRestorer do end context 'when restoring a project' do - let(:path) { 
'spec/fixtures/lib/gitlab/import_export/complex/project.json' } let(:importable) { create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project') } + let(:importable_name) { 'project' } + let(:importable_path) { 'project' } let(:object_builder) { Gitlab::ImportExport::Project::ObjectBuilder } let(:relation_factory) { Gitlab::ImportExport::Project::RelationFactory } let(:reader) { Gitlab::ImportExport::Reader.new(shared: shared) } context 'using legacy reader' do + let(:path) { 'spec/fixtures/lib/gitlab/import_export/complex/project.json' } let(:relation_reader) do Gitlab::ImportExport::JSON::LegacyReader::File.new( path, - relation_names: reader.project_relation_names + relation_names: reader.project_relation_names, + allowed_path: 'project' ) end - let(:attributes) { relation_reader.consume_attributes(nil) } + let(:attributes) { relation_reader.consume_attributes('project') } it_behaves_like 'import project successfully' @@ -118,6 +121,21 @@ describe Gitlab::ImportExport::RelationTreeRestorer do include_examples 'logging of relations creation' end + + context 'using ndjson reader' do + let(:path) { 'spec/fixtures/lib/gitlab/import_export/complex/tree' } + let(:relation_reader) { Gitlab::ImportExport::JSON::NdjsonReader.new(path) } + + before :all do + extract_archive('spec/fixtures/lib/gitlab/import_export/complex', 'tree.tar.gz') + end + + after :all do + cleanup_artifacts_from_extract_archive('complex') + end + + it_behaves_like 'import project successfully' + end end end @@ -125,9 +143,16 @@ describe Gitlab::ImportExport::RelationTreeRestorer do let(:path) { 'spec/fixtures/lib/gitlab/import_export/group_exports/no_children/group.json' } let(:group) { create(:group) } let(:importable) { create(:group, parent: group) } + let(:importable_name) { nil } + let(:importable_path) { nil } let(:object_builder) { Gitlab::ImportExport::Group::ObjectBuilder } let(:relation_factory) { Gitlab::ImportExport::Group::RelationFactory } - 
let(:relation_reader) { Gitlab::ImportExport::JSON::LegacyReader::File.new(path, relation_names: reader.group_relation_names) } + let(:relation_reader) do + Gitlab::ImportExport::JSON::LegacyReader::File.new( + path, + relation_names: reader.group_relation_names) + end + let(:reader) do Gitlab::ImportExport::Reader.new( shared: shared, @@ -135,6 +160,10 @@ describe Gitlab::ImportExport::RelationTreeRestorer do ) end + it 'restores group tree' do + expect(subject).to eq(true) + end + include_examples 'logging of relations creation' end end diff --git a/spec/support/helpers/graphql_helpers.rb b/spec/support/helpers/graphql_helpers.rb index 1bb942ff39b..74582df6cd9 100644 --- a/spec/support/helpers/graphql_helpers.rb +++ b/spec/support/helpers/graphql_helpers.rb @@ -378,8 +378,9 @@ module GraphqlHelpers def execute_query(query_type) schema = Class.new(GraphQL::Schema) do + use GraphQL::Pagination::Connections use Gitlab::Graphql::Authorize - use Gitlab::Graphql::Connections + use Gitlab::Graphql::Pagination::Connections query(query_type) end diff --git a/spec/support/import_export/common_util.rb b/spec/support/import_export/common_util.rb index efe14b7244c..16762f32d8b 100644 --- a/spec/support/import_export/common_util.rb +++ b/spec/support/import_export/common_util.rb @@ -15,9 +15,39 @@ module ImportExport export_path = [prefix, 'spec', 'fixtures', 'lib', 'gitlab', 'import_export', name].compact export_path = File.join(*export_path) + extract_archive(export_path, 'tree.tar.gz') + allow_any_instance_of(Gitlab::ImportExport).to receive(:export_path) { export_path } end + def extract_archive(path, archive) + if File.exist?(File.join(path, archive)) + system("cd #{path}; tar xzvf #{archive} &> /dev/null") + end + end + + def cleanup_artifacts_from_extract_archive(name, prefix = nil) + export_path = [prefix, 'spec', 'fixtures', 'lib', 'gitlab', 'import_export', name].compact + export_path = File.join(*export_path) + + if File.exist?(File.join(export_path, 'tree.tar.gz')) 
+ system("cd #{export_path}; rm -fr tree &> /dev/null") + end + end + + def setup_reader(reader) + case reader + when :legacy_reader + allow_any_instance_of(Gitlab::ImportExport::JSON::LegacyReader::File).to receive(:exist?).and_return(true) + allow_any_instance_of(Gitlab::ImportExport::JSON::NdjsonReader).to receive(:exist?).and_return(false) + when :ndjson_reader + allow_any_instance_of(Gitlab::ImportExport::JSON::LegacyReader::File).to receive(:exist?).and_return(false) + allow_any_instance_of(Gitlab::ImportExport::JSON::NdjsonReader).to receive(:exist?).and_return(true) + else + raise "invalid reader #{reader}. Supported readers: :legacy_reader, :ndjson_reader" + end + end + def fixtures_path "spec/fixtures/lib/gitlab/import_export" end |