-rw-r--r--  .gitlab-ci.yml | 168
-rw-r--r--  .nvmrc | 2
-rw-r--r--  .ruby-version | 2
-rw-r--r--  GITLAB_WORKHORSE_VERSION | 2
-rw-r--r--  Gemfile | 4
-rw-r--r--  Gemfile.lock | 143
-rw-r--r--  Gemfile.rails4 | 7
-rw-r--r--  Gemfile.rails4.lock (renamed from Gemfile.rails5.lock) | 147
-rw-r--r--  app/assets/javascripts/filtered_search/filtered_search_visual_tokens.js | 9
-rw-r--r--  app/assets/javascripts/vue_shared/components/diff_viewer/viewers/image_diff_viewer.vue | 7
-rw-r--r--  app/assets/javascripts/vue_shared/components/header_ci_component.vue | 45
-rw-r--r--  app/assets/stylesheets/pages/issuable.scss | 6
-rw-r--r--  app/assets/stylesheets/pages/note_form.scss | 6
-rw-r--r--  app/assets/stylesheets/pages/notes.scss | 55
-rw-r--r--  app/helpers/nav_helper.rb | 16
-rw-r--r--  app/models/application_record.rb | 5
-rw-r--r--  app/models/clusters/platforms/kubernetes.rb | 4
-rw-r--r--  app/models/concerns/mentionable.rb | 4
-rw-r--r--  app/models/concerns/mentionable/reference_regexes.rb | 22
-rw-r--r--  app/models/deployment.rb | 6
-rw-r--r--  app/models/identity.rb | 8
-rw-r--r--  app/models/identity/uniqueness_scopes.rb | 11
-rw-r--r--  app/models/project_services/issue_tracker_service.rb | 2
-rw-r--r--  app/models/project_services/kubernetes_service.rb | 4
-rw-r--r--  app/models/shard.rb | 11
-rw-r--r--  app/serializers/environment_status_entity.rb | 6
-rw-r--r--  app/services/clusters/applications/check_installation_progress_service.rb | 7
-rw-r--r--  app/services/clusters/applications/install_service.rb | 6
-rw-r--r--  app/services/clusters/gcp/kubernetes/fetch_kubernetes_token_service.rb | 5
-rw-r--r--  app/services/users/build_service.rb | 8
-rw-r--r--  app/views/admin/application_settings/_ci_cd.html.haml | 10
-rw-r--r--  app/views/events/event/_push.html.haml | 4
-rw-r--r--  app/views/layouts/nav/_dashboard.html.haml | 2
-rw-r--r--  app/views/projects/diffs/_text_file.html.haml | 2
-rw-r--r--  app/views/projects/merge_requests/_mr_title.html.haml | 2
-rw-r--r--  app/views/shared/_label.html.haml | 45
-rw-r--r--  app/views/shared/notes/_note.html.haml | 2
-rw-r--r--  app/views/shared/notes/_notes_with_form.html.haml | 2
-rwxr-xr-x  bin/rails | 2
-rwxr-xr-x  bin/rake | 2
-rwxr-xr-x  bin/rspec | 2
-rwxr-xr-x  bin/setup | 2
-rw-r--r--  changelogs/unreleased/52940-fix-internal-email-pattern-not-respected.yml | 5
-rw-r--r--  changelogs/unreleased/53636-fix-rendering-of-any-user-filter.yml | 5
-rw-r--r--  changelogs/unreleased/53816-empty-label-menu-if-not-logged-in.yml | 5
-rw-r--r--  changelogs/unreleased/53972-fix-fill-shards.yml | 5
-rw-r--r--  changelogs/unreleased/54002-activity-feed-missing-padding-in-event-note-when-a-branch-is-deleted.yml | 5
-rw-r--r--  changelogs/unreleased/drop-gcp-cluster-table.yml | 5
-rw-r--r--  changelogs/unreleased/fix-deployment-metrics-in-mr-widget.yml | 6
-rw-r--r--  changelogs/unreleased/kubernetes-http-response-code.yml | 5
-rw-r--r--  changelogs/unreleased/sh-bump-ruby-2-5-3.yml | 5
-rw-r--r--  changelogs/unreleased/switch-rails.yml | 5
-rw-r--r--  config/application.rb | 2
-rw-r--r--  config/boot.rb | 4
-rw-r--r--  config/environment.rb | 6
-rw-r--r--  config/initializers/mysql_set_length_for_binary_indexes.rb | 50
-rw-r--r--  db/migrate/20181031190558_drop_fk_gcp_clusters_table.rb | 37
-rw-r--r--  db/migrate/20181031190559_drop_gcp_clusters_table.rb | 53
-rw-r--r--  db/schema.rb | 969
-rw-r--r--  doc/administration/auth/authentiq.md | 36
-rw-r--r--  doc/administration/high_availability/nfs.md | 10
-rw-r--r--  doc/administration/high_availability/redis.md | 2
-rw-r--r--  doc/administration/logs.md | 12
-rw-r--r--  doc/administration/monitoring/performance/influxdb_configuration.md | 6
-rw-r--r--  doc/administration/monitoring/prometheus/index.md | 121
-rw-r--r--  doc/administration/operations/fast_ssh_key_lookup.md | 16
-rw-r--r--  doc/administration/reply_by_email_postfix_setup.md | 2
-rw-r--r--  doc/administration/troubleshooting/debug.md | 6
-rw-r--r--  doc/api/events.md | 4
-rw-r--r--  doc/ci/autodeploy/quick_start_guide.md | 6
-rw-r--r--  doc/ci/docker/using_docker_build.md | 8
-rw-r--r--  doc/ci/examples/browser_performance.md | 73
-rw-r--r--  doc/ci/examples/code_quality.md | 82
-rw-r--r--  doc/ci/examples/container_scanning.md | 83
-rw-r--r--  doc/ci/examples/dast.md | 67
-rw-r--r--  doc/ci/examples/deployment/composer-npm-deploy.md | 24
-rw-r--r--  doc/ci/examples/test-and-deploy-python-application-to-heroku.md | 12
-rw-r--r--  doc/ci/examples/test-and-deploy-ruby-application-to-heroku.md | 11
-rw-r--r--  doc/ci/quick_start/README.md | 2
-rw-r--r--  doc/ci/runners/README.md | 6
-rw-r--r--  doc/ci/yaml/README.md | 77
-rw-r--r--  doc/development/README.md | 1
-rw-r--r--  doc/development/adding_database_indexes.md | 6
-rw-r--r--  doc/development/build_test_package.md | 13
-rw-r--r--  doc/development/code_review.md | 68
-rw-r--r--  doc/development/diffs.md | 24
-rw-r--r--  doc/development/ee_features.md | 153
-rw-r--r--  doc/development/fe_guide/style_guide_scss.md | 6
-rw-r--r--  doc/development/fe_guide/vue.md | 8
-rw-r--r--  doc/development/fe_guide/vuex.md | 12
-rw-r--r--  doc/development/github_importer.md | 18
-rw-r--r--  doc/development/instrumentation.md | 8
-rw-r--r--  doc/development/new_fe_guide/development/performance.md | 2
-rw-r--r--  doc/development/new_fe_guide/development/testing.md | 295
-rw-r--r--  doc/development/performance.md | 22
-rw-r--r--  doc/development/post_deployment_migrations.md | 6
-rw-r--r--  doc/development/query_count_limits.md | 4
-rw-r--r--  doc/development/swapping_tables.md | 4
-rw-r--r--  doc/development/switching_to_rails5.md | 27
-rw-r--r--  doc/development/ui_guide.md | 10
-rw-r--r--  doc/development/ux_guide/users.md | 16
-rw-r--r--  doc/development/what_requires_downtime.md | 2
-rw-r--r--  doc/gitlab-basics/create-your-ssh-keys.md | 2
-rw-r--r--  doc/install/installation.md | 6
-rw-r--r--  doc/integration/akismet.md | 16
-rw-r--r--  doc/integration/google.md | 2
-rw-r--r--  doc/integration/recaptcha.md | 20
-rw-r--r--  doc/security/rack_attack.md | 18
-rw-r--r--  doc/security/two_factor_authentication.md | 4
-rw-r--r--  doc/topics/git/troubleshooting_git.md | 15
-rw-r--r--  doc/university/training/end-user/README.md | 71
-rw-r--r--  doc/university/training/topics/bisect.md | 12
-rw-r--r--  doc/university/training/topics/getting_started.md | 9
-rw-r--r--  doc/university/training/topics/git_log.md | 10
-rw-r--r--  doc/university/training/topics/merge_conflicts.md | 18
-rw-r--r--  doc/university/training/topics/rollback_commits.md | 18
-rw-r--r--  doc/university/training/topics/stash.md | 12
-rw-r--r--  doc/update/README.md | 8
-rw-r--r--  doc/user/admin_area/settings/continuous_integration.md | 16
-rw-r--r--  doc/user/profile/account/two_factor_authentication.md | 4
-rw-r--r--  doc/user/profile/index.md | 6
-rw-r--r--  doc/user/project/clusters/eks_and_gitlab/img/rbac.png | bin 0 -> 49712 bytes
-rw-r--r--  doc/user/project/clusters/eks_and_gitlab/index.md | 117
-rw-r--r--  doc/user/project/container_registry.md | 4
-rw-r--r--  doc/user/project/deploy_tokens/index.md | 12
-rw-r--r--  doc/user/project/import/github.md | 30
-rw-r--r--  doc/user/project/integrations/bugzilla.md | 5
-rw-r--r--  doc/user/project/integrations/jira_cloud_configuration.md | 5
-rw-r--r--  doc/user/project/integrations/jira_server_configuration.md | 12
-rw-r--r--  doc/user/project/integrations/redmine.md | 7
-rw-r--r--  doc/user/project/integrations/webhooks.md | 8
-rw-r--r--  doc/user/project/issues/automatic_issue_closing.md | 9
-rw-r--r--  doc/user/project/new_ci_build_permissions_model.md | 2
-rw-r--r--  doc/user/project/repository/index.md | 34
-rw-r--r--  lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml | 2
-rw-r--r--  lib/gitlab/kubernetes/helm/api.rb | 18
-rw-r--r--  lib/gitlab/kubernetes/namespace.rb | 4
-rw-r--r--  lib/gitlab/sentry.rb | 14
-rw-r--r--  qa/qa/page/merge_request/show.rb | 8
-rw-r--r--  qa/qa/resource/merge_request.rb | 10
-rwxr-xr-x  scripts/rails4-gemfile-lock-check | 19
-rwxr-xr-x  scripts/rails5-gemfile-lock-check | 19
-rwxr-xr-x  scripts/review_apps/review-apps.sh | 26
-rw-r--r--  spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js | 57
-rw-r--r--  spec/javascripts/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js | 20
-rw-r--r--  spec/lib/gitlab/database/migration_helpers_spec.rb | 7
-rw-r--r--  spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb | 7
-rw-r--r--  spec/lib/gitlab/kubernetes/helm/api_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/kubernetes/namespace_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/sentry_spec.rb | 24
-rw-r--r--  spec/serializers/environment_status_entity_spec.rb | 36
-rw-r--r--  spec/services/clusters/applications/check_installation_progress_service_spec.rb | 31
-rw-r--r--  spec/services/clusters/applications/install_service_spec.rb | 2
-rw-r--r--  spec/services/clusters/gcp/kubernetes/fetch_kubernetes_token_service_spec.rb | 15
-rw-r--r--  spec/services/users/build_service_spec.rb | 6
-rw-r--r--  spec/spec_helper.rb | 4
-rw-r--r--  spec/support/helpers/kubernetes_helpers.rb | 4
157 files changed, 2587 insertions, 1614 deletions
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 54ce9f4956f..94922aec598 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,4 +1,4 @@
-image: "dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.4.5-golang-1.9-git-2.18-chrome-69.0-node-8.x-yarn-1.2-postgresql-9.6-graphicsmagick-1.3.29"
+image: "dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.5.3-golang-1.9-git-2.18-chrome-69.0-node-10.x-yarn-1.12-postgresql-9.6-graphicsmagick-1.3.29"
.dedicated-runner: &dedicated-runner
retry: 1
@@ -6,7 +6,7 @@ image: "dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.4.5-golang-1.9-git
- gitlab-org
.default-cache: &default-cache
- key: "ruby-2.4.5-debian-stretch-with-yarn"
+ key: "debian-stretch-ruby-2.5.3-node-10.x"
paths:
- vendor/ruby
- .yarn-cache/
@@ -75,15 +75,15 @@ stages:
- mysql:5.7
- redis:alpine
-.rails5: &rails5
- allow_failure: true
- only:
+.rails4: &rails4
+ allow_failure: false
+ except:
variables:
- - $CI_COMMIT_REF_NAME =~ /rails5/
- - $RAILS5_ENABLED
+ - $CI_COMMIT_REF_NAME =~ /norails4/
+ - $RAILS5_DISABLED
variables:
- BUNDLE_GEMFILE: "Gemfile.rails5"
- RAILS5: "true"
+ BUNDLE_GEMFILE: "Gemfile.rails4"
+ RAILS5: "false"
# Skip all jobs except the ones that begin with 'docs/'.
# Used for commits including ONLY documentation changes.
@@ -121,7 +121,7 @@ stages:
<<: *except-docs-and-qa
.single-script-job: &single-script-job
- image: ruby:2.4-alpine
+ image: ruby:2.5-alpine
stage: test
cache: {}
dependencies: []
@@ -177,17 +177,17 @@ stages:
<<: *rspec-metadata
<<: *use-pg
-.rspec-metadata-pg-rails5: &rspec-metadata-pg-rails5
+.rspec-metadata-pg-rails4: &rspec-metadata-pg-rails4
<<: *rspec-metadata-pg
- <<: *rails5
+ <<: *rails4
.rspec-metadata-mysql: &rspec-metadata-mysql
<<: *rspec-metadata
<<: *use-mysql
-.rspec-metadata-mysql-rails5: &rspec-metadata-mysql-rails5
+.rspec-metadata-mysql-rails4: &rspec-metadata-mysql-rails4
<<: *rspec-metadata-mysql
- <<: *rails5
+ <<: *rails4
.only-canonical-masters: &only-canonical-masters
only:
@@ -227,6 +227,8 @@ stages:
script:
- git fetch https://gitlab.com/gitlab-org/gitlab-ce.git v9.3.0
- git checkout -f FETCH_HEAD
+ - sed -i "s/gem 'oj', '~> 2.17.4'//" Gemfile
+ - bundle update google-protobuf grpc
- bundle install $BUNDLE_INSTALL_FLAGS
- date
- cp config/gitlab.yml.example config/gitlab.yml
@@ -316,10 +318,10 @@ review-docs-cleanup:
# Trigger a docker image build in CNG (Cloud Native GitLab) repository
#
cloud-native-image:
- image: ruby:2.4-alpine
+ image: ruby:2.5-alpine
before_script: []
dependencies: []
- stage: test
+ stage: post-test
allow_failure: true
variables:
GIT_DEPTH: "1"
@@ -369,7 +371,7 @@ update-tests-metadata:
flaky-examples-check:
<<: *dedicated-runner
- image: ruby:2.4-alpine
+ image: ruby:2.5-alpine
services: []
before_script: []
variables:
@@ -427,7 +429,7 @@ setup-test-env:
script:
- bundle exec ruby -Ispec -e 'require "spec_helper" ; TestEnv.init'
- scripts/gitaly-test-build # Do not use 'bundle exec' here
- - BUNDLE_GEMFILE=Gemfile.rails5 bundle install $BUNDLE_INSTALL_FLAGS
+ - BUNDLE_GEMFILE=Gemfile.rails4 bundle install $BUNDLE_INSTALL_FLAGS
artifacts:
expire_in: 7d
paths:
@@ -519,67 +521,67 @@ rspec-mysql 27 30: *rspec-metadata-mysql
rspec-mysql 28 30: *rspec-metadata-mysql
rspec-mysql 29 30: *rspec-metadata-mysql
-rspec-pg-rails5 0 30: *rspec-metadata-pg-rails5
-rspec-pg-rails5 1 30: *rspec-metadata-pg-rails5
-rspec-pg-rails5 2 30: *rspec-metadata-pg-rails5
-rspec-pg-rails5 3 30: *rspec-metadata-pg-rails5
-rspec-pg-rails5 4 30: *rspec-metadata-pg-rails5
-rspec-pg-rails5 5 30: *rspec-metadata-pg-rails5
-rspec-pg-rails5 6 30: *rspec-metadata-pg-rails5
-rspec-pg-rails5 7 30: *rspec-metadata-pg-rails5
-rspec-pg-rails5 8 30: *rspec-metadata-pg-rails5
-rspec-pg-rails5 9 30: *rspec-metadata-pg-rails5
-rspec-pg-rails5 10 30: *rspec-metadata-pg-rails5
-rspec-pg-rails5 11 30: *rspec-metadata-pg-rails5
-rspec-pg-rails5 12 30: *rspec-metadata-pg-rails5
-rspec-pg-rails5 13 30: *rspec-metadata-pg-rails5
-rspec-pg-rails5 14 30: *rspec-metadata-pg-rails5
-rspec-pg-rails5 15 30: *rspec-metadata-pg-rails5
-rspec-pg-rails5 16 30: *rspec-metadata-pg-rails5
-rspec-pg-rails5 17 30: *rspec-metadata-pg-rails5
-rspec-pg-rails5 18 30: *rspec-metadata-pg-rails5
-rspec-pg-rails5 19 30: *rspec-metadata-pg-rails5
-rspec-pg-rails5 20 30: *rspec-metadata-pg-rails5
-rspec-pg-rails5 21 30: *rspec-metadata-pg-rails5
-rspec-pg-rails5 22 30: *rspec-metadata-pg-rails5
-rspec-pg-rails5 23 30: *rspec-metadata-pg-rails5
-rspec-pg-rails5 24 30: *rspec-metadata-pg-rails5
-rspec-pg-rails5 25 30: *rspec-metadata-pg-rails5
-rspec-pg-rails5 26 30: *rspec-metadata-pg-rails5
-rspec-pg-rails5 27 30: *rspec-metadata-pg-rails5
-rspec-pg-rails5 28 30: *rspec-metadata-pg-rails5
-rspec-pg-rails5 29 30: *rspec-metadata-pg-rails5
-
-rspec-mysql-rails5 0 30: *rspec-metadata-mysql-rails5
-rspec-mysql-rails5 1 30: *rspec-metadata-mysql-rails5
-rspec-mysql-rails5 2 30: *rspec-metadata-mysql-rails5
-rspec-mysql-rails5 3 30: *rspec-metadata-mysql-rails5
-rspec-mysql-rails5 4 30: *rspec-metadata-mysql-rails5
-rspec-mysql-rails5 5 30: *rspec-metadata-mysql-rails5
-rspec-mysql-rails5 6 30: *rspec-metadata-mysql-rails5
-rspec-mysql-rails5 7 30: *rspec-metadata-mysql-rails5
-rspec-mysql-rails5 8 30: *rspec-metadata-mysql-rails5
-rspec-mysql-rails5 9 30: *rspec-metadata-mysql-rails5
-rspec-mysql-rails5 10 30: *rspec-metadata-mysql-rails5
-rspec-mysql-rails5 11 30: *rspec-metadata-mysql-rails5
-rspec-mysql-rails5 12 30: *rspec-metadata-mysql-rails5
-rspec-mysql-rails5 13 30: *rspec-metadata-mysql-rails5
-rspec-mysql-rails5 14 30: *rspec-metadata-mysql-rails5
-rspec-mysql-rails5 15 30: *rspec-metadata-mysql-rails5
-rspec-mysql-rails5 16 30: *rspec-metadata-mysql-rails5
-rspec-mysql-rails5 17 30: *rspec-metadata-mysql-rails5
-rspec-mysql-rails5 18 30: *rspec-metadata-mysql-rails5
-rspec-mysql-rails5 19 30: *rspec-metadata-mysql-rails5
-rspec-mysql-rails5 20 30: *rspec-metadata-mysql-rails5
-rspec-mysql-rails5 21 30: *rspec-metadata-mysql-rails5
-rspec-mysql-rails5 22 30: *rspec-metadata-mysql-rails5
-rspec-mysql-rails5 23 30: *rspec-metadata-mysql-rails5
-rspec-mysql-rails5 24 30: *rspec-metadata-mysql-rails5
-rspec-mysql-rails5 25 30: *rspec-metadata-mysql-rails5
-rspec-mysql-rails5 26 30: *rspec-metadata-mysql-rails5
-rspec-mysql-rails5 27 30: *rspec-metadata-mysql-rails5
-rspec-mysql-rails5 28 30: *rspec-metadata-mysql-rails5
-rspec-mysql-rails5 29 30: *rspec-metadata-mysql-rails5
+rspec-pg-rails4 0 30: *rspec-metadata-pg-rails4
+rspec-pg-rails4 1 30: *rspec-metadata-pg-rails4
+rspec-pg-rails4 2 30: *rspec-metadata-pg-rails4
+rspec-pg-rails4 3 30: *rspec-metadata-pg-rails4
+rspec-pg-rails4 4 30: *rspec-metadata-pg-rails4
+rspec-pg-rails4 5 30: *rspec-metadata-pg-rails4
+rspec-pg-rails4 6 30: *rspec-metadata-pg-rails4
+rspec-pg-rails4 7 30: *rspec-metadata-pg-rails4
+rspec-pg-rails4 8 30: *rspec-metadata-pg-rails4
+rspec-pg-rails4 9 30: *rspec-metadata-pg-rails4
+rspec-pg-rails4 10 30: *rspec-metadata-pg-rails4
+rspec-pg-rails4 11 30: *rspec-metadata-pg-rails4
+rspec-pg-rails4 12 30: *rspec-metadata-pg-rails4
+rspec-pg-rails4 13 30: *rspec-metadata-pg-rails4
+rspec-pg-rails4 14 30: *rspec-metadata-pg-rails4
+rspec-pg-rails4 15 30: *rspec-metadata-pg-rails4
+rspec-pg-rails4 16 30: *rspec-metadata-pg-rails4
+rspec-pg-rails4 17 30: *rspec-metadata-pg-rails4
+rspec-pg-rails4 18 30: *rspec-metadata-pg-rails4
+rspec-pg-rails4 19 30: *rspec-metadata-pg-rails4
+rspec-pg-rails4 20 30: *rspec-metadata-pg-rails4
+rspec-pg-rails4 21 30: *rspec-metadata-pg-rails4
+rspec-pg-rails4 22 30: *rspec-metadata-pg-rails4
+rspec-pg-rails4 23 30: *rspec-metadata-pg-rails4
+rspec-pg-rails4 24 30: *rspec-metadata-pg-rails4
+rspec-pg-rails4 25 30: *rspec-metadata-pg-rails4
+rspec-pg-rails4 26 30: *rspec-metadata-pg-rails4
+rspec-pg-rails4 27 30: *rspec-metadata-pg-rails4
+rspec-pg-rails4 28 30: *rspec-metadata-pg-rails4
+rspec-pg-rails4 29 30: *rspec-metadata-pg-rails4
+
+rspec-mysql-rails4 0 30: *rspec-metadata-mysql-rails4
+rspec-mysql-rails4 1 30: *rspec-metadata-mysql-rails4
+rspec-mysql-rails4 2 30: *rspec-metadata-mysql-rails4
+rspec-mysql-rails4 3 30: *rspec-metadata-mysql-rails4
+rspec-mysql-rails4 4 30: *rspec-metadata-mysql-rails4
+rspec-mysql-rails4 5 30: *rspec-metadata-mysql-rails4
+rspec-mysql-rails4 6 30: *rspec-metadata-mysql-rails4
+rspec-mysql-rails4 7 30: *rspec-metadata-mysql-rails4
+rspec-mysql-rails4 8 30: *rspec-metadata-mysql-rails4
+rspec-mysql-rails4 9 30: *rspec-metadata-mysql-rails4
+rspec-mysql-rails4 10 30: *rspec-metadata-mysql-rails4
+rspec-mysql-rails4 11 30: *rspec-metadata-mysql-rails4
+rspec-mysql-rails4 12 30: *rspec-metadata-mysql-rails4
+rspec-mysql-rails4 13 30: *rspec-metadata-mysql-rails4
+rspec-mysql-rails4 14 30: *rspec-metadata-mysql-rails4
+rspec-mysql-rails4 15 30: *rspec-metadata-mysql-rails4
+rspec-mysql-rails4 16 30: *rspec-metadata-mysql-rails4
+rspec-mysql-rails4 17 30: *rspec-metadata-mysql-rails4
+rspec-mysql-rails4 18 30: *rspec-metadata-mysql-rails4
+rspec-mysql-rails4 19 30: *rspec-metadata-mysql-rails4
+rspec-mysql-rails4 20 30: *rspec-metadata-mysql-rails4
+rspec-mysql-rails4 21 30: *rspec-metadata-mysql-rails4
+rspec-mysql-rails4 22 30: *rspec-metadata-mysql-rails4
+rspec-mysql-rails4 23 30: *rspec-metadata-mysql-rails4
+rspec-mysql-rails4 24 30: *rspec-metadata-mysql-rails4
+rspec-mysql-rails4 25 30: *rspec-metadata-mysql-rails4
+rspec-mysql-rails4 26 30: *rspec-metadata-mysql-rails4
+rspec-mysql-rails4 27 30: *rspec-metadata-mysql-rails4
+rspec-mysql-rails4 28 30: *rspec-metadata-mysql-rails4
+rspec-mysql-rails4 29 30: *rspec-metadata-mysql-rails4
static-analysis:
<<: *dedicated-no-docs-no-db-pull-cache-job
@@ -589,7 +591,7 @@ static-analysis:
script:
- scripts/static-analysis
cache:
- key: "ruby-2.4.5-debian-stretch-with-yarn-and-rubocop"
+ key: "debian-stretch-ruby-2.5.3-node-10.x-and-rubocop"
paths:
- vendor/ruby
- .yarn-cache/
@@ -625,11 +627,11 @@ downtime_check:
- /(^docs[\/-].*|.*-docs$)/
- /(^qa[\/-].*|.*-qa$)/
-rails5_gemfile_lock_check:
+rails4_gemfile_lock_check:
<<: *dedicated-no-docs-no-db-pull-cache-job
<<: *except-docs-and-qa
script:
- - scripts/rails5-gemfile-lock-check
+ - scripts/rails4-gemfile-lock-check
ee_compat_check:
<<: *rake-exec
@@ -696,7 +698,7 @@ gitlab:setup-mysql:
# Frontend-related jobs
gitlab:assets:compile:
<<: *dedicated-no-docs-and-no-qa-pull-cache-job
- image: dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.4.4-git-2.18-chrome-69.0-node-8.x-yarn-1.2-graphicsmagick-1.3.29-docker-18.06.1
+ image: dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.5.3-git-2.18-chrome-69.0-node-8.x-yarn-1.2-graphicsmagick-1.3.29-docker-18.06.1
dependencies: []
services:
- docker:stable-dind
diff --git a/.nvmrc b/.nvmrc
index dba04c1e178..db24ab967fb 100644
--- a/.nvmrc
+++ b/.nvmrc
@@ -1 +1 @@
-8.11.3
+10.13.0
diff --git a/.ruby-version b/.ruby-version
index 59aa62c1fa4..aedc15bb0c6 100644
--- a/.ruby-version
+++ b/.ruby-version
@@ -1 +1 @@
-2.4.5
+2.5.3
diff --git a/GITLAB_WORKHORSE_VERSION b/GITLAB_WORKHORSE_VERSION
index a3fcc7121bb..21c8c7b46b8 100644
--- a/GITLAB_WORKHORSE_VERSION
+++ b/GITLAB_WORKHORSE_VERSION
@@ -1 +1 @@
-7.1.0
+7.1.1
diff --git a/Gemfile b/Gemfile
index 6a644358001..7030bd9c5e8 100644
--- a/Gemfile
+++ b/Gemfile
@@ -1,6 +1,6 @@
# --- Special code for migrating to Rails 5.0 ---
def rails5?
- %w[1 true].include?(ENV["RAILS5"])
+ !%w[0 false].include?(ENV["RAILS5"])
end
gem_versions = {}
@@ -315,7 +315,7 @@ group :development do
# Better errors handler
gem 'better_errors', '~> 2.1.0'
- gem 'binding_of_caller', '~> 0.7.2'
+ gem 'binding_of_caller', '~> 0.8.0'
# thin instead webrick
gem 'thin', '~> 1.7.0'
diff --git a/Gemfile.lock b/Gemfile.lock
index 23261373c2b..f08855ed049 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -4,41 +4,44 @@ GEM
RedCloth (4.3.2)
abstract_type (0.0.7)
ace-rails-ap (4.1.2)
- actionmailer (4.2.10)
- actionpack (= 4.2.10)
- actionview (= 4.2.10)
- activejob (= 4.2.10)
+ actioncable (5.0.7)
+ actionpack (= 5.0.7)
+ nio4r (>= 1.2, < 3.0)
+ websocket-driver (~> 0.6.1)
+ actionmailer (5.0.7)
+ actionpack (= 5.0.7)
+ actionview (= 5.0.7)
+ activejob (= 5.0.7)
mail (~> 2.5, >= 2.5.4)
- rails-dom-testing (~> 1.0, >= 1.0.5)
- actionpack (4.2.10)
- actionview (= 4.2.10)
- activesupport (= 4.2.10)
- rack (~> 1.6)
- rack-test (~> 0.6.2)
- rails-dom-testing (~> 1.0, >= 1.0.5)
+ rails-dom-testing (~> 2.0)
+ actionpack (5.0.7)
+ actionview (= 5.0.7)
+ activesupport (= 5.0.7)
+ rack (~> 2.0)
+ rack-test (~> 0.6.3)
+ rails-dom-testing (~> 2.0)
rails-html-sanitizer (~> 1.0, >= 1.0.2)
- actionview (4.2.10)
- activesupport (= 4.2.10)
+ actionview (5.0.7)
+ activesupport (= 5.0.7)
builder (~> 3.1)
erubis (~> 2.7.0)
- rails-dom-testing (~> 1.0, >= 1.0.5)
+ rails-dom-testing (~> 2.0)
rails-html-sanitizer (~> 1.0, >= 1.0.3)
- activejob (4.2.10)
- activesupport (= 4.2.10)
- globalid (>= 0.3.0)
- activemodel (4.2.10)
- activesupport (= 4.2.10)
- builder (~> 3.1)
- activerecord (4.2.10)
- activemodel (= 4.2.10)
- activesupport (= 4.2.10)
- arel (~> 6.0)
- activerecord_sane_schema_dumper (0.2)
- rails (>= 4, < 5)
- activesupport (4.2.10)
- i18n (~> 0.7)
+ activejob (5.0.7)
+ activesupport (= 5.0.7)
+ globalid (>= 0.3.6)
+ activemodel (5.0.7)
+ activesupport (= 5.0.7)
+ activerecord (5.0.7)
+ activemodel (= 5.0.7)
+ activesupport (= 5.0.7)
+ arel (~> 7.0)
+ activerecord_sane_schema_dumper (1.0)
+ rails (>= 5, < 6)
+ activesupport (5.0.7)
+ concurrent-ruby (~> 1.0, >= 1.0.2)
+ i18n (>= 0.7, < 2)
minitest (~> 5.1)
- thread_safe (~> 0.3, >= 0.3.4)
tzinfo (~> 1.1)
acts-as-taggable-on (5.0.0)
activerecord (>= 4.2.8)
@@ -49,7 +52,7 @@ GEM
public_suffix (>= 2.0.2, < 4.0)
aes_key_wrap (1.0.1)
akismet (2.0.0)
- arel (6.0.4)
+ arel (7.1.4)
asana (0.6.0)
faraday (~> 0.9)
faraday_middleware (~> 0.9)
@@ -79,7 +82,7 @@ GEM
erubis (>= 2.6.6)
rack (>= 0.9.0)
bindata (2.4.3)
- binding_of_caller (0.7.2)
+ binding_of_caller (0.8.0)
debug_inspector (>= 0.0.1)
bootsnap (1.3.2)
msgpack (~> 1.0)
@@ -137,14 +140,14 @@ GEM
addressable
daemons (1.2.6)
database_cleaner (1.5.3)
- debug_inspector (0.0.2)
+ debug_inspector (0.0.3)
debugger-ruby_core_source (1.3.8)
deckar01-task_list (2.0.0)
html-pipeline
declarative (0.0.10)
declarative-option (0.1.0)
- default_value_for (3.0.2)
- activerecord (>= 3.2.0, < 5.1)
+ default_value_for (3.0.5)
+ activerecord (>= 3.2.0, < 5.2)
descendants_tracker (0.0.4)
thread_safe (~> 0.3, >= 0.3.1)
device_detector (1.0.0)
@@ -376,7 +379,7 @@ GEM
json (~> 1.8)
multi_xml (>= 0.5.2)
httpclient (2.8.3)
- i18n (0.9.5)
+ i18n (1.1.0)
concurrent-ruby (~> 1.0)
icalendar (2.4.1)
ice_nine (0.11.2)
@@ -470,6 +473,7 @@ GEM
net-ldap (0.16.0)
net-ssh (5.0.1)
netrc (0.11.0)
+ nio4r (2.3.1)
nokogiri (1.8.4)
mini_portile2 (~> 2.3.0)
nokogumbo (1.5.0)
@@ -598,7 +602,7 @@ GEM
get_process_mem (~> 0.2)
puma (>= 2.7, < 4)
pyu-ruby-sasl (0.0.3.3)
- rack (1.6.11)
+ rack (2.0.5)
rack-accept (0.4.5)
rack (>= 0.4)
rack-attack (4.4.1)
@@ -616,31 +620,36 @@ GEM
rack
rack-test (0.6.3)
rack (>= 1.0)
- rails (4.2.10)
- actionmailer (= 4.2.10)
- actionpack (= 4.2.10)
- actionview (= 4.2.10)
- activejob (= 4.2.10)
- activemodel (= 4.2.10)
- activerecord (= 4.2.10)
- activesupport (= 4.2.10)
- bundler (>= 1.3.0, < 2.0)
- railties (= 4.2.10)
- sprockets-rails
+ rails (5.0.7)
+ actioncable (= 5.0.7)
+ actionmailer (= 5.0.7)
+ actionpack (= 5.0.7)
+ actionview (= 5.0.7)
+ activejob (= 5.0.7)
+ activemodel (= 5.0.7)
+ activerecord (= 5.0.7)
+ activesupport (= 5.0.7)
+ bundler (>= 1.3.0)
+ railties (= 5.0.7)
+ sprockets-rails (>= 2.0.0)
+ rails-controller-testing (1.0.2)
+ actionpack (~> 5.x, >= 5.0.1)
+ actionview (~> 5.x, >= 5.0.1)
+ activesupport (~> 5.x)
rails-deprecated_sanitizer (1.0.3)
activesupport (>= 4.2.0.alpha)
- rails-dom-testing (1.0.9)
- activesupport (>= 4.2.0, < 5.0)
- nokogiri (~> 1.6)
- rails-deprecated_sanitizer (>= 1.0.1)
+ rails-dom-testing (2.0.3)
+ activesupport (>= 4.2.0)
+ nokogiri (>= 1.6)
rails-html-sanitizer (1.0.4)
loofah (~> 2.2, >= 2.2.2)
- rails-i18n (4.0.9)
- i18n (~> 0.7)
- railties (~> 4.0)
- railties (4.2.10)
- actionpack (= 4.2.10)
- activesupport (= 4.2.10)
+ rails-i18n (5.1.1)
+ i18n (>= 0.7, < 2)
+ railties (>= 5.0, < 6)
+ railties (5.0.7)
+ actionpack (= 5.0.7)
+ activesupport (= 5.0.7)
+ method_source
rake (>= 0.8.7)
thor (>= 0.18.1, < 2.0)
rainbow (3.0.0)
@@ -731,8 +740,7 @@ GEM
rspec-core
rspec-set (0.1.3)
rspec-support (3.7.1)
- rspec_junit_formatter (0.2.3)
- builder (< 4)
+ rspec_junit_formatter (0.4.1)
rspec-core (>= 2, < 4, != 2.12.0)
rspec_profiling (0.0.5)
activerecord
@@ -849,8 +857,6 @@ GEM
sysexits (1.2.0)
temple (0.8.0)
test-prof (0.2.5)
- test_after_commit (1.1.0)
- activerecord (>= 3.2)
text (1.3.1)
thin (1.7.2)
daemons (~> 1.0, >= 1.0.9)
@@ -913,6 +919,9 @@ GEM
hashdiff
webpack-rails (0.9.11)
railties (>= 3.2.0)
+ websocket-driver (0.6.5)
+ websocket-extensions (>= 0.1.0)
+ websocket-extensions (0.1.3)
wikicloth (0.8.1)
builder
expression_parser
@@ -928,7 +937,7 @@ PLATFORMS
DEPENDENCIES
RedCloth (~> 4.3.2)
ace-rails-ap (~> 4.1.0)
- activerecord_sane_schema_dumper (= 0.2)
+ activerecord_sane_schema_dumper (= 1.0)
acts-as-taggable-on (~> 5.0)
addressable (~> 2.5.2)
akismet (~> 2.0)
@@ -943,7 +952,7 @@ DEPENDENCIES
bcrypt_pbkdf (~> 1.0)
benchmark-ips (~> 2.3.0)
better_errors (~> 2.1.0)
- binding_of_caller (~> 0.7.2)
+ binding_of_caller (~> 0.8.0)
bootsnap (~> 1.3)
bootstrap_form (~> 2.7.0)
brakeman (~> 4.2)
@@ -962,7 +971,7 @@ DEPENDENCIES
creole (~> 0.5.0)
database_cleaner (~> 1.5.0)
deckar01-task_list (= 2.0.0)
- default_value_for (~> 3.0.0)
+ default_value_for (~> 3.0.5)
device_detector
devise (~> 4.4)
devise-two-factor (~> 3.0.0)
@@ -1080,9 +1089,10 @@ DEPENDENCIES
rack-cors (~> 1.0.0)
rack-oauth2 (~> 1.2.1)
rack-proxy (~> 0.6.0)
- rails (= 4.2.10)
+ rails (= 5.0.7)
+ rails-controller-testing
rails-deprecated_sanitizer (~> 1.0.3)
- rails-i18n (~> 4.0.9)
+ rails-i18n (~> 5.1)
rainbow (~> 3.0)
raindrops (~> 0.18)
rblineprof (~> 0.3.6)
@@ -1135,7 +1145,6 @@ DEPENDENCIES
state_machines-activerecord (~> 0.5.1)
sys-filesystem (~> 1.1.6)
test-prof (~> 0.2.5)
- test_after_commit (~> 1.1)
thin (~> 1.7.0)
timecop (~> 0.8.0)
toml-rb (~> 1.0.0)
diff --git a/Gemfile.rails4 b/Gemfile.rails4
new file mode 100644
index 00000000000..0ec00e702aa
--- /dev/null
+++ b/Gemfile.rails4
@@ -0,0 +1,7 @@
+# BUNDLE_GEMFILE=Gemfile.rails4 bundle install
+
+ENV["RAILS5"] = "false"
+
+gemfile = File.expand_path("../Gemfile", __FILE__)
+
+eval(File.read(gemfile), nil, gemfile)
diff --git a/Gemfile.rails5.lock b/Gemfile.rails4.lock
index 8893088446b..5bec40de909 100644
--- a/Gemfile.rails5.lock
+++ b/Gemfile.rails4.lock
@@ -4,44 +4,41 @@ GEM
RedCloth (4.3.2)
abstract_type (0.0.7)
ace-rails-ap (4.1.2)
- actioncable (5.0.7)
- actionpack (= 5.0.7)
- nio4r (>= 1.2, < 3.0)
- websocket-driver (~> 0.6.1)
- actionmailer (5.0.7)
- actionpack (= 5.0.7)
- actionview (= 5.0.7)
- activejob (= 5.0.7)
+ actionmailer (4.2.10)
+ actionpack (= 4.2.10)
+ actionview (= 4.2.10)
+ activejob (= 4.2.10)
mail (~> 2.5, >= 2.5.4)
- rails-dom-testing (~> 2.0)
- actionpack (5.0.7)
- actionview (= 5.0.7)
- activesupport (= 5.0.7)
- rack (~> 2.0)
- rack-test (~> 0.6.3)
- rails-dom-testing (~> 2.0)
+ rails-dom-testing (~> 1.0, >= 1.0.5)
+ actionpack (4.2.10)
+ actionview (= 4.2.10)
+ activesupport (= 4.2.10)
+ rack (~> 1.6)
+ rack-test (~> 0.6.2)
+ rails-dom-testing (~> 1.0, >= 1.0.5)
rails-html-sanitizer (~> 1.0, >= 1.0.2)
- actionview (5.0.7)
- activesupport (= 5.0.7)
+ actionview (4.2.10)
+ activesupport (= 4.2.10)
builder (~> 3.1)
erubis (~> 2.7.0)
- rails-dom-testing (~> 2.0)
+ rails-dom-testing (~> 1.0, >= 1.0.5)
rails-html-sanitizer (~> 1.0, >= 1.0.3)
- activejob (5.0.7)
- activesupport (= 5.0.7)
- globalid (>= 0.3.6)
- activemodel (5.0.7)
- activesupport (= 5.0.7)
- activerecord (5.0.7)
- activemodel (= 5.0.7)
- activesupport (= 5.0.7)
- arel (~> 7.0)
- activerecord_sane_schema_dumper (1.0)
- rails (>= 5, < 6)
- activesupport (5.0.7)
- concurrent-ruby (~> 1.0, >= 1.0.2)
- i18n (>= 0.7, < 2)
+ activejob (4.2.10)
+ activesupport (= 4.2.10)
+ globalid (>= 0.3.0)
+ activemodel (4.2.10)
+ activesupport (= 4.2.10)
+ builder (~> 3.1)
+ activerecord (4.2.10)
+ activemodel (= 4.2.10)
+ activesupport (= 4.2.10)
+ arel (~> 6.0)
+ activerecord_sane_schema_dumper (0.2)
+ rails (>= 4, < 5)
+ activesupport (4.2.10)
+ i18n (~> 0.7)
minitest (~> 5.1)
+ thread_safe (~> 0.3, >= 0.3.4)
tzinfo (~> 1.1)
acts-as-taggable-on (5.0.0)
activerecord (>= 4.2.8)
@@ -52,7 +49,7 @@ GEM
public_suffix (>= 2.0.2, < 4.0)
aes_key_wrap (1.0.1)
akismet (2.0.0)
- arel (7.1.4)
+ arel (6.0.4)
asana (0.6.0)
faraday (~> 0.9)
faraday_middleware (~> 0.9)
@@ -82,7 +79,7 @@ GEM
erubis (>= 2.6.6)
rack (>= 0.9.0)
bindata (2.4.3)
- binding_of_caller (0.7.2)
+ binding_of_caller (0.8.0)
debug_inspector (>= 0.0.1)
bootsnap (1.3.2)
msgpack (~> 1.0)
@@ -140,14 +137,14 @@ GEM
addressable
daemons (1.2.6)
database_cleaner (1.5.3)
- debug_inspector (0.0.2)
+ debug_inspector (0.0.3)
debugger-ruby_core_source (1.3.8)
deckar01-task_list (2.0.0)
html-pipeline
declarative (0.0.10)
declarative-option (0.1.0)
- default_value_for (3.0.5)
- activerecord (>= 3.2.0, < 5.2)
+ default_value_for (3.0.2)
+ activerecord (>= 3.2.0, < 5.1)
descendants_tracker (0.0.4)
thread_safe (~> 0.3, >= 0.3.1)
device_detector (1.0.0)
@@ -379,7 +376,7 @@ GEM
json (~> 1.8)
multi_xml (>= 0.5.2)
httpclient (2.8.3)
- i18n (1.1.0)
+ i18n (0.9.5)
concurrent-ruby (~> 1.0)
icalendar (2.4.1)
ice_nine (0.11.2)
@@ -454,9 +451,9 @@ GEM
memoizable (0.4.2)
thread_safe (~> 0.3, >= 0.3.1)
method_source (0.9.0)
- mime-types (3.1)
+ mime-types (3.2.2)
mime-types-data (~> 3.2015)
- mime-types-data (3.2016.0521)
+ mime-types-data (3.2018.0812)
mimemagic (0.3.0)
mini_magick (4.8.0)
mini_mime (1.0.1)
@@ -473,7 +470,6 @@ GEM
net-ldap (0.16.0)
net-ssh (5.0.1)
netrc (0.11.0)
- nio4r (2.3.1)
nokogiri (1.8.4)
mini_portile2 (~> 2.3.0)
nokogumbo (1.5.0)
@@ -602,7 +598,7 @@ GEM
get_process_mem (~> 0.2)
puma (>= 2.7, < 4)
pyu-ruby-sasl (0.0.3.3)
- rack (2.0.5)
+ rack (1.6.11)
rack-accept (0.4.5)
rack (>= 0.4)
rack-attack (4.4.1)
@@ -620,36 +616,31 @@ GEM
rack
rack-test (0.6.3)
rack (>= 1.0)
- rails (5.0.7)
- actioncable (= 5.0.7)
- actionmailer (= 5.0.7)
- actionpack (= 5.0.7)
- actionview (= 5.0.7)
- activejob (= 5.0.7)
- activemodel (= 5.0.7)
- activerecord (= 5.0.7)
- activesupport (= 5.0.7)
- bundler (>= 1.3.0)
- railties (= 5.0.7)
- sprockets-rails (>= 2.0.0)
- rails-controller-testing (1.0.2)
- actionpack (~> 5.x, >= 5.0.1)
- actionview (~> 5.x, >= 5.0.1)
- activesupport (~> 5.x)
+ rails (4.2.10)
+ actionmailer (= 4.2.10)
+ actionpack (= 4.2.10)
+ actionview (= 4.2.10)
+ activejob (= 4.2.10)
+ activemodel (= 4.2.10)
+ activerecord (= 4.2.10)
+ activesupport (= 4.2.10)
+ bundler (>= 1.3.0, < 2.0)
+ railties (= 4.2.10)
+ sprockets-rails
rails-deprecated_sanitizer (1.0.3)
activesupport (>= 4.2.0.alpha)
- rails-dom-testing (2.0.3)
- activesupport (>= 4.2.0)
- nokogiri (>= 1.6)
+ rails-dom-testing (1.0.9)
+ activesupport (>= 4.2.0, < 5.0)
+ nokogiri (~> 1.6)
+ rails-deprecated_sanitizer (>= 1.0.1)
rails-html-sanitizer (1.0.4)
loofah (~> 2.2, >= 2.2.2)
- rails-i18n (5.1.1)
- i18n (>= 0.7, < 2)
- railties (>= 5.0, < 6)
- railties (5.0.7)
- actionpack (= 5.0.7)
- activesupport (= 5.0.7)
- method_source
+ rails-i18n (4.0.9)
+ i18n (~> 0.7)
+ railties (~> 4.0)
+ railties (4.2.10)
+ actionpack (= 4.2.10)
+ activesupport (= 4.2.10)
rake (>= 0.8.7)
thor (>= 0.18.1, < 2.0)
rainbow (3.0.0)
@@ -740,7 +731,8 @@ GEM
rspec-core
rspec-set (0.1.3)
rspec-support (3.7.1)
- rspec_junit_formatter (0.4.1)
+ rspec_junit_formatter (0.2.3)
+ builder (< 4)
rspec-core (>= 2, < 4, != 2.12.0)
rspec_profiling (0.0.5)
activerecord
@@ -857,6 +849,8 @@ GEM
sysexits (1.2.0)
temple (0.8.0)
test-prof (0.2.5)
+ test_after_commit (1.1.0)
+ activerecord (>= 3.2)
text (1.3.1)
thin (1.7.2)
daemons (~> 1.0, >= 1.0.9)
@@ -919,9 +913,6 @@ GEM
hashdiff
webpack-rails (0.9.11)
railties (>= 3.2.0)
- websocket-driver (0.6.5)
- websocket-extensions (>= 0.1.0)
- websocket-extensions (0.1.3)
wikicloth (0.8.1)
builder
expression_parser
@@ -937,7 +928,7 @@ PLATFORMS
DEPENDENCIES
RedCloth (~> 4.3.2)
ace-rails-ap (~> 4.1.0)
- activerecord_sane_schema_dumper (= 1.0)
+ activerecord_sane_schema_dumper (= 0.2)
acts-as-taggable-on (~> 5.0)
addressable (~> 2.5.2)
akismet (~> 2.0)
@@ -952,7 +943,7 @@ DEPENDENCIES
bcrypt_pbkdf (~> 1.0)
benchmark-ips (~> 2.3.0)
better_errors (~> 2.1.0)
- binding_of_caller (~> 0.7.2)
+ binding_of_caller (~> 0.8.0)
bootsnap (~> 1.3)
bootstrap_form (~> 2.7.0)
brakeman (~> 4.2)
@@ -971,7 +962,7 @@ DEPENDENCIES
creole (~> 0.5.0)
database_cleaner (~> 1.5.0)
deckar01-task_list (= 2.0.0)
- default_value_for (~> 3.0.5)
+ default_value_for (~> 3.0.0)
device_detector
devise (~> 4.4)
devise-two-factor (~> 3.0.0)
@@ -1089,10 +1080,9 @@ DEPENDENCIES
rack-cors (~> 1.0.0)
rack-oauth2 (~> 1.2.1)
rack-proxy (~> 0.6.0)
- rails (= 5.0.7)
- rails-controller-testing
+ rails (= 4.2.10)
rails-deprecated_sanitizer (~> 1.0.3)
- rails-i18n (~> 5.1)
+ rails-i18n (~> 4.0.9)
rainbow (~> 3.0)
raindrops (~> 0.18)
rblineprof (~> 0.3.6)
@@ -1145,6 +1135,7 @@ DEPENDENCIES
state_machines-activerecord (~> 0.5.1)
sys-filesystem (~> 1.1.6)
test-prof (~> 0.2.5)
+ test_after_commit (~> 1.1)
thin (~> 1.7.0)
timecop (~> 0.8.0)
toml-rb (~> 1.0.0)
diff --git a/app/assets/javascripts/filtered_search/filtered_search_visual_tokens.js b/app/assets/javascripts/filtered_search/filtered_search_visual_tokens.js
index c23d4c484a5..89dcff74d0e 100644
--- a/app/assets/javascripts/filtered_search/filtered_search_visual_tokens.js
+++ b/app/assets/javascripts/filtered_search/filtered_search_visual_tokens.js
@@ -135,10 +135,6 @@ export default class FilteredSearchVisualTokens {
}
static updateUserTokenAppearance(tokenValueContainer, tokenValueElement, tokenValue) {
- if (tokenValue === 'none') {
- return Promise.resolve();
- }
-
const username = tokenValue.replace(/^@/, '');
return (
UsersCache.retrieve(username)
@@ -184,7 +180,12 @@ export default class FilteredSearchVisualTokens {
const tokenValueElement = tokenValueContainer.querySelector('.value');
tokenValueElement.innerText = tokenValue;
+ if (tokenValue === 'none' || tokenValue === 'any') {
+ return;
+ }
+
const tokenType = tokenName.toLowerCase();
+
if (tokenType === 'label') {
FilteredSearchVisualTokens.updateLabelTokenColor(tokenValueContainer, tokenValue);
} else if (tokenType === 'author' || tokenType === 'assignee') {
diff --git a/app/assets/javascripts/vue_shared/components/diff_viewer/viewers/image_diff_viewer.vue b/app/assets/javascripts/vue_shared/components/diff_viewer/viewers/image_diff_viewer.vue
index e68a2aa73fa..d7f24c1afc5 100644
--- a/app/assets/javascripts/vue_shared/components/diff_viewer/viewers/image_diff_viewer.vue
+++ b/app/assets/javascripts/vue_shared/components/diff_viewer/viewers/image_diff_viewer.vue
@@ -44,8 +44,11 @@ export default {
isNew() {
return this.diffMode === diffModes.new;
},
+ isRenamed() {
+ return this.diffMode === diffModes.renamed;
+ },
imagePath() {
- return this.isNew ? this.newPath : this.oldPath;
+ return this.isNew || this.isRenamed ? this.newPath : this.oldPath;
},
},
methods: {
@@ -114,7 +117,7 @@ export default {
}]"
>
<slot
- v-if="isNew"
+ v-if="isNew || isRenamed"
slot="image-overlay"
name="image-overlay"
>
diff --git a/app/assets/javascripts/vue_shared/components/header_ci_component.vue b/app/assets/javascripts/vue_shared/components/header_ci_component.vue
index aee88cae48d..88e95c33b9b 100644
--- a/app/assets/javascripts/vue_shared/components/header_ci_component.vue
+++ b/app/assets/javascripts/vue_shared/components/header_ci_component.vue
@@ -1,8 +1,9 @@
<script>
+import { GlTooltipDirective, GlLink, GlButton } from '@gitlab-org/gitlab-ui';
import CiIconBadge from './ci_badge_link.vue';
import TimeagoTooltip from './time_ago_tooltip.vue';
-import tooltip from '../directives/tooltip';
import UserAvatarImage from './user_avatar/user_avatar_image.vue';
+import LoadingButton from '~/vue_shared/components/loading_button.vue';
/**
* Renders header component for job and pipeline page based on UI mockups
@@ -16,9 +17,12 @@ export default {
CiIconBadge,
TimeagoTooltip,
UserAvatarImage,
+ GlLink,
+ GlButton,
+ LoadingButton,
},
directives: {
- tooltip,
+ GlTooltip: GlTooltipDirective,
},
props: {
status: {
@@ -98,8 +102,8 @@ export default {
by
<template v-if="user">
- <a
- v-tooltip
+ <gl-link
+ v-gl-tooltip
:href="user.path"
:title="user.email"
class="js-user-link commit-committer-link"
@@ -113,7 +117,7 @@ export default {
/>
{{ user.name }}
- </a>
+ </gl-link>
<span
v-if="user.status_tooltip_html"
v-html="user.status_tooltip_html"></span>
@@ -127,16 +131,16 @@ export default {
<template
v-for="(action, i) in actions"
>
- <a
+ <gl-link
v-if="action.type === 'link'"
:key="i"
:href="action.path"
:class="action.cssClass"
>
{{ action.label }}
- </a>
+ </gl-link>
- <a
+ <gl-link
v-else-if="action.type === 'ujs-link'"
:key="i"
:href="action.path"
@@ -145,31 +149,24 @@ export default {
rel="nofollow"
>
{{ action.label }}
- </a>
+ </gl-link>
- <button
+ <loading-button
v-else-if="action.type === 'button'"
:key="i"
+ :loading="action.isLoading"
:disabled="action.isLoading"
:class="action.cssClass"
- type="button"
+ container-class="d-inline"
+ :label="action.label"
@click="onClickAction(action)"
- >
- {{ action.label }}
- <i
- v-show="action.isLoading"
- class="fa fa-spin fa-spinner"
- aria-hidden="true"
- >
- </i>
- </button>
+ />
</template>
</section>
- <button
+ <gl-button
v-if="hasSidebarButton"
id="toggleSidebar"
- type="button"
- class="btn btn-default d-block d-sm-none
+ class="d-block d-sm-none
sidebar-toggle-btn js-sidebar-build-toggle js-sidebar-build-toggle-header"
@click="onClickSidebarButton"
>
@@ -179,6 +176,6 @@ sidebar-toggle-btn js-sidebar-build-toggle js-sidebar-build-toggle-header"
aria-labelledby="toggleSidebar"
>
</i>
- </button>
+ </gl-button>
</header>
</template>
diff --git a/app/assets/stylesheets/pages/issuable.scss b/app/assets/stylesheets/pages/issuable.scss
index 00b06aea898..3aa79bf2466 100644
--- a/app/assets/stylesheets/pages/issuable.scss
+++ b/app/assets/stylesheets/pages/issuable.scss
@@ -47,12 +47,6 @@
@extend .fixed-width-container;
}
}
-
- .diffs {
- .mr-version-controls {
- @extend .fixed-width-container;
- }
- }
}
.issuable-details {
diff --git a/app/assets/stylesheets/pages/note_form.scss b/app/assets/stylesheets/pages/note_form.scss
index 855d73a9939..45b921a2fbb 100644
--- a/app/assets/stylesheets/pages/note_form.scss
+++ b/app/assets/stylesheets/pages/note_form.scss
@@ -176,8 +176,10 @@
background-color: $white-light;
}
-.discussion-form-container {
- padding: $gl-padding-top $gl-padding $gl-padding;
+table {
+ .discussion-form-container {
+ padding: $gl-padding-top $gl-padding $gl-padding;
+ }
}
.discussion-notes .disabled-comment {
diff --git a/app/assets/stylesheets/pages/notes.scss b/app/assets/stylesheets/pages/notes.scss
index 1b957f6cc69..35e01c9c807 100644
--- a/app/assets/stylesheets/pages/notes.scss
+++ b/app/assets/stylesheets/pages/notes.scss
@@ -13,12 +13,32 @@ $note-form-margin-left: 72px;
}
}
+@mixin outline-comment() {
+ margin: $gl-padding;
+ border: 1px solid $border-color;
+ border-radius: $border-radius-default;
+}
+
.note-wrapper {
padding: $gl-padding;
+
+ &.outlined {
+ @include outline-comment();
+ }
}
-.issuable-discussion {
- .notes.timeline > .timeline-entry {
+.main-notes-list {
+ @include vertical-line(39px);
+}
+
+.notes {
+ display: block;
+ list-style: none;
+ margin: 0;
+ padding: 0;
+ position: relative;
+
+ &.timeline > .timeline-entry {
border: 1px solid $border-color;
border-radius: $border-radius-default;
margin: $gl-padding 0;
@@ -51,18 +71,6 @@ $note-form-margin-left: 72px;
border-top: 1px solid $border-color;
}
}
-}
-
-.main-notes-list {
- @include vertical-line(36px);
-}
-
-.notes {
- display: block;
- list-style: none;
- margin: 0;
- padding: 0;
- position: relative;
> .note-discussion {
.card {
@@ -71,10 +79,6 @@ $note-form-margin-left: 72px;
li.note {
border-bottom: 1px solid $border-color;
-
- &:first-child {
- border-radius: $border-radius-default $border-radius-default 0 0;
- }
}
}
@@ -387,6 +391,7 @@ $note-form-margin-left: 72px;
line-height: 42px;
padding: 0 $gl-padding;
border-top: 1px solid $border-color;
+ border-radius: 0;
&:hover {
background-color: $gray-light;
@@ -479,9 +484,7 @@ $note-form-margin-left: 72px;
}
.note-wrapper {
- margin: $gl-padding;
- border: 1px solid $border-color;
- border-radius: $border-radius-default;
+ @include outline-comment();
}
.discussion-reply-holder {
@@ -491,6 +494,16 @@ $note-form-margin-left: 72px;
}
}
+.commit-diff {
+ .notes {
+ @include vertical-line(52px);
+ }
+
+ .discussion-reply-holder {
+ border-top: 1px solid $border-color;
+ }
+}
+
.discussion-header,
.note-header-info {
a {
diff --git a/app/helpers/nav_helper.rb b/app/helpers/nav_helper.rb
index 761f42f2f0f..a7fe8c3d59c 100644
--- a/app/helpers/nav_helper.rb
+++ b/app/helpers/nav_helper.rb
@@ -19,10 +19,7 @@ module NavHelper
end
def page_gutter_class
- if current_path?('merge_requests#show') ||
- current_path?('projects/merge_requests/conflicts#show') ||
- current_path?('issues#show') ||
- current_path?('milestones#show')
+ if page_has_markdown?
if cookies[:collapsed_gutter] == 'true'
%w[page-gutter right-sidebar-collapsed]
@@ -50,6 +47,17 @@ module NavHelper
class_names
end
+ def show_separator?
+ Gitlab::Sherlock.enabled? || can?(current_user, :read_instance_statistics)
+ end
+
+ def page_has_markdown?
+ current_path?('merge_requests#show') ||
+ current_path?('projects/merge_requests/conflicts#show') ||
+ current_path?('issues#show') ||
+ current_path?('milestones#show')
+ end
+
private
def get_header_links
diff --git a/app/models/application_record.rb b/app/models/application_record.rb
new file mode 100644
index 00000000000..71fbba5b328
--- /dev/null
+++ b/app/models/application_record.rb
@@ -0,0 +1,5 @@
+# frozen_string_literal: true
+
+class ApplicationRecord < ActiveRecord::Base
+ self.abstract_class = true
+end
diff --git a/app/models/clusters/platforms/kubernetes.rb b/app/models/clusters/platforms/kubernetes.rb
index 9860abeecf7..3c5d7756eec 100644
--- a/app/models/clusters/platforms/kubernetes.rb
+++ b/app/models/clusters/platforms/kubernetes.rb
@@ -173,9 +173,7 @@ module Clusters
kubeclient = build_kube_client!
kubeclient.get_pods(namespace: actual_namespace).as_json
- rescue Kubeclient::HttpError => err
- raise err unless err.error_code == 404
-
+ rescue Kubeclient::ResourceNotFoundError
[]
end
diff --git a/app/models/concerns/mentionable.rb b/app/models/concerns/mentionable.rb
index 298d0d42d90..0d88b34fb48 100644
--- a/app/models/concerns/mentionable.rb
+++ b/app/models/concerns/mentionable.rb
@@ -97,9 +97,9 @@ module Mentionable
# Allows heavy processing to be skipped
def matches_cross_reference_regex?
reference_pattern = if !project || project.default_issues_tracker?
- ReferenceRegexes::DEFAULT_PATTERN
+ ReferenceRegexes.default_pattern
else
- ReferenceRegexes::EXTERNAL_PATTERN
+ ReferenceRegexes.external_pattern
end
self.class.mentionable_attrs.any? do |attr, _|
diff --git a/app/models/concerns/mentionable/reference_regexes.rb b/app/models/concerns/mentionable/reference_regexes.rb
index fe8fbb71184..b8fb3f71925 100644
--- a/app/models/concerns/mentionable/reference_regexes.rb
+++ b/app/models/concerns/mentionable/reference_regexes.rb
@@ -2,6 +2,8 @@
module Mentionable
module ReferenceRegexes
+ extend Gitlab::Utils::StrongMemoize
+
def self.reference_pattern(link_patterns, issue_pattern)
Regexp.union(link_patterns,
issue_pattern,
@@ -15,16 +17,20 @@ module Mentionable
]
end
- DEFAULT_PATTERN = begin
- issue_pattern = Issue.reference_pattern
- link_patterns = Regexp.union([Issue, Commit, MergeRequest, Epic].map(&:link_reference_pattern).compact)
- reference_pattern(link_patterns, issue_pattern)
+ def self.default_pattern
+ strong_memoize(:default_pattern) do
+ issue_pattern = Issue.reference_pattern
+ link_patterns = Regexp.union([Issue, Commit, MergeRequest, Epic].map(&:link_reference_pattern).compact)
+ reference_pattern(link_patterns, issue_pattern)
+ end
end
- EXTERNAL_PATTERN = begin
- issue_pattern = IssueTrackerService.reference_pattern
- link_patterns = URI.regexp(%w(http https))
- reference_pattern(link_patterns, issue_pattern)
+ def self.external_pattern
+ strong_memoize(:external_pattern) do
+ issue_pattern = IssueTrackerService.reference_pattern
+ link_patterns = URI.regexp(%w(http https))
+ reference_pattern(link_patterns, issue_pattern)
+ end
end
end
end
diff --git a/app/models/deployment.rb b/app/models/deployment.rb
index 83434276995..811e623b7f7 100644
--- a/app/models/deployment.rb
+++ b/app/models/deployment.rb
@@ -160,18 +160,18 @@ class Deployment < ActiveRecord::Base
end
def has_metrics?
- prometheus_adapter&.can_query?
+ prometheus_adapter&.can_query? && success?
end
def metrics
- return {} unless has_metrics? && success?
+ return {} unless has_metrics?
metrics = prometheus_adapter.query(:deployment, self)
metrics&.merge(deployment_time: finished_at.to_i) || {}
end
def additional_metrics
- return {} unless has_metrics? && success?
+ return {} unless has_metrics?
metrics = prometheus_adapter.query(:additional_metrics_deployment, self)
metrics&.merge(deployment_time: finished_at.to_i) || {}
diff --git a/app/models/identity.rb b/app/models/identity.rb
index f5a13dbd6f2..d63dd432426 100644
--- a/app/models/identity.rb
+++ b/app/models/identity.rb
@@ -1,18 +1,14 @@
# frozen_string_literal: true
class Identity < ActiveRecord::Base
- def self.uniqueness_scope
- :provider
- end
-
include Sortable
include CaseSensitivity
belongs_to :user
validates :provider, presence: true
- validates :extern_uid, allow_blank: true, uniqueness: { scope: uniqueness_scope, case_sensitive: false }
- validates :user_id, uniqueness: { scope: uniqueness_scope }
+ validates :extern_uid, allow_blank: true, uniqueness: { scope: UniquenessScopes.scopes, case_sensitive: false }
+ validates :user_id, uniqueness: { scope: UniquenessScopes.scopes }
before_save :ensure_normalized_extern_uid, if: :extern_uid_changed?
after_destroy :clear_user_synced_attributes, if: :user_synced_attributes_metadata_from_provider?
diff --git a/app/models/identity/uniqueness_scopes.rb b/app/models/identity/uniqueness_scopes.rb
new file mode 100644
index 00000000000..674b735903f
--- /dev/null
+++ b/app/models/identity/uniqueness_scopes.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+class Identity < ActiveRecord::Base
+ # This module and method are defined in a separate file to allow EE to
+ # redefine the `scopes` method before it is used in the `Identity` model.
+ module UniquenessScopes
+ def self.scopes
+ [:provider]
+ end
+ end
+end
diff --git a/app/models/project_services/issue_tracker_service.rb b/app/models/project_services/issue_tracker_service.rb
index a399982e5ec..f54497fc6d8 100644
--- a/app/models/project_services/issue_tracker_service.rb
+++ b/app/models/project_services/issue_tracker_service.rb
@@ -9,7 +9,7 @@ class IssueTrackerService < Service
# Override this method on services that uses different patterns
# This pattern does not support cross-project references
# The other code assumes that this pattern is a superset of all
- # overridden patterns. See ReferenceRegexes::EXTERNAL_PATTERN
+ # overridden patterns. See ReferenceRegexes.external_pattern
def self.reference_pattern(only_long: false)
if only_long
/(\b[A-Z][A-Z0-9_]*-)(?<issue>\d+)/
diff --git a/app/models/project_services/kubernetes_service.rb b/app/models/project_services/kubernetes_service.rb
index 3459ded7ccf..c52a531e5fe 100644
--- a/app/models/project_services/kubernetes_service.rb
+++ b/app/models/project_services/kubernetes_service.rb
@@ -203,9 +203,7 @@ class KubernetesService < DeploymentService
kubeclient = build_kube_client!
kubeclient.get_pods(namespace: actual_namespace).as_json
- rescue Kubeclient::HttpError => err
- raise err unless err.error_code == 404
-
+ rescue Kubeclient::ResourceNotFoundError
[]
end
diff --git a/app/models/shard.rb b/app/models/shard.rb
index 2fa22bd040c..2e75bc91df0 100644
--- a/app/models/shard.rb
+++ b/app/models/shard.rb
@@ -9,13 +9,12 @@ class Shard < ActiveRecord::Base
# The GitLab config does not change for the lifecycle of the process
in_config = Gitlab.config.repositories.storages.keys.map(&:to_s)
+ in_db = all.pluck(:name)
- transaction do
- in_db = all.pluck(:name)
- missing = in_config - in_db
-
- missing.map { |name| by_name(name) }
- end
+ # This may race with other processes creating shards at the same time, but
+ # `by_name` will handle that correctly
+ missing = in_config - in_db
+ missing.map { |name| by_name(name) }
end
def self.by_name(name)
diff --git a/app/serializers/environment_status_entity.rb b/app/serializers/environment_status_entity.rb
index 4c6664e9e25..f6321b9e520 100644
--- a/app/serializers/environment_status_entity.rb
+++ b/app/serializers/environment_status_entity.rb
@@ -11,7 +11,7 @@ class EnvironmentStatusEntity < Grape::Entity
project_environment_path(es.project, es.environment)
end
- expose :metrics_url, if: ->(*) { can_read_environment? && environment.has_metrics? } do |es|
+ expose :metrics_url, if: ->(*) { can_read_environment? && deployment.has_metrics? } do |es|
metrics_project_environment_deployment_path(es.project, es.environment, es.deployment)
end
@@ -45,6 +45,10 @@ class EnvironmentStatusEntity < Grape::Entity
object.environment
end
+ def deployment
+ object.deployment
+ end
+
def project
object.environment.project
end
diff --git a/app/services/clusters/applications/check_installation_progress_service.rb b/app/services/clusters/applications/check_installation_progress_service.rb
index 19dc0478591..ca0f7b30053 100644
--- a/app/services/clusters/applications/check_installation_progress_service.rb
+++ b/app/services/clusters/applications/check_installation_progress_service.rb
@@ -15,8 +15,9 @@ module Clusters
check_timeout
end
rescue Kubeclient::HttpError => e
- Rails.logger.error "Kubernetes error: #{e.class.name} #{e.message}"
- app.make_errored!("Kubernetes error") unless app.errored?
+ Rails.logger.error("Kubernetes error: #{e.error_code} #{e.message}")
+ Gitlab::Sentry.track_acceptable_exception(e, extra: { scope: 'kubernetes', app_id: app.id })
+ app.make_errored!("Kubernetes error: #{e.error_code}") unless app.errored?
end
private
@@ -53,7 +54,7 @@ module Clusters
def remove_installation_pod
helm_api.delete_pod!(install_command.pod_name)
rescue => e
- Rails.logger.error "Kubernetes error: #{e.class.name} #{e.message}"
+ Rails.logger.error("Kubernetes error: #{e.class.name} #{e.message}")
# no-op
end
diff --git a/app/services/clusters/applications/install_service.rb b/app/services/clusters/applications/install_service.rb
index 5a24d78e712..f4385748c43 100644
--- a/app/services/clusters/applications/install_service.rb
+++ b/app/services/clusters/applications/install_service.rb
@@ -13,10 +13,12 @@ module Clusters
ClusterWaitForAppInstallationWorker.perform_in(
ClusterWaitForAppInstallationWorker::INTERVAL, app.name, app.id)
rescue Kubeclient::HttpError => e
- Rails.logger.error "Kubernetes error: #{e.class.name} #{e.message}"
- app.make_errored!("Kubernetes error.")
+ Rails.logger.error("Kubernetes error: #{e.error_code} #{e.message}")
+ Gitlab::Sentry.track_acceptable_exception(e, extra: { scope: 'kubernetes', app_id: app.id })
+ app.make_errored!("Kubernetes error: #{e.error_code}")
rescue StandardError => e
Rails.logger.error "Can't start installation process: #{e.class.name} #{e.message}"
+ Gitlab::Sentry.track_acceptable_exception(e, extra: { scope: 'kubernetes', app_id: app.id })
app.make_errored!("Can't start installation process.")
end
end
diff --git a/app/services/clusters/gcp/kubernetes/fetch_kubernetes_token_service.rb b/app/services/clusters/gcp/kubernetes/fetch_kubernetes_token_service.rb
index 277cc4b788d..4ad04ab801e 100644
--- a/app/services/clusters/gcp/kubernetes/fetch_kubernetes_token_service.rb
+++ b/app/services/clusters/gcp/kubernetes/fetch_kubernetes_token_service.rb
@@ -21,10 +21,7 @@ module Clusters
def get_secret
kubeclient.get_secret(service_account_token_name, namespace).as_json
- rescue Kubeclient::HttpError => err
- raise err unless err.error_code == 404
-
- nil
+ rescue Kubeclient::ResourceNotFoundError
end
end
end
diff --git a/app/services/users/build_service.rb b/app/services/users/build_service.rb
index de6ff92d1da..24ac20fdd29 100644
--- a/app/services/users/build_service.rb
+++ b/app/services/users/build_service.rb
@@ -95,10 +95,6 @@ module Users
if params[:reset_password]
user_params.merge!(force_random_password: true, password_expires_at: nil)
end
-
- if user_default_internal_regex_enabled? && !user_params.key?(:external)
- user_params[:external] = user_external?
- end
else
allowed_signup_params = signup_params
allowed_signup_params << :skip_confirmation if skip_authorization
@@ -109,6 +105,10 @@ module Users
end
end
+ if user_default_internal_regex_enabled? && !user_params.key?(:external)
+ user_params[:external] = user_external?
+ end
+
user_params
end
diff --git a/app/views/admin/application_settings/_ci_cd.html.haml b/app/views/admin/application_settings/_ci_cd.html.haml
index adb496495d1..0d42094fc89 100644
--- a/app/views/admin/application_settings/_ci_cd.html.haml
+++ b/app/views/admin/application_settings/_ci_cd.html.haml
@@ -42,12 +42,12 @@
<code>4 mins 2 sec</code>, <code>2h42min</code>.
= link_to icon('question-circle'), help_page_path('user/admin_area/settings/continuous_integration', anchor: 'default-artifacts-expiration')
.form-group
- = f.label :archive_builds_in_human_readable, 'Archive builds in', class: 'label-bold'
+ = f.label :archive_builds_in_human_readable, 'Archive jobs', class: 'label-bold'
= f.text_field :archive_builds_in_human_readable, class: 'form-control', placeholder: 'never'
.form-text.text-muted
- Set the duration when build gonna be considered old. Archived builds cannot be retried.
- Make it empty to never expire builds. It has to be larger than 1 day.
- The default unit is in seconds, but you can define an alternative. For example:
- <code>4 mins 2 sec</code>, <code>2h42min</code>.
+ Set the duration for which the jobs will be considered as old and expired.
+ Once that time passes, the jobs will be archived and no longer able to be
+ retried. Make it empty to never expire jobs. It has to be no less than 1 day,
+ for example: <code>15 days</code>, <code>1 month</code>, <code>2 years</code>.
= f.submit 'Save changes', class: "btn btn-success"
diff --git a/app/views/events/event/_push.html.haml b/app/views/events/event/_push.html.haml
index 82693ec832e..69914fccc48 100644
--- a/app/views/events/event/_push.html.haml
+++ b/app/views/events/event/_push.html.haml
@@ -7,10 +7,10 @@
.event-title.d-flex.flex-wrap
= inline_event_icon(event)
%span.event-type.d-inline-block.append-right-4.pushed #{event.action_name} #{event.ref_type}
- %span
+ %span.append-right-4
- commits_link = project_commits_path(project, event.ref_name)
- should_link = event.tag? ? project.repository.tag_exists?(event.ref_name) : project.repository.branch_exists?(event.ref_name)
- = link_to_if should_link, event.ref_name, commits_link, class: 'ref-name append-right-4'
+ = link_to_if should_link, event.ref_name, commits_link, class: 'ref-name'
= render "events/event_scope", event: event
diff --git a/app/views/layouts/nav/_dashboard.html.haml b/app/views/layouts/nav/_dashboard.html.haml
index 8f8b6b454d9..ea5f2b166b4 100644
--- a/app/views/layouts/nav/_dashboard.html.haml
+++ b/app/views/layouts/nav/_dashboard.html.haml
@@ -64,7 +64,7 @@
= link_to '#', class: 'dashboard-shortcuts-web-ide', title: _('Web IDE') do
= _('Web IDE')
- - if Gitlab::Sherlock.enabled? || can?(current_user, :read_instance_statistics)
+ - if show_separator?
%li.line-separator.d-none.d-sm-block
= render_if_exists 'dashboard/operations/nav_link'
- if can?(current_user, :read_instance_statistics)
diff --git a/app/views/projects/diffs/_text_file.html.haml b/app/views/projects/diffs/_text_file.html.haml
index e8a5e63e59e..bc9f6c71fa8 100644
--- a/app/views/projects/diffs/_text_file.html.haml
+++ b/app/views/projects/diffs/_text_file.html.haml
@@ -3,7 +3,7 @@
.suppressed-container
%a.show-suppressed-diff.js-show-suppressed-diff Changes suppressed. Click to show.
-%table.text-file.diff-wrap-lines.code.js-syntax-highlight{ data: diff_view_data, class: too_big ? 'hide' : '' }
+%table.text-file.diff-wrap-lines.code.js-syntax-highlight.commit-diff{ data: diff_view_data, class: too_big ? 'hide' : '' }
= render partial: "projects/diffs/line",
collection: diff_file.highlighted_diff_lines,
as: :line,
diff --git a/app/views/projects/merge_requests/_mr_title.html.haml b/app/views/projects/merge_requests/_mr_title.html.haml
index 1bf42ded97a..3cd83feb842 100644
--- a/app/views/projects/merge_requests/_mr_title.html.haml
+++ b/app/views/projects/merge_requests/_mr_title.html.haml
@@ -37,6 +37,6 @@
= link_to 'Reopen', merge_request_path(@merge_request, merge_request: { state_event: :reopen }), method: :put, class: 'reopen-mr-link', title: 'Reopen merge request'
- if can_update_merge_request
- = link_to 'Edit', edit_project_merge_request_path(@project, @merge_request), class: "d-none d-sm-none d-md-block btn btn-grouped js-issuable-edit"
+ = link_to 'Edit', edit_project_merge_request_path(@project, @merge_request), class: "d-none d-sm-none d-md-block btn btn-grouped js-issuable-edit qa-edit-button"
= render 'shared/issuable/close_reopen_button', issuable: @merge_request, can_update: can_update_merge_request, can_reopen: can_update_merge_request
diff --git a/app/views/shared/_label.html.haml b/app/views/shared/_label.html.haml
index 71f34c0d85b..21ea188d7b3 100644
--- a/app/views/shared/_label.html.haml
+++ b/app/views/shared/_label.html.haml
@@ -23,28 +23,29 @@
%li.inline
= link_to edit_label_path(label), class: 'btn btn-transparent label-action edit has-tooltip', title: _('Edit'), data: { placement: 'bottom' }, aria_label: _('Edit') do
= sprite_icon('pencil')
- %li.inline
- .dropdown
- %button{ type: 'button', class: 'btn btn-transparent js-label-options-dropdown label-action', data: { toggle: 'dropdown' }, aria_label: _('Label actions dropdown') }
- = sprite_icon('ellipsis_v')
- .dropdown-menu.dropdown-open-left
- %ul
- - if label.is_a?(ProjectLabel) && label.project.group && can?(current_user, :admin_label, label.project.group)
- %li
- %button.js-promote-project-label-button.btn.btn-transparent.btn-action{ disabled: true, type: 'button',
- data: { url: promote_project_label_path(label.project, label),
- label_title: label.title,
- label_color: label.color,
- label_text_color: label.text_color,
- group_name: label.project.group.name,
- target: '#promote-label-modal',
- container: 'body',
- toggle: 'modal' } }
- = _('Promote to group label')
- - if can?(current_user, :admin_label, label)
- %li
- %span{ data: { toggle: 'modal', target: "#modal-delete-label-#{label.id}" } }
- %button.text-danger.remove-row{ type: 'button' }= _('Delete')
+ - if can?(current_user, :admin_label, label)
+ %li.inline
+ .dropdown
+ %button{ type: 'button', class: 'btn btn-transparent js-label-options-dropdown label-action', data: { toggle: 'dropdown' }, aria_label: _('Label actions dropdown') }
+ = sprite_icon('ellipsis_v')
+ .dropdown-menu.dropdown-open-left
+ %ul
+ - if label.is_a?(ProjectLabel) && label.project.group && can?(current_user, :admin_label, label.project.group)
+ %li
+ %button.js-promote-project-label-button.btn.btn-transparent.btn-action{ disabled: true, type: 'button',
+ data: { url: promote_project_label_path(label.project, label),
+ label_title: label.title,
+ label_color: label.color,
+ label_text_color: label.text_color,
+ group_name: label.project.group.name,
+ target: '#promote-label-modal',
+ container: 'body',
+ toggle: 'modal' } }
+ = _('Promote to group label')
+ - if can?(current_user, :admin_label, label)
+ %li
+ %span{ data: { toggle: 'modal', target: "#modal-delete-label-#{label.id}" } }
+ %button.text-danger.remove-row{ type: 'button' }= _('Delete')
- if current_user
%li.inline.label-subscription
- if can_subscribe_to_label_in_different_levels?(label)
diff --git a/app/views/shared/notes/_note.html.haml b/app/views/shared/notes/_note.html.haml
index 84adbd444c5..bc918430823 100644
--- a/app/views/shared/notes/_note.html.haml
+++ b/app/views/shared/notes/_note.html.haml
@@ -5,7 +5,7 @@
- note_editable = can?(current_user, :admin_note, note)
- note_counter = local_assigns.fetch(:note_counter, 0)
-%li.timeline-entry{ id: dom_id(note),
+%li.timeline-entry.note-wrapper.outlined{ id: dom_id(note),
class: ["note", "note-row-#{note.id}", ('system-note' if note.system)],
data: { author_id: note.author.id,
editable: note_editable,
diff --git a/app/views/shared/notes/_notes_with_form.html.haml b/app/views/shared/notes/_notes_with_form.html.haml
index ec1e10bb0c1..4c4050c6054 100644
--- a/app/views/shared/notes/_notes_with_form.html.haml
+++ b/app/views/shared/notes/_notes_with_form.html.haml
@@ -8,7 +8,7 @@
- if can_create_note?
.notes.notes-form.timeline
- .timeline-entry
+ .timeline-entry.note-form
.timeline-entry-inner
.flash-container.timeline-content
diff --git a/bin/rails b/bin/rails
index 228f812ccaf..d21b64b3007 100755
--- a/bin/rails
+++ b/bin/rails
@@ -1,7 +1,7 @@
#!/usr/bin/env ruby
# Remove this block when upgraded to rails 5.0.
-unless %w[1 true].include?(ENV["RAILS5"])
+if %w[0 false].include?(ENV["RAILS5"])
begin
load File.expand_path('../spring', __FILE__)
rescue LoadError => e
diff --git a/bin/rake b/bin/rake
index b52a8321f1a..1362d51e3d6 100755
--- a/bin/rake
+++ b/bin/rake
@@ -1,7 +1,7 @@
#!/usr/bin/env ruby
# Remove this block when upgraded to rails 5.0.
-unless %w[1 true].include?(ENV["RAILS5"])
+if %w[0 false].include?(ENV["RAILS5"])
begin
load File.expand_path('../spring', __FILE__)
rescue LoadError => e
diff --git a/bin/rspec b/bin/rspec
index 26583242051..b0770e30a70 100755
--- a/bin/rspec
+++ b/bin/rspec
@@ -2,7 +2,7 @@
# Remove these two lines below when upgraded to rails 5.0.
# Allow run `rspec` command as `RAILS5=1 rspec ...` instead of `BUNDLE_GEMFILE=Gemfile.rails5 rspec ...`
-gemfile = %w[1 true].include?(ENV["RAILS5"]) ? "Gemfile.rails5" : "Gemfile"
+gemfile = %w[0 false].include?(ENV["RAILS5"]) ? "Gemfile.rails4" : "Gemfile"
ENV['BUNDLE_GEMFILE'] ||= File.expand_path("../#{gemfile}", __dir__)
begin
diff --git a/bin/setup b/bin/setup
index ec1ebe02950..34bb667087a 100755
--- a/bin/setup
+++ b/bin/setup
@@ -1,7 +1,7 @@
#!/usr/bin/env ruby
def rails5?
- %w[1 true].include?(ENV["RAILS5"])
+ !%w[0 false].include?(ENV["RAILS5"])
end
require "pathname"
diff --git a/changelogs/unreleased/52940-fix-internal-email-pattern-not-respected.yml b/changelogs/unreleased/52940-fix-internal-email-pattern-not-respected.yml
new file mode 100644
index 00000000000..98e15a5cc0a
--- /dev/null
+++ b/changelogs/unreleased/52940-fix-internal-email-pattern-not-respected.yml
@@ -0,0 +1,5 @@
+---
+title: Fix a bug where internal email pattern wasn't respected
+merge_request: 22516
+author:
+type: fixed
diff --git a/changelogs/unreleased/53636-fix-rendering-of-any-user-filter.yml b/changelogs/unreleased/53636-fix-rendering-of-any-user-filter.yml
new file mode 100644
index 00000000000..a59a276a334
--- /dev/null
+++ b/changelogs/unreleased/53636-fix-rendering-of-any-user-filter.yml
@@ -0,0 +1,5 @@
+---
+title: Fix rendering of filter bar tokens for special values
+merge_request: 22865
+author: Heinrich Lee Yu
+type: fixed
diff --git a/changelogs/unreleased/53816-empty-label-menu-if-not-logged-in.yml b/changelogs/unreleased/53816-empty-label-menu-if-not-logged-in.yml
new file mode 100644
index 00000000000..a9ca56303eb
--- /dev/null
+++ b/changelogs/unreleased/53816-empty-label-menu-if-not-logged-in.yml
@@ -0,0 +1,5 @@
+---
+title: Removes promote to group label for anonymous user
+merge_request: 23042
+author: Jacopo Beschi @jacopo-beschi
+type: fixed
diff --git a/changelogs/unreleased/53972-fix-fill-shards.yml b/changelogs/unreleased/53972-fix-fill-shards.yml
new file mode 100644
index 00000000000..ca94d6cc589
--- /dev/null
+++ b/changelogs/unreleased/53972-fix-fill-shards.yml
@@ -0,0 +1,5 @@
+---
+title: Fix a race condition intermittently breaking GitLab startup
+merge_request: 23028
+author:
+type: fixed
diff --git a/changelogs/unreleased/54002-activity-feed-missing-padding-in-event-note-when-a-branch-is-deleted.yml b/changelogs/unreleased/54002-activity-feed-missing-padding-in-event-note-when-a-branch-is-deleted.yml
new file mode 100644
index 00000000000..9f4f104a12c
--- /dev/null
+++ b/changelogs/unreleased/54002-activity-feed-missing-padding-in-event-note-when-a-branch-is-deleted.yml
@@ -0,0 +1,5 @@
+---
+title: Adds margin after a deleted branch name in the activity feed.
+merge_request: 23038
+author:
+type: fixed
diff --git a/changelogs/unreleased/drop-gcp-cluster-table.yml b/changelogs/unreleased/drop-gcp-cluster-table.yml
new file mode 100644
index 00000000000..15964ec2eaf
--- /dev/null
+++ b/changelogs/unreleased/drop-gcp-cluster-table.yml
@@ -0,0 +1,5 @@
+---
+title: Drop gcp_clusters table
+merge_request: 22713
+author:
+type: other
diff --git a/changelogs/unreleased/fix-deployment-metrics-in-mr-widget.yml b/changelogs/unreleased/fix-deployment-metrics-in-mr-widget.yml
new file mode 100644
index 00000000000..5427ead3d1b
--- /dev/null
+++ b/changelogs/unreleased/fix-deployment-metrics-in-mr-widget.yml
@@ -0,0 +1,6 @@
+---
+title: Avoid returning deployment metrics url to MR widget when the deployment is
+ not successful
+merge_request: 23010
+author:
+type: fixed
diff --git a/changelogs/unreleased/kubernetes-http-response-code.yml b/changelogs/unreleased/kubernetes-http-response-code.yml
new file mode 100644
index 00000000000..551fe2edc3c
--- /dev/null
+++ b/changelogs/unreleased/kubernetes-http-response-code.yml
@@ -0,0 +1,5 @@
+---
+title: Show HTTP response code for Kubernetes errors
+merge_request: 22964
+author:
+type: other
diff --git a/changelogs/unreleased/sh-bump-ruby-2-5-3.yml b/changelogs/unreleased/sh-bump-ruby-2-5-3.yml
new file mode 100644
index 00000000000..13cadc73e9c
--- /dev/null
+++ b/changelogs/unreleased/sh-bump-ruby-2-5-3.yml
@@ -0,0 +1,5 @@
+---
+title: Upgrade to Ruby 2.5.3
+merge_request: 2806
+author:
+type: performance
diff --git a/changelogs/unreleased/switch-rails.yml b/changelogs/unreleased/switch-rails.yml
new file mode 100644
index 00000000000..4edf709dbd4
--- /dev/null
+++ b/changelogs/unreleased/switch-rails.yml
@@ -0,0 +1,5 @@
+---
+title: Switch to Rails 5
+merge_request: 21492
+author:
+type: other
diff --git a/config/application.rb b/config/application.rb
index 95b0f74a5a3..1b084e91cfb 100644
--- a/config/application.rb
+++ b/config/application.rb
@@ -8,7 +8,7 @@ module Gitlab
# This method is used for smooth upgrading from the current Rails 4.x to Rails 5.0.
# https://gitlab.com/gitlab-org/gitlab-ce/issues/14286
def self.rails5?
- ENV["RAILS5"].in?(%w[1 true])
+ !%w[0 false].include?(ENV["RAILS5"])
end
class Application < Rails::Application
diff --git a/config/boot.rb b/config/boot.rb
index 655c54ddb84..1aeacdabbad 100644
--- a/config/boot.rb
+++ b/config/boot.rb
@@ -1,10 +1,10 @@
def rails5?
- %w[1 true].include?(ENV["RAILS5"])
+ !%w[0 false].include?(ENV["RAILS5"])
end
require 'rubygems' unless rails5?
-gemfile = rails5? ? "Gemfile.rails5" : "Gemfile"
+gemfile = rails5? ? "Gemfile" : "Gemfile.rails4"
ENV['BUNDLE_GEMFILE'] ||= File.expand_path("../#{gemfile}", __dir__)
# Set up gems listed in the Gemfile.
diff --git a/config/environment.rb b/config/environment.rb
index 5d35937f7c6..3a52656a2c1 100644
--- a/config/environment.rb
+++ b/config/environment.rb
@@ -1,10 +1,10 @@
# Load the rails application
# Remove this condition when upgraded to rails 5.0.
-if %w[1 true].include?(ENV["RAILS5"])
- require_relative 'application'
-else
+if %w[0 false].include?(ENV["RAILS5"])
require File.expand_path('application', __dir__)
+else
+ require_relative 'application'
end
# Initialize the rails application
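
Editor's note: the bin/* and config/* hunks above flip the RAILS5 flag from opt-in to opt-out. Rails 5 and the plain Gemfile are now the default, and Rails 4 with Gemfile.rails4 is used only when RAILS5 is explicitly set to 0 or false. In short:

    # Summary of the new semantics (taken directly from the hunks above).
    def rails5?
      !%w[0 false].include?(ENV["RAILS5"])
    end

    gemfile = rails5? ? "Gemfile" : "Gemfile.rails4"
    # RAILS5 unset       -> rails5? == true  -> Gemfile (Rails 5)
    # RAILS5=1 or =true  -> rails5? == true  -> Gemfile (Rails 5)
    # RAILS5=0 or =false -> rails5? == false -> Gemfile.rails4 (Rails 4)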
diff --git a/config/initializers/mysql_set_length_for_binary_indexes.rb b/config/initializers/mysql_set_length_for_binary_indexes.rb
index 81ed2fb83de..0445d8fcae2 100644
--- a/config/initializers/mysql_set_length_for_binary_indexes.rb
+++ b/config/initializers/mysql_set_length_for_binary_indexes.rb
@@ -24,28 +24,46 @@ if defined?(ActiveRecord::ConnectionAdapters::Mysql2Adapter)
ActiveRecord::ConnectionAdapters::Mysql2Adapter.send(:prepend, MysqlSetLengthForBinaryIndex)
end
-if Gitlab.rails5?
- module MysqlSetLengthForBinaryIndexAndIgnorePostgresOptionsForSchema
- # This method is used in Rails 5 schema loading as t.index
- def index(column_names, options = {})
- options[:length] ||= {}
- Array(column_names).each do |column_name|
- column = columns.find { |c| c.name == column_name }
-
- if column&.type == :binary
- options[:length][column_name] = 20
- end
- end
+module MysqlSetLengthForBinaryIndexAndIgnorePostgresOptionsForSchema
+ # This method is used in Rails 5 schema loading as t.index
+ def index(column_names, options = {})
+ # Ignore indexes that use opclasses,
+ # also see config/initializers/mysql_ignore_postgresql_options.rb
+ if options[:opclasses]
+ warn "WARNING: index on columns #{column_names} uses unsupported option, skipping."
+ return
+ end
+
+ # When running Rails 4 against the Rails 5 schema, Rails 4 can't define multiple
+ # indexes on the same set of columns, and MySQL doesn't support partial indexes.
+ # So if an index on these columns already exists and the new one is partial, skip it:
+ # see https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/21492#note_102821326
+ if !Gitlab.rails5? && indexes[column_names] && options[:where]
+ warn "WARNING: index on columns #{column_names} already exists and partial index is not supported, skipping."
+ return
+ end
+
+ options[:length] ||= {}
+ Array(column_names).each do |column_name|
+ column = columns.find { |c| c.name == column_name }
- # Ignore indexes that use opclasses,
- # also see config/initializers/mysql_ignore_postgresql_options.rb
- unless options[:opclasses]
- super(column_names, options)
+ if column&.type == :binary
+ options[:length][column_name] = 20
end
end
+
+ super(column_names, options)
end
+end
+def mysql_adapter?
+ defined?(ActiveRecord::ConnectionAdapters::Mysql2Adapter) && ActiveRecord::Base.connection.is_a?(ActiveRecord::ConnectionAdapters::Mysql2Adapter)
+end
+
+if Gitlab.rails5?
if defined?(ActiveRecord::ConnectionAdapters::MySQL::TableDefinition)
ActiveRecord::ConnectionAdapters::MySQL::TableDefinition.send(:prepend, MysqlSetLengthForBinaryIndexAndIgnorePostgresOptionsForSchema)
end
+elsif mysql_adapter? && defined?(ActiveRecord::ConnectionAdapters::TableDefinition)
+ ActiveRecord::ConnectionAdapters::TableDefinition.send(:prepend, MysqlSetLengthForBinaryIndexAndIgnorePostgresOptionsForSchema)
end
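
Editor's note: the initializer relies on Module#prepend so that the overriding index method runs before the adapter's own implementation and can skip or adjust an index before calling super. A generic, self-contained illustration of that pattern (not GitLab code):

    module IndexLengthOverride
      # Runs before the original #index thanks to prepend; can tweak options
      # or bail out entirely before delegating via super.
      def index(column_names, options = {})
        options[:length] = 20 if options.delete(:binary)
        super(column_names, options)
      end
    end

    class TableDefinition
      def index(column_names, options = {})
        puts "index on #{Array(column_names).join(', ')} with #{options}"
      end
    end

    TableDefinition.prepend(IndexLengthOverride)
    TableDefinition.new.index(:fingerprint, binary: true)
    # prints the adjusted options, i.e. a length of 20 for the binary column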
diff --git a/db/migrate/20181031190558_drop_fk_gcp_clusters_table.rb b/db/migrate/20181031190558_drop_fk_gcp_clusters_table.rb
new file mode 100644
index 00000000000..a7106111f46
--- /dev/null
+++ b/db/migrate/20181031190558_drop_fk_gcp_clusters_table.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+class DropFkGcpClustersTable < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ remove_foreign_key_if_exists :gcp_clusters, column: :project_id
+ remove_foreign_key_if_exists :gcp_clusters, column: :user_id
+ remove_foreign_key_if_exists :gcp_clusters, column: :service_id
+ end
+
+ def down
+ add_foreign_key_if_not_exists :gcp_clusters, :projects, column: :project_id, on_delete: :cascade
+ add_foreign_key_if_not_exists :gcp_clusters, :users, column: :user_id, on_delete: :nullify
+ add_foreign_key_if_not_exists :gcp_clusters, :services, column: :service_id, on_delete: :nullify
+ end
+
+ private
+
+ def add_foreign_key_if_not_exists(source, target, column:, on_delete:)
+ return unless table_exists?(source)
+ return if foreign_key_exists?(source, target, column: column)
+
+ add_concurrent_foreign_key(source, target, column: column, on_delete: on_delete)
+ end
+
+ def remove_foreign_key_if_exists(table, column:)
+ return unless table_exists?(table)
+ return unless foreign_key_exists?(table, column: column)
+
+ remove_foreign_key(table, column: column)
+ end
+end
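
Editor's note: both directions of this migration guard every operation, so it can run (or be rolled back) regardless of whether the legacy gcp_clusters table and its foreign keys still exist. A simplified, database-free sketch of that "check before acting" idiom (the real helpers come from Gitlab::Database::MigrationHelpers):

    # Toy model: an array of existing foreign keys stands in for the database.
    def remove_foreign_key_if_exists(existing_keys, table, column:)
      return unless existing_keys.delete([table, column])

      puts "removed foreign key on #{table}.#{column}"
    end

    keys = [[:gcp_clusters, :project_id], [:gcp_clusters, :user_id]]
    remove_foreign_key_if_exists(keys, :gcp_clusters, column: :project_id) # removes it
    remove_foreign_key_if_exists(keys, :gcp_clusters, column: :project_id) # silent no-op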
diff --git a/db/migrate/20181031190559_drop_gcp_clusters_table.rb b/db/migrate/20181031190559_drop_gcp_clusters_table.rb
new file mode 100644
index 00000000000..808d474b4fc
--- /dev/null
+++ b/db/migrate/20181031190559_drop_gcp_clusters_table.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+class DropGcpClustersTable < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def up
+ drop_table :gcp_clusters
+ end
+
+ def down
+ create_table :gcp_clusters do |t|
+ # Order columns by best alignment scheme
+ t.references :project, null: false, index: { unique: true }, foreign_key: { on_delete: :cascade }
+ t.references :user, foreign_key: { on_delete: :nullify }
+ t.references :service, foreign_key: { on_delete: :nullify }
+ t.integer :status
+ t.integer :gcp_cluster_size, null: false
+
+ # Timestamps
+ t.datetime_with_timezone :created_at, null: false
+ t.datetime_with_timezone :updated_at, null: false
+
+ # Enable/disable
+ t.boolean :enabled, default: true
+
+ # General
+ t.text :status_reason
+
+ # k8s integration specific
+ t.string :project_namespace
+
+ # Cluster details
+ t.string :endpoint
+ t.text :ca_cert
+ t.text :encrypted_kubernetes_token
+ t.string :encrypted_kubernetes_token_iv
+ t.string :username
+ t.text :encrypted_password
+ t.string :encrypted_password_iv
+
+ # GKE
+ t.string :gcp_project_id, null: false
+ t.string :gcp_cluster_zone, null: false
+ t.string :gcp_cluster_name, null: false
+ t.string :gcp_machine_type
+ t.string :gcp_operation_id
+ t.text :encrypted_gcp_token
+ t.string :encrypted_gcp_token_iv
+ end
+ end
+end
diff --git a/db/schema.rb b/db/schema.rb
index 56137caf1d7..deaa2d30b26 100644
--- a/db/schema.rb
+++ b/db/schema.rb
@@ -1,4 +1,3 @@
-# encoding: UTF-8
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
@@ -126,7 +125,7 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.boolean "unique_ips_limit_enabled", default: false, null: false
t.string "default_artifacts_expire_in", default: "0", null: false
t.string "uuid"
- t.decimal "polling_interval_multiplier", default: 1.0, null: false
+ t.decimal "polling_interval_multiplier", default: "1.0", null: false
t.integer "cached_markdown_version"
t.boolean "clientside_sentry_enabled", default: false, null: false
t.string "clientside_sentry_dsn"
@@ -177,10 +176,9 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.text "details"
t.datetime "created_at"
t.datetime "updated_at"
+ t.index ["entity_id", "entity_type"], name: "index_audit_events_on_entity_id_and_entity_type", using: :btree
end
- add_index "audit_events", ["entity_id", "entity_type"], name: "index_audit_events_on_entity_id_and_entity_type", using: :btree
-
create_table "award_emoji", force: :cascade do |t|
t.string "name"
t.integer "user_id"
@@ -188,11 +186,10 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.string "awardable_type"
t.datetime "created_at"
t.datetime "updated_at"
+ t.index ["awardable_type", "awardable_id"], name: "index_award_emoji_on_awardable_type_and_awardable_id", using: :btree
+ t.index ["user_id", "name"], name: "index_award_emoji_on_user_id_and_name", using: :btree
end
- add_index "award_emoji", ["awardable_type", "awardable_id"], name: "index_award_emoji_on_awardable_type_and_awardable_id", using: :btree
- add_index "award_emoji", ["user_id", "name"], name: "index_award_emoji_on_user_id_and_name", using: :btree
-
create_table "badges", force: :cascade do |t|
t.string "link_url", null: false
t.string "image_url", null: false
@@ -201,47 +198,43 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.string "type", null: false
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
+ t.index ["group_id"], name: "index_badges_on_group_id", using: :btree
+ t.index ["project_id"], name: "index_badges_on_project_id", using: :btree
end
- add_index "badges", ["group_id"], name: "index_badges_on_group_id", using: :btree
- add_index "badges", ["project_id"], name: "index_badges_on_project_id", using: :btree
-
create_table "board_group_recent_visits", id: :bigserial, force: :cascade do |t|
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
t.integer "user_id"
t.integer "board_id"
t.integer "group_id"
+ t.index ["board_id"], name: "index_board_group_recent_visits_on_board_id", using: :btree
+ t.index ["group_id"], name: "index_board_group_recent_visits_on_group_id", using: :btree
+ t.index ["user_id", "group_id", "board_id"], name: "index_board_group_recent_visits_on_user_group_and_board", unique: true, using: :btree
+ t.index ["user_id"], name: "index_board_group_recent_visits_on_user_id", using: :btree
end
- add_index "board_group_recent_visits", ["board_id"], name: "index_board_group_recent_visits_on_board_id", using: :btree
- add_index "board_group_recent_visits", ["group_id"], name: "index_board_group_recent_visits_on_group_id", using: :btree
- add_index "board_group_recent_visits", ["user_id", "group_id", "board_id"], name: "index_board_group_recent_visits_on_user_group_and_board", unique: true, using: :btree
- add_index "board_group_recent_visits", ["user_id"], name: "index_board_group_recent_visits_on_user_id", using: :btree
-
create_table "board_project_recent_visits", id: :bigserial, force: :cascade do |t|
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
t.integer "user_id"
t.integer "project_id"
t.integer "board_id"
+ t.index ["board_id"], name: "index_board_project_recent_visits_on_board_id", using: :btree
+ t.index ["project_id"], name: "index_board_project_recent_visits_on_project_id", using: :btree
+ t.index ["user_id", "project_id", "board_id"], name: "index_board_project_recent_visits_on_user_project_and_board", unique: true, using: :btree
+ t.index ["user_id"], name: "index_board_project_recent_visits_on_user_id", using: :btree
end
- add_index "board_project_recent_visits", ["board_id"], name: "index_board_project_recent_visits_on_board_id", using: :btree
- add_index "board_project_recent_visits", ["project_id"], name: "index_board_project_recent_visits_on_project_id", using: :btree
- add_index "board_project_recent_visits", ["user_id", "project_id", "board_id"], name: "index_board_project_recent_visits_on_user_project_and_board", unique: true, using: :btree
- add_index "board_project_recent_visits", ["user_id"], name: "index_board_project_recent_visits_on_user_id", using: :btree
-
create_table "boards", force: :cascade do |t|
t.integer "project_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.integer "group_id"
+ t.index ["group_id"], name: "index_boards_on_group_id", using: :btree
+ t.index ["project_id"], name: "index_boards_on_project_id", using: :btree
end
- add_index "boards", ["group_id"], name: "index_boards_on_group_id", using: :btree
- add_index "boards", ["project_id"], name: "index_boards_on_project_id", using: :btree
-
create_table "broadcast_messages", force: :cascade do |t|
t.text "message", null: false
t.datetime "starts_at", null: false
@@ -252,10 +245,9 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.string "font"
t.text "message_html", null: false
t.integer "cached_markdown_version"
+ t.index ["starts_at", "ends_at", "id"], name: "index_broadcast_messages_on_starts_at_and_ends_at_and_id", using: :btree
end
- add_index "broadcast_messages", ["starts_at", "ends_at", "id"], name: "index_broadcast_messages_on_starts_at_and_ends_at_and_id", using: :btree
-
create_table "chat_names", force: :cascade do |t|
t.integer "user_id", null: false
t.integer "service_id", null: false
@@ -266,51 +258,46 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.datetime "last_used_at"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
+ t.index ["service_id", "team_id", "chat_id"], name: "index_chat_names_on_service_id_and_team_id_and_chat_id", unique: true, using: :btree
+ t.index ["user_id", "service_id"], name: "index_chat_names_on_user_id_and_service_id", unique: true, using: :btree
end
- add_index "chat_names", ["service_id", "team_id", "chat_id"], name: "index_chat_names_on_service_id_and_team_id_and_chat_id", unique: true, using: :btree
- add_index "chat_names", ["user_id", "service_id"], name: "index_chat_names_on_user_id_and_service_id", unique: true, using: :btree
-
create_table "chat_teams", force: :cascade do |t|
t.integer "namespace_id", null: false
t.string "team_id"
t.string "name"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
+ t.index ["namespace_id"], name: "index_chat_teams_on_namespace_id", unique: true, using: :btree
end
- add_index "chat_teams", ["namespace_id"], name: "index_chat_teams_on_namespace_id", unique: true, using: :btree
-
create_table "ci_build_trace_chunks", id: :bigserial, force: :cascade do |t|
t.integer "build_id", null: false
t.integer "chunk_index", null: false
t.integer "data_store", null: false
t.binary "raw_data"
+ t.index ["build_id", "chunk_index"], name: "index_ci_build_trace_chunks_on_build_id_and_chunk_index", unique: true, using: :btree
end
- add_index "ci_build_trace_chunks", ["build_id", "chunk_index"], name: "index_ci_build_trace_chunks_on_build_id_and_chunk_index", unique: true, using: :btree
-
create_table "ci_build_trace_section_names", force: :cascade do |t|
t.integer "project_id", null: false
t.string "name", null: false
+ t.index ["project_id", "name"], name: "index_ci_build_trace_section_names_on_project_id_and_name", unique: true, using: :btree
end
- add_index "ci_build_trace_section_names", ["project_id", "name"], name: "index_ci_build_trace_section_names_on_project_id_and_name", unique: true, using: :btree
-
create_table "ci_build_trace_sections", force: :cascade do |t|
t.integer "project_id", null: false
t.datetime_with_timezone "date_start", null: false
t.datetime_with_timezone "date_end", null: false
- t.integer "byte_start", limit: 8, null: false
- t.integer "byte_end", limit: 8, null: false
+ t.bigint "byte_start", null: false
+ t.bigint "byte_end", null: false
t.integer "build_id", null: false
t.integer "section_name_id", null: false
+ t.index ["build_id", "section_name_id"], name: "index_ci_build_trace_sections_on_build_id_and_section_name_id", unique: true, using: :btree
+ t.index ["project_id"], name: "index_ci_build_trace_sections_on_project_id", using: :btree
+ t.index ["section_name_id"], name: "index_ci_build_trace_sections_on_section_name_id", using: :btree
end
- add_index "ci_build_trace_sections", ["build_id", "section_name_id"], name: "index_ci_build_trace_sections_on_build_id_and_section_name_id", unique: true, using: :btree
- add_index "ci_build_trace_sections", ["project_id"], name: "index_ci_build_trace_sections_on_project_id", using: :btree
- add_index "ci_build_trace_sections", ["section_name_id"], name: "index_ci_build_trace_sections_on_section_name_id", using: :btree
-
create_table "ci_builds", force: :cascade do |t|
t.string "status"
t.datetime "finished_at"
@@ -341,7 +328,7 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.datetime "erased_at"
t.datetime "artifacts_expire_at"
t.string "environment"
- t.integer "artifacts_size", limit: 8
+ t.bigint "artifacts_size"
t.string "when"
t.text "yaml_variables"
t.datetime "queued_at"
@@ -356,45 +343,42 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.boolean "protected"
t.integer "failure_reason"
t.datetime_with_timezone "scheduled_at"
+ t.index ["artifacts_expire_at"], name: "index_ci_builds_on_artifacts_expire_at", where: "(artifacts_file <> ''::text)", using: :btree
+ t.index ["auto_canceled_by_id"], name: "index_ci_builds_on_auto_canceled_by_id", using: :btree
+ t.index ["commit_id", "stage_idx", "created_at"], name: "index_ci_builds_on_commit_id_and_stage_idx_and_created_at", using: :btree
+ t.index ["commit_id", "status", "type"], name: "index_ci_builds_on_commit_id_and_status_and_type", using: :btree
+ t.index ["commit_id", "type", "name", "ref"], name: "index_ci_builds_on_commit_id_and_type_and_name_and_ref", using: :btree
+ t.index ["commit_id", "type", "ref"], name: "index_ci_builds_on_commit_id_and_type_and_ref", using: :btree
+ t.index ["id"], name: "partial_index_ci_builds_on_id_with_legacy_artifacts", where: "(artifacts_file <> ''::text)", using: :btree
+ t.index ["project_id", "id"], name: "index_ci_builds_on_project_id_and_id", using: :btree
+ t.index ["protected"], name: "index_ci_builds_on_protected", using: :btree
+ t.index ["runner_id"], name: "index_ci_builds_on_runner_id", using: :btree
+ t.index ["scheduled_at"], name: "partial_index_ci_builds_on_scheduled_at_with_scheduled_jobs", where: "((scheduled_at IS NOT NULL) AND ((type)::text = 'Ci::Build'::text) AND ((status)::text = 'scheduled'::text))", using: :btree
+ t.index ["stage_id", "stage_idx"], name: "tmp_build_stage_position_index", where: "(stage_idx IS NOT NULL)", using: :btree
+ t.index ["stage_id"], name: "index_ci_builds_on_stage_id", using: :btree
+ t.index ["status", "type", "runner_id"], name: "index_ci_builds_on_status_and_type_and_runner_id", using: :btree
+ t.index ["token"], name: "index_ci_builds_on_token", unique: true, using: :btree
+ t.index ["updated_at"], name: "index_ci_builds_on_updated_at", using: :btree
+ t.index ["user_id"], name: "index_ci_builds_on_user_id", using: :btree
end
- add_index "ci_builds", ["artifacts_expire_at"], name: "index_ci_builds_on_artifacts_expire_at", where: "(artifacts_file <> ''::text)", using: :btree
- add_index "ci_builds", ["auto_canceled_by_id"], name: "index_ci_builds_on_auto_canceled_by_id", using: :btree
- add_index "ci_builds", ["commit_id", "stage_idx", "created_at"], name: "index_ci_builds_on_commit_id_and_stage_idx_and_created_at", using: :btree
- add_index "ci_builds", ["commit_id", "status", "type"], name: "index_ci_builds_on_commit_id_and_status_and_type", using: :btree
- add_index "ci_builds", ["commit_id", "type", "name", "ref"], name: "index_ci_builds_on_commit_id_and_type_and_name_and_ref", using: :btree
- add_index "ci_builds", ["commit_id", "type", "ref"], name: "index_ci_builds_on_commit_id_and_type_and_ref", using: :btree
- add_index "ci_builds", ["id"], name: "partial_index_ci_builds_on_id_with_legacy_artifacts", where: "(artifacts_file <> ''::text)", using: :btree
- add_index "ci_builds", ["project_id", "id"], name: "index_ci_builds_on_project_id_and_id", using: :btree
- add_index "ci_builds", ["protected"], name: "index_ci_builds_on_protected", using: :btree
- add_index "ci_builds", ["runner_id"], name: "index_ci_builds_on_runner_id", using: :btree
- add_index "ci_builds", ["scheduled_at"], name: "partial_index_ci_builds_on_scheduled_at_with_scheduled_jobs", where: "((scheduled_at IS NOT NULL) AND ((type)::text = 'Ci::Build'::text) AND ((status)::text = 'scheduled'::text))", using: :btree
- add_index "ci_builds", ["stage_id", "stage_idx"], name: "tmp_build_stage_position_index", where: "(stage_idx IS NOT NULL)", using: :btree
- add_index "ci_builds", ["stage_id"], name: "index_ci_builds_on_stage_id", using: :btree
- add_index "ci_builds", ["status", "type", "runner_id"], name: "index_ci_builds_on_status_and_type_and_runner_id", using: :btree
- add_index "ci_builds", ["token"], name: "index_ci_builds_on_token", unique: true, using: :btree
- add_index "ci_builds", ["updated_at"], name: "index_ci_builds_on_updated_at", using: :btree
- add_index "ci_builds", ["user_id"], name: "index_ci_builds_on_user_id", using: :btree
-
create_table "ci_builds_metadata", force: :cascade do |t|
t.integer "build_id", null: false
t.integer "project_id", null: false
t.integer "timeout"
t.integer "timeout_source", default: 1, null: false
+ t.index ["build_id"], name: "index_ci_builds_metadata_on_build_id", unique: true, using: :btree
+ t.index ["project_id"], name: "index_ci_builds_metadata_on_project_id", using: :btree
end
- add_index "ci_builds_metadata", ["build_id"], name: "index_ci_builds_metadata_on_build_id", unique: true, using: :btree
- add_index "ci_builds_metadata", ["project_id"], name: "index_ci_builds_metadata_on_project_id", using: :btree
-
create_table "ci_builds_runner_session", id: :bigserial, force: :cascade do |t|
t.integer "build_id", null: false
t.string "url", null: false
t.string "certificate"
t.string "authorization"
+ t.index ["build_id"], name: "index_ci_builds_runner_session_on_build_id", unique: true, using: :btree
end
- add_index "ci_builds_runner_session", ["build_id"], name: "index_ci_builds_runner_session_on_build_id", unique: true, using: :btree
-
create_table "ci_group_variables", force: :cascade do |t|
t.string "key", null: false
t.text "value"
@@ -405,16 +389,15 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.boolean "protected", default: false, null: false
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
+ t.index ["group_id", "key"], name: "index_ci_group_variables_on_group_id_and_key", unique: true, using: :btree
end
- add_index "ci_group_variables", ["group_id", "key"], name: "index_ci_group_variables_on_group_id_and_key", unique: true, using: :btree
-
create_table "ci_job_artifacts", force: :cascade do |t|
t.integer "project_id", null: false
t.integer "job_id", null: false
t.integer "file_type", null: false
t.integer "file_store"
- t.integer "size", limit: 8
+ t.bigint "size"
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
t.datetime_with_timezone "expire_at"
@@ -422,13 +405,12 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.binary "file_sha256"
t.integer "file_format", limit: 2
t.integer "file_location", limit: 2
+ t.index ["expire_at", "job_id"], name: "index_ci_job_artifacts_on_expire_at_and_job_id", using: :btree
+ t.index ["file_store"], name: "index_ci_job_artifacts_on_file_store", using: :btree
+ t.index ["job_id", "file_type"], name: "index_ci_job_artifacts_on_job_id_and_file_type", unique: true, using: :btree
+ t.index ["project_id"], name: "index_ci_job_artifacts_on_project_id", using: :btree
end
- add_index "ci_job_artifacts", ["expire_at", "job_id"], name: "index_ci_job_artifacts_on_expire_at_and_job_id", using: :btree
- add_index "ci_job_artifacts", ["file_store"], name: "index_ci_job_artifacts_on_file_store", using: :btree
- add_index "ci_job_artifacts", ["job_id", "file_type"], name: "index_ci_job_artifacts_on_job_id_and_file_type", unique: true, using: :btree
- add_index "ci_job_artifacts", ["project_id"], name: "index_ci_job_artifacts_on_project_id", using: :btree
-
create_table "ci_pipeline_schedule_variables", force: :cascade do |t|
t.string "key", null: false
t.text "value"
@@ -438,10 +420,9 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.integer "pipeline_schedule_id", null: false
t.datetime_with_timezone "created_at"
t.datetime_with_timezone "updated_at"
+ t.index ["pipeline_schedule_id", "key"], name: "index_ci_pipeline_schedule_variables_on_schedule_id_and_key", unique: true, using: :btree
end
- add_index "ci_pipeline_schedule_variables", ["pipeline_schedule_id", "key"], name: "index_ci_pipeline_schedule_variables_on_schedule_id_and_key", unique: true, using: :btree
-
create_table "ci_pipeline_schedules", force: :cascade do |t|
t.string "description"
t.string "ref"
@@ -453,11 +434,10 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.boolean "active", default: true
t.datetime "created_at"
t.datetime "updated_at"
+ t.index ["next_run_at", "active"], name: "index_ci_pipeline_schedules_on_next_run_at_and_active", using: :btree
+ t.index ["project_id"], name: "index_ci_pipeline_schedules_on_project_id", using: :btree
end
- add_index "ci_pipeline_schedules", ["next_run_at", "active"], name: "index_ci_pipeline_schedules_on_next_run_at_and_active", using: :btree
- add_index "ci_pipeline_schedules", ["project_id"], name: "index_ci_pipeline_schedules_on_project_id", using: :btree
-
create_table "ci_pipeline_variables", force: :cascade do |t|
t.string "key", null: false
t.text "value"
@@ -465,10 +445,9 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.string "encrypted_value_salt"
t.string "encrypted_value_iv"
t.integer "pipeline_id", null: false
+ t.index ["pipeline_id", "key"], name: "index_ci_pipeline_variables_on_pipeline_id_and_key", unique: true, using: :btree
end
- add_index "ci_pipeline_variables", ["pipeline_id", "key"], name: "index_ci_pipeline_variables_on_pipeline_id_and_key", unique: true, using: :btree
-
create_table "ci_pipelines", force: :cascade do |t|
t.string "ref"
t.string "sha"
@@ -492,37 +471,34 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.boolean "protected"
t.integer "failure_reason"
t.integer "iid"
+ t.index ["auto_canceled_by_id"], name: "index_ci_pipelines_on_auto_canceled_by_id", using: :btree
+ t.index ["pipeline_schedule_id"], name: "index_ci_pipelines_on_pipeline_schedule_id", using: :btree
+ t.index ["project_id", "iid"], name: "index_ci_pipelines_on_project_id_and_iid", unique: true, where: "(iid IS NOT NULL)", using: :btree
+ t.index ["project_id", "ref", "status", "id"], name: "index_ci_pipelines_on_project_id_and_ref_and_status_and_id", using: :btree
+ t.index ["project_id", "sha"], name: "index_ci_pipelines_on_project_id_and_sha", using: :btree
+ t.index ["project_id", "source"], name: "index_ci_pipelines_on_project_id_and_source", using: :btree
+ t.index ["project_id", "status", "config_source"], name: "index_ci_pipelines_on_project_id_and_status_and_config_source", using: :btree
+ t.index ["project_id"], name: "index_ci_pipelines_on_project_id", using: :btree
+ t.index ["status"], name: "index_ci_pipelines_on_status", using: :btree
+ t.index ["user_id"], name: "index_ci_pipelines_on_user_id", using: :btree
end
- add_index "ci_pipelines", ["auto_canceled_by_id"], name: "index_ci_pipelines_on_auto_canceled_by_id", using: :btree
- add_index "ci_pipelines", ["pipeline_schedule_id"], name: "index_ci_pipelines_on_pipeline_schedule_id", using: :btree
- add_index "ci_pipelines", ["project_id", "iid"], name: "index_ci_pipelines_on_project_id_and_iid", unique: true, where: "(iid IS NOT NULL)", using: :btree
- add_index "ci_pipelines", ["project_id", "ref", "status", "id"], name: "index_ci_pipelines_on_project_id_and_ref_and_status_and_id", using: :btree
- add_index "ci_pipelines", ["project_id", "sha"], name: "index_ci_pipelines_on_project_id_and_sha", using: :btree
- add_index "ci_pipelines", ["project_id", "source"], name: "index_ci_pipelines_on_project_id_and_source", using: :btree
- add_index "ci_pipelines", ["project_id", "status", "config_source"], name: "index_ci_pipelines_on_project_id_and_status_and_config_source", using: :btree
- add_index "ci_pipelines", ["project_id"], name: "index_ci_pipelines_on_project_id", using: :btree
- add_index "ci_pipelines", ["status"], name: "index_ci_pipelines_on_status", using: :btree
- add_index "ci_pipelines", ["user_id"], name: "index_ci_pipelines_on_user_id", using: :btree
-
create_table "ci_runner_namespaces", force: :cascade do |t|
t.integer "runner_id"
t.integer "namespace_id"
+ t.index ["namespace_id"], name: "index_ci_runner_namespaces_on_namespace_id", using: :btree
+ t.index ["runner_id", "namespace_id"], name: "index_ci_runner_namespaces_on_runner_id_and_namespace_id", unique: true, using: :btree
end
- add_index "ci_runner_namespaces", ["namespace_id"], name: "index_ci_runner_namespaces_on_namespace_id", using: :btree
- add_index "ci_runner_namespaces", ["runner_id", "namespace_id"], name: "index_ci_runner_namespaces_on_runner_id_and_namespace_id", unique: true, using: :btree
-
create_table "ci_runner_projects", force: :cascade do |t|
t.integer "runner_id", null: false
t.datetime "created_at"
t.datetime "updated_at"
t.integer "project_id"
+ t.index ["project_id"], name: "index_ci_runner_projects_on_project_id", using: :btree
+ t.index ["runner_id"], name: "index_ci_runner_projects_on_runner_id", using: :btree
end
- add_index "ci_runner_projects", ["project_id"], name: "index_ci_runner_projects_on_project_id", using: :btree
- add_index "ci_runner_projects", ["runner_id"], name: "index_ci_runner_projects_on_runner_id", using: :btree
-
create_table "ci_runners", force: :cascade do |t|
t.string "token"
t.datetime "created_at"
@@ -542,14 +518,13 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.string "ip_address"
t.integer "maximum_timeout"
t.integer "runner_type", limit: 2, null: false
+ t.index ["contacted_at"], name: "index_ci_runners_on_contacted_at", using: :btree
+ t.index ["is_shared"], name: "index_ci_runners_on_is_shared", using: :btree
+ t.index ["locked"], name: "index_ci_runners_on_locked", using: :btree
+ t.index ["runner_type"], name: "index_ci_runners_on_runner_type", using: :btree
+ t.index ["token"], name: "index_ci_runners_on_token", using: :btree
end
- add_index "ci_runners", ["contacted_at"], name: "index_ci_runners_on_contacted_at", using: :btree
- add_index "ci_runners", ["is_shared"], name: "index_ci_runners_on_is_shared", using: :btree
- add_index "ci_runners", ["locked"], name: "index_ci_runners_on_locked", using: :btree
- add_index "ci_runners", ["runner_type"], name: "index_ci_runners_on_runner_type", using: :btree
- add_index "ci_runners", ["token"], name: "index_ci_runners_on_token", using: :btree
-
create_table "ci_stages", force: :cascade do |t|
t.integer "project_id"
t.integer "pipeline_id"
@@ -559,23 +534,21 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.integer "status"
t.integer "lock_version"
t.integer "position"
+ t.index ["pipeline_id", "name"], name: "index_ci_stages_on_pipeline_id_and_name", unique: true, using: :btree
+ t.index ["pipeline_id", "position"], name: "index_ci_stages_on_pipeline_id_and_position", using: :btree
+ t.index ["pipeline_id"], name: "index_ci_stages_on_pipeline_id", using: :btree
+ t.index ["project_id"], name: "index_ci_stages_on_project_id", using: :btree
end
- add_index "ci_stages", ["pipeline_id", "name"], name: "index_ci_stages_on_pipeline_id_and_name", unique: true, using: :btree
- add_index "ci_stages", ["pipeline_id", "position"], name: "index_ci_stages_on_pipeline_id_and_position", using: :btree
- add_index "ci_stages", ["pipeline_id"], name: "index_ci_stages_on_pipeline_id", using: :btree
- add_index "ci_stages", ["project_id"], name: "index_ci_stages_on_project_id", using: :btree
-
create_table "ci_trigger_requests", force: :cascade do |t|
t.integer "trigger_id", null: false
t.text "variables"
t.datetime "created_at"
t.datetime "updated_at"
t.integer "commit_id"
+ t.index ["commit_id"], name: "index_ci_trigger_requests_on_commit_id", using: :btree
end
- add_index "ci_trigger_requests", ["commit_id"], name: "index_ci_trigger_requests_on_commit_id", using: :btree
-
create_table "ci_triggers", force: :cascade do |t|
t.string "token"
t.datetime "created_at"
@@ -584,10 +557,9 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.integer "owner_id"
t.string "description"
t.string "ref"
+ t.index ["project_id"], name: "index_ci_triggers_on_project_id", using: :btree
end
- add_index "ci_triggers", ["project_id"], name: "index_ci_triggers_on_project_id", using: :btree
-
create_table "ci_variables", force: :cascade do |t|
t.string "key", null: false
t.text "value"
@@ -597,18 +569,16 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.integer "project_id", null: false
t.boolean "protected", default: false, null: false
t.string "environment_scope", default: "*", null: false
+ t.index ["project_id", "key", "environment_scope"], name: "index_ci_variables_on_project_id_and_key_and_environment_scope", unique: true, using: :btree
end
- add_index "ci_variables", ["project_id", "key", "environment_scope"], name: "index_ci_variables_on_project_id_and_key_and_environment_scope", unique: true, using: :btree
-
create_table "cluster_groups", force: :cascade do |t|
t.integer "cluster_id", null: false
t.integer "group_id", null: false
+ t.index ["cluster_id", "group_id"], name: "index_cluster_groups_on_cluster_id_and_group_id", unique: true, using: :btree
+ t.index ["group_id"], name: "index_cluster_groups_on_group_id", using: :btree
end
- add_index "cluster_groups", ["cluster_id", "group_id"], name: "index_cluster_groups_on_cluster_id_and_group_id", unique: true, using: :btree
- add_index "cluster_groups", ["group_id"], name: "index_cluster_groups_on_group_id", using: :btree
-
create_table "cluster_platforms_kubernetes", force: :cascade do |t|
t.integer "cluster_id", null: false
t.datetime_with_timezone "created_at", null: false
@@ -622,20 +592,18 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.text "encrypted_token"
t.string "encrypted_token_iv"
t.integer "authorization_type", limit: 2
+ t.index ["cluster_id"], name: "index_cluster_platforms_kubernetes_on_cluster_id", unique: true, using: :btree
end
- add_index "cluster_platforms_kubernetes", ["cluster_id"], name: "index_cluster_platforms_kubernetes_on_cluster_id", unique: true, using: :btree
-
create_table "cluster_projects", force: :cascade do |t|
t.integer "project_id", null: false
t.integer "cluster_id", null: false
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
+ t.index ["cluster_id"], name: "index_cluster_projects_on_cluster_id", using: :btree
+ t.index ["project_id"], name: "index_cluster_projects_on_project_id", using: :btree
end
- add_index "cluster_projects", ["cluster_id"], name: "index_cluster_projects_on_cluster_id", using: :btree
- add_index "cluster_projects", ["project_id"], name: "index_cluster_projects_on_project_id", using: :btree
-
create_table "cluster_providers_gcp", force: :cascade do |t|
t.integer "cluster_id", null: false
t.integer "status"
@@ -651,10 +619,9 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.text "encrypted_access_token"
t.string "encrypted_access_token_iv"
t.boolean "legacy_abac", default: true, null: false
+ t.index ["cluster_id"], name: "index_cluster_providers_gcp_on_cluster_id", unique: true, using: :btree
end
- add_index "cluster_providers_gcp", ["cluster_id"], name: "index_cluster_providers_gcp_on_cluster_id", unique: true, using: :btree
-
create_table "clusters", force: :cascade do |t|
t.integer "user_id"
t.integer "provider_type"
@@ -665,11 +632,10 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.string "name", null: false
t.string "environment_scope", default: "*", null: false
t.integer "cluster_type", limit: 2, default: 3, null: false
+ t.index ["enabled"], name: "index_clusters_on_enabled", using: :btree
+ t.index ["user_id"], name: "index_clusters_on_user_id", using: :btree
end
- add_index "clusters", ["enabled"], name: "index_clusters_on_enabled", using: :btree
- add_index "clusters", ["user_id"], name: "index_clusters_on_user_id", using: :btree
-
create_table "clusters_applications_helm", force: :cascade do |t|
t.integer "cluster_id", null: false
t.datetime_with_timezone "created_at", null: false
@@ -733,11 +699,10 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.string "version", null: false
t.text "status_reason"
t.boolean "privileged", default: true, null: false
+ t.index ["cluster_id"], name: "index_clusters_applications_runners_on_cluster_id", unique: true, using: :btree
+ t.index ["runner_id"], name: "index_clusters_applications_runners_on_runner_id", using: :btree
end
- add_index "clusters_applications_runners", ["cluster_id"], name: "index_clusters_applications_runners_on_cluster_id", unique: true, using: :btree
- add_index "clusters_applications_runners", ["runner_id"], name: "index_clusters_applications_runners_on_runner_id", using: :btree
-
create_table "clusters_kubernetes_namespaces", id: :bigserial, force: :cascade do |t|
t.integer "cluster_id", null: false
t.integer "project_id"
@@ -748,23 +713,21 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.string "encrypted_service_account_token_iv"
t.string "namespace", null: false
t.string "service_account_name"
+ t.index ["cluster_id", "namespace"], name: "kubernetes_namespaces_cluster_and_namespace", unique: true, using: :btree
+ t.index ["cluster_id"], name: "index_clusters_kubernetes_namespaces_on_cluster_id", using: :btree
+ t.index ["cluster_project_id"], name: "index_clusters_kubernetes_namespaces_on_cluster_project_id", using: :btree
+ t.index ["project_id"], name: "index_clusters_kubernetes_namespaces_on_project_id", using: :btree
end
- add_index "clusters_kubernetes_namespaces", ["cluster_id", "namespace"], name: "kubernetes_namespaces_cluster_and_namespace", unique: true, using: :btree
- add_index "clusters_kubernetes_namespaces", ["cluster_id"], name: "index_clusters_kubernetes_namespaces_on_cluster_id", using: :btree
- add_index "clusters_kubernetes_namespaces", ["cluster_project_id"], name: "index_clusters_kubernetes_namespaces_on_cluster_project_id", using: :btree
- add_index "clusters_kubernetes_namespaces", ["project_id"], name: "index_clusters_kubernetes_namespaces_on_project_id", using: :btree
-
create_table "container_repositories", force: :cascade do |t|
t.integer "project_id", null: false
t.string "name", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
+ t.index ["project_id", "name"], name: "index_container_repositories_on_project_id_and_name", unique: true, using: :btree
+ t.index ["project_id"], name: "index_container_repositories_on_project_id", using: :btree
end
- add_index "container_repositories", ["project_id", "name"], name: "index_container_repositories_on_project_id_and_name", unique: true, using: :btree
- add_index "container_repositories", ["project_id"], name: "index_container_repositories_on_project_id", using: :btree
-
create_table "conversational_development_index_metrics", force: :cascade do |t|
t.float "leader_issues", null: false
t.float "instance_issues", null: false
@@ -806,10 +769,9 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.datetime "created_at"
t.datetime "updated_at"
t.boolean "can_push", default: false, null: false
+ t.index ["project_id"], name: "index_deploy_keys_projects_on_project_id", using: :btree
end
- add_index "deploy_keys_projects", ["project_id"], name: "index_deploy_keys_projects_on_project_id", using: :btree
-
create_table "deploy_tokens", force: :cascade do |t|
t.boolean "revoked", default: false
t.boolean "read_repository", default: false, null: false
@@ -818,11 +780,10 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.datetime_with_timezone "created_at", null: false
t.string "name", null: false
t.string "token", null: false
+ t.index ["token", "expires_at", "id"], name: "index_deploy_tokens_on_token_and_expires_at_and_id", where: "(revoked IS FALSE)", using: :btree
+ t.index ["token"], name: "index_deploy_tokens_on_token", unique: true, using: :btree
end
- add_index "deploy_tokens", ["token", "expires_at", "id"], name: "index_deploy_tokens_on_token_and_expires_at_and_id", where: "(revoked IS FALSE)", using: :btree
- add_index "deploy_tokens", ["token"], name: "index_deploy_tokens_on_token", unique: true, using: :btree
-
create_table "deployments", force: :cascade do |t|
t.integer "iid", null: false
t.integer "project_id", null: false
@@ -838,18 +799,17 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.string "on_stop"
t.integer "status", limit: 2, default: 2, null: false
t.datetime_with_timezone "finished_at"
+ t.index ["created_at"], name: "index_deployments_on_created_at", using: :btree
+ t.index ["deployable_type", "deployable_id"], name: "index_deployments_on_deployable_type_and_deployable_id", using: :btree
+ t.index ["environment_id", "id"], name: "index_deployments_on_environment_id_and_id", using: :btree
+ t.index ["environment_id", "iid", "project_id"], name: "index_deployments_on_environment_id_and_iid_and_project_id", using: :btree
+ t.index ["environment_id", "status"], name: "index_deployments_on_environment_id_and_status", using: :btree
+ t.index ["id"], name: "partial_index_deployments_for_legacy_successful_deployments", where: "((finished_at IS NULL) AND (status = 2))", using: :btree
+ t.index ["project_id", "iid"], name: "index_deployments_on_project_id_and_iid", unique: true, using: :btree
+ t.index ["project_id", "status", "created_at"], name: "index_deployments_on_project_id_and_status_and_created_at", using: :btree
+ t.index ["project_id", "status"], name: "index_deployments_on_project_id_and_status", using: :btree
end
- add_index "deployments", ["created_at"], name: "index_deployments_on_created_at", using: :btree
- add_index "deployments", ["deployable_type", "deployable_id"], name: "index_deployments_on_deployable_type_and_deployable_id", using: :btree
- add_index "deployments", ["environment_id", "id"], name: "index_deployments_on_environment_id_and_id", using: :btree
- add_index "deployments", ["environment_id", "iid", "project_id"], name: "index_deployments_on_environment_id_and_iid_and_project_id", using: :btree
- add_index "deployments", ["environment_id", "status"], name: "index_deployments_on_environment_id_and_status", using: :btree
- add_index "deployments", ["id"], name: "partial_index_deployments_for_legacy_successful_deployments", where: "((finished_at IS NULL) AND (status = 2))", using: :btree
- add_index "deployments", ["project_id", "iid"], name: "index_deployments_on_project_id_and_iid", unique: true, using: :btree
- add_index "deployments", ["project_id", "status", "created_at"], name: "index_deployments_on_project_id_and_status_and_created_at", using: :btree
- add_index "deployments", ["project_id", "status"], name: "index_deployments_on_project_id_and_status", using: :btree
-
create_table "emails", force: :cascade do |t|
t.integer "user_id", null: false
t.string "email", null: false
@@ -858,12 +818,11 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.string "confirmation_token"
t.datetime_with_timezone "confirmed_at"
t.datetime_with_timezone "confirmation_sent_at"
+ t.index ["confirmation_token"], name: "index_emails_on_confirmation_token", unique: true, using: :btree
+ t.index ["email"], name: "index_emails_on_email", unique: true, using: :btree
+ t.index ["user_id"], name: "index_emails_on_user_id", using: :btree
end
- add_index "emails", ["confirmation_token"], name: "index_emails_on_confirmation_token", unique: true, using: :btree
- add_index "emails", ["email"], name: "index_emails_on_email", unique: true, using: :btree
- add_index "emails", ["user_id"], name: "index_emails_on_user_id", using: :btree
-
create_table "environments", force: :cascade do |t|
t.integer "project_id", null: false
t.string "name", null: false
@@ -873,11 +832,10 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.string "environment_type"
t.string "state", default: "available", null: false
t.string "slug", null: false
+ t.index ["project_id", "name"], name: "index_environments_on_project_id_and_name", unique: true, using: :btree
+ t.index ["project_id", "slug"], name: "index_environments_on_project_id_and_slug", unique: true, using: :btree
end
- add_index "environments", ["project_id", "name"], name: "index_environments_on_project_id_and_name", unique: true, using: :btree
- add_index "environments", ["project_id", "slug"], name: "index_environments_on_project_id_and_slug", unique: true, using: :btree
-
create_table "events", force: :cascade do |t|
t.integer "project_id"
t.integer "author_id", null: false
@@ -886,95 +844,59 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.datetime_with_timezone "updated_at", null: false
t.integer "action", limit: 2, null: false
t.string "target_type"
+ t.index ["action"], name: "index_events_on_action", using: :btree
+ t.index ["author_id", "project_id"], name: "index_events_on_author_id_and_project_id", using: :btree
+ t.index ["project_id", "id"], name: "index_events_on_project_id_and_id", using: :btree
+ t.index ["target_type", "target_id"], name: "index_events_on_target_type_and_target_id", using: :btree
end
- add_index "events", ["action"], name: "index_events_on_action", using: :btree
- add_index "events", ["author_id", "project_id"], name: "index_events_on_author_id_and_project_id", using: :btree
- add_index "events", ["project_id", "id"], name: "index_events_on_project_id_and_id", using: :btree
- add_index "events", ["target_type", "target_id"], name: "index_events_on_target_type_and_target_id", using: :btree
-
create_table "feature_gates", force: :cascade do |t|
t.string "feature_key", null: false
t.string "key", null: false
t.string "value"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
+ t.index ["feature_key", "key", "value"], name: "index_feature_gates_on_feature_key_and_key_and_value", unique: true, using: :btree
end
- add_index "feature_gates", ["feature_key", "key", "value"], name: "index_feature_gates_on_feature_key_and_key_and_value", unique: true, using: :btree
-
create_table "features", force: :cascade do |t|
t.string "key", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
+ t.index ["key"], name: "index_features_on_key", unique: true, using: :btree
end
- add_index "features", ["key"], name: "index_features_on_key", unique: true, using: :btree
-
create_table "fork_network_members", force: :cascade do |t|
t.integer "fork_network_id", null: false
t.integer "project_id", null: false
t.integer "forked_from_project_id"
+ t.index ["fork_network_id"], name: "index_fork_network_members_on_fork_network_id", using: :btree
+ t.index ["project_id"], name: "index_fork_network_members_on_project_id", unique: true, using: :btree
end
- add_index "fork_network_members", ["fork_network_id"], name: "index_fork_network_members_on_fork_network_id", using: :btree
- add_index "fork_network_members", ["project_id"], name: "index_fork_network_members_on_project_id", unique: true, using: :btree
-
create_table "fork_networks", force: :cascade do |t|
t.integer "root_project_id"
t.string "deleted_root_project_name"
+ t.index ["root_project_id"], name: "index_fork_networks_on_root_project_id", unique: true, using: :btree
end
- add_index "fork_networks", ["root_project_id"], name: "index_fork_networks_on_root_project_id", unique: true, using: :btree
-
create_table "forked_project_links", force: :cascade do |t|
t.integer "forked_to_project_id", null: false
t.integer "forked_from_project_id", null: false
t.datetime "created_at"
t.datetime "updated_at"
+ t.index ["forked_to_project_id"], name: "index_forked_project_links_on_forked_to_project_id", unique: true, using: :btree
end
- add_index "forked_project_links", ["forked_to_project_id"], name: "index_forked_project_links_on_forked_to_project_id", unique: true, using: :btree
-
- create_table "gcp_clusters", force: :cascade do |t|
- t.integer "project_id", null: false
- t.integer "user_id"
- t.integer "service_id"
- t.integer "status"
- t.integer "gcp_cluster_size", null: false
- t.datetime_with_timezone "created_at", null: false
- t.datetime_with_timezone "updated_at", null: false
- t.boolean "enabled", default: true
- t.text "status_reason"
- t.string "project_namespace"
- t.string "endpoint"
- t.text "ca_cert"
- t.text "encrypted_kubernetes_token"
- t.string "encrypted_kubernetes_token_iv"
- t.string "username"
- t.text "encrypted_password"
- t.string "encrypted_password_iv"
- t.string "gcp_project_id", null: false
- t.string "gcp_cluster_zone", null: false
- t.string "gcp_cluster_name", null: false
- t.string "gcp_machine_type"
- t.string "gcp_operation_id"
- t.text "encrypted_gcp_token"
- t.string "encrypted_gcp_token_iv"
- end
-
- add_index "gcp_clusters", ["project_id"], name: "index_gcp_clusters_on_project_id", unique: true, using: :btree
-
create_table "gpg_key_subkeys", force: :cascade do |t|
t.integer "gpg_key_id", null: false
t.binary "keyid"
t.binary "fingerprint"
+ t.index ["fingerprint"], name: "index_gpg_key_subkeys_on_fingerprint", unique: true, using: :btree
+ t.index ["gpg_key_id"], name: "index_gpg_key_subkeys_on_gpg_key_id", using: :btree
+ t.index ["keyid"], name: "index_gpg_key_subkeys_on_keyid", unique: true, using: :btree
end
- add_index "gpg_key_subkeys", ["fingerprint"], name: "index_gpg_key_subkeys_on_fingerprint", unique: true, using: :btree
- add_index "gpg_key_subkeys", ["gpg_key_id"], name: "index_gpg_key_subkeys_on_gpg_key_id", using: :btree
- add_index "gpg_key_subkeys", ["keyid"], name: "index_gpg_key_subkeys_on_keyid", unique: true, using: :btree
-
create_table "gpg_keys", force: :cascade do |t|
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
@@ -982,12 +904,11 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.binary "primary_keyid"
t.binary "fingerprint"
t.text "key"
+ t.index ["fingerprint"], name: "index_gpg_keys_on_fingerprint", unique: true, using: :btree
+ t.index ["primary_keyid"], name: "index_gpg_keys_on_primary_keyid", unique: true, using: :btree
+ t.index ["user_id"], name: "index_gpg_keys_on_user_id", using: :btree
end
- add_index "gpg_keys", ["fingerprint"], name: "index_gpg_keys_on_fingerprint", unique: true, using: :btree
- add_index "gpg_keys", ["primary_keyid"], name: "index_gpg_keys_on_primary_keyid", unique: true, using: :btree
- add_index "gpg_keys", ["user_id"], name: "index_gpg_keys_on_user_id", using: :btree
-
create_table "gpg_signatures", force: :cascade do |t|
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
@@ -999,63 +920,57 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.text "gpg_key_user_email"
t.integer "verification_status", limit: 2, default: 0, null: false
t.integer "gpg_key_subkey_id"
+ t.index ["commit_sha"], name: "index_gpg_signatures_on_commit_sha", unique: true, using: :btree
+ t.index ["gpg_key_id"], name: "index_gpg_signatures_on_gpg_key_id", using: :btree
+ t.index ["gpg_key_primary_keyid"], name: "index_gpg_signatures_on_gpg_key_primary_keyid", using: :btree
+ t.index ["gpg_key_subkey_id"], name: "index_gpg_signatures_on_gpg_key_subkey_id", using: :btree
+ t.index ["project_id"], name: "index_gpg_signatures_on_project_id", using: :btree
end
- add_index "gpg_signatures", ["commit_sha"], name: "index_gpg_signatures_on_commit_sha", unique: true, using: :btree
- add_index "gpg_signatures", ["gpg_key_id"], name: "index_gpg_signatures_on_gpg_key_id", using: :btree
- add_index "gpg_signatures", ["gpg_key_primary_keyid"], name: "index_gpg_signatures_on_gpg_key_primary_keyid", using: :btree
- add_index "gpg_signatures", ["gpg_key_subkey_id"], name: "index_gpg_signatures_on_gpg_key_subkey_id", using: :btree
- add_index "gpg_signatures", ["project_id"], name: "index_gpg_signatures_on_project_id", using: :btree
-
create_table "group_custom_attributes", force: :cascade do |t|
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
t.integer "group_id", null: false
t.string "key", null: false
t.string "value", null: false
+ t.index ["group_id", "key"], name: "index_group_custom_attributes_on_group_id_and_key", unique: true, using: :btree
+ t.index ["key", "value"], name: "index_group_custom_attributes_on_key_and_value", using: :btree
end
- add_index "group_custom_attributes", ["group_id", "key"], name: "index_group_custom_attributes_on_group_id_and_key", unique: true, using: :btree
- add_index "group_custom_attributes", ["key", "value"], name: "index_group_custom_attributes_on_key_and_value", using: :btree
-
create_table "identities", force: :cascade do |t|
t.string "extern_uid"
t.string "provider"
t.integer "user_id"
t.datetime "created_at"
t.datetime "updated_at"
+ t.index ["user_id"], name: "index_identities_on_user_id", using: :btree
end
- add_index "identities", ["user_id"], name: "index_identities_on_user_id", using: :btree
-
create_table "import_export_uploads", force: :cascade do |t|
t.datetime_with_timezone "updated_at", null: false
t.integer "project_id"
t.text "import_file"
t.text "export_file"
+ t.index ["project_id"], name: "index_import_export_uploads_on_project_id", using: :btree
+ t.index ["updated_at"], name: "index_import_export_uploads_on_updated_at", using: :btree
end
- add_index "import_export_uploads", ["project_id"], name: "index_import_export_uploads_on_project_id", using: :btree
- add_index "import_export_uploads", ["updated_at"], name: "index_import_export_uploads_on_updated_at", using: :btree
-
create_table "internal_ids", id: :bigserial, force: :cascade do |t|
t.integer "project_id"
t.integer "usage", null: false
t.integer "last_value", null: false
t.integer "namespace_id"
+ t.index ["usage", "namespace_id"], name: "index_internal_ids_on_usage_and_namespace_id", unique: true, where: "(namespace_id IS NOT NULL)", using: :btree
+ t.index ["usage", "project_id"], name: "index_internal_ids_on_usage_and_project_id", unique: true, where: "(project_id IS NOT NULL)", using: :btree
end
- add_index "internal_ids", ["usage", "namespace_id"], name: "index_internal_ids_on_usage_and_namespace_id", unique: true, where: "(namespace_id IS NOT NULL)", using: :btree
- add_index "internal_ids", ["usage", "project_id"], name: "index_internal_ids_on_usage_and_project_id", unique: true, where: "(project_id IS NOT NULL)", using: :btree
-
create_table "issue_assignees", id: false, force: :cascade do |t|
t.integer "user_id", null: false
t.integer "issue_id", null: false
+ t.index ["issue_id", "user_id"], name: "index_issue_assignees_on_issue_id_and_user_id", unique: true, using: :btree
+ t.index ["user_id"], name: "index_issue_assignees_on_user_id", using: :btree
end
- add_index "issue_assignees", ["issue_id", "user_id"], name: "index_issue_assignees_on_issue_id_and_user_id", unique: true, using: :btree
- add_index "issue_assignees", ["user_id"], name: "index_issue_assignees_on_user_id", using: :btree
-
create_table "issue_metrics", force: :cascade do |t|
t.integer "issue_id", null: false
t.datetime "first_mentioned_in_commit_at"
@@ -1063,10 +978,9 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.datetime "first_added_to_board_at"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
+ t.index ["issue_id"], name: "index_issue_metrics", using: :btree
end
- add_index "issue_metrics", ["issue_id"], name: "index_issue_metrics", using: :btree
-
create_table "issues", force: :cascade do |t|
t.string "title"
t.integer "author_id"
@@ -1092,23 +1006,22 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.boolean "discussion_locked"
t.datetime_with_timezone "closed_at"
t.integer "closed_by_id"
+ t.index ["author_id"], name: "index_issues_on_author_id", using: :btree
+ t.index ["confidential"], name: "index_issues_on_confidential", using: :btree
+ t.index ["description"], name: "index_issues_on_description_trigram", using: :gin, opclasses: {"description"=>"gin_trgm_ops"}
+ t.index ["milestone_id"], name: "index_issues_on_milestone_id", using: :btree
+ t.index ["moved_to_id"], name: "index_issues_on_moved_to_id", where: "(moved_to_id IS NOT NULL)", using: :btree
+ t.index ["project_id", "created_at", "id", "state"], name: "index_issues_on_project_id_and_created_at_and_id_and_state", using: :btree
+ t.index ["project_id", "due_date", "id", "state"], name: "idx_issues_on_project_id_and_due_date_and_id_and_state_partial", where: "(due_date IS NOT NULL)", using: :btree
+ t.index ["project_id", "iid"], name: "index_issues_on_project_id_and_iid", unique: true, using: :btree
+ t.index ["project_id", "updated_at", "id", "state"], name: "index_issues_on_project_id_and_updated_at_and_id_and_state", using: :btree
+ t.index ["relative_position"], name: "index_issues_on_relative_position", using: :btree
+ t.index ["state"], name: "index_issues_on_state", using: :btree
+ t.index ["title"], name: "index_issues_on_title_trigram", using: :gin, opclasses: {"title"=>"gin_trgm_ops"}
+ t.index ["updated_at"], name: "index_issues_on_updated_at", using: :btree
+ t.index ["updated_by_id"], name: "index_issues_on_updated_by_id", where: "(updated_by_id IS NOT NULL)", using: :btree
end
- add_index "issues", ["author_id"], name: "index_issues_on_author_id", using: :btree
- add_index "issues", ["confidential"], name: "index_issues_on_confidential", using: :btree
- add_index "issues", ["description"], name: "index_issues_on_description_trigram", using: :gin, opclasses: {"description"=>"gin_trgm_ops"}
- add_index "issues", ["milestone_id"], name: "index_issues_on_milestone_id", using: :btree
- add_index "issues", ["moved_to_id"], name: "index_issues_on_moved_to_id", where: "(moved_to_id IS NOT NULL)", using: :btree
- add_index "issues", ["project_id", "created_at", "id", "state"], name: "index_issues_on_project_id_and_created_at_and_id_and_state", using: :btree
- add_index "issues", ["project_id", "due_date", "id", "state"], name: "idx_issues_on_project_id_and_due_date_and_id_and_state_partial", where: "(due_date IS NOT NULL)", using: :btree
- add_index "issues", ["project_id", "iid"], name: "index_issues_on_project_id_and_iid", unique: true, using: :btree
- add_index "issues", ["project_id", "updated_at", "id", "state"], name: "index_issues_on_project_id_and_updated_at_and_id_and_state", using: :btree
- add_index "issues", ["relative_position"], name: "index_issues_on_relative_position", using: :btree
- add_index "issues", ["state"], name: "index_issues_on_state", using: :btree
- add_index "issues", ["title"], name: "index_issues_on_title_trigram", using: :gin, opclasses: {"title"=>"gin_trgm_ops"}
- add_index "issues", ["updated_at"], name: "index_issues_on_updated_at", using: :btree
- add_index "issues", ["updated_by_id"], name: "index_issues_on_updated_by_id", where: "(updated_by_id IS NOT NULL)", using: :btree
-
create_table "keys", force: :cascade do |t|
t.integer "user_id"
t.datetime "created_at"
@@ -1119,33 +1032,30 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.string "fingerprint"
t.boolean "public", default: false, null: false
t.datetime "last_used_at"
+ t.index ["fingerprint"], name: "index_keys_on_fingerprint", unique: true, using: :btree
+ t.index ["user_id"], name: "index_keys_on_user_id", using: :btree
end
- add_index "keys", ["fingerprint"], name: "index_keys_on_fingerprint", unique: true, using: :btree
- add_index "keys", ["user_id"], name: "index_keys_on_user_id", using: :btree
-
create_table "label_links", force: :cascade do |t|
t.integer "label_id"
t.integer "target_id"
t.string "target_type"
t.datetime "created_at"
t.datetime "updated_at"
+ t.index ["label_id"], name: "index_label_links_on_label_id", using: :btree
+ t.index ["target_id", "target_type"], name: "index_label_links_on_target_id_and_target_type", using: :btree
end
- add_index "label_links", ["label_id"], name: "index_label_links_on_label_id", using: :btree
- add_index "label_links", ["target_id", "target_type"], name: "index_label_links_on_target_id_and_target_type", using: :btree
-
create_table "label_priorities", force: :cascade do |t|
t.integer "project_id", null: false
t.integer "label_id", null: false
t.integer "priority", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
+ t.index ["priority"], name: "index_label_priorities_on_priority", using: :btree
+ t.index ["project_id", "label_id"], name: "index_label_priorities_on_project_id_and_label_id", unique: true, using: :btree
end
- add_index "label_priorities", ["priority"], name: "index_label_priorities_on_priority", using: :btree
- add_index "label_priorities", ["project_id", "label_id"], name: "index_label_priorities_on_project_id_and_label_id", unique: true, using: :btree
-
create_table "labels", force: :cascade do |t|
t.string "title"
t.string "color"
@@ -1158,45 +1068,41 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.string "type"
t.integer "group_id"
t.integer "cached_markdown_version"
+ t.index ["group_id", "project_id", "title"], name: "index_labels_on_group_id_and_project_id_and_title", unique: true, using: :btree
+ t.index ["project_id"], name: "index_labels_on_project_id", using: :btree
+ t.index ["template"], name: "index_labels_on_template", where: "template", using: :btree
+ t.index ["title"], name: "index_labels_on_title", using: :btree
+ t.index ["type", "project_id"], name: "index_labels_on_type_and_project_id", using: :btree
end
- add_index "labels", ["group_id", "project_id", "title"], name: "index_labels_on_group_id_and_project_id_and_title", unique: true, using: :btree
- add_index "labels", ["project_id"], name: "index_labels_on_project_id", using: :btree
- add_index "labels", ["template"], name: "index_labels_on_template", where: "template", using: :btree
- add_index "labels", ["title"], name: "index_labels_on_title", using: :btree
- add_index "labels", ["type", "project_id"], name: "index_labels_on_type_and_project_id", using: :btree
-
create_table "lfs_file_locks", force: :cascade do |t|
t.integer "project_id", null: false
t.integer "user_id", null: false
t.datetime "created_at", null: false
t.string "path", limit: 511
+ t.index ["project_id", "path"], name: "index_lfs_file_locks_on_project_id_and_path", unique: true, using: :btree
+ t.index ["user_id"], name: "index_lfs_file_locks_on_user_id", using: :btree
end
- add_index "lfs_file_locks", ["project_id", "path"], name: "index_lfs_file_locks_on_project_id_and_path", unique: true, using: :btree
- add_index "lfs_file_locks", ["user_id"], name: "index_lfs_file_locks_on_user_id", using: :btree
-
create_table "lfs_objects", force: :cascade do |t|
t.string "oid", null: false
- t.integer "size", limit: 8, null: false
+ t.bigint "size", null: false
t.datetime "created_at"
t.datetime "updated_at"
t.string "file"
t.integer "file_store"
+ t.index ["file_store"], name: "index_lfs_objects_on_file_store", using: :btree
+ t.index ["oid"], name: "index_lfs_objects_on_oid", unique: true, using: :btree
end
- add_index "lfs_objects", ["file_store"], name: "index_lfs_objects_on_file_store", using: :btree
- add_index "lfs_objects", ["oid"], name: "index_lfs_objects_on_oid", unique: true, using: :btree
-
create_table "lfs_objects_projects", force: :cascade do |t|
t.integer "lfs_object_id", null: false
t.integer "project_id", null: false
t.datetime "created_at"
t.datetime "updated_at"
+ t.index ["project_id"], name: "index_lfs_objects_projects_on_project_id", using: :btree
end
- add_index "lfs_objects_projects", ["project_id"], name: "index_lfs_objects_projects_on_project_id", using: :btree
-
create_table "lists", force: :cascade do |t|
t.integer "board_id", null: false
t.integer "label_id"
@@ -1204,12 +1110,11 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.integer "position"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
+ t.index ["board_id", "label_id"], name: "index_lists_on_board_id_and_label_id", unique: true, using: :btree
+ t.index ["label_id"], name: "index_lists_on_label_id", using: :btree
+ t.index ["list_type"], name: "index_lists_on_list_type", using: :btree
end
- add_index "lists", ["board_id", "label_id"], name: "index_lists_on_board_id_and_label_id", unique: true, using: :btree
- add_index "lists", ["label_id"], name: "index_lists_on_label_id", using: :btree
- add_index "lists", ["list_type"], name: "index_lists_on_list_type", using: :btree
-
create_table "members", force: :cascade do |t|
t.integer "access_level", null: false
t.integer "source_id", null: false
@@ -1225,14 +1130,13 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.datetime "invite_accepted_at"
t.datetime "requested_at"
t.date "expires_at"
+ t.index ["access_level"], name: "index_members_on_access_level", using: :btree
+ t.index ["invite_token"], name: "index_members_on_invite_token", unique: true, using: :btree
+ t.index ["requested_at"], name: "index_members_on_requested_at", using: :btree
+ t.index ["source_id", "source_type"], name: "index_members_on_source_id_and_source_type", using: :btree
+ t.index ["user_id"], name: "index_members_on_user_id", using: :btree
end
- add_index "members", ["access_level"], name: "index_members_on_access_level", using: :btree
- add_index "members", ["invite_token"], name: "index_members_on_invite_token", unique: true, using: :btree
- add_index "members", ["requested_at"], name: "index_members_on_requested_at", using: :btree
- add_index "members", ["source_id", "source_type"], name: "index_members_on_source_id_and_source_type", using: :btree
- add_index "members", ["user_id"], name: "index_members_on_user_id", using: :btree
-
create_table "merge_request_diff_commits", id: false, force: :cascade do |t|
t.datetime_with_timezone "authored_date"
t.datetime_with_timezone "committed_date"
@@ -1244,11 +1148,10 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.text "committer_name"
t.text "committer_email"
t.text "message"
+ t.index ["merge_request_diff_id", "relative_order"], name: "index_merge_request_diff_commits_on_mr_diff_id_and_order", unique: true, using: :btree
+ t.index ["sha"], name: "index_merge_request_diff_commits_on_sha", using: :btree
end
- add_index "merge_request_diff_commits", ["merge_request_diff_id", "relative_order"], name: "index_merge_request_diff_commits_on_mr_diff_id_and_order", unique: true, using: :btree
- add_index "merge_request_diff_commits", ["sha"], name: "index_merge_request_diff_commits_on_sha", using: :btree
-
create_table "merge_request_diff_files", id: false, force: :cascade do |t|
t.integer "merge_request_diff_id", null: false
t.integer "relative_order", null: false
@@ -1262,10 +1165,9 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.text "old_path", null: false
t.text "diff", null: false
t.boolean "binary"
+ t.index ["merge_request_diff_id", "relative_order"], name: "index_merge_request_diff_files_on_mr_diff_id_and_order", unique: true, using: :btree
end
- add_index "merge_request_diff_files", ["merge_request_diff_id", "relative_order"], name: "index_merge_request_diff_files_on_mr_diff_id_and_order", unique: true, using: :btree
-
create_table "merge_request_diffs", force: :cascade do |t|
t.string "state"
t.integer "merge_request_id", null: false
@@ -1276,10 +1178,9 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.string "head_commit_sha"
t.string "start_commit_sha"
t.integer "commits_count"
+ t.index ["merge_request_id", "id"], name: "index_merge_request_diffs_on_merge_request_id_and_id", using: :btree
end
- add_index "merge_request_diffs", ["merge_request_id", "id"], name: "index_merge_request_diffs_on_merge_request_id_and_id", using: :btree
-
create_table "merge_request_metrics", force: :cascade do |t|
t.integer "merge_request_id", null: false
t.datetime "latest_build_started_at"
@@ -1292,12 +1193,11 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.integer "merged_by_id"
t.integer "latest_closed_by_id"
t.datetime_with_timezone "latest_closed_at"
+ t.index ["first_deployed_to_production_at"], name: "index_merge_request_metrics_on_first_deployed_to_production_at", using: :btree
+ t.index ["merge_request_id"], name: "index_merge_request_metrics", using: :btree
+ t.index ["pipeline_id"], name: "index_merge_request_metrics_on_pipeline_id", using: :btree
end
- add_index "merge_request_metrics", ["first_deployed_to_production_at"], name: "index_merge_request_metrics_on_first_deployed_to_production_at", using: :btree
- add_index "merge_request_metrics", ["merge_request_id"], name: "index_merge_request_metrics", using: :btree
- add_index "merge_request_metrics", ["pipeline_id"], name: "index_merge_request_metrics_on_pipeline_id", using: :btree
-
create_table "merge_requests", force: :cascade do |t|
t.string "target_branch", null: false
t.string "source_branch", null: false
@@ -1334,38 +1234,36 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.string "rebase_commit_sha"
t.boolean "squash", default: false, null: false
t.boolean "allow_maintainer_to_push"
+ t.index ["assignee_id"], name: "index_merge_requests_on_assignee_id", using: :btree
+ t.index ["author_id"], name: "index_merge_requests_on_author_id", using: :btree
+ t.index ["created_at"], name: "index_merge_requests_on_created_at", using: :btree
+ t.index ["description"], name: "index_merge_requests_on_description_trigram", using: :gin, opclasses: {"description"=>"gin_trgm_ops"}
+ t.index ["head_pipeline_id"], name: "index_merge_requests_on_head_pipeline_id", using: :btree
+ t.index ["id", "merge_jid"], name: "index_merge_requests_on_id_and_merge_jid", where: "((merge_jid IS NOT NULL) AND ((state)::text = 'locked'::text))", using: :btree
+ t.index ["latest_merge_request_diff_id"], name: "index_merge_requests_on_latest_merge_request_diff_id", using: :btree
+ t.index ["merge_user_id"], name: "index_merge_requests_on_merge_user_id", where: "(merge_user_id IS NOT NULL)", using: :btree
+ t.index ["milestone_id"], name: "index_merge_requests_on_milestone_id", using: :btree
+ t.index ["source_branch"], name: "index_merge_requests_on_source_branch", using: :btree
+ t.index ["source_project_id", "source_branch"], name: "index_merge_requests_on_source_project_and_branch_state_opened", where: "((state)::text = 'opened'::text)", using: :btree
+ t.index ["source_project_id", "source_branch"], name: "index_merge_requests_on_source_project_id_and_source_branch", using: :btree
+ t.index ["target_branch"], name: "index_merge_requests_on_target_branch", using: :btree
+ t.index ["target_project_id", "iid"], name: "index_merge_requests_on_target_project_id_and_iid", unique: true, using: :btree
+ t.index ["target_project_id", "iid"], name: "index_merge_requests_on_target_project_id_and_iid_opened", where: "((state)::text = 'opened'::text)", using: :btree
+ t.index ["target_project_id", "merge_commit_sha", "id"], name: "index_merge_requests_on_tp_id_and_merge_commit_sha_and_id", using: :btree
+ t.index ["title"], name: "index_merge_requests_on_title", using: :btree
+ t.index ["title"], name: "index_merge_requests_on_title_trigram", using: :gin, opclasses: {"title"=>"gin_trgm_ops"}
+ t.index ["updated_by_id"], name: "index_merge_requests_on_updated_by_id", where: "(updated_by_id IS NOT NULL)", using: :btree
end
- add_index "merge_requests", ["assignee_id"], name: "index_merge_requests_on_assignee_id", using: :btree
- add_index "merge_requests", ["author_id"], name: "index_merge_requests_on_author_id", using: :btree
- add_index "merge_requests", ["created_at"], name: "index_merge_requests_on_created_at", using: :btree
- add_index "merge_requests", ["description"], name: "index_merge_requests_on_description_trigram", using: :gin, opclasses: {"description"=>"gin_trgm_ops"}
- add_index "merge_requests", ["head_pipeline_id"], name: "index_merge_requests_on_head_pipeline_id", using: :btree
- add_index "merge_requests", ["id", "merge_jid"], name: "index_merge_requests_on_id_and_merge_jid", where: "((merge_jid IS NOT NULL) AND ((state)::text = 'locked'::text))", using: :btree
- add_index "merge_requests", ["latest_merge_request_diff_id"], name: "index_merge_requests_on_latest_merge_request_diff_id", using: :btree
- add_index "merge_requests", ["merge_user_id"], name: "index_merge_requests_on_merge_user_id", where: "(merge_user_id IS NOT NULL)", using: :btree
- add_index "merge_requests", ["milestone_id"], name: "index_merge_requests_on_milestone_id", using: :btree
- add_index "merge_requests", ["source_branch"], name: "index_merge_requests_on_source_branch", using: :btree
- add_index "merge_requests", ["source_project_id", "source_branch"], name: "index_merge_requests_on_source_project_and_branch_state_opened", where: "((state)::text = 'opened'::text)", using: :btree
- add_index "merge_requests", ["source_project_id", "source_branch"], name: "index_merge_requests_on_source_project_id_and_source_branch", using: :btree
- add_index "merge_requests", ["target_branch"], name: "index_merge_requests_on_target_branch", using: :btree
- add_index "merge_requests", ["target_project_id", "iid"], name: "index_merge_requests_on_target_project_id_and_iid", unique: true, using: :btree
- add_index "merge_requests", ["target_project_id", "iid"], name: "index_merge_requests_on_target_project_id_and_iid_opened", where: "((state)::text = 'opened'::text)", using: :btree
- add_index "merge_requests", ["target_project_id", "merge_commit_sha", "id"], name: "index_merge_requests_on_tp_id_and_merge_commit_sha_and_id", using: :btree
- add_index "merge_requests", ["title"], name: "index_merge_requests_on_title", using: :btree
- add_index "merge_requests", ["title"], name: "index_merge_requests_on_title_trigram", using: :gin, opclasses: {"title"=>"gin_trgm_ops"}
- add_index "merge_requests", ["updated_by_id"], name: "index_merge_requests_on_updated_by_id", where: "(updated_by_id IS NOT NULL)", using: :btree
-
create_table "merge_requests_closing_issues", force: :cascade do |t|
t.integer "merge_request_id", null: false
t.integer "issue_id", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
+ t.index ["issue_id"], name: "index_merge_requests_closing_issues_on_issue_id", using: :btree
+ t.index ["merge_request_id"], name: "index_merge_requests_closing_issues_on_merge_request_id", using: :btree
end
- add_index "merge_requests_closing_issues", ["issue_id"], name: "index_merge_requests_closing_issues_on_issue_id", using: :btree
- add_index "merge_requests_closing_issues", ["merge_request_id"], name: "index_merge_requests_closing_issues_on_merge_request_id", using: :btree
-
create_table "milestones", force: :cascade do |t|
t.string "title", null: false
t.integer "project_id"
@@ -1380,15 +1278,14 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.date "start_date"
t.integer "cached_markdown_version"
t.integer "group_id"
+ t.index ["description"], name: "index_milestones_on_description_trigram", using: :gin, opclasses: {"description"=>"gin_trgm_ops"}
+ t.index ["due_date"], name: "index_milestones_on_due_date", using: :btree
+ t.index ["group_id"], name: "index_milestones_on_group_id", using: :btree
+ t.index ["project_id", "iid"], name: "index_milestones_on_project_id_and_iid", unique: true, using: :btree
+ t.index ["title"], name: "index_milestones_on_title", using: :btree
+ t.index ["title"], name: "index_milestones_on_title_trigram", using: :gin, opclasses: {"title"=>"gin_trgm_ops"}
end
- add_index "milestones", ["description"], name: "index_milestones_on_description_trigram", using: :gin, opclasses: {"description"=>"gin_trgm_ops"}
- add_index "milestones", ["due_date"], name: "index_milestones_on_due_date", using: :btree
- add_index "milestones", ["group_id"], name: "index_milestones_on_group_id", using: :btree
- add_index "milestones", ["project_id", "iid"], name: "index_milestones_on_project_id_and_iid", unique: true, using: :btree
- add_index "milestones", ["title"], name: "index_milestones_on_title", using: :btree
- add_index "milestones", ["title"], name: "index_milestones_on_title_trigram", using: :gin, opclasses: {"title"=>"gin_trgm_ops"}
-
create_table "namespaces", force: :cascade do |t|
t.string "name", null: false
t.string "path", null: false
@@ -1408,19 +1305,18 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.integer "two_factor_grace_period", default: 48, null: false
t.integer "cached_markdown_version"
t.string "runners_token"
+ t.index ["created_at"], name: "index_namespaces_on_created_at", using: :btree
+ t.index ["name", "parent_id"], name: "index_namespaces_on_name_and_parent_id", unique: true, using: :btree
+ t.index ["name"], name: "index_namespaces_on_name_trigram", using: :gin, opclasses: {"name"=>"gin_trgm_ops"}
+ t.index ["owner_id"], name: "index_namespaces_on_owner_id", using: :btree
+ t.index ["parent_id", "id"], name: "index_namespaces_on_parent_id_and_id", unique: true, using: :btree
+ t.index ["path"], name: "index_namespaces_on_path", using: :btree
+ t.index ["path"], name: "index_namespaces_on_path_trigram", using: :gin, opclasses: {"path"=>"gin_trgm_ops"}
+ t.index ["require_two_factor_authentication"], name: "index_namespaces_on_require_two_factor_authentication", using: :btree
+ t.index ["runners_token"], name: "index_namespaces_on_runners_token", unique: true, using: :btree
+ t.index ["type"], name: "index_namespaces_on_type", using: :btree
end
- add_index "namespaces", ["created_at"], name: "index_namespaces_on_created_at", using: :btree
- add_index "namespaces", ["name", "parent_id"], name: "index_namespaces_on_name_and_parent_id", unique: true, using: :btree
- add_index "namespaces", ["name"], name: "index_namespaces_on_name_trigram", using: :gin, opclasses: {"name"=>"gin_trgm_ops"}
- add_index "namespaces", ["owner_id"], name: "index_namespaces_on_owner_id", using: :btree
- add_index "namespaces", ["parent_id", "id"], name: "index_namespaces_on_parent_id_and_id", unique: true, using: :btree
- add_index "namespaces", ["path"], name: "index_namespaces_on_path", using: :btree
- add_index "namespaces", ["path"], name: "index_namespaces_on_path_trigram", using: :gin, opclasses: {"path"=>"gin_trgm_ops"}
- add_index "namespaces", ["require_two_factor_authentication"], name: "index_namespaces_on_require_two_factor_authentication", using: :btree
- add_index "namespaces", ["runners_token"], name: "index_namespaces_on_runners_token", unique: true, using: :btree
- add_index "namespaces", ["type"], name: "index_namespaces_on_type", using: :btree
-
create_table "note_diff_files", force: :cascade do |t|
t.integer "diff_note_id", null: false
t.text "diff", null: false
@@ -1431,10 +1327,9 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.string "b_mode", null: false
t.text "new_path", null: false
t.text "old_path", null: false
+ t.index ["diff_note_id"], name: "index_note_diff_files_on_diff_note_id", unique: true, using: :btree
end
- add_index "note_diff_files", ["diff_note_id"], name: "index_note_diff_files_on_diff_note_id", unique: true, using: :btree
-
create_table "notes", force: :cascade do |t|
t.text "note"
t.string "noteable_type"
@@ -1459,19 +1354,18 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.integer "cached_markdown_version"
t.text "change_position"
t.boolean "resolved_by_push"
+ t.index ["author_id"], name: "index_notes_on_author_id", using: :btree
+ t.index ["commit_id"], name: "index_notes_on_commit_id", using: :btree
+ t.index ["created_at"], name: "index_notes_on_created_at", using: :btree
+ t.index ["discussion_id"], name: "index_notes_on_discussion_id", using: :btree
+ t.index ["line_code"], name: "index_notes_on_line_code", using: :btree
+ t.index ["note"], name: "index_notes_on_note_trigram", using: :gin, opclasses: {"note"=>"gin_trgm_ops"}
+ t.index ["noteable_id", "noteable_type"], name: "index_notes_on_noteable_id_and_noteable_type", using: :btree
+ t.index ["noteable_type"], name: "index_notes_on_noteable_type", using: :btree
+ t.index ["project_id", "noteable_type"], name: "index_notes_on_project_id_and_noteable_type", using: :btree
+ t.index ["updated_at"], name: "index_notes_on_updated_at", using: :btree
end
- add_index "notes", ["author_id"], name: "index_notes_on_author_id", using: :btree
- add_index "notes", ["commit_id"], name: "index_notes_on_commit_id", using: :btree
- add_index "notes", ["created_at"], name: "index_notes_on_created_at", using: :btree
- add_index "notes", ["discussion_id"], name: "index_notes_on_discussion_id", using: :btree
- add_index "notes", ["line_code"], name: "index_notes_on_line_code", using: :btree
- add_index "notes", ["note"], name: "index_notes_on_note_trigram", using: :gin, opclasses: {"note"=>"gin_trgm_ops"}
- add_index "notes", ["noteable_id", "noteable_type"], name: "index_notes_on_noteable_id_and_noteable_type", using: :btree
- add_index "notes", ["noteable_type"], name: "index_notes_on_noteable_type", using: :btree
- add_index "notes", ["project_id", "noteable_type"], name: "index_notes_on_project_id_and_noteable_type", using: :btree
- add_index "notes", ["updated_at"], name: "index_notes_on_updated_at", using: :btree
-
create_table "notification_settings", force: :cascade do |t|
t.integer "user_id", null: false
t.integer "source_id"
@@ -1493,12 +1387,11 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.boolean "success_pipeline"
t.boolean "push_to_merge_request"
t.boolean "issue_due"
+ t.index ["source_id", "source_type"], name: "index_notification_settings_on_source_id_and_source_type", using: :btree
+ t.index ["user_id", "source_id", "source_type"], name: "index_notifications_on_user_id_and_source_id_and_source_type", unique: true, using: :btree
+ t.index ["user_id"], name: "index_notification_settings_on_user_id", using: :btree
end
- add_index "notification_settings", ["source_id", "source_type"], name: "index_notification_settings_on_source_id_and_source_type", using: :btree
- add_index "notification_settings", ["user_id", "source_id", "source_type"], name: "index_notifications_on_user_id_and_source_id_and_source_type", unique: true, using: :btree
- add_index "notification_settings", ["user_id"], name: "index_notification_settings_on_user_id", using: :btree
-
create_table "oauth_access_grants", force: :cascade do |t|
t.integer "resource_owner_id", null: false
t.integer "application_id", null: false
@@ -1508,10 +1401,9 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.datetime "created_at", null: false
t.datetime "revoked_at"
t.string "scopes"
+ t.index ["token"], name: "index_oauth_access_grants_on_token", unique: true, using: :btree
end
- add_index "oauth_access_grants", ["token"], name: "index_oauth_access_grants_on_token", unique: true, using: :btree
-
create_table "oauth_access_tokens", force: :cascade do |t|
t.integer "resource_owner_id"
t.integer "application_id"
@@ -1521,12 +1413,11 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.datetime "revoked_at"
t.datetime "created_at", null: false
t.string "scopes"
+ t.index ["refresh_token"], name: "index_oauth_access_tokens_on_refresh_token", unique: true, using: :btree
+ t.index ["resource_owner_id"], name: "index_oauth_access_tokens_on_resource_owner_id", using: :btree
+ t.index ["token"], name: "index_oauth_access_tokens_on_token", unique: true, using: :btree
end
- add_index "oauth_access_tokens", ["refresh_token"], name: "index_oauth_access_tokens_on_refresh_token", unique: true, using: :btree
- add_index "oauth_access_tokens", ["resource_owner_id"], name: "index_oauth_access_tokens_on_resource_owner_id", using: :btree
- add_index "oauth_access_tokens", ["token"], name: "index_oauth_access_tokens_on_token", unique: true, using: :btree
-
create_table "oauth_applications", force: :cascade do |t|
t.string "name", null: false
t.string "uid", null: false
@@ -1538,11 +1429,10 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.integer "owner_id"
t.string "owner_type"
t.boolean "trusted", default: false, null: false
+ t.index ["owner_id", "owner_type"], name: "index_oauth_applications_on_owner_id_and_owner_type", using: :btree
+ t.index ["uid"], name: "index_oauth_applications_on_uid", unique: true, using: :btree
end
- add_index "oauth_applications", ["owner_id", "owner_type"], name: "index_oauth_applications_on_owner_id_and_owner_type", using: :btree
- add_index "oauth_applications", ["uid"], name: "index_oauth_applications_on_uid", unique: true, using: :btree
-
create_table "oauth_openid_requests", force: :cascade do |t|
t.integer "access_grant_id", null: false
t.string "nonce", null: false
@@ -1558,14 +1448,13 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.datetime_with_timezone "verified_at"
t.string "verification_code", null: false
t.datetime_with_timezone "enabled_until"
+ t.index ["domain"], name: "index_pages_domains_on_domain", unique: true, using: :btree
+ t.index ["project_id", "enabled_until"], name: "index_pages_domains_on_project_id_and_enabled_until", using: :btree
+ t.index ["project_id"], name: "index_pages_domains_on_project_id", using: :btree
+ t.index ["verified_at", "enabled_until"], name: "index_pages_domains_on_verified_at_and_enabled_until", using: :btree
+ t.index ["verified_at"], name: "index_pages_domains_on_verified_at", using: :btree
end
- add_index "pages_domains", ["domain"], name: "index_pages_domains_on_domain", unique: true, using: :btree
- add_index "pages_domains", ["project_id", "enabled_until"], name: "index_pages_domains_on_project_id_and_enabled_until", using: :btree
- add_index "pages_domains", ["project_id"], name: "index_pages_domains_on_project_id", using: :btree
- add_index "pages_domains", ["verified_at", "enabled_until"], name: "index_pages_domains_on_verified_at_and_enabled_until", using: :btree
- add_index "pages_domains", ["verified_at"], name: "index_pages_domains_on_verified_at", using: :btree
-
create_table "personal_access_tokens", force: :cascade do |t|
t.integer "user_id", null: false
t.string "token"
@@ -1577,29 +1466,26 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.string "scopes", default: "--- []\n", null: false
t.boolean "impersonation", default: false, null: false
t.string "token_digest"
+ t.index ["token"], name: "index_personal_access_tokens_on_token", unique: true, using: :btree
+ t.index ["token_digest"], name: "index_personal_access_tokens_on_token_digest", unique: true, using: :btree
+ t.index ["user_id"], name: "index_personal_access_tokens_on_user_id", using: :btree
end
- add_index "personal_access_tokens", ["token"], name: "index_personal_access_tokens_on_token", unique: true, using: :btree
- add_index "personal_access_tokens", ["token_digest"], name: "index_personal_access_tokens_on_token_digest", unique: true, using: :btree
- add_index "personal_access_tokens", ["user_id"], name: "index_personal_access_tokens_on_user_id", using: :btree
-
create_table "programming_languages", force: :cascade do |t|
t.string "name", null: false
t.string "color", null: false
t.datetime_with_timezone "created_at", null: false
+ t.index ["name"], name: "index_programming_languages_on_name", unique: true, using: :btree
end
- add_index "programming_languages", ["name"], name: "index_programming_languages_on_name", unique: true, using: :btree
-
create_table "project_authorizations", id: false, force: :cascade do |t|
t.integer "user_id", null: false
t.integer "project_id", null: false
t.integer "access_level", null: false
+ t.index ["project_id"], name: "index_project_authorizations_on_project_id", using: :btree
+ t.index ["user_id", "project_id", "access_level"], name: "index_project_authorizations_on_user_id_project_id_access_level", unique: true, using: :btree
end
- add_index "project_authorizations", ["project_id"], name: "index_project_authorizations_on_project_id", using: :btree
- add_index "project_authorizations", ["user_id", "project_id", "access_level"], name: "index_project_authorizations_on_user_id_project_id_access_level", unique: true, using: :btree
-
create_table "project_auto_devops", force: :cascade do |t|
t.integer "project_id", null: false
t.datetime_with_timezone "created_at", null: false
@@ -1607,37 +1493,33 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.boolean "enabled"
t.string "domain"
t.integer "deploy_strategy", default: 0, null: false
+ t.index ["project_id"], name: "index_project_auto_devops_on_project_id", unique: true, using: :btree
end
- add_index "project_auto_devops", ["project_id"], name: "index_project_auto_devops_on_project_id", unique: true, using: :btree
-
create_table "project_ci_cd_settings", force: :cascade do |t|
t.integer "project_id", null: false
t.boolean "group_runners_enabled", default: true, null: false
+ t.index ["project_id"], name: "index_project_ci_cd_settings_on_project_id", unique: true, using: :btree
end
- add_index "project_ci_cd_settings", ["project_id"], name: "index_project_ci_cd_settings_on_project_id", unique: true, using: :btree
-
create_table "project_custom_attributes", force: :cascade do |t|
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
t.integer "project_id", null: false
t.string "key", null: false
t.string "value", null: false
+ t.index ["key", "value"], name: "index_project_custom_attributes_on_key_and_value", using: :btree
+ t.index ["project_id", "key"], name: "index_project_custom_attributes_on_project_id_and_key", unique: true, using: :btree
end
- add_index "project_custom_attributes", ["key", "value"], name: "index_project_custom_attributes_on_key_and_value", using: :btree
- add_index "project_custom_attributes", ["project_id", "key"], name: "index_project_custom_attributes_on_project_id_and_key", unique: true, using: :btree
-
create_table "project_deploy_tokens", force: :cascade do |t|
t.integer "project_id", null: false
t.integer "deploy_token_id", null: false
t.datetime_with_timezone "created_at", null: false
+ t.index ["deploy_token_id"], name: "index_project_deploy_tokens_on_deploy_token_id", using: :btree
+ t.index ["project_id", "deploy_token_id"], name: "index_project_deploy_tokens_on_project_id_and_deploy_token_id", unique: true, using: :btree
end
- add_index "project_deploy_tokens", ["deploy_token_id"], name: "index_project_deploy_tokens_on_deploy_token_id", using: :btree
- add_index "project_deploy_tokens", ["project_id", "deploy_token_id"], name: "index_project_deploy_tokens_on_project_id_and_deploy_token_id", unique: true, using: :btree
-
create_table "project_features", force: :cascade do |t|
t.integer "project_id", null: false
t.integer "merge_requests_access_level"
@@ -1649,10 +1531,9 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.datetime "updated_at"
t.integer "repository_access_level", default: 20, null: false
t.integer "pages_access_level", default: 20, null: false
+ t.index ["project_id"], name: "index_project_features_on_project_id", unique: true, using: :btree
end
- add_index "project_features", ["project_id"], name: "index_project_features_on_project_id", unique: true, using: :btree
-
create_table "project_group_links", force: :cascade do |t|
t.integer "project_id", null: false
t.integer "group_id", null: false
@@ -1660,45 +1541,41 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.datetime "updated_at"
t.integer "group_access", default: 30, null: false
t.date "expires_at"
+ t.index ["group_id"], name: "index_project_group_links_on_group_id", using: :btree
+ t.index ["project_id"], name: "index_project_group_links_on_project_id", using: :btree
end
- add_index "project_group_links", ["group_id"], name: "index_project_group_links_on_group_id", using: :btree
- add_index "project_group_links", ["project_id"], name: "index_project_group_links_on_project_id", using: :btree
-
create_table "project_import_data", force: :cascade do |t|
t.integer "project_id"
t.text "data"
t.text "encrypted_credentials"
t.string "encrypted_credentials_iv"
t.string "encrypted_credentials_salt"
+ t.index ["project_id"], name: "index_project_import_data_on_project_id", using: :btree
end
- add_index "project_import_data", ["project_id"], name: "index_project_import_data_on_project_id", using: :btree
-
create_table "project_mirror_data", force: :cascade do |t|
t.integer "project_id", null: false
t.string "status"
t.string "jid"
t.text "last_error"
+ t.index ["jid"], name: "index_project_mirror_data_on_jid", using: :btree
+ t.index ["project_id"], name: "index_project_mirror_data_on_project_id", unique: true, using: :btree
+ t.index ["status"], name: "index_project_mirror_data_on_status", using: :btree
end
- add_index "project_mirror_data", ["jid"], name: "index_project_mirror_data_on_jid", using: :btree
- add_index "project_mirror_data", ["project_id"], name: "index_project_mirror_data_on_project_id", unique: true, using: :btree
- add_index "project_mirror_data", ["status"], name: "index_project_mirror_data_on_status", using: :btree
-
create_table "project_statistics", force: :cascade do |t|
t.integer "project_id", null: false
t.integer "namespace_id", null: false
- t.integer "commit_count", limit: 8, default: 0, null: false
- t.integer "storage_size", limit: 8, default: 0, null: false
- t.integer "repository_size", limit: 8, default: 0, null: false
- t.integer "lfs_objects_size", limit: 8, default: 0, null: false
- t.integer "build_artifacts_size", limit: 8, default: 0, null: false
+ t.bigint "commit_count", default: 0, null: false
+ t.bigint "storage_size", default: 0, null: false
+ t.bigint "repository_size", default: 0, null: false
+ t.bigint "lfs_objects_size", default: 0, null: false
+ t.bigint "build_artifacts_size", default: 0, null: false
+ t.index ["namespace_id"], name: "index_project_statistics_on_namespace_id", using: :btree
+ t.index ["project_id"], name: "index_project_statistics_on_project_id", unique: true, using: :btree
end
- add_index "project_statistics", ["namespace_id"], name: "index_project_statistics_on_namespace_id", using: :btree
- add_index "project_statistics", ["project_id"], name: "index_project_statistics_on_project_id", unique: true, using: :btree
-
create_table "projects", force: :cascade do |t|
t.string "name"
t.string "path"
@@ -1751,29 +1628,28 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.integer "jobs_cache_index"
t.boolean "pages_https_only", default: true
t.boolean "remote_mirror_available_overridden"
- t.integer "pool_repository_id", limit: 8
- end
-
- add_index "projects", ["ci_id"], name: "index_projects_on_ci_id", using: :btree
- add_index "projects", ["created_at"], name: "index_projects_on_created_at", using: :btree
- add_index "projects", ["creator_id"], name: "index_projects_on_creator_id", using: :btree
- add_index "projects", ["description"], name: "index_projects_on_description_trigram", using: :gin, opclasses: {"description"=>"gin_trgm_ops"}
- add_index "projects", ["id"], name: "index_projects_on_id_partial_for_visibility", unique: true, where: "(visibility_level = ANY (ARRAY[10, 20]))", using: :btree
- add_index "projects", ["last_activity_at"], name: "index_projects_on_last_activity_at", using: :btree
- add_index "projects", ["last_repository_check_at"], name: "index_projects_on_last_repository_check_at", where: "(last_repository_check_at IS NOT NULL)", using: :btree
- add_index "projects", ["last_repository_check_failed"], name: "index_projects_on_last_repository_check_failed", using: :btree
- add_index "projects", ["last_repository_updated_at"], name: "index_projects_on_last_repository_updated_at", using: :btree
- add_index "projects", ["name"], name: "index_projects_on_name_trigram", using: :gin, opclasses: {"name"=>"gin_trgm_ops"}
- add_index "projects", ["namespace_id"], name: "index_projects_on_namespace_id", using: :btree
- add_index "projects", ["path"], name: "index_projects_on_path", using: :btree
- add_index "projects", ["path"], name: "index_projects_on_path_trigram", using: :gin, opclasses: {"path"=>"gin_trgm_ops"}
- add_index "projects", ["pending_delete"], name: "index_projects_on_pending_delete", using: :btree
- add_index "projects", ["pool_repository_id"], name: "index_projects_on_pool_repository_id", where: "(pool_repository_id IS NOT NULL)", using: :btree
- add_index "projects", ["repository_storage", "created_at"], name: "idx_project_repository_check_partial", where: "(last_repository_check_at IS NULL)", using: :btree
- add_index "projects", ["repository_storage"], name: "index_projects_on_repository_storage", using: :btree
- add_index "projects", ["runners_token"], name: "index_projects_on_runners_token", using: :btree
- add_index "projects", ["star_count"], name: "index_projects_on_star_count", using: :btree
- add_index "projects", ["visibility_level"], name: "index_projects_on_visibility_level", using: :btree
+ t.bigint "pool_repository_id"
+ t.index ["ci_id"], name: "index_projects_on_ci_id", using: :btree
+ t.index ["created_at"], name: "index_projects_on_created_at", using: :btree
+ t.index ["creator_id"], name: "index_projects_on_creator_id", using: :btree
+ t.index ["description"], name: "index_projects_on_description_trigram", using: :gin, opclasses: {"description"=>"gin_trgm_ops"}
+ t.index ["id"], name: "index_projects_on_id_partial_for_visibility", unique: true, where: "(visibility_level = ANY (ARRAY[10, 20]))", using: :btree
+ t.index ["last_activity_at"], name: "index_projects_on_last_activity_at", using: :btree
+ t.index ["last_repository_check_at"], name: "index_projects_on_last_repository_check_at", where: "(last_repository_check_at IS NOT NULL)", using: :btree
+ t.index ["last_repository_check_failed"], name: "index_projects_on_last_repository_check_failed", using: :btree
+ t.index ["last_repository_updated_at"], name: "index_projects_on_last_repository_updated_at", using: :btree
+ t.index ["name"], name: "index_projects_on_name_trigram", using: :gin, opclasses: {"name"=>"gin_trgm_ops"}
+ t.index ["namespace_id"], name: "index_projects_on_namespace_id", using: :btree
+ t.index ["path"], name: "index_projects_on_path", using: :btree
+ t.index ["path"], name: "index_projects_on_path_trigram", using: :gin, opclasses: {"path"=>"gin_trgm_ops"}
+ t.index ["pending_delete"], name: "index_projects_on_pending_delete", using: :btree
+ t.index ["pool_repository_id"], name: "index_projects_on_pool_repository_id", where: "(pool_repository_id IS NOT NULL)", using: :btree
+ t.index ["repository_storage", "created_at"], name: "idx_project_repository_check_partial", where: "(last_repository_check_at IS NULL)", using: :btree
+ t.index ["repository_storage"], name: "index_projects_on_repository_storage", using: :btree
+ t.index ["runners_token"], name: "index_projects_on_runners_token", using: :btree
+ t.index ["star_count"], name: "index_projects_on_star_count", using: :btree
+ t.index ["visibility_level"], name: "index_projects_on_visibility_level", using: :btree
+ end
create_table "prometheus_metrics", force: :cascade do |t|
t.integer "project_id"
@@ -1787,40 +1663,36 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.datetime_with_timezone "updated_at", null: false
t.boolean "common", default: false, null: false
t.string "identifier"
+ t.index ["common"], name: "index_prometheus_metrics_on_common", using: :btree
+ t.index ["group"], name: "index_prometheus_metrics_on_group", using: :btree
+ t.index ["identifier"], name: "index_prometheus_metrics_on_identifier", unique: true, using: :btree
+ t.index ["project_id"], name: "index_prometheus_metrics_on_project_id", using: :btree
end
- add_index "prometheus_metrics", ["common"], name: "index_prometheus_metrics_on_common", using: :btree
- add_index "prometheus_metrics", ["group"], name: "index_prometheus_metrics_on_group", using: :btree
- add_index "prometheus_metrics", ["identifier"], name: "index_prometheus_metrics_on_identifier", unique: true, using: :btree
- add_index "prometheus_metrics", ["project_id"], name: "index_prometheus_metrics_on_project_id", using: :btree
-
create_table "protected_branch_merge_access_levels", force: :cascade do |t|
t.integer "protected_branch_id", null: false
t.integer "access_level", default: 40, null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
+ t.index ["protected_branch_id"], name: "index_protected_branch_merge_access", using: :btree
end
- add_index "protected_branch_merge_access_levels", ["protected_branch_id"], name: "index_protected_branch_merge_access", using: :btree
-
create_table "protected_branch_push_access_levels", force: :cascade do |t|
t.integer "protected_branch_id", null: false
t.integer "access_level", default: 40, null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
+ t.index ["protected_branch_id"], name: "index_protected_branch_push_access", using: :btree
end
- add_index "protected_branch_push_access_levels", ["protected_branch_id"], name: "index_protected_branch_push_access", using: :btree
-
create_table "protected_branches", force: :cascade do |t|
t.integer "project_id", null: false
t.string "name", null: false
t.datetime "created_at"
t.datetime "updated_at"
+ t.index ["project_id"], name: "index_protected_branches_on_project_id", using: :btree
end
- add_index "protected_branches", ["project_id"], name: "index_protected_branches_on_project_id", using: :btree
-
create_table "protected_tag_create_access_levels", force: :cascade do |t|
t.integer "protected_tag_id", null: false
t.integer "access_level", default: 40
@@ -1828,23 +1700,21 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.integer "group_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
+ t.index ["protected_tag_id"], name: "index_protected_tag_create_access", using: :btree
+ t.index ["user_id"], name: "index_protected_tag_create_access_levels_on_user_id", using: :btree
end
- add_index "protected_tag_create_access_levels", ["protected_tag_id"], name: "index_protected_tag_create_access", using: :btree
- add_index "protected_tag_create_access_levels", ["user_id"], name: "index_protected_tag_create_access_levels_on_user_id", using: :btree
-
create_table "protected_tags", force: :cascade do |t|
t.integer "project_id", null: false
t.string "name", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
+ t.index ["project_id", "name"], name: "index_protected_tags_on_project_id_and_name", unique: true, using: :btree
+ t.index ["project_id"], name: "index_protected_tags_on_project_id", using: :btree
end
- add_index "protected_tags", ["project_id", "name"], name: "index_protected_tags_on_project_id_and_name", unique: true, using: :btree
- add_index "protected_tags", ["project_id"], name: "index_protected_tags_on_project_id", using: :btree
-
create_table "push_event_payloads", id: false, force: :cascade do |t|
- t.integer "commit_count", limit: 8, null: false
+ t.bigint "commit_count", null: false
t.integer "event_id", null: false
t.integer "action", limit: 2, null: false
t.integer "ref_type", limit: 2, null: false
@@ -1852,21 +1722,19 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.binary "commit_to"
t.text "ref"
t.string "commit_title", limit: 70
+ t.index ["event_id"], name: "index_push_event_payloads_on_event_id", unique: true, using: :btree
end
- add_index "push_event_payloads", ["event_id"], name: "index_push_event_payloads_on_event_id", unique: true, using: :btree
-
create_table "redirect_routes", force: :cascade do |t|
t.integer "source_id", null: false
t.string "source_type", null: false
t.string "path", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
+ t.index ["path"], name: "index_redirect_routes_on_path", unique: true, using: :btree
+ t.index ["source_type", "source_id"], name: "index_redirect_routes_on_source_type_and_source_id", using: :btree
end
- add_index "redirect_routes", ["path"], name: "index_redirect_routes_on_path", unique: true, using: :btree
- add_index "redirect_routes", ["source_type", "source_id"], name: "index_redirect_routes_on_source_type_and_source_id", using: :btree
-
create_table "releases", force: :cascade do |t|
t.string "tag"
t.text "description"
@@ -1875,11 +1743,10 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.datetime "updated_at"
t.text "description_html"
t.integer "cached_markdown_version"
+ t.index ["project_id", "tag"], name: "index_releases_on_project_id_and_tag", using: :btree
+ t.index ["project_id"], name: "index_releases_on_project_id", using: :btree
end
- add_index "releases", ["project_id", "tag"], name: "index_releases_on_project_id_and_tag", using: :btree
- add_index "releases", ["project_id"], name: "index_releases_on_project_id", using: :btree
-
create_table "remote_mirrors", force: :cascade do |t|
t.integer "project_id"
t.string "url"
@@ -1896,27 +1763,24 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.string "encrypted_credentials_salt"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
+ t.index ["last_successful_update_at"], name: "index_remote_mirrors_on_last_successful_update_at", using: :btree
+ t.index ["project_id"], name: "index_remote_mirrors_on_project_id", using: :btree
end
- add_index "remote_mirrors", ["last_successful_update_at"], name: "index_remote_mirrors_on_last_successful_update_at", using: :btree
- add_index "remote_mirrors", ["project_id"], name: "index_remote_mirrors_on_project_id", using: :btree
-
create_table "repositories", id: :bigserial, force: :cascade do |t|
t.integer "shard_id", null: false
t.string "disk_path", null: false
+ t.index ["disk_path"], name: "index_repositories_on_disk_path", unique: true, using: :btree
+ t.index ["shard_id"], name: "index_repositories_on_shard_id", using: :btree
end
- add_index "repositories", ["disk_path"], name: "index_repositories_on_disk_path", unique: true, using: :btree
- add_index "repositories", ["shard_id"], name: "index_repositories_on_shard_id", using: :btree
-
create_table "repository_languages", id: false, force: :cascade do |t|
t.integer "project_id", null: false
t.integer "programming_language_id", null: false
t.float "share", null: false
+ t.index ["project_id", "programming_language_id"], name: "index_repository_languages_on_project_and_languages_id", unique: true, using: :btree
end
- add_index "repository_languages", ["project_id", "programming_language_id"], name: "index_repository_languages_on_project_and_languages_id", unique: true, using: :btree
-
create_table "resource_label_events", id: :bigserial, force: :cascade do |t|
t.integer "action", null: false
t.integer "issue_id"
@@ -1927,13 +1791,12 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.integer "cached_markdown_version"
t.text "reference"
t.text "reference_html"
+ t.index ["issue_id"], name: "index_resource_label_events_on_issue_id", using: :btree
+ t.index ["label_id"], name: "index_resource_label_events_on_label_id", using: :btree
+ t.index ["merge_request_id"], name: "index_resource_label_events_on_merge_request_id", using: :btree
+ t.index ["user_id"], name: "index_resource_label_events_on_user_id", using: :btree
end
- add_index "resource_label_events", ["issue_id"], name: "index_resource_label_events_on_issue_id", using: :btree
- add_index "resource_label_events", ["label_id"], name: "index_resource_label_events_on_label_id", using: :btree
- add_index "resource_label_events", ["merge_request_id"], name: "index_resource_label_events_on_merge_request_id", using: :btree
- add_index "resource_label_events", ["user_id"], name: "index_resource_label_events_on_user_id", using: :btree
-
create_table "routes", force: :cascade do |t|
t.integer "source_id", null: false
t.string "source_type", null: false
@@ -1941,12 +1804,11 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.datetime "created_at"
t.datetime "updated_at"
t.string "name"
+ t.index ["path"], name: "index_routes_on_path", unique: true, using: :btree
+ t.index ["path"], name: "index_routes_on_path_text_pattern_ops", using: :btree, opclasses: {"path"=>"varchar_pattern_ops"}
+ t.index ["source_type", "source_id"], name: "index_routes_on_source_type_and_source_id", unique: true, using: :btree
end
- add_index "routes", ["path"], name: "index_routes_on_path", unique: true, using: :btree
- add_index "routes", ["path"], name: "index_routes_on_path_text_pattern_ops", using: :btree, opclasses: {"path"=>"varchar_pattern_ops"}
- add_index "routes", ["source_type", "source_id"], name: "index_routes_on_source_type_and_source_id", unique: true, using: :btree
-
create_table "sent_notifications", force: :cascade do |t|
t.integer "project_id"
t.integer "noteable_id"
@@ -1958,10 +1820,9 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.string "note_type"
t.text "position"
t.string "in_reply_to_discussion_id"
+ t.index ["reply_key"], name: "index_sent_notifications_on_reply_key", unique: true, using: :btree
end
- add_index "sent_notifications", ["reply_key"], name: "index_sent_notifications_on_reply_key", unique: true, using: :btree
-
create_table "services", force: :cascade do |t|
t.string "type"
t.string "title"
@@ -1984,17 +1845,15 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.boolean "commit_events", default: true, null: false
t.boolean "job_events", default: false, null: false
t.boolean "confidential_note_events", default: true
+ t.index ["project_id"], name: "index_services_on_project_id", using: :btree
+ t.index ["template"], name: "index_services_on_template", using: :btree
end
- add_index "services", ["project_id"], name: "index_services_on_project_id", using: :btree
- add_index "services", ["template"], name: "index_services_on_template", using: :btree
-
create_table "shards", force: :cascade do |t|
t.string "name", null: false
+ t.index ["name"], name: "index_shards_on_name", unique: true, using: :btree
end
- add_index "shards", ["name"], name: "index_shards_on_name", unique: true, using: :btree
-
create_table "site_statistics", force: :cascade do |t|
t.integer "repositories_count", default: 0, null: false
end
@@ -2014,15 +1873,14 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.integer "cached_markdown_version"
t.text "description"
t.text "description_html"
+ t.index ["author_id"], name: "index_snippets_on_author_id", using: :btree
+ t.index ["file_name"], name: "index_snippets_on_file_name_trigram", using: :gin, opclasses: {"file_name"=>"gin_trgm_ops"}
+ t.index ["project_id"], name: "index_snippets_on_project_id", using: :btree
+ t.index ["title"], name: "index_snippets_on_title_trigram", using: :gin, opclasses: {"title"=>"gin_trgm_ops"}
+ t.index ["updated_at"], name: "index_snippets_on_updated_at", using: :btree
+ t.index ["visibility_level"], name: "index_snippets_on_visibility_level", using: :btree
end
- add_index "snippets", ["author_id"], name: "index_snippets_on_author_id", using: :btree
- add_index "snippets", ["file_name"], name: "index_snippets_on_file_name_trigram", using: :gin, opclasses: {"file_name"=>"gin_trgm_ops"}
- add_index "snippets", ["project_id"], name: "index_snippets_on_project_id", using: :btree
- add_index "snippets", ["title"], name: "index_snippets_on_title_trigram", using: :gin, opclasses: {"title"=>"gin_trgm_ops"}
- add_index "snippets", ["updated_at"], name: "index_snippets_on_updated_at", using: :btree
- add_index "snippets", ["visibility_level"], name: "index_snippets_on_visibility_level", using: :btree
-
create_table "spam_logs", force: :cascade do |t|
t.integer "user_id"
t.string "source_ip"
@@ -2045,20 +1903,18 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.datetime "created_at"
t.datetime "updated_at"
t.integer "project_id"
+ t.index ["subscribable_id", "subscribable_type", "user_id", "project_id"], name: "index_subscriptions_on_subscribable_and_user_id_and_project_id", unique: true, using: :btree
end
- add_index "subscriptions", ["subscribable_id", "subscribable_type", "user_id", "project_id"], name: "index_subscriptions_on_subscribable_and_user_id_and_project_id", unique: true, using: :btree
-
create_table "system_note_metadata", force: :cascade do |t|
t.integer "note_id", null: false
t.integer "commit_count"
t.string "action"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
+ t.index ["note_id"], name: "index_system_note_metadata_on_note_id", unique: true, using: :btree
end
- add_index "system_note_metadata", ["note_id"], name: "index_system_note_metadata_on_note_id", unique: true, using: :btree
-
create_table "taggings", force: :cascade do |t|
t.integer "tag_id"
t.integer "taggable_id"
@@ -2067,32 +1923,29 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.string "tagger_type"
t.string "context"
t.datetime "created_at"
+ t.index ["tag_id", "taggable_id", "taggable_type", "context", "tagger_id", "tagger_type"], name: "taggings_idx", unique: true, using: :btree
+ t.index ["tag_id"], name: "index_taggings_on_tag_id", using: :btree
+ t.index ["taggable_id", "taggable_type", "context"], name: "index_taggings_on_taggable_id_and_taggable_type_and_context", using: :btree
+ t.index ["taggable_id", "taggable_type"], name: "index_taggings_on_taggable_id_and_taggable_type", using: :btree
end
- add_index "taggings", ["tag_id", "taggable_id", "taggable_type", "context", "tagger_id", "tagger_type"], name: "taggings_idx", unique: true, using: :btree
- add_index "taggings", ["tag_id"], name: "index_taggings_on_tag_id", using: :btree
- add_index "taggings", ["taggable_id", "taggable_type", "context"], name: "index_taggings_on_taggable_id_and_taggable_type_and_context", using: :btree
- add_index "taggings", ["taggable_id", "taggable_type"], name: "index_taggings_on_taggable_id_and_taggable_type", using: :btree
-
create_table "tags", force: :cascade do |t|
t.string "name"
t.integer "taggings_count", default: 0
+ t.index ["name"], name: "index_tags_on_name", unique: true, using: :btree
end
- add_index "tags", ["name"], name: "index_tags_on_name", unique: true, using: :btree
-
create_table "term_agreements", force: :cascade do |t|
t.integer "term_id", null: false
t.integer "user_id", null: false
t.boolean "accepted", default: false, null: false
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
+ t.index ["term_id"], name: "index_term_agreements_on_term_id", using: :btree
+ t.index ["user_id", "term_id"], name: "term_agreements_unique_index", unique: true, using: :btree
+ t.index ["user_id"], name: "index_term_agreements_on_user_id", using: :btree
end
- add_index "term_agreements", ["term_id"], name: "index_term_agreements_on_term_id", using: :btree
- add_index "term_agreements", ["user_id", "term_id"], name: "term_agreements_unique_index", unique: true, using: :btree
- add_index "term_agreements", ["user_id"], name: "index_term_agreements_on_user_id", using: :btree
-
create_table "timelogs", force: :cascade do |t|
t.integer "time_spent", null: false
t.integer "user_id"
@@ -2101,12 +1954,11 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.integer "issue_id"
t.integer "merge_request_id"
t.datetime_with_timezone "spent_at"
+ t.index ["issue_id"], name: "index_timelogs_on_issue_id", using: :btree
+ t.index ["merge_request_id"], name: "index_timelogs_on_merge_request_id", using: :btree
+ t.index ["user_id"], name: "index_timelogs_on_user_id", using: :btree
end
- add_index "timelogs", ["issue_id"], name: "index_timelogs_on_issue_id", using: :btree
- add_index "timelogs", ["merge_request_id"], name: "index_timelogs_on_merge_request_id", using: :btree
- add_index "timelogs", ["user_id"], name: "index_timelogs_on_user_id", using: :btree
-
create_table "todos", force: :cascade do |t|
t.integer "user_id", null: false
t.integer "project_id"
@@ -2120,24 +1972,22 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.integer "note_id"
t.string "commit_id"
t.integer "group_id"
+ t.index ["author_id"], name: "index_todos_on_author_id", using: :btree
+ t.index ["commit_id"], name: "index_todos_on_commit_id", using: :btree
+ t.index ["group_id"], name: "index_todos_on_group_id", using: :btree
+ t.index ["note_id"], name: "index_todos_on_note_id", using: :btree
+ t.index ["project_id"], name: "index_todos_on_project_id", using: :btree
+ t.index ["target_type", "target_id"], name: "index_todos_on_target_type_and_target_id", using: :btree
+ t.index ["user_id", "id"], name: "index_todos_on_user_id_and_id_done", where: "((state)::text = 'done'::text)", using: :btree
+ t.index ["user_id", "id"], name: "index_todos_on_user_id_and_id_pending", where: "((state)::text = 'pending'::text)", using: :btree
+ t.index ["user_id"], name: "index_todos_on_user_id", using: :btree
end
- add_index "todos", ["author_id"], name: "index_todos_on_author_id", using: :btree
- add_index "todos", ["commit_id"], name: "index_todos_on_commit_id", using: :btree
- add_index "todos", ["group_id"], name: "index_todos_on_group_id", using: :btree
- add_index "todos", ["note_id"], name: "index_todos_on_note_id", using: :btree
- add_index "todos", ["project_id"], name: "index_todos_on_project_id", using: :btree
- add_index "todos", ["target_type", "target_id"], name: "index_todos_on_target_type_and_target_id", using: :btree
- add_index "todos", ["user_id", "id"], name: "index_todos_on_user_id_and_id_done", where: "((state)::text = 'done'::text)", using: :btree
- add_index "todos", ["user_id", "id"], name: "index_todos_on_user_id_and_id_pending", where: "((state)::text = 'pending'::text)", using: :btree
- add_index "todos", ["user_id"], name: "index_todos_on_user_id", using: :btree
-
create_table "trending_projects", force: :cascade do |t|
t.integer "project_id", null: false
+ t.index ["project_id"], name: "index_trending_projects_on_project_id", unique: true, using: :btree
end
- add_index "trending_projects", ["project_id"], name: "index_trending_projects_on_project_id", unique: true, using: :btree
-
create_table "u2f_registrations", force: :cascade do |t|
t.text "certificate"
t.string "key_handle"
@@ -2147,13 +1997,12 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.string "name"
+ t.index ["key_handle"], name: "index_u2f_registrations_on_key_handle", using: :btree
+ t.index ["user_id"], name: "index_u2f_registrations_on_user_id", using: :btree
end
- add_index "u2f_registrations", ["key_handle"], name: "index_u2f_registrations_on_key_handle", using: :btree
- add_index "u2f_registrations", ["user_id"], name: "index_u2f_registrations_on_user_id", using: :btree
-
create_table "uploads", force: :cascade do |t|
- t.integer "size", limit: 8, null: false
+ t.bigint "size", null: false
t.string "path", limit: 511, null: false
t.string "checksum", limit: 64
t.integer "model_id"
@@ -2163,13 +2012,12 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.string "mount_point"
t.string "secret"
t.integer "store"
+ t.index ["checksum"], name: "index_uploads_on_checksum", using: :btree
+ t.index ["model_id", "model_type"], name: "index_uploads_on_model_id_and_model_type", using: :btree
+ t.index ["store"], name: "index_uploads_on_store", using: :btree
+ t.index ["uploader", "path"], name: "index_uploads_on_uploader_and_path", using: :btree
end
- add_index "uploads", ["checksum"], name: "index_uploads_on_checksum", using: :btree
- add_index "uploads", ["model_id", "model_type"], name: "index_uploads_on_model_id_and_model_type", using: :btree
- add_index "uploads", ["store"], name: "index_uploads_on_store", using: :btree
- add_index "uploads", ["uploader", "path"], name: "index_uploads_on_uploader_and_path", using: :btree
-
create_table "user_agent_details", force: :cascade do |t|
t.string "user_agent", null: false
t.string "ip_address", null: false
@@ -2178,47 +2026,42 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.boolean "submitted", default: false, null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
+ t.index ["subject_id", "subject_type"], name: "index_user_agent_details_on_subject_id_and_subject_type", using: :btree
end
- add_index "user_agent_details", ["subject_id", "subject_type"], name: "index_user_agent_details_on_subject_id_and_subject_type", using: :btree
-
create_table "user_callouts", force: :cascade do |t|
t.integer "feature_name", null: false
t.integer "user_id", null: false
+ t.index ["user_id", "feature_name"], name: "index_user_callouts_on_user_id_and_feature_name", unique: true, using: :btree
+ t.index ["user_id"], name: "index_user_callouts_on_user_id", using: :btree
end
- add_index "user_callouts", ["user_id", "feature_name"], name: "index_user_callouts_on_user_id_and_feature_name", unique: true, using: :btree
- add_index "user_callouts", ["user_id"], name: "index_user_callouts_on_user_id", using: :btree
-
create_table "user_custom_attributes", force: :cascade do |t|
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
t.integer "user_id", null: false
t.string "key", null: false
t.string "value", null: false
+ t.index ["key", "value"], name: "index_user_custom_attributes_on_key_and_value", using: :btree
+ t.index ["user_id", "key"], name: "index_user_custom_attributes_on_user_id_and_key", unique: true, using: :btree
end
- add_index "user_custom_attributes", ["key", "value"], name: "index_user_custom_attributes_on_key_and_value", using: :btree
- add_index "user_custom_attributes", ["user_id", "key"], name: "index_user_custom_attributes_on_user_id_and_key", unique: true, using: :btree
-
create_table "user_interacted_projects", id: false, force: :cascade do |t|
t.integer "user_id", null: false
t.integer "project_id", null: false
+ t.index ["project_id", "user_id"], name: "index_user_interacted_projects_on_project_id_and_user_id", unique: true, using: :btree
+ t.index ["user_id"], name: "index_user_interacted_projects_on_user_id", using: :btree
end
- add_index "user_interacted_projects", ["project_id", "user_id"], name: "index_user_interacted_projects_on_project_id_and_user_id", unique: true, using: :btree
- add_index "user_interacted_projects", ["user_id"], name: "index_user_interacted_projects_on_user_id", using: :btree
-
create_table "user_preferences", force: :cascade do |t|
t.integer "user_id", null: false
t.integer "issue_notes_filter", limit: 2, default: 0, null: false
t.integer "merge_request_notes_filter", limit: 2, default: 0, null: false
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
+ t.index ["user_id"], name: "index_user_preferences_on_user_id", unique: true, using: :btree
end
- add_index "user_preferences", ["user_id"], name: "index_user_preferences_on_user_id", unique: true, using: :btree
-
create_table "user_statuses", primary_key: "user_id", force: :cascade do |t|
t.integer "cached_markdown_version"
t.string "emoji", default: "speech_balloon", null: false
@@ -2232,10 +2075,9 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.boolean "location_synced", default: false
t.integer "user_id", null: false
t.string "provider"
+ t.index ["user_id"], name: "index_user_synced_attributes_metadata_on_user_id", unique: true, using: :btree
end
- add_index "user_synced_attributes_metadata", ["user_id"], name: "index_user_synced_attributes_metadata_on_user_id", unique: true, using: :btree
-
create_table "users", force: :cascade do |t|
t.string "email", default: "", null: false
t.string "encrypted_password", default: "", null: false
@@ -2305,33 +2147,31 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.boolean "private_profile"
t.boolean "include_private_contributions"
t.string "commit_email"
+ t.index ["admin"], name: "index_users_on_admin", using: :btree
+ t.index ["confirmation_token"], name: "index_users_on_confirmation_token", unique: true, using: :btree
+ t.index ["created_at"], name: "index_users_on_created_at", using: :btree
+ t.index ["email"], name: "index_users_on_email", unique: true, using: :btree
+ t.index ["email"], name: "index_users_on_email_trigram", using: :gin, opclasses: {"email"=>"gin_trgm_ops"}
+ t.index ["feed_token"], name: "index_users_on_feed_token", using: :btree
+ t.index ["ghost"], name: "index_users_on_ghost", using: :btree
+ t.index ["incoming_email_token"], name: "index_users_on_incoming_email_token", using: :btree
+ t.index ["name"], name: "index_users_on_name", using: :btree
+ t.index ["name"], name: "index_users_on_name_trigram", using: :gin, opclasses: {"name"=>"gin_trgm_ops"}
+ t.index ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true, using: :btree
+ t.index ["state"], name: "index_users_on_state", using: :btree
+ t.index ["username"], name: "index_users_on_username", using: :btree
+ t.index ["username"], name: "index_users_on_username_trigram", using: :gin, opclasses: {"username"=>"gin_trgm_ops"}
end
- add_index "users", ["admin"], name: "index_users_on_admin", using: :btree
- add_index "users", ["confirmation_token"], name: "index_users_on_confirmation_token", unique: true, using: :btree
- add_index "users", ["created_at"], name: "index_users_on_created_at", using: :btree
- add_index "users", ["email"], name: "index_users_on_email", unique: true, using: :btree
- add_index "users", ["email"], name: "index_users_on_email_trigram", using: :gin, opclasses: {"email"=>"gin_trgm_ops"}
- add_index "users", ["feed_token"], name: "index_users_on_feed_token", using: :btree
- add_index "users", ["ghost"], name: "index_users_on_ghost", using: :btree
- add_index "users", ["incoming_email_token"], name: "index_users_on_incoming_email_token", using: :btree
- add_index "users", ["name"], name: "index_users_on_name", using: :btree
- add_index "users", ["name"], name: "index_users_on_name_trigram", using: :gin, opclasses: {"name"=>"gin_trgm_ops"}
- add_index "users", ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true, using: :btree
- add_index "users", ["state"], name: "index_users_on_state", using: :btree
- add_index "users", ["username"], name: "index_users_on_username", using: :btree
- add_index "users", ["username"], name: "index_users_on_username_trigram", using: :gin, opclasses: {"username"=>"gin_trgm_ops"}
-
create_table "users_star_projects", force: :cascade do |t|
t.integer "project_id", null: false
t.integer "user_id", null: false
t.datetime "created_at"
t.datetime "updated_at"
+ t.index ["project_id"], name: "index_users_star_projects_on_project_id", using: :btree
+ t.index ["user_id", "project_id"], name: "index_users_star_projects_on_user_id_and_project_id", unique: true, using: :btree
end
- add_index "users_star_projects", ["project_id"], name: "index_users_star_projects_on_project_id", using: :btree
- add_index "users_star_projects", ["user_id", "project_id"], name: "index_users_star_projects_on_user_id_and_project_id", unique: true, using: :btree
-
create_table "web_hook_logs", force: :cascade do |t|
t.integer "web_hook_id", null: false
t.string "trigger"
@@ -2345,11 +2185,10 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.string "internal_error_message"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
+ t.index ["created_at", "web_hook_id"], name: "index_web_hook_logs_on_created_at_and_web_hook_id", using: :btree
+ t.index ["web_hook_id"], name: "index_web_hook_logs_on_web_hook_id", using: :btree
end
- add_index "web_hook_logs", ["created_at", "web_hook_id"], name: "index_web_hook_logs_on_created_at_and_web_hook_id", using: :btree
- add_index "web_hook_logs", ["web_hook_id"], name: "index_web_hook_logs_on_web_hook_id", using: :btree
-
create_table "web_hooks", force: :cascade do |t|
t.integer "project_id"
t.datetime "created_at"
@@ -2373,11 +2212,10 @@ ActiveRecord::Schema.define(version: 20181107054254) do
t.string "encrypted_token_iv"
t.string "encrypted_url"
t.string "encrypted_url_iv"
+ t.index ["project_id"], name: "index_web_hooks_on_project_id", using: :btree
+ t.index ["type"], name: "index_web_hooks_on_type", using: :btree
end
- add_index "web_hooks", ["project_id"], name: "index_web_hooks_on_project_id", using: :btree
- add_index "web_hooks", ["type"], name: "index_web_hooks_on_type", using: :btree
-
add_foreign_key "application_settings", "users", column: "usage_stats_set_by_user_id", name: "fk_964370041d", on_delete: :nullify
add_foreign_key "badges", "namespaces", column: "group_id", on_delete: :cascade
add_foreign_key "badges", "projects", on_delete: :cascade
@@ -2450,9 +2288,6 @@ ActiveRecord::Schema.define(version: 20181107054254) do
add_foreign_key "fork_network_members", "projects", on_delete: :cascade
add_foreign_key "fork_networks", "projects", column: "root_project_id", name: "fk_e7b436b2b5", on_delete: :nullify
add_foreign_key "forked_project_links", "projects", column: "forked_to_project_id", name: "fk_434510edb0", on_delete: :cascade
- add_foreign_key "gcp_clusters", "projects", on_delete: :cascade
- add_foreign_key "gcp_clusters", "services", on_delete: :nullify
- add_foreign_key "gcp_clusters", "users", on_delete: :nullify
add_foreign_key "gpg_key_subkeys", "gpg_keys", on_delete: :cascade
add_foreign_key "gpg_keys", "users", on_delete: :cascade
add_foreign_key "gpg_signatures", "gpg_key_subkeys", on_delete: :nullify
diff --git a/doc/administration/auth/authentiq.md b/doc/administration/auth/authentiq.md
index 252ff1f4b15..772e55cef07 100644
--- a/doc/administration/auth/authentiq.md
+++ b/doc/administration/auth/authentiq.md
@@ -6,7 +6,7 @@ Authentiq will generate a Client ID and the accompanying Client Secret for you t
1. Get your Client credentials (Client ID and Client Secret) at [Authentiq](https://www.authentiq.com/developers).
-2. On your GitLab server, open the configuration file:
+1. On your GitLab server, open the configuration file:
For omnibus installation
```sh
@@ -18,11 +18,11 @@ Authentiq will generate a Client ID and the accompanying Client Secret for you t
```sh
sudo -u git -H editor /home/git/gitlab/config/gitlab.yml
```
-
-3. See [Initial OmniAuth Configuration](../../integration/omniauth.md#initial-omniauth-configuration) for initial settings to enable single sign-on and add Authentiq as an OAuth provider.
-4. Add the provider configuration for Authentiq:
-
+1. See [Initial OmniAuth Configuration](../../integration/omniauth.md#initial-omniauth-configuration) for initial settings to enable single sign-on and add Authentiq as an OAuth provider.
+
+1. Add the provider configuration for Authentiq:
+
For Omnibus packages:
```ruby
@@ -31,15 +31,15 @@ Authentiq will generate a Client ID and the accompanying Client Secret for you t
"name" => "authentiq",
"app_id" => "YOUR_CLIENT_ID",
"app_secret" => "YOUR_CLIENT_SECRET",
- "args" => {
+ "args" => {
"scope": 'aq:name email~rs address aq:push'
}
}
]
```
-
+
For installations from source:
-
+
```yaml
- { name: 'authentiq',
app_id: 'YOUR_CLIENT_ID',
@@ -49,20 +49,20 @@ Authentiq will generate a Client ID and the accompanying Client Secret for you t
}
}
```
-
-
-5. The `scope` is set to request the user's name, email (required and signed), and permission to send push notifications to sign in on subsequent visits.
+
+
+1. The `scope` is set to request the user's name, email (required and signed), and permission to send push notifications to sign in on subsequent visits.
See [OmniAuth Authentiq strategy](https://github.com/AuthentiqID/omniauth-authentiq/wiki/Scopes,-callback-url-configuration-and-responses) for more information on scopes and modifiers.
-6. Change `YOUR_CLIENT_ID` and `YOUR_CLIENT_SECRET` to the Client credentials you received in step 1.
+1. Change `YOUR_CLIENT_ID` and `YOUR_CLIENT_SECRET` to the Client credentials you received in step 1.
-7. Save the configuration file.
+1. Save the configuration file.
-8. [Reconfigure](../restart_gitlab.md#omnibus-gitlab-reconfigure) or [restart GitLab](../restart_gitlab.md#installations-from-source) for the changes to take effect if you installed GitLab via Omnibus or from source respectively.
+1. [Reconfigure](../restart_gitlab.md#omnibus-gitlab-reconfigure) or [restart GitLab](../restart_gitlab.md#installations-from-source) for the changes to take effect if you installed GitLab via Omnibus or from source respectively.
-On the sign in page there should now be an Authentiq icon below the regular sign in form. Click the icon to begin the authentication process.
+On the sign in page there should now be an Authentiq icon below the regular sign in form. Click the icon to begin the authentication process.
-- If the user has the Authentiq ID app installed in their iOS or Android device, they can scan the QR code, decide what personal details to share and sign in to your GitLab installation.
-- If not they will be prompted to download the app and then follow the procedure above.
+- If the user has the Authentiq ID app installed on their iOS or Android device, they can scan the QR code, decide what personal details to share and sign in to your GitLab installation.
+- If not, they will be prompted to download the app and then follow the procedure above.
-If everything goes right, the user will be returned to GitLab and will be signed in. \ No newline at end of file
+If everything goes right, the user will be returned to GitLab and will be signed in.
diff --git a/doc/administration/high_availability/nfs.md b/doc/administration/high_availability/nfs.md
index 481eb692674..74b0e2c8184 100644
--- a/doc/administration/high_availability/nfs.md
+++ b/doc/administration/high_availability/nfs.md
@@ -59,7 +59,7 @@ on an Linux NFS server, do the following:
sysctl -w fs.leases-enable=0
```
-2. Restart the NFS server process. For example, on CentOS run `service nfs restart`.
+1. Restart the NFS server process. For example, on CentOS run `service nfs restart`.
## Avoid using AWS's Elastic File System (EFS)
@@ -87,12 +87,12 @@ this configuration.
Additionally, this configuration is specifically warned against in the
[Postgres Documentation](https://www.postgresql.org/docs/current/static/creating-cluster.html#CREATING-CLUSTER-NFS):
->PostgreSQL does nothing special for NFS file systems, meaning it assumes NFS behaves exactly like
->locally-connected drives. If the client or server NFS implementation does not provide standard file
->system semantics, this can cause reliability problems. Specifically, delayed (asynchronous) writes
+>PostgreSQL does nothing special for NFS file systems, meaning it assumes NFS behaves exactly like
+>locally-connected drives. If the client or server NFS implementation does not provide standard file
+>system semantics, this can cause reliability problems. Specifically, delayed (asynchronous) writes
>to the NFS server can cause data corruption problems.
-For supported database architecture, please see our documentation on
+For supported database architecture, please see our documentation on
[Configuring a Database for GitLab HA](https://docs.gitlab.com/ee/administration/high_availability/database.html).
## NFS Client mount options
diff --git a/doc/administration/high_availability/redis.md b/doc/administration/high_availability/redis.md
index dcee57def74..7c1ef43499d 100644
--- a/doc/administration/high_availability/redis.md
+++ b/doc/administration/high_availability/redis.md
@@ -665,7 +665,7 @@ cache, queues, and shared_state. To make this work with Sentinel:
**Note**: Redis URLs should be in the format: `redis://:PASSWORD@SENTINEL_MASTER_NAME`
1. PASSWORD is the plaintext password for the Redis instance
- 2. SENTINEL_MASTER_NAME is the Sentinel master name (e.g. `gitlab-redis-cache`)
+ 1. SENTINEL_MASTER_NAME is the Sentinel master name (e.g. `gitlab-redis-cache`)
1. Include an array of hashes with host/port combinations, such as the following:
```ruby
diff --git a/doc/administration/logs.md b/doc/administration/logs.md
index 038e043281c..7e5a3eb9ccd 100644
--- a/doc/administration/logs.md
+++ b/doc/administration/logs.md
@@ -29,9 +29,9 @@ Each line contains a JSON line that can be ingested by Elasticsearch, Splunk, et
In this example, you can see this was a GET request for a specific issue. Notice each line also contains performance data:
1. `duration`: the total time taken to retrieve the request
-2. `view`: total time taken inside the Rails views
-3. `db`: total time to retrieve data from the database
-4. `gitaly_calls`: total number of calls made to Gitaly
+1. `view`: total time taken inside the Rails views
+1. `db`: total time to retrieve data from the database
+1. `gitaly_calls`: total number of calls made to Gitaly
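Because each entry is a single JSON object, these fields are easy to filter from the command line. A minimal sketch with `jq` (a tool not mentioned in this guide), assuming the default Omnibus path for `production_json.log` and an arbitrary threshold for the `duration` field:

```sh
# Print the timing fields listed above for requests slower than a chosen threshold
jq -c 'select(.duration > 1000) | {duration, view, db, gitaly_calls}' \
  /var/log/gitlab/gitlab-rails/production_json.log
```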
User clone/fetch activity using http transport appears in this log as `action: git_upload_pack`.
@@ -119,7 +119,7 @@ This file lives in `/var/log/gitlab/gitlab-rails/integrations_json.log` for
Omnibus GitLab packages or in `/home/git/gitlab/log/integrations_json.log` for
installations from source.
-It contains information about [integrations](../user/project/integrations/project_services.md) activities such as JIRA, Asana and Irker services. It uses JSON format like the example below:
+It contains information about [integrations](../user/project/integrations/project_services.md) activities such as JIRA, Asana and Irker services. It uses JSON format like the example below:
``` json
{"severity":"ERROR","time":"2018-09-06T14:56:20.439Z","service_class":"JiraService","project_id":8,"project_path":"h5bp/html5-boilerplate","message":"Error sending message","client_url":"http://jira.gitlap.com:8080","error":"execution expired"}
@@ -257,8 +257,8 @@ importer. Future importers may use this file.
## Reconfigure Logs
-Reconfigure log files live in `/var/log/gitlab/reconfigure` for Omnibus GitLab
-packages. Installations from source don't have reconfigure logs. A reconfigure log
+Reconfigure log files live in `/var/log/gitlab/reconfigure` for Omnibus GitLab
+packages. Installations from source don't have reconfigure logs. A reconfigure log
is populated whenever `gitlab-ctl reconfigure` is run manually or as part of an upgrade.
Reconfigure logs files are named according to the UNIX timestamp of when the reconfigure
diff --git a/doc/administration/monitoring/performance/influxdb_configuration.md b/doc/administration/monitoring/performance/influxdb_configuration.md
index c30cd2950d8..fa281f47ed8 100644
--- a/doc/administration/monitoring/performance/influxdb_configuration.md
+++ b/doc/administration/monitoring/performance/influxdb_configuration.md
@@ -95,10 +95,10 @@ UDP can be done using the following settings:
This does the following:
1. Enable UDP and bind it to port 8089 for all addresses.
-2. Store any data received in the "gitlab" database.
-3. Define a batch of points to be 1000 points in size and allow a maximum of
+1. Store any data received in the "gitlab" database.
+1. Define a batch of points to be 1000 points in size and allow a maximum of
5 batches _or_ flush them automatically after 1 second.
-4. Define a UDP read buffer size of 200 MB.
+1. Define a UDP read buffer size of 200 MB.
One of the most important settings here is the UDP read buffer size; if this
value is set too low, packets will be dropped. You must also make sure the OS
diff --git a/doc/administration/monitoring/prometheus/index.md b/doc/administration/monitoring/prometheus/index.md
index b1b670c3b42..33611c5efc3 100644
--- a/doc/administration/monitoring/prometheus/index.md
+++ b/doc/administration/monitoring/prometheus/index.md
@@ -1,4 +1,4 @@
-# GitLab Prometheus
+# Monitoring GitLab with Prometheus
> **Notes:**
> - Prometheus and the various exporters listed in this page are bundled in the
@@ -24,7 +24,7 @@ dashboard tool like [Grafana].
## Configuring Prometheus
->**Note:**
+NOTE: **Note:**
For installations from source you'll have to install and configure it yourself.
Prometheus and its exporters are on by default, starting with GitLab 9.0.
@@ -43,17 +43,17 @@ To disable Prometheus and all of its exporters, as well as any added in the futu
```
1. Save the file and [reconfigure GitLab][reconfigure] for the changes to
- take effect
+ take effect.
-## Changing the port and address Prometheus listens on
+### Changing the port and address Prometheus listens on
->**Note:**
+NOTE: **Note:**
The following change was added in [GitLab Omnibus 8.17][1261]. Although possible,
it's not recommended to change the port Prometheus listens
on as this might affect or conflict with other services running on the GitLab
server. Proceed at your own risk.
-In order to access Prometheus from outside the GitLab server you will need to
+To access Prometheus from outside the GitLab server, you will need to
set an FQDN or IP in `prometheus['listen_address']`.
To change the address/port that Prometheus listens on:
@@ -77,6 +77,60 @@ To change the address/port that Prometheus listens on:
1. Save the file and [reconfigure GitLab][reconfigure] for the changes to
take effect
+### Using an external Prometheus server
+
+NOTE: **Note:**
+Prometheus and most exporters do not support authentication. We do not recommend exposing them outside the local network.
+
+A few configuration changes are required to allow GitLab to be monitored by an external Prometheus server. External servers are recommended for highly available deployments of GitLab with multiple nodes.
+
+To use an external Prometheus server:
+
+1. Edit `/etc/gitlab/gitlab.rb`.
+1. Disable the bundled Prometheus:
+
+ ```ruby
+ prometheus['enable'] = false
+ ```
+
+1. Set each bundled service's [exporter](#bundled-software-metrics) to listen on a network address, for example:
+
+ ```ruby
+ gitlab_monitor['listen_address'] = '0.0.0.0'
+ gitlab_monitor['listen_port'] = '9168'
+ gitaly['prometheus_listen_addr'] = "0.0.0.0:9236"
+ node_exporter['listen_address'] = '0.0.0.0:9100'
+ redis_exporter['listen_address'] = '0.0.0.0:9121'
+ postgres_exporter['listen_address'] = '0.0.0.0:9187'
+ ```
+
+1. Install and set up a dedicated Prometheus instance, if necessary, using the [official installation instructions](https://prometheus.io/docs/prometheus/latest/installation/).
+1. Add the Prometheus server IP address to the [monitoring IP whitelist](../ip_whitelist.html). For example:
+
+ ```ruby
+ gitlab_rails['monitoring_whitelist'] = ['127.0.0.0/8', '192.168.0.1']
+ ```
+
+1. [Reconfigure GitLab][reconfigure] to apply the changes.
+1. Edit the Prometheus server's configuration file.
+1. Add each node's exporters to the Prometheus server's
+ [scrape target configuration](https://prometheus.io/docs/prometheus/latest/configuration/configuration/#%3Cscrape_config%3E).
+   For example, a snippet using `static_configs`:
+
+ ```yaml
+ scrape_configs:
+ - job_name: 'gitlab_exporters'
+ static_configs:
+      - targets: ['1.1.1.1:9168', '1.1.1.1:9236', '1.1.1.1:9100', '1.1.1.1:9121', '1.1.1.1:9187']
+
+ - job_name: 'gitlab_metrics'
+ metrics_path: /-/metrics
+ static_configs:
+ - targets: ['1.1.1.1:443']
+ ```
+
+1. Restart the Prometheus server.
+
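A quick way to sanity-check the setup before pointing the external Prometheus at these targets (not one of the steps above, just a sketch using the example addresses) is to request a couple of the endpoints from the Prometheus host:

```sh
# Node exporter on the GitLab node (1.1.1.1 is the example address used above)
curl -s http://1.1.1.1:9100/metrics | head
# GitLab application metrics at /-/metrics; the request must come from a whitelisted IP.
# -k skips TLS verification for self-signed certificates; drop it otherwise.
curl -sk https://1.1.1.1/-/metrics | head
```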
## Viewing performance metrics
You can visit `http://localhost:9090` for the dashboard that Prometheus offers by default.
@@ -86,7 +140,7 @@ If SSL has been enabled on your GitLab instance, you may not be able to access
Prometheus on the same browser as GitLab if using the same FQDN due to [HSTS][hsts]. We plan to
[provide access via GitLab][multi-user-prometheus], but in the interim there are
some workarounds: using a separate FQDN, using server IP, using a separate browser for Prometheus, resetting HSTS, or
-having [Nginx proxy it][nginx-custom-config].
+having [NGINX proxy it][nginx-custom-config].
The performance data collected by Prometheus can be viewed directly in the
Prometheus console or through a compatible dashboard tool.
@@ -102,26 +156,7 @@ Sample Prometheus queries:
- **Data transmitted:** `rate(node_network_transmit_bytes_total{device!="lo"}[5m])`
- **Data received:** `rate(node_network_receive_bytes_total{device!="lo"}[5m])`
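The same queries can also be issued outside the web UI through Prometheus's HTTP API; a minimal sketch against the default local listener mentioned above (the `/api/v1/query` endpoint and port 9090 are standard Prometheus, not GitLab-specific):

```sh
# Evaluate one of the sample queries via the Prometheus HTTP API on the default port
curl -sG 'http://localhost:9090/api/v1/query' \
  --data-urlencode 'query=rate(node_network_receive_bytes_total{device!="lo"}[5m])'
```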
-## Configuring Prometheus to monitor Kubernetes
-
-> Introduced in GitLab 9.0.
-> Pod monitoring introduced in GitLab 9.4.
-
-If your GitLab server is running within Kubernetes, Prometheus will collect metrics from the Nodes and [annotated Pods](https://prometheus.io/docs/operating/configuration/#kubernetes_sd_config) in the cluster, including performance data on each container. This is particularly helpful if your CI/CD environments run in the same cluster, as you can use the [Prometheus project integration][] to monitor them.
-
-To disable the monitoring of Kubernetes:
-
-1. Edit `/etc/gitlab/gitlab.rb`
-1. Add or find and uncomment the following line and set it to `false`:
-
- ```ruby
- prometheus['monitor_kubernetes'] = false
- ```
-
-1. Save the file and [reconfigure GitLab][reconfigure] for the changes to
- take effect
-
-## GitLab Prometheus metrics
+## GitLab metrics
> Introduced in GitLab 9.3.
@@ -129,17 +164,10 @@ GitLab monitors its own internal service metrics, and makes them available at th
[āž” Read more about the GitLab Metrics.](gitlab_metrics.md)
-## Prometheus exporters
-
-There are a number of libraries and servers which help in exporting existing
-metrics from third-party systems as Prometheus metrics. This is useful for cases
-where it is not feasible to instrument a given system with Prometheus metrics
-directly (for example, HAProxy or Linux system stats). You can read more in the
-[Prometheus exporters and integrations upstream documentation][prom-exporters].
+## Bundled software metrics
-While you can use any exporter you like with your GitLab installation, the
-following ones documented here are bundled in the Omnibus GitLab packages
-making it easy to configure and use.
+Many of the GitLab dependencies bundled in Omnibus GitLab are preconfigured to
+export Prometheus metrics.
### Node exporter
@@ -166,6 +194,25 @@ The GitLab monitor exporter allows you to measure various GitLab metrics, pulled
[āž” Read more about the GitLab monitor exporter.](gitlab_monitor_exporter.md)
+## Configuring Prometheus to monitor Kubernetes
+
+> Introduced in GitLab 9.0.
+> Pod monitoring introduced in GitLab 9.4.
+
+If your GitLab server is running within Kubernetes, Prometheus will collect metrics from the Nodes and [annotated Pods](https://prometheus.io/docs/operating/configuration/#kubernetes_sd_config) in the cluster, including performance data on each container. This is particularly helpful if your CI/CD environments run in the same cluster, as you can use the [Prometheus project integration][prometheus integration] to monitor them.
+
+To disable the monitoring of Kubernetes:
+
+1. Edit `/etc/gitlab/gitlab.rb`.
+1. Add or find and uncomment the following line and set it to `false`:
+
+ ```ruby
+ prometheus['monitor_kubernetes'] = false
+ ```
+
+1. Save the file and [reconfigure GitLab][reconfigure] for the changes to
+ take effect.
+
[grafana]: https://grafana.net
[hsts]: https://en.wikipedia.org/wiki/HTTP_Strict_Transport_Security
[multi-user-prometheus]: https://gitlab.com/gitlab-org/multi-user-prometheus
diff --git a/doc/administration/operations/fast_ssh_key_lookup.md b/doc/administration/operations/fast_ssh_key_lookup.md
index eada7b19dcd..c293df3fc57 100644
--- a/doc/administration/operations/fast_ssh_key_lookup.md
+++ b/doc/administration/operations/fast_ssh_key_lookup.md
@@ -5,7 +5,7 @@ NOTE: **Note:** This document describes a drop-in replacement for the
using [ssh certificates](ssh_certificates.md), they are even faster,
but are not a drop-in replacement.
-> [Introduced](https://gitlab.com/gitlab-org/gitlab-ee/issues/1631) in
+> [Introduced](https://gitlab.com/gitlab-org/gitlab-ee/issues/1631) in
> [GitLab Starter](https://about.gitlab.com/gitlab-ee) 9.3.
>
> [Available in](https://gitlab.com/gitlab-org/gitlab-ee/issues/3953) GitLab
@@ -109,7 +109,7 @@ the database. The following instructions can be used to build OpenSSH 7.5:
yum install rpm-build gcc make wget openssl-devel krb5-devel pam-devel libX11-devel xmkmf libXt-devel
```
-3. Prepare the build by copying files to the right place:
+1. Prepare the build by copying files to the right place:
```
mkdir -p /root/rpmbuild/{SOURCES,SPECS}
@@ -118,7 +118,7 @@ the database. The following instructions can be used to build OpenSSH 7.5:
cd /root/rpmbuild/SPECS
```
-3. Next, set the spec settings properly:
+1. Next, set the spec settings properly:
```
sed -i -e "s/%define no_gnome_askpass 0/%define no_gnome_askpass 1/g" openssh.spec
@@ -126,13 +126,13 @@ the database. The following instructions can be used to build OpenSSH 7.5:
sed -i -e "s/BuildPreReq/BuildRequires/g" openssh.spec
```
-3. Build the RPMs:
+1. Build the RPMs:
```
rpmbuild -bb openssh.spec
```
-4. Ensure the RPMs were built:
+1. Ensure the RPMs were built:
```
ls -al /root/rpmbuild/RPMS/x86_64/
@@ -150,7 +150,7 @@ the database. The following instructions can be used to build OpenSSH 7.5:
-rw-r--r--. 1 root root 367516 Jun 20 19:37 openssh-server-7.5p1-1.x86_64.rpm
```
-5. Install the packages. OpenSSH packages will replace `/etc/pam.d/sshd`
+1. Install the packages. OpenSSH packages will replace `/etc/pam.d/sshd`
with its own version, which may prevent users from logging in, so be sure
that the file is backed up and restored after installation:
@@ -161,7 +161,7 @@ the database. The following instructions can be used to build OpenSSH 7.5:
yes | cp pam-ssh-conf-$timestamp /etc/pam.d/sshd
```
-6. Verify the installed version. In another window, attempt to login to the server:
+1. Verify the installed version. In another window, attempt to login to the server:
```
ssh -v <your-centos-machine>
@@ -171,7 +171,7 @@ the database. The following instructions can be used to build OpenSSH 7.5:
If not, you may need to restart sshd (e.g. `systemctl restart sshd.service`).
-7. *IMPORTANT!* Open a new SSH session to your server before exiting to make
+1. *IMPORTANT!* Open a new SSH session to your server before exiting to make
   sure everything is working! If you need to downgrade, simply install the
older package:
diff --git a/doc/administration/reply_by_email_postfix_setup.md b/doc/administration/reply_by_email_postfix_setup.md
index d1a03219542..4c42cb7756a 100644
--- a/doc/administration/reply_by_email_postfix_setup.md
+++ b/doc/administration/reply_by_email_postfix_setup.md
@@ -8,7 +8,7 @@ The instructions make the assumption that you will be using the email address `i
## Configure your server firewall
1. Open up port 25 on your server so that people can send email into the server over SMTP.
-2. If the mail server is different from the server running GitLab, open up port 143 on your server so that GitLab can read email from the server over IMAP.
+1. If the mail server is different from the server running GitLab, open up port 143 on your server so that GitLab can read email from the server over IMAP.
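Once the firewall rules are in place, a quick reachability check can save debugging later. A sketch with `nc`, where `mail.example.com` is a stand-in hostname for your mail server (not taken from this guide):

```sh
# SMTP, for mail delivered into the server
nc -zv mail.example.com 25
# IMAP, only relevant when GitLab reads mail from a separate mail server
nc -zv mail.example.com 143
```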
## Install packages
diff --git a/doc/administration/troubleshooting/debug.md b/doc/administration/troubleshooting/debug.md
index 2902af8c782..643c5b9fe80 100644
--- a/doc/administration/troubleshooting/debug.md
+++ b/doc/administration/troubleshooting/debug.md
@@ -20,7 +20,7 @@ an SMTP server, but you're not seeing mail delivered. Here's how to check the se
bundle exec rails console production
```
-2. Look at the ActionMailer `delivery_method` to make sure it matches what you
+1. Look at the ActionMailer `delivery_method` to make sure it matches what you
intended. If you configured SMTP, it should say `:smtp`. If you're using
Sendmail, it should say `:sendmail`:
@@ -29,7 +29,7 @@ an SMTP server, but you're not seeing mail delivered. Here's how to check the se
=> :smtp
```
-3. If you're using SMTP, check the mail settings:
+1. If you're using SMTP, check the mail settings:
```ruby
irb(main):002:0> ActionMailer::Base.smtp_settings
@@ -39,7 +39,7 @@ an SMTP server, but you're not seeing mail delivered. Here's how to check the se
In the example above, the SMTP server is configured for the local machine. If this is intended, you may need to check your local mail
logs (e.g. `/var/log/mail.log`) for more details.
-4. Send a test message via the console.
+1. Send a test message via the console.
```ruby
irb(main):003:0> Notify.test_email('youremail@email.com', 'Hello World', 'This is a test message').deliver_now
diff --git a/doc/api/events.md b/doc/api/events.md
index ccac5b8bb60..e1c6b801a77 100644
--- a/doc/api/events.md
+++ b/doc/api/events.md
@@ -71,7 +71,7 @@ Parameters:
Example request:
```
-curl --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" https://gitlab.example.com/api/v4/events&target_type=issue&action=created&after=2017-01-31&before=2017-03-01
+curl --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" https://gitlab.example.com/api/v4/events?target_type=issue&action=created&after=2017-01-31&before=2017-03-01
```
Example response:
@@ -276,7 +276,7 @@ Parameters:
Example request:
```
-curl --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" https://gitlab.example.com/api/v4/projects/:project_id/events&target_type=issue&action=created&after=2017-01-31&before=2017-03-01
+curl --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" https://gitlab.example.com/api/v4/projects/:project_id/events?target_type=issue&action=created&after=2017-01-31&before=2017-03-01
```
Example response:
diff --git a/doc/ci/autodeploy/quick_start_guide.md b/doc/ci/autodeploy/quick_start_guide.md
index 3836216e951..1473703542d 100644
--- a/doc/ci/autodeploy/quick_start_guide.md
+++ b/doc/ci/autodeploy/quick_start_guide.md
@@ -23,9 +23,9 @@ You need to have the Google Cloud SDK installed. e.g.
On OSX, install [homebrew](https://brew.sh):
1. Install Brew Caskroom: `brew install caskroom/cask/brew-cask`
-2. Install Google Cloud SDK: `brew cask install google-cloud-sdk`
-3. Add `kubectl`: `gcloud components install kubectl`
-4. Log in: `gcloud auth login`
+1. Install Google Cloud SDK: `brew cask install google-cloud-sdk`
+1. Add `kubectl`: `gcloud components install kubectl`
+1. Log in: `gcloud auth login`
Now go back to the Google interface, find your cluster, and follow the instructions under `Connect to the cluster` and open the Kubernetes Dashboard. It will look something like `gcloud container clusters get-credentials ruby-autodeploy \ --zone europe-west2-c --project api-project-XXXXXXX` and then `kubectl proxy`.
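Spelled out as a copy-pasteable sequence, the connection step from the previous paragraph looks roughly like this; the cluster name, zone, and project ID are the example values quoted above and must be replaced with your own:

```sh
# Fetch credentials for the cluster, then proxy the Kubernetes API/Dashboard locally
gcloud container clusters get-credentials ruby-autodeploy --zone europe-west2-c --project api-project-XXXXXXX
kubectl proxy
```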
diff --git a/doc/ci/docker/using_docker_build.md b/doc/ci/docker/using_docker_build.md
index 3b41036cd14..fef367051bf 100644
--- a/doc/ci/docker/using_docker_build.md
+++ b/doc/ci/docker/using_docker_build.md
@@ -46,18 +46,18 @@ GitLab Runner then executes job scripts as the `gitlab-runner` user.
--description "My Runner"
```
-2. Install Docker Engine on server.
+1. Install Docker Engine on the server.
For more information on how to install Docker Engine on different systems,
check out the [Supported installations](https://docs.docker.com/engine/installation/).
-3. Add `gitlab-runner` user to `docker` group:
+1. Add `gitlab-runner` user to `docker` group:
```bash
sudo usermod -aG docker gitlab-runner
```
-4. Verify that `gitlab-runner` has access to Docker:
+1. Verify that `gitlab-runner` has access to Docker:
```bash
sudo -u gitlab-runner -H docker info
@@ -75,7 +75,7 @@ GitLab Runner then executes job scripts as the `gitlab-runner` user.
- docker run my-docker-image /script/to/run/tests
```
-5. You can now use `docker` command and install `docker-compose` if needed.
+1. You can now use the `docker` command and install `docker-compose` if needed.
NOTE: **Note:**
By adding `gitlab-runner` to the `docker` group you are effectively granting `gitlab-runner` full root permissions.
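If you want to verify who holds that level of access on a given host, a standard shell check (not part of the original steps) is:

```sh
# Show the members of the docker group and the runner user's group memberships
getent group docker
id gitlab-runner
```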
diff --git a/doc/ci/examples/browser_performance.md b/doc/ci/examples/browser_performance.md
index d36e97ebfd3..7c3b3a65675 100644
--- a/doc/ci/examples/browser_performance.md
+++ b/doc/ci/examples/browser_performance.md
@@ -1,14 +1,20 @@
# Browser Performance Testing with the Sitespeed.io container
+CAUTION: **Caution:**
+The job definition shown below is supported on GitLab 11.5 and later versions.
+It also requires GitLab Runner 11.5 or later.
+For earlier versions, use the [previous job definitions](#previous-job-definitions).
+
This example shows how to run the
[Sitespeed.io container](https://hub.docker.com/r/sitespeedio/sitespeed.io/) on
your code by using GitLab CI/CD and [Sitespeed.io](https://www.sitespeed.io)
using Docker-in-Docker.
-First, you need a GitLab Runner with the
+First, you need GitLab Runner with
[docker-in-docker executor](../docker/using_docker_build.md#use-docker-in-docker-executor).
-Once you set up the Runner, add a new job to `.gitlab-ci.yml`, called
-`performance`:
+
+Once you set up the Runner, add a new job to `.gitlab-ci.yml` that
+generates the expected report:
```yaml
performance:
@@ -26,19 +32,22 @@ performance:
- mv sitespeed-results/data/performance.json performance.json
artifacts:
paths:
- - performance.json
- - sitespeed-results/
+ - sitespeed-results/
+ reports:
+ performance: performance.json
```
-The above example will:
+The above example will create a `performance` job in your CI/CD pipeline and will run
+Sitespeed.io against the webpage you defined in `URL` to gather key metrics.
+The [GitLab plugin](https://gitlab.com/gitlab-org/gl-performance) for
+Sitespeed.io is downloaded in order to save the report as a
+[Performance report artifact](https://docs.gitlab.com/ee//ci/yaml/README.html#artifactsreportsperformance)
+that you can later download and analyze.
+Due to implementation limitations, we always take the latest Performance artifact available.
-1. Create a `performance` job in your CI/CD pipeline and will run
- Sitespeed.io against the webpage you defined in `URL`.
-1. The [GitLab plugin](https://gitlab.com/gitlab-org/gl-performance) for
- Sitespeed.io is downloaded in order to export key metrics to JSON. The full
- HTML Sitespeed.io report will also be saved as an artifact, and if you have
- [GitLab Pages](../../user/project/pages/index.md) enabled, it can be viewed
- directly in your browser.
+The full HTML Sitespeed.io report will also be saved as an artifact, and if you have
+[GitLab Pages](../../user/project/pages/index.md) enabled, it can be viewed
+directly in your browser.
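To pull the saved `performance.json` out of a finished pipeline for offline analysis, one option is the job artifacts API; a sketch, where the project ID (`42`), ref (`master`), host, and token are placeholders:

```sh
# Download the artifacts archive of the latest successful `performance` job on master
curl --header "PRIVATE-TOKEN: <your_access_token>" --output artifacts.zip \
  "https://gitlab.example.com/api/v4/projects/42/jobs/artifacts/master/download?job=performance"
```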
For further customization options of Sitespeed.io, including the ability to
provide a list of URLs to test, please consult
@@ -46,8 +55,8 @@ provide a list of URLs to test, please consult
TIP: **Tip:**
For [GitLab Premium](https://about.gitlab.com/pricing/) users, key metrics are automatically
-extracted and shown right in the merge request widget. Learn more about
-[Browser Performance Testing](https://docs.gitlab.com/ee/user/project/merge_requests/browser_performance_testing.html).
+extracted and shown right in the merge request widget.
+[Learn more on Browser Performance Testing in merge requests](https://docs.gitlab.com/ee//user/project/merge_requests/browser_performance_testing.html).
## Performance testing on Review Apps
@@ -106,8 +115,40 @@ performance:
- mv sitespeed-results/data/performance.json performance.json
artifacts:
paths:
- - performance.json
- sitespeed-results/
+ reports:
+ performance: performance.json
```
A complete example can be found in our [Auto DevOps CI YML](https://gitlab.com/gitlab-org/gitlab-ce/blob/master/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml).
+
+## Previous job definitions
+
+CAUTION: **Caution:**
+Before GitLab 11.5, Performance job and artifact had to be named specifically
+to automatically extract report data and show it in the merge request widget.
+While these old job definitions are still maintained, they have been deprecated
+and may be removed in the next major release, GitLab 12.0.
+You are advised to update your current `.gitlab-ci.yml` configuration to reflect that change.
+
+For GitLab 11.4 and earlier, the job should look like:
+
+```yaml
+performance:
+ stage: performance
+ image: docker:git
+ variables:
+ URL: https://example.com
+ services:
+ - docker:stable-dind
+ script:
+ - mkdir gitlab-exporter
+ - wget -O ./gitlab-exporter/index.js https://gitlab.com/gitlab-org/gl-performance/raw/master/index.js
+ - mkdir sitespeed-results
+ - docker run --shm-size=1g --rm -v "$(pwd)":/sitespeed.io sitespeedio/sitespeed.io:6.3.1 --plugins.add ./gitlab-exporter --outputFolder sitespeed-results $URL
+ - mv sitespeed-results/data/performance.json performance.json
+ artifacts:
+ paths:
+ - performance.json
+ - sitespeed-results/
+``` \ No newline at end of file
diff --git a/doc/ci/examples/code_quality.md b/doc/ci/examples/code_quality.md
index 2a7040ecdeb..ae000b9d30d 100644
--- a/doc/ci/examples/code_quality.md
+++ b/doc/ci/examples/code_quality.md
@@ -1,11 +1,18 @@
# Analyze your project's Code Quality
+CAUTION: **Caution:**
+The job definition shown below is supported on GitLab 11.5 and later versions.
+It also requires GitLab Runner 11.5 or later.
+For earlier versions, use the [previous job definitions](#previous-job-definitions).
+
This example shows how to run Code Quality on your code by using GitLab CI/CD
and Docker.
-First, you need GitLab Runner with [docker-in-docker executor][dind].
+First, you need a GitLab Runner with the
+[docker-in-docker executor](../docker/using_docker_build.md#use-docker-in-docker-executor).
-Once you set up the Runner, add a new job to `.gitlab-ci.yml`, called `code_quality`:
+Once you set up the Runner, add a new job to `.gitlab-ci.yml` that
+generates the expected report:
```yaml
code_quality:
@@ -23,27 +30,72 @@ code_quality:
--volume /var/run/docker.sock:/var/run/docker.sock
"registry.gitlab.com/gitlab-org/security-products/codequality:$SP_VERSION" /code
artifacts:
- paths: [gl-code-quality-report.json]
+ reports:
+ codequality: gl-code-quality-report.json
```
The above example will create a `code_quality` job in your CI/CD pipeline which
-will scan your source code for code quality issues. The report will be saved
-as an artifact that you can later download and analyze.
+will scan your source code for code quality issues. The report will be saved as a
+[Code Quality report artifact](../../ci/yaml/README.md#artifactsreportscodequality)
+that you can later download and analyze.
+Due to implementation limitations, we always take the latest Code Quality artifact available.
TIP: **Tip:**
-Starting with [GitLab Starter][ee] 9.3, this information will
-be automatically extracted and shown right in the merge request widget. To do
-so, the CI/CD job must be named `code_quality` and the artifact path must be
-`gl-code-quality-report.json`.
+For [GitLab Starter][ee] users, this information will be automatically
+extracted and shown right in the merge request widget.
[Learn more on Code Quality in merge requests](https://docs.gitlab.com/ee/user/project/merge_requests/code_quality.html).
+## Previous job definitions
+
CAUTION: **Caution:**
-Code Quality was previously using `codeclimate` and `codequality` for job name and
-`codeclimate.json` for the artifact name. While these old names
-are still maintained they have been deprecated with GitLab 11.0 and may be removed
-in next major release, GitLab 12.0. You are advised to update your current `.gitlab-ci.yml`
-configuration to reflect that change.
+Before GitLab 11.5, the Code Quality job and artifact had to be named specifically
+to automatically extract report data and show it in the merge request widget.
+While these old job definitions are still maintained, they have been deprecated
+and may be removed in the next major release, GitLab 12.0.
+You are advised to update your current `.gitlab-ci.yml` configuration to reflect that change.
+
+For GitLab 11.4 and earlier, the job should look like:
+
+```yaml
+code_quality:
+ image: docker:stable
+ variables:
+ DOCKER_DRIVER: overlay2
+ allow_failure: true
+ services:
+ - docker:stable-dind
+ script:
+ - export SP_VERSION=$(echo "$CI_SERVER_VERSION" | sed 's/^\([0-9]*\)\.\([0-9]*\).*/\1-\2-stable/')
+ - docker run
+ --env SOURCE_CODE="$PWD"
+ --volume "$PWD":/code
+ --volume /var/run/docker.sock:/var/run/docker.sock
+ "registry.gitlab.com/gitlab-org/security-products/codequality:$SP_VERSION" /code
+ artifacts:
+ paths: [gl-code-quality-report.json]
+```
+
+Alternatively, the job name could be `codeclimate` or `codequality`
+and the artifact name could be `codeclimate.json`.
+These names have been deprecated since GitLab 11.0
+and may be removed in the next major release, GitLab 12.0.
+
+For GitLab 10.3 and earlier, the job should look like:
+
+```yaml
+codequality:
+ image: docker:latest
+ variables:
+ DOCKER_DRIVER: overlay
+ services:
+ - docker:dind
+ script:
+ - docker pull codeclimate/codeclimate:0.69.0
+ - docker run --env CODECLIMATE_CODE="$PWD" --volume "$PWD":/code --volume /var/run/docker.sock:/var/run/docker.sock --volume /tmp/cc:/tmp/cc codeclimate/codeclimate:0.69.0 init
+ - docker run --env CODECLIMATE_CODE="$PWD" --volume "$PWD":/code --volume /var/run/docker.sock:/var/run/docker.sock --volume /tmp/cc:/tmp/cc codeclimate/codeclimate:0.69.0 analyze -f json > codeclimate.json || true
+ artifacts:
+ paths: [codeclimate.json]
+```
[cli]: https://github.com/codeclimate/codeclimate
-[dind]: ../docker/using_docker_build.md#use-docker-in-docker-executor
[ee]: https://about.gitlab.com/pricing/
diff --git a/doc/ci/examples/container_scanning.md b/doc/ci/examples/container_scanning.md
index bc948dc6ea9..68330261910 100644
--- a/doc/ci/examples/container_scanning.md
+++ b/doc/ci/examples/container_scanning.md
@@ -1,13 +1,20 @@
# Container Scanning with GitLab CI/CD
+CAUTION: **Caution:**
+The job definition shown below is supported on GitLab 11.5 and later versions.
+It also requires GitLab Runner 11.5 or later.
+For earlier versions, use the [previous job definitions](#previous-job-definitions).
+
You can check your Docker images (or more precisely the containers) for known
vulnerabilities by using [Clair](https://github.com/coreos/clair) and
[clair-scanner](https://github.com/arminc/clair-scanner), two open source tools
for Vulnerability Static Analysis for containers.
-All you need is a GitLab Runner with the Docker executor (the shared Runners on
-GitLab.com will work fine). You can then add a new job to `.gitlab-ci.yml`,
-called `container_scanning`:
+First, you need a GitLab Runner with the
+[docker-in-docker executor](../docker/using_docker_build.md#use-docker-in-docker-executor).
+
+Once you set up the Runner, add a new job to `.gitlab-ci.yml` that
+generates the expected report:
```yaml
container_scanning:
@@ -36,32 +43,26 @@ container_scanning:
- while( ! wget -T 10 -q -O /dev/null http://docker:6060/v1/namespaces ) ; do sleep 1 ; echo -n "." ; if [ $retries -eq 10 ] ; then echo " Timeout, aborting." ; exit 1 ; fi ; retries=$(($retries+1)) ; done
- ./clair-scanner -c http://docker:6060 --ip $(hostname -i) -r gl-container-scanning-report.json -l clair.log -w clair-whitelist.yml ${CI_APPLICATION_REPOSITORY}:${CI_APPLICATION_TAG} || true
artifacts:
- paths: [gl-container-scanning-report.json]
+ reports:
+ container_scanning: gl-container-scanning-report.json
```
The above example will create a `container_scanning` job in your CI/CD pipeline, pull
the image from the [Container Registry](../../user/project/container_registry.md)
(whose name is defined from the two `CI_APPLICATION_` variables) and scan it
-for possible vulnerabilities. The report will be saved as an artifact that you
-can later download and analyze.
+for possible vulnerabilities. The report will be saved as a
+[Container Scanning report artifact](https://docs.gitlab.com/ee//ci/yaml/README.html#artifactsreportscontainer_scanning)
+that you can later download and analyze.
+Due to implementation limitations, we always take the latest Container Scanning artifact available.
If you want to whitelist some specific vulnerabilities, you can do so by defining
them in a [YAML file](https://github.com/arminc/clair-scanner/blob/master/README.md#example-whitelist-yaml-file),
which in our case is named `clair-whitelist.yml`.
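+
+For reference, a whitelist file might look like the following rough sketch. The
+`generalwhitelist`/`images` layout is modelled on the example in the clair-scanner
+README linked above; the CVE identifiers and descriptions here are placeholders only:
+
+```yaml
+generalwhitelist:
+  # Ignore this CVE for every scanned image (placeholder identifier)
+  CVE-2017-0000: reason for whitelisting
+images:
+  alpine:
+    # Ignore this CVE only for the alpine image (placeholder identifier)
+    CVE-2017-0001: reason for whitelisting
+```
+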
TIP: **Tip:**
-Starting with [GitLab Ultimate][ee] 10.4, this information will
-be automatically extracted and shown right in the merge request widget. To do
-so, the CI/CD job must be named `container_scanning` and the artifact path must be
-`gl-container-scanning-report.json`.
-[Learn more on container scanning results shown in merge requests](https://docs.gitlab.com/ee/user/project/merge_requests/container_scanning.html).
-
-CAUTION: **Caution:**
-Before GitLab 11.0, Container Scanning was previously using `sast:container` for job name and
-`gl-sast-container-report.json` for the artifact name. While these old names
-are still maintained, they have been deprecated with GitLab 11.0 and may be removed
-in next major release, GitLab 12.0. You are advised to update your current `.gitlab-ci.yml`
-configuration to reflect that change.
+For [GitLab Ultimate][ee] users, this information will
+be automatically extracted and shown right in the merge request widget.
+[Learn more on Container Scanning in merge requests](https://docs.gitlab.com/ee/user/project/merge_requests/container_scanning.html).
CAUTION: **Caution:**
Starting with GitLab 11.5, the Container Scanning feature is licensed under the name `container_scanning`.
@@ -69,4 +70,50 @@ While the old name `sast_container` is still maintained, it has been deprecated
may be removed in the next major release, GitLab 12.0. You are advised to update your current `.gitlab-ci.yml`
configuration to reflect that change if you are using the `$GITLAB_FEATURES` environment variable.
+## Previous job definitions
+
+CAUTION: **Caution:**
+Before GitLab 11.5, the Container Scanning job and artifact had to be named specifically
+to automatically extract report data and show it in the merge request widget.
+While these old job definitions are still maintained, they have been deprecated
+and may be removed in the next major release, GitLab 12.0.
+You are advised to update your current `.gitlab-ci.yml` configuration to reflect that change.
+
+For GitLab 11.4 and earlier, the job should look like:
+
+```yaml
+container_scanning:
+ image: docker:stable
+ variables:
+ DOCKER_DRIVER: overlay2
+ ## Define two new variables based on GitLab's CI/CD predefined variables
+ ## https://docs.gitlab.com/ee/ci/variables/#predefined-variables-environment-variables
+ CI_APPLICATION_REPOSITORY: $CI_REGISTRY_IMAGE/$CI_COMMIT_REF_SLUG
+ CI_APPLICATION_TAG: $CI_COMMIT_SHA
+ allow_failure: true
+ services:
+ - docker:stable-dind
+ script:
+ - docker run -d --name db arminc/clair-db:latest
+ - docker run -p 6060:6060 --link db:postgres -d --name clair --restart on-failure arminc/clair-local-scan:v2.0.1
+ - apk add -U wget ca-certificates
+ - docker pull ${CI_APPLICATION_REPOSITORY}:${CI_APPLICATION_TAG}
+ - wget https://github.com/arminc/clair-scanner/releases/download/v8/clair-scanner_linux_amd64
+ - mv clair-scanner_linux_amd64 clair-scanner
+ - chmod +x clair-scanner
+ - touch clair-whitelist.yml
+ - while( ! wget -q -O /dev/null http://docker:6060/v1/namespaces ) ; do sleep 1 ; done
+ - retries=0
+ - echo "Waiting for clair daemon to start"
+ - while( ! wget -T 10 -q -O /dev/null http://docker:6060/v1/namespaces ) ; do sleep 1 ; echo -n "." ; if [ $retries -eq 10 ] ; then echo " Timeout, aborting." ; exit 1 ; fi ; retries=$(($retries+1)) ; done
+ - ./clair-scanner -c http://docker:6060 --ip $(hostname -i) -r gl-container-scanning-report.json -l clair.log -w clair-whitelist.yml ${CI_APPLICATION_REPOSITORY}:${CI_APPLICATION_TAG} || true
+ artifacts:
+ paths: [gl-container-scanning-report.json]
+```
+
+Alternatively, the job name could be `sast:container`
+and the artifact name could be `gl-sast-container-report.json`.
+These names have been deprecated since GitLab 11.0
+and may be removed in the next major release, GitLab 12.0.
+
[ee]: https://about.gitlab.com/pricing/
diff --git a/doc/ci/examples/dast.md b/doc/ci/examples/dast.md
index ff20f0b3b5e..0ca89eb6700 100644
--- a/doc/ci/examples/dast.md
+++ b/doc/ci/examples/dast.md
@@ -1,16 +1,26 @@
# Dynamic Application Security Testing with GitLab CI/CD
+CAUTION: **Caution:**
+The job definition shown below is supported on GitLab 11.5 and later versions.
+It also requires GitLab Runner 11.5 or later.
+For earlier versions, use the [previous job definitions](#previous-job-definitions).
+
[Dynamic Application Security Testing (DAST)](https://en.wikipedia.org/wiki/Dynamic_program_analysis)
is using the popular open source tool [OWASP ZAProxy](https://github.com/zaproxy/zaproxy)
to perform an analysis on your running web application.
+Since it is based on [ZAP Baseline](https://github.com/zaproxy/zaproxy/wiki/ZAP-Baseline-Scan),
+DAST will perform passive scanning only;
+it will not actively attack your application.
It can be very useful combined with [Review Apps](../review_apps/index.md).
## Example
-All you need is a GitLab Runner with the Docker executor (the shared Runners on
-GitLab.com will work fine). You can then add a new job to `.gitlab-ci.yml`,
-called `dast`:
+First, you need a GitLab Runner with the
+[docker-in-docker executor](../docker/using_docker_build.md#use-docker-in-docker-executor).
+
+Once you set up the Runner, add a new job to `.gitlab-ci.yml` that
+generates the expected report:
```yaml
dast:
@@ -23,13 +33,16 @@ dast:
- /zap/zap-baseline.py -J gl-dast-report.json -t $website || true
- cp /zap/wrk/gl-dast-report.json .
artifacts:
- paths: [gl-dast-report.json]
+ reports:
+ dast: gl-dast-report.json
```
The above example will create a `dast` job in your CI/CD pipeline which will run
the tests on the URL defined in the `website` variable (change it to use your
-own) and finally write the results in the `gl-dast-report.json` file. You can
-then download and analyze the report artifact in JSON format.
+own) and scan it for possible vulnerabilities. The report will be saved as a
+[DAST report artifact](https://docs.gitlab.com/ee//ci/yaml/README.html#artifactsreportsdast)
+that you can later download and analyze.
+Due to implementation limitations, we always take the latest DAST artifact available.
It's also possible to authenticate the user before performing DAST checks:
@@ -39,25 +52,51 @@ dast:
variables:
website: "https://example.com"
login_url: "https://example.com/sign-in"
+ username: "john.doe@example.com"
+ password: "john-doe-password"
allow_failure: true
script:
- mkdir /zap/wrk/
- /zap/zap-baseline.py -J gl-dast-report.json -t $website
--auth-url $login_url
- --auth-username "john.doe@example.com"
- --auth-password "john-doe-password" || true
+ --auth-username $username
+ --auth-password $password || true
- cp /zap/wrk/gl-dast-report.json .
artifacts:
- paths: [gl-dast-report.json]
+ reports:
+ dast: gl-dast-report.json
```
See [zaproxy documentation](https://gitlab.com/gitlab-org/security-products/zaproxy)
to learn more about authentication settings.
TIP: **Tip:**
-Starting with [GitLab Ultimate][ee] 10.4, this information will
-be automatically extracted and shown right in the merge request widget. To do
-so, the CI job must be named `dast` and the artifact path must be
-`gl-dast-report.json`.
-[Learn more about DAST results shown in merge requests](https://docs.gitlab.com/ee/user/project/merge_requests/dast.html).
+For [GitLab Ultimate][ee] users, this information will
+be automatically extracted and shown right in the merge request widget.
+[Learn more on DAST in merge requests](https://docs.gitlab.com/ee/user/project/merge_requests/dast.html).
+
+## Previous job definitions
+
+CAUTION: **Caution:**
+Before GitLab 11.5, the DAST job and artifact had to be named specifically
+to automatically extract report data and show it in the merge request widget.
+While these old job definitions are still maintained, they have been deprecated
+and may be removed in the next major release, GitLab 12.0.
+You are advised to update your current `.gitlab-ci.yml` configuration to reflect that change.
+
+For GitLab 11.4 and earlier, the job should look like:
+
+```yaml
+dast:
+ image: registry.gitlab.com/gitlab-org/security-products/zaproxy
+ variables:
+ website: "https://example.com"
+ allow_failure: true
+ script:
+ - mkdir /zap/wrk/
+ - /zap/zap-baseline.py -J gl-dast-report.json -t $website || true
+ - cp /zap/wrk/gl-dast-report.json .
+ artifacts:
+ paths: [gl-dast-report.json]
+```
[ee]: https://about.gitlab.com/pricing/
diff --git a/doc/ci/examples/deployment/composer-npm-deploy.md b/doc/ci/examples/deployment/composer-npm-deploy.md
index 55ff131efaa..36358515b84 100644
--- a/doc/ci/examples/deployment/composer-npm-deploy.md
+++ b/doc/ci/examples/deployment/composer-npm-deploy.md
@@ -33,9 +33,9 @@ before_script:
In this particular case, the `npm deploy` script is a Gulp script that does the following:
1. Compile CSS & JS
-2. Create sprites
-3. Copy various assets (images, fonts) around
-4. Replace some strings
+1. Create sprites
+1. Copy various assets (images, fonts) around
+1. Replace some strings
All these operations will put all files into a `build` folder, which is ready to be deployed to a live server.
@@ -62,10 +62,10 @@ before_script:
In order, this means that:
-1. We check if the `ssh-agent` is available and we install it if it's not;
-2. We create the `~/.ssh` folder;
-3. We make sure we're running bash;
-4. We disable host checking (we don't ask for user accept when we first connect to a server; and since every job will equal a first connect, we kind of need this)
+1. We check if the `ssh-agent` is available and we install it if it's not.
+1. We create the `~/.ssh` folder.
+1. We make sure we're running bash.
+1. We disable host checking (we don't ask for user confirmation when we first connect to a server, and since every job counts as a first connection, we kind of need this).
And this is basically all you need in the `before_script` section.
@@ -91,11 +91,11 @@ stage_deploy:
Here's the breakdown:
1. `only:dev` means that this build will run only when something is pushed to the `dev` branch. You can remove this block completely and have everything run on every push (but this is probably something you don't want)
-2. `ssh-add ...` we will add that private key you added on the web UI to the docker container
-3. We will connect via `ssh` and create a new `_tmp` folder
-4. We will connect via `scp` and upload the `build` folder (which was generated by a `npm` script) to our previously created `_tmp` folder
-5. We will connect again to `ssh` and move the `live` folder to an `_old` folder, then move `_tmp` to `live`.
-6. We connect to ssh and remove the `_old` folder
+1. `ssh-add ...` adds the private key you added in the web UI to the Docker container.
+1. We connect via `ssh` and create a new `_tmp` folder.
+1. We connect via `scp` and upload the `build` folder (which was generated by an `npm` script) to our previously created `_tmp` folder.
+1. We connect again via `ssh` and move the `live` folder to an `_old` folder, then move `_tmp` to `live`.
+1. We connect via `ssh` one last time and remove the `_old` folder.
What's the deal with the artifacts? We just tell GitLab CI to keep the `build` directory (later on, you can download that as needed).
diff --git a/doc/ci/examples/test-and-deploy-python-application-to-heroku.md b/doc/ci/examples/test-and-deploy-python-application-to-heroku.md
index 087b317ab73..ec0b5aaed09 100644
--- a/doc/ci/examples/test-and-deploy-python-application-to-heroku.md
+++ b/doc/ci/examples/test-and-deploy-python-application-to-heroku.md
@@ -40,15 +40,17 @@ production:
```
This project has three jobs:
-1. `test` - used to test Django application,
-2. `staging` - used to automatically deploy staging environment every push to `master` branch
-3. `production` - used to automatically deploy production environment for every created tag
+
+- `test` - used to test the Django application.
+- `staging` - used to automatically deploy the staging environment on every push to the `master` branch.
+- `production` - used to automatically deploy the production environment for every created tag.
## Store API keys
You'll need to create two variables in `Settings > CI/CD > Variables` on your GitLab project settings:
-1. `HEROKU_STAGING_API_KEY` - Heroku API key used to deploy staging app,
-2. `HEROKU_PRODUCTION_API_KEY` - Heroku API key used to deploy production app.
+
+- `HEROKU_STAGING_API_KEY` - Heroku API key used to deploy staging app.
+- `HEROKU_PRODUCTION_API_KEY` - Heroku API key used to deploy production app.
Find your Heroku API key in [Manage Account](https://dashboard.heroku.com/account).
diff --git a/doc/ci/examples/test-and-deploy-ruby-application-to-heroku.md b/doc/ci/examples/test-and-deploy-ruby-application-to-heroku.md
index 7f9ab1f3a5e..33a353f17f5 100644
--- a/doc/ci/examples/test-and-deploy-ruby-application-to-heroku.md
+++ b/doc/ci/examples/test-and-deploy-ruby-application-to-heroku.md
@@ -36,16 +36,17 @@ production:
```
This project has three jobs:
-1. `test` - used to test Rails application,
-2. `staging` - used to automatically deploy staging environment every push to `master` branch
-3. `production` - used to automatically deploy production environment for every created tag
+
+- `test` - used to test the Rails application.
+- `staging` - used to automatically deploy the staging environment on every push to the `master` branch.
+- `production` - used to automatically deploy the production environment for every created tag.
## Store API keys
You'll need to create two variables in your project's **Settings > CI/CD > Variables**:
-1. `HEROKU_STAGING_API_KEY` - Heroku API key used to deploy staging app,
-2. `HEROKU_PRODUCTION_API_KEY` - Heroku API key used to deploy production app.
+
+- `HEROKU_STAGING_API_KEY` - Heroku API key used to deploy staging app.
+- `HEROKU_PRODUCTION_API_KEY` - Heroku API key used to deploy production app.
Find your Heroku API key in [Manage Account](https://dashboard.heroku.com/account).
diff --git a/doc/ci/quick_start/README.md b/doc/ci/quick_start/README.md
index 636117536a2..bdc593493ea 100644
--- a/doc/ci/quick_start/README.md
+++ b/doc/ci/quick_start/README.md
@@ -168,7 +168,7 @@ can be found at <https://docs.gitlab.com/runner/>.
In order to have a functional Runner you need to follow two steps:
1. [Install it][runner-install]
-2. [Configure it](../runners/README.md#registering-a-specific-runner)
+1. [Configure it](../runners/README.md#registering-a-specific-runner)
Follow the links above to set up your own Runner or use a Shared Runner as
described in the next section.
diff --git a/doc/ci/runners/README.md b/doc/ci/runners/README.md
index 2a179bfbbf0..9c9ea651678 100644
--- a/doc/ci/runners/README.md
+++ b/doc/ci/runners/README.md
@@ -138,9 +138,9 @@ project without requiring your authorization, so use it with caution.
An admin can enable/disable a specific Runner for projects:
1. Navigate to **Admin > Runners**
-2. Find the Runner you wish to enable/disable
-3. Click edit on the Runner
-4. Click **Enable** or **Disable** on the project
+1. Find the Runner you wish to enable/disable
+1. Click edit on the Runner
+1. Click **Enable** or **Disable** on the project
## Protected Runners
diff --git a/doc/ci/yaml/README.md b/doc/ci/yaml/README.md
index 5deeb2b0133..aab5f268ef9 100644
--- a/doc/ci/yaml/README.md
+++ b/doc/ci/yaml/README.md
@@ -103,7 +103,7 @@ rspec:
- $RSPEC
```
-In the example above, the `rspec` job inherits from the `.tests` template job.
+In the example above, the `rspec` job inherits from the `.tests` template job.
GitLab will perform a reverse deep merge based on the keys. GitLab will:
- Merge the `rspec` contents into `.tests` recursively.
@@ -1337,6 +1337,81 @@ concatenated into a single file. Use a filename pattern (`junit: rspec-*.xml`),
an array of filenames (`junit: [rspec-1.xml, rspec-2.xml, rspec-3.xml]`), or a
combination thereof (`junit: [rspec.xml, test-results/TEST-*.xml]`).
+#### `artifacts:reports:codequality` **[STARTER]**
+
+> Introduced in GitLab 11.5. Requires GitLab Runner 11.5 and above.
+
+The `codequality` report collects [CodeQuality issues](https://docs.gitlab.com/ee/user/project/merge_requests/code_quality.html)
+as artifacts.
+
+The collected Code Quality report will be uploaded to GitLab as an artifact and will
+be automatically shown in merge requests.
+
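+A job producing this report declares it under `artifacts:reports:codequality`, for
+example as in the sketch below (the `script` command is a placeholder for whatever
+generates `gl-code-quality-report.json`; the other report types in this section are
+declared the same way, each under its own key):
+
+```yaml
+code_quality:
+  script:
+    - ./run-code-quality-analysis  # placeholder: must write gl-code-quality-report.json
+  artifacts:
+    reports:
+      codequality: gl-code-quality-report.json
+```
+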
+#### `artifacts:reports:sast` **[ULTIMATE]**
+
+> Introduced in GitLab 11.5. Requires GitLab Runner 11.5 and above.
+
+The `sast` report collects [SAST vulnerabilities](https://docs.gitlab.com/ee/user/project/merge_requests/sast.html)
+as artifacts.
+
+The collected SAST report will be uploaded to GitLab as an artifact and will
+be automatically shown in merge requests and the pipeline view, and will provide data
+for security dashboards.
+
+#### `artifacts:reports:dependency_scanning` **[ULTIMATE]**
+
+> Introduced in GitLab 11.5. Requires GitLab Runner 11.5 and above.
+
+The `dependency_scanning` report collects [Dependency Scanning vulnerabilities](https://docs.gitlab.com/ee/user/project/merge_requests/dependency_scanning.html)
+as artifacts.
+
+The collected Dependency Scanning report will be uploaded to GitLab as an artifact and will
+be automatically shown in merge requests and the pipeline view, and will provide data
+for security dashboards.
+
+#### `artifacts:reports:container_scanning` **[ULTIMATE]**
+
+> Introduced in GitLab 11.5. Requires GitLab Runner 11.5 and above.
+
+The `container_scanning` report collects [Container Scanning vulnerabilities](https://docs.gitlab.com/ee/user/project/merge_requests/container_scanning.html)
+as artifacts.
+
+The collected Container Scanning report will be uploaded to GitLab as an artifact and will
+be automatically shown in merge requests and the pipeline view, and will provide data
+for security dashboards.
+
+#### `artifacts:reports:dast` **[ULTIMATE]**
+
+> Introduced in GitLab 11.5. Requires GitLab Runner 11.5 and above.
+
+The `dast` report collects [DAST vulnerabilities](https://docs.gitlab.com/ee/user/project/merge_requests/dast.html)
+as artifacts.
+
+The collected DAST report will be uploaded to GitLab as an artifact and will
+be automatically shown in merge requests and the pipeline view, and will provide data
+for security dashboards.
+
+#### `artifacts:reports:license_management` **[ULTIMATE]**
+
+> Introduced in GitLab 11.5. Requires GitLab Runner 11.5 and above.
+
+The `license_management` report collects [Licenses](https://docs.gitlab.com/ee/user/project/merge_requests/license_management.html)
+as artifacts.
+
+The collected License Management report will be uploaded to GitLab as an artifact and will
+be automatically shown in merge requests and the pipeline view, and will provide data
+for security dashboards.
+
+#### `artifacts:reports:performance` **[PREMIUM]**
+
+> Introduced in GitLab 11.5. Requires GitLab Runner 11.5 and above.
+
+The `performance` report collects [Performance metrics](https://docs.gitlab.com/ee//user/project/merge_requests/browser_performance_testing.html)
+as artifacts.
+
+The collected Performance report will be uploaded to GitLab as an artifact and will
+be automatically shown in merge requests.
+
## `dependencies`
> Introduced in GitLab 8.6 and GitLab Runner v1.1.1.
diff --git a/doc/development/README.md b/doc/development/README.md
index 14dfe8eb1f3..0080c34c056 100644
--- a/doc/development/README.md
+++ b/doc/development/README.md
@@ -51,6 +51,7 @@ description: 'Learn how to contribute to GitLab.'
- [Prometheus metrics](prometheus_metrics.md)
- [Guidelines for reusing abstractions](reusing_abstractions.md)
- [DeclarativePolicy framework](policies.md)
+- [Switching to Rails 5](switching_to_rails5.md)
## Performance guides
diff --git a/doc/development/adding_database_indexes.md b/doc/development/adding_database_indexes.md
index ea6f14da3b9..d1d2b8c4907 100644
--- a/doc/development/adding_database_indexes.md
+++ b/doc/development/adding_database_indexes.md
@@ -28,9 +28,9 @@ to filter data by. Instead one should ask themselves the following questions:
1. Can I write my query in such a way that it re-uses as many existing indexes
as possible?
-2. Is the data going to be large enough that using an index will actually be
+1. Is the data going to be large enough that using an index will actually be
faster than just iterating over the rows in the table?
-3. Is the overhead of maintaining the index worth the reduction in query
+1. Is the overhead of maintaining the index worth the reduction in query
timings?
We'll explore every question in detail below.
@@ -62,7 +62,7 @@ In short:
1. Try to write your query in such a way that it re-uses as many existing
indexes as possible.
-2. Run the query using `EXPLAIN ANALYZE` and study the output to find the most
+1. Run the query using `EXPLAIN ANALYZE` and study the output to find the most
   ideal query, as in the sketch below.
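+
+For example, in a `psql` console you can prefix a candidate query with
+`EXPLAIN ANALYZE` to see the chosen plan and the actual timings (the table and
+column below are only an illustration):
+
+```sql
+EXPLAIN ANALYZE
+SELECT id
+FROM projects
+WHERE visibility_level = 20;
+```
+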
## Data Size
diff --git a/doc/development/build_test_package.md b/doc/development/build_test_package.md
index 439d228baef..c5f6adfeaeb 100644
--- a/doc/development/build_test_package.md
+++ b/doc/development/build_test_package.md
@@ -4,12 +4,13 @@ While developing a new feature or modifying an existing one, it is helpful if an
installable package (or a docker image) containing those changes is available
for testing. For this very purpose, a manual job is provided in the GitLab CI/CD
pipeline that can be used to trigger a pipeline in the omnibus-gitlab repository
-that will create
-1. A deb package for Ubuntu 16.04, available as a build artifact, and
-2. A docker image, which is pushed to [Omnibus GitLab's container
-registry](https://gitlab.com/gitlab-org/omnibus-gitlab/container_registry)
-(images titled `gitlab-ce` and `gitlab-ee` respectively and image tag is the
-commit which triggered the pipeline).
+that will create:
+
+- A deb package for Ubuntu 16.04, available as a build artifact, and
+- A docker image, which is pushed to [Omnibus GitLab's container
+ registry](https://gitlab.com/gitlab-org/omnibus-gitlab/container_registry)
+ (images titled `gitlab-ce` and `gitlab-ee` respectively and image tag is the
+ commit which triggered the pipeline).
When you push a commit to either the gitlab-ce or gitlab-ee project, the
pipeline for that commit will have a `build-package` manual action you can
diff --git a/doc/development/code_review.md b/doc/development/code_review.md
index 96f3861f8d7..52710e54e86 100644
--- a/doc/development/code_review.md
+++ b/doc/development/code_review.md
@@ -9,11 +9,11 @@ code is effective, understandable, and maintainable.
## Getting your merge request reviewed, approved, and merged
-You are strongly encouraged to get your code **reviewed** by a
+You are strongly encouraged to get your code **reviewed** by a
[reviewer](https://about.gitlab.com/handbook/engineering/#reviewer) as soon as
there is any code to review, to get a second opinion on the chosen solution and
implementation, and an extra pair of eyes looking for bugs, logic problems, or
-uncovered edge cases. The reviewer can be from a different team, but it is
+uncovered edge cases. The reviewer can be from a different team, but it is
recommended to pick someone who knows the domain well. You can read more about the
importance of involving reviewer(s) in the section on the responsibility of the author below.
@@ -23,7 +23,7 @@ one of the [Merge request coaches][team].
Depending on the areas your merge request touches, it must be **approved** by one
or more [maintainers](https://about.gitlab.com/handbook/engineering/#maintainer):
-For approvals, we use the approval functionality found in the merge request
+For approvals, we use the approval functionality found in the merge request
widget. Reviewers can add their approval by [approving additionally](https://docs.gitlab.com/ee/user/project/merge_requests/merge_request_approvals.html#adding-or-removing-an-approval).
1. If your merge request includes backend changes [^1], it must be
@@ -42,43 +42,43 @@ widget. Reviewers can add their approval by [approving additionally](https://doc
Getting your merge request **merged** also requires a maintainer. If it requires
more than one approval, the last maintainer to review and approve it will also merge it.
-As described in the section on the responsibility of the maintainer below, you
-are recommended to get your merge request approved and merged by maintainer(s)
+As described in the section on the responsibility of the maintainer below, you
+are recommended to get your merge request approved and merged by maintainer(s)
from other teams than your own.
### The responsibility of the merge request author
The responsibility to find the best solution and implement it lies with the
-merge request author.
+merge request author.
-Before assigning a merge request to a maintainer for approval and merge, they
-should be confident that it actually solves the problem it was meant to solve,
-that it does so in the most appropriate way, that it satisfies all requirements,
-and that there are no remaining bugs, logical problems, or uncovered edge cases.
-The merge request should also have a completed task list in its description and
+Before assigning a merge request to a maintainer for approval and merge, they
+should be confident that it actually solves the problem it was meant to solve,
+that it does so in the most appropriate way, that it satisfies all requirements,
+and that there are no remaining bugs, logical problems, or uncovered edge cases.
+The merge request should also have a completed task list in its description and
a passing CI pipeline to avoid unnecessary back and forth.
To reach the required level of confidence in their solution, an author is expected
-to involve other people in the investigation and implementation processes as
+to involve other people in the investigation and implementation processes as
appropriate.
-They are encouraged to reach out to domain experts to discuss different solutions
-or get an implementation reviewed, to product managers and UX designers to clear
-up confusion or verify that the end result matches what they had in mind, to
+They are encouraged to reach out to domain experts to discuss different solutions
+or get an implementation reviewed, to product managers and UX designers to clear
+up confusion or verify that the end result matches what they had in mind, to
database specialists to get input on the data model or specific queries, or to
any other developer to get an in-depth review of the solution.
If an author is unsure if a merge request needs a domain expert's opinion, that's
-usually a pretty good sign that it does, since without it the required level of
+usually a pretty good sign that it does, since without it the required level of
confidence in their solution will not have been reached.
### The responsibility of the maintainer
Maintainers are responsible for the overall health, quality, and consistency of
-the GitLab codebase, across domains and product areas.
+the GitLab codebase, across domains and product areas.
-Consequently, their reviews will focus primarily on things like overall
-architecture, code organization, separation of concerns, tests, DRYness,
+Consequently, their reviews will focus primarily on things like overall
+architecture, code organization, separation of concerns, tests, DRYness,
consistency, and readability.
Since a maintainer's job only depends on their knowledge of the overall GitLab
@@ -87,12 +87,12 @@ merge requests from any team and in any product area.
In fact, authors are recommended to get their merge requests merged by maintainers
from other teams than their own, to ensure that all code across GitLab is consistent
-and can be easily understood by all contributors, from both inside and outside the
+and can be easily understood by all contributors, from both inside and outside the
company, without requiring team-specific expertise.
Maintainers will do their best to also review the specifics of the chosen solution
before merging, but as they are not necessarily domain experts, they may be poorly
-placed to do so without an unreasonable investment of time. In those cases, they
+placed to do so without an unreasonable investment of time. In those cases, they
will defer to the judgment of the author and earlier reviewers and involved domain
experts, in favor of focusing on their primary responsibilities.
@@ -100,7 +100,7 @@ If a developer who happens to also be a maintainer was involved in a merge reque
as a domain expert and/or reviewer, it is recommended that they are not also picked
as the maintainer to ultimately approve and merge it.
-Maintainers should check before merging if the merge request is approved by the
+Maintainers should check before merging if the merge request is approved by the
required approvers.
## Best practices
@@ -230,41 +230,41 @@ Enterprise Edition instance. This has some implications:
1. **Query changes** should be tested to ensure that they don't result in worse
performance at the scale of GitLab.com:
1. Generating large quantities of data locally can help.
- 2. Asking for query plans from GitLab.com is the most reliable way to validate
+ 1. Asking for query plans from GitLab.com is the most reliable way to validate
these.
-2. **Database migrations** must be:
+1. **Database migrations** must be:
1. Reversible.
- 2. Performant at the scale of GitLab.com - ask a maintainer to test the
+ 1. Performant at the scale of GitLab.com - ask a maintainer to test the
migration on the staging environment if you aren't sure.
- 3. Categorised correctly:
+ 1. Categorised correctly:
- Regular migrations run before the new code is running on the instance.
- [Post-deployment migrations](post_deployment_migrations.md) run _after_
the new code is deployed, when the instance is configured to do that.
- [Background migrations](background_migrations.md) run in Sidekiq, and
should only be done for migrations that would take an extreme amount of
time at GitLab.com scale.
-3. **Sidekiq workers**
+1. **Sidekiq workers**
[cannot change in a backwards-incompatible way](sidekiq_style_guide.md#removing-or-renaming-queues):
1. Sidekiq queues are not drained before a deploy happens, so there will be
workers in the queue from the previous version of GitLab.
- 2. If you need to change a method signature, try to do so across two releases,
+ 1. If you need to change a method signature, try to do so across two releases,
and accept both the old and new arguments in the first of those.
- 3. Similarly, if you need to remove a worker, stop it from being scheduled in
+ 1. Similarly, if you need to remove a worker, stop it from being scheduled in
one release, then remove it in the next. This will allow existing jobs to
execute.
- 4. Don't forget, not every instance will upgrade to every intermediate version
+ 1. Don't forget, not every instance will upgrade to every intermediate version
(some people may go from X.1.0 to X.10.0, or even try bigger upgrades!), so
try to be liberal in accepting the old format if it is cheap to do so.
-4. **Cached values** may persist across releases. If you are changing the type a
+1. **Cached values** may persist across releases. If you are changing the type a
cached value returns (say, from a string or nil to an array), change the
cache key at the same time.
-5. **Settings** should be added as a
+1. **Settings** should be added as a
[last resort](https://about.gitlab.com/handbook/product/#convention-over-configuration).
If you're adding a new setting in `gitlab.yml`:
1. Try to avoid that, and add to `ApplicationSetting` instead.
- 2. Ensure that it is also
+ 1. Ensure that it is also
[added to Omnibus](https://docs.gitlab.com/omnibus/settings/gitlab.yml.html#adding-a-new-setting-to-gitlab-yml).
-6. **Filesystem access** can be slow, so try to avoid
+1. **Filesystem access** can be slow, so try to avoid
[shared files](shared_files.md) when an alternative solution is available.
### Credits
diff --git a/doc/development/diffs.md b/doc/development/diffs.md
index 4adae5dabe2..43fc125c21d 100644
--- a/doc/development/diffs.md
+++ b/doc/development/diffs.md
@@ -17,20 +17,20 @@ The diffs fetching process _limits_ single file diff sizes and the overall size
then persisted on `merge_request_diff_files` table.
Even though diffs larger than 10% of the value of `ApplicationSettings#diff_max_patch_bytes` are collapsed,
-we still keep them on Postgres. However, diff files larger than defined _safety limits_
+we still keep them on Postgres. However, diff files larger than defined _safety limits_
(see the [Diff limits section](#diff-limits)) are _not_ persisted in the database.
In order to present diffs information on the Merge Request diffs page, we:
1. Fetch all diff files from database `merge_request_diff_files`
-2. Fetch the _old_ and _new_ file blobs in batch to:
- 1. Highlight old and new file content
- 2. Know which viewer it should use for each file (text, image, deleted, etc)
- 3. Know if the file content changed
- 4. Know if it was stored externally
- 5. Know if it had storage errors
-3. If the diff file is cacheable (text-based), it's cached on Redis
-using `Gitlab::Diff::FileCollection::MergeRequestDiff`
+1. Fetch the _old_ and _new_ file blobs in batch to:
+ - Highlight old and new file content
+ - Know which viewer it should use for each file (text, image, deleted, etc)
+ - Know if the file content changed
+ - Know if it was stored externally
+ - Know if it had storage errors
+1. If the diff file is cacheable (text-based), it's cached on Redis
+ using `Gitlab::Diff::FileCollection::MergeRequestDiff`
### Note diffs
@@ -39,9 +39,9 @@ on `NoteDiffFile` (which is associated with the actual `DiffNote`). So instead
of hitting the repository every time we need the diff of the file, we:
1. Check whether we have the `NoteDiffFile#diff` persisted and use it
-2. Otherwise, if it's a current MR revision, use the persisted
-`MergeRequestDiffFile#diff`
-3. In the last scenario, go the the repository and fetch the diff
+1. Otherwise, if it's a current MR revision, use the persisted
+ `MergeRequestDiffFile#diff`
+1. In the last scenario, go to the repository and fetch the diff
## Diff limits
diff --git a/doc/development/ee_features.md b/doc/development/ee_features.md
index b6f053ff0e9..9aea03139ee 100644
--- a/doc/development/ee_features.md
+++ b/doc/development/ee_features.md
@@ -119,10 +119,20 @@ This also applies to views.
### EE features based on CE features
-For features that build on existing CE features, write a module in the
-`EE` namespace and `prepend` it in the CE class. This makes conflicts
-less likely to happen during CE to EE merges because only one line is
-added to the CE class - the `prepend` line.
+For features that build on existing CE features, write a module in the `EE`
+namespace and `prepend` it in the CE class, on the last line of the file that
+the class resides in. This makes conflicts less likely to happen during CE to EE
+merges because only one line is added to the CE class - the `prepend` line. For
+example, to prepend a module into the `User` class you would use the following
+approach:
+
+```ruby
+class User < ActiveRecord::Base
+ # ... lots of code here ...
+end
+
+User.prepend(EE::User)
+```
Since the module would require an `EE` namespace, the file should also be
put in an `ee/` sub-directory. For example, we want to extend the user model
@@ -231,7 +241,6 @@ the existing file:
```ruby
class ApplicationController < ActionController::Base
- prepend EE::ApplicationController
# ...
def after_sign_out_path_for(resource)
@@ -240,6 +249,8 @@ class ApplicationController < ActionController::Base
# ...
end
+
+ApplicationController.prepend(EE::ApplicationController)
```
And create a new file in the `ee/` sub-directory with the altered
@@ -533,8 +544,6 @@ module API
end
end
- prepend EE::API::MergeRequests
-
params :optional_params do
# CE specific params go here...
@@ -542,6 +551,8 @@ module API
end
end
end
+
+API::MergeRequests.prepend(EE::API::MergeRequests)
```
And then we could override it in EE module:
@@ -582,10 +593,10 @@ module API
authorize_read_builds!
end
end
-
- prepend EE::API::JobArtifacts
end
end
+
+API::JobArtifacts.prepend(EE::API::JobArtifacts)
```
And then we can follow regular object-oriented practices to override it:
@@ -626,8 +637,6 @@ module API
end
end
- prepend EE::API::MergeRequests
-
put ':id/merge_requests/:merge_request_iid/merge' do
merge_request = find_project_merge_request(params[:merge_request_iid])
@@ -639,6 +648,8 @@ module API
end
end
end
+
+API::MergeRequests.prepend(EE::API::MergeRequests)
```
Note that `update_merge_request_ee` doesn't do anything in CE, but
@@ -676,27 +687,37 @@ or not we really need to extend it from EE. For now we're not using it much.
Sometimes we need to use different arguments for a particular API route, and we
can't easily extend it with an EE module because Grape has different context in
-different blocks. In order to overcome this, we could use class methods from the
-API class.
+different blocks. In order to overcome this, we need to move the data to a class
+method that resides in a separate module or class. This allows us to extend that
+module or class before its data is used, without having to place a `prepend` in
+the middle of CE code.
For example, in one place we need to pass an extra argument to
`at_least_one_of` so that the API could consider an EE-only argument as the
-least argument. This is not quite beautiful but it's working:
+least argument. We would approach this as follows:
```ruby
+# api/merge_requests/parameters.rb
module API
class MergeRequests < Grape::API
- def self.update_params_at_least_one_of
- %i[
- assignee_id
- description
- ]
+ module Parameters
+ def self.update_params_at_least_one_of
+ %i[
+ assignee_id
+ description
+ ]
+ end
end
+ end
+end
- prepend EE::API::MergeRequests
+API::MergeRequests::Parameters.prepend(EE::API::MergeRequests::Parameters)
+# api/merge_requests.rb
+module API
+ class MergeRequests < Grape::API
params do
- at_least_one_of(*::API::MergeRequests.update_params_at_least_one_of)
+ at_least_one_of(*Parameters.update_params_at_least_one_of)
end
end
end
@@ -708,16 +729,18 @@ And then we could easily extend that argument in the EE class method:
module EE
module API
module MergeRequests
- extend ActiveSupport::Concern
+ module Parameters
+ extend ActiveSupport::Concern
- class_methods do
- extend ::Gitlab::Utils::Override
+ class_methods do
+ extend ::Gitlab::Utils::Override
- override :update_params_at_least_one_of
- def update_params_at_least_one_of
- super.push(*%i[
- squash
- ])
+ override :update_params_at_least_one_of
+ def update_params_at_least_one_of
+ super.push(*%i[
+ squash
+ ])
+ end
end
end
end
@@ -728,6 +751,78 @@ end
It could be annoying if we need this for a lot of routes, but it might be the
simplest solution right now.
+This approach can also be used when models define validations that depend on
+class methods. For example:
+
+```ruby
+# app/models/identity.rb
+class Identity < ActiveRecord::Base
+ def self.uniqueness_scope
+ [:provider]
+ end
+
+ prepend EE::Identity
+
+ validates :extern_uid,
+ allow_blank: true,
+ uniqueness: { scope: uniqueness_scope, case_sensitive: false }
+end
+
+# ee/app/models/ee/identity.rb
+module EE
+ module Identity
+ extend ActiveSupport::Concern
+
+ class_methods do
+ extend ::Gitlab::Utils::Override
+
+ def uniqueness_scope
+ [*super, :saml_provider_id]
+ end
+ end
+ end
+end
+```
+
+Instead of taking this approach, we would refactor our code into the following:
+
+```ruby
+# ee/app/models/ee/identity/uniqueness_scopes.rb
+module EE
+ module Identity
+ module UniquenessScopes
+ extend ActiveSupport::Concern
+
+ class_methods do
+ extend ::Gitlab::Utils::Override
+
+ def uniqueness_scope
+ [*super, :saml_provider_id]
+ end
+ end
+ end
+ end
+end
+
+# app/models/identity/uniqueness_scopes.rb
+class Identity < ActiveRecord::Base
+ module UniquenessScopes
+ def self.uniqueness_scope
+ [:provider]
+ end
+ end
+end
+
+Identity::UniquenessScopes.prepend(EE::Identity::UniquenessScopes)
+
+# app/models/identity.rb
+class Identity < ActiveRecord::Base
+ validates :extern_uid,
+ allow_blank: true,
+    uniqueness: { scope: Identity::UniquenessScopes.uniqueness_scope, case_sensitive: false }
+end
+```
+
### Code in `spec/`
When you're testing EE-only features, avoid adding examples to the
diff --git a/doc/development/fe_guide/style_guide_scss.md b/doc/development/fe_guide/style_guide_scss.md
index 48eb6d0a7d6..b09243598d5 100644
--- a/doc/development/fe_guide/style_guide_scss.md
+++ b/doc/development/fe_guide/style_guide_scss.md
@@ -183,9 +183,11 @@ Don't use ID selectors in CSS.
```
### Variables
+
Before adding a new variable for a color or a size, guarantee:
-1. There isn't already one
-2. There isn't a similar one we can use instead.
+
+- There isn't already one
+- There isn't a similar one we can use instead.
## Linting
diff --git a/doc/development/fe_guide/vue.md b/doc/development/fe_guide/vue.md
index f6cbd11042c..ccfd465531a 100644
--- a/doc/development/fe_guide/vue.md
+++ b/doc/development/fe_guide/vue.md
@@ -221,6 +221,14 @@ const vm = mountComponent(Component, data);
The main return value of a Vue component is the rendered output. In order to test the component we
need to test the rendered output. The [Vue][vue-test] guide to unit testing shows us exactly that:
+## Vue.js Expert Role
+
+One should apply to be a Vue.js expert by opening an MR when the merge requests they create and review show:
+
+- Deep understanding of Vue and Vuex reactivity
+- Vue and Vuex code are structured according to both official and our guidelines
+- Full understanding of testing a Vue and Vuex application
+- Vuex code follows the [documented pattern](./vuex.md#actions-pattern-request-and-receive-namespaces)
+- Knowledge about the existing Vue and Vuex applications and existing reusable components
+
[vue-docs]: http://vuejs.org/guide/index.html
[issue-boards]: https://gitlab.com/gitlab-org/gitlab-ce/tree/master/app/assets/javascripts/boards
diff --git a/doc/development/fe_guide/vuex.md b/doc/development/fe_guide/vuex.md
index f582f5da323..0f57835fb87 100644
--- a/doc/development/fe_guide/vuex.md
+++ b/doc/development/fe_guide/vuex.md
@@ -114,19 +114,21 @@ When a request is made we often want to show a loading state to the user.
Instead of creating an action to toggle the loading state and dispatch it in the component,
create:
+
1. An action `requestSomething`, to toggle the loading state
1. An action `receiveSomethingSuccess`, to handle the success callback
1. An action `receiveSomethingError`, to handle the error callback
1. An action `fetchSomething` to make the request.
1. In case your application does more than a `GET` request you can use these as examples:
- 1. `PUT`: `createSomething`
- 2. `POST`: `updateSomething`
- 3. `DELETE`: `deleteSomething`
+   - `PUT`: `updateSomething`
+   - `POST`: `createSomething`
+ - `DELETE`: `deleteSomething`
The component MUST only dispatch the `fetchNamespace` action. Actions namespaced with `request` or `receive` should not be called from the component.
The `fetch` action will be responsible for dispatching `requestNamespace`, `receiveNamespaceSuccess` and `receiveNamespaceError`, as in the sketch below.
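+
+A minimal sketch of this pattern for a hypothetical `users` resource could look like
+the following (it assumes an axios-based request via the `axios_utils` wrapper and a
+matching set of mutation types; all names are illustrative only):
+
+```javascript
+// store/actions.js — illustrative sketch only
+import axios from '~/lib/utils/axios_utils';
+import * as types from './mutation_types';
+
+export const requestUsers = ({ commit }) => commit(types.REQUEST_USERS);
+
+export const receiveUsersSuccess = ({ commit }, data) =>
+  commit(types.RECEIVE_USERS_SUCCESS, data);
+
+export const receiveUsersError = ({ commit }, error) =>
+  commit(types.RECEIVE_USERS_ERROR, error);
+
+export const fetchUsers = ({ state, dispatch }) => {
+  dispatch('requestUsers');
+
+  return axios
+    .get(state.endpoint)
+    .then(({ data }) => dispatch('receiveUsersSuccess', data))
+    .catch(error => dispatch('receiveUsersError', error));
+};
+```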
By following this pattern we guarantee:
+
1. All applications follow the same pattern, making it easier for anyone to maintain the code
1. All data in the application follows the same lifecycle pattern
1. Actions are contained and human friendly
@@ -297,12 +299,12 @@ export default {
```javascript
// component.vue
-
+
// bad
created() {
this.$store.commit('mutation');
}
-
+
// good
created() {
this.$store.dispatch('action');
diff --git a/doc/development/github_importer.md b/doc/development/github_importer.md
index 0d558583bb8..e860bde48dc 100644
--- a/doc/development/github_importer.md
+++ b/doc/development/github_importer.md
@@ -99,8 +99,8 @@ This worker will wrap up the import process by performing some housekeeping
Advancing stages is done in one of two ways:
-1. Scheduling the worker for the next stage directly.
-2. Scheduling a job for `Gitlab::GithubImport::AdvanceStageWorker` which will
+- Scheduling the worker for the next stage directly.
+- Scheduling a job for `Gitlab::GithubImport::AdvanceStageWorker` which will
advance the stage when all work of the current stage has been completed.
The first approach should only be used by workers that perform all their work in
@@ -147,7 +147,7 @@ We handle this by doing the following:
1. Once we hit the rate limit all jobs will automatically reschedule themselves
in such a way that they are not executed until the rate limit has been reset.
-2. We cache the mapping of GitHub users to GitLab users in Redis.
+1. We cache the mapping of GitHub users to GitLab users in Redis.
More information on user caching can be found below.
@@ -157,21 +157,21 @@ When mapping GitHub users to GitLab users we need to (in the worst case)
perform:
1. One API call to get the user's Email address.
-2. Two database queries to see if a corresponding GitLab user exists. One query
+1. Two database queries to see if a corresponding GitLab user exists. One query
will try to find the user based on the GitHub user ID, while the second query
is used to find the user using their GitHub Email address.
Because this process is quite expensive we cache the result of these lookups in
Redis. For every user looked up we store three keys:
-1. A Redis key mapping GitHub usernames to their Email addresses.
-2. A Redis key mapping a GitHub Email addresses to a GitLab user ID.
-3. A Redis key mapping a GitHub user ID to GitLab user ID.
+- A Redis key mapping GitHub usernames to their Email addresses.
+- A Redis key mapping a GitHub Email address to a GitLab user ID.
+- A Redis key mapping a GitHub user ID to a GitLab user ID.
There are two types of lookups we cache:
-1. A positive lookup, meaning we found a GitLab user ID.
-2. A negative lookup, meaning we didn't find a GitLab user ID. Caching this
+- A positive lookup, meaning we found a GitLab user ID.
+- A negative lookup, meaning we didn't find a GitLab user ID. Caching this
prevents us from performing the same work for users that we know don't exist
in our GitLab database.
diff --git a/doc/development/instrumentation.md b/doc/development/instrumentation.md
index 7761f65d78a..bef166f2aec 100644
--- a/doc/development/instrumentation.md
+++ b/doc/development/instrumentation.md
@@ -117,11 +117,11 @@ The block is executed and the execution time is stored as a set of fields in the
currently running transaction. If no transaction is present the block is yielded
without measuring anything.
-3 values are measured for a block:
+Three values are measured for a block:
-1. The real time elapsed, stored in NAME_real_time.
-2. The CPU time elapsed, stored in NAME_cpu_time.
-3. The call count, stored in NAME_call_count.
+- The real time elapsed, stored in NAME_real_time.
+- The CPU time elapsed, stored in NAME_cpu_time.
+- The call count, stored in NAME_call_count.
Both the real and CPU timings are measured in milliseconds.
diff --git a/doc/development/new_fe_guide/development/performance.md b/doc/development/new_fe_guide/development/performance.md
index 244dfb3756f..5ccd5357314 100644
--- a/doc/development/new_fe_guide/development/performance.md
+++ b/doc/development/new_fe_guide/development/performance.md
@@ -2,7 +2,7 @@
## Monitoring
-We have a performance dashboard available in one of our [grafana instances](https://performance.gprd.gitlab.com/dashboard/db/sitespeed-page-summary?orgId=1). This dashboard automatically aggregates metric data from [sitespeed.io](https://sitespeed.io) every 6 hours. These changes are displayed after a set number of pages are aggregated.
+We have a performance dashboard available in one of our [grafana instances](https://dashboards.gitlab.net/d/1EBTz3Dmz/sitespeed-page-summary?orgId=1). This dashboard automatically aggregates metric data from [sitespeed.io](https://sitespeed.io) every 6 hours. These changes are displayed after a set number of pages are aggregated.
These pages can be found inside a text file in the gitlab-build-images [repository](https://gitlab.com/gitlab-org/gitlab-build-images) called [gitlab.txt](https://gitlab.com/gitlab-org/gitlab-build-images/blob/master/scripts/gitlab.txt)
Any frontend engineer can contribute to this dashboard. They can contribute by adding or removing urls of pages from this text file. Please have a [frontend monitoring expert](https://about.gitlab.com/team) review your changes before assigning to a maintainer of the `gitlab-build-images` project. The changes will go live on the next scheduled run after the changes are merged into `master`.
diff --git a/doc/development/new_fe_guide/development/testing.md b/doc/development/new_fe_guide/development/testing.md
index a9223ac6b0f..082acbedcd2 100644
--- a/doc/development/new_fe_guide/development/testing.md
+++ b/doc/development/new_fe_guide/development/testing.md
@@ -1,30 +1,247 @@
# Overview of Frontend Testing
-## Types of tests in our codebase
+Tests relevant for frontend development can be found in two places:
+
+- `spec/javascripts/` which are run by Karma and contain
+ - [frontend unit tests](#frontend-unit-tests)
+ - [frontend component tests](#frontend-component-tests)
+ - [frontend integration tests](#frontend-integration-tests)
+- `spec/features/` which are run by RSpec and contain
+ - [feature tests](#feature-tests)
+
+In addition, there used to be feature tests in `features/`, run by Spinach.
+These were removed from our codebase in May 2018 ([#23036](https://gitlab.com/gitlab-org/gitlab-ce/issues/23036)).
+
+See also:
-* **RSpec**
- * **[Ruby unit tests](#ruby-unit-tests-spec-rb)** for models, controllers, helpers, etc. (`/spec/**/*.rb`)
- * **[Full feature tests](#full-feature-tests-spec-features-rb)** (`/spec/features/**/*.rb`)
-* **[Karma](#karma-tests-spec-javascripts-js)** (`/spec/javascripts/**/*.js`)
-* <s>Spinach</s> ā€” These have been removed from our codebase in May 2018. (`/features/`)
+- [old testing guide](../../testing_guide/frontend_testing.html)
+- [notes on testing Vue components](../../fe_guide/vue.html#testing-vue-components)
-## RSpec: Ruby unit tests `/spec/**/*.rb`
+## Frontend unit tests
-These tests are meant to unit test the ruby models, controllers and helpers.
+Unit tests are on the lowest abstraction level and typically test functionality that is not directly perceivable by a user.
-### When do we write/update these tests?
+### When to use unit tests
-Whenever we create or modify any Ruby models, controllers or helpers we add/update corresponding tests.
+<details>
+ <summary>exported functions and classes</summary>
+ Anything that is exported can be reused at various places in a way you have no control over.
+ Therefore it is necessary to document the expected behavior of the public interface with tests.
+</details>
----
+<details>
+ <summary>Vuex actions</summary>
+ Any Vuex action needs to work in a consistent way independent of the component it is triggered from.
+</details>
-## RSpec: Full feature tests `/spec/features/**/*.rb`
+<details>
+ <summary>Vuex mutations</summary>
+ For complex Vuex mutations it helps to identify the source of a problem by separating the tests from other parts of the Vuex store.
+</details>
-Full feature tests will load a full app environment and allow us to test things like rendering DOM, interacting with links and buttons, testing the outcome of those interactions through multiple pages if necessary. These are also called end-to-end tests but should not be confused with QA end-to-end tests (`package-and-qa` manual pipeline job).
+### When *not* to use unit tests
-### When do we write/update these tests?
+<details>
+ <summary>non-exported functions or classes</summary>
+ Anything that is not exported from a module can be considered private or an implementation detail and doesn't need to be tested.
+</details>
-When we add a new feature, we write at least two tests covering the success and the failure scenarios.
+<details>
+  <summary>constants</summary>
+  Testing the value of a constant would mean copying it.
+  This results in extra effort without additional confidence that the value is correct.
+</details>
+
+<details>
+ <summary>Vue components</summary>
+ Computed properties, methods, and lifecycle hooks can be considered an implementation detail of components and don't need to be tested.
+ They are implicitly covered by component tests.
+ The <a href="https://vue-test-utils.vuejs.org/guides/#getting-started">official Vue guidelines</a> suggest the same.
+</details>
+
+### What to mock in unit tests
+
+<details>
+ <summary>state of the class under test</summary>
+ Modifying the state of the class under test directly rather than using methods of the class avoids side-effects in test setup.
+</details>
+
+<details>
+ <summary>other exported classes</summary>
+ Every class needs to be tested in isolation to prevent test scenarios from growing exponentially.
+</details>
+
+<details>
+ <summary>single DOM elements if passed as parameters</summary>
+ For tests that only operate on single DOM elements rather than a whole page, creating these elements is cheaper than loading a whole HTML fixture.
+</details>
+
+<details>
+ <summary>all server requests</summary>
+ When running frontend unit tests, the backend may not be reachable.
+ Therefore all outgoing requests need to be mocked.
+</details>
+
+<details>
+ <summary>asynchronous background operations</summary>
+ Background operations cannot be stopped or waited on, so they will continue running in the following tests and cause side effects.
+</details>
+
+### What *not* to mock in unit tests
+
+<details>
+ <summary>non-exported functions or classes</summary>
+ Everything that is not exported can be considered private to the module and will be implicitly tested via the exported classes / functions.
+</details>
+
+<details>
+ <summary>methods of the class under test</summary>
+ By mocking methods of the class under test, the mocks will be tested and not the real methods.
+</details>
+
+<details>
+ <summary>utility functions (pure functions, or those that only modify parameters)</summary>
+ If a function has no side effects because it has no state, it is safe to not mock it in tests.
+</details>
+
+<details>
+ <summary>full HTML pages</summary>
+ Loading the HTML of a full page slows down tests, so it should be avoided in unit tests.
+</details>
+
+## Frontend component tests
+
+Component tests cover the state of a single component that is perceivable by a user depending on external signals such as user input, events fired from other components, or application state.
+
+### When to use component tests
+
+- Vue components
+
+### When *not* to use component tests
+
+<details>
+ <summary>Vue applications</summary>
+ Vue applications may contain many components.
+ Testing them on a component level requires too much effort.
+ Therefore they are tested on frontend integration level.
+</details>
+
+<details>
+  <summary>HAML templates</summary>
+  HAML templates contain only markup and no frontend-side logic.
+  Therefore they are not complete components.
+</details>
+
+### What to mock in component tests
+
+<details>
+ <summary>DOM</summary>
+ Operating on the real DOM is significantly slower than on the virtual DOM.
+</details>
+
+<details>
+ <summary>properties and state of the component under test</summary>
+ Similarly to testing classes, modifying the properties directly (rather than relying on methods of the component) avoids side-effects.
+</details>
+
+<details>
+ <summary>Vuex store</summary>
+ To avoid side effects and keep component tests simple, Vuex stores are replaced with mocks.
+</details>
+
+<details>
+ <summary>all server requests</summary>
+ Similar to unit tests, when running component tests, the backend may not be reachable.
+ Therefore all outgoing requests need to be mocked.
+</details>
+
+<details>
+ <summary>asynchronous background operations</summary>
+ Similar to unit tests, background operations cannot be stopped or waited on, so they will continue running in the following tests and cause side effects.
+</details>
+
+<details>
+ <summary>child components</summary>
+ Every component is tested individually, so child components are mocked.
+ See also <a href="https://vue-test-utils.vuejs.org/api/#shallowmount">shallowMount()</a>
+</details>
+
+### What *not* to mock in component tests
+
+<details>
+ <summary>methods or computed properties of the component under test</summary>
+ By mocking part of the component under test, the mocks will be tested and not the real component.
+</details>
+
+<details>
+  <summary>functions and classes independent from Vue</summary>
+  All plain JavaScript code is already covered by unit tests and does not need to be mocked in component tests.
+</details>
+
+## Frontend integration tests
+
+Integration tests cover the interaction between all components on a single page.
+Their abstraction level is comparable to how a user would interact with the UI.
+
+### When to use integration tests
+
+<details>
+ <summary>page bundles (<code>index.js</code> files in <code>app/assets/javascripts/pages/</code>)</summary>
+ Testing the page bundles ensures the corresponding frontend components integrate well.
+</details>
+
+<details>
+ <summary>Vue applications outside of page bundles</summary>
+ Testing Vue applications as a whole ensures the corresponding frontend components integrate well.
+</details>
+
+### What to mock in integration tests
+
+<details>
+ <summary>HAML views (use fixtures instead)</summary>
+ Rendering HAML views requires a Rails environment including a running database which we cannot rely on in frontend tests.
+</details>
+
+<details>
+  <summary>all server requests</summary>
+  Similar to unit and component tests, when running integration tests, the backend may not be reachable.
+  Therefore all outgoing requests need to be mocked.
+</details>
+
+<details>
+ <summary>asynchronous background operations that are not perceivable on the page</summary>
+ Background operations that affect the page need to be tested on this level.
+ All other background operations cannot be stopped or waited on, so they will continue running in the following tests and cause side effects.
+</details>
+
+### What *not* to mock in integration tests
+
+<details>
+ <summary>DOM</summary>
+ Testing on the real DOM ensures our components work in the environment they are meant for.
+ Part of this will be delegated to <a href="https://gitlab.com/gitlab-org/quality/team-tasks/issues/45">cross-browser testing</a>.
+</details>
+
+<details>
+  <summary>properties or state of components</summary>
+  On this level, all tests can only perform actions a user would do.
+  For example, to change the state of a component, a click event would be fired.
+</details>
+
+<details>
+ <summary>Vuex stores</summary>
+ When testing the frontend code of a page as a whole, the interaction between Vue components and Vuex stores is covered as well.
+</details>
+
+## Feature tests
+
+In contrast to [frontend integration tests](#frontend-integration-tests), feature tests make requests against the real backend instead of using fixtures.
+This also implies that database queries are executed, which makes this category significantly slower.
+
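+As a minimal sketch, assuming a working local development environment and an illustrative spec path, a single feature test can be run directly with RSpec:
+
+```sh
+# Run one feature spec file (the path is an example, not a real file)
+bin/rspec spec/features/my_feature_spec.rb
+```
+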
+### When to use feature tests
+
+- use cases that require a backend and cannot be tested using fixtures
+- behavior that is not part of a page bundle but defined globally
### Relevant notes
@@ -48,33 +265,9 @@ wait_for_requests
expect(page).not_to have_selector('.card')
```
----
-
-## Karma tests `/spec/javascripts/**/*.js`
-
-These are the more frontend-focused, at the moment. They're **faster** than `rspec` and make for very quick testing of frontend components.
-
-### When do we write/update these tests?
-
-When we add/update a method/action/mutation to Vue or Vuex, we write karma tests to ensure the logic we wrote doesn't break. We should, however, refrain from writing tests that double-test Vue's internal features.
-
-### Relevant notes
-
-Karma tests are run against a virtual DOM.
+## Test helpers
-To populate the DOM, we can use fixtures to fake the generation of HTML instead of having Rails do that.
-
-Be sure to check the [best practices for karma tests](../../testing_guide/frontend_testing.html#best-practices).
-
-### Vue and Vuex
-
-Test as much as possible without double-testing Vue's internal features, as mentioned above.
-
-Make sure to test computedProperties, mutations, actions. Run the action and test that the proper mutations are committed.
-
-Also check these [notes on testing Vue components](../../fe_guide/vue.html#testing-vue-components).
-
-#### Vuex Helper: `testAction`
+### Vuex Helper: `testAction`
We have a helper available to make testing actions easier, as per [official documentation](https://vuex.vuejs.org/en/testing.html):
@@ -97,7 +290,7 @@ testAction(
Check an example in [spec/javascripts/ide/stores/actions_spec.js](https://gitlab.com/gitlab-org/gitlab-ce/blob/master/spec/javascripts/ide/stores/actions_spec.js).
-#### Vue Helper: `mountComponent`
+### Vue Helper: `mountComponent`
To make mounting a Vue component easier and more readable, we have a few helpers available in `spec/helpers/vue_mount_component_helper`.
@@ -133,6 +326,7 @@ afterEach(() => {
vm.$destroy();
});
```
+
## Testing with older browsers
Some regressions only affect a specific browser version. We can install and test in particular browsers with either Firefox or Browserstack using the following steps:
@@ -140,20 +334,21 @@ Some regressions only affect a specific browser version. We can install and test
### Browserstack
-[Browserstack](https://www.browserstack.com/) allows you to test more than 1200 mobile devices and browsers.
+[Browserstack](https://www.browserstack.com/) allows you to test more than 1200 mobile devices and browsers.
You can use it directly through the [live app](https://www.browserstack.com/live) or you can install the [chrome extension](https://chrome.google.com/webstore/detail/browserstack/nkihdmlheodkdfojglpcjjmioefjahjb) for easy access.
You can find the credentials on 1Password, under `frontendteam@gitlab.com`.
### Firefox
#### macOS
+
You can download any older version of Firefox from the releases FTP server, https://ftp.mozilla.org/pub/firefox/releases/
1. From the website, select a version, in this case `50.0.1`.
-2. Go to the mac folder.
-3. Select your preferred language, you will find the dmg package inside, download it.
-4. Drag and drop the application to any other folder but the `Applications` folder.
-5. Rename the application to something like `Firefox_Old`.
-6. Move the application to the `Applications` folder.
-7. Open up a terminal and run `/Applications/Firefox_Old.app/Contents/MacOS/firefox-bin -profilemanager` to create a new profile specific to that Firefox version.
-8. Once the profile has been created, quit the app, and run it again like normal. You now have a working older Firefox version.
+1. Go to the mac folder.
+1. Select your preferred language; you will find the dmg package inside. Download it.
+1. Drag and drop the application to any other folder but the `Applications` folder.
+1. Rename the application to something like `Firefox_Old`.
+1. Move the application to the `Applications` folder.
+1. Open up a terminal and run `/Applications/Firefox_Old.app/Contents/MacOS/firefox-bin -profilemanager` to create a new profile specific to that Firefox version.
+1. Once the profile has been created, quit the app, and run it again like normal. You now have a working older Firefox version.
diff --git a/doc/development/performance.md b/doc/development/performance.md
index e738f2b4b66..4cc2fdc9a58 100644
--- a/doc/development/performance.md
+++ b/doc/development/performance.md
@@ -9,17 +9,17 @@ The process of solving performance problems is roughly as follows:
1. Make sure there's an issue open somewhere (e.g., on the GitLab CE issue
tracker), create one if there isn't. See [#15607][#15607] for an example.
-2. Measure the performance of the code in a production environment such as
+1. Measure the performance of the code in a production environment such as
GitLab.com (see the [Tooling](#tooling) section below). Performance should be
measured over a period of _at least_ 24 hours.
-3. Add your findings based on the measurement period (screenshots of graphs,
+1. Add your findings based on the measurement period (screenshots of graphs,
timings, etc) to the issue mentioned in step 1.
-4. Solve the problem.
-5. Create a merge request, assign the "Performance" label and assign it to
+1. Solve the problem.
+1. Create a merge request, assign the "Performance" label and assign it to
[@yorickpeterse][yorickpeterse] for reviewing.
-6. Once a change has been deployed make sure to _again_ measure for at least 24
+1. Once a change has been deployed make sure to _again_ measure for at least 24
hours to see if your changes have any impact on the production environment.
-7. Repeat until you're done.
+1. Repeat until you're done.
When providing timings make sure to provide:
@@ -94,14 +94,14 @@ result of this should be used instead of the `Benchmark` module.
In short:
-1. Don't trust benchmarks you find on the internet.
-2. Never make claims based on just benchmarks, always measure in production to
+- Don't trust benchmarks you find on the internet.
+- Never make claims based on just benchmarks, always measure in production to
confirm your findings.
-3. X being N times faster than Y is meaningless if you don't know what impact it
+- X being N times faster than Y is meaningless if you don't know what impact it
will actually have on your production environment.
-4. A production environment is the _only_ benchmark that always tells the truth
+- A production environment is the _only_ benchmark that always tells the truth
(unless your performance monitoring systems are not set up correctly).
-5. If you must write a benchmark use the benchmark-ips Gem instead of Ruby's
+- If you must write a benchmark use the benchmark-ips Gem instead of Ruby's
`Benchmark` module.
## Profiling
diff --git a/doc/development/post_deployment_migrations.md b/doc/development/post_deployment_migrations.md
index cfc91539bee..5986efa9974 100644
--- a/doc/development/post_deployment_migrations.md
+++ b/doc/development/post_deployment_migrations.md
@@ -57,13 +57,13 @@ depends on this column being present while it's running. Normally you'd follow
these steps in such a case:
1. Stop the GitLab instance
-2. Run the migration removing the column
-3. Start the GitLab instance again
+1. Run the migration removing the column
+1. Start the GitLab instance again
Using post deployment migrations we can instead follow these steps:
1. Deploy a new version of GitLab while ignoring post deployment migrations
-2. Re-run `rake db:migrate` but without the environment variable set
+1. Re-run `rake db:migrate` but without the environment variable set
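+
+As a sketch for a source or development installation, and assuming the environment variable used to skip post deployment migrations is `SKIP_POST_DEPLOYMENT_MIGRATIONS`, the two steps could look like this:
+
+```sh
+# Step 1: deploy and migrate while skipping post deployment migrations
+SKIP_POST_DEPLOYMENT_MIGRATIONS=true bundle exec rake db:migrate
+
+# Step 2: run the remaining (post deployment) migrations
+bundle exec rake db:migrate
+```
+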
Here we don't need any downtime as the migration takes place _after_ a new
version (which doesn't depend on the column anymore) has been deployed.
diff --git a/doc/development/query_count_limits.md b/doc/development/query_count_limits.md
index 310e3faf61b..b3ecaf30d8a 100644
--- a/doc/development/query_count_limits.md
+++ b/doc/development/query_count_limits.md
@@ -8,8 +8,8 @@ in test environments we'll raise an error when this threshold is exceeded.
When a test fails because it executes more than 100 SQL queries there are two
solutions to this problem:
-1. Reduce the number of SQL queries that are executed.
-2. Whitelist the controller or API endpoint.
+- Reduce the number of SQL queries that are executed.
+- Whitelist the controller or API endpoint.
You should only resort to whitelisting when an existing controller or endpoint
is to blame as in this case reducing the number of SQL queries can take a lot of
diff --git a/doc/development/swapping_tables.md b/doc/development/swapping_tables.md
index 6b990ece72c..29cd6a43aff 100644
--- a/doc/development/swapping_tables.md
+++ b/doc/development/swapping_tables.md
@@ -8,8 +8,8 @@ Let's say you want to swap the table "events" with "events_for_migration". In
this case you need to follow 3 steps:
1. Rename "events" to "events_temporary"
-2. Rename "events_for_migration" to "events"
-3. Rename "events_temporary" to "events_for_migration"
+1. Rename "events_for_migration" to "events"
+1. Rename "events_temporary" to "events_for_migration"
Rails allows you to do this using the `rename_table` method:
diff --git a/doc/development/switching_to_rails5.md b/doc/development/switching_to_rails5.md
new file mode 100644
index 00000000000..c9a4ce1a1d1
--- /dev/null
+++ b/doc/development/switching_to_rails5.md
@@ -0,0 +1,27 @@
+# Switching to Rails 5
+
+GitLab recently switched to Rails 5. This is a big change (especially for backend development) and it introduces a couple of temporary inconveniences.
+
+## After the switch, I found a broken feature. What do I do?
+
+Many fixes and tweaks were done to make our codebase compatible with Rails 5, but it's possible that not all issues were found. If you find a bug, please create an issue and assign it the ~rails5 label.
+
+## It takes much longer to run CI pipelines that build GitLab. Why?
+
+We are temporarily running CI pipelines with both Rails 4 and Rails 5 to ensure we remain compatible with Rails 4 in case we need to revert to it (this can double the duration of CI pipelines).
+
+We might revert to Rails 4 if we find a major issue we are unable to fix quickly.
+
+Once we are sure we can stay with Rails 5, we will stop running CI pipelines with Rails 4.
+
+## Can I skip running Rails 4 tests?
+
+If you are sure that your merge request doesn't introduce any incompatibility, you can just include `norails4` anywhere in your branch name and Rails 4 tests will be skipped.
+
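+For example, with an illustrative branch name:
+
+```sh
+# Any branch name containing "norails4" skips the Rails 4 jobs
+git checkout -b update-user-profile-norails4
+```
+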
+## CI is failing on my test with Rails 4. How can I debug it?
+
+You can run specs locally with Rails 4 using the following command:
+
+```sh
+BUNDLE_GEMFILE=Gemfile.rails4 RAILS5=0 bundle exec rspec ...
+```
diff --git a/doc/development/ui_guide.md b/doc/development/ui_guide.md
index df6ac452300..dd206bb2ae9 100644
--- a/doc/development/ui_guide.md
+++ b/doc/development/ui_guide.md
@@ -54,12 +54,12 @@ information from database or file system
When exporting SVGs, be sure to follow the following guidelines:
-1. Convert all strokes to outlines.
-2. Use pathfinder tools to combine overlapping paths and create compound paths.
-3. SVGs that are limited to one color should be exported without a fill color so the color can be set using CSS.
-4. Ensure that exported SVGs have been run through an [SVG cleaner](https://github.com/RazrFalcon/SVGCleaner) to remove unused elements and attributes.
+- Convert all strokes to outlines.
+- Use pathfinder tools to combine overlapping paths and create compound paths.
+- SVGs that are limited to one color should be exported without a fill color so the color can be set using CSS.
+- Ensure that exported SVGs have been run through an [SVG cleaner](https://github.com/RazrFalcon/SVGCleaner) to remove unused elements and attributes.
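+
+As a rough sketch, assuming the `svgcleaner` command-line tool from the repository linked above is installed and accepts an input and an output path:
+
+```sh
+# Write a cleaned copy of the exported icon next to the original
+svgcleaner icon.svg icon.cleaned.svg
+```
+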
-You can open your svg in a text editor to ensure that it is clean.
+You can open your SVG in a text editor to ensure that it is clean.
Incorrect files will look like this:
```xml
diff --git a/doc/development/ux_guide/users.md b/doc/development/ux_guide/users.md
index f9c395b2dff..30386e728c4 100644
--- a/doc/development/ux_guide/users.md
+++ b/doc/development/ux_guide/users.md
@@ -101,7 +101,7 @@ GitLab's interface initially attracted Nazim when he was comparing version contr
### Demographics
**Age**
-
+
42 years old
**Location**
@@ -148,11 +148,11 @@ Matthieu describes GitLab as:
>"the only tool that offers the real feeling of having everything you need in one place."
-He credits himself as being entirely responsible for moving his company to GitLab.
+He credits himself as being entirely responsible for moving his company to GitLab.
### Frustrations
#### Updating to the latest release
-Matthieu introduced his company to GitLab. He is responsible for maintaining and managing the company's installation in addition to his day job. He feels updates are too frequent and he doesn't always have sufficient time to update GitLab. As a result, he's not up to date with releases.
+Matthieu introduced his company to GitLab. He is responsible for maintaining and managing the company's installation in addition to his day job. He feels updates are too frequent and he doesn't always have sufficient time to update GitLab. As a result, he's not up to date with releases.
Matthieu tried to set up automatic updates, however, as he isn't a Systems Administrator, he wasn't confident in his setup. He feels he should be able to "upgrade without users even noticing" but hasn't figured out how to do this yet. Matthieu would like the "update process to be triggered from the Admin Panel, perhaps accompanied with a changelog and the option to skip updates."
@@ -173,11 +173,11 @@ It's Matthieu's responsibility to get teams across his organization up and runni
He states that there has been: "a sluggishness of others to adapt" and it's "a low-effort adaptation at that."
### Goals
-* To save time. One of the reasons Matthieu moved his company to GitLab was to reduce the effort it took him to manage and configure multiple tools, thus saving him time. He has to balance his day job in addition to managing the company's GitLab installation and onboarding new teams to GitLab.
-* To use a platform which is easy to manage. Matthieu isn't a Systems Administrator, and when updating GitLab, creating backups, etc. He would prefer to work within GitLab's UI. Explanations / guided instructions when configuring settings in GitLab's interface would really help Matthieu. He needs reassurance that what he is about to change is
+* To save time. One of the reasons Matthieu moved his company to GitLab was to reduce the effort it took him to manage and configure multiple tools, thus saving him time. He has to balance his day job in addition to managing the company's GitLab installation and onboarding new teams to GitLab.
+* To use a platform which is easy to manage. Matthieu isn't a Systems Administrator, and when updating GitLab, creating backups, etc., he would prefer to work within GitLab's UI. Explanations / guided instructions when configuring settings in GitLab's interface would really help Matthieu. He needs reassurance that what he is about to change:
-1. the right setting
-2. will provide him with the desired result he wants.
+- Is the right setting.
+- Will provide the desired result.
* Matthieu needs to educate his colleagues about GitLab. Matthieu's colleagues won't adopt GitLab as they're unaware of its capabilities and the positive impact it could have on their work. Matthieu needs support in getting this message across to them.
@@ -307,4 +307,4 @@ Karolina has an interest in UX and therefore has strong opinions about how GitLa
### Goals
* To develop her programming experience and to learn from other developers.
* To contribute to both her own and other open source projects.
-* To use a fast and intuitive version control platform. \ No newline at end of file
+* To use a fast and intuitive version control platform.
diff --git a/doc/development/what_requires_downtime.md b/doc/development/what_requires_downtime.md
index b668c9de6a0..3630a28fae9 100644
--- a/doc/development/what_requires_downtime.md
+++ b/doc/development/what_requires_downtime.md
@@ -300,7 +300,7 @@ The same applies to `rename_column_using_background_migration`:
1. Create a migration using the helper, which will schedule background
migrations to spread the writes over a longer period of time.
-2. In the next monthly release, create a clean-up migration to steal from the
+1. In the next monthly release, create a clean-up migration to steal from the
Sidekiq queues, migrate any missing rows, and cleanup the rename. This
migration should skip the steps after stealing from the Sidekiq queues if the
column has already been renamed.
diff --git a/doc/gitlab-basics/create-your-ssh-keys.md b/doc/gitlab-basics/create-your-ssh-keys.md
index b6ebe374de3..881629c3bfd 100644
--- a/doc/gitlab-basics/create-your-ssh-keys.md
+++ b/doc/gitlab-basics/create-your-ssh-keys.md
@@ -12,7 +12,7 @@
![SSH Keys](img/profile_settings_ssh_keys.png)
-3. Paste your **public** key that you generated in the first step in the 'Key'
+1. Paste your **public** key that you generated in the first step in the 'Key'
box.
![Paste SSH public key](img/profile_settings_ssh_keys_paste_pub.png)
diff --git a/doc/install/installation.md b/doc/install/installation.md
index 316411d1047..cac97b63d92 100644
--- a/doc/install/installation.md
+++ b/doc/install/installation.md
@@ -132,9 +132,9 @@ Remove the old Ruby 1.8 if present:
Download Ruby and compile it:
mkdir /tmp/ruby && cd /tmp/ruby
- curl --remote-name --progress https://cache.ruby-lang.org/pub/ruby/2.4/ruby-2.4.5.tar.gz
- echo '4d650f302f1ec00256450b112bb023644b6ab6dd ruby-2.4.5.tar.gz' | shasum -c - && tar xzf ruby-2.4.5.tar.gz
- cd ruby-2.4.5
+ curl --remote-name --progress https://cache.ruby-lang.org/pub/ruby/2.5/ruby-2.5.3.tar.gz
+ echo 'f919a9fbcdb7abecd887157b49833663c5c15fda ruby-2.5.3.tar.gz' | shasum -c - && tar xzf ruby-2.5.3.tar.gz
+ cd ruby-2.5.3
./configure --disable-install-rdoc
make
diff --git a/doc/integration/akismet.md b/doc/integration/akismet.md
index a6436b5f926..200fe6f5206 100644
--- a/doc/integration/akismet.md
+++ b/doc/integration/akismet.md
@@ -20,17 +20,17 @@ To use Akismet:
1. Go to the URL: https://akismet.com/account/
+1. Sign in or create a new account.
+1. Sign-in or create a new account.
-3. Click on **Show** to reveal the API key.
+1. Click on **Show** to reveal the API key.
-4. Go to Applications Settings on Admin Area (`admin/application_settings`)
+1. Go to Applications Settings on Admin Area (`admin/application_settings`)
-5. Check the **Enable Akismet** checkbox
+1. Check the **Enable Akismet** checkbox
-6. Fill in the API key from step 3.
+1. Fill in the API key from step 3.
-7. Save the configuration.
+1. Save the configuration.
![Screenshot of Akismet settings](img/akismet_settings.png)
@@ -42,9 +42,9 @@ To use Akismet:
As a way to better recognize between spam and ham, you can train the Akismet
filter whenever there is a false positive or false negative.
-When an entry is recognized as spam, it is rejected and added to the Spam Logs.
+When an entry is recognized as spam, it is rejected and added to the Spam Logs.
From here you can review if they are really spam. If one of them is not really
-spam, you can use the **Submit as ham** button to tell Akismet that it falsely
+spam, you can use the **Submit as ham** button to tell Akismet that it falsely
recognized an entry as spam.
![Screenshot of Spam Logs](img/spam_log.png)
diff --git a/doc/integration/google.md b/doc/integration/google.md
index 73e2f5826ff..b91d40d4bd4 100644
--- a/doc/integration/google.md
+++ b/doc/integration/google.md
@@ -35,7 +35,7 @@ In Google's side:
1. You should now be able to see a Client ID and Client secret. Note them down
or keep this page open as you will need them later.
-1. From the **Dashboard** select **ENABLE APIS AND SERVICES > Compute > Google+ API > Enable**
+1. From the **Dashboard** select **ENABLE APIS AND SERVICES > Social > Google+ API > Enable**
1. To enable projects to access [Google Kubernetes Engine](../user/project/clusters/index.md), you must also
enable these APIs:
- Google Kubernetes Engine API
diff --git a/doc/integration/recaptcha.md b/doc/integration/recaptcha.md
index 932cd479d56..8fdadb008ec 100644
--- a/doc/integration/recaptcha.md
+++ b/doc/integration/recaptcha.md
@@ -8,19 +8,13 @@ to confirm that a real user, not a bot, is attempting to create an account.
To use reCAPTCHA, first you must create a site and private key.
-1. Go to the URL: https://www.google.com/recaptcha/admin
-
-2. Fill out the form necessary to obtain reCAPTCHA keys.
-
-3. Login to your GitLab server, with administrator credentials.
-
-4. Go to Applications Settings on Admin Area (`admin/application_settings`)
-
-5. Fill all recaptcha fields with keys from previous steps
-
-6. Check the `Enable reCAPTCHA` checkbox
-
-7. Save the configuration.
+1. Go to the URL: <https://www.google.com/recaptcha/admin>.
+1. Fill out the form necessary to obtain reCAPTCHA keys.
+1. Log in to your GitLab server with administrator credentials.
+1. Go to Applications Settings on Admin Area (`admin/application_settings`).
+1. Fill all reCAPTCHA fields with keys from the previous steps.
+1. Check the `Enable reCAPTCHA` checkbox.
+1. Save the configuration.
## Enabling reCAPTCHA for user logins via passwords
diff --git a/doc/security/rack_attack.md b/doc/security/rack_attack.md
index 3efb19c1526..07e7b3da13b 100644
--- a/doc/security/rack_attack.md
+++ b/doc/security/rack_attack.md
@@ -10,7 +10,7 @@ Rack Attack offers IP whitelisting, blacklisting, Fail2ban style filtering and
tracking.
**Note:** Starting with 11.2, Rack Attack is disabled by default. To continue
-using this feature, please enable it in your `gitlab.rb` by setting
+using this feature, please enable it in your `gitlab.rb` by setting
`gitlab_rails['rack_attack_git_basic_auth'] = true`.
By default, user sign-in, user sign-up (if enabled), and user password reset is
@@ -41,7 +41,7 @@ For more information on how to use these options check out
}
```
-3. Reconfigure GitLab:
+1. Reconfigure GitLab:
```
sudo gitlab-ctl reconfigure
@@ -98,26 +98,26 @@ In case you want to remove a blocked IP, follow these steps:
grep "Rack_Attack" /var/log/gitlab/gitlab-rails/production.log
```
-2. Since the blacklist is stored in Redis, you need to open up `redis-cli`:
+1. Since the blacklist is stored in Redis, you need to open up `redis-cli`:
```sh
/opt/gitlab/embedded/bin/redis-cli -s /var/opt/gitlab/redis/redis.socket
```
-3. You can remove the block using the following syntax, replacing `<ip>` with
+1. You can remove the block using the following syntax, replacing `<ip>` with
the actual IP that is blacklisted:
```
del cache:gitlab:rack::attack:allow2ban:ban:<ip>
```
-4. Confirm that the key with the IP no longer shows up:
+1. Confirm that the key with the IP no longer shows up:
```
keys *rack::attack*
```
-5. Optionally, add the IP to the whitelist to prevent it from being blacklisted
+1. Optionally, add the IP to the whitelist to prevent it from being blacklisted
again (see [settings](#settings)).
## Troubleshooting
@@ -129,11 +129,11 @@ the load balancer. In that case, you will need to:
1. [Configure `nginx[real_ip_trusted_addresses]`](https://docs.gitlab.com/omnibus/settings/nginx.html#configuring-gitlab-trusted_proxies-and-the-nginx-real_ip-module).
This will keep users' IPs from being listed as the load balancer IPs.
-2. Whitelist the load balancer's IP address(es) in the Rack Attack [settings](#settings).
-3. Reconfigure GitLab:
+1. Whitelist the load balancer's IP address(es) in the Rack Attack [settings](#settings).
+1. Reconfigure GitLab:
```
sudo gitlab-ctl reconfigure
```
-4. [Remove the block via Redis.](#remove-blocked-ips-from-rack-attack-via-redis)
+1. [Remove the block via Redis.](#remove-blocked-ips-from-rack-attack-via-redis)
diff --git a/doc/security/two_factor_authentication.md b/doc/security/two_factor_authentication.md
index cd290a80314..b770f2544d2 100644
--- a/doc/security/two_factor_authentication.md
+++ b/doc/security/two_factor_authentication.md
@@ -13,8 +13,8 @@ You can read more about it here:
Users on GitLab can enable it without any admin's intervention. If you want to
require everyone to set up 2FA, you can choose from two different ways:
- 1. Enforce on next login
- 2. Suggest on next login, but allow a grace period before enforcing.
+- Enforce on next login.
+- Suggest on next login, but allow a grace period before enforcing.
In the Admin area under **Settings** (`/admin/application_settings`), look for
the "Sign-in Restrictions" area, where you can configure both.
diff --git a/doc/topics/git/troubleshooting_git.md b/doc/topics/git/troubleshooting_git.md
index 8555c5e91ea..d1729d70158 100644
--- a/doc/topics/git/troubleshooting_git.md
+++ b/doc/topics/git/troubleshooting_git.md
@@ -78,5 +78,20 @@ git push
In case you're running an older version of Git (< 2.9), consider upgrading
to >= 2.9 (see [Broken pipe when pushing to Git repository][Broken-Pipe]).
+## Timeout during git push/pull
+
+If pulling/pushing from/to your repository ends up taking more than 50 seconds,
+a timeout will be issued with a log of the number of operations performed
+and their respective timings, like the example below:
+
+```
+remote: Running checks for branch: master
+remote: Scanning for LFS objects... (153ms)
+remote: Calculating new repository size... (cancelled after 729ms)
+```
+
+This could be used to further investigate what operation is performing poorly
+and provide GitLab with more information on how to improve the service.
+
[SSH troubleshooting]: ../../ssh/README.md#troubleshooting "SSH Troubleshooting"
[Broken-Pipe]: https://stackoverflow.com/questions/19120120/broken-pipe-when-pushing-to-git-repository/36971469#36971469 "StackOverflow: 'Broken pipe when pushing to Git repository'"
diff --git a/doc/university/training/end-user/README.md b/doc/university/training/end-user/README.md
index e5eb5d97e3b..701533358c8 100644
--- a/doc/university/training/end-user/README.md
+++ b/doc/university/training/end-user/README.md
@@ -78,7 +78,7 @@ Workshop Time!
```bash
git config --global user.name "Your Name"
git config --global user.email you@example.com
-```
+```
- If you don't use the global flag you can set up a different author for
each project
@@ -107,14 +107,14 @@ cd ~/development
-or-
mkdir ~/workspace
-cd ~/workspace
+cd ~/workspace
```
---
## Git Basics
----
+---
### Git Workflow
@@ -136,7 +136,7 @@ cd ~/workspace
issue tracking, Merge Requests, and other features.
- The hosted version of GitLab is gitlab.com
----
+---
### New Project
@@ -150,12 +150,12 @@ cd ~/workspace
### Git and GitLab basics
1. Edit `edit_this_file.rb` in `training-examples`
-2. See it listed as a changed file (working area)
-3. View the differences
-4. Stage the file
-5. Commit
-6. Push the commit to the remote
-7. View the git log
+1. See it listed as a changed file (working area)
+1. View the differences
+1. Stage the file
+1. Commit
+1. Push the commit to the remote
+1. View the git log
---
@@ -169,14 +169,14 @@ git push origin master
git log
```
----
+---
### Feature Branching
1. Create a new feature branch called `squash_some_bugs`
-2. Edit `bugs.rb` and remove all the bugs.
-3. Commit
-4. Push
+1. Edit `bugs.rb` and remove all the bugs.
+1. Commit
+1. Push
---
@@ -250,16 +250,17 @@ git push origin squash_some_bugs
---
### Example Plan
+
1. Checkout a new branch and edit conflicts.rb. Add 'Line4' and 'Line5'.
-2. Commit and push
-3. Checkout master and edit conflicts.rb. Add 'Line6' and 'Line7' below 'Line3'.
-4. Commit and push to master
-5. Create a merge request and watch it fail
-6. Rebase our new branch with master
-7. Fix conflicts on the conflicts.rb file.
-8. Stage the file and continue rebasing
-9. Force push the changes
-10. Finally continue with the Merge Request
+1. Commit and push
+1. Checkout master and edit conflicts.rb. Add 'Line6' and 'Line7' below 'Line3'.
+1. Commit and push to master
+1. Create a merge request and watch it fail
+1. Rebase our new branch with master
+1. Fix conflicts on the conflicts.rb file.
+1. Stage the file and continue rebasing
+1. Force push the changes
+1. Finally continue with the Merge Request
---
@@ -362,15 +363,15 @@ Don't reset after pushing
### Reset Workflow
1. Edit file again 'edit_this_file.rb'
-2. Check status
-3. Add and commit with wrong message
-4. Check log
-5. Amend commit
-6. Check log
-7. Soft reset
-8. Check log
-9. Pull for updates
-10. Push changes
+1. Check status
+1. Add and commit with wrong message
+1. Check log
+1. Amend commit
+1. Check log
+1. Soft reset
+1. Check log
+1. Pull for updates
+1. Push changes
----
@@ -389,9 +390,9 @@ Don't reset after pushing
### Note
-git revert vs git reset
-Reset removes the commit while revert removes the changes but leaves the commit
-Revert is safer considering we can revert a revert
+`git revert` vs `git reset`:
+Reset removes the commit, while revert removes the changes but leaves the commit in history.
+Revert is safer because we can revert a revert.
# Changed file
diff --git a/doc/university/training/topics/bisect.md b/doc/university/training/topics/bisect.md
index 01e93e4dcb0..4848d0412c1 100644
--- a/doc/university/training/topics/bisect.md
+++ b/doc/university/training/topics/bisect.md
@@ -2,7 +2,7 @@
comments: false
---
-# Bisect
+# Bisect
----------
@@ -17,11 +17,11 @@ comments: false
## Bisect
1. Start the bisect process
-2. Enter the bad revision (usually latest commit)
-3. Enter a known good revision (commit/branch)
-4. Run code to see if bug still exists
-5. Tell bisect the result
-6. Repeat the previous 2 items until you find the offending commit
+1. Enter the bad revision (usually latest commit)
+1. Enter a known good revision (commit/branch)
+1. Run code to see if bug still exists
+1. Tell bisect the result
+1. Repeat the previous 2 items until you find the offending commit
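+
+A possible sequence of commands for this workflow (the good revision is a placeholder):
+
+```sh
+git bisect start
+git bisect bad HEAD       # the latest commit is known to be broken
+git bisect good v1.0      # a tag or SHA known to be good
+# run your code or tests, then report the result:
+git bisect good           # or: git bisect bad
+# repeat until git prints the first bad commit, then clean up:
+git bisect reset
+```
+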
----------
diff --git a/doc/university/training/topics/getting_started.md b/doc/university/training/topics/getting_started.md
index 1441e4b89b2..66cb08feacb 100644
--- a/doc/university/training/topics/getting_started.md
+++ b/doc/university/training/topics/getting_started.md
@@ -35,11 +35,10 @@ comments: false
## Instantiate workflow with clone
-1. Create a project in your user namespace
- - Choose to import from 'Any Repo by URL' and use
- https://gitlab.com/gitlab-org/training-examples.git
-2. Create a '`Workspace`' directory in your home directory.
-3. Clone the '`training-examples`' project
+1. Create a project in your user namespace.
+ - Choose to import from 'Any Repo by URL' and use <https://gitlab.com/gitlab-org/training-examples.git>.
+1. Create a '`Workspace`' directory in your home directory.
+1. Clone the '`training-examples`' project.
----------
diff --git a/doc/university/training/topics/git_log.md b/doc/university/training/topics/git_log.md
index 3e39ea5cc9a..6ba6f9eb69d 100644
--- a/doc/university/training/topics/git_log.md
+++ b/doc/university/training/topics/git_log.md
@@ -46,11 +46,11 @@ Git log lists commit history. It allows searching and filtering.
## Git Log Workflow
1. Change to workspace directory
-2. Clone the multi runner projects
-3. Change to project dir
-4. Search by author
-5. Search by date
-6. Combine
+1. Clone the multi runner projects
+1. Change to project dir
+1. Search by author
+1. Search by date
+1. Combine
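+
+A possible set of commands for this workflow (the repository URL and filter values are placeholders):
+
+```sh
+cd ~/workspace
+git clone https://gitlab.com/gitlab-org/gitlab-runner.git
+cd gitlab-runner
+git log --author="Jane"
+git log --since="1 month ago" --until="3 weeks ago"
+git log --author="Jane" --since="1 month ago"
+```
+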
----------
diff --git a/doc/university/training/topics/merge_conflicts.md b/doc/university/training/topics/merge_conflicts.md
index 9a1ce550868..071baddf508 100644
--- a/doc/university/training/topics/merge_conflicts.md
+++ b/doc/university/training/topics/merge_conflicts.md
@@ -16,15 +16,15 @@ comments: false
## Merge conflicts
1. Checkout a new branch and edit `conflicts.rb`. Add 'Line4' and 'Line5'.
-2. Commit and push
-3. Checkout master and edit `conflicts.rb`. Add 'Line6' and 'Line7' below 'Line3'.
-4. Commit and push to master
-5. Create a merge request and watch it fail
-6. Rebase our new branch with master
-7. Fix conflicts on the `conflicts.rb` file.
-8. Stage the file and continue rebasing
-9. Force push the changes
-10. Finally continue with the Merge Request
+1. Commit and push.
+1. Checkout master and edit `conflicts.rb`. Add 'Line6' and 'Line7' below 'Line3'.
+1. Commit and push to master.
+1. Create a merge request and watch it fail.
+1. Rebase our new branch with master.
+1. Fix conflicts on the `conflicts.rb` file.
+1. Stage the file and continue rebasing.
+1. Force push the changes.
+1. Finally continue with the Merge Request.
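+
+A possible sequence of commands for this exercise (the branch name and commit messages are illustrative):
+
+```sh
+git checkout -b conflicts_branch
+# edit conflicts.rb, adding 'Line4' and 'Line5', then:
+git commit -am "Add Line4 and Line5"
+git push origin conflicts_branch
+
+git checkout master
+# edit conflicts.rb, adding 'Line6' and 'Line7' below 'Line3', then:
+git commit -am "Add Line6 and Line7"
+git push origin master
+
+# after the merge request fails, rebase the branch on master:
+git checkout conflicts_branch
+git rebase master
+# fix the conflicts in conflicts.rb, then:
+git add conflicts.rb
+git rebase --continue
+git push --force-with-lease origin conflicts_branch
+```
+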
----------
diff --git a/doc/university/training/topics/rollback_commits.md b/doc/university/training/topics/rollback_commits.md
index 11cb557651f..44304634f36 100644
--- a/doc/university/training/topics/rollback_commits.md
+++ b/doc/university/training/topics/rollback_commits.md
@@ -41,15 +41,15 @@ comments: false
## Reset Workflow
1. Edit file again 'edit_this_file.rb'
-2. Check status
-3. Add and commit with wrong message
-4. Check log
-5. Amend commit
-6. Check log
-7. Soft reset
-8. Check log
-9. Pull for updates
-10. Push changes
+1. Check status
+1. Add and commit with wrong message
+1. Check log
+1. Amend commit
+1. Check log
+1. Soft reset
+1. Check log
+1. Pull for updates
+1. Push changes
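+
+A possible sequence of commands for this workflow (the commit messages are illustrative):
+
+```sh
+# edit edit_this_file.rb, then:
+git status
+git commit -am "Wrong message"
+git log
+git commit --amend -m "A better message"
+git log
+git reset --soft HEAD^
+git log
+git pull origin master
+git push origin master
+```
+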
----------
diff --git a/doc/university/training/topics/stash.md b/doc/university/training/topics/stash.md
index 315ced1a196..42eedea14e5 100644
--- a/doc/university/training/topics/stash.md
+++ b/doc/university/training/topics/stash.md
@@ -66,12 +66,12 @@ stashes.
## Git Stash
1. Modify a file
-2. Stage file
-3. Stash it
-4. View our stash list
-5. Confirm no pending changes through status
-5. Apply with pop
-6. View list to confirm changes
+1. Stage file
+1. Stash it
+1. View our stash list
+1. Confirm no pending changes through status
+1. Apply with pop
+1. View list to confirm changes
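+
+A possible sequence of commands for this workflow (the file name is illustrative):
+
+```sh
+# modify edit_this_file.rb, then:
+git add edit_this_file.rb
+git stash
+git stash list
+git status
+git stash pop
+git stash list
+```
+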
----------
diff --git a/doc/update/README.md b/doc/update/README.md
index 7d3c4c310a4..d4fc0cc91bf 100644
--- a/doc/update/README.md
+++ b/doc/update/README.md
@@ -38,12 +38,12 @@ Starting with GitLab 9.1.0 it's possible to upgrade to a newer major, minor, or
patch version of GitLab without having to take your GitLab instance offline.
However, for this to work there are the following requirements:
-1. You can only upgrade 1 minor release at a time. So from 9.1 to 9.2, not to
+- You can only upgrade 1 minor release at a time. So from 9.1 to 9.2, not to
9.3.
-2. You have to use [post-deployment
+- You have to use [post-deployment
migrations](../development/post_deployment_migrations.md) (included in
- zero downtime update steps below)
-3. You are using PostgreSQL. If you are using MySQL please look at the release
+ zero downtime update steps below).
+- You are using PostgreSQL. If you are using MySQL please look at the release
post to see if downtime is required.
Most of the time you can safely upgrade from a patch release to the next minor
diff --git a/doc/user/admin_area/settings/continuous_integration.md b/doc/user/admin_area/settings/continuous_integration.md
index 6025a5bbcda..d4853a5842e 100644
--- a/doc/user/admin_area/settings/continuous_integration.md
+++ b/doc/user/admin_area/settings/continuous_integration.md
@@ -49,3 +49,19 @@ and the default value is `30 days`. On GitLab.com they
This setting is set per job and can be overridden in
[`.gitlab-ci.yml`](../../../ci/yaml/README.md#artifacts-expire_in).
To disable the expiration, set it to `0`. The default unit is in seconds.
+
+## Archive jobs **[CORE ONLY]**
+
+Archiving jobs is useful for reducing the CI/CD footprint on the system by
+removing some of the capabilities of the jobs (metadata needed to run the job),
+but persisting the traces and artifacts for auditing purposes.
+
+To set the duration for which the jobs will be considered as old and expired:
+
+1. Go to **Admin area > Settings > CI/CD > Continuous Integration and Deployment**.
+1. Change the value of "Archive jobs".
+1. Hit **Save changes** for the changes to take effect.
+
+Once that time passes, the jobs are archived and can no longer be retried.
+Leave the value empty to never expire jobs. The value must be at least 1 day,
+for example: <code>15 days</code>, <code>1 month</code>, <code>2 years</code>.
diff --git a/doc/user/profile/account/two_factor_authentication.md b/doc/user/profile/account/two_factor_authentication.md
index 64219737d61..76f7e869ff7 100644
--- a/doc/user/profile/account/two_factor_authentication.md
+++ b/doc/user/profile/account/two_factor_authentication.md
@@ -158,7 +158,7 @@ authentication. If an SSH key is added to your GitLab account, you can generate
a new set of recovery codes with SSH.
1. Run `ssh git@gitlab.example.com 2fa_recovery_codes`.
-2. You are prompted to confirm that you want to generate new codes. Continuing this process invalidates previously saved codes.
+1. You are prompted to confirm that you want to generate new codes. Continuing this process invalidates previously saved codes.
```
bash
$ ssh git@gitlab.example.com 2fa_recovery_codes
@@ -185,7 +185,7 @@ a new set of recovery codes with SSH.
so you do not lose access to your account again.
```
-3. Go to the GitLab sign-in page and enter your username/email and password.
+1. Go to the GitLab sign-in page and enter your username/email and password.
When prompted for a two-factor code, enter one of the recovery codes obtained
from the command-line output.
diff --git a/doc/user/profile/index.md b/doc/user/profile/index.md
index da7c30b6b39..2f989a26725 100644
--- a/doc/user/profile/index.md
+++ b/doc/user/profile/index.md
@@ -97,13 +97,13 @@ You and GitLab admins can see your the abovementioned information on your profil
> [Introduced](https://gitlab.com/gitlab-org/gitlab-ce/issues/14078) in GitLab 11.3.
-Enabling private contributions will include contributions to private projects, in the user contribution calendar graph and user recent activity.
+Enabling private contributions will include contributions to private projects in the user contribution calendar graph and user recent activity.
To enable private contributions:
1. Navigate to your personal [profile settings](#profile-settings).
-2. Check the "Private contributions" option.
-3. Hit **Update profile settings**.
+1. Check the "Private contributions" option.
+1. Hit **Update profile settings**.
## Current status
diff --git a/doc/user/project/clusters/eks_and_gitlab/img/rbac.png b/doc/user/project/clusters/eks_and_gitlab/img/rbac.png
new file mode 100644
index 00000000000..c8adaad13c2
--- /dev/null
+++ b/doc/user/project/clusters/eks_and_gitlab/img/rbac.png
Binary files differ
diff --git a/doc/user/project/clusters/eks_and_gitlab/index.md b/doc/user/project/clusters/eks_and_gitlab/index.md
index ef19b05fb9e..45d77e075f1 100644
--- a/doc/user/project/clusters/eks_and_gitlab/index.md
+++ b/doc/user/project/clusters/eks_and_gitlab/index.md
@@ -1,16 +1,8 @@
----
-author: Joshua Lambert
-author_gitlab: joshlambert
-level: intermediate
-article_type: tutorial
-date: 2018-06-05
----
-
# Connecting and deploying to an Amazon EKS cluster
## Introduction
-In this tutorial, we will show how easy it is to integrate an [Amazon EKS](https://aws.amazon.com/eks/) cluster with GitLab, and begin deploying applications.
+In this tutorial, we will show how to integrate an [Amazon EKS](https://aws.amazon.com/eks/) cluster with GitLab, and begin deploying applications.
For an end-to-end walkthrough we will:
@@ -21,7 +13,7 @@ For an end-to-end walkthrough we will:
You will need:
1. An account on GitLab, like [GitLab.com](https://gitlab.com)
-1. An Amazon EKS cluster
+1. An Amazon EKS cluster (with worker nodes properly configured)
1. `kubectl` [installed and configured for access to the EKS cluster](https://docs.aws.amazon.com/eks/latest/userguide/getting-started.html#get-started-kubectl)
If you don't have an Amazon EKS cluster, one can be created by following [the EKS getting started guide](https://docs.aws.amazon.com/eks/latest/userguide/getting-started.html).
@@ -38,26 +30,103 @@ Give the project a name, and then select `Create project`.
![Create Project](img/create_project.png)
-## Connecting the EKS cluster
+## Configuring and connecting the EKS cluster
From the left side bar, hover over `Operations` and select `Kubernetes`, then click on `Add Kubernetes cluster`, and finally `Add an existing Kubernetes cluster`.
A few details from the EKS cluster will be required to connect it to GitLab.
-1. A valid Kubernetes certificate and token are needed to authenticate to the EKS cluster. A pair is created by default, which can be used. Open a shell and use `kubectl` to retrieve them:
- * List the secrets with `kubectl get secrets`, and one should named similar to `default-token-xxxxx`. Copy that token name for use below.
- * Get the certificate with `kubectl get secret <secret name> -o jsonpath="{['data']['ca\.crt']}" | base64 -D`
- * Retrieve the token with `kubectl get secret <secret name> -o jsonpath="{['data']['token']}" | base64 -D`.
+1. **Retrieve the certificate**: A valid Kubernetes certificate is needed to authenticate to the EKS cluster. We will use the certificate created by default. Open a shell and use `kubectl` to retrieve it:
+ - List the secrets with `kubectl get secrets`, and one should be named similar to `default-token-xxxxx`. Copy that token name for use below.
+ - Get the certificate with `kubectl get secret <secret name> -o jsonpath="{['data']['ca\.crt']}" | base64 -D`
+
+1. **Create admin token**: A `cluster-admin` token is required to install and manage Helm Tiller. GitLab establishes mutual SSL auth with Helm Tiller and creates limited service accounts for each application. To create the token we will create an admin service account as follows:
+
+ 1. Create a file called `eks-admin-service-account.yaml` with the text below:
+
+ ```yaml
+ apiVersion: v1
+ kind: ServiceAccount
+ metadata:
+ name: eks-admin
+ namespace: kube-system
+ ```
+
+ 2. Apply the service account to your cluster:
+
+ ```bash
+ kubectl apply -f eks-admin-service-account.yaml
+ ```
+
+ Output:
+
+ ```bash
+ serviceaccount "eks-admin" created
+ ```
+
+ 3. Create a file called `eks-admin-cluster-role-binding.yaml` with the text below:
+
+ ```yaml
+ apiVersion: rbac.authorization.k8s.io/v1beta1
+ kind: ClusterRoleBinding
+ metadata:
+ name: eks-admin
+ roleRef:
+ apiGroup: rbac.authorization.k8s.io
+ kind: ClusterRole
+ name: cluster-admin
+ subjects:
+ - kind: ServiceAccount
+ name: eks-admin
+ namespace: kube-system
+ ```
+
+ 4. Apply the cluster role binding to your cluster:
+
+ ```bash
+ kubectl apply -f eks-admin-cluster-role-binding.yaml
+ ```
+
+ Output:
+
+ ```bash
+ clusterrolebinding "eks-admin" created
+ ```
+
+ 5. Retrieve the token for the `eks-admin` service account. Copy the `<authentication_token>` value from the output.
+
+ ```bash
+ kubectl -n kube-system describe secret $(kubectl -n kube-system get secret | grep eks-admin | awk '{print $1}')
+ ```
+
+ Output:
+
+ ```yaml
+ Name: eks-admin-token-b5zv4
+ Namespace: kube-system
+ Labels: <none>
+ Annotations: kubernetes.io/service-account.name=eks-admin
+ kubernetes.io/service-account.uid=bcfe66ac-39be-11e8-97e8-026dce96b6e8
+
+ Type: kubernetes.io/service-account-token
+
+ Data
+ ====
+ ca.crt: 1025 bytes
+ namespace: 11 bytes
+ token: <authentication_token>
+ ```
+
+1. The API server endpoint is also required, so GitLab can connect to the cluster. This is displayed on the AWS EKS console when viewing the EKS cluster details.
You now have all the information needed to connect the EKS cluster:
-* Kubernetes cluster name: Provide a name for the cluster to identify it within GitLab.
-* Environment scope: Leave this as `*` for now, since we are only connecting a single cluster.
-* API URL: Paste in the API server endpoint retrieved above.
-* CA Certificate: Paste the certificate data from the earlier step, as-is.
-* Paste the token value.
-* Project namespace: This can be left blank to accept the default namespace, based on the project name.
+- Kubernetes cluster name: Provide a name for the cluster to identify it within GitLab.
+- Environment scope: Leave this as `*` for now, since we are only connecting a single cluster.
+- API URL: Paste in the API server endpoint retrieved above.
+- CA Certificate: Paste the certificate data from the earlier step, as-is.
+- Paste the admin token value.
+- Project namespace: This can be left blank to accept the default namespace, based on the project name.
![Add Cluster](img/add_cluster.png)
@@ -65,9 +134,11 @@ Click on `Add Kubernetes cluster`, the cluster is now connected to GitLab. At th
If you would like to utilize your own CI/CD scripts to deploy to the cluster, you can stop here.
-## Disable Role-Based Access Control (RBAC)
+## Disable Role-Based Access Control (RBAC) - Optional
+
+When connecting a cluster via GitLab integration, you may specify whether the cluster is RBAC-enabled or not. This will affect how GitLab interacts with the cluster for certain operations. If you **did not** check the "RBAC-enabled cluster" checkbox at creation time, GitLab will assume RBAC is disabled for your cluster when interacting with it. If so, you must disable RBAC on your cluster for the integration to work properly.
-Presently, Auto DevOps and one-click app installs do not support [Kubernetes role-based access control](https://kubernetes.io/docs/reference/access-authn-authz/rbac/). Support is [being worked on](https://gitlab.com/groups/gitlab-org/-/epics/136), but in the interim RBAC must be disabled to utilize for these features.
+![rbac](img/rbac.png)
> **Note**: Disabling RBAC means that any application running in the cluster, or user who can authenticate to the cluster, has full API access. This is a [security concern](https://docs.gitlab.com/ee/user/project/clusters/#security-implications), and may not be desirable.
diff --git a/doc/user/project/container_registry.md b/doc/user/project/container_registry.md
index 1b1827a2658..cac64fc0cb6 100644
--- a/doc/user/project/container_registry.md
+++ b/doc/user/project/container_registry.md
@@ -139,12 +139,12 @@ docker login registry.example.com -u <username> -p <token>
1. Check to make sure that the system clock on your Docker client and GitLab server have
been synchronized (e.g. via NTP).
-2. If you are using an S3-backed Registry, double check that the IAM
+1. If you are using an S3-backed Registry, double check that the IAM
permissions and the S3 credentials (including region) are correct. See [the
sample IAM policy](https://docs.docker.com/registry/storage-drivers/s3/)
for more details.
-3. Check the Registry logs (e.g. `/var/log/gitlab/registry/current`) and the GitLab production logs
+1. Check the Registry logs (e.g. `/var/log/gitlab/registry/current`) and the GitLab production logs
for errors (e.g. `/var/log/gitlab/gitlab-rails/production.log`). You may be able to find clues
there.
diff --git a/doc/user/project/deploy_tokens/index.md b/doc/user/project/deploy_tokens/index.md
index dc73194309c..7688508c6ac 100644
--- a/doc/user/project/deploy_tokens/index.md
+++ b/doc/user/project/deploy_tokens/index.md
@@ -13,8 +13,8 @@ You can create as many deploy tokens as you like from the settings of your proje
1. Log in to your GitLab account.
1. Go to the project you want to create Deploy Tokens for.
-1. Go to **Settings** > **Repository**
-1. Click on "Expand" on **Deploy Tokens** section
+1. Go to **Settings** > **Repository**.
+1. Click on "Expand" on **Deploy Tokens** section.
1. Choose a name and optionally an expiry date for the token.
1. Choose the [desired scopes](#limiting-scopes-of-a-deploy-token).
1. Click on **Create deploy token**.
@@ -46,8 +46,8 @@ the following table.
To download a repository using a Deploy Token, you just need to:
1. Create a Deploy Token with `read_repository` as a scope.
-2. Take note of your `username` and `token`
-3. `git clone` the project using the Deploy Token:
+1. Take note of your `username` and `token`.
+1. `git clone` the project using the Deploy Token:
```sh
git clone http://<username>:<deploy_token>@gitlab.example.com/tanuki/awesome_project.git
@@ -60,8 +60,8 @@ Replace `<username>` and `<deploy_token>` with the proper values.
To read the container registry images, you'll need to:
1. Create a Deploy Token with `read_registry` as a scope.
-2. Take note of your `username` and `token`
-3. Log in to GitLab’s Container Registry using the deploy token:
+1. Take note of your `username` and `token`.
+1. Log in to GitLab’s Container Registry using the deploy token:
```sh
docker login registry.example.com -u <username> -p <deploy_token>
diff --git a/doc/user/project/import/github.md b/doc/user/project/import/github.md
index fcd6192e82f..3e4be043199 100644
--- a/doc/user/project/import/github.md
+++ b/doc/user/project/import/github.md
@@ -65,9 +65,9 @@ developer documentation.
Before you begin, ensure that any GitHub users who you want to map to GitLab users have either:
-1. A GitLab account that has logged in using the GitHub icon
+- A GitLab account that has logged in using the GitHub icon
\- or -
-2. A GitLab account with an email address that matches the [public email address](https://help.github.com/articles/setting-your-commit-email-address-on-github/) of the GitHub user
+- A GitLab account with an email address that matches the [public email address](https://help.github.com/articles/setting-your-commit-email-address-on-github/) of the GitHub user
User-matching attempts occur in that order, and if a user is not identified either way, the activity is associated with
the user account that is performing the import.
@@ -77,10 +77,10 @@ If you are using a self-hosted GitLab instance, this process requires that you h
[GitHub integration][gh-import].
1. From the top navigation bar, click **+** and select **New project**.
-2. Select the **Import project** tab and then select **GitHub**.
-3. Select the first button to **List your GitHub repositories**. You are redirected to a page on github.com to authorize the GitLab application.
-4. Click **Authorize gitlabhq**. You are redirected back to GitLab's Import page and all of your GitHub repositories are listed.
-5. Continue on to [selecting which repositories to import](#selecting-which-repositories-to-import).
+1. Select the **Import project** tab and then select **GitHub**.
+1. Select the first button to **List your GitHub repositories**. You are redirected to a page on github.com to authorize the GitLab application.
+1. Click **Authorize gitlabhq**. You are redirected back to GitLab's Import page and all of your GitHub repositories are listed.
+1. Continue on to [selecting which repositories to import](#selecting-which-repositories-to-import).
### Using a GitHub token
@@ -92,12 +92,12 @@ integration enabled, that should be the preferred method to import your reposito
If you are not using the GitHub integration, you can still perform an authorization with GitHub to grant GitLab access to your repositories:
1. Go to https://github.com/settings/tokens/new
-2. Enter a token description.
-3. Select the repo scope.
-4. Click **Generate token**.
-5. Copy the token hash.
-6. Go back to GitLab and provide the token to the GitHub importer.
-7. Hit the **List Your GitHub Repositories** button and wait while GitLab reads your repositories' information.
+1. Enter a token description.
+1. Select the repo scope.
+1. Click **Generate token**.
+1. Copy the token hash.
+1. Go back to GitLab and provide the token to the GitHub importer.
+1. Hit the **List Your GitHub Repositories** button and wait while GitLab reads your repositories' information.
Once done, you'll be taken to the importer page to select the repositories to import.
### Selecting which repositories to import
@@ -107,10 +107,10 @@ your GitHub repositories are listed.
1. By default, the proposed repository namespaces match the names as they exist in GitHub, but based on your permissions,
you can choose to edit these names before you proceed to import any of them.
-2. Select the **Import** button next to any number of repositories, or select **Import all repositories**.
-3. The **Status** column shows the import status of each repository. You can choose to leave the page open and it will
+1. Select the **Import** button next to any number of repositories, or select **Import all repositories**.
+1. The **Status** column shows the import status of each repository. You can choose to leave the page open and it will
update in realtime or you can return to it later.
-4. Once a repository has been imported, click its GitLab path to open its GitLab URL.
+1. Once a repository has been imported, click its GitLab path to open its GitLab URL.
## Mirroring and pipeline status sharing
diff --git a/doc/user/project/integrations/bugzilla.md b/doc/user/project/integrations/bugzilla.md
index 671804035cc..040e80d529d 100644
--- a/doc/user/project/integrations/bugzilla.md
+++ b/doc/user/project/integrations/bugzilla.md
@@ -16,8 +16,9 @@ Once you have configured and enabled Bugzilla you'll see the Bugzilla link on th
## Referencing issues in Bugzilla
Issues in Bugzilla can be referenced in two alternative ways:
-1. `#<ID>` where `<ID>` is a number (example `#143`).
-2. `<PROJECT>-<ID>` where `<PROJECT>` starts with a capital letter which is
+
+- `#<ID>` where `<ID>` is a number (example `#143`).
+- `<PROJECT>-<ID>` where `<PROJECT>` starts with a capital letter which is
then followed by capital letters, numbers or underscores, and `<ID>` is
a number (example `API_32-143`).
diff --git a/doc/user/project/integrations/jira_cloud_configuration.md b/doc/user/project/integrations/jira_cloud_configuration.md
index 2e6e8278e64..cae66526175 100644
--- a/doc/user/project/integrations/jira_cloud_configuration.md
+++ b/doc/user/project/integrations/jira_cloud_configuration.md
@@ -4,16 +4,15 @@ An API token is needed when integrating with JIRA Cloud, follow the steps
below to create one:
1. Log in to https://id.atlassian.com with your email.
-2. **Click API tokens**, then **Create API token**.
+1. **Click API tokens**, then **Create API token**.
![JIRA API token](img/jira_api_token_menu.png)
![JIRA API token](img/jira_api_token.png)
-3. Make sure to write down your new API token as you will need it in the next [steps](jira.md#configuring-gitlab).
+1. Make sure to write down your new API token as you will need it in the next [steps](jira.md#configuring-gitlab).
NOTE: **Note**
It is important that the user associated with this email has 'write' access to projects in JIRA.
The JIRA configuration is complete. You will need this newly created token and the email address you used to log in when [configuring GitLab in the next section](jira.md#configuring-gitlab).
-
diff --git a/doc/user/project/integrations/jira_server_configuration.md b/doc/user/project/integrations/jira_server_configuration.md
index 7d84ad0b07c..20036183187 100644
--- a/doc/user/project/integrations/jira_server_configuration.md
+++ b/doc/user/project/integrations/jira_server_configuration.md
@@ -17,17 +17,17 @@ We have split this stage in steps so it is easier to follow.
![Jira user management link](img/jira_user_management_link.png)
-2. The next step is to create a new user (e.g., `gitlab`) who has write access
+1. The next step is to create a new user (e.g., `gitlab`) who has write access
to projects in Jira. Enter the user's name and a _valid_ e-mail address
since Jira sends a verification e-mail to set up the password.
_**Note:** Jira creates the username automatically by using the e-mail
- prefix. You can change it later, if needed. Our integration does not support SSO (such as SAML). You will need to create
- an HTTP basic authentication password. You can do this by visiting the user
+ prefix. You can change it later, if needed. Our integration does not support SSO (such as SAML). You will need to create
+ an HTTP basic authentication password. You can do this by visiting the user
profile, looking up the username, and setting a password._
![Jira create new user](img/jira_create_new_user.png)
-3. Create a `gitlab-developers` group which will have write access
+1. Create a `gitlab-developers` group which will have write access
to projects in Jira. Go to the **Groups** tab and select **Create group**.
![Jira create new user](img/jira_create_new_group.png)
@@ -36,13 +36,13 @@ We have split this stage in steps so it is easier to follow.
![Jira create new group](img/jira_create_new_group_name.png)
-4. To give the newly-created group 'write' access, go to
+1. To give the newly-created group 'write' access, go to
**Application access > View configuration** and add the `gitlab-developers`
group to Jira Core.
![Jira group access](img/jira_group_access.png)
-5. Add the `gitlab` user to the `gitlab-developers` group by going to
+1. Add the `gitlab` user to the `gitlab-developers` group by going to
**Users > GitLab user > Add group** and selecting the `gitlab-developers`
group from the dropdown menu. Notice that the group says _Access_, which is
intended as part of this process.
diff --git a/doc/user/project/integrations/redmine.md b/doc/user/project/integrations/redmine.md
index de2cf6d4647..76a2617125e 100644
--- a/doc/user/project/integrations/redmine.md
+++ b/doc/user/project/integrations/redmine.md
@@ -18,15 +18,16 @@ in the table below.
![Redmine configuration](img/redmine_configuration.png)
-2. To disable the internal issue tracking system in a project, navigate to the General page, expand [Permissions](../settings/index.md#sharing-and-permissions), and slide the Issues switch invalid.
+1. To disable the internal issue tracking system in a project, navigate to the General page, expand [Permissions](../settings/index.md#sharing-and-permissions), and slide the Issues switch to the disabled position.
![Issue configuration](img/issue_configuration.png)
## Referencing issues in Redmine
Issues in Redmine can be referenced in two alternative ways:
-1. `#<ID>` where `<ID>` is a number (example `#143`)
-2. `<PROJECT>-<ID>` where `<PROJECT>` starts with a capital letter which is
+
+- `#<ID>` where `<ID>` is a number (example `#143`).
+- `<PROJECT>-<ID>` where `<PROJECT>` starts with a capital letter which is
then followed by capital letters, numbers or underscores, and `<ID>` is
a number (example `API_32-143`).
diff --git a/doc/user/project/integrations/webhooks.md b/doc/user/project/integrations/webhooks.md
index 7c63967c829..4d1d95da6f0 100644
--- a/doc/user/project/integrations/webhooks.md
+++ b/doc/user/project/integrations/webhooks.md
@@ -338,10 +338,10 @@ payload will also include information about the target of the comment. For examp
a comment on an issue will include the specific issue information under the `issue` key.
Valid target types:
-1. `commit`
-2. `merge_request`
-3. `issue`
-4. `snippet`
+- `commit`
+- `merge_request`
+- `issue`
+- `snippet`
#### Comment on commit
diff --git a/doc/user/project/issues/automatic_issue_closing.md b/doc/user/project/issues/automatic_issue_closing.md
index b6570c777ae..afb7d9ada5f 100644
--- a/doc/user/project/issues/automatic_issue_closing.md
+++ b/doc/user/project/issues/automatic_issue_closing.md
@@ -27,10 +27,11 @@ used:
Note that `%{issue_ref}` is a complex regular expression defined inside GitLab's
source code that can match references to:
-1. a local issue (`#123`),
-2. a cross-project issue (`group/project#123`)
-3. a link to an issue
-(`https://gitlab.example.com/group/project/issues/123`).
+
+- A local issue (`#123`).
+- A cross-project issue (`group/project#123`).
+- A link to an issue
+ (`https://gitlab.example.com/group/project/issues/123`).
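For example, any of these commit messages will close the referenced issue once the commit reaches the project's default branch (a sketch using the default closing pattern):

```sh
git commit -m "Fix the flaky spec. Closes #123"
git commit -m "Remove dead code. Closes group/project#456"
```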
---
diff --git a/doc/user/project/new_ci_build_permissions_model.md b/doc/user/project/new_ci_build_permissions_model.md
index 23d5b34504c..9a53036b4d1 100644
--- a/doc/user/project/new_ci_build_permissions_model.md
+++ b/doc/user/project/new_ci_build_permissions_model.md
@@ -60,7 +60,7 @@ Let's consider the following scenario:
hosted in private repositories and you have multiple CI jobs that make use
of these repositories.
-2. You invite a new [external user][ext]. CI jobs created by that user do not
+1. You invite a new [external user][ext]. CI jobs created by that user do not
have access to internal repositories, because the user also doesn't have the
access from within GitLab. You as an employee have to grant explicit access
   for this user. This allows us to prevent accidental data leakage.
diff --git a/doc/user/project/repository/index.md b/doc/user/project/repository/index.md
index 6d822d3f7f2..1710bba2fd0 100644
--- a/doc/user/project/repository/index.md
+++ b/doc/user/project/repository/index.md
@@ -53,17 +53,35 @@ To get started with the command line, please read through the
Use GitLab's [file finder](../../../workflow/file_finder.md) to search for files in a repository.
+### Supported markup languages and extensions
+
+GitLab supports a number of markup languages (sometimes called [lightweight
+markup languages](https://en.wikipedia.org/wiki/Lightweight_markup_language))
+that you can use for the content of your files in a repository. They are mostly
+used for documentation purposes.
+
+Just pick the right extension for your files and GitLab will render them
+according to the markup language.
+
+| Markup language | Extensions |
+| --------------- | ---------- |
+| Plain text | `txt` |
+| [Markdown](../../markdown.md) | `mdown`, `mkd`, `mkdn`, `md`, `markdown` |
+| [reStructuredText](http://docutils.sourceforge.net/rst.html) | `rst` |
+| [Asciidoc](https://asciidoctor.org/docs/what-is-asciidoc/) | `adoc`, `ad`, `asciidoc` |
+| [Textile](https://txstyle.org/) | `textile` |
+| [rdoc](http://rdoc.sourceforge.net/doc/index.html) | `rdoc` |
+| [Orgmode](https://orgmode.org/) | `org` |
+| [creole](http://www.wikicreole.org/) | `creole` |
+| [Mediawiki](https://www.mediawiki.org/wiki/MediaWiki) | `wiki`, `mediawiki` |
+
### Repository README and index files
When a `README` or `index` file is present in a repository, its contents will be
automatically pre-rendered by GitLab without opening it.
-They can either be plain text or have an extension of a supported markup language:
-
-- Asciidoc: `README.adoc` or `index.adoc`
-- Markdown: `README.md` or `index.md`
-- reStructuredText: `README.rst` or `index.rst`
-- Text: `README.txt` or `index.txt`
+They can either be plain text or have an extension of a
+[supported markup language](#supported-markup-languages-and-extensions):
Some things to note about precedence:
@@ -75,10 +93,6 @@ Some things to note about precedence:
precedence over `README.md`, and `README.rst` will take precedence over
`README`.
-NOTE: **Note:**
-`index` files without an extension will not automatically pre-render. You'll
-have to explicitly open them to see their contents.
-
### Jupyter Notebook files
> [Introduced](https://gitlab.com/gitlab-org/gitlab-ce/issues/2508) in GitLab 9.1
diff --git a/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml b/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml
index ec949c290bd..4b726bd90d3 100644
--- a/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml
@@ -27,7 +27,7 @@
# Continuous deployment to production is enabled by default.
# If you want to deploy to staging first, set STAGING_ENABLED environment variable.
# If you want to enable incremental rollout, either manual or time based,
-# set INCREMENTAL_ROLLOUT_TYPE environment variable to "manual" or "timed".
+# set INCREMENTAL_ROLLOUT_MODE environment variable to "manual" or "timed".
# If you want to use canary deployments, set CANARY_ENABLED environment variable.
#
# If Auto DevOps fails to detect the proper buildpack, or if you want to
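As a usage note for the renamed `INCREMENTAL_ROLLOUT_MODE` variable above: it can be set either in the UI under **Settings > CI/CD > Variables** or through the project variables API. A sketch of the latter, where `<your_access_token>` and `<project_id>` are placeholders:

```sh
curl --request POST \
     --header "PRIVATE-TOKEN: <your_access_token>" \
     --form "key=INCREMENTAL_ROLLOUT_MODE" \
     --form "value=manual" \
     "https://gitlab.example.com/api/v4/projects/<project_id>/variables"
```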
diff --git a/lib/gitlab/kubernetes/helm/api.rb b/lib/gitlab/kubernetes/helm/api.rb
index e21bc531444..06841ec7b76 100644
--- a/lib/gitlab/kubernetes/helm/api.rb
+++ b/lib/gitlab/kubernetes/helm/api.rb
@@ -89,22 +89,14 @@ module Gitlab
end
def service_account_exists?(resource)
- resource_exists? do
- kubeclient.get_service_account(resource.metadata.name, resource.metadata.namespace)
- end
+ kubeclient.get_service_account(resource.metadata.name, resource.metadata.namespace)
+ rescue ::Kubeclient::ResourceNotFoundError
+ false
end
def cluster_role_binding_exists?(resource)
- resource_exists? do
- kubeclient.get_cluster_role_binding(resource.metadata.name)
- end
- end
-
- def resource_exists?
- yield
- rescue ::Kubeclient::HttpError => e
- raise e unless e.error_code == 404
-
+ kubeclient.get_cluster_role_binding(resource.metadata.name)
+ rescue ::Kubeclient::ResourceNotFoundError
false
end
end
diff --git a/lib/gitlab/kubernetes/namespace.rb b/lib/gitlab/kubernetes/namespace.rb
index e6ff6160ab9..783c8a24741 100644
--- a/lib/gitlab/kubernetes/namespace.rb
+++ b/lib/gitlab/kubernetes/namespace.rb
@@ -10,9 +10,7 @@ module Gitlab
def exists?
@client.get_namespace(name)
- rescue ::Kubeclient::HttpError => ke
- raise ke unless ke.error_code == 404
-
+ rescue ::Kubeclient::ResourceNotFoundError
false
end
diff --git a/lib/gitlab/sentry.rb b/lib/gitlab/sentry.rb
index 24e3866128b..8079c5882c4 100644
--- a/lib/gitlab/sentry.rb
+++ b/lib/gitlab/sentry.rb
@@ -7,7 +7,7 @@ module Gitlab
end
def self.context(current_user = nil)
- return unless self.enabled?
+ return unless enabled?
Raven.tags_context(locale: I18n.locale)
@@ -29,14 +29,22 @@ module Gitlab
#
# Provide an issue URL for follow up.
def self.track_exception(exception, issue_url: nil, extra: {})
+ track_acceptable_exception(exception, issue_url: issue_url, extra: extra)
+
+ raise exception if should_raise?
+ end
+
+ # This should be used when you do not want to raise an exception in
+ # development and test. If you need development and test to behave
+ # just the same as production you can use this instead of
+ # track_exception.
+ def self.track_acceptable_exception(exception, issue_url: nil, extra: {})
if enabled?
extra[:issue_url] = issue_url if issue_url
context # Make sure we've set everything we know in the context
Raven.capture_exception(exception, extra: extra)
end
-
- raise exception if should_raise?
end
def self.program_context
diff --git a/qa/qa/page/merge_request/show.rb b/qa/qa/page/merge_request/show.rb
index 2e69a89e386..2fd30e15ffb 100644
--- a/qa/qa/page/merge_request/show.rb
+++ b/qa/qa/page/merge_request/show.rb
@@ -55,6 +55,10 @@ module QA
element :labels_block
end
+ view 'app/views/projects/merge_requests/_mr_title.html.haml' do
+ element :edit_button
+ end
+
def fast_forward_possible?
!has_text?('Fast-forward merge is not possible')
end
@@ -163,6 +167,10 @@ module QA
all_elements(:discussion_reply).last.click
fill_element :reply_input, reply_text
end
+
+ def edit!
+ click_element :edit_button
+ end
end
end
end
diff --git a/qa/qa/resource/merge_request.rb b/qa/qa/resource/merge_request.rb
index 466a7942dc6..77afb3cfcba 100644
--- a/qa/qa/resource/merge_request.rb
+++ b/qa/qa/resource/merge_request.rb
@@ -11,7 +11,9 @@ module QA
:target_branch,
:assignee,
:milestone,
- :labels
+ :labels,
+ :file_name,
+ :file_content
attribute :project do
Project.fabricate! do |resource|
@@ -35,8 +37,8 @@ module QA
resource.branch_name = target_branch
resource.remote_branch = source_branch
resource.new_branch = false
- resource.file_name = "added_file.txt"
- resource.file_content = "File Added"
+ resource.file_name = file_name
+ resource.file_content = file_content
end
end
@@ -48,6 +50,8 @@ module QA
@assignee = nil
@milestone = nil
@labels = []
+ @file_name = "added_file.txt"
+ @file_content = "File Added"
end
def fabricate!
diff --git a/scripts/rails4-gemfile-lock-check b/scripts/rails4-gemfile-lock-check
new file mode 100755
index 00000000000..a74a49874e1
--- /dev/null
+++ b/scripts/rails4-gemfile-lock-check
@@ -0,0 +1,19 @@
+#!/usr/bin/env bash
+
+echo -e "=> Checking if Gemfile.rails4.lock is up-to-date...\\n"
+
+cp Gemfile.rails4.lock Gemfile.rails4.lock.orig
+BUNDLE_GEMFILE=Gemfile.rails4 bundle install "$BUNDLE_INSTALL_FLAGS"
+diff -u Gemfile.rails4.lock.orig Gemfile.rails4.lock >/dev/null 2>&1
+
+if [ $? == 1 ]
+then
+ diff -u Gemfile.rails4.lock.orig Gemfile.rails4.lock
+
+  echo -e "\\n✖ ERROR: Gemfile.rails4.lock is not up-to-date!
+ Please run 'BUNDLE_GEMFILE=Gemfile.rails4 bundle install'\\n" >&2
+ exit 1
+fi
+
+echo "✔ Gemfile.rails4.lock is up-to-date"
+exit 0
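If this check fails in CI, the remedy it prints can be run locally from the repository root to regenerate the lock file before committing:

```sh
BUNDLE_GEMFILE=Gemfile.rails4 bundle install
```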
diff --git a/scripts/rails5-gemfile-lock-check b/scripts/rails5-gemfile-lock-check
deleted file mode 100755
index da6f1b7145e..00000000000
--- a/scripts/rails5-gemfile-lock-check
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env bash
-
-echo -e "=> Checking if Gemfile.rails5.lock is up-to-date...\\n"
-
-cp Gemfile.rails5.lock Gemfile.rails5.lock.orig
-BUNDLE_GEMFILE=Gemfile.rails5 bundle install "$BUNDLE_INSTALL_FLAGS"
-diff -u Gemfile.rails5.lock.orig Gemfile.rails5.lock >/dev/null 2>&1
-
-if [ $? == 1 ]
-then
- diff -u Gemfile.rails5.lock.orig Gemfile.rails5.lock
-
-  echo -e "\\n✖ ERROR: Gemfile.rails5.lock is not up-to-date!
- Please run 'BUNDLE_GEMFILE=Gemfile.rails5 bundle install'\\n" >&2
- exit 1
-fi
-
-echo "✔ Gemfile.rails5.lock is up-to-date"
-exit 0
diff --git a/scripts/review_apps/review-apps.sh b/scripts/review_apps/review-apps.sh
index b180860899a..00e23f12bc0 100755
--- a/scripts/review_apps/review-apps.sh
+++ b/scripts/review_apps/review-apps.sh
@@ -179,21 +179,35 @@ function delete() {
track="${1-stable}"
name="$CI_ENVIRONMENT_SLUG"
+ if [ -z "$CI_ENVIRONMENT_SLUG" ]; then
+ echo "No release given, aborting the delete!"
+ return
+ fi
+
if [[ "$track" != "stable" ]]; then
name="$name-$track"
fi
+ if ! deployExists "${KUBE_NAMESPACE}" "${name}"; then
+ echo "The release $name doesn't exist, aborting the cleanup!"
+ return
+ fi
+
echo "Deleting release '$name'..."
helm delete --purge "$name" || true
}
function cleanup() {
- echo "Cleaning up $CI_ENVIRONMENT_SLUG..."
- kubectl -n "$KUBE_NAMESPACE" get ingress,svc,pdb,hpa,deploy,statefulset,job,pod,secret,configmap,pvc,secret,clusterrole,clusterrolebinding,role,rolebinding,sa 2>&1 \
- | grep "$CI_ENVIRONMENT_SLUG" \
- | awk '{print $1}' \
- | xargs kubectl -n "$KUBE_NAMESPACE" delete \
- || true
+ if [ -z "$CI_ENVIRONMENT_SLUG" ]; then
+ echo "No release given, aborting the delete!"
+ return
+ fi
+
+ echo "Cleaning up '$CI_ENVIRONMENT_SLUG'..."
+ kubectl -n "$KUBE_NAMESPACE" delete \
+ ingress,svc,pdb,hpa,deploy,statefulset,job,pod,secret,configmap,pvc,secret,clusterrole,clusterrolebinding,role,rolebinding,sa \
+ -l release="$CI_ENVIRONMENT_SLUG" \
+ || true
}
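Before the destructive run, the same label selector can be used read-only to preview what the cleanup would remove (a sketch limited to namespaced resources; it assumes the chart labels everything with `release=<slug>`, as the new `cleanup()` expects):

```sh
kubectl -n "$KUBE_NAMESPACE" get \
  ingress,svc,pdb,hpa,deploy,statefulset,job,pod,secret,configmap,pvc,sa \
  -l release="$CI_ENVIRONMENT_SLUG"
```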
function install_external_dns() {
diff --git a/spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js b/spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js
index 0c1d5f5b0b4..4f561df7943 100644
--- a/spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js
+++ b/spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js
@@ -754,6 +754,50 @@ describe('Filtered Search Visual Tokens', () => {
expect(updateLabelTokenColorSpy.calls.count()).toBe(0);
expect(updateUserTokenAppearanceSpy.calls.count()).toBe(0);
});
+
+ it('does not update user token appearance for `none` filter', () => {
+ const { tokenNameElement } = findElements(authorToken);
+
+ const tokenName = tokenNameElement.innerText;
+ const tokenValue = 'none';
+
+ subject.renderVisualTokenValue(authorToken, tokenName, tokenValue);
+
+ expect(updateUserTokenAppearanceSpy.calls.count()).toBe(0);
+ });
+
+ it('does not update user token appearance for `any` filter', () => {
+ const { tokenNameElement } = findElements(authorToken);
+
+ const tokenName = tokenNameElement.innerText;
+ const tokenValue = 'any';
+
+ subject.renderVisualTokenValue(authorToken, tokenName, tokenValue);
+
+ expect(updateUserTokenAppearanceSpy.calls.count()).toBe(0);
+ });
+
+ it('does not update label token color for `none` filter', () => {
+ const { tokenNameElement } = findElements(bugLabelToken);
+
+ const tokenName = tokenNameElement.innerText;
+ const tokenValue = 'none';
+
+ subject.renderVisualTokenValue(bugLabelToken, tokenName, tokenValue);
+
+ expect(updateLabelTokenColorSpy.calls.count()).toBe(0);
+ });
+
+ it('does not update label token color for `any` filter', () => {
+ const { tokenNameElement } = findElements(bugLabelToken);
+
+ const tokenName = tokenNameElement.innerText;
+ const tokenValue = 'any';
+
+ subject.renderVisualTokenValue(bugLabelToken, tokenName, tokenValue);
+
+ expect(updateLabelTokenColorSpy.calls.count()).toBe(0);
+ });
});
describe('updateUserTokenAppearance', () => {
@@ -763,19 +807,6 @@ describe('Filtered Search Visual Tokens', () => {
spyOn(UsersCache, 'retrieve').and.callFake(username => usersCacheSpy(username));
});
- it('ignores special value "none"', done => {
- usersCacheSpy = username => {
- expect(username).toBe('none');
- done.fail('Should not resolve "none"!');
- };
- const { tokenValueContainer, tokenValueElement } = findElements(authorToken);
-
- subject
- .updateUserTokenAppearance(tokenValueContainer, tokenValueElement, 'none')
- .then(done)
- .catch(done.fail);
- });
-
it('ignores error if UsersCache throws', done => {
spyOn(window, 'Flash');
const dummyError = new Error('Earth rotated backwards');
diff --git a/spec/javascripts/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js b/spec/javascripts/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js
index 2d3e178d249..7f2e246d656 100644
--- a/spec/javascripts/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js
+++ b/spec/javascripts/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js
@@ -97,6 +97,26 @@ describe('ImageDiffViewer', () => {
});
});
+ it('renders image diff for renamed', done => {
+ vm = new Vue({
+ components: {
+ imageDiffViewer,
+ },
+ template: `
+ <image-diff-viewer diff-mode="renamed" new-path="${GREEN_BOX_IMAGE_URL}" old-path="">
+ <span slot="image-overlay" class="overlay">test</span>
+ </image-diff-viewer>
+ `,
+ }).$mount();
+
+ setTimeout(() => {
+ expect(vm.$el.querySelector('img').getAttribute('src')).toBe(GREEN_BOX_IMAGE_URL);
+ expect(vm.$el.querySelector('.overlay')).not.toBe(null);
+
+ done();
+ });
+ });
+
describe('swipeMode', () => {
beforeEach(done => {
createComponent({
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 4e83b27e4a5..23f27939dd2 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -1338,7 +1338,12 @@ describe Gitlab::Database::MigrationHelpers do
end
describe '#index_exists_by_name?' do
- it 'returns true if an index exists' do
+ # TODO: remove rails5-only after removing rails4 tests
+ # rails 4 can not handle multiple indexes on the same column set if
+ # index was added by 't.index' - t.index is used by default in schema.rb in
+ # rails 5. Let's run this test only in rails 5 env:
+ # see https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/21492#note_113602758
+ it 'returns true if an index exists', :rails5 do
expect(model.index_exists_by_name?(:projects, 'index_projects_on_path'))
.to be_truthy
end
diff --git a/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb
index 25684ea9e2c..efca8564894 100644
--- a/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb
@@ -240,7 +240,12 @@ describe Gitlab::GithubImport::Importer::PullRequestImporter, :clean_gitlab_redi
.and_return(user.id)
end
- it 'returns the existing merge request' do
+ # TODO: remove rails5-only after removing rails4 tests
+ # rails 4 can not handle multiple indexes on the same column set if
+ # index was added by 't.index' - t.index is used by default in schema.rb in
+ # rails 5. Let's run this test only in rails 5 env:
+ # see https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/21492#note_113602758
+ it 'returns the existing merge request', :rails5 do
mr1, exists1 = importer.create_merge_request
mr2, exists2 = importer.create_merge_request
diff --git a/spec/lib/gitlab/kubernetes/helm/api_spec.rb b/spec/lib/gitlab/kubernetes/helm/api_spec.rb
index 9200724ed23..a8124ced28c 100644
--- a/spec/lib/gitlab/kubernetes/helm/api_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/api_spec.rb
@@ -88,8 +88,8 @@ describe Gitlab::Kubernetes::Helm::Api do
context 'service account and cluster role binding does not exist' do
before do
- expect(client).to receive('get_service_account').with('tiller', 'gitlab-managed-apps').and_raise(Kubeclient::HttpError.new(404, 'Not found', nil))
- expect(client).to receive('get_cluster_role_binding').with('tiller-admin').and_raise(Kubeclient::HttpError.new(404, 'Not found', nil))
+ expect(client).to receive('get_service_account').with('tiller', 'gitlab-managed-apps').and_raise(Kubeclient::ResourceNotFoundError.new(404, 'Not found', nil))
+ expect(client).to receive('get_cluster_role_binding').with('tiller-admin').and_raise(Kubeclient::ResourceNotFoundError.new(404, 'Not found', nil))
end
it 'creates a service account, followed the cluster role binding on kubeclient' do
@@ -103,7 +103,7 @@ describe Gitlab::Kubernetes::Helm::Api do
context 'service account already exists' do
before do
expect(client).to receive('get_service_account').with('tiller', 'gitlab-managed-apps').and_return(service_account_resource)
- expect(client).to receive('get_cluster_role_binding').with('tiller-admin').and_raise(Kubeclient::HttpError.new(404, 'Not found', nil))
+ expect(client).to receive('get_cluster_role_binding').with('tiller-admin').and_raise(Kubeclient::ResourceNotFoundError.new(404, 'Not found', nil))
end
it 'updates the service account, followed by creating the cluster role binding' do
diff --git a/spec/lib/gitlab/kubernetes/namespace_spec.rb b/spec/lib/gitlab/kubernetes/namespace_spec.rb
index e098612f6fb..e1c35c355f4 100644
--- a/spec/lib/gitlab/kubernetes/namespace_spec.rb
+++ b/spec/lib/gitlab/kubernetes/namespace_spec.rb
@@ -9,7 +9,7 @@ describe Gitlab::Kubernetes::Namespace do
describe '#exists?' do
context 'when namespace do not exits' do
- let(:exception) { ::Kubeclient::HttpError.new(404, "namespace #{name} not found", nil) }
+ let(:exception) { ::Kubeclient::ResourceNotFoundError.new(404, "namespace #{name} not found", nil) }
it 'returns false' do
expect(client).to receive(:get_namespace).with(name).once.and_raise(exception)
diff --git a/spec/lib/gitlab/sentry_spec.rb b/spec/lib/gitlab/sentry_spec.rb
index 499757da061..d3b41b27b80 100644
--- a/spec/lib/gitlab/sentry_spec.rb
+++ b/spec/lib/gitlab/sentry_spec.rb
@@ -52,4 +52,28 @@ describe Gitlab::Sentry do
end
end
end
+
+ context '.track_acceptable_exception' do
+ let(:exception) { RuntimeError.new('boom') }
+
+ before do
+ allow(described_class).to receive(:enabled?).and_return(true)
+ end
+
+ it 'calls Raven.capture_exception' do
+ expected_extras = {
+ some_other_info: 'info',
+ issue_url: 'http://gitlab.com/gitlab-org/gitlab-ce/issues/1'
+ }
+
+ expect(Raven).to receive(:capture_exception)
+ .with(exception, extra: a_hash_including(expected_extras))
+
+ described_class.track_acceptable_exception(
+ exception,
+ issue_url: 'http://gitlab.com/gitlab-org/gitlab-ce/issues/1',
+ extra: { some_other_info: 'info' }
+ )
+ end
+ end
end
diff --git a/spec/serializers/environment_status_entity_spec.rb b/spec/serializers/environment_status_entity_spec.rb
index 58e2e627410..8a6a38fe5f8 100644
--- a/spec/serializers/environment_status_entity_spec.rb
+++ b/spec/serializers/environment_status_entity_spec.rb
@@ -40,4 +40,40 @@ describe EnvironmentStatusEntity do
it { is_expected.to include(:stop_url) }
end
+
+ context 'when deployment has metrics' do
+ let(:prometheus_adapter) { double('prometheus_adapter', can_query?: true) }
+
+ let(:simple_metrics) do
+ {
+ success: true,
+ metrics: {},
+ last_update: 42
+ }
+ end
+
+ before do
+ project.add_maintainer(user)
+ allow(deployment).to receive(:prometheus_adapter).and_return(prometheus_adapter)
+ allow(prometheus_adapter).to receive(:query).with(:deployment, deployment).and_return(simple_metrics)
+ allow(entity).to receive(:deployment).and_return(deployment)
+ end
+
+ context 'when deployment succeeded' do
+ let(:deployment) { create(:deployment, :succeed, :review_app) }
+
+ it 'returns metrics url' do
+ expect(subject[:metrics_url])
+ .to eq("/#{project.namespace.name}/#{project.name}/environments/#{environment.id}/deployments/#{deployment.iid}/metrics")
+ end
+ end
+
+ context 'when deployment is running' do
+ let(:deployment) { create(:deployment, :running, :review_app) }
+
+ it 'does not return metrics url' do
+ expect(subject[:metrics_url]).to be_nil
+ end
+ end
+ end
end
diff --git a/spec/services/clusters/applications/check_installation_progress_service_spec.rb b/spec/services/clusters/applications/check_installation_progress_service_spec.rb
index 1a565bb734d..ea17f2bb423 100644
--- a/spec/services/clusters/applications/check_installation_progress_service_spec.rb
+++ b/spec/services/clusters/applications/check_installation_progress_service_spec.rb
@@ -19,6 +19,10 @@ describe Clusters::Applications::CheckInstallationProgressService do
shared_examples 'a not yet terminated installation' do |a_phase|
let(:phase) { a_phase }
+ before do
+ expect(service).to receive(:installation_phase).once.and_return(phase)
+ end
+
context "when phase is #{a_phase}" do
context 'when not timeouted' do
it 'reschedule a new check' do
@@ -50,8 +54,6 @@ describe Clusters::Applications::CheckInstallationProgressService do
end
before do
- expect(service).to receive(:installation_phase).once.and_return(phase)
-
allow(service).to receive(:installation_errors).and_return(errors)
allow(service).to receive(:remove_installation_pod).and_return(nil)
end
@@ -60,6 +62,10 @@ describe Clusters::Applications::CheckInstallationProgressService do
context 'when installation POD succeeded' do
let(:phase) { Gitlab::Kubernetes::Pod::SUCCEEDED }
+ before do
+ expect(service).to receive(:installation_phase).once.and_return(phase)
+ end
+
it_behaves_like 'a terminated installation'
it 'make the application installed' do
@@ -76,6 +82,10 @@ describe Clusters::Applications::CheckInstallationProgressService do
let(:phase) { Gitlab::Kubernetes::Pod::FAILED }
let(:errors) { 'test installation failed' }
+ before do
+ expect(service).to receive(:installation_phase).once.and_return(phase)
+ end
+
it_behaves_like 'a terminated installation'
it 'make the application errored' do
@@ -87,5 +97,22 @@ describe Clusters::Applications::CheckInstallationProgressService do
end
RESCHEDULE_PHASES.each { |phase| it_behaves_like 'a not yet terminated installation', phase }
+
+ context 'when installation raises a Kubeclient::HttpError' do
+ let(:cluster) { create(:cluster, :provided_by_user, :project) }
+
+ before do
+ application.update!(cluster: cluster)
+
+ expect(service).to receive(:installation_phase).and_raise(Kubeclient::HttpError.new(401, 'Unauthorized', nil))
+ end
+
+ it 'shows the response code from the error' do
+ service.execute
+
+ expect(application).to be_errored
+ expect(application.status_reason).to eq('Kubernetes error: 401')
+ end
+ end
end
end
diff --git a/spec/services/clusters/applications/install_service_spec.rb b/spec/services/clusters/applications/install_service_spec.rb
index 4bd19f5bd79..2f801d019fe 100644
--- a/spec/services/clusters/applications/install_service_spec.rb
+++ b/spec/services/clusters/applications/install_service_spec.rb
@@ -42,7 +42,7 @@ describe Clusters::Applications::InstallService do
service.execute
expect(application).to be_errored
- expect(application.status_reason).to match('Kubernetes error.')
+ expect(application.status_reason).to match('Kubernetes error: 500')
end
end
diff --git a/spec/services/clusters/gcp/kubernetes/fetch_kubernetes_token_service_spec.rb b/spec/services/clusters/gcp/kubernetes/fetch_kubernetes_token_service_spec.rb
index 4d1a6bb7b3a..a5806559b14 100644
--- a/spec/services/clusters/gcp/kubernetes/fetch_kubernetes_token_service_spec.rb
+++ b/spec/services/clusters/gcp/kubernetes/fetch_kubernetes_token_service_spec.rb
@@ -19,13 +19,16 @@ describe Clusters::Gcp::Kubernetes::FetchKubernetesTokenService do
subject { described_class.new(kubeclient, service_account_token_name, namespace).execute }
+ before do
+ stub_kubeclient_discover(api_url)
+ end
+
context 'when params correct' do
let(:decoded_token) { 'xxx.token.xxx' }
let(:token) { Base64.encode64(decoded_token) }
context 'when gitlab-token exists' do
before do
- stub_kubeclient_discover(api_url)
stub_kubeclient_get_secret(
api_url,
{
@@ -39,9 +42,17 @@ describe Clusters::Gcp::Kubernetes::FetchKubernetesTokenService do
it { is_expected.to eq(decoded_token) }
end
+ context 'when there is a 500 error' do
+ before do
+ stub_kubeclient_get_secret_error(api_url, service_account_token_name, namespace: namespace, status: 500)
+ end
+
+ it { expect { subject }.to raise_error(Kubeclient::HttpError) }
+ end
+
context 'when gitlab-token does not exist' do
before do
- allow(kubeclient).to receive(:get_secret).and_raise(Kubeclient::HttpError.new(404, 'Not found', nil))
+ stub_kubeclient_get_secret_error(api_url, service_account_token_name, namespace: namespace, status: 404)
end
it { is_expected.to be_nil }
diff --git a/spec/services/users/build_service_spec.rb b/spec/services/users/build_service_spec.rb
index 051e8c87f39..17bc880dec5 100644
--- a/spec/services/users/build_service_spec.rb
+++ b/spec/services/users/build_service_spec.rb
@@ -159,9 +159,9 @@ describe Users::BuildService do
true | true | 'fl@example.com' | '' | true
true | false | 'fl@example.com' | '' | true
- true | nil | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | true
- true | true | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | true
- true | false | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | true
+ true | nil | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false
+ true | true | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false
+ true | false | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false
true | nil | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | true
true | true | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | true
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index cd69160be10..3fedb9ed48c 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -235,6 +235,10 @@ RSpec.configure do |config|
example.run if Gitlab::Database.mysql?
end
+ config.around(:each, :rails5) do |example|
+ example.run if Gitlab.rails5?
+ end
+
# This makes sure the `ApplicationController#can?` method is stubbed with the
# original implementation for all view specs.
config.before(:each, type: :view) do
diff --git a/spec/support/helpers/kubernetes_helpers.rb b/spec/support/helpers/kubernetes_helpers.rb
index a03d9c4045f..35ae04b16c6 100644
--- a/spec/support/helpers/kubernetes_helpers.rb
+++ b/spec/support/helpers/kubernetes_helpers.rb
@@ -41,9 +41,9 @@ module KubernetesHelpers
.to_return(kube_response(kube_v1_secret_body(options)))
end
- def stub_kubeclient_get_secret_error(api_url, name, namespace: 'default')
+ def stub_kubeclient_get_secret_error(api_url, name, namespace: 'default', status: 404)
WebMock.stub_request(:get, api_url + "/api/v1/namespaces/#{namespace}/secrets/#{name}")
- .to_return(status: [404, "Internal Server Error"])
+ .to_return(status: [status, "Internal Server Error"])
end
def stub_kubeclient_create_service_account(api_url, namespace: 'default')