524 files changed, 7271 insertions, 3797 deletions
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 407cd8696a2..b7f18673a58 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1,4 +1,4 @@ -image: "dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.4.5-golang-1.9-git-2.18-chrome-69.0-node-8.x-yarn-1.2-postgresql-9.6-graphicsmagick-1.3.29" +image: "dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.5.3-golang-1.9-git-2.18-chrome-69.0-node-10.x-yarn-1.12-postgresql-9.6-graphicsmagick-1.3.29" .dedicated-runner: &dedicated-runner retry: 1 @@ -6,7 +6,7 @@ image: "dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.4.5-golang-1.9-git - gitlab-org .default-cache: &default-cache - key: "ruby-2.4.5-debian-stretch-with-yarn" + key: "debian-stretch-ruby-2.5.3-node-10.x" paths: - vendor/ruby - .yarn-cache/ @@ -32,6 +32,7 @@ variables: GET_SOURCES_ATTEMPTS: "3" KNAPSACK_RSPEC_SUITE_REPORT_PATH: knapsack/${CI_PROJECT_NAME}/rspec_report-master.json FLAKY_RSPEC_SUITE_REPORT_PATH: rspec_flaky/report-suite.json + BUILD_ASSETS_IMAGE: "false" before_script: - bundle --version @@ -75,15 +76,17 @@ stages: - mysql:5.7 - redis:alpine -.rails5: &rails5 - allow_failure: true - only: +.rails4: &rails4 + allow_failure: false + except: variables: - - $CI_COMMIT_REF_NAME =~ /rails5/ - - $RAILS5_ENABLED + - $CI_COMMIT_REF_NAME =~ /(^docs[\/-].*|.*-docs$)/ + - $CI_COMMIT_REF_NAME =~ /(^qa[\/-].*|.*-qa$)/ + - $CI_COMMIT_REF_NAME =~ /norails4/ + - $RAILS5_DISABLED variables: - BUNDLE_GEMFILE: "Gemfile.rails5" - RAILS5: "true" + BUNDLE_GEMFILE: "Gemfile.rails4" + RAILS5: "false" # Skip all jobs except the ones that begin with 'docs/'. # Used for commits including ONLY documentation changes. @@ -121,7 +124,7 @@ stages: <<: *except-docs-and-qa .single-script-job: &single-script-job - image: ruby:2.4-alpine + image: ruby:2.5-alpine stage: test cache: {} dependencies: [] @@ -148,13 +151,12 @@ stages: stage: test script: - JOB_NAME=( $CI_JOB_NAME ) - - export CI_NODE_INDEX=${JOB_NAME[-2]} - - export CI_NODE_TOTAL=${JOB_NAME[-1]} - - export KNAPSACK_REPORT_PATH=knapsack/${CI_PROJECT_NAME}/${JOB_NAME[0]}_node_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json + - TEST_TOOL=${JOB_NAME[0]} + - export KNAPSACK_REPORT_PATH=knapsack/${CI_PROJECT_NAME}/${TEST_TOOL}_node_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json - export KNAPSACK_GENERATE_REPORT=true - export SUITE_FLAKY_RSPEC_REPORT_PATH=${FLAKY_RSPEC_SUITE_REPORT_PATH} - - export FLAKY_RSPEC_REPORT_PATH=rspec_flaky/all_${JOB_NAME[0]}_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json - - export NEW_FLAKY_RSPEC_REPORT_PATH=rspec_flaky/new_${JOB_NAME[0]}_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json + - export FLAKY_RSPEC_REPORT_PATH=rspec_flaky/all_${TEST_TOOL}_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json + - export NEW_FLAKY_RSPEC_REPORT_PATH=rspec_flaky/new_${TEST_TOOL}_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json - export FLAKY_RSPEC_GENERATE_REPORT=true - export CACHE_CLASSES=true - cp ${KNAPSACK_RSPEC_SUITE_REPORT_PATH} ${KNAPSACK_REPORT_PATH} @@ -177,17 +179,17 @@ stages: <<: *rspec-metadata <<: *use-pg -.rspec-metadata-pg-rails5: &rspec-metadata-pg-rails5 +.rspec-metadata-pg-rails4: &rspec-metadata-pg-rails4 <<: *rspec-metadata-pg - <<: *rails5 + <<: *rails4 .rspec-metadata-mysql: &rspec-metadata-mysql <<: *rspec-metadata <<: *use-mysql -.rspec-metadata-mysql-rails5: &rspec-metadata-mysql-rails5 +.rspec-metadata-mysql-rails4: &rspec-metadata-mysql-rails4 <<: *rspec-metadata-mysql - <<: *rails5 + <<: *rails4 .only-canonical-masters: &only-canonical-masters only: @@ -227,6 +229,8 @@ stages: script: - git fetch 
https://gitlab.com/gitlab-org/gitlab-ce.git v9.3.0 - git checkout -f FETCH_HEAD + - sed -i "s/gem 'oj', '~> 2.17.4'//" Gemfile + - bundle update google-protobuf grpc - bundle install $BUNDLE_INSTALL_FLAGS - date - cp config/gitlab.yml.example config/gitlab.yml @@ -316,9 +320,10 @@ review-docs-cleanup: # Trigger a docker image build in CNG (Cloud Native GitLab) repository # cloud-native-image: - image: ruby:2.4-alpine + image: ruby:2.5-alpine before_script: [] - stage: test + dependencies: [] + stage: post-test allow_failure: true variables: GIT_DEPTH: "1" @@ -368,7 +373,7 @@ update-tests-metadata: flaky-examples-check: <<: *dedicated-runner - image: ruby:2.4-alpine + image: ruby:2.5-alpine services: [] before_script: [] variables: @@ -426,7 +431,7 @@ setup-test-env: script: - bundle exec ruby -Ispec -e 'require "spec_helper" ; TestEnv.init' - scripts/gitaly-test-build # Do not use 'bundle exec' here - - BUNDLE_GEMFILE=Gemfile.rails5 bundle install $BUNDLE_INSTALL_FLAGS + - BUNDLE_GEMFILE=Gemfile.rails4 bundle install $BUNDLE_INSTALL_FLAGS artifacts: expire_in: 7d paths: @@ -456,129 +461,21 @@ danger-review: - yarn install --frozen-lockfile --cache-folder .yarn-cache - danger --fail-on-errors=true -rspec-pg 0 30: *rspec-metadata-pg -rspec-pg 1 30: *rspec-metadata-pg -rspec-pg 2 30: *rspec-metadata-pg -rspec-pg 3 30: *rspec-metadata-pg -rspec-pg 4 30: *rspec-metadata-pg -rspec-pg 5 30: *rspec-metadata-pg -rspec-pg 6 30: *rspec-metadata-pg -rspec-pg 7 30: *rspec-metadata-pg -rspec-pg 8 30: *rspec-metadata-pg -rspec-pg 9 30: *rspec-metadata-pg -rspec-pg 10 30: *rspec-metadata-pg -rspec-pg 11 30: *rspec-metadata-pg -rspec-pg 12 30: *rspec-metadata-pg -rspec-pg 13 30: *rspec-metadata-pg -rspec-pg 14 30: *rspec-metadata-pg -rspec-pg 15 30: *rspec-metadata-pg -rspec-pg 16 30: *rspec-metadata-pg -rspec-pg 17 30: *rspec-metadata-pg -rspec-pg 18 30: *rspec-metadata-pg -rspec-pg 19 30: *rspec-metadata-pg -rspec-pg 20 30: *rspec-metadata-pg -rspec-pg 21 30: *rspec-metadata-pg -rspec-pg 22 30: *rspec-metadata-pg -rspec-pg 23 30: *rspec-metadata-pg -rspec-pg 24 30: *rspec-metadata-pg -rspec-pg 25 30: *rspec-metadata-pg -rspec-pg 26 30: *rspec-metadata-pg -rspec-pg 27 30: *rspec-metadata-pg -rspec-pg 28 30: *rspec-metadata-pg -rspec-pg 29 30: *rspec-metadata-pg - -rspec-mysql 0 30: *rspec-metadata-mysql -rspec-mysql 1 30: *rspec-metadata-mysql -rspec-mysql 2 30: *rspec-metadata-mysql -rspec-mysql 3 30: *rspec-metadata-mysql -rspec-mysql 4 30: *rspec-metadata-mysql -rspec-mysql 5 30: *rspec-metadata-mysql -rspec-mysql 6 30: *rspec-metadata-mysql -rspec-mysql 7 30: *rspec-metadata-mysql -rspec-mysql 8 30: *rspec-metadata-mysql -rspec-mysql 9 30: *rspec-metadata-mysql -rspec-mysql 10 30: *rspec-metadata-mysql -rspec-mysql 11 30: *rspec-metadata-mysql -rspec-mysql 12 30: *rspec-metadata-mysql -rspec-mysql 13 30: *rspec-metadata-mysql -rspec-mysql 14 30: *rspec-metadata-mysql -rspec-mysql 15 30: *rspec-metadata-mysql -rspec-mysql 16 30: *rspec-metadata-mysql -rspec-mysql 17 30: *rspec-metadata-mysql -rspec-mysql 18 30: *rspec-metadata-mysql -rspec-mysql 19 30: *rspec-metadata-mysql -rspec-mysql 20 30: *rspec-metadata-mysql -rspec-mysql 21 30: *rspec-metadata-mysql -rspec-mysql 22 30: *rspec-metadata-mysql -rspec-mysql 23 30: *rspec-metadata-mysql -rspec-mysql 24 30: *rspec-metadata-mysql -rspec-mysql 25 30: *rspec-metadata-mysql -rspec-mysql 26 30: *rspec-metadata-mysql -rspec-mysql 27 30: *rspec-metadata-mysql -rspec-mysql 28 30: *rspec-metadata-mysql -rspec-mysql 29 30: *rspec-metadata-mysql - 
-rspec-pg-rails5 0 30: *rspec-metadata-pg-rails5 -rspec-pg-rails5 1 30: *rspec-metadata-pg-rails5 -rspec-pg-rails5 2 30: *rspec-metadata-pg-rails5 -rspec-pg-rails5 3 30: *rspec-metadata-pg-rails5 -rspec-pg-rails5 4 30: *rspec-metadata-pg-rails5 -rspec-pg-rails5 5 30: *rspec-metadata-pg-rails5 -rspec-pg-rails5 6 30: *rspec-metadata-pg-rails5 -rspec-pg-rails5 7 30: *rspec-metadata-pg-rails5 -rspec-pg-rails5 8 30: *rspec-metadata-pg-rails5 -rspec-pg-rails5 9 30: *rspec-metadata-pg-rails5 -rspec-pg-rails5 10 30: *rspec-metadata-pg-rails5 -rspec-pg-rails5 11 30: *rspec-metadata-pg-rails5 -rspec-pg-rails5 12 30: *rspec-metadata-pg-rails5 -rspec-pg-rails5 13 30: *rspec-metadata-pg-rails5 -rspec-pg-rails5 14 30: *rspec-metadata-pg-rails5 -rspec-pg-rails5 15 30: *rspec-metadata-pg-rails5 -rspec-pg-rails5 16 30: *rspec-metadata-pg-rails5 -rspec-pg-rails5 17 30: *rspec-metadata-pg-rails5 -rspec-pg-rails5 18 30: *rspec-metadata-pg-rails5 -rspec-pg-rails5 19 30: *rspec-metadata-pg-rails5 -rspec-pg-rails5 20 30: *rspec-metadata-pg-rails5 -rspec-pg-rails5 21 30: *rspec-metadata-pg-rails5 -rspec-pg-rails5 22 30: *rspec-metadata-pg-rails5 -rspec-pg-rails5 23 30: *rspec-metadata-pg-rails5 -rspec-pg-rails5 24 30: *rspec-metadata-pg-rails5 -rspec-pg-rails5 25 30: *rspec-metadata-pg-rails5 -rspec-pg-rails5 26 30: *rspec-metadata-pg-rails5 -rspec-pg-rails5 27 30: *rspec-metadata-pg-rails5 -rspec-pg-rails5 28 30: *rspec-metadata-pg-rails5 -rspec-pg-rails5 29 30: *rspec-metadata-pg-rails5 - -rspec-mysql-rails5 0 30: *rspec-metadata-mysql-rails5 -rspec-mysql-rails5 1 30: *rspec-metadata-mysql-rails5 -rspec-mysql-rails5 2 30: *rspec-metadata-mysql-rails5 -rspec-mysql-rails5 3 30: *rspec-metadata-mysql-rails5 -rspec-mysql-rails5 4 30: *rspec-metadata-mysql-rails5 -rspec-mysql-rails5 5 30: *rspec-metadata-mysql-rails5 -rspec-mysql-rails5 6 30: *rspec-metadata-mysql-rails5 -rspec-mysql-rails5 7 30: *rspec-metadata-mysql-rails5 -rspec-mysql-rails5 8 30: *rspec-metadata-mysql-rails5 -rspec-mysql-rails5 9 30: *rspec-metadata-mysql-rails5 -rspec-mysql-rails5 10 30: *rspec-metadata-mysql-rails5 -rspec-mysql-rails5 11 30: *rspec-metadata-mysql-rails5 -rspec-mysql-rails5 12 30: *rspec-metadata-mysql-rails5 -rspec-mysql-rails5 13 30: *rspec-metadata-mysql-rails5 -rspec-mysql-rails5 14 30: *rspec-metadata-mysql-rails5 -rspec-mysql-rails5 15 30: *rspec-metadata-mysql-rails5 -rspec-mysql-rails5 16 30: *rspec-metadata-mysql-rails5 -rspec-mysql-rails5 17 30: *rspec-metadata-mysql-rails5 -rspec-mysql-rails5 18 30: *rspec-metadata-mysql-rails5 -rspec-mysql-rails5 19 30: *rspec-metadata-mysql-rails5 -rspec-mysql-rails5 20 30: *rspec-metadata-mysql-rails5 -rspec-mysql-rails5 21 30: *rspec-metadata-mysql-rails5 -rspec-mysql-rails5 22 30: *rspec-metadata-mysql-rails5 -rspec-mysql-rails5 23 30: *rspec-metadata-mysql-rails5 -rspec-mysql-rails5 24 30: *rspec-metadata-mysql-rails5 -rspec-mysql-rails5 25 30: *rspec-metadata-mysql-rails5 -rspec-mysql-rails5 26 30: *rspec-metadata-mysql-rails5 -rspec-mysql-rails5 27 30: *rspec-metadata-mysql-rails5 -rspec-mysql-rails5 28 30: *rspec-metadata-mysql-rails5 -rspec-mysql-rails5 29 30: *rspec-metadata-mysql-rails5 +rspec-pg: + <<: *rspec-metadata-pg + parallel: 30 + +rspec-mysql: + <<: *rspec-metadata-mysql + parallel: 30 + +rspec-pg-rails4: + <<: *rspec-metadata-pg-rails4 + parallel: 30 + +rspec-mysql-rails4: + <<: *rspec-metadata-mysql-rails4 + parallel: 30 static-analysis: <<: *dedicated-no-docs-no-db-pull-cache-job @@ -588,7 +485,7 @@ static-analysis: script: - scripts/static-analysis cache: - 
key: "ruby-2.4.5-debian-stretch-with-yarn-and-rubocop" + key: "debian-stretch-ruby-2.5.3-node-10.x-and-rubocop" paths: - vendor/ruby - .yarn-cache/ @@ -624,14 +521,15 @@ downtime_check: - /(^docs[\/-].*|.*-docs$)/ - /(^qa[\/-].*|.*-qa$)/ -rails5_gemfile_lock_check: +rails4_gemfile_lock_check: <<: *dedicated-no-docs-no-db-pull-cache-job <<: *except-docs-and-qa script: - - scripts/rails5-gemfile-lock-check + - scripts/rails4-gemfile-lock-check ee_compat_check: <<: *rake-exec + dependencies: [] except: - master - tags @@ -694,7 +592,7 @@ gitlab:setup-mysql: # Frontend-related jobs gitlab:assets:compile: <<: *dedicated-no-docs-and-no-qa-pull-cache-job - image: dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.4.4-git-2.18-chrome-69.0-node-8.x-yarn-1.2-graphicsmagick-1.3.29-docker-18.06.1 + image: dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.5.3-git-2.18-chrome-69.0-node-8.x-yarn-1.2-graphicsmagick-1.3.29-docker-18.06.1 dependencies: [] services: - docker:stable-dind @@ -774,7 +672,8 @@ code_quality: --volume /var/run/docker.sock:/var/run/docker.sock "registry.gitlab.com/gitlab-org/security-products/codequality:$SP_VERSION" /code artifacts: - paths: [gl-code-quality-report.json] + reports: + codequality: gl-code-quality-report.json expire_in: 1 week sast: @@ -798,7 +697,8 @@ sast: --volume /var/run/docker.sock:/var/run/docker.sock "registry.gitlab.com/gitlab-org/security-products/sast:$SP_VERSION" /app/bin/run /code artifacts: - paths: [gl-sast-report.json] + reports: + sast: gl-sast-report.json dependency_scanning: <<: *dedicated-no-docs-no-db-pull-cache-job @@ -820,7 +720,8 @@ dependency_scanning: --volume /var/run/docker.sock:/var/run/docker.sock "registry.gitlab.com/gitlab-org/security-products/dependency-scanning:$SP_VERSION" /code artifacts: - paths: [gl-dependency-scanning-report.json] + reports: + dependency_scanning: gl-dependency-scanning-report.json qa:internal: <<: *dedicated-no-docs-no-db-pull-cache-job @@ -860,9 +761,7 @@ coverage: lint:javascript:report: <<: *dedicated-no-docs-and-no-qa-pull-cache-job stage: post-test - dependencies: - - compile-assets - - setup-test-env + dependencies: [] before_script: [] script: - date @@ -916,6 +815,7 @@ gitlab_git_test: variables: SETUP_DB: "false" before_script: [] + dependencies: [] cache: {} script: - spec/support/prepare-gitlab-git-test-for-commit --check-for-changes @@ -926,6 +826,7 @@ no_ee_check: variables: SETUP_DB: "false" before_script: [] + dependencies: [] cache: {} script: - scripts/no-ee-check diff --git a/.gitlab/CODEOWNERS b/.gitlab/CODEOWNERS.disabled index a4b773b15a9..a4b773b15a9 100644 --- a/.gitlab/CODEOWNERS +++ b/.gitlab/CODEOWNERS.disabled @@ -1 +1 @@ -8.11.3 +10.13.0 diff --git a/.ruby-version b/.ruby-version index 59aa62c1fa4..aedc15bb0c6 100644 --- a/.ruby-version +++ b/.ruby-version @@ -1 +1 @@ -2.4.5 +2.5.3 diff --git a/GITALY_SERVER_VERSION b/GITALY_SERVER_VERSION index 4db8830b115..e491d6243c6 100644 --- a/GITALY_SERVER_VERSION +++ b/GITALY_SERVER_VERSION @@ -1 +1 @@ -0.129.0 +0.132.0 diff --git a/GITLAB_WORKHORSE_VERSION b/GITLAB_WORKHORSE_VERSION index a3fcc7121bb..21c8c7b46b8 100644 --- a/GITLAB_WORKHORSE_VERSION +++ b/GITLAB_WORKHORSE_VERSION @@ -1 +1 @@ -7.1.0 +7.1.1 @@ -1,6 +1,6 @@ # --- Special code for migrating to Rails 5.0 --- def rails5? 
- %w[1 true].include?(ENV["RAILS5"]) + !%w[0 false].include?(ENV["RAILS5"]) end gem_versions = {} @@ -315,7 +315,7 @@ group :development do # Better errors handler gem 'better_errors', '~> 2.1.0' - gem 'binding_of_caller', '~> 0.7.2' + gem 'binding_of_caller', '~> 0.8.0' # thin instead webrick gem 'thin', '~> 1.7.0' @@ -342,7 +342,7 @@ group :development, :test do gem 'minitest', '~> 5.7.0' # Generate Fake data - gem 'ffaker', '~> 2.4' + gem 'ffaker', '~> 2.10' gem 'capybara', '~> 2.15' gem 'capybara-screenshot', '~> 1.0.0' @@ -357,14 +357,14 @@ group :development, :test do gem 'rubocop-rspec', '~> 1.22.1' gem 'scss_lint', '~> 0.56.0', require: false - gem 'haml_lint', '~> 0.26.0', require: false + gem 'haml_lint', '~> 0.28.0', require: false gem 'simplecov', '~> 0.14.0', require: false gem 'bundler-audit', '~> 0.5.0', require: false gem 'benchmark-ips', '~> 2.3.0', require: false gem 'license_finder', '~> 5.4', require: false - gem 'knapsack', '~> 1.16' + gem 'knapsack', '~> 1.17' gem 'activerecord_sane_schema_dumper', gem_versions['activerecord_sane_schema_dumper'] diff --git a/Gemfile.lock b/Gemfile.lock index 8570fabb72f..e21a1b85457 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -4,41 +4,44 @@ GEM RedCloth (4.3.2) abstract_type (0.0.7) ace-rails-ap (4.1.2) - actionmailer (4.2.10) - actionpack (= 4.2.10) - actionview (= 4.2.10) - activejob (= 4.2.10) + actioncable (5.0.7) + actionpack (= 5.0.7) + nio4r (>= 1.2, < 3.0) + websocket-driver (~> 0.6.1) + actionmailer (5.0.7) + actionpack (= 5.0.7) + actionview (= 5.0.7) + activejob (= 5.0.7) mail (~> 2.5, >= 2.5.4) - rails-dom-testing (~> 1.0, >= 1.0.5) - actionpack (4.2.10) - actionview (= 4.2.10) - activesupport (= 4.2.10) - rack (~> 1.6) - rack-test (~> 0.6.2) - rails-dom-testing (~> 1.0, >= 1.0.5) + rails-dom-testing (~> 2.0) + actionpack (5.0.7) + actionview (= 5.0.7) + activesupport (= 5.0.7) + rack (~> 2.0) + rack-test (~> 0.6.3) + rails-dom-testing (~> 2.0) rails-html-sanitizer (~> 1.0, >= 1.0.2) - actionview (4.2.10) - activesupport (= 4.2.10) + actionview (5.0.7) + activesupport (= 5.0.7) builder (~> 3.1) erubis (~> 2.7.0) - rails-dom-testing (~> 1.0, >= 1.0.5) + rails-dom-testing (~> 2.0) rails-html-sanitizer (~> 1.0, >= 1.0.3) - activejob (4.2.10) - activesupport (= 4.2.10) - globalid (>= 0.3.0) - activemodel (4.2.10) - activesupport (= 4.2.10) - builder (~> 3.1) - activerecord (4.2.10) - activemodel (= 4.2.10) - activesupport (= 4.2.10) - arel (~> 6.0) - activerecord_sane_schema_dumper (0.2) - rails (>= 4, < 5) - activesupport (4.2.10) - i18n (~> 0.7) + activejob (5.0.7) + activesupport (= 5.0.7) + globalid (>= 0.3.6) + activemodel (5.0.7) + activesupport (= 5.0.7) + activerecord (5.0.7) + activemodel (= 5.0.7) + activesupport (= 5.0.7) + arel (~> 7.0) + activerecord_sane_schema_dumper (1.0) + rails (>= 5, < 6) + activesupport (5.0.7) + concurrent-ruby (~> 1.0, >= 1.0.2) + i18n (>= 0.7, < 2) minitest (~> 5.1) - thread_safe (~> 0.3, >= 0.3.4) tzinfo (~> 1.1) acts-as-taggable-on (5.0.0) activerecord (>= 4.2.8) @@ -49,7 +52,7 @@ GEM public_suffix (>= 2.0.2, < 4.0) aes_key_wrap (1.0.1) akismet (2.0.0) - arel (6.0.4) + arel (7.1.4) asana (0.6.0) faraday (~> 0.9) faraday_middleware (~> 0.9) @@ -79,7 +82,7 @@ GEM erubis (>= 2.6.6) rack (>= 0.9.0) bindata (2.4.3) - binding_of_caller (0.7.2) + binding_of_caller (0.8.0) debug_inspector (>= 0.0.1) bootsnap (1.3.2) msgpack (~> 1.0) @@ -137,14 +140,14 @@ GEM addressable daemons (1.2.6) database_cleaner (1.5.3) - debug_inspector (0.0.2) + debug_inspector (0.0.3) debugger-ruby_core_source 
(1.3.8) deckar01-task_list (2.0.0) html-pipeline declarative (0.0.10) declarative-option (0.1.0) - default_value_for (3.0.2) - activerecord (>= 3.2.0, < 5.1) + default_value_for (3.0.5) + activerecord (>= 3.2.0, < 5.2) descendants_tracker (0.0.4) thread_safe (~> 0.3, >= 0.3.1) device_detector (1.0.0) @@ -202,7 +205,7 @@ GEM multi_json fast_blank (1.0.0) fast_gettext (1.6.0) - ffaker (2.4.0) + ffaker (2.10.0) ffi (1.9.25) flipper (0.13.0) flipper-active_record (0.13.0) @@ -337,11 +340,11 @@ GEM haml (5.0.4) temple (>= 0.8.0) tilt - haml_lint (0.26.0) + haml_lint (0.28.0) haml (>= 4.0, < 5.1) rainbow rake (>= 10, < 13) - rubocop (>= 0.49.0) + rubocop (>= 0.50.0) sysexits (~> 1.1) hamlit (2.8.8) temple (>= 0.8.0) @@ -376,7 +379,7 @@ GEM json (~> 1.8) multi_xml (>= 0.5.2) httpclient (2.8.3) - i18n (0.9.5) + i18n (1.1.0) concurrent-ruby (~> 1.0) icalendar (2.4.1) ice_nine (0.11.2) @@ -412,7 +415,7 @@ GEM kaminari-core (= 1.0.1) kaminari-core (1.0.1) kgio (2.10.0) - knapsack (1.16.0) + knapsack (1.17.0) rake kubeclient (3.1.0) http (~> 2.2.2) @@ -451,9 +454,9 @@ GEM memoizable (0.4.2) thread_safe (~> 0.3, >= 0.3.1) method_source (0.9.0) - mime-types (3.1) + mime-types (3.2.2) mime-types-data (~> 3.2015) - mime-types-data (3.2016.0521) + mime-types-data (3.2018.0812) mimemagic (0.3.0) mini_magick (4.8.0) mini_mime (1.0.1) @@ -470,6 +473,7 @@ GEM net-ldap (0.16.0) net-ssh (5.0.1) netrc (0.11.0) + nio4r (2.3.1) nokogiri (1.8.4) mini_portile2 (~> 2.3.0) nokogumbo (1.5.0) @@ -598,7 +602,7 @@ GEM get_process_mem (~> 0.2) puma (>= 2.7, < 4) pyu-ruby-sasl (0.0.3.3) - rack (1.6.11) + rack (2.0.5) rack-accept (0.4.5) rack (>= 0.4) rack-attack (4.4.1) @@ -616,31 +620,36 @@ GEM rack rack-test (0.6.3) rack (>= 1.0) - rails (4.2.10) - actionmailer (= 4.2.10) - actionpack (= 4.2.10) - actionview (= 4.2.10) - activejob (= 4.2.10) - activemodel (= 4.2.10) - activerecord (= 4.2.10) - activesupport (= 4.2.10) - bundler (>= 1.3.0, < 2.0) - railties (= 4.2.10) - sprockets-rails + rails (5.0.7) + actioncable (= 5.0.7) + actionmailer (= 5.0.7) + actionpack (= 5.0.7) + actionview (= 5.0.7) + activejob (= 5.0.7) + activemodel (= 5.0.7) + activerecord (= 5.0.7) + activesupport (= 5.0.7) + bundler (>= 1.3.0) + railties (= 5.0.7) + sprockets-rails (>= 2.0.0) + rails-controller-testing (1.0.2) + actionpack (~> 5.x, >= 5.0.1) + actionview (~> 5.x, >= 5.0.1) + activesupport (~> 5.x) rails-deprecated_sanitizer (1.0.3) activesupport (>= 4.2.0.alpha) - rails-dom-testing (1.0.9) - activesupport (>= 4.2.0, < 5.0) - nokogiri (~> 1.6) - rails-deprecated_sanitizer (>= 1.0.1) + rails-dom-testing (2.0.3) + activesupport (>= 4.2.0) + nokogiri (>= 1.6) rails-html-sanitizer (1.0.4) loofah (~> 2.2, >= 2.2.2) - rails-i18n (4.0.9) - i18n (~> 0.7) - railties (~> 4.0) - railties (4.2.10) - actionpack (= 4.2.10) - activesupport (= 4.2.10) + rails-i18n (5.1.1) + i18n (>= 0.7, < 2) + railties (>= 5.0, < 6) + railties (5.0.7) + actionpack (= 5.0.7) + activesupport (= 5.0.7) + method_source rake (>= 0.8.7) thor (>= 0.18.1, < 2.0) rainbow (3.0.0) @@ -678,7 +687,7 @@ GEM redis-actionpack (>= 5.0, < 6) redis-activesupport (>= 5.0, < 6) redis-store (>= 1.2, < 2) - redis-store (1.4.1) + redis-store (1.6.0) redis (>= 2.2, < 5) regexp_parser (0.5.0) representable (3.0.4) @@ -731,8 +740,7 @@ GEM rspec-core rspec-set (0.1.3) rspec-support (3.7.1) - rspec_junit_formatter (0.2.3) - builder (< 4) + rspec_junit_formatter (0.4.1) rspec-core (>= 2, < 4, != 2.12.0) rspec_profiling (0.0.5) activerecord @@ -849,8 +857,6 @@ GEM sysexits (1.2.0) temple (0.8.0) 
test-prof (0.2.5) - test_after_commit (1.1.0) - activerecord (>= 3.2) text (1.3.1) thin (1.7.2) daemons (~> 1.0, >= 1.0.9) @@ -913,6 +919,9 @@ GEM hashdiff webpack-rails (0.9.11) railties (>= 3.2.0) + websocket-driver (0.6.5) + websocket-extensions (>= 0.1.0) + websocket-extensions (0.1.3) wikicloth (0.8.1) builder expression_parser @@ -928,7 +937,7 @@ PLATFORMS DEPENDENCIES RedCloth (~> 4.3.2) ace-rails-ap (~> 4.1.0) - activerecord_sane_schema_dumper (= 0.2) + activerecord_sane_schema_dumper (= 1.0) acts-as-taggable-on (~> 5.0) addressable (~> 2.5.2) akismet (~> 2.0) @@ -943,7 +952,7 @@ DEPENDENCIES bcrypt_pbkdf (~> 1.0) benchmark-ips (~> 2.3.0) better_errors (~> 2.1.0) - binding_of_caller (~> 0.7.2) + binding_of_caller (~> 0.8.0) bootsnap (~> 1.3) bootstrap_form (~> 2.7.0) brakeman (~> 4.2) @@ -962,7 +971,7 @@ DEPENDENCIES creole (~> 0.5.0) database_cleaner (~> 1.5.0) deckar01-task_list (= 2.0.0) - default_value_for (~> 3.0.0) + default_value_for (~> 3.0.5) device_detector devise (~> 4.4) devise-two-factor (~> 3.0.0) @@ -977,7 +986,7 @@ DEPENDENCIES factory_bot_rails (~> 4.8.2) faraday (~> 0.12) fast_blank - ffaker (~> 2.4) + ffaker (~> 2.10) flipper (~> 0.13.0) flipper-active_record (~> 0.13.0) flipper-active_support_cache_store (~> 0.13.0) @@ -1013,7 +1022,7 @@ DEPENDENCIES graphiql-rails (~> 1.4.10) graphql (~> 1.8.0) grpc (~> 1.15.0) - haml_lint (~> 0.26.0) + haml_lint (~> 0.28.0) hamlit (~> 2.8.8) hangouts-chat (~> 0.0.5) hashie-forbidden_attributes @@ -1030,7 +1039,7 @@ DEPENDENCIES json-schema (~> 2.8.0) jwt (~> 1.5.6) kaminari (~> 1.0) - knapsack (~> 1.16) + knapsack (~> 1.17) kubeclient (~> 3.1.0) letter_opener_web (~> 1.3.0) license_finder (~> 5.4) @@ -1080,9 +1089,10 @@ DEPENDENCIES rack-cors (~> 1.0.0) rack-oauth2 (~> 1.2.1) rack-proxy (~> 0.6.0) - rails (= 4.2.10) + rails (= 5.0.7) + rails-controller-testing rails-deprecated_sanitizer (~> 1.0.3) - rails-i18n (~> 4.0.9) + rails-i18n (~> 5.1) rainbow (~> 3.0) raindrops (~> 0.18) rblineprof (~> 0.3.6) @@ -1135,7 +1145,6 @@ DEPENDENCIES state_machines-activerecord (~> 0.5.1) sys-filesystem (~> 1.1.6) test-prof (~> 0.2.5) - test_after_commit (~> 1.1) thin (~> 1.7.0) timecop (~> 0.8.0) toml-rb (~> 1.0.0) diff --git a/Gemfile.rails5 b/Gemfile.rails4 index 2b526b19ba0..0ec00e702aa 100644 --- a/Gemfile.rails5 +++ b/Gemfile.rails4 @@ -1,6 +1,6 @@ -# BUNDLE_GEMFILE=Gemfile.rails5 bundle install +# BUNDLE_GEMFILE=Gemfile.rails4 bundle install -ENV["RAILS5"] = "true" +ENV["RAILS5"] = "false" gemfile = File.expand_path("../Gemfile", __FILE__) diff --git a/Gemfile.rails5.lock b/Gemfile.rails4.lock index 31f06121884..fea3102b8d6 100644 --- a/Gemfile.rails5.lock +++ b/Gemfile.rails4.lock @@ -4,44 +4,41 @@ GEM RedCloth (4.3.2) abstract_type (0.0.7) ace-rails-ap (4.1.2) - actioncable (5.0.7) - actionpack (= 5.0.7) - nio4r (>= 1.2, < 3.0) - websocket-driver (~> 0.6.1) - actionmailer (5.0.7) - actionpack (= 5.0.7) - actionview (= 5.0.7) - activejob (= 5.0.7) + actionmailer (4.2.10) + actionpack (= 4.2.10) + actionview (= 4.2.10) + activejob (= 4.2.10) mail (~> 2.5, >= 2.5.4) - rails-dom-testing (~> 2.0) - actionpack (5.0.7) - actionview (= 5.0.7) - activesupport (= 5.0.7) - rack (~> 2.0) - rack-test (~> 0.6.3) - rails-dom-testing (~> 2.0) + rails-dom-testing (~> 1.0, >= 1.0.5) + actionpack (4.2.10) + actionview (= 4.2.10) + activesupport (= 4.2.10) + rack (~> 1.6) + rack-test (~> 0.6.2) + rails-dom-testing (~> 1.0, >= 1.0.5) rails-html-sanitizer (~> 1.0, >= 1.0.2) - actionview (5.0.7) - activesupport (= 5.0.7) + actionview (4.2.10) + 
activesupport (= 4.2.10) builder (~> 3.1) erubis (~> 2.7.0) - rails-dom-testing (~> 2.0) + rails-dom-testing (~> 1.0, >= 1.0.5) rails-html-sanitizer (~> 1.0, >= 1.0.3) - activejob (5.0.7) - activesupport (= 5.0.7) - globalid (>= 0.3.6) - activemodel (5.0.7) - activesupport (= 5.0.7) - activerecord (5.0.7) - activemodel (= 5.0.7) - activesupport (= 5.0.7) - arel (~> 7.0) - activerecord_sane_schema_dumper (1.0) - rails (>= 5, < 6) - activesupport (5.0.7) - concurrent-ruby (~> 1.0, >= 1.0.2) - i18n (>= 0.7, < 2) + activejob (4.2.10) + activesupport (= 4.2.10) + globalid (>= 0.3.0) + activemodel (4.2.10) + activesupport (= 4.2.10) + builder (~> 3.1) + activerecord (4.2.10) + activemodel (= 4.2.10) + activesupport (= 4.2.10) + arel (~> 6.0) + activerecord_sane_schema_dumper (0.2) + rails (>= 4, < 5) + activesupport (4.2.10) + i18n (~> 0.7) minitest (~> 5.1) + thread_safe (~> 0.3, >= 0.3.4) tzinfo (~> 1.1) acts-as-taggable-on (5.0.0) activerecord (>= 4.2.8) @@ -52,7 +49,7 @@ GEM public_suffix (>= 2.0.2, < 4.0) aes_key_wrap (1.0.1) akismet (2.0.0) - arel (7.1.4) + arel (6.0.4) asana (0.6.0) faraday (~> 0.9) faraday_middleware (~> 0.9) @@ -82,7 +79,7 @@ GEM erubis (>= 2.6.6) rack (>= 0.9.0) bindata (2.4.3) - binding_of_caller (0.7.2) + binding_of_caller (0.8.0) debug_inspector (>= 0.0.1) bootsnap (1.3.2) msgpack (~> 1.0) @@ -140,14 +137,14 @@ GEM addressable daemons (1.2.6) database_cleaner (1.5.3) - debug_inspector (0.0.2) + debug_inspector (0.0.3) debugger-ruby_core_source (1.3.8) deckar01-task_list (2.0.0) html-pipeline declarative (0.0.10) declarative-option (0.1.0) - default_value_for (3.0.5) - activerecord (>= 3.2.0, < 5.2) + default_value_for (3.0.2) + activerecord (>= 3.2.0, < 5.1) descendants_tracker (0.0.4) thread_safe (~> 0.3, >= 0.3.1) device_detector (1.0.0) @@ -205,7 +202,7 @@ GEM multi_json fast_blank (1.0.0) fast_gettext (1.6.0) - ffaker (2.4.0) + ffaker (2.10.0) ffi (1.9.25) flipper (0.13.0) flipper-active_record (0.13.0) @@ -340,11 +337,11 @@ GEM haml (5.0.4) temple (>= 0.8.0) tilt - haml_lint (0.26.0) + haml_lint (0.28.0) haml (>= 4.0, < 5.1) rainbow rake (>= 10, < 13) - rubocop (>= 0.49.0) + rubocop (>= 0.50.0) sysexits (~> 1.1) hamlit (2.8.8) temple (>= 0.8.0) @@ -379,7 +376,7 @@ GEM json (~> 1.8) multi_xml (>= 0.5.2) httpclient (2.8.3) - i18n (1.1.0) + i18n (0.9.5) concurrent-ruby (~> 1.0) icalendar (2.4.1) ice_nine (0.11.2) @@ -415,7 +412,7 @@ GEM kaminari-core (= 1.0.1) kaminari-core (1.0.1) kgio (2.10.0) - knapsack (1.16.0) + knapsack (1.17.0) rake kubeclient (3.1.0) http (~> 2.2.2) @@ -454,9 +451,9 @@ GEM memoizable (0.4.2) thread_safe (~> 0.3, >= 0.3.1) method_source (0.9.0) - mime-types (3.1) + mime-types (3.2.2) mime-types-data (~> 3.2015) - mime-types-data (3.2016.0521) + mime-types-data (3.2018.0812) mimemagic (0.3.0) mini_magick (4.8.0) mini_mime (1.0.1) @@ -473,7 +470,6 @@ GEM net-ldap (0.16.0) net-ssh (5.0.1) netrc (0.11.0) - nio4r (2.3.1) nokogiri (1.8.4) mini_portile2 (~> 2.3.0) nokogumbo (1.5.0) @@ -546,7 +542,7 @@ GEM orm_adapter (0.5.0) os (1.0.0) parallel (1.12.1) - parser (2.5.1.2) + parser (2.5.3.0) ast (~> 2.4.0) parslet (1.8.2) peek (1.0.1) @@ -602,7 +598,7 @@ GEM get_process_mem (~> 0.2) puma (>= 2.7, < 4) pyu-ruby-sasl (0.0.3.3) - rack (2.0.5) + rack (1.6.11) rack-accept (0.4.5) rack (>= 0.4) rack-attack (4.4.1) @@ -620,36 +616,31 @@ GEM rack rack-test (0.6.3) rack (>= 1.0) - rails (5.0.7) - actioncable (= 5.0.7) - actionmailer (= 5.0.7) - actionpack (= 5.0.7) - actionview (= 5.0.7) - activejob (= 5.0.7) - activemodel (= 5.0.7) - activerecord (= 5.0.7) 
- activesupport (= 5.0.7) - bundler (>= 1.3.0) - railties (= 5.0.7) - sprockets-rails (>= 2.0.0) - rails-controller-testing (1.0.2) - actionpack (~> 5.x, >= 5.0.1) - actionview (~> 5.x, >= 5.0.1) - activesupport (~> 5.x) + rails (4.2.10) + actionmailer (= 4.2.10) + actionpack (= 4.2.10) + actionview (= 4.2.10) + activejob (= 4.2.10) + activemodel (= 4.2.10) + activerecord (= 4.2.10) + activesupport (= 4.2.10) + bundler (>= 1.3.0, < 2.0) + railties (= 4.2.10) + sprockets-rails rails-deprecated_sanitizer (1.0.3) activesupport (>= 4.2.0.alpha) - rails-dom-testing (2.0.3) - activesupport (>= 4.2.0) - nokogiri (>= 1.6) + rails-dom-testing (1.0.9) + activesupport (>= 4.2.0, < 5.0) + nokogiri (~> 1.6) + rails-deprecated_sanitizer (>= 1.0.1) rails-html-sanitizer (1.0.4) loofah (~> 2.2, >= 2.2.2) - rails-i18n (5.1.1) - i18n (>= 0.7, < 2) - railties (>= 5.0, < 6) - railties (5.0.7) - actionpack (= 5.0.7) - activesupport (= 5.0.7) - method_source + rails-i18n (4.0.9) + i18n (~> 0.7) + railties (~> 4.0) + railties (4.2.10) + actionpack (= 4.2.10) + activesupport (= 4.2.10) rake (>= 0.8.7) thor (>= 0.18.1, < 2.0) rainbow (3.0.0) @@ -687,7 +678,7 @@ GEM redis-actionpack (>= 5.0, < 6) redis-activesupport (>= 5.0, < 6) redis-store (>= 1.2, < 2) - redis-store (1.4.1) + redis-store (1.6.0) redis (>= 2.2, < 5) regexp_parser (0.5.0) representable (3.0.4) @@ -740,7 +731,8 @@ GEM rspec-core rspec-set (0.1.3) rspec-support (3.7.1) - rspec_junit_formatter (0.4.1) + rspec_junit_formatter (0.2.3) + builder (< 4) rspec-core (>= 2, < 4, != 2.12.0) rspec_profiling (0.0.5) activerecord @@ -857,6 +849,8 @@ GEM sysexits (1.2.0) temple (0.8.0) test-prof (0.2.5) + test_after_commit (1.1.0) + activerecord (>= 3.2) text (1.3.1) thin (1.7.2) daemons (~> 1.0, >= 1.0.9) @@ -919,9 +913,6 @@ GEM hashdiff webpack-rails (0.9.11) railties (>= 3.2.0) - websocket-driver (0.6.5) - websocket-extensions (>= 0.1.0) - websocket-extensions (0.1.3) wikicloth (0.8.1) builder expression_parser @@ -937,7 +928,7 @@ PLATFORMS DEPENDENCIES RedCloth (~> 4.3.2) ace-rails-ap (~> 4.1.0) - activerecord_sane_schema_dumper (= 1.0) + activerecord_sane_schema_dumper (= 0.2) acts-as-taggable-on (~> 5.0) addressable (~> 2.5.2) akismet (~> 2.0) @@ -952,7 +943,7 @@ DEPENDENCIES bcrypt_pbkdf (~> 1.0) benchmark-ips (~> 2.3.0) better_errors (~> 2.1.0) - binding_of_caller (~> 0.7.2) + binding_of_caller (~> 0.8.0) bootsnap (~> 1.3) bootstrap_form (~> 2.7.0) brakeman (~> 4.2) @@ -971,7 +962,7 @@ DEPENDENCIES creole (~> 0.5.0) database_cleaner (~> 1.5.0) deckar01-task_list (= 2.0.0) - default_value_for (~> 3.0.5) + default_value_for (~> 3.0.0) device_detector devise (~> 4.4) devise-two-factor (~> 3.0.0) @@ -986,7 +977,7 @@ DEPENDENCIES factory_bot_rails (~> 4.8.2) faraday (~> 0.12) fast_blank - ffaker (~> 2.4) + ffaker (~> 2.10) flipper (~> 0.13.0) flipper-active_record (~> 0.13.0) flipper-active_support_cache_store (~> 0.13.0) @@ -1022,7 +1013,7 @@ DEPENDENCIES graphiql-rails (~> 1.4.10) graphql (~> 1.8.0) grpc (~> 1.15.0) - haml_lint (~> 0.26.0) + haml_lint (~> 0.28.0) hamlit (~> 2.8.8) hangouts-chat (~> 0.0.5) hashie-forbidden_attributes @@ -1039,7 +1030,7 @@ DEPENDENCIES json-schema (~> 2.8.0) jwt (~> 1.5.6) kaminari (~> 1.0) - knapsack (~> 1.16) + knapsack (~> 1.17) kubeclient (~> 3.1.0) letter_opener_web (~> 1.3.0) license_finder (~> 5.4) @@ -1089,10 +1080,9 @@ DEPENDENCIES rack-cors (~> 1.0.0) rack-oauth2 (~> 1.2.1) rack-proxy (~> 0.6.0) - rails (= 5.0.7) - rails-controller-testing + rails (= 4.2.10) rails-deprecated_sanitizer (~> 1.0.3) - rails-i18n (~> 5.1) 
+ rails-i18n (~> 4.0.9) rainbow (~> 3.0) raindrops (~> 0.18) rblineprof (~> 0.3.6) @@ -1145,6 +1135,7 @@ DEPENDENCIES state_machines-activerecord (~> 0.5.1) sys-filesystem (~> 1.1.6) test-prof (~> 0.2.5) + test_after_commit (~> 1.1) thin (~> 1.7.0) timecop (~> 0.8.0) toml-rb (~> 1.0.0) diff --git a/app/assets/javascripts/diffs/components/app.vue b/app/assets/javascripts/diffs/components/app.vue index 7c60fb3da42..b885fa49365 100644 --- a/app/assets/javascripts/diffs/components/app.vue +++ b/app/assets/javascripts/diffs/components/app.vue @@ -94,7 +94,7 @@ export default { return __('Show latest version'); }, canCurrentUserFork() { - return this.currentUser.canFork === true && this.currentUser.canCreateMergeRequest; + return this.currentUser.can_fork === true && this.currentUser.can_create_merge_request; }, showCompareVersions() { return this.mergeRequestDiffs && this.mergeRequestDiff; @@ -128,6 +128,7 @@ export default { eventHub.$once('fetchedNotesData', this.setDiscussions); }, methods: { + ...mapActions(['startTaskList']), ...mapActions('diffs', [ 'setBaseConfig', 'fetchDiffFiles', @@ -157,7 +158,13 @@ export default { if (this.isNotesFetched && !this.assignedDiscussions && !this.isLoading) { this.assignedDiscussions = true; - requestIdleCallback(() => this.assignDiscussionsToDiff(), { timeout: 1000 }); + requestIdleCallback( + () => + this.assignDiscussionsToDiff() + .then(this.$nextTick) + .then(this.startTaskList), + { timeout: 1000 }, + ); } }, adjustView() { diff --git a/app/assets/javascripts/diffs/components/commit_item.vue b/app/assets/javascripts/diffs/components/commit_item.vue index 23d0bad2ecb..aa72aca1478 100644 --- a/app/assets/javascripts/diffs/components/commit_item.vue +++ b/app/assets/javascripts/diffs/components/commit_item.vue @@ -40,15 +40,17 @@ export default { }, computed: { authorName() { - return (this.commit.author && this.commit.author.name) || this.commit.authorName; + return (this.commit.author && this.commit.author.name) || this.commit.author_name; }, authorUrl() { return ( - (this.commit.author && this.commit.author.webUrl) || `mailto:${this.commit.authorEmail}` + (this.commit.author && this.commit.author.web_url) || `mailto:${this.commit.author_email}` ); }, authorAvatar() { - return (this.commit.author && this.commit.author.avatarUrl) || this.commit.authorGravatarUrl; + return ( + (this.commit.author && this.commit.author.avatar_url) || this.commit.author_gravatar_url + ); }, }, }; @@ -66,18 +68,18 @@ export default { <div class="commit-detail flex-list"> <div class="commit-content qa-commit-content"> <a - :href="commit.commitUrl" + :href="commit.commit_url" class="commit-row-message item-title" - v-html="commit.titleHtml" + v-html="commit.title_html" ></a> <span class="commit-row-message d-block d-sm-none"> · - {{ commit.shortId }} + {{ commit.short_id }} </span> <button - v-if="commit.descriptionHtml" + v-if="commit.description_html" class="text-expander js-toggle-button" type="button" :aria-label="__('Toggle commit description')" @@ -95,29 +97,29 @@ export default { ></a> {{ s__('CommitWidget|authored') }} <time-ago-tooltip - :time="commit.authoredDate" + :time="commit.authored_date" /> </div> <pre - v-if="commit.descriptionHtml" + v-if="commit.description_html" class="commit-row-description js-toggle-content append-bottom-8" - v-html="commit.descriptionHtml" + v-html="commit.description_html" ></pre> </div> <div class="commit-actions flex-row d-none d-sm-flex"> <div - v-if="commit.signatureHtml" - v-html="commit.signatureHtml" + 
v-if="commit.signature_html" + v-html="commit.signature_html" ></div> <commit-pipeline-status - v-if="commit.pipelineStatusPath" - :endpoint="commit.pipelineStatusPath" + v-if="commit.pipeline_status_path" + :endpoint="commit.pipeline_status_path" /> <div class="commit-sha-group"> <div class="label label-monospace" - v-text="commit.shortId" + v-text="commit.short_id" ></div> <clipboard-button :text="commit.id" diff --git a/app/assets/javascripts/diffs/components/compare_versions_dropdown.vue b/app/assets/javascripts/diffs/components/compare_versions_dropdown.vue index f4b333f3700..112206e4ad6 100644 --- a/app/assets/javascripts/diffs/components/compare_versions_dropdown.vue +++ b/app/assets/javascripts/diffs/components/compare_versions_dropdown.vue @@ -56,16 +56,16 @@ export default { methods: { commitsText(version) { return n__( - `${version.commitsCount} commit,`, - `${version.commitsCount} commits,`, - version.commitsCount, + `${version.commits_count} commit,`, + `${version.commits_count} commits,`, + version.commits_count, ); }, href(version) { if (this.showCommitCount) { - return version.versionPath; + return version.version_path; } - return version.comparePath; + return version.compare_path; }, versionName(version) { if (this.isLatest(version)) { @@ -74,7 +74,7 @@ export default { if (this.targetBranch && (this.isBase(version) || !version)) { return this.targetBranch.branchName; } - return `version ${version.versionIndex}`; + return `version ${version.version_index}`; }, isActive(version) { if (!version) { @@ -84,11 +84,11 @@ export default { if (this.targetBranch) { return ( (this.isBase(version) && !this.startVersion) || - (this.startVersion && this.startVersion.versionIndex === version.versionIndex) + (this.startVersion && this.startVersion.version_index === version.version_index) ); } - return version.versionIndex === this.mergeRequestVersion.versionIndex; + return version.version_index === this.mergeRequestVersion.version_index; }, isBase(version) { if (!version || !this.targetBranch) { @@ -98,7 +98,7 @@ export default { }, isLatest(version) { return ( - this.mergeRequestVersion && version.versionIndex === this.targetVersions[0].versionIndex + this.mergeRequestVersion && version.version_index === this.targetVersions[0].version_index ); }, }, @@ -142,7 +142,7 @@ export default { </div> <div> <small class="commit-sha"> - {{ version.truncatedCommitSha }} + {{ version.truncated_commit_sha }} </small> </div> <div> @@ -151,8 +151,8 @@ export default { {{ commitsText(version) }} </template> <time-ago - v-if="version.createdAt" - :time="version.createdAt" + v-if="version.created_at" + :time="version.created_at" class="js-timeago js-timeago-render" /> </small> diff --git a/app/assets/javascripts/diffs/components/diff_content.vue b/app/assets/javascripts/diffs/components/diff_content.vue index 547742a5ff4..5e5fda5fba6 100644 --- a/app/assets/javascripts/diffs/components/diff_content.vue +++ b/app/assets/javascripts/diffs/components/diff_content.vue @@ -39,7 +39,7 @@ export default { return this.diffFile.viewer.name === 'text'; }, diffFileCommentForm() { - return this.getCommentFormForDiffFile(this.diffFile.fileHash); + return this.getCommentFormForDiffFile(this.diffFile.file_hash); }, showNotesContainer() { return this.diffFile.discussions.length || this.diffFileCommentForm; @@ -73,28 +73,28 @@ export default { <inline-diff-view v-if="isInlineView" :diff-file="diffFile" - :diff-lines="diffFile.highlightedDiffLines || []" + :diff-lines="diffFile.highlighted_diff_lines || []" /> 
<parallel-diff-view v-if="isParallelView" :diff-file="diffFile" - :diff-lines="diffFile.parallelDiffLines || []" + :diff-lines="diffFile.parallel_diff_lines || []" /> </template> <diff-viewer v-else :diff-mode="diffMode" - :new-path="diffFile.newPath" - :new-sha="diffFile.diffRefs.headSha" - :old-path="diffFile.oldPath" - :old-sha="diffFile.diffRefs.baseSha" - :file-hash="diffFile.fileHash" + :new-path="diffFile.new_path" + :new-sha="diffFile.diff_refs.head_sha" + :old-path="diffFile.old_path" + :old-sha="diffFile.diff_refs.base_sha" + :file-hash="diffFile.file_hash" :project-path="projectPath" > <image-diff-overlay slot="image-overlay" :discussions="diffFile.discussions" - :file-hash="diffFile.fileHash" + :file-hash="diffFile.file_hash" :can-comment="getNoteableData.current_user.can_create_note" /> <div @@ -115,7 +115,7 @@ export default { :save-button-title="__('Comment')" class="diff-comment-form new-note discussion-form discussion-form-container" @handleFormUpdate="handleSaveNote" - @cancelForm="closeDiffFileCommentForm(diffFile.fileHash)" + @cancelForm="closeDiffFileCommentForm(diffFile.file_hash)" /> </div> </diff-viewer> diff --git a/app/assets/javascripts/diffs/components/diff_file.vue b/app/assets/javascripts/diffs/components/diff_file.vue index e76c7afd863..872131a5900 100644 --- a/app/assets/javascripts/diffs/components/diff_file.vue +++ b/app/assets/javascripts/diffs/components/diff_file.vue @@ -32,6 +32,7 @@ export default { computed: { ...mapState('diffs', ['currentDiffFileId']), ...mapGetters(['isNotesFetched']), + ...mapGetters('diffs', ['getDiffFileDiscussions']), isCollapsed() { return this.file.collapsed || false; }, @@ -39,7 +40,7 @@ export default { return sprintf( __('You can %{linkStart}view the blob%{linkEnd} instead.'), { - linkStart: `<a href="${_.escape(this.file.viewPath)}">`, + linkStart: `<a href="${_.escape(this.file.view_path)}">`, linkEnd: '</a>', }, false, @@ -48,21 +49,34 @@ export default { showExpandMessage() { return ( this.isCollapsed || - (!this.file.highlightedDiffLines && + (!this.file.highlighted_diff_lines && !this.isLoadingCollapsedDiff && - !this.file.tooLarge && + !this.file.too_large && this.file.text) ); }, showLoadingIcon() { return this.isLoadingCollapsedDiff || (!this.file.renderIt && !this.isCollapsed); }, + hasDiffLines() { + return ( + this.file.highlighted_diff_lines && + this.file.parallel_diff_lines && + this.file.parallel_diff_lines.length > 0 + ); + }, + }, + watch: { + 'file.collapsed': function fileCollapsedWatch(newVal, oldVal) { + if (!newVal && oldVal && !this.hasDiffLines) { + this.handleLoadCollapsedDiff(); + } + }, }, methods: { ...mapActions('diffs', ['loadCollapsedDiff', 'assignDiscussionsToDiff']), handleToggle() { - const { highlightedDiffLines, parallelDiffLines } = this.file; - if (!highlightedDiffLines && parallelDiffLines !== undefined && !parallelDiffLines.length) { + if (!this.hasDiffLines) { this.handleLoadCollapsedDiff(); } else { this.file.collapsed = !this.file.collapsed; @@ -81,7 +95,7 @@ export default { .then(() => { requestIdleCallback( () => { - this.assignDiscussionsToDiff(); + this.assignDiscussionsToDiff(this.getDiffFileDiscussions(this.file)); }, { timeout: 1000 }, ); @@ -103,9 +117,9 @@ export default { <template> <div - :id="file.fileHash" + :id="file.file_hash" :class="{ - 'is-active': currentDiffFileId === file.fileHash + 'is-active': currentDiffFileId === file.file_hash }" class="diff-file file-holder" > @@ -129,7 +143,7 @@ export default { make your changes there, and submit a merge request. 
</span> <a - :href="file.forkPath" + :href="file.fork_path" class="js-fork-suggestion-button btn btn-grouped btn-inverted btn-success" > Fork @@ -145,7 +159,7 @@ export default { <diff-content v-if="!isCollapsed && file.renderIt" - :class="{ hidden: isCollapsed || file.tooLarge }" + :class="{ hidden: isCollapsed || file.too_large }" :diff-file="file" /> <gl-loading-icon @@ -166,7 +180,7 @@ export default { </a> </div> <div - v-if="file.tooLarge" + v-if="file.too_large" class="nothing-here-block diff-collapsed js-too-large-diff" > {{ __('This source diff could not be displayed because it is too large.') }} diff --git a/app/assets/javascripts/diffs/components/diff_file_header.vue b/app/assets/javascripts/diffs/components/diff_file_header.vue index dcf1057eb84..af03cec6582 100644 --- a/app/assets/javascripts/diffs/components/diff_file_header.vue +++ b/app/assets/javascripts/diffs/components/diff_file_header.vue @@ -68,32 +68,32 @@ export default { }, titleLink() { if (this.diffFile.submodule) { - return this.diffFile.submoduleTreeUrl || this.diffFile.submoduleLink; + return this.diffFile.submodule_tree_url || this.diffFile.submodule_link; } return this.discussionPath; }, filePath() { if (this.diffFile.submodule) { - return `${this.diffFile.filePath} @ ${truncateSha(this.diffFile.blob.id)}`; + return `${this.diffFile.file_path} @ ${truncateSha(this.diffFile.blob.id)}`; } - if (this.diffFile.deletedFile) { - return sprintf(__('%{filePath} deleted'), { filePath: this.diffFile.filePath }, false); + if (this.diffFile.deleted_file) { + return sprintf(__('%{filePath} deleted'), { filePath: this.diffFile.file_path }, false); } - return this.diffFile.filePath; + return this.diffFile.file_path; }, titleTag() { - return this.diffFile.fileHash ? 'a' : 'span'; + return this.diffFile.file_hash ? 'a' : 'span'; }, isUsingLfs() { - return this.diffFile.storedExternally && this.diffFile.externalStorage === 'lfs'; + return this.diffFile.stored_externally && this.diffFile.external_storage === 'lfs'; }, collapseIcon() { return this.expanded ? 
'chevron-down' : 'chevron-right'; }, viewFileButtonText() { - const truncatedContentSha = _.escape(truncateSha(this.diffFile.contentSha)); + const truncatedContentSha = _.escape(truncateSha(this.diffFile.content_sha)); return sprintf( s__('MergeRequests|View file @ %{commitId}'), { @@ -103,7 +103,7 @@ export default { ); }, viewReplacedFileButtonText() { - const truncatedBaseSha = _.escape(truncateSha(this.diffFile.diffRefs.baseSha)); + const truncatedBaseSha = _.escape(truncateSha(this.diffFile.diff_refs.base_sha)); return sprintf( s__('MergeRequests|View replaced file @ %{commitId}'), { @@ -113,7 +113,7 @@ export default { ); }, gfmCopyText() { - return `\`${this.diffFile.filePath}\``; + return `\`${this.diffFile.file_path}\``; }, }, methods: { @@ -164,21 +164,21 @@ export default { aria-hidden="true" css-classes="js-file-icon append-right-5" /> - <span v-if="diffFile.renamedFile"> + <span v-if="diffFile.renamed_file"> <strong v-tooltip - :title="diffFile.oldPath" + :title="diffFile.old_path" class="file-title-name" data-container="body" - v-html="diffFile.oldPathHtml" + v-html="diffFile.old_path_html" ></strong> → <strong v-tooltip - :title="diffFile.newPath" + :title="diffFile.new_path" class="file-title-name" data-container="body" - v-html="diffFile.newPathHtml" + v-html="diffFile.new_path_html" ></strong> </span> @@ -195,16 +195,16 @@ export default { <clipboard-button :title="__('Copy file path to clipboard')" - :text="diffFile.filePath" + :text="diffFile.file_path" :gfm="gfmCopyText" css-class="btn-default btn-transparent btn-clipboard" /> <small - v-if="diffFile.modeChanged" + v-if="diffFile.mode_changed" ref="fileMode" > - {{ diffFile.aMode }} → {{ diffFile.bMode }} + {{ diffFile.a_mode }} → {{ diffFile.b_mode }} </small> <span @@ -220,7 +220,7 @@ export default { class="file-actions d-none d-sm-block" > <template - v-if="diffFile.blob && diffFile.blob.readableText" + v-if="diffFile.blob && diffFile.blob.readable_text" > <button :disabled="!diffHasDiscussions(diffFile)" @@ -234,33 +234,33 @@ export default { </button> <edit-button - v-if="!diffFile.deletedFile" + v-if="!diffFile.deleted_file" :can-current-user-fork="canCurrentUserFork" - :edit-path="diffFile.editPath" - :can-modify-blob="diffFile.canModifyBlob" + :edit-path="diffFile.edit_path" + :can-modify-blob="diffFile.can_modify_blob" @showForkMessage="showForkMessage" /> </template> <a - v-if="diffFile.replacedViewPath" - :href="diffFile.replacedViewPath" + v-if="diffFile.replaced_view_path" + :href="diffFile.replaced_view_path" class="btn view-file js-view-file" v-html="viewReplacedFileButtonText" > </a> <a - :href="diffFile.viewPath" + :href="diffFile.view_path" class="btn view-file js-view-file" v-html="viewFileButtonText" > </a> <a - v-if="diffFile.externalUrl" + v-if="diffFile.external_url" v-tooltip - :href="diffFile.externalUrl" - :title="`View on ${diffFile.formattedExternalUrl}`" + :href="diffFile.external_url" + :title="`View on ${diffFile.formatted_external_url}`" target="_blank" rel="noopener noreferrer" class="btn btn-file-option" diff --git a/app/assets/javascripts/diffs/components/diff_line_gutter_content.vue b/app/assets/javascripts/diffs/components/diff_line_gutter_content.vue index f4a9be19496..8f037eeefc4 100644 --- a/app/assets/javascripts/diffs/components/diff_line_gutter_content.vue +++ b/app/assets/javascripts/diffs/components/diff_line_gutter_content.vue @@ -73,7 +73,7 @@ export default { }), ...mapGetters(['isLoggedIn']), lineHref() { - return `#${this.line.lineCode || ''}`; + return 
`#${this.line.line_code || ''}`; }, shouldShowCommentButton() { return ( @@ -99,7 +99,7 @@ export default { methods: { ...mapActions('diffs', ['loadMoreLines', 'showCommentForm']), handleCommentButton() { - this.showCommentForm({ lineCode: this.line.lineCode }); + this.showCommentForm({ lineCode: this.line.line_code }); }, handleLoadMoreLines() { if (this.isRequesting) { @@ -108,8 +108,8 @@ export default { this.isRequesting = true; const endpoint = this.contextLinesPath; - const oldLineNumber = this.line.metaData.oldPos || 0; - const newLineNumber = this.line.metaData.newPos || 0; + const oldLineNumber = this.line.meta_data.old_pos || 0; + const newLineNumber = this.line.meta_data.new_pos || 0; const offset = newLineNumber - oldLineNumber; const bottom = this.isBottom; const { fileHash } = this; @@ -125,12 +125,12 @@ export default { to = lineNumber + UNFOLD_COUNT; } else { const diffFile = utils.findDiffFile(this.diffFiles, this.fileHash); - const indexForInline = utils.findIndexInInlineLines(diffFile.highlightedDiffLines, { + const indexForInline = utils.findIndexInInlineLines(diffFile.highlighted_diff_lines, { oldLineNumber, newLineNumber, }); - const prevLine = diffFile.highlightedDiffLines[indexForInline - 2]; - const prevLineNumber = (prevLine && prevLine.newLine) || 0; + const prevLine = diffFile.highlighted_diff_lines[indexForInline - 2]; + const prevLineNumber = (prevLine && prevLine.new_line) || 0; if (since <= prevLineNumber + 1) { since = prevLineNumber + 1; diff --git a/app/assets/javascripts/diffs/components/diff_line_note_form.vue b/app/assets/javascripts/diffs/components/diff_line_note_form.vue index bb9bb821de3..07f38172575 100644 --- a/app/assets/javascripts/diffs/components/diff_line_note_form.vue +++ b/app/assets/javascripts/diffs/components/diff_line_note_form.vue @@ -53,7 +53,7 @@ export default { this.noteableData.diff_head_sha, DIFF_NOTE_TYPE, this.noteableData.source_project_id, - this.line.lineCode, + this.line.line_code, ]; this.initAutoSave(this.noteableData, keys); @@ -72,7 +72,7 @@ export default { } this.cancelCommentForm({ - lineCode: this.line.lineCode, + lineCode: this.line.line_code, }); this.$nextTick(() => { this.resetAutoSave(); @@ -94,7 +94,7 @@ export default { <note-form ref="noteForm" :is-editing="true" - :line-code="line.lineCode" + :line-code="line.line_code" save-button-title="Comment" class="diff-comment-form" @cancelForm="handleCancelCommentForm" diff --git a/app/assets/javascripts/diffs/components/diff_table_cell.vue b/app/assets/javascripts/diffs/components/diff_table_cell.vue index 5d9a0b123fe..0a893a57f07 100644 --- a/app/assets/javascripts/diffs/components/diff_table_cell.vue +++ b/app/assets/javascripts/diffs/components/diff_table_cell.vue @@ -96,9 +96,7 @@ export default { }; }, lineNumber() { - const { lineType } = this; - - return lineType === OLD_LINE_TYPE ? this.line.oldLine : this.line.newLine; + return this.lineType === OLD_LINE_TYPE ? 
this.line.old_line : this.line.new_line; }, }, }; diff --git a/app/assets/javascripts/diffs/components/inline_diff_comment_row.vue b/app/assets/javascripts/diffs/components/inline_diff_comment_row.vue index 46a51859da5..b9e14c53d2c 100644 --- a/app/assets/javascripts/diffs/components/inline_diff_comment_row.vue +++ b/app/assets/javascripts/diffs/components/inline_diff_comment_row.vue @@ -48,7 +48,7 @@ export default { :discussions="line.discussions" /> <diff-line-note-form - v-if="diffLineCommentForms[line.lineCode]" + v-if="diffLineCommentForms[line.line_code]" :diff-file-hash="diffFileHash" :line="line" :note-target-line="line" diff --git a/app/assets/javascripts/diffs/components/inline_diff_table_row.vue b/app/assets/javascripts/diffs/components/inline_diff_table_row.vue index 542acd3d930..1f4088066d1 100644 --- a/app/assets/javascripts/diffs/components/inline_diff_table_row.vue +++ b/app/assets/javascripts/diffs/components/inline_diff_table_row.vue @@ -52,9 +52,7 @@ export default { }; }, inlineRowId() { - const { lineCode, oldLine, newLine } = this.line; - - return lineCode || `${this.fileHash}_${oldLine}_${newLine}`; + return this.line.line_code || `${this.fileHash}_${this.line.old_line}_${this.line.new_line}`; }, }, created() { @@ -107,7 +105,7 @@ export default { <td :class="line.type" class="line_content" - v-html="line.richText" + v-html="line.rich_text" > </td> </tr> diff --git a/app/assets/javascripts/diffs/components/inline_diff_view.vue b/app/assets/javascripts/diffs/components/inline_diff_view.vue index fbf9e77ac07..79efac89e98 100644 --- a/app/assets/javascripts/diffs/components/inline_diff_view.vue +++ b/app/assets/javascripts/diffs/components/inline_diff_view.vue @@ -43,16 +43,16 @@ export default { v-for="(line, index) in diffLines" > <inline-diff-table-row - :key="line.lineCode" - :file-hash="diffFile.fileHash" - :context-lines-path="diffFile.contextLinesPath" + :key="line.line_code" + :file-hash="diffFile.file_hash" + :context-lines-path="diffFile.context_lines_path" :line="line" :is-bottom="index + 1 === diffLinesLength" /> <inline-diff-comment-row v-if="shouldRenderInlineCommentRow(line)" :key="index" - :diff-file-hash="diffFile.fileHash" + :diff-file-hash="diffFile.file_hash" :line="line" :line-index="index" /> diff --git a/app/assets/javascripts/diffs/components/parallel_diff_comment_row.vue b/app/assets/javascripts/diffs/components/parallel_diff_comment_row.vue index 3b71c0a1fd4..00c2df4dac1 100644 --- a/app/assets/javascripts/diffs/components/parallel_diff_comment_row.vue +++ b/app/assets/javascripts/diffs/components/parallel_diff_comment_row.vue @@ -27,10 +27,10 @@ export default { diffLineCommentForms: state => state.diffs.diffLineCommentForms, }), leftLineCode() { - return this.line.left && this.line.left.lineCode; + return this.line.left && this.line.left.line_code; }, rightLineCode() { - return this.line.right && this.line.right.lineCode; + return this.line.right && this.line.right.line_code; }, hasExpandedDiscussionOnLeft() { return this.line.left && this.line.left.discussions diff --git a/app/assets/javascripts/diffs/components/parallel_diff_table_row.vue b/app/assets/javascripts/diffs/components/parallel_diff_table_row.vue index fcc3b3e9117..2d87db12fd6 100644 --- a/app/assets/javascripts/diffs/components/parallel_diff_table_row.vue +++ b/app/assets/javascripts/diffs/components/parallel_diff_table_row.vue @@ -120,11 +120,11 @@ export default { class="diff-line-num old_line" /> <td - :id="line.left.lineCode" + :id="line.left.line_code" 
:class="parallelViewLeftLineType" class="line_content parallel left-side" @mousedown.native="handleParallelLineMouseDown" - v-html="line.left.richText" + v-html="line.left.rich_text" > </td> </template> @@ -146,11 +146,11 @@ export default { class="diff-line-num new_line" /> <td - :id="line.right.lineCode" + :id="line.right.line_code" :class="line.right.type" class="line_content parallel right-side" @mousedown.native="handleParallelLineMouseDown" - v-html="line.right.richText" + v-html="line.right.rich_text" > </td> </template> diff --git a/app/assets/javascripts/diffs/components/parallel_diff_view.vue b/app/assets/javascripts/diffs/components/parallel_diff_view.vue index 3452f0d2b00..6942f9b53e0 100644 --- a/app/assets/javascripts/diffs/components/parallel_diff_view.vue +++ b/app/assets/javascripts/diffs/components/parallel_diff_view.vue @@ -46,8 +46,8 @@ export default { > <parallel-diff-table-row :key="index" - :file-hash="diffFile.fileHash" - :context-lines-path="diffFile.contextLinesPath" + :file-hash="diffFile.file_hash" + :context-lines-path="diffFile.context_lines_path" :line="line" :is-bottom="index + 1 === diffLinesLength" /> @@ -55,7 +55,7 @@ export default { v-if="shouldRenderParallelCommentRow(line)" :key="`dcr-${index}`" :line="line" - :diff-file-hash="diffFile.fileHash" + :diff-file-hash="diffFile.file_hash" :line-index="index" /> </template> diff --git a/app/assets/javascripts/diffs/components/tree_list.vue b/app/assets/javascripts/diffs/components/tree_list.vue index 91052b303a6..ff1eb23cea3 100644 --- a/app/assets/javascripts/diffs/components/tree_list.vue +++ b/app/assets/javascripts/diffs/components/tree_list.vue @@ -35,7 +35,7 @@ export default { if (search === '') return this.renderTreeList ? this.tree : this.allBlobs; - return this.allBlobs.filter(f => f.name.toLowerCase().indexOf(search) >= 0); + return this.allBlobs.filter(f => f.path.toLowerCase().indexOf(search) >= 0); }, rowDisplayTextKey() { if (this.renderTreeList && this.search.trim() === '') { diff --git a/app/assets/javascripts/diffs/index.js b/app/assets/javascripts/diffs/index.js index aae89109c27..06ef4207d85 100644 --- a/app/assets/javascripts/diffs/index.js +++ b/app/assets/javascripts/diffs/index.js @@ -1,6 +1,5 @@ import Vue from 'vue'; import { mapState } from 'vuex'; -import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils'; import diffsApp from './components/app.vue'; export default function initDiffsApp(store) { @@ -17,9 +16,7 @@ export default function initDiffsApp(store) { return { endpoint: dataset.endpoint, projectPath: dataset.projectPath, - currentUser: convertObjectPropsToCamelCase(JSON.parse(dataset.currentUserData), { - deep: true, - }), + currentUser: JSON.parse(dataset.currentUserData) || {}, }; }, computed: { diff --git a/app/assets/javascripts/diffs/store/actions.js b/app/assets/javascripts/diffs/store/actions.js index d3e9c7c88f0..fb648527450 100644 --- a/app/assets/javascripts/diffs/store/actions.js +++ b/app/assets/javascripts/diffs/store/actions.js @@ -50,8 +50,8 @@ export const assignDiscussionsToDiff = ( }; export const removeDiscussionsFromDiff = ({ commit }, removeDiscussion) => { - const { fileHash, line_code, id } = removeDiscussion; - commit(types.REMOVE_LINE_DISCUSSIONS_FOR_FILE, { fileHash, lineCode: line_code, id }); + const { file_hash, line_code, id } = removeDiscussion; + commit(types.REMOVE_LINE_DISCUSSIONS_FOR_FILE, { fileHash: file_hash, lineCode: line_code, id }); }; export const startRenderDiffsQueue = ({ state, commit }) => { @@ -189,7 +189,8 @@ 
export const saveDiffDiscussion = ({ dispatch }, { note, formData }) => { return dispatch('saveNote', postData, { root: true }) .then(result => dispatch('updateDiscussion', result.discussion, { root: true })) .then(discussion => dispatch('assignDiscussionsToDiff', [discussion])) - .then(() => dispatch('closeDiffFileCommentForm', formData.diffFile.fileHash)) + .then(() => dispatch('closeDiffFileCommentForm', formData.diffFile.file_hash)) + .then(() => dispatch('startTaskList', null, { root: true })) .catch(() => createFlash(s__('MergeRequests|Saving the comment failed'))); }; diff --git a/app/assets/javascripts/diffs/store/getters.js b/app/assets/javascripts/diffs/store/getters.js index bf490f9d78a..7f02c67a64e 100644 --- a/app/assets/javascripts/diffs/store/getters.js +++ b/app/assets/javascripts/diffs/store/getters.js @@ -1,4 +1,3 @@ -import _ from 'underscore'; import { PARALLEL_DIFF_VIEW_TYPE, INLINE_DIFF_VIEW_TYPE } from '../constants'; export const isParallelView = state => state.diffViewType === PARALLEL_DIFF_VIEW_TYPE; @@ -68,8 +67,7 @@ export const diffHasDiscussions = (state, getters) => diff => */ export const getDiffFileDiscussions = (state, getters, rootState, rootGetters) => diff => rootGetters.discussions.filter( - discussion => - discussion.diff_discussion && _.isEqual(discussion.diff_file.file_hash, diff.fileHash), + discussion => discussion.diff_discussion && discussion.diff_file.file_hash === diff.file_hash, ) || []; export const shouldRenderParallelCommentRow = state => line => { @@ -90,14 +88,14 @@ export const shouldRenderParallelCommentRow = state => line => { return true; } - const hasCommentFormOnLeft = line.left && state.diffLineCommentForms[line.left.lineCode]; - const hasCommentFormOnRight = line.right && state.diffLineCommentForms[line.right.lineCode]; + const hasCommentFormOnLeft = line.left && state.diffLineCommentForms[line.left.line_code]; + const hasCommentFormOnRight = line.right && state.diffLineCommentForms[line.right.line_code]; return hasCommentFormOnLeft || hasCommentFormOnRight; }; export const shouldRenderInlineCommentRow = state => line => { - if (state.diffLineCommentForms[line.lineCode]) return true; + if (state.diffLineCommentForms[line.line_code]) return true; if (!line.discussions || line.discussions.length === 0) { return false; @@ -108,7 +106,7 @@ export const shouldRenderInlineCommentRow = state => line => { // prevent babel-plugin-rewire from generating an invalid default during karma∂ tests export const getDiffFileByHash = state => fileHash => - state.diffFiles.find(file => file.fileHash === fileHash); + state.diffFiles.find(file => file.file_hash === fileHash); export const allBlobs = state => Object.values(state.treeEntries).filter(f => f.type === 'blob'); diff --git a/app/assets/javascripts/diffs/store/mutations.js b/app/assets/javascripts/diffs/store/mutations.js index a7eea2c1449..2133cfe4825 100644 --- a/app/assets/javascripts/diffs/store/mutations.js +++ b/app/assets/javascripts/diffs/store/mutations.js @@ -23,12 +23,11 @@ export default { }, [types.SET_DIFF_DATA](state, data) { - const diffData = convertObjectPropsToCamelCase(data, { deep: true }); - prepareDiffData(diffData); - const { tree, treeEntries } = generateTreeList(diffData.diffFiles); + prepareDiffData(data); + const { tree, treeEntries } = generateTreeList(data.diff_files); Object.assign(state, { - ...diffData, + ...convertObjectPropsToCamelCase(data), tree: sortTree(tree), treeEntries, }); @@ -42,7 +41,7 @@ export default { [types.SET_MERGE_REQUEST_DIFFS](state, 
mergeRequestDiffs) { Object.assign(state, { - mergeRequestDiffs: convertObjectPropsToCamelCase(mergeRequestDiffs, { deep: true }), + mergeRequestDiffs, }); }, @@ -62,13 +61,18 @@ export default { const { lineNumbers, contextLines, fileHash } = options; const { bottom } = options.params; const diffFile = findDiffFile(state.diffFiles, fileHash); - const { highlightedDiffLines, parallelDiffLines } = diffFile; removeMatchLine(diffFile, lineNumbers, bottom); - const lines = addLineReferences(contextLines, lineNumbers, bottom); + + const lines = addLineReferences(contextLines, lineNumbers, bottom).map(line => ({ + ...line, + line_code: line.line_code || `${fileHash}_${line.old_line}_${line.new_line}`, + discussions: line.discussions || [], + })); + addContextLines({ - inlineLines: highlightedDiffLines, - parallelLines: parallelDiffLines, + inlineLines: diffFile.highlighted_diff_lines, + parallelLines: diffFile.parallel_diff_lines, contextLines: lines, bottom, lineNumbers, @@ -76,10 +80,9 @@ export default { }, [types.ADD_COLLAPSED_DIFFS](state, { file, data }) { - const normalizedData = convertObjectPropsToCamelCase(data, { deep: true }); - prepareDiffData(normalizedData); - const [newFileData] = normalizedData.diffFiles.filter(f => f.fileHash === file.fileHash); - const selectedFile = state.diffFiles.find(f => f.fileHash === file.fileHash); + prepareDiffData(data); + const [newFileData] = data.diff_files.filter(f => f.file_hash === file.file_hash); + const selectedFile = state.diffFiles.find(f => f.file_hash === file.file_hash); Object.assign(selectedFile, { ...newFileData }); }, @@ -95,20 +98,20 @@ export default { const discussionLineCode = discussion.line_code; const fileHash = discussion.diff_file.file_hash; - const lineCheck = ({ lineCode }) => - lineCode === discussionLineCode && + const lineCheck = line => + line.line_code === discussionLineCode && isDiscussionApplicableToLine({ discussion, - diffPosition: diffPositionByLineCode[lineCode], + diffPosition: diffPositionByLineCode[line.line_code], latestDiff, }); state.diffFiles = state.diffFiles.map(diffFile => { - if (diffFile.fileHash === fileHash) { + if (diffFile.file_hash === fileHash) { const file = { ...diffFile }; - if (file.highlightedDiffLines) { - file.highlightedDiffLines = file.highlightedDiffLines.map(line => { + if (file.highlighted_diff_lines) { + file.highlighted_diff_lines = file.highlighted_diff_lines.map(line => { if (lineCheck(line)) { return { ...line, @@ -120,8 +123,8 @@ export default { }); } - if (file.parallelDiffLines) { - file.parallelDiffLines = file.parallelDiffLines.map(line => { + if (file.parallel_diff_lines) { + file.parallel_diff_lines = file.parallel_diff_lines.map(line => { const left = line.left && lineCheck(line.left); const right = line.right && lineCheck(line.right); @@ -142,7 +145,7 @@ export default { }); } - if (!file.parallelDiffLines || !file.highlightedDiffLines) { + if (!file.parallel_diff_lines || !file.highlighted_diff_lines) { file.discussions = file.discussions.concat(discussion); } @@ -154,16 +157,16 @@ export default { }, [types.REMOVE_LINE_DISCUSSIONS_FOR_FILE](state, { fileHash, lineCode, id }) { - const selectedFile = state.diffFiles.find(f => f.fileHash === fileHash); + const selectedFile = state.diffFiles.find(f => f.file_hash === fileHash); if (selectedFile) { - if (selectedFile.parallelDiffLines) { - const targetLine = selectedFile.parallelDiffLines.find( + if (selectedFile.parallel_diff_lines) { + const targetLine = selectedFile.parallel_diff_lines.find( line => - (line.left && 
line.left.lineCode === lineCode) || - (line.right && line.right.lineCode === lineCode), + (line.left && line.left.line_code === lineCode) || + (line.right && line.right.line_code === lineCode), ); if (targetLine) { - const side = targetLine.left && targetLine.left.lineCode === lineCode ? 'left' : 'right'; + const side = targetLine.left && targetLine.left.line_code === lineCode ? 'left' : 'right'; Object.assign(targetLine[side], { discussions: [], @@ -171,9 +174,9 @@ export default { } } - if (selectedFile.highlightedDiffLines) { - const targetInlineLine = selectedFile.highlightedDiffLines.find( - line => line.lineCode === lineCode, + if (selectedFile.highlighted_diff_lines) { + const targetInlineLine = selectedFile.highlighted_diff_lines.find( + line => line.line_code === lineCode, ); if (targetInlineLine) { diff --git a/app/assets/javascripts/diffs/store/utils.js b/app/assets/javascripts/diffs/store/utils.js index a935b9b1ffa..d9d3c0f2ca2 100644 --- a/app/assets/javascripts/diffs/store/utils.js +++ b/app/assets/javascripts/diffs/store/utils.js @@ -1,5 +1,4 @@ import _ from 'underscore'; -import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils'; import { diffModes } from '~/ide/constants'; import { LINE_POSITION_LEFT, @@ -15,7 +14,7 @@ import { } from '../constants'; export function findDiffFile(files, hash) { - return files.filter(file => file.fileHash === hash)[0]; + return files.filter(file => file.file_hash === hash)[0]; } export const getReversePosition = linePosition => { @@ -39,14 +38,14 @@ export function getFormData(params) { } = params; const position = JSON.stringify({ - base_sha: diffFile.diffRefs.baseSha, - start_sha: diffFile.diffRefs.startSha, - head_sha: diffFile.diffRefs.headSha, - old_path: diffFile.oldPath, - new_path: diffFile.newPath, + base_sha: diffFile.diff_refs.base_sha, + start_sha: diffFile.diff_refs.start_sha, + head_sha: diffFile.diff_refs.head_sha, + old_path: diffFile.old_path, + new_path: diffFile.new_path, position_type: positionType || TEXT_DIFF_POSITION_TYPE, - old_line: noteTargetLine ? noteTargetLine.oldLine : null, - new_line: noteTargetLine ? noteTargetLine.newLine : null, + old_line: noteTargetLine ? noteTargetLine.old_line : null, + new_line: noteTargetLine ? noteTargetLine.new_line : null, x: params.x, y: params.y, width: params.width, @@ -56,7 +55,7 @@ export function getFormData(params) { const postData = { view: diffViewType, line_type: linePosition === LINE_POSITION_RIGHT ? NEW_LINE_TYPE : OLD_LINE_TYPE, - merge_request_diff_head_sha: diffFile.diffRefs.headSha, + merge_request_diff_head_sha: diffFile.diff_refs.head_sha, in_reply_to_discussion_id: '', note_project_id: '', target_type: noteableData.targetType, @@ -69,10 +68,10 @@ export function getFormData(params) { noteable_id: noteableData.id, commit_id: '', type: - diffFile.diffRefs.startSha && diffFile.diffRefs.headSha + diffFile.diff_refs.start_sha && diffFile.diff_refs.head_sha ? DIFF_NOTE_TYPE : LEGACY_DIFF_NOTE_TYPE, - line_code: noteTargetLine ? noteTargetLine.lineCode : null, + line_code: noteTargetLine ? 
noteTargetLine.line_code : null, }, }; @@ -93,7 +92,7 @@ export const findIndexInInlineLines = (lines, lineNumbers) => { return _.findIndex( lines, - line => line.oldLine === oldLineNumber && line.newLine === newLineNumber, + line => line.old_line === oldLineNumber && line.new_line === newLineNumber, ); }; @@ -105,18 +104,18 @@ export const findIndexInParallelLines = (lines, lineNumbers) => { line => line.left && line.right && - line.left.oldLine === oldLineNumber && - line.right.newLine === newLineNumber, + line.left.old_line === oldLineNumber && + line.right.new_line === newLineNumber, ); }; export function removeMatchLine(diffFile, lineNumbers, bottom) { - const indexForInline = findIndexInInlineLines(diffFile.highlightedDiffLines, lineNumbers); - const indexForParallel = findIndexInParallelLines(diffFile.parallelDiffLines, lineNumbers); + const indexForInline = findIndexInInlineLines(diffFile.highlighted_diff_lines, lineNumbers); + const indexForParallel = findIndexInParallelLines(diffFile.parallel_diff_lines, lineNumbers); const factor = bottom ? 1 : -1; - diffFile.highlightedDiffLines.splice(indexForInline + factor, 1); - diffFile.parallelDiffLines.splice(indexForParallel + factor, 1); + diffFile.highlighted_diff_lines.splice(indexForInline + factor, 1); + diffFile.parallel_diff_lines.splice(indexForParallel + factor, 1); } export function addLineReferences(lines, lineNumbers, bottom) { @@ -125,18 +124,16 @@ export function addLineReferences(lines, lineNumbers, bottom) { let matchLineIndex = -1; const linesWithNumbers = lines.map((l, index) => { - const line = convertObjectPropsToCamelCase(l); - - if (line.type === MATCH_LINE_TYPE) { + if (l.type === MATCH_LINE_TYPE) { matchLineIndex = index; } else { - Object.assign(line, { - oldLine: bottom ? oldLineNumber + index + 1 : oldLineNumber + index - lineCount, - newLine: bottom ? newLineNumber + index + 1 : newLineNumber + index - lineCount, + Object.assign(l, { + old_line: bottom ? oldLineNumber + index + 1 : oldLineNumber + index - lineCount, + new_line: bottom ? 
newLineNumber + index + 1 : newLineNumber + index - lineCount, }); } - return line; + return l; }); if (matchLineIndex > -1) { @@ -146,9 +143,9 @@ export function addLineReferences(lines, lineNumbers, bottom) { : linesWithNumbers[matchLineIndex + 1]; Object.assign(line, { - metaData: { - oldPos: targetLine.oldLine, - newPos: targetLine.newLine, + meta_data: { + old_pos: targetLine.old_line, + new_pos: targetLine.new_line, }, }); } @@ -187,11 +184,11 @@ export function trimFirstCharOfLineContent(line = {}) { const parsedLine = Object.assign({}, line); - if (line.richText) { - const firstChar = parsedLine.richText.charAt(0); + if (line.rich_text) { + const firstChar = parsedLine.rich_text.charAt(0); if (firstChar === ' ' || firstChar === '+' || firstChar === '-') { - parsedLine.richText = line.richText.substring(1); + parsedLine.rich_text = line.rich_text.substring(1); } } @@ -201,15 +198,15 @@ export function trimFirstCharOfLineContent(line = {}) { // This prepares and optimizes the incoming diff data from the server // by setting up incremental rendering and removing unneeded data export function prepareDiffData(diffData) { - const filesLength = diffData.diffFiles.length; + const filesLength = diffData.diff_files.length; let showingLines = 0; for (let i = 0; i < filesLength; i += 1) { - const file = diffData.diffFiles[i]; + const file = diffData.diff_files[i]; - if (file.parallelDiffLines) { - const linesLength = file.parallelDiffLines.length; + if (file.parallel_diff_lines) { + const linesLength = file.parallel_diff_lines.length; for (let u = 0; u < linesLength; u += 1) { - const line = file.parallelDiffLines[u]; + const line = file.parallel_diff_lines[u]; if (line.left) { line.left = trimFirstCharOfLineContent(line.left); } @@ -219,13 +216,13 @@ export function prepareDiffData(diffData) { } } - if (file.highlightedDiffLines) { - const linesLength = file.highlightedDiffLines.length; + if (file.highlighted_diff_lines) { + const linesLength = file.highlighted_diff_lines.length; for (let u = 0; u < linesLength; u += 1) { - const line = file.highlightedDiffLines[u]; + const line = file.highlighted_diff_lines[u]; Object.assign(line, { ...trimFirstCharOfLineContent(line) }); } - showingLines += file.parallelDiffLines.length; + showingLines += file.parallel_diff_lines.length; } Object.assign(file, { @@ -238,26 +235,21 @@ export function prepareDiffData(diffData) { export function getDiffPositionByLineCode(diffFiles) { return diffFiles.reduce((acc, diffFile) => { - const { baseSha, headSha, startSha } = diffFile.diffRefs; - const { newPath, oldPath } = diffFile; - // We can only use highlightedDiffLines to create the map of diff lines because // highlightedDiffLines will also include every parallel diff line in it. 
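// --- Editorial aside (illustration only, not part of the change set) --------
// A minimal sketch of the entry shape that getDiffPositionByLineCode builds
// after this refactor: keys keep the backend's snake_case form, so no
// convertObjectPropsToCamelCase pass is needed before storing or comparing
// positions. All concrete values below are hypothetical examples.
const exampleDiffPositionByLineCode = {
  // keyed by line_code, i.e. "<file_hash>_<old_line>_<new_line>" (hypothetical value)
  abc123_10_12: {
    base_sha: '1111111',
    start_sha: '1111111',
    head_sha: '2222222',
    old_path: 'README.md',
    new_path: 'README.md',
    old_line: 10,
    new_line: 12,
    line_code: 'abc123_10_12',
    position_type: 'text',
  },
};

// A discussion is later matched to a diff line by looking its line_code up in
// this map and comparing the found position (minus line_code) against
// discussion.position / discussion.original_position, as done in
// isDiscussionApplicableToLine further down in this file.
const examplePosition = exampleDiffPositionByLineCode['abc123_10_12'];
// eslint-disable-next-line no-console
console.log(examplePosition.new_line); // => 12
// -----------------------------------------------------------------------------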
- if (diffFile.highlightedDiffLines) { - diffFile.highlightedDiffLines.forEach(line => { - const { lineCode, oldLine, newLine } = line; - - if (lineCode) { - acc[lineCode] = { - baseSha, - headSha, - startSha, - newPath, - oldPath, - oldLine, - newLine, - lineCode, - positionType: 'text', + if (diffFile.highlighted_diff_lines) { + diffFile.highlighted_diff_lines.forEach(line => { + if (line.line_code) { + acc[line.line_code] = { + base_sha: diffFile.diff_refs.base_sha, + head_sha: diffFile.diff_refs.head_sha, + start_sha: diffFile.diff_refs.start_sha, + new_path: diffFile.new_path, + old_path: diffFile.old_path, + old_line: line.old_line, + new_line: line.new_line, + line_code: line.line_code, + position_type: 'text', }; } }); @@ -270,30 +262,30 @@ export function getDiffPositionByLineCode(diffFiles) { // This method will check whether the discussion is still applicable // to the diff line in question regarding different versions of the MR export function isDiscussionApplicableToLine({ discussion, diffPosition, latestDiff }) { - const { lineCode, ...diffPositionCopy } = diffPosition; + const { line_code, ...diffPositionCopy } = diffPosition; if (discussion.original_position && discussion.position) { - const originalRefs = convertObjectPropsToCamelCase(discussion.original_position); - const refs = convertObjectPropsToCamelCase(discussion.position); + const originalRefs = discussion.original_position; + const refs = discussion.position; return _.isEqual(refs, diffPositionCopy) || _.isEqual(originalRefs, diffPositionCopy); } - return latestDiff && discussion.active && lineCode === discussion.line_code; + // eslint-disable-next-line + return latestDiff && discussion.active && line_code === discussion.line_code; } export const generateTreeList = files => files.reduce( (acc, file) => { - const { fileHash, addedLines, removedLines, newFile, deletedFile, newPath } = file; - const split = newPath.split('/'); + const split = file.new_path.split('/'); split.forEach((name, i) => { const parent = acc.treeEntries[split.slice(0, i).join('/')]; const path = `${parent ? `${parent.path}/` : ''}${name}`; if (!acc.treeEntries[path]) { - const type = path === newPath ? 'blob' : 'tree'; + const type = path === file.new_path ? 'blob' : 'tree'; acc.treeEntries[path] = { key: path, path, @@ -307,11 +299,11 @@ export const generateTreeList = files => if (type === 'blob') { Object.assign(entry, { changed: true, - tempFile: newFile, - deleted: deletedFile, - fileHash, - addedLines, - removedLines, + tempFile: file.new_file, + deleted: file.deleted_file, + fileHash: file.file_hash, + addedLines: file.added_lines, + removedLines: file.removed_lines, }); } else { Object.assign(entry, { @@ -329,6 +321,6 @@ export const generateTreeList = files => ); export const getDiffMode = diffFile => { - const diffModeKey = Object.keys(diffModes).find(key => diffFile[`${key}File`]); + const diffModeKey = Object.keys(diffModes).find(key => diffFile[`${key}_file`]); return diffModes[diffModeKey] || diffModes.replaced; }; diff --git a/app/assets/javascripts/files_comment_button.js b/app/assets/javascripts/files_comment_button.js index 3233f5c4f71..aad5647c045 100644 --- a/app/assets/javascripts/files_comment_button.js +++ b/app/assets/javascripts/files_comment_button.js @@ -1,5 +1,5 @@ /* Developer beware! Do not add logic to showButton or hideButton - * that will force a reflow. Doing so will create a signficant performance + * that will force a reflow. 
Doing so will create a significant performance * bottleneck for pages with large diffs. For a comprehensive list of what * causes reflows, visit https://gist.github.com/paulirish/5d52fb081b3570c81e3a */ diff --git a/app/assets/javascripts/filtered_search/filtered_search_visual_tokens.js b/app/assets/javascripts/filtered_search/filtered_search_visual_tokens.js index c23d4c484a5..89dcff74d0e 100644 --- a/app/assets/javascripts/filtered_search/filtered_search_visual_tokens.js +++ b/app/assets/javascripts/filtered_search/filtered_search_visual_tokens.js @@ -135,10 +135,6 @@ export default class FilteredSearchVisualTokens { } static updateUserTokenAppearance(tokenValueContainer, tokenValueElement, tokenValue) { - if (tokenValue === 'none') { - return Promise.resolve(); - } - const username = tokenValue.replace(/^@/, ''); return ( UsersCache.retrieve(username) @@ -184,7 +180,12 @@ export default class FilteredSearchVisualTokens { const tokenValueElement = tokenValueContainer.querySelector('.value'); tokenValueElement.innerText = tokenValue; + if (tokenValue === 'none' || tokenValue === 'any') { + return; + } + const tokenType = tokenName.toLowerCase(); + if (tokenType === 'label') { FilteredSearchVisualTokens.updateLabelTokenColor(tokenValueContainer, tokenValue); } else if (tokenType === 'author' || tokenType === 'assignee') { diff --git a/app/assets/javascripts/ide/stores/modules/pipelines/actions.js b/app/assets/javascripts/ide/stores/modules/pipelines/actions.js index 8fa86995ef0..ea2525a2f0e 100644 --- a/app/assets/javascripts/ide/stores/modules/pipelines/actions.js +++ b/app/assets/javascripts/ide/stores/modules/pipelines/actions.js @@ -28,7 +28,7 @@ export const receiveLatestPipelineError = ({ commit, dispatch }, err) => { dispatch( 'setErrorMessage', { - text: __('An error occured whilst fetching the latest pipline.'), + text: __('An error occured whilst fetching the latest pipeline.'), action: () => dispatch('forcePipelineRequest').then(() => dispatch('setErrorMessage', null, { root: true }), diff --git a/app/assets/javascripts/jobs/store/actions.js b/app/assets/javascripts/jobs/store/actions.js index 54ed217572a..8045f6dc3ff 100644 --- a/app/assets/javascripts/jobs/store/actions.js +++ b/app/assets/javascripts/jobs/store/actions.js @@ -80,8 +80,8 @@ export const fetchJob = ({ state, dispatch }) => { export const receiveJobSuccess = ({ commit }, data = {}) => { commit(types.RECEIVE_JOB_SUCCESS, data); - if (data.favicon) { - setFaviconOverlay(data.favicon); + if (data.status && data.status.favicon) { + setFaviconOverlay(data.status.favicon); } else { resetFavicon(); } diff --git a/app/assets/javascripts/jobs/store/getters.js b/app/assets/javascripts/jobs/store/getters.js index d440b2c9ef1..35e92b0b5d9 100644 --- a/app/assets/javascripts/jobs/store/getters.js +++ b/app/assets/javascripts/jobs/store/getters.js @@ -42,7 +42,7 @@ export const emptyStateIllustration = state => (state.job && state.job.status && state.job.status.illustration) || {}; export const emptyStateAction = state => - (state.job && state.job.status && state.job.status.action) || {}; + (state.job && state.job.status && state.job.status.action) || null; export const isScrollingDown = state => isScrolledToBottom() && !state.isTraceComplete; diff --git a/app/assets/javascripts/notes/components/comment_form.vue b/app/assets/javascripts/notes/components/comment_form.vue index 754c6e79ee4..10e80883c00 100644 --- a/app/assets/javascripts/notes/components/comment_form.vue +++ 
b/app/assets/javascripts/notes/components/comment_form.vue @@ -6,7 +6,6 @@ import Autosize from 'autosize'; import { __, sprintf } from '~/locale'; import Flash from '../../flash'; import Autosave from '../../autosave'; -import TaskList from '../../task_list'; import { capitalizeFirstCharacter, convertToCamelCase, @@ -146,7 +145,6 @@ export default { }); this.initAutoSave(); - this.initTaskList(); }, methods: { ...mapActions([ @@ -298,13 +296,6 @@ Please check your network connection and try again.`; ]); } }, - initTaskList() { - return new TaskList({ - dataType: 'note', - fieldName: 'note', - selector: '.notes', - }); - }, resizeTextarea() { this.$nextTick(() => { Autosize.update(this.$refs.textarea); diff --git a/app/assets/javascripts/notes/components/diff_with_note.vue b/app/assets/javascripts/notes/components/diff_with_note.vue index b209f736c3f..080161dfbba 100644 --- a/app/assets/javascripts/notes/components/diff_with_note.vue +++ b/app/assets/javascripts/notes/components/diff_with_note.vue @@ -1,6 +1,5 @@ <script> import { mapState, mapActions } from 'vuex'; -import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils'; import DiffFileHeader from '~/diffs/components/diff_file_header.vue'; import DiffViewer from '~/vue_shared/components/diff_viewer/diff_viewer.vue'; import ImageDiffOverlay from '~/diffs/components/image_diff_overlay.vue'; @@ -34,7 +33,9 @@ export default { return getDiffMode(this.diffFile); }, hasTruncatedDiffLines() { - return this.discussion.truncatedDiffLines && this.discussion.truncatedDiffLines.length !== 0; + return ( + this.discussion.truncated_diff_lines && this.discussion.truncated_diff_lines.length !== 0 + ); }, isDiscussionsExpanded() { return true; // TODO: @fatihacet - Fix this. @@ -50,19 +51,17 @@ export default { return text ? 
'text-file' : 'js-image-file'; }, diffFile() { - return convertObjectPropsToCamelCase(this.discussion.diffFile, { deep: true }); + return this.discussion.diff_file; }, imageDiffHtml() { - return this.discussion.imageDiffHtml; + return this.discussion.image_diff_html; }, userColorScheme() { return window.gon.user_color_scheme; }, normalizedDiffLines() { - if (this.discussion.truncatedDiffLines) { - return this.discussion.truncatedDiffLines.map(line => - trimFirstCharOfLineContent(convertObjectPropsToCamelCase(line)), - ); + if (this.discussion.truncated_diff_lines) { + return this.discussion.truncated_diff_lines.map(line => trimFirstCharOfLineContent(line)); } return []; @@ -97,7 +96,7 @@ export default { class="diff-file file-holder" > <diff-file-header - :discussion-path="discussion.discussionPath" + :discussion-path="discussion.discussion_path" :diff-file="diffFile" :can-current-user-fork="false" :discussions-expanded="isDiscussionsExpanded" @@ -111,15 +110,15 @@ export default { <table> <tr v-for="line in normalizedDiffLines" - :key="line.lineCode" + :key="line.line_code" class="line_holder" > - <td class="diff-line-num old_line">{{ line.oldLine }}</td> - <td class="diff-line-num new_line">{{ line.newLine }}</td> + <td class="diff-line-num old_line">{{ line.old_line }}</td> + <td class="diff-line-num new_line">{{ line.new_line }}</td> <td :class="line.type" class="line_content" - v-html="line.richText" + v-html="line.rich_text" > </td> </tr> @@ -165,17 +164,17 @@ export default { > <diff-viewer :diff-mode="diffMode" - :new-path="diffFile.newPath" - :new-sha="diffFile.diffRefs.headSha" - :old-path="diffFile.oldPath" - :old-sha="diffFile.diffRefs.baseSha" - :file-hash="diffFile.fileHash" + :new-path="diffFile.new_path" + :new-sha="diffFile.diff_refs.head_sha" + :old-path="diffFile.old_path" + :old-sha="diffFile.diff_refs.base_sha" + :file-hash="diffFile.file_hash" :project-path="projectPath" > <image-diff-overlay slot="image-overlay" :discussions="discussion" - :file-hash="diffFile.fileHash" + :file-hash="diffFile.file_hash" :show-comment-icon="true" :should-toggle-discussion="false" badge-class="image-comment-badge" diff --git a/app/assets/javascripts/notes/components/note_body.vue b/app/assets/javascripts/notes/components/note_body.vue index cf4c35de42c..9375627359c 100644 --- a/app/assets/javascripts/notes/components/note_body.vue +++ b/app/assets/javascripts/notes/components/note_body.vue @@ -4,7 +4,6 @@ import noteEditedText from './note_edited_text.vue'; import noteAwardsList from './note_awards_list.vue'; import noteAttachment from './note_attachment.vue'; import noteForm from './note_form.vue'; -import TaskList from '../../task_list'; import autosave from '../mixins/autosave'; export default { @@ -37,14 +36,12 @@ export default { }, mounted() { this.renderGFM(); - this.initTaskList(); if (this.isEditing) { this.initAutoSave(this.note); } }, updated() { - this.initTaskList(); this.renderGFM(); if (this.isEditing) { @@ -59,15 +56,6 @@ export default { renderGFM() { $(this.$refs['note-body']).renderGFM(); }, - initTaskList() { - if (this.canEdit) { - this.taskList = new TaskList({ - dataType: 'note', - fieldName: 'note', - selector: '.notes', - }); - } - }, handleFormUpdate(note, parentElement, callback) { this.$emit('handleFormUpdate', note, parentElement, callback); }, diff --git a/app/assets/javascripts/notes/components/noteable_discussion.vue b/app/assets/javascripts/notes/components/noteable_discussion.vue index c1dfa036678..7740967ccd5 100644 --- 
a/app/assets/javascripts/notes/components/noteable_discussion.vue +++ b/app/assets/javascripts/notes/components/noteable_discussion.vue @@ -1,6 +1,5 @@ <script> import { mapActions, mapGetters } from 'vuex'; -import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils'; import { truncateSha } from '~/lib/utils/text_utility'; import { s__ } from '~/locale'; import systemNote from '~/vue_shared/components/notes/system_note.vue'; @@ -88,17 +87,16 @@ export default { transformedDiscussion() { return { ...this.discussion.notes[0], - truncatedDiffLines: this.discussion.truncated_diff_lines || [], - truncatedDiffLinesPath: this.discussion.truncated_diff_lines_path, - diffFile: this.discussion.diff_file, - diffDiscussion: this.discussion.diff_discussion, - imageDiffHtml: this.discussion.image_diff_html, + truncated_diff_lines: this.discussion.truncated_diff_lines || [], + truncated_diff_lines_path: this.discussion.truncated_diff_lines_path, + diff_file: this.discussion.diff_file, + diff_discussion: this.discussion.diff_discussion, active: this.discussion.active, - discussionPath: this.discussion.discussion_path, + discussion_path: this.discussion.discussion_path, resolved: this.discussion.resolved, - resolvedBy: this.discussion.resolved_by, - resolvedByPush: this.discussion.resolved_by_push, - resolvedAt: this.discussion.resolved_at, + resolved_by: this.discussion.resolved_by, + resolved_by_push: this.discussion.resolved_by_push, + resolved_at: this.discussion.resolved_at, }; }, author() { @@ -138,7 +136,7 @@ export default { return null; }, resolvedText() { - return this.transformedDiscussion.resolvedByPush ? 'Automatically resolved' : 'Resolved'; + return this.transformedDiscussion.resolved_by_push ? 'Automatically resolved' : 'Resolved'; }, hasMultipleUnresolvedDiscussions() { return this.unresolvedDiscussions.length > 1; @@ -150,12 +148,14 @@ export default { ); }, shouldRenderDiffs() { - const { diffDiscussion, diffFile } = this.transformedDiscussion; - - return diffDiscussion && diffFile && this.renderDiffFile; + return ( + this.transformedDiscussion.diff_discussion && + this.transformedDiscussion.diff_file && + this.renderDiffFile + ); }, shouldGroupReplies() { - return !this.shouldRenderDiffs && !this.transformedDiscussion.diffDiscussion; + return !this.shouldRenderDiffs && !this.transformedDiscussion.diff_discussion; }, shouldRenderHeader() { return this.shouldRenderDiffs; @@ -165,7 +165,7 @@ export default { }, wrapperComponentProps() { if (this.shouldRenderDiffs) { - return { discussion: convertObjectPropsToCamelCase(this.discussion) }; + return { discussion: this.discussion }; } return {}; @@ -184,8 +184,8 @@ export default { }, shouldShowDiscussions() { const isExpanded = this.discussion.expanded; - const { diffDiscussion, resolved } = this.transformedDiscussion; - const isResolvedNonDiffDiscussion = !diffDiscussion && resolved; + const { resolved } = this.transformedDiscussion; + const isResolvedNonDiffDiscussion = !this.transformedDiscussion.diff_discussion && resolved; return isExpanded || this.alwaysExpanded || isResolvedNonDiffDiscussion; }, @@ -333,9 +333,9 @@ Please check your network connection and try again.`; :expanded="discussion.expanded" @toggleHandler="toggleDiscussionHandler" > - <template v-if="transformedDiscussion.diffDiscussion"> + <template v-if="transformedDiscussion.diff_discussion"> started a discussion on - <a :href="transformedDiscussion.discussionPath"> + <a :href="transformedDiscussion.discussion_path"> <template 
v-if="transformedDiscussion.active"> the diff </template> @@ -356,8 +356,8 @@ Please check your network connection and try again.`; </note-header> <note-edited-text v-if="transformedDiscussion.resolved" - :edited-at="transformedDiscussion.resolvedAt" - :edited-by="transformedDiscussion.resolvedBy" + :edited-at="transformedDiscussion.resolved_at" + :edited-by="transformedDiscussion.resolved_by" :action-text="resolvedText" class-name="discussion-headline-light js-discussion-headline" /> diff --git a/app/assets/javascripts/notes/components/noteable_note.vue b/app/assets/javascripts/notes/components/noteable_note.vue index e302a89ee95..9ab91e2abe5 100644 --- a/app/assets/javascripts/notes/components/noteable_note.vue +++ b/app/assets/javascripts/notes/components/noteable_note.vue @@ -46,6 +46,7 @@ export default { 'is-requesting being-posted': this.isRequesting, 'disabled-content': this.isDeleting, target: this.isTarget, + 'is-editable': this.note.current_user.can_edit, }; }, canResolve() { diff --git a/app/assets/javascripts/notes/components/notes_app.vue b/app/assets/javascripts/notes/components/notes_app.vue index e555279a6ac..69ddfd751e0 100644 --- a/app/assets/javascripts/notes/components/notes_app.vue +++ b/app/assets/javascripts/notes/components/notes_app.vue @@ -122,6 +122,7 @@ export default { setTargetNoteHash: 'setTargetNoteHash', toggleDiscussion: 'toggleDiscussion', setNotesFetchedState: 'setNotesFetchedState', + startTaskList: 'startTaskList', }), getComponentName(discussion) { if (discussion.isSkeletonNote) { @@ -157,6 +158,7 @@ export default { this.isFetching = false; }) .then(() => this.$nextTick()) + .then(() => this.startTaskList()) .then(() => this.checkLocationHash()) .catch(() => { this.setLoadingState(false); diff --git a/app/assets/javascripts/notes/stores/actions.js b/app/assets/javascripts/notes/stores/actions.js index 88739ffb083..5b2f0540020 100644 --- a/app/assets/javascripts/notes/stores/actions.js +++ b/app/assets/javascripts/notes/stores/actions.js @@ -1,6 +1,8 @@ +import Vue from 'vue'; import $ from 'jquery'; import axios from '~/lib/utils/axios_utils'; import Visibility from 'visibilityjs'; +import TaskList from '../../task_list'; import Flash from '../../flash'; import Poll from '../../lib/utils/poll'; import * as types from './mutation_types'; @@ -58,12 +60,13 @@ export const deleteNote = ({ commit, dispatch }, note) => dispatch('updateMergeRequestWidget'); }); -export const updateNote = ({ commit }, { endpoint, note }) => +export const updateNote = ({ commit, dispatch }, { endpoint, note }) => service .updateNote(endpoint, note) .then(res => res.json()) .then(res => { commit(types.UPDATE_NOTE, res); + dispatch('startTaskList'); }); export const replyToDiscussion = ({ commit }, { endpoint, data }) => @@ -85,6 +88,7 @@ export const createNewNote = ({ commit, dispatch }, { endpoint, data }) => commit(types.ADD_NEW_NOTE, res); dispatch('updateMergeRequestWidget'); + dispatch('startTaskList'); } return res; }); @@ -260,6 +264,8 @@ const pollSuccessCallBack = (resp, commit, state, getters, dispatch) => { commit(types.ADD_NEW_NOTE, note); } }); + + dispatch('startTaskList'); } commit(types.SET_LAST_FETCHED_AT, resp.last_fetched_at); @@ -335,7 +341,7 @@ export const scrollToNoteIfNeeded = (context, el) => { }; export const fetchDiscussionDiffLines = ({ commit }, discussion) => - axios.get(discussion.truncatedDiffLinesPath).then(({ data }) => { + axios.get(discussion.truncated_diff_lines_path).then(({ data }) => { commit(types.SET_DISCUSSION_DIFF_LINES, { 
discussionId: discussion.id, diffLines: data.truncated_diff_lines, @@ -368,5 +374,16 @@ export const setCommentsDisabled = ({ commit }, data) => { commit(types.DISABLE_COMMENTS, data); }; +export const startTaskList = ({ dispatch }) => + Vue.nextTick( + () => + new TaskList({ + dataType: 'note', + fieldName: 'note', + selector: '.notes .is-editable', + onSuccess: () => dispatch('startTaskList'), + }), + ); + // prevent babel-plugin-rewire from generating an invalid default during karma tests export default () => {}; diff --git a/app/assets/javascripts/notes/stores/mutations.js b/app/assets/javascripts/notes/stores/mutations.js index c8d9e196103..f6054e0be87 100644 --- a/app/assets/javascripts/notes/stores/mutations.js +++ b/app/assets/javascripts/notes/stores/mutations.js @@ -102,7 +102,7 @@ export default { discussionsData.forEach(discussion => { if (discussion.diff_file) { Object.assign(discussion, { - fileHash: discussion.diff_file.file_hash, + file_hash: discussion.diff_file.file_hash, truncated_diff_lines: discussion.truncated_diff_lines || [], }); } @@ -195,7 +195,7 @@ export default { const selectedDiscussion = state.discussions.find(disc => disc.id === note.id); note.expanded = true; // override expand flag to prevent collapse if (note.diff_file) { - Object.assign(note, { fileHash: note.diff_file.file_hash }); + Object.assign(note, { file_hash: note.diff_file.file_hash }); } Object.assign(selectedDiscussion, { ...note }); }, diff --git a/app/assets/javascripts/pipelines/components/graph/action_component.vue b/app/assets/javascripts/pipelines/components/graph/action_component.vue index b82e28a0735..f6a97236ebf 100644 --- a/app/assets/javascripts/pipelines/components/graph/action_component.vue +++ b/app/assets/javascripts/pipelines/components/graph/action_component.vue @@ -1,10 +1,9 @@ <script> -import $ from 'jquery'; +import { GlTooltipDirective, GlButton } from '@gitlab-org/gitlab-ui'; import axios from '~/lib/utils/axios_utils'; import { dasherize } from '~/lib/utils/text_utility'; import { __ } from '~/locale'; import createFlash from '~/flash'; -import tooltip from '~/vue_shared/directives/tooltip'; import Icon from '~/vue_shared/components/icon.vue'; /** @@ -20,23 +19,20 @@ import Icon from '~/vue_shared/components/icon.vue'; export default { components: { Icon, + GlButton, }, - directives: { - tooltip, + GlTooltip: GlTooltipDirective, }, - props: { tooltipText: { type: String, required: true, }, - link: { type: String, required: true, }, - actionIcon: { type: String, required: true, @@ -47,7 +43,6 @@ export default { isDisabled: false, }; }, - computed: { cssClass() { const actionIconDash = dasherize(this.actionIcon); @@ -62,8 +57,7 @@ export default { * */ onClickAction() { - $(this.$el).tooltip('hide'); - + this.$root.$emit('bv::hide::tooltip', `js-ci-action-${this.link}`); this.isDisabled = true; axios @@ -82,18 +76,16 @@ export default { }; </script> <template> - <button - v-tooltip + <gl-button + :id="`js-ci-action-${link}`" + v-gl-tooltip="{ boundary: 'viewport' }" :title="tooltipText" :class="cssClass" :disabled="isDisabled" - type="button" class="js-ci-action btn btn-blank btn-transparent ci-action-icon-container ci-action-icon-wrapper" - data-container="body" - data-boundary="viewport" @click="onClickAction" > <icon :name="actionIcon"/> - </button> + </gl-button> </template> diff --git a/app/assets/javascripts/pipelines/components/graph/job_group_dropdown.vue b/app/assets/javascripts/pipelines/components/graph/job_group_dropdown.vue index 34bada533df..2c3cb1959b5 
100644 --- a/app/assets/javascripts/pipelines/components/graph/job_group_dropdown.vue +++ b/app/assets/javascripts/pipelines/components/graph/job_group_dropdown.vue @@ -1,8 +1,8 @@ <script> import $ from 'jquery'; +import { GlTooltipDirective } from '@gitlab-org/gitlab-ui'; import CiIcon from '~/vue_shared/components/ci_icon.vue'; import JobItem from './job_item.vue'; -import tooltip from '../../../vue_shared/directives/tooltip'; /** * Renders the dropdown for the pipeline graph. @@ -12,32 +12,27 @@ import tooltip from '../../../vue_shared/directives/tooltip'; */ export default { directives: { - tooltip, + GlTooltip: GlTooltipDirective, }, - components: { JobItem, CiIcon, }, - props: { group: { type: Object, required: true, }, }, - computed: { tooltipText() { const { name, status } = this.group; return `${name} - ${status.label}`; }, }, - mounted() { this.stopDropdownClickPropagation(); }, - methods: { /** * When the user right clicks or cmd/ctrl + click in the group name or the action icon @@ -65,12 +60,10 @@ export default { <template> <div class="ci-job-dropdown-container dropdown dropright"> <button - v-tooltip + v-gl-tooltip.hover="{ boundary: 'viewport' }" :title="tooltipText" type="button" data-toggle="dropdown" - data-container="body" - data-boundary="viewport" data-display="static" class="dropdown-menu-toggle build-content" > diff --git a/app/assets/javascripts/pipelines/components/graph/job_item.vue b/app/assets/javascripts/pipelines/components/graph/job_item.vue index 7cdde8a53b3..182849c6455 100644 --- a/app/assets/javascripts/pipelines/components/graph/job_item.vue +++ b/app/assets/javascripts/pipelines/components/graph/job_item.vue @@ -1,7 +1,7 @@ <script> import ActionComponent from './action_component.vue'; import JobNameComponent from './job_name_component.vue'; -import tooltip from '../../../vue_shared/directives/tooltip'; +import { GlTooltipDirective, GlLink } from '@gitlab-org/gitlab-ui'; import { sprintf } from '~/locale'; import delayedJobMixin from '~/jobs/mixins/delayed_job_mixin'; @@ -34,9 +34,10 @@ export default { components: { ActionComponent, JobNameComponent, + GlLink, }, directives: { - tooltip, + GlTooltip: GlTooltipDirective, }, mixins: [delayedJobMixin], props: { @@ -55,7 +56,6 @@ export default { default: Infinity, }, }, - computed: { status() { return this.job && this.job.status ? this.job.status : {}; @@ -88,7 +88,6 @@ export default { tooltipBoundary() { return this.dropdownLength < 5 ? 
'viewport' : null; }, - /** * Verifies if the provided job has an action path * @@ -98,7 +97,6 @@ export default { return this.job.status && this.job.status.action && this.job.status.action.path; }, }, - methods: { pipelineActionRequestComplete() { this.$emit('pipelineActionRequestComplete'); @@ -108,30 +106,26 @@ export default { </script> <template> <div class="ci-job-component"> - <a + <gl-link v-if="status.has_details" - v-tooltip + v-gl-tooltip="{ boundary: tooltipBoundary }" :href="status.details_path" :title="tooltipText" :class="cssClassJobName" - :data-boundary="tooltipBoundary" - data-container="body" class="js-pipeline-graph-job-link" > - <job-name-component :name="job.name" :status="job.status" /> - </a> + </gl-link> <div v-else - v-tooltip + v-gl-tooltip :title="tooltipText" :class="cssClassJobName" class="js-job-component-tooltip non-details-job-component" - data-container="body" > <job-name-component diff --git a/app/assets/javascripts/pipelines/components/graph/stage_column_component.vue b/app/assets/javascripts/pipelines/components/graph/stage_column_component.vue index efbab51d200..d5f931943d5 100644 --- a/app/assets/javascripts/pipelines/components/graph/stage_column_component.vue +++ b/app/assets/javascripts/pipelines/components/graph/stage_column_component.vue @@ -13,34 +13,28 @@ export default { type: String, required: true, }, - groups: { type: Array, required: true, }, - isFirstColumn: { type: Boolean, required: false, default: false, }, - stageConnectorClass: { type: String, required: false, default: '', }, }, - methods: { groupId(group) { return `ci-badge-${_.escape(group.name)}`; }, - buildConnnectorClass(index) { return index === 0 && !this.isFirstColumn ? 'left-connector' : ''; }, - pipelineActionRequestComplete() { this.$emit('refreshPipelineGraph'); }, @@ -50,7 +44,8 @@ export default { <template> <li :class="stageConnectorClass" - class="stage-column"> + class="stage-column" + > <div class="stage-name"> {{ title }} </div> @@ -78,7 +73,6 @@ export default { :group="group" @pipelineActionRequestComplete="pipelineActionRequestComplete" /> - </li> </ul> </div> diff --git a/app/assets/javascripts/pipelines/components/pipelines_table_row.vue b/app/assets/javascripts/pipelines/components/pipelines_table_row.vue index 026d533d10f..fd674a8d447 100644 --- a/app/assets/javascripts/pipelines/components/pipelines_table_row.vue +++ b/app/assets/javascripts/pipelines/components/pipelines_table_row.vue @@ -308,7 +308,8 @@ export default { <div v-for="(stage, index) in pipeline.details.stages" :key="index" - class="stage-container dropdown js-mini-pipeline-graph"> + class="stage-container dropdown js-mini-pipeline-graph" + > <pipeline-stage :type="$options.pipelinesTable" :stage="stage" diff --git a/app/assets/javascripts/pipelines/components/stage.vue b/app/assets/javascripts/pipelines/components/stage.vue index 3df8f7a6da6..587c4ffa45c 100644 --- a/app/assets/javascripts/pipelines/components/stage.vue +++ b/app/assets/javascripts/pipelines/components/stage.vue @@ -13,14 +13,13 @@ */ import $ from 'jquery'; -import { GlLoadingIcon } from '@gitlab-org/gitlab-ui'; +import { GlLoadingIcon, GlTooltipDirective } from '@gitlab-org/gitlab-ui'; import { __ } from '../../locale'; import Flash from '../../flash'; import axios from '../../lib/utils/axios_utils'; import eventHub from '../event_hub'; import Icon from '../../vue_shared/components/icon.vue'; import JobItem from './graph/job_item.vue'; -import tooltip from '../../vue_shared/directives/tooltip'; import { PIPELINES_TABLE } 
from '../constants'; export default { @@ -31,7 +30,7 @@ export default { }, directives: { - tooltip, + GlTooltip: GlTooltipDirective, }, props: { @@ -159,11 +158,10 @@ export default { <button id="stageDropdown" ref="dropdown" - v-tooltip + v-gl-tooltip.hover :class="triggerButtonClass" :title="stage.title" class="mini-pipeline-graph-dropdown-toggle js-builds-dropdown-button" - data-placement="top" data-toggle="dropdown" data-display="static" type="button" diff --git a/app/assets/javascripts/vue_merge_request_widget/components/deployment.vue b/app/assets/javascripts/vue_merge_request_widget/components/deployment.vue index 2a8380f5f2b..4ec925aa8a6 100644 --- a/app/assets/javascripts/vue_merge_request_widget/components/deployment.vue +++ b/app/assets/javascripts/vue_merge_request_widget/components/deployment.vue @@ -40,10 +40,8 @@ export default { failed: __('Failed to deploy to'), }, data() { - const features = window.gon.features || {}; return { isStopping: false, - enableCiEnvironmentsStatusChanges: features.ciEnvironmentsStatusChanges, }; }, computed: { @@ -74,10 +72,7 @@ export default { : ''; }, shouldRenderDropdown() { - return ( - this.enableCiEnvironmentsStatusChanges && - (this.deployment.changes && this.deployment.changes.length > 0) - ); + return this.deployment.changes && this.deployment.changes.length > 0; }, }, methods: { diff --git a/app/assets/javascripts/vue_shared/components/diff_viewer/viewers/image_diff_viewer.vue b/app/assets/javascripts/vue_shared/components/diff_viewer/viewers/image_diff_viewer.vue index e68a2aa73fa..d7f24c1afc5 100644 --- a/app/assets/javascripts/vue_shared/components/diff_viewer/viewers/image_diff_viewer.vue +++ b/app/assets/javascripts/vue_shared/components/diff_viewer/viewers/image_diff_viewer.vue @@ -44,8 +44,11 @@ export default { isNew() { return this.diffMode === diffModes.new; }, + isRenamed() { + return this.diffMode === diffModes.renamed; + }, imagePath() { - return this.isNew ? this.newPath : this.oldPath; + return this.isNew || this.isRenamed ? 
this.newPath : this.oldPath; }, }, methods: { @@ -114,7 +117,7 @@ export default { }]" > <slot - v-if="isNew" + v-if="isNew || isRenamed" slot="image-overlay" name="image-overlay" > diff --git a/app/assets/javascripts/vue_shared/components/header_ci_component.vue b/app/assets/javascripts/vue_shared/components/header_ci_component.vue index aee88cae48d..88e95c33b9b 100644 --- a/app/assets/javascripts/vue_shared/components/header_ci_component.vue +++ b/app/assets/javascripts/vue_shared/components/header_ci_component.vue @@ -1,8 +1,9 @@ <script> +import { GlTooltipDirective, GlLink, GlButton } from '@gitlab-org/gitlab-ui'; import CiIconBadge from './ci_badge_link.vue'; import TimeagoTooltip from './time_ago_tooltip.vue'; -import tooltip from '../directives/tooltip'; import UserAvatarImage from './user_avatar/user_avatar_image.vue'; +import LoadingButton from '~/vue_shared/components/loading_button.vue'; /** * Renders header component for job and pipeline page based on UI mockups @@ -16,9 +17,12 @@ export default { CiIconBadge, TimeagoTooltip, UserAvatarImage, + GlLink, + GlButton, + LoadingButton, }, directives: { - tooltip, + GlTooltip: GlTooltipDirective, }, props: { status: { @@ -98,8 +102,8 @@ export default { by <template v-if="user"> - <a - v-tooltip + <gl-link + v-gl-tooltip :href="user.path" :title="user.email" class="js-user-link commit-committer-link" @@ -113,7 +117,7 @@ export default { /> {{ user.name }} - </a> + </gl-link> <span v-if="user.status_tooltip_html" v-html="user.status_tooltip_html"></span> @@ -127,16 +131,16 @@ export default { <template v-for="(action, i) in actions" > - <a + <gl-link v-if="action.type === 'link'" :key="i" :href="action.path" :class="action.cssClass" > {{ action.label }} - </a> + </gl-link> - <a + <gl-link v-else-if="action.type === 'ujs-link'" :key="i" :href="action.path" @@ -145,31 +149,24 @@ export default { rel="nofollow" > {{ action.label }} - </a> + </gl-link> - <button + <loading-button v-else-if="action.type === 'button'" :key="i" + :loading="action.isLoading" :disabled="action.isLoading" :class="action.cssClass" - type="button" + container-class="d-inline" + :label="action.label" @click="onClickAction(action)" - > - {{ action.label }} - <i - v-show="action.isLoading" - class="fa fa-spin fa-spinner" - aria-hidden="true" - > - </i> - </button> + /> </template> </section> - <button + <gl-button v-if="hasSidebarButton" id="toggleSidebar" - type="button" - class="btn btn-default d-block d-sm-none + class="d-block d-sm-none sidebar-toggle-btn js-sidebar-build-toggle js-sidebar-build-toggle-header" @click="onClickSidebarButton" > @@ -179,6 +176,6 @@ sidebar-toggle-btn js-sidebar-build-toggle js-sidebar-build-toggle-header" aria-labelledby="toggleSidebar" > </i> - </button> + </gl-button> </header> </template> diff --git a/app/assets/stylesheets/pages/issuable.scss b/app/assets/stylesheets/pages/issuable.scss index 00b06aea898..3aa79bf2466 100644 --- a/app/assets/stylesheets/pages/issuable.scss +++ b/app/assets/stylesheets/pages/issuable.scss @@ -47,12 +47,6 @@ @extend .fixed-width-container; } } - - .diffs { - .mr-version-controls { - @extend .fixed-width-container; - } - } } .issuable-details { diff --git a/app/assets/stylesheets/pages/note_form.scss b/app/assets/stylesheets/pages/note_form.scss index 855d73a9939..97b3f696139 100644 --- a/app/assets/stylesheets/pages/note_form.scss +++ b/app/assets/stylesheets/pages/note_form.scss @@ -176,8 +176,10 @@ background-color: $white-light; } -.discussion-form-container { - padding: $gl-padding-top 
$gl-padding $gl-padding; +table { + .discussion-form-container { + padding: $gl-padding-top $gl-padding $gl-padding; + } } .discussion-notes .disabled-comment { @@ -239,7 +241,7 @@ .discussion-reply-holder { background-color: $white-light; padding: 10px 16px; - border-radius: 0 0 $border-radius-default $border-radius-default; + border-radius: 0 0 3px 3px; &.is-replying { padding-bottom: $gl-padding; diff --git a/app/assets/stylesheets/pages/notes.scss b/app/assets/stylesheets/pages/notes.scss index c57c1eee350..1f34537d856 100644 --- a/app/assets/stylesheets/pages/notes.scss +++ b/app/assets/stylesheets/pages/notes.scss @@ -1,6 +1,6 @@ $system-note-icon-size: 32px; $system-note-svg-size: 16px; -$note-form-margin-left: 70px; +$note-form-margin-left: 72px; @mixin vertical-line($left) { &::before { @@ -13,12 +13,32 @@ $note-form-margin-left: 70px; } } +@mixin outline-comment() { + margin: $gl-padding; + border: 1px solid $border-color; + border-radius: $border-radius-default; +} + .note-wrapper { padding: $gl-padding; + + &.outlined { + @include outline-comment(); + } +} + +.main-notes-list { + @include vertical-line(36px); } -.issuable-discussion { - .notes.timeline > .timeline-entry { +.notes { + display: block; + list-style: none; + margin: 0; + padding: 0; + position: relative; + + &.timeline > .timeline-entry { border: 1px solid $border-color; border-radius: $border-radius-default; margin: $gl-padding 0; @@ -51,18 +71,6 @@ $note-form-margin-left: 70px; border-top: 1px solid $border-color; } } -} - -.main-notes-list { - @include vertical-line(39px); -} - -.notes { - display: block; - list-style: none; - margin: 0; - padding: 0; - position: relative; > .note-discussion { .card { @@ -71,10 +79,6 @@ $note-form-margin-left: 70px; li.note { border-bottom: 1px solid $border-color; - - &:first-child { - border-radius: $border-radius-default $border-radius-default 0 0; - } } } @@ -268,7 +272,7 @@ $note-form-margin-left: 70px; } .system-note { - padding: 6px $gl-padding-24; + padding: 6px 21px; margin: $gl-padding-24 0; background-color: transparent; @@ -387,6 +391,7 @@ $note-form-margin-left: 70px; line-height: 42px; padding: 0 $gl-padding; border-top: 1px solid $border-color; + border-radius: 0; &:hover { background-color: $gray-light; @@ -402,6 +407,24 @@ $note-form-margin-left: 70px; } } +.tab-pane.notes { + .diff-file .notes .system-note { + margin: 0; + } +} + +.tab-pane.diffs { + .system-note { + padding: 0 $gl-padding; + margin-left: 20px; + } + + .notes > .note-discussion li.note.system-note { + border-bottom: 0; + padding: 0 $gl-padding; + } +} + .diff-file { .is-over { .add-diff-note { @@ -421,7 +444,7 @@ $note-form-margin-left: 70px; } .system-note { - margin: 0; + background-color: $white-light; padding: $gl-padding; } } @@ -479,9 +502,12 @@ $note-form-margin-left: 70px; } .note-wrapper { - margin: $gl-padding; - border: 1px solid $border-color; - border-radius: $border-radius-default; + @include outline-comment(); + + &.system-note { + border: 0; + margin-left: 20px; + } } .discussion-reply-holder { @@ -491,6 +517,20 @@ $note-form-margin-left: 70px; } } +.commit-diff { + .notes { + @include vertical-line(52px); + } + + .notes_content { + background-color: $white-light; + } + + .discussion-reply-holder { + border-top: 1px solid $border-color; + } +} + .discussion-header, .note-header-info { a { @@ -897,3 +937,23 @@ $note-form-margin-left: 70px; } } } + +//This needs to be deleted when Snippet/Commit comments are convered to Vue +// See 
https://gitlab.com/gitlab-org/gitlab-ce/issues/53918#note_117038785 +.unstyled-comments { + + .discussion-header { + padding: $gl-padding; + border-bottom: 1px solid $border-color; + } + + .note-wrapper.outlined { + margin: 0; + border: 0; + border-radius: 0; + } + + .discussion-form-container { + padding: $gl-padding; + } +} diff --git a/app/controllers/admin/background_jobs_controller.rb b/app/controllers/admin/background_jobs_controller.rb index 7701f2e645b..fc877142418 100644 --- a/app/controllers/admin/background_jobs_controller.rb +++ b/app/controllers/admin/background_jobs_controller.rb @@ -1,9 +1,4 @@ # frozen_string_literal: true class Admin::BackgroundJobsController < Admin::ApplicationController - def show - ps_output, _ = Gitlab::Popen.popen(%W(ps ww -U #{Gitlab.config.gitlab.user} -o pid,pcpu,pmem,stat,start,command)) - @sidekiq_processes = ps_output.split("\n").grep(/sidekiq \d+\.\d+\.\d+/) - @concurrency = Sidekiq.options[:concurrency] - end end diff --git a/app/controllers/admin/impersonation_tokens_controller.rb b/app/controllers/admin/impersonation_tokens_controller.rb index f5825ecb19a..706bcc1e549 100644 --- a/app/controllers/admin/impersonation_tokens_controller.rb +++ b/app/controllers/admin/impersonation_tokens_controller.rb @@ -11,6 +11,7 @@ class Admin::ImpersonationTokensController < Admin::ApplicationController @impersonation_token = finder.build(impersonation_token_params) if @impersonation_token.save + PersonalAccessToken.redis_store!(current_user.id, @impersonation_token.token) redirect_to admin_user_impersonation_tokens_path, notice: "A new impersonation token has been created." else set_index_vars @@ -53,6 +54,8 @@ class Admin::ImpersonationTokensController < Admin::ApplicationController @impersonation_token ||= finder.build @inactive_impersonation_tokens = finder(state: 'inactive').execute @active_impersonation_tokens = finder(state: 'active').execute.order(:expires_at) + + @new_impersonation_token = PersonalAccessToken.redis_getdel(current_user.id) end # rubocop: enable CodeReuse/ActiveRecord end diff --git a/app/controllers/projects/blob_controller.rb b/app/controllers/projects/blob_controller.rb index c02ec407262..2a6fe3b9c97 100644 --- a/app/controllers/projects/blob_controller.rb +++ b/app/controllers/projects/blob_controller.rb @@ -122,7 +122,7 @@ class Projects::BlobController < Projects::ApplicationController @lines.map! do |line| # These are marked as context lines but are loaded from blobs. # We also have context lines loaded from diffs in other places. 
- diff_line = Gitlab::Diff::Line.new(line, 'context', nil, nil, nil) + diff_line = Gitlab::Diff::Line.new(line, nil, nil, nil, nil) diff_line.rich_text = line diff_line end diff --git a/app/controllers/projects/merge_requests/diffs_controller.rb b/app/controllers/projects/merge_requests/diffs_controller.rb index 5307cd0720a..b3d77335c2a 100644 --- a/app/controllers/projects/merge_requests/diffs_controller.rb +++ b/app/controllers/projects/merge_requests/diffs_controller.rb @@ -22,6 +22,12 @@ class Projects::MergeRequests::DiffsController < Projects::MergeRequests::Applic def render_diffs @environment = @merge_request.environments_for(current_user).last + notes_grouped_by_path = renderable_notes.group_by { |note| note.position.file_path } + + @diffs.diff_files.each do |diff_file| + notes = notes_grouped_by_path.fetch(diff_file.file_path, []) + notes.each { |note| diff_file.unfold_diff_lines(note.position) } + end @diffs.write_cache @@ -108,4 +114,10 @@ class Projects::MergeRequests::DiffsController < Projects::MergeRequests::Applic @grouped_diff_discussions = @merge_request.grouped_diff_discussions(@compare.diff_refs) @notes = prepare_notes_for_rendering(@grouped_diff_discussions.values.flatten.flat_map(&:notes), @merge_request) end + + def renderable_notes + define_diff_comment_vars unless @notes + + @notes + end end diff --git a/app/controllers/projects/merge_requests_controller.rb b/app/controllers/projects/merge_requests_controller.rb index 4bdb857b2d9..23d16fed7b9 100644 --- a/app/controllers/projects/merge_requests_controller.rb +++ b/app/controllers/projects/merge_requests_controller.rb @@ -14,9 +14,6 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo before_action :set_issuables_index, only: [:index] before_action :authenticate_user!, only: [:assign_related_issues] before_action :check_user_can_push_to_source_branch!, only: [:rebase] - before_action do - push_frontend_feature_flag(:ci_environments_status_changes) - end def index @merge_requests = @issuables diff --git a/app/helpers/import_helper.rb b/app/helpers/import_helper.rb index 3d0eb3d0d51..49171df1433 100644 --- a/app/helpers/import_helper.rb +++ b/app/helpers/import_helper.rb @@ -83,7 +83,7 @@ module ImportHelper private def github_project_url(full_path) - URI.join(github_root_url, full_path).to_s + Gitlab::Utils.append_path(github_root_url, full_path) end def github_root_url @@ -95,6 +95,6 @@ module ImportHelper end def gitea_project_url(full_path) - URI.join(@gitea_host_url, full_path).to_s + Gitlab::Utils.append_path(@gitea_host_url, full_path) end end diff --git a/app/helpers/nav_helper.rb b/app/helpers/nav_helper.rb index 761f42f2f0f..a7fe8c3d59c 100644 --- a/app/helpers/nav_helper.rb +++ b/app/helpers/nav_helper.rb @@ -19,10 +19,7 @@ module NavHelper end def page_gutter_class - if current_path?('merge_requests#show') || - current_path?('projects/merge_requests/conflicts#show') || - current_path?('issues#show') || - current_path?('milestones#show') + if page_has_markdown? if cookies[:collapsed_gutter] == 'true' %w[page-gutter right-sidebar-collapsed] @@ -50,6 +47,17 @@ module NavHelper class_names end + def show_separator? + Gitlab::Sherlock.enabled? || can?(current_user, :read_instance_statistics) + end + + def page_has_markdown? 
+ current_path?('merge_requests#show') || + current_path?('projects/merge_requests/conflicts#show') || + current_path?('issues#show') || + current_path?('milestones#show') + end + private def get_header_links diff --git a/app/helpers/profiles_helper.rb b/app/helpers/profiles_helper.rb index 42f9a1213e9..df318de740a 100644 --- a/app/helpers/profiles_helper.rb +++ b/app/helpers/profiles_helper.rb @@ -7,7 +7,7 @@ module ProfilesHelper [ [s_("Profiles|Use a private email - %{email}").html_safe % { email: private_email }, Gitlab::PrivateCommitEmail::TOKEN], - verified_emails + *verified_emails ] end diff --git a/app/models/application_record.rb b/app/models/application_record.rb new file mode 100644 index 00000000000..71fbba5b328 --- /dev/null +++ b/app/models/application_record.rb @@ -0,0 +1,5 @@ +# frozen_string_literal: true + +class ApplicationRecord < ActiveRecord::Base + self.abstract_class = true +end diff --git a/app/models/clusters/applications/knative.rb b/app/models/clusters/applications/knative.rb index a79a97576d1..c66d5ce54db 100644 --- a/app/models/clusters/applications/knative.rb +++ b/app/models/clusters/applications/knative.rb @@ -41,6 +41,10 @@ module Clusters ) end + def client + cluster.platform_kubernetes.kubeclient.knative_client + end + private def install_script diff --git a/app/models/clusters/kubernetes_namespace.rb b/app/models/clusters/kubernetes_namespace.rb index ac7f9193b87..cbd52bfb48b 100644 --- a/app/models/clusters/kubernetes_namespace.rb +++ b/app/models/clusters/kubernetes_namespace.rb @@ -22,6 +22,8 @@ module Clusters key: Settings.attr_encrypted_db_key_base_truncated, algorithm: 'aes-256-cbc' + scope :has_service_account_token, -> { where.not(encrypted_service_account_token: nil) } + def token_name "#{namespace}-token" end diff --git a/app/models/clusters/platforms/kubernetes.rb b/app/models/clusters/platforms/kubernetes.rb index ea02ae6c9d8..3c5d7756eec 100644 --- a/app/models/clusters/platforms/kubernetes.rb +++ b/app/models/clusters/platforms/kubernetes.rb @@ -83,7 +83,7 @@ module Clusters .append(key: 'KUBE_CA_PEM_FILE', value: ca_pem, file: true) end - if kubernetes_namespace = cluster.kubernetes_namespaces.find_by(project: project) + if kubernetes_namespace = cluster.kubernetes_namespaces.has_service_account_token.find_by(project: project) variables.concat(kubernetes_namespace.predefined_variables) else # From 11.5, every Clusters::Project should have at least one @@ -173,9 +173,7 @@ module Clusters kubeclient = build_kube_client! kubeclient.get_pods(namespace: actual_namespace).as_json - rescue Kubeclient::HttpError => err - raise err unless err.error_code == 404 - + rescue Kubeclient::ResourceNotFoundError [] end diff --git a/app/models/concerns/deployable.rb b/app/models/concerns/deployable.rb index f4f1989f0a9..bc12b06b5af 100644 --- a/app/models/concerns/deployable.rb +++ b/app/models/concerns/deployable.rb @@ -13,17 +13,18 @@ module Deployable name: expanded_environment_name ) - environment.deployments.create!( + # If we failed to persist envirionment record by validation error, such as name with invalid character, + # the job will fall back to a non-environment job. + return unless environment.persisted? 
+ + create_deployment!( project_id: environment.project_id, environment: environment, ref: ref, tag: tag, sha: sha, user: user, - deployable: self, - on_stop: on_stop).tap do |_| - self.reload # Reload relationships - end + on_stop: on_stop) end end end diff --git a/app/models/concerns/mentionable.rb b/app/models/concerns/mentionable.rb index 298d0d42d90..0d88b34fb48 100644 --- a/app/models/concerns/mentionable.rb +++ b/app/models/concerns/mentionable.rb @@ -97,9 +97,9 @@ module Mentionable # Allows heavy processing to be skipped def matches_cross_reference_regex? reference_pattern = if !project || project.default_issues_tracker? - ReferenceRegexes::DEFAULT_PATTERN + ReferenceRegexes.default_pattern else - ReferenceRegexes::EXTERNAL_PATTERN + ReferenceRegexes.external_pattern end self.class.mentionable_attrs.any? do |attr, _| diff --git a/app/models/concerns/mentionable/reference_regexes.rb b/app/models/concerns/mentionable/reference_regexes.rb index fe8fbb71184..b8fb3f71925 100644 --- a/app/models/concerns/mentionable/reference_regexes.rb +++ b/app/models/concerns/mentionable/reference_regexes.rb @@ -2,6 +2,8 @@ module Mentionable module ReferenceRegexes + extend Gitlab::Utils::StrongMemoize + def self.reference_pattern(link_patterns, issue_pattern) Regexp.union(link_patterns, issue_pattern, @@ -15,16 +17,20 @@ module Mentionable ] end - DEFAULT_PATTERN = begin - issue_pattern = Issue.reference_pattern - link_patterns = Regexp.union([Issue, Commit, MergeRequest, Epic].map(&:link_reference_pattern).compact) - reference_pattern(link_patterns, issue_pattern) + def self.default_pattern + strong_memoize(:default_pattern) do + issue_pattern = Issue.reference_pattern + link_patterns = Regexp.union([Issue, Commit, MergeRequest, Epic].map(&:link_reference_pattern).compact) + reference_pattern(link_patterns, issue_pattern) + end end - EXTERNAL_PATTERN = begin - issue_pattern = IssueTrackerService.reference_pattern - link_patterns = URI.regexp(%w(http https)) - reference_pattern(link_patterns, issue_pattern) + def self.external_pattern + strong_memoize(:external_pattern) do + issue_pattern = IssueTrackerService.reference_pattern + link_patterns = URI.regexp(%w(http https)) + reference_pattern(link_patterns, issue_pattern) + end end end end diff --git a/app/models/deployment.rb b/app/models/deployment.rb index 83434276995..811e623b7f7 100644 --- a/app/models/deployment.rb +++ b/app/models/deployment.rb @@ -160,18 +160,18 @@ class Deployment < ActiveRecord::Base end def has_metrics? - prometheus_adapter&.can_query? + prometheus_adapter&.can_query? && success? end def metrics - return {} unless has_metrics? && success? + return {} unless has_metrics? metrics = prometheus_adapter.query(:deployment, self) metrics&.merge(deployment_time: finished_at.to_i) || {} end def additional_metrics - return {} unless has_metrics? && success? + return {} unless has_metrics? metrics = prometheus_adapter.query(:additional_metrics_deployment, self) metrics&.merge(deployment_time: finished_at.to_i) || {} diff --git a/app/models/diff_note.rb b/app/models/diff_note.rb index 5f59e4832db..c32008aa9c7 100644 --- a/app/models/diff_note.rb +++ b/app/models/diff_note.rb @@ -66,6 +66,10 @@ class DiffNote < Note self.original_position.diff_refs == diff_refs end + def discussion_first_note? + self == discussion.first_note + end + private def enqueue_diff_file_creation_job @@ -78,26 +82,33 @@ class DiffNote < Note end def should_create_diff_file? - on_text? && note_diff_file.nil? && self == discussion.first_note + on_text? 
&& note_diff_file.nil? && discussion_first_note? end def fetch_diff_file - if note_diff_file - diff = Gitlab::Git::Diff.new(note_diff_file.to_hash) - Gitlab::Diff::File.new(diff, - repository: project.repository, - diff_refs: original_position.diff_refs) - elsif created_at_diff?(noteable.diff_refs) - # We're able to use the already persisted diffs (Postgres) if we're - # presenting a "current version" of the MR discussion diff. - # So no need to make an extra Gitaly diff request for it. - # As an extra benefit, the returned `diff_file` already - # has `highlighted_diff_lines` data set from Redis on - # `Diff::FileCollection::MergeRequestDiff`. - noteable.diffs(original_position.diff_options).diff_files.first - else - original_position.diff_file(self.project.repository) - end + file = + if note_diff_file + diff = Gitlab::Git::Diff.new(note_diff_file.to_hash) + Gitlab::Diff::File.new(diff, + repository: project.repository, + diff_refs: original_position.diff_refs) + elsif created_at_diff?(noteable.diff_refs) + # We're able to use the already persisted diffs (Postgres) if we're + # presenting a "current version" of the MR discussion diff. + # So no need to make an extra Gitaly diff request for it. + # As an extra benefit, the returned `diff_file` already + # has `highlighted_diff_lines` data set from Redis on + # `Diff::FileCollection::MergeRequestDiff`. + noteable.diffs(original_position.diff_options).diff_files.first + else + original_position.diff_file(self.project.repository) + end + + # Since persisted diff files already have their content "unfolded" + # there's no need to make them pass through the unfolding process. + file&.unfold_diff_lines(position) unless note_diff_file + + file end def supported? diff --git a/app/models/hooks/service_hook.rb b/app/models/hooks/service_hook.rb index 7d9f6d89d44..8f305dd7c22 100644 --- a/app/models/hooks/service_hook.rb +++ b/app/models/hooks/service_hook.rb @@ -5,8 +5,8 @@ class ServiceHook < WebHook validates :service, presence: true # rubocop: disable CodeReuse/ServiceClass - def execute(data) - WebHookService.new(self, data, 'service_hook').execute + def execute(data, hook_name = 'service_hook') + WebHookService.new(self, data, hook_name).execute end # rubocop: enable CodeReuse/ServiceClass end diff --git a/app/models/identity.rb b/app/models/identity.rb index f5a13dbd6f2..d63dd432426 100644 --- a/app/models/identity.rb +++ b/app/models/identity.rb @@ -1,18 +1,14 @@ # frozen_string_literal: true class Identity < ActiveRecord::Base - def self.uniqueness_scope - :provider - end - include Sortable include CaseSensitivity belongs_to :user validates :provider, presence: true - validates :extern_uid, allow_blank: true, uniqueness: { scope: uniqueness_scope, case_sensitive: false } - validates :user_id, uniqueness: { scope: uniqueness_scope } + validates :extern_uid, allow_blank: true, uniqueness: { scope: UniquenessScopes.scopes, case_sensitive: false } + validates :user_id, uniqueness: { scope: UniquenessScopes.scopes } before_save :ensure_normalized_extern_uid, if: :extern_uid_changed? after_destroy :clear_user_synced_attributes, if: :user_synced_attributes_metadata_from_provider?
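The Identity model above now reads its uniqueness scopes through a module method instead of a class method defined inline, and the new file that follows explains why: defining the method in a separate file lets GitLab EE redefine it before the Identity class body is evaluated. A minimal plain-Ruby sketch of that load-order pattern, with a hypothetical EE scope list:

# CE definition, loaded from its own file:
module UniquenessScopes
  def self.scopes
    [:provider]
  end
end

# Hypothetical EE override, loaded before the model class body runs:
module UniquenessScopes
  def self.scopes
    [:saml_provider_id, :provider]
  end
end

# The model evaluates UniquenessScopes.scopes at class-definition time,
# so it picks up whichever definition is in effect when it loads:
class Identity
  SCOPES = UniquenessScopes.scopes
end

Identity::SCOPES # => [:saml_provider_id, :provider]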
diff --git a/app/models/identity/uniqueness_scopes.rb b/app/models/identity/uniqueness_scopes.rb new file mode 100644 index 00000000000..674b735903f --- /dev/null +++ b/app/models/identity/uniqueness_scopes.rb @@ -0,0 +1,11 @@ +# frozen_string_literal: true + +class Identity < ActiveRecord::Base + # This module and method are defined in a separate file to allow EE to + # redefine the `scopes` method before it is used in the `Identity` model. + module UniquenessScopes + def self.scopes + [:provider] + end + end +end diff --git a/app/models/merge_request.rb b/app/models/merge_request.rb index df5678ec2f1..92add079a02 100644 --- a/app/models/merge_request.rb +++ b/app/models/merge_request.rb @@ -966,7 +966,6 @@ class MergeRequest < ActiveRecord::Base def mergeable_ci_state? return true unless project.only_allow_merge_if_pipeline_succeeds? - return true unless head_pipeline actual_head_pipeline&.success? || actual_head_pipeline&.skipped? end diff --git a/app/models/merge_request_diff.rb b/app/models/merge_request_diff.rb index 74583af1a29..6f1beede6f9 100644 --- a/app/models/merge_request_diff.rb +++ b/app/models/merge_request_diff.rb @@ -142,7 +142,7 @@ class MergeRequestDiff < ActiveRecord::Base end def commits_by_shas(shas) - return [] unless shas.present? + return MergeRequestDiffCommit.none unless shas.present? merge_request_diff_commits.where(sha: shas) end diff --git a/app/models/project.rb b/app/models/project.rb index d87fc1e4b86..ab19190385e 100644 --- a/app/models/project.rb +++ b/app/models/project.rb @@ -1903,10 +1903,6 @@ class Project < ActiveRecord::Base false end - def issue_board_milestone_available?(user = nil) - feature_available?(:issue_board_milestone, user) - end - def full_path_was File.join(namespace.full_path, previous_changes['path'].first) end diff --git a/app/models/project_services/bamboo_service.rb b/app/models/project_services/bamboo_service.rb index d121d088ff6..a252052200a 100644 --- a/app/models/project_services/bamboo_service.rb +++ b/app/models/project_services/bamboo_service.rb @@ -86,14 +86,16 @@ class BambooService < CiService end def read_build_page(response) - if response.code != 200 || response.dig('results', 'results', 'size') == '0' - # If actual build link can't be determined, send user to build summary page. - URI.join("#{bamboo_url}/", "browse/#{build_key}").to_s - else - # If actual build link is available, go to build result page. - result_key = response.dig('results', 'results', 'result', get_build_result_index, 'planResultKey', 'key') - URI.join("#{bamboo_url}/", "browse/#{result_key}").to_s - end + key = + if response.code != 200 || response.dig('results', 'results', 'size') == '0' + # If actual build link can't be determined, send user to build summary page. + build_key + else + # If actual build link is available, go to build result page. 
+ response.dig('results', 'results', 'result', get_build_result_index, 'planResultKey', 'key') + end + + build_url("browse/#{key}") end def read_commit_status(response) @@ -117,7 +119,7 @@ class BambooService < CiService end def build_url(path) - URI.join("#{bamboo_url}/", path).to_s + Gitlab::Utils.append_path(bamboo_url, path) end def get_path(path, query_params = {}) diff --git a/app/models/project_services/drone_ci_service.rb b/app/models/project_services/drone_ci_service.rb index 158ae0bf255..5ccc2f019cb 100644 --- a/app/models/project_services/drone_ci_service.rb +++ b/app/models/project_services/drone_ci_service.rb @@ -39,11 +39,9 @@ class DroneCiService < CiService end def commit_status_path(sha, ref) - url = [drone_url, - "gitlab/#{project.full_path}/commits/#{sha}", - "?branch=#{URI.encode(ref.to_s)}&access_token=#{token}"] - - URI.join(*url).to_s + Gitlab::Utils.append_path( + drone_url, + "gitlab/#{project.full_path}/commits/#{sha}?branch=#{URI.encode(ref.to_s)}&access_token=#{token}") end def commit_status(sha, ref) @@ -74,11 +72,9 @@ class DroneCiService < CiService end def build_page(sha, ref) - url = [drone_url, - "gitlab/#{project.full_path}/redirect/commits/#{sha}", - "?branch=#{URI.encode(ref.to_s)}"] - - URI.join(*url).to_s + Gitlab::Utils.append_path( + drone_url, + "gitlab/#{project.full_path}/redirect/commits/#{sha}?branch=#{URI.encode(ref.to_s)}") end def title diff --git a/app/models/project_services/issue_tracker_service.rb b/app/models/project_services/issue_tracker_service.rb index a399982e5ec..f54497fc6d8 100644 --- a/app/models/project_services/issue_tracker_service.rb +++ b/app/models/project_services/issue_tracker_service.rb @@ -9,7 +9,7 @@ class IssueTrackerService < Service # Override this method on services that uses different patterns # This pattern does not support cross-project references # The other code assumes that this pattern is a superset of all - # overridden patterns. See ReferenceRegexes::EXTERNAL_PATTERN + # overridden patterns. See ReferenceRegexes.external_pattern def self.reference_pattern(only_long: false) if only_long /(\b[A-Z][A-Z0-9_]*-)(?<issue>\d+)/ diff --git a/app/models/project_services/jira_service.rb b/app/models/project_services/jira_service.rb index 5a38f48c542..9066a0b7f1d 100644 --- a/app/models/project_services/jira_service.rb +++ b/app/models/project_services/jira_service.rb @@ -54,7 +54,7 @@ class JiraService < IssueTrackerService { username: self.username, password: self.password, - site: URI.join(url, '/').to_s, + site: URI.join(url, '/').to_s, # Intended to find the root context_path: url.path.chomp('/'), auth_type: :basic, read_timeout: 120, diff --git a/app/models/project_services/kubernetes_service.rb b/app/models/project_services/kubernetes_service.rb index 3459ded7ccf..c52a531e5fe 100644 --- a/app/models/project_services/kubernetes_service.rb +++ b/app/models/project_services/kubernetes_service.rb @@ -203,9 +203,7 @@ class KubernetesService < DeploymentService kubeclient = build_kube_client! 
kubeclient.get_pods(namespace: actual_namespace).as_json - rescue Kubeclient::HttpError => err - raise err unless err.error_code == 404 - + rescue Kubeclient::ResourceNotFoundError [] end diff --git a/app/models/project_services/mock_ci_service.rb b/app/models/project_services/mock_ci_service.rb index 6883976f0c8..d8bba58dcbf 100644 --- a/app/models/project_services/mock_ci_service.rb +++ b/app/models/project_services/mock_ci_service.rb @@ -34,10 +34,9 @@ class MockCiService < CiService # http://jenkins.example.com:8888/job/test1/scm/bySHA1/12d65c # def build_page(sha, ref) - url = [mock_service_url, - "#{project.namespace.path}/#{project.path}/status/#{sha}"] - - URI.join(*url).to_s + Gitlab::Utils.append_path( + mock_service_url, + "#{project.namespace.path}/#{project.path}/status/#{sha}") end # Return string with build status or :error symbol @@ -61,10 +60,9 @@ class MockCiService < CiService end def commit_status_path(sha) - url = [mock_service_url, - "#{project.namespace.path}/#{project.path}/status/#{sha}.json"] - - URI.join(*url).to_s + Gitlab::Utils.append_path( + mock_service_url, + "#{project.namespace.path}/#{project.path}/status/#{sha}.json") end def read_commit_status(response) diff --git a/app/models/project_services/teamcity_service.rb b/app/models/project_services/teamcity_service.rb index eeeff5e802a..b8e17087db5 100644 --- a/app/models/project_services/teamcity_service.rb +++ b/app/models/project_services/teamcity_service.rb @@ -132,7 +132,7 @@ class TeamcityService < CiService end def build_url(path) - URI.join("#{teamcity_url}/", path).to_s + Gitlab::Utils.append_path(teamcity_url, path) end def get_path(path) diff --git a/app/models/shard.rb b/app/models/shard.rb index 2fa22bd040c..2e75bc91df0 100644 --- a/app/models/shard.rb +++ b/app/models/shard.rb @@ -9,13 +9,12 @@ class Shard < ActiveRecord::Base # The GitLab config does not change for the lifecycle of the process in_config = Gitlab.config.repositories.storages.keys.map(&:to_s) + in_db = all.pluck(:name) - transaction do - in_db = all.pluck(:name) - missing = in_config - in_db - - missing.map { |name| by_name(name) } - end + # This may race with other processes creating shards at the same time, but + # `by_name` will handle that correctly + missing = in_config - in_db + missing.map { |name| by_name(name) } end def self.by_name(name) diff --git a/app/serializers/environment_status_entity.rb b/app/serializers/environment_status_entity.rb index f87cc894d2f..f6321b9e520 100644 --- a/app/serializers/environment_status_entity.rb +++ b/app/serializers/environment_status_entity.rb @@ -11,7 +11,7 @@ class EnvironmentStatusEntity < Grape::Entity project_environment_path(es.project, es.environment) end - expose :metrics_url, if: ->(*) { can_read_environment? && environment.has_metrics? } do |es| + expose :metrics_url, if: ->(*) { can_read_environment? && deployment.has_metrics? 
} do |es| metrics_project_environment_deployment_path(es.project, es.environment, es.deployment) end @@ -37,7 +37,7 @@ class EnvironmentStatusEntity < Grape::Entity es.deployment.try(:formatted_deployment_time) end - expose :changes, if: ->(*) { Feature.enabled?(:ci_environments_status_changes, project) } + expose :changes private @@ -45,6 +45,10 @@ class EnvironmentStatusEntity < Grape::Entity object.environment end + def deployment + object.deployment + end + def project object.environment.project end diff --git a/app/services/clusters/applications/check_installation_progress_service.rb b/app/services/clusters/applications/check_installation_progress_service.rb index 19dc0478591..ca0f7b30053 100644 --- a/app/services/clusters/applications/check_installation_progress_service.rb +++ b/app/services/clusters/applications/check_installation_progress_service.rb @@ -15,8 +15,9 @@ module Clusters check_timeout end rescue Kubeclient::HttpError => e - Rails.logger.error "Kubernetes error: #{e.class.name} #{e.message}" - app.make_errored!("Kubernetes error") unless app.errored? + Rails.logger.error("Kubernetes error: #{e.error_code} #{e.message}") + Gitlab::Sentry.track_acceptable_exception(e, extra: { scope: 'kubernetes', app_id: app.id }) + app.make_errored!("Kubernetes error: #{e.error_code}") unless app.errored? end private @@ -53,7 +54,7 @@ module Clusters def remove_installation_pod helm_api.delete_pod!(install_command.pod_name) rescue => e - Rails.logger.error "Kubernetes error: #{e.class.name} #{e.message}" + Rails.logger.error("Kubernetes error: #{e.class.name} #{e.message}") # no-op end diff --git a/app/services/clusters/applications/install_service.rb b/app/services/clusters/applications/install_service.rb index 5a24d78e712..f4385748c43 100644 --- a/app/services/clusters/applications/install_service.rb +++ b/app/services/clusters/applications/install_service.rb @@ -13,10 +13,12 @@ module Clusters ClusterWaitForAppInstallationWorker.perform_in( ClusterWaitForAppInstallationWorker::INTERVAL, app.name, app.id) rescue Kubeclient::HttpError => e - Rails.logger.error "Kubernetes error: #{e.class.name} #{e.message}" - app.make_errored!("Kubernetes error.") + Rails.logger.error("Kubernetes error: #{e.error_code} #{e.message}") + Gitlab::Sentry.track_acceptable_exception(e, extra: { scope: 'kubernetes', app_id: app.id }) + app.make_errored!("Kubernetes error: #{e.error_code}") rescue StandardError => e Rails.logger.error "Can't start installation process: #{e.class.name} #{e.message}" + Gitlab::Sentry.track_acceptable_exception(e, extra: { scope: 'kubernetes', app_id: app.id }) app.make_errored!("Can't start installation process.") end end diff --git a/app/services/clusters/gcp/kubernetes/fetch_kubernetes_token_service.rb b/app/services/clusters/gcp/kubernetes/fetch_kubernetes_token_service.rb index 277cc4b788d..4ad04ab801e 100644 --- a/app/services/clusters/gcp/kubernetes/fetch_kubernetes_token_service.rb +++ b/app/services/clusters/gcp/kubernetes/fetch_kubernetes_token_service.rb @@ -21,10 +21,7 @@ module Clusters def get_secret kubeclient.get_secret(service_account_token_name, namespace).as_json - rescue Kubeclient::HttpError => err - raise err unless err.error_code == 404 - - nil + rescue Kubeclient::ResourceNotFoundError end end end diff --git a/app/services/issuable/clone/attributes_rewriter.rb b/app/services/issuable/clone/attributes_rewriter.rb new file mode 100644 index 00000000000..0300cc0d8d3 --- /dev/null +++ b/app/services/issuable/clone/attributes_rewriter.rb @@ -0,0 +1,62 @@ +# 
frozen_string_literal: true + +module Issuable + module Clone + class AttributesRewriter < ::Issuable::Clone::BaseService + def initialize(current_user, original_entity, new_entity) + @current_user = current_user + @original_entity = original_entity + @new_entity = new_entity + end + + def execute + new_entity.update(milestone: cloneable_milestone, labels: cloneable_labels) + copy_resource_label_events + end + + private + + def cloneable_milestone + title = original_entity.milestone&.title + return unless title + + params = { title: title, project_ids: new_entity.project&.id, group_ids: group&.id } + + milestones = MilestonesFinder.new(params).execute + milestones.first + end + + def cloneable_labels + params = { + project_id: new_entity.project&.id, + group_id: group&.id, + title: original_entity.labels.select(:title), + include_ancestor_groups: true + } + + params[:only_group_labels] = true if new_parent.is_a?(Group) + + LabelsFinder.new(current_user, params).execute + end + + def copy_resource_label_events + original_entity.resource_label_events.find_in_batches do |batch| + events = batch.map do |event| + entity_key = new_entity.is_a?(Issue) ? 'issue_id' : 'epic_id' + # rubocop: disable CodeReuse/ActiveRecord + event.attributes + .except('id', 'reference', 'reference_html') + .merge(entity_key => new_entity.id, 'action' => ResourceLabelEvent.actions[event.action]) + # rubocop: enable CodeReuse/ActiveRecord + end + + Gitlab::Database.bulk_insert(ResourceLabelEvent.table_name, events) + end + end + + def entity_key + new_entity.class.name.parameterize('_').foreign_key + end + end + end +end diff --git a/app/services/issuable/clone/base_service.rb b/app/services/issuable/clone/base_service.rb new file mode 100644 index 00000000000..42dd9c666f5 --- /dev/null +++ b/app/services/issuable/clone/base_service.rb @@ -0,0 +1,60 @@ +# frozen_string_literal: true + +module Issuable + module Clone + class BaseService < IssuableBaseService + attr_reader :original_entity, :new_entity + + alias_method :old_project, :project + + def execute(original_entity, new_project = nil) + @original_entity = original_entity + + # Using transaction because of a high resources footprint + # on rewriting notes (unfolding references) + # + ActiveRecord::Base.transaction do + @new_entity = create_new_entity + + update_new_entity + update_old_entity + create_notes + end + end + + private + + def update_new_entity + rewriters = [ContentRewriter, AttributesRewriter] + + rewriters.each do |rewriter| + rewriter.new(current_user, original_entity, new_entity).execute + end + end + + def update_old_entity + close_issue + end + + def create_notes + add_note_from + add_note_to + end + + def close_issue + close_service = Issues::CloseService.new(old_project, current_user) + close_service.execute(original_entity, notifications: false, system_note: false) + end + + def new_parent + new_entity.project ? 
new_entity.project : new_entity.group + end + + def group + if new_entity.project&.group && current_user.can?(:read_group, new_entity.project.group) + new_entity.project.group + end + end + end + end +end diff --git a/app/services/issuable/clone/content_rewriter.rb b/app/services/issuable/clone/content_rewriter.rb new file mode 100644 index 00000000000..e1e0b75085d --- /dev/null +++ b/app/services/issuable/clone/content_rewriter.rb @@ -0,0 +1,65 @@ +# frozen_string_literal: true + +module Issuable + module Clone + class ContentRewriter < ::Issuable::Clone::BaseService + def initialize(current_user, original_entity, new_entity) + @current_user = current_user + @original_entity = original_entity + @new_entity = new_entity + @project = original_entity.project + end + + def execute + rewrite_description + rewrite_award_emoji(original_entity, new_entity) + rewrite_notes + end + + private + + def rewrite_description + new_entity.update(description: rewrite_content(original_entity.description)) + end + + def rewrite_notes + original_entity.notes_with_associations.find_each do |note| + new_note = note.dup + new_params = { + project: new_entity.project, noteable: new_entity, + note: rewrite_content(new_note.note), + created_at: note.created_at, + updated_at: note.updated_at + } + + if note.system_note_metadata + new_params[:system_note_metadata] = note.system_note_metadata.dup + end + + new_note.update(new_params) + + rewrite_award_emoji(note, new_note) + end + end + + def rewrite_content(content) + return unless content + + rewriters = [Gitlab::Gfm::ReferenceRewriter, Gitlab::Gfm::UploadsRewriter] + + rewriters.inject(content) do |text, klass| + rewriter = klass.new(text, old_project, current_user) + rewriter.rewrite(new_parent) + end + end + + def rewrite_award_emoji(old_awardable, new_awardable) + old_awardable.award_emoji.each do |award| + new_award = award.dup + new_award.awardable = new_awardable + new_award.save + end + end + end + end +end diff --git a/app/services/issues/move_service.rb b/app/services/issues/move_service.rb index d2bdba1e627..41b6a96b005 100644 --- a/app/services/issues/move_service.rb +++ b/app/services/issues/move_service.rb @@ -1,165 +1,66 @@ # frozen_string_literal: true module Issues - class MoveService < Issues::BaseService + class MoveService < Issuable::Clone::BaseService MoveError = Class.new(StandardError) - def execute(issue, new_project) - @old_issue = issue - @old_project = @project - @new_project = new_project + def execute(issue, target_project) + @target_project = target_project - unless issue.can_move?(current_user, new_project) + unless issue.can_move?(current_user, @target_project) raise MoveError, 'Cannot move issue due to insufficient permissions!' end - if @project == new_project + if @project == @target_project raise MoveError, 'Cannot move issue to project it originates from!' 
end - # Using transaction because of a high resources footprint - # on rewriting notes (unfolding references) - # - ActiveRecord::Base.transaction do - @new_issue = create_new_issue - - update_new_issue - update_old_issue - end + super notify_participants - @new_issue + new_entity end private - def update_new_issue - rewrite_notes - copy_resource_label_events - rewrite_issue_award_emoji - add_note_moved_from - end + def update_old_entity + super - def update_old_issue - add_note_moved_to - close_issue mark_as_moved end - def create_new_issue - new_params = { id: nil, iid: nil, label_ids: cloneable_label_ids, - milestone_id: cloneable_milestone_id, - project: @new_project, author: @old_issue.author, - description: rewrite_content(@old_issue.description), - assignee_ids: @old_issue.assignee_ids } - - new_params = @old_issue.serializable_hash.symbolize_keys.merge(new_params) - CreateService.new(@new_project, @current_user, new_params).execute - end - - # rubocop: disable CodeReuse/ActiveRecord - def cloneable_label_ids - params = { - project_id: @new_project.id, - title: @old_issue.labels.pluck(:title), - include_ancestor_groups: true - } + def create_new_entity + new_params = { + id: nil, + iid: nil, + project: @target_project, + author: original_entity.author, + assignee_ids: original_entity.assignee_ids + } - LabelsFinder.new(current_user, params).execute.pluck(:id) + new_params = original_entity.serializable_hash.symbolize_keys.merge(new_params) + CreateService.new(@target_project, @current_user, new_params).execute end - # rubocop: enable CodeReuse/ActiveRecord - - def cloneable_milestone_id - title = @old_issue.milestone&.title - return unless title - - if @new_project.group && can?(current_user, :read_group, @new_project.group) - group_id = @new_project.group.id - end - - params = - { title: title, project_ids: @new_project.id, group_ids: group_id } - milestones = MilestonesFinder.new(params).execute - milestones.first&.id - end - - def rewrite_notes - @old_issue.notes_with_associations.find_each do |note| - new_note = note.dup - new_params = { project: @new_project, noteable: @new_issue, - note: rewrite_content(new_note.note), - created_at: note.created_at, - updated_at: note.updated_at } - - new_note.update(new_params) - - rewrite_award_emoji(note, new_note) - end - end - - # rubocop: disable CodeReuse/ActiveRecord - def copy_resource_label_events - @old_issue.resource_label_events.find_in_batches do |batch| - events = batch.map do |event| - event.attributes - .except('id', 'reference', 'reference_html') - .merge('issue_id' => @new_issue.id, 'action' => ResourceLabelEvent.actions[event.action]) - end - - Gitlab::Database.bulk_insert(ResourceLabelEvent.table_name, events) - end - end - # rubocop: enable CodeReuse/ActiveRecord - - def rewrite_issue_award_emoji - rewrite_award_emoji(@old_issue, @new_issue) - end - - def rewrite_award_emoji(old_awardable, new_awardable) - old_awardable.award_emoji.each do |award| - new_award = award.dup - new_award.awardable = new_awardable - new_award.save - end - end - - def rewrite_content(content) - return unless content - - rewriters = [Gitlab::Gfm::ReferenceRewriter, - Gitlab::Gfm::UploadsRewriter] - - rewriters.inject(content) do |text, klass| - rewriter = klass.new(text, @old_project, @current_user) - rewriter.rewrite(@new_project) - end + def mark_as_moved + original_entity.update(moved_to: new_entity) end - def close_issue - close_service = CloseService.new(@old_project, @current_user) - close_service.execute(@old_issue, notifications: false, 
system_note: false) + def notify_participants + notification_service.async.issue_moved(original_entity, new_entity, @current_user) end - def add_note_moved_from - SystemNoteService.noteable_moved(@new_issue, @new_project, - @old_issue, @current_user, + def add_note_from + SystemNoteService.noteable_moved(new_entity, @target_project, + original_entity, current_user, direction: :from) end - def add_note_moved_to - SystemNoteService.noteable_moved(@old_issue, @old_project, - @new_issue, @current_user, + def add_note_to + SystemNoteService.noteable_moved(original_entity, old_project, + new_entity, current_user, direction: :to) end - - def mark_as_moved - @old_issue.update(moved_to: @new_issue) - end - - def notify_participants - notification_service.async.issue_moved(@old_issue, @new_issue, @current_user) - end end end diff --git a/app/services/merge_requests/reload_diffs_service.rb b/app/services/merge_requests/reload_diffs_service.rb index b47d8f3f63a..c64b2e99b52 100644 --- a/app/services/merge_requests/reload_diffs_service.rb +++ b/app/services/merge_requests/reload_diffs_service.rb @@ -29,10 +29,6 @@ module MergeRequests # rubocop: disable CodeReuse/ActiveRecord def clear_cache(new_diff) - # Executing the iteration we cache highlighted diffs for each diff file of - # MergeRequestDiff. - cacheable_collection(new_diff).write_cache - # Remove cache for all diffs on this MR. Do not use the association on the # model, as that will interfere with other actions happening when # reloading the diff. diff --git a/app/services/notes/base_service.rb b/app/services/notes/base_service.rb new file mode 100644 index 00000000000..c1260837c12 --- /dev/null +++ b/app/services/notes/base_service.rb @@ -0,0 +1,13 @@ +# frozen_string_literal: true + +module Notes + class BaseService < ::BaseService + def clear_noteable_diffs_cache(note) + if note.is_a?(DiffNote) && + note.discussion_first_note? && + note.position.unfolded_diff?(project.repository) + note.noteable.diffs.clear_cache + end + end + end +end diff --git a/app/services/notes/create_service.rb b/app/services/notes/create_service.rb index 049e6c5a871..e03789e3ca9 100644 --- a/app/services/notes/create_service.rb +++ b/app/services/notes/create_service.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true module Notes - class CreateService < ::BaseService + class CreateService < ::Notes::BaseService def execute merge_request_diff_head_sha = params.delete(:merge_request_diff_head_sha) @@ -35,6 +35,7 @@ module Notes if !only_commands && note.save todo_service.new_note(note, current_user) + clear_noteable_diffs_cache(note) end if command_params.present? diff --git a/app/services/notes/destroy_service.rb b/app/services/notes/destroy_service.rb index 64e9accd97f..fa0c2c5c86b 100644 --- a/app/services/notes/destroy_service.rb +++ b/app/services/notes/destroy_service.rb @@ -1,11 +1,13 @@ # frozen_string_literal: true module Notes - class DestroyService < BaseService + class DestroyService < ::Notes::BaseService def execute(note) TodoService.new.destroy_target(note) do |note| note.destroy end + + clear_noteable_diffs_cache(note) end end end diff --git a/app/services/users/build_service.rb b/app/services/users/build_service.rb index de6ff92d1da..24ac20fdd29 100644 --- a/app/services/users/build_service.rb +++ b/app/services/users/build_service.rb @@ -95,10 +95,6 @@ module Users if params[:reset_password] user_params.merge!(force_random_password: true, password_expires_at: nil) end - - if user_default_internal_regex_enabled? 
&& !user_params.key?(:external) - user_params[:external] = user_external? - end else allowed_signup_params = signup_params allowed_signup_params << :skip_confirmation if skip_authorization @@ -109,6 +105,10 @@ module Users end end + if user_default_internal_regex_enabled? && !user_params.key?(:external) + user_params[:external] = user_external? + end + user_params end diff --git a/app/uploaders/file_uploader.rb b/app/uploaders/file_uploader.rb index ffc1e5f75ca..e90599f2505 100644 --- a/app/uploaders/file_uploader.rb +++ b/app/uploaders/file_uploader.rb @@ -149,9 +149,9 @@ class FileUploader < GitlabUploader # return a new uploader with a file copy on another project def self.copy_to(uploader, to_project) - moved = uploader.dup.tap do |u| - u.model = to_project - end + moved = self.new(to_project) + moved.object_store = uploader.object_store + moved.filename = uploader.filename moved.copy_file(uploader.file) moved diff --git a/app/views/admin/application_settings/_ci_cd.html.haml b/app/views/admin/application_settings/_ci_cd.html.haml index adb496495d1..0d42094fc89 100644 --- a/app/views/admin/application_settings/_ci_cd.html.haml +++ b/app/views/admin/application_settings/_ci_cd.html.haml @@ -42,12 +42,12 @@ <code>4 mins 2 sec</code>, <code>2h42min</code>. = link_to icon('question-circle'), help_page_path('user/admin_area/settings/continuous_integration', anchor: 'default-artifacts-expiration') .form-group - = f.label :archive_builds_in_human_readable, 'Archive builds in', class: 'label-bold' + = f.label :archive_builds_in_human_readable, 'Archive jobs', class: 'label-bold' = f.text_field :archive_builds_in_human_readable, class: 'form-control', placeholder: 'never' .form-text.text-muted - Set the duration when build gonna be considered old. Archived builds cannot be retried. - Make it empty to never expire builds. It has to be larger than 1 day. - The default unit is in seconds, but you can define an alternative. For example: - <code>4 mins 2 sec</code>, <code>2h42min</code>. + Set the duration for which the jobs will be considered as old and expired. + Once that time passes, the jobs will be archived and no longer able to be + retried. Make it empty to never expire jobs. It has to be no less than 1 day, + for example: <code>15 days</code>, <code>1 month</code>, <code>2 years</code>. = f.submit 'Save changes', class: "btn btn-success" diff --git a/app/views/admin/background_jobs/show.html.haml b/app/views/admin/background_jobs/show.html.haml index faa5854bb40..a0a00ac5d96 100644 --- a/app/views/admin/background_jobs/show.html.haml +++ b/app/views/admin/background_jobs/show.html.haml @@ -6,40 +6,5 @@ %p.light GitLab uses #{link_to "sidekiq", "http://sidekiq.org/"} library for async job processing %hr - - .card - .card-header Sidekiq running processes - .card-body - - if @sidekiq_processes.empty? - %h4.cred - %i.fa.fa-exclamation-triangle - There are no running sidekiq processes. Please restart GitLab - - else - .table-holder - %table.table - %thead - %th USER - %th PID - %th CPU - %th MEM - %th STATE - %th START - %th COMMAND - %tbody - - @sidekiq_processes.each do |process| - %tr - %td= gitlab_config.user - - parse_sidekiq_ps(process).each do |value| - %td= value - .clearfix - %p - %i.fa.fa-exclamation-circle - If '[#{@concurrency} of #{@concurrency} busy]' is shown, restart GitLab with 'sudo service gitlab reload'. 
- %p - %i.fa.fa-exclamation-circle - If more than one sidekiq process is listed, stop GitLab, kill the remaining sidekiq processes (sudo pkill -u #{gitlab_config.user} -f sidekiq) and restart GitLab. - - - .card %iframe{ src: sidekiq_path, width: '100%', height: 970, style: "border: 0" } diff --git a/app/views/admin/impersonation_tokens/index.html.haml b/app/views/admin/impersonation_tokens/index.html.haml index 9e490713ef3..8e869fb4b71 100644 --- a/app/views/admin/impersonation_tokens/index.html.haml +++ b/app/views/admin/impersonation_tokens/index.html.haml @@ -5,6 +5,11 @@ .row.prepend-top-default .col-lg-12 + - if @new_impersonation_token + = render "shared/personal_access_tokens_created_container", new_token_value: @new_impersonation_token, + container_title: 'Your New Impersonation Token', + clipboard_button_title: 'Copy impersonation token to clipboard' + = render "shared/personal_access_tokens_form", path: admin_user_impersonation_tokens_path, impersonation: true, token: @impersonation_token, scopes: @scopes = render "shared/personal_access_tokens_table", impersonation: true, active_tokens: @active_impersonation_tokens, inactive_tokens: @inactive_impersonation_tokens diff --git a/app/views/discussions/_discussion.html.haml b/app/views/discussions/_discussion.html.haml index 1765251c93d..10187129a33 100644 --- a/app/views/discussions/_discussion.html.haml +++ b/app/views/discussions/_discussion.html.haml @@ -1,12 +1,12 @@ - expanded = discussion.expanded? -%li.note.note-discussion.timeline-entry +%li.note.note-discussion.timeline-entry.unstyled-comments .timeline-entry-inner - .timeline-icon - = link_to user_path(discussion.author) do - = image_tag avatar_icon_for_user(discussion.author), class: "avatar s40" .timeline-content .discussion.js-toggle-container{ data: { discussion_id: discussion.id } } .discussion-header + .timeline-icon + = link_to user_path(discussion.author) do + = image_tag avatar_icon_for_user(discussion.author), class: "avatar s40" .discussion-actions %button.note-action-button.discussion-toggle-button.js-toggle-button{ type: "button", class: ("js-toggle-lazy-diff" unless expanded) } - if expanded diff --git a/app/views/events/event/_push.html.haml b/app/views/events/event/_push.html.haml index 82693ec832e..69914fccc48 100644 --- a/app/views/events/event/_push.html.haml +++ b/app/views/events/event/_push.html.haml @@ -7,10 +7,10 @@ .event-title.d-flex.flex-wrap = inline_event_icon(event) %span.event-type.d-inline-block.append-right-4.pushed #{event.action_name} #{event.ref_type} - %span + %span.append-right-4 - commits_link = project_commits_path(project, event.ref_name) - should_link = event.tag? ? project.repository.tag_exists?(event.ref_name) : project.repository.branch_exists?(event.ref_name) - = link_to_if should_link, event.ref_name, commits_link, class: 'ref-name append-right-4' + = link_to_if should_link, event.ref_name, commits_link, class: 'ref-name' = render "events/event_scope", event: event diff --git a/app/views/layouts/nav/_dashboard.html.haml b/app/views/layouts/nav/_dashboard.html.haml index 8f8b6b454d9..ea5f2b166b4 100644 --- a/app/views/layouts/nav/_dashboard.html.haml +++ b/app/views/layouts/nav/_dashboard.html.haml @@ -64,7 +64,7 @@ = link_to '#', class: 'dashboard-shortcuts-web-ide', title: _('Web IDE') do = _('Web IDE') - - if Gitlab::Sherlock.enabled? || can?(current_user, :read_instance_statistics) + - if show_separator? 
%li.line-separator.d-none.d-sm-block = render_if_exists 'dashboard/operations/nav_link' - if can?(current_user, :read_instance_statistics) diff --git a/app/views/profiles/personal_access_tokens/index.html.haml b/app/views/profiles/personal_access_tokens/index.html.haml index c10d4ea1a4d..c1e1eaff942 100644 --- a/app/views/profiles/personal_access_tokens/index.html.haml +++ b/app/views/profiles/personal_access_tokens/index.html.haml @@ -14,17 +14,7 @@ .col-lg-8 - if @new_personal_access_token - .created-personal-access-token-container - %h5.prepend-top-0 - Your New Personal Access Token - .form-group - .input-group - = text_field_tag 'created-personal-access-token', @new_personal_access_token, readonly: true, class: "form-control js-select-on-focus", 'aria-describedby' => "created-personal-access-token-help-block" - %span.input-group-append - = clipboard_button(text: @new_personal_access_token, title: "Copy personal access token to clipboard", placement: "left", class: "input-group-text btn-default btn-clipboard") - %span#created-personal-access-token-help-block.form-text.text-muted.text-danger Make sure you save it - you won't be able to access it again. - - %hr + = render "shared/personal_access_tokens_created_container", new_token_value: @new_personal_access_token = render "shared/personal_access_tokens_form", path: profile_personal_access_tokens_path, impersonation: false, token: @personal_access_token, scopes: @scopes diff --git a/app/views/projects/diffs/_text_file.html.haml b/app/views/projects/diffs/_text_file.html.haml index e8a5e63e59e..bc9f6c71fa8 100644 --- a/app/views/projects/diffs/_text_file.html.haml +++ b/app/views/projects/diffs/_text_file.html.haml @@ -3,7 +3,7 @@ .suppressed-container %a.show-suppressed-diff.js-show-suppressed-diff Changes suppressed. Click to show. -%table.text-file.diff-wrap-lines.code.js-syntax-highlight{ data: diff_view_data, class: too_big ? 'hide' : '' } +%table.text-file.diff-wrap-lines.code.js-syntax-highlight.commit-diff{ data: diff_view_data, class: too_big ? 'hide' : '' } = render partial: "projects/diffs/line", collection: diff_file.highlighted_diff_lines, as: :line, diff --git a/app/views/projects/edit.html.haml b/app/views/projects/edit.html.haml index 3aff5538813..de768696fe9 100644 --- a/app/views/projects/edit.html.haml +++ b/app/views/projects/edit.html.haml @@ -165,7 +165,7 @@ .input-group .input-group-prepend .input-group-text - #{URI.join(root_url, @project.namespace.full_path)}/ + #{Gitlab::Utils.append_path(root_url, @project.namespace.full_path)}/ = f.text_field :path, class: 'form-control' %ul %li Be careful. Renaming a project's repository can have unintended side effects. 
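Several changes above swap URI.join for Gitlab::Utils.append_path when building URLs from a configured host (ImportHelper, BambooService, DroneCiService, MockCiService, TeamcityService, and the project edit view). One practical difference: URI.join performs RFC 3986 resolution, so a base URL without a trailing slash loses its last path segment, which matters for services hosted under a sub-path. A short plain-Ruby illustration follows; the append_path defined here is an approximation for the sketch, not the actual Gitlab::Utils implementation.

require 'uri'

base = 'http://example.com/bamboo'        # service hosted under a path, no trailing slash
URI.join(base, 'browse/KEY').to_s         # => "http://example.com/browse/KEY" (path segment lost)
URI.join("#{base}/", 'browse/KEY').to_s   # => "http://example.com/bamboo/browse/KEY"

# Approximation of the helper's behaviour: join host and path with exactly one slash.
def append_path(host, path)
  "#{host.to_s.sub(%r{/+\z}, '')}/#{path.to_s.sub(%r{\A/+}, '')}"
end

append_path(base, '/browse/KEY')          # => "http://example.com/bamboo/browse/KEY"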
diff --git a/app/views/projects/merge_requests/_mr_title.html.haml b/app/views/projects/merge_requests/_mr_title.html.haml index 1bf42ded97a..3cd83feb842 100644 --- a/app/views/projects/merge_requests/_mr_title.html.haml +++ b/app/views/projects/merge_requests/_mr_title.html.haml @@ -37,6 +37,6 @@ = link_to 'Reopen', merge_request_path(@merge_request, merge_request: { state_event: :reopen }), method: :put, class: 'reopen-mr-link', title: 'Reopen merge request' - if can_update_merge_request - = link_to 'Edit', edit_project_merge_request_path(@project, @merge_request), class: "d-none d-sm-none d-md-block btn btn-grouped js-issuable-edit" + = link_to 'Edit', edit_project_merge_request_path(@project, @merge_request), class: "d-none d-sm-none d-md-block btn btn-grouped js-issuable-edit qa-edit-button" = render 'shared/issuable/close_reopen_button', issuable: @merge_request, can_update: can_update_merge_request, can_reopen: can_update_merge_request diff --git a/app/views/shared/_label.html.haml b/app/views/shared/_label.html.haml index 71f34c0d85b..21ea188d7b3 100644 --- a/app/views/shared/_label.html.haml +++ b/app/views/shared/_label.html.haml @@ -23,28 +23,29 @@ %li.inline = link_to edit_label_path(label), class: 'btn btn-transparent label-action edit has-tooltip', title: _('Edit'), data: { placement: 'bottom' }, aria_label: _('Edit') do = sprite_icon('pencil') - %li.inline - .dropdown - %button{ type: 'button', class: 'btn btn-transparent js-label-options-dropdown label-action', data: { toggle: 'dropdown' }, aria_label: _('Label actions dropdown') } - = sprite_icon('ellipsis_v') - .dropdown-menu.dropdown-open-left - %ul - - if label.is_a?(ProjectLabel) && label.project.group && can?(current_user, :admin_label, label.project.group) - %li - %button.js-promote-project-label-button.btn.btn-transparent.btn-action{ disabled: true, type: 'button', - data: { url: promote_project_label_path(label.project, label), - label_title: label.title, - label_color: label.color, - label_text_color: label.text_color, - group_name: label.project.group.name, - target: '#promote-label-modal', - container: 'body', - toggle: 'modal' } } - = _('Promote to group label') - - if can?(current_user, :admin_label, label) - %li - %span{ data: { toggle: 'modal', target: "#modal-delete-label-#{label.id}" } } - %button.text-danger.remove-row{ type: 'button' }= _('Delete') + - if can?(current_user, :admin_label, label) + %li.inline + .dropdown + %button{ type: 'button', class: 'btn btn-transparent js-label-options-dropdown label-action', data: { toggle: 'dropdown' }, aria_label: _('Label actions dropdown') } + = sprite_icon('ellipsis_v') + .dropdown-menu.dropdown-open-left + %ul + - if label.is_a?(ProjectLabel) && label.project.group && can?(current_user, :admin_label, label.project.group) + %li + %button.js-promote-project-label-button.btn.btn-transparent.btn-action{ disabled: true, type: 'button', + data: { url: promote_project_label_path(label.project, label), + label_title: label.title, + label_color: label.color, + label_text_color: label.text_color, + group_name: label.project.group.name, + target: '#promote-label-modal', + container: 'body', + toggle: 'modal' } } + = _('Promote to group label') + - if can?(current_user, :admin_label, label) + %li + %span{ data: { toggle: 'modal', target: "#modal-delete-label-#{label.id}" } } + %button.text-danger.remove-row{ type: 'button' }= _('Delete') - if current_user %li.inline.label-subscription - if can_subscribe_to_label_in_different_levels?(label) diff --git 
a/app/views/shared/_personal_access_tokens_created_container.html.haml b/app/views/shared/_personal_access_tokens_created_container.html.haml new file mode 100644 index 00000000000..3150d39b84a --- /dev/null +++ b/app/views/shared/_personal_access_tokens_created_container.html.haml @@ -0,0 +1,14 @@ +- container_title = local_assigns.fetch(:container_title, 'Your New Personal Access Token') +- clipboard_button_title = local_assigns.fetch(:clipboard_button_title, 'Copy personal access token to clipboard') + +.created-personal-access-token-container + %h5.prepend-top-0 + = container_title + .form-group + .input-group + = text_field_tag 'created-personal-access-token', new_token_value, readonly: true, class: "form-control js-select-on-focus", 'aria-describedby' => "created-token-help-block" + %span.input-group-append + = clipboard_button(text: new_token_value, title: clipboard_button_title, placement: "left", class: "input-group-text btn-default btn-clipboard") + %span#created-token-help-block.form-text.text-muted.text-danger Make sure you save it - you won't be able to access it again. + +%hr diff --git a/app/views/shared/_personal_access_tokens_table.html.haml b/app/views/shared/_personal_access_tokens_table.html.haml index cadac1cc99d..2efd03d4867 100644 --- a/app/views/shared/_personal_access_tokens_table.html.haml +++ b/app/views/shared/_personal_access_tokens_table.html.haml @@ -15,8 +15,6 @@ %th Created %th Expires %th Scopes - - if impersonation - %th Token %th %tbody - active_tokens.each do |token| @@ -30,10 +28,6 @@ - else %span.token-never-expires-label Never %td= token.scopes.present? ? token.scopes.join(", ") : "<no scopes selected>" - - if impersonation - %td.token-token-container - = text_field_tag 'impersonation-token-token', token.token, readonly: true, class: "form-control" - = clipboard_button(text: token.token) - path = impersonation ? revoke_admin_user_impersonation_token_path(token.user, token) : revoke_profile_personal_access_token_path(token) %td= link_to "Revoke", path, method: :put, class: "btn btn-danger float-right", data: { confirm: "Are you sure you want to revoke this #{type} Token? This action cannot be undone." } - else diff --git a/app/views/shared/notes/_note.html.haml b/app/views/shared/notes/_note.html.haml index 84adbd444c5..bc918430823 100644 --- a/app/views/shared/notes/_note.html.haml +++ b/app/views/shared/notes/_note.html.haml @@ -5,7 +5,7 @@ - note_editable = can?(current_user, :admin_note, note) - note_counter = local_assigns.fetch(:note_counter, 0) -%li.timeline-entry{ id: dom_id(note), +%li.timeline-entry.note-wrapper.outlined{ id: dom_id(note), class: ["note", "note-row-#{note.id}", ('system-note' if note.system)], data: { author_id: note.author.id, editable: note_editable, diff --git a/app/views/shared/notes/_notes_with_form.html.haml b/app/views/shared/notes/_notes_with_form.html.haml index ec1e10bb0c1..4c4050c6054 100644 --- a/app/views/shared/notes/_notes_with_form.html.haml +++ b/app/views/shared/notes/_notes_with_form.html.haml @@ -8,7 +8,7 @@ - if can_create_note? 
.notes.notes-form.timeline - .timeline-entry + .timeline-entry.note-form .timeline-entry-inner .flash-container.timeline-content diff --git a/app/workers/build_finished_worker.rb b/app/workers/build_finished_worker.rb index 51cbbe8882e..61d866b1f02 100644 --- a/app/workers/build_finished_worker.rb +++ b/app/workers/build_finished_worker.rb @@ -13,7 +13,7 @@ class BuildFinishedWorker BuildTraceSectionsWorker.new.perform(build.id) BuildCoverageWorker.new.perform(build.id) - # We execute that async as this are two indepentent operations that can be executed after TraceSections and Coverage + # We execute that async as this are two independent operations that can be executed after TraceSections and Coverage BuildHooksWorker.perform_async(build.id) ArchiveTraceWorker.perform_async(build.id) end diff --git a/app/workers/process_commit_worker.rb b/app/workers/process_commit_worker.rb index 7b167c95c29..29a7f8e691a 100644 --- a/app/workers/process_commit_worker.rb +++ b/app/workers/process_commit_worker.rb @@ -1,6 +1,6 @@ # frozen_string_literal: true -# Worker for processing individiual commit messages pushed to a repository. +# Worker for processing individual commit messages pushed to a repository. # # Jobs for this worker are scheduled for every commit that is being pushed. As a # result of this the workload of this worker should be kept to a bare minimum. diff --git a/bin/rails b/bin/rails index 228f812ccaf..d21b64b3007 100755 --- a/bin/rails +++ b/bin/rails @@ -1,7 +1,7 @@ #!/usr/bin/env ruby # Remove this block when upgraded to rails 5.0. -unless %w[1 true].include?(ENV["RAILS5"]) +if %w[0 false].include?(ENV["RAILS5"]) begin load File.expand_path('../spring', __FILE__) rescue LoadError => e @@ -1,7 +1,7 @@ #!/usr/bin/env ruby # Remove this block when upgraded to rails 5.0. -unless %w[1 true].include?(ENV["RAILS5"]) +if %w[0 false].include?(ENV["RAILS5"]) begin load File.expand_path('../spring', __FILE__) rescue LoadError => e diff --git a/bin/rspec b/bin/rspec index 26583242051..b0770e30a70 100755 --- a/bin/rspec +++ b/bin/rspec @@ -2,7 +2,7 @@ # Remove these two lines below when upgraded to rails 5.0. # Allow run `rspec` command as `RAILS5=1 rspec ...` instead of `BUNDLE_GEMFILE=Gemfile.rails5 rspec ...` -gemfile = %w[1 true].include?(ENV["RAILS5"]) ? "Gemfile.rails5" : "Gemfile" +gemfile = %w[0 false].include?(ENV["RAILS5"]) ? "Gemfile.rails4" : "Gemfile" ENV['BUNDLE_GEMFILE'] ||= File.expand_path("../#{gemfile}", __dir__) begin diff --git a/bin/setup b/bin/setup index ec1ebe02950..34bb667087a 100755 --- a/bin/setup +++ b/bin/setup @@ -1,7 +1,7 @@ #!/usr/bin/env ruby def rails5? - %w[1 true].include?(ENV["RAILS5"]) + !%w[0 false].include?(ENV["RAILS5"]) end require "pathname" diff --git a/changelogs/unreleased/28682-can-merge-branch-before-build-is-started.yml b/changelogs/unreleased/28682-can-merge-branch-before-build-is-started.yml new file mode 100644 index 00000000000..5ffd93e098f --- /dev/null +++ b/changelogs/unreleased/28682-can-merge-branch-before-build-is-started.yml @@ -0,0 +1,5 @@ +--- +title: Strictly require a pipeline to merge. 
+merge_request: 22911 +author: +type: changed diff --git a/changelogs/unreleased/52940-fix-internal-email-pattern-not-respected.yml b/changelogs/unreleased/52940-fix-internal-email-pattern-not-respected.yml new file mode 100644 index 00000000000..98e15a5cc0a --- /dev/null +++ b/changelogs/unreleased/52940-fix-internal-email-pattern-not-respected.yml @@ -0,0 +1,5 @@ +--- +title: Fix a bug where internal email pattern wasn't respected +merge_request: 22516 +author: +type: fixed diff --git a/changelogs/unreleased/53289-update-haml_lint-to-0-28-0.yml b/changelogs/unreleased/53289-update-haml_lint-to-0-28-0.yml new file mode 100644 index 00000000000..9a16666c416 --- /dev/null +++ b/changelogs/unreleased/53289-update-haml_lint-to-0-28-0.yml @@ -0,0 +1,5 @@ +--- +title: Update haml_lint to 0.28.0 +merge_request: 22660 +author: Takuya Noguchi +type: other diff --git a/changelogs/unreleased/53291-update-ffaker-to-2-10-0.yml b/changelogs/unreleased/53291-update-ffaker-to-2-10-0.yml new file mode 100644 index 00000000000..a1b95df5e32 --- /dev/null +++ b/changelogs/unreleased/53291-update-ffaker-to-2-10-0.yml @@ -0,0 +1,5 @@ +--- +title: Update ffaker to 2.10.0 +merge_request: 22661 +author: Takuya Noguchi +type: other diff --git a/changelogs/unreleased/53626-update-config-map-on-install-retry.yml b/changelogs/unreleased/53626-update-config-map-on-install-retry.yml new file mode 100644 index 00000000000..38e79c06c89 --- /dev/null +++ b/changelogs/unreleased/53626-update-config-map-on-install-retry.yml @@ -0,0 +1,5 @@ +--- +title: Update config map for gitlab managed application if already present on install +merge_request: 22969 +author: +type: other diff --git a/changelogs/unreleased/53636-fix-rendering-of-any-user-filter.yml b/changelogs/unreleased/53636-fix-rendering-of-any-user-filter.yml new file mode 100644 index 00000000000..a59a276a334 --- /dev/null +++ b/changelogs/unreleased/53636-fix-rendering-of-any-user-filter.yml @@ -0,0 +1,5 @@ +--- +title: Fix rendering of filter bar tokens for special values +merge_request: 22865 +author: Heinrich Lee Yu +type: fixed diff --git a/changelogs/unreleased/53816-empty-label-menu-if-not-logged-in.yml b/changelogs/unreleased/53816-empty-label-menu-if-not-logged-in.yml new file mode 100644 index 00000000000..a9ca56303eb --- /dev/null +++ b/changelogs/unreleased/53816-empty-label-menu-if-not-logged-in.yml @@ -0,0 +1,5 @@ +--- +title: Removes promote to group label for anonymous user +merge_request: 23042 +author: Jacopo Beschi @jacopo-beschi +type: fixed diff --git a/changelogs/unreleased/53879-kube-token-nil.yml b/changelogs/unreleased/53879-kube-token-nil.yml new file mode 100644 index 00000000000..61a0db15d84 --- /dev/null +++ b/changelogs/unreleased/53879-kube-token-nil.yml @@ -0,0 +1,5 @@ +--- +title: Fix deployment jobs using nil KUBE_TOKEN due to migration issue +merge_request: 23009 +author: +type: fixed diff --git a/changelogs/unreleased/53888-missing-favicon.yml b/changelogs/unreleased/53888-missing-favicon.yml new file mode 100644 index 00000000000..ba6f26c6b9f --- /dev/null +++ b/changelogs/unreleased/53888-missing-favicon.yml @@ -0,0 +1,5 @@ +--- +title: Adds CI favicon back to jobs page +merge_request: +author: +type: fixed diff --git a/changelogs/unreleased/53972-fix-fill-shards.yml b/changelogs/unreleased/53972-fix-fill-shards.yml new file mode 100644 index 00000000000..ca94d6cc589 --- /dev/null +++ b/changelogs/unreleased/53972-fix-fill-shards.yml @@ -0,0 +1,5 @@ +--- +title: Fix a race condition intermittently breaking GitLab startup 
+merge_request: 23028 +author: +type: fixed diff --git a/changelogs/unreleased/54002-activity-feed-missing-padding-in-event-note-when-a-branch-is-deleted.yml b/changelogs/unreleased/54002-activity-feed-missing-padding-in-event-note-when-a-branch-is-deleted.yml new file mode 100644 index 00000000000..9f4f104a12c --- /dev/null +++ b/changelogs/unreleased/54002-activity-feed-missing-padding-in-event-note-when-a-branch-is-deleted.yml @@ -0,0 +1,5 @@ +--- +title: Adds margin after a deleted branch name in the activity feed. +merge_request: 23038 +author: +type: fixed diff --git a/changelogs/unreleased/54021-empty-button.yml b/changelogs/unreleased/54021-empty-button.yml new file mode 100644 index 00000000000..3b03665cf95 --- /dev/null +++ b/changelogs/unreleased/54021-empty-button.yml @@ -0,0 +1,5 @@ +--- +title: Prevent empty button being rendered in empty state +merge_request: +author: +type: fixed diff --git a/changelogs/unreleased/added-glob-for-ci-changes-detection.yml b/changelogs/unreleased/added-glob-for-ci-changes-detection.yml new file mode 100644 index 00000000000..887c6ef0346 --- /dev/null +++ b/changelogs/unreleased/added-glob-for-ci-changes-detection.yml @@ -0,0 +1,5 @@ +--- +title: Added glob for CI changes detection +merge_request: 23128 +author: Kirill Zaitsev +type: added diff --git a/changelogs/unreleased/ashmckenzie-hmac-token-decode-and-tests.yml b/changelogs/unreleased/ashmckenzie-hmac-token-decode-and-tests.yml new file mode 100644 index 00000000000..d15c5654d99 --- /dev/null +++ b/changelogs/unreleased/ashmckenzie-hmac-token-decode-and-tests.yml @@ -0,0 +1,5 @@ +--- +title: Relocate JSONWebToken::HMACToken from EE +merge_request: 22906 +author: +type: changed diff --git a/changelogs/unreleased/auto_devops_kubernetes_active.yml b/changelogs/unreleased/auto_devops_kubernetes_active.yml new file mode 100644 index 00000000000..310d37128c9 --- /dev/null +++ b/changelogs/unreleased/auto_devops_kubernetes_active.yml @@ -0,0 +1,5 @@ +--- +title: Switch kubernetes:active with checking in Auto-DevOps.gitlab-ci.yml +merge_request: 22929 +author: +type: fixed diff --git a/changelogs/unreleased/ce-53347_fix_impersonation_tokens.yml b/changelogs/unreleased/ce-53347_fix_impersonation_tokens.yml new file mode 100644 index 00000000000..6cc743d6f3a --- /dev/null +++ b/changelogs/unreleased/ce-53347_fix_impersonation_tokens.yml @@ -0,0 +1,5 @@ +--- +title: Display impersonation token value only after creation +merge_request: 22916 +author: +type: fixed diff --git a/changelogs/unreleased/discussion-perf-improvement.yml b/changelogs/unreleased/discussion-perf-improvement.yml new file mode 100644 index 00000000000..defff8a55f5 --- /dev/null +++ b/changelogs/unreleased/discussion-perf-improvement.yml @@ -0,0 +1,5 @@ +--- +title: Improve initial discussion rendering performance +merge_request: 22607 +author: +type: changed diff --git a/changelogs/unreleased/dm-commit-email-select-options.yml b/changelogs/unreleased/dm-commit-email-select-options.yml new file mode 100644 index 00000000000..90d5c8cf0c6 --- /dev/null +++ b/changelogs/unreleased/dm-commit-email-select-options.yml @@ -0,0 +1,5 @@ +--- +title: Fix bug causing not all emails to show up in commit email selectbox +merge_request: +author: +type: fixed diff --git a/changelogs/unreleased/drop-gcp-cluster-table.yml b/changelogs/unreleased/drop-gcp-cluster-table.yml new file mode 100644 index 00000000000..15964ec2eaf --- /dev/null +++ b/changelogs/unreleased/drop-gcp-cluster-table.yml @@ -0,0 +1,5 @@ +--- +title: Drop gcp_clusters table 
+merge_request: 22713 +author: +type: other diff --git a/changelogs/unreleased/fix-deployment-metrics-in-mr-widget.yml b/changelogs/unreleased/fix-deployment-metrics-in-mr-widget.yml new file mode 100644 index 00000000000..5427ead3d1b --- /dev/null +++ b/changelogs/unreleased/fix-deployment-metrics-in-mr-widget.yml @@ -0,0 +1,6 @@ +--- +title: Avoid returning deployment metrics url to MR widget when the deployment is + not successful +merge_request: 23010 +author: +type: fixed diff --git a/changelogs/unreleased/fix-tags-for-envs.yml b/changelogs/unreleased/fix-tags-for-envs.yml new file mode 100644 index 00000000000..633788ff6d8 --- /dev/null +++ b/changelogs/unreleased/fix-tags-for-envs.yml @@ -0,0 +1,5 @@ +--- +title: Do not reload self on hooks when creating deployment +merge_request: +author: +type: fixed diff --git a/changelogs/unreleased/frozen-string-lib-gitlab-even-more.yml b/changelogs/unreleased/frozen-string-lib-gitlab-even-more.yml new file mode 100644 index 00000000000..cfbc4ced635 --- /dev/null +++ b/changelogs/unreleased/frozen-string-lib-gitlab-even-more.yml @@ -0,0 +1,5 @@ +--- +title: Enable even more frozen string in lib/gitlab/**/*.rb +merge_request: +author: gfyoung +type: performance diff --git a/changelogs/unreleased/gt-fix-typos-in-lib.yml b/changelogs/unreleased/gt-fix-typos-in-lib.yml new file mode 100644 index 00000000000..32ccd03b063 --- /dev/null +++ b/changelogs/unreleased/gt-fix-typos-in-lib.yml @@ -0,0 +1,5 @@ +--- +title: Fix typos in lib +merge_request: 23106 +author: George Tsiolis +type: other diff --git a/changelogs/unreleased/gt-remove-unused-project-method.yml b/changelogs/unreleased/gt-remove-unused-project-method.yml new file mode 100644 index 00000000000..2d60c2fe423 --- /dev/null +++ b/changelogs/unreleased/gt-remove-unused-project-method.yml @@ -0,0 +1,5 @@ +--- +title: Remove unused project method +merge_request: 54103 +author: George Tsiolis +type: other diff --git a/changelogs/unreleased/ignore-environment-validation-failure.yml b/changelogs/unreleased/ignore-environment-validation-failure.yml new file mode 100644 index 00000000000..1b61cf86dc4 --- /dev/null +++ b/changelogs/unreleased/ignore-environment-validation-failure.yml @@ -0,0 +1,5 @@ +--- +title: Ignore environment validation failure +merge_request: 23100 +author: +type: fixed diff --git a/changelogs/unreleased/kubernetes-http-response-code.yml b/changelogs/unreleased/kubernetes-http-response-code.yml new file mode 100644 index 00000000000..551fe2edc3c --- /dev/null +++ b/changelogs/unreleased/kubernetes-http-response-code.yml @@ -0,0 +1,5 @@ +--- +title: Show HTTP response code for Kubernetes errors +merge_request: 22964 +author: +type: other diff --git a/changelogs/unreleased/mr-tree-filter-path-name.yml b/changelogs/unreleased/mr-tree-filter-path-name.yml new file mode 100644 index 00000000000..152f8a67337 --- /dev/null +++ b/changelogs/unreleased/mr-tree-filter-path-name.yml @@ -0,0 +1,5 @@ +--- +title: Changed merge request filtering to be by path instead of name +merge_request: +author: +type: changed diff --git a/changelogs/unreleased/osw-comment-on-any-line-on-diffs.yml b/changelogs/unreleased/osw-comment-on-any-line-on-diffs.yml new file mode 100644 index 00000000000..e25d64a89d7 --- /dev/null +++ b/changelogs/unreleased/osw-comment-on-any-line-on-diffs.yml @@ -0,0 +1,5 @@ +--- +title: Allow commenting on any diff line in Merge Requests +merge_request: 22914 +author: +type: added diff --git a/changelogs/unreleased/osw-fallback-on-blank-refs.yml 
b/changelogs/unreleased/osw-fallback-on-blank-refs.yml new file mode 100644 index 00000000000..039179f5829 --- /dev/null +++ b/changelogs/unreleased/osw-fallback-on-blank-refs.yml @@ -0,0 +1,5 @@ +--- +title: Avoid Gitaly RPC errors when fetching diff stats +merge_request: 22995 +author: +type: fixed diff --git a/changelogs/unreleased/sh-53180-append-path.yml b/changelogs/unreleased/sh-53180-append-path.yml new file mode 100644 index 00000000000..64fae5522d8 --- /dev/null +++ b/changelogs/unreleased/sh-53180-append-path.yml @@ -0,0 +1,5 @@ +--- +title: Make sure there's only one slash as path separator +merge_request: 22954 +author: +type: other diff --git a/changelogs/unreleased/sh-bump-ruby-2-5-3.yml b/changelogs/unreleased/sh-bump-ruby-2-5-3.yml new file mode 100644 index 00000000000..13cadc73e9c --- /dev/null +++ b/changelogs/unreleased/sh-bump-ruby-2-5-3.yml @@ -0,0 +1,5 @@ +--- +title: Upgrade to Ruby 2.5.3 +merge_request: 2806 +author: +type: performance diff --git a/changelogs/unreleased/sh-remove-local-sidekiq-admin-check.yml b/changelogs/unreleased/sh-remove-local-sidekiq-admin-check.yml new file mode 100644 index 00000000000..3ec15908fc7 --- /dev/null +++ b/changelogs/unreleased/sh-remove-local-sidekiq-admin-check.yml @@ -0,0 +1,5 @@ +--- +title: Remove display of local Sidekiq process in /admin/sidekiq +merge_request: 23118 +author: +type: fixed diff --git a/changelogs/unreleased/sh-use-nokogiri-xml-backend.yml b/changelogs/unreleased/sh-use-nokogiri-xml-backend.yml new file mode 100644 index 00000000000..6a82e32c416 --- /dev/null +++ b/changelogs/unreleased/sh-use-nokogiri-xml-backend.yml @@ -0,0 +1,5 @@ +--- +title: Use Nokogiri as the ActiveSupport XML backend +merge_request: 23136 +author: +type: performance diff --git a/changelogs/unreleased/switch-rails.yml b/changelogs/unreleased/switch-rails.yml new file mode 100644 index 00000000000..4edf709dbd4 --- /dev/null +++ b/changelogs/unreleased/switch-rails.yml @@ -0,0 +1,5 @@ +--- +title: Switch to Rails 5 +merge_request: 21492 +author: +type: other diff --git a/changelogs/unreleased/triggermesh-phase2-serverless.yml b/changelogs/unreleased/triggermesh-phase2-serverless.yml new file mode 100644 index 00000000000..bee2b5e1e2c --- /dev/null +++ b/changelogs/unreleased/triggermesh-phase2-serverless.yml @@ -0,0 +1,5 @@ +--- +title: Add knative client to kubeclient library +merge_request: 22968 +author: cab105 +type: added diff --git a/changelogs/unreleased/zj-remove-broken-storage.yml b/changelogs/unreleased/zj-remove-broken-storage.yml new file mode 100644 index 00000000000..9df87b40e09 --- /dev/null +++ b/changelogs/unreleased/zj-remove-broken-storage.yml @@ -0,0 +1,5 @@ +--- +title: Remove obsolete gitlab_shell rake tasks +merge_request: 22417 +author: +type: removed diff --git a/config/application.rb b/config/application.rb index 95b0f74a5a3..921baa5d617 100644 --- a/config/application.rb +++ b/config/application.rb @@ -8,7 +8,7 @@ module Gitlab # This method is used for smooth upgrading from the current Rails 4.x to Rails 5.0. # https://gitlab.com/gitlab-org/gitlab-ce/issues/14286 def self.rails5? - ENV["RAILS5"].in?(%w[1 true]) + !%w[0 false].include?(ENV["RAILS5"]) end class Application < Rails::Application @@ -158,6 +158,9 @@ module Gitlab config.action_view.sanitized_allowed_protocols = %w(smb) + # Nokogiri is significantly faster and uses less memory than REXML + ActiveSupport::XmlMini.backend = 'Nokogiri' + # This middleware needs to precede ActiveRecord::QueryCache and other middlewares that # connect to the database. 
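Taken together, the bin/rails, bin/rspec and bin/setup hunks above and the config/application.rb change here (with config/boot.rb and config/environment.rb just below) all flip the same switch: Rails 5 is now the default, and only an explicit RAILS5=0 or RAILS5=false falls back to Gemfile.rails4. A minimal sketch of that shared pattern, assembled from those hunks rather than copied from any single file:

# Sketch of the RAILS5 opt-out logic introduced across the bin/ scripts and config files.
def rails5?
  # Unset, "1" or "true" all mean Rails 5; only "0"/"false" opt back into Rails 4.
  !%w[0 false].include?(ENV["RAILS5"])
end

gemfile = rails5? ? "Gemfile" : "Gemfile.rails4"
ENV["BUNDLE_GEMFILE"] ||= File.expand_path("../#{gemfile}", __dir__)

So RAILS5=0 bin/rspec still selects Gemfile.rails4, while leaving the variable unset now means Rails 5.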
config.middleware.insert_after Rails::Rack::Logger, ::Gitlab::Middleware::BasicHealthCheck diff --git a/config/boot.rb b/config/boot.rb index 655c54ddb84..1aeacdabbad 100644 --- a/config/boot.rb +++ b/config/boot.rb @@ -1,10 +1,10 @@ def rails5? - %w[1 true].include?(ENV["RAILS5"]) + !%w[0 false].include?(ENV["RAILS5"]) end require 'rubygems' unless rails5? -gemfile = rails5? ? "Gemfile.rails5" : "Gemfile" +gemfile = rails5? ? "Gemfile" : "Gemfile.rails4" ENV['BUNDLE_GEMFILE'] ||= File.expand_path("../#{gemfile}", __dir__) # Set up gems listed in the Gemfile. diff --git a/config/environment.rb b/config/environment.rb index 5d35937f7c6..3a52656a2c1 100644 --- a/config/environment.rb +++ b/config/environment.rb @@ -1,10 +1,10 @@ # Load the rails application # Remove this condition when upgraded to rails 5.0. -if %w[1 true].include?(ENV["RAILS5"]) - require_relative 'application' -else +if %w[0 false].include?(ENV["RAILS5"]) require File.expand_path('application', __dir__) +else + require_relative 'application' end # Initialize the rails application diff --git a/config/initializers/mysql_set_length_for_binary_indexes.rb b/config/initializers/mysql_set_length_for_binary_indexes.rb index 81ed2fb83de..0445d8fcae2 100644 --- a/config/initializers/mysql_set_length_for_binary_indexes.rb +++ b/config/initializers/mysql_set_length_for_binary_indexes.rb @@ -24,28 +24,46 @@ if defined?(ActiveRecord::ConnectionAdapters::Mysql2Adapter) ActiveRecord::ConnectionAdapters::Mysql2Adapter.send(:prepend, MysqlSetLengthForBinaryIndex) end -if Gitlab.rails5? - module MysqlSetLengthForBinaryIndexAndIgnorePostgresOptionsForSchema - # This method is used in Rails 5 schema loading as t.index - def index(column_names, options = {}) - options[:length] ||= {} - Array(column_names).each do |column_name| - column = columns.find { |c| c.name == column_name } - - if column&.type == :binary - options[:length][column_name] = 20 - end - end +module MysqlSetLengthForBinaryIndexAndIgnorePostgresOptionsForSchema + # This method is used in Rails 5 schema loading as t.index + def index(column_names, options = {}) + # Ignore indexes that use opclasses, + # also see config/initializers/mysql_ignore_postgresql_options.rb + if options[:opclasses] + warn "WARNING: index on columns #{column_names} uses unsupported option, skipping." + return + end + + # when running rails 4 with rails 5 schema, rails 4 doesn't support multiple + # indexes on the same set of columns. Mysql doesn't support partial indexes, so if + # an index already exists and we add another index, skip it if it's partial: + # see https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/21492#note_102821326 + if !Gitlab.rails5? && indexes[column_names] && options[:where] + warn "WARNING: index on columns #{column_names} already exists and partial index is not supported, skipping." + return + end + + options[:length] ||= {} + Array(column_names).each do |column_name| + column = columns.find { |c| c.name == column_name } - # Ignore indexes that use opclasses, - # also see config/initializers/mysql_ignore_postgresql_options.rb - unless options[:opclasses] - super(column_names, options) + if column&.type == :binary + options[:length][column_name] = 20 end end + + super(column_names, options) end +end +def mysql_adapter? + defined?(ActiveRecord::ConnectionAdapters::Mysql2Adapter) && ActiveRecord::Base.connection.is_a?(ActiveRecord::ConnectionAdapters::Mysql2Adapter) +end + +if Gitlab.rails5? 
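The rewritten initializer above folds what used to be a Rails-5-only patch into a single #index override shared by both gemfiles; the conditional prepend that attaches it to the appropriate TableDefinition class continues immediately below. A simplified, standalone sketch of the guards that override applies — the method and argument names here are illustrative only, not part of the diff:

# Illustrative sketch (assumed names, not the real API): returns adjusted index
# options, or nil when the index should be skipped with a warning.
def mysql_index_options(column_names, column_types, existing_indexes, rails5:, options: {})
  # Indexes using PostgreSQL operator classes are skipped on MySQL.
  return nil if options[:opclasses]
  # Rails 4 cannot declare two indexes on the same columns and MySQL has no partial
  # indexes, so under the Rails 4 schema a second, partial index on already-indexed
  # columns is skipped (see the comment in the module above).
  return nil if !rails5 && existing_indexes[column_names] && options[:where]

  length = options.fetch(:length, {}).dup
  Array(column_names).each do |name|
    # MySQL needs an explicit key length for binary columns; 20 matches the module above.
    length[name] = 20 if column_types[name] == :binary
  end
  options.merge(length: length)
end

mysql_index_options(["fingerprint"], { "fingerprint" => :binary }, {}, rails5: true)
# => { length: { "fingerprint" => 20 } }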
if defined?(ActiveRecord::ConnectionAdapters::MySQL::TableDefinition) ActiveRecord::ConnectionAdapters::MySQL::TableDefinition.send(:prepend, MysqlSetLengthForBinaryIndexAndIgnorePostgresOptionsForSchema) end +elsif mysql_adapter? && defined?(ActiveRecord::ConnectionAdapters::TableDefinition) + ActiveRecord::ConnectionAdapters::TableDefinition.send(:prepend, MysqlSetLengthForBinaryIndexAndIgnorePostgresOptionsForSchema) end diff --git a/db/migrate/20181031190558_drop_fk_gcp_clusters_table.rb b/db/migrate/20181031190558_drop_fk_gcp_clusters_table.rb new file mode 100644 index 00000000000..a7106111f46 --- /dev/null +++ b/db/migrate/20181031190558_drop_fk_gcp_clusters_table.rb @@ -0,0 +1,37 @@ +# frozen_string_literal: true + +class DropFkGcpClustersTable < ActiveRecord::Migration + include Gitlab::Database::MigrationHelpers + + DOWNTIME = false + + disable_ddl_transaction! + + def up + remove_foreign_key_if_exists :gcp_clusters, column: :project_id + remove_foreign_key_if_exists :gcp_clusters, column: :user_id + remove_foreign_key_if_exists :gcp_clusters, column: :service_id + end + + def down + add_foreign_key_if_not_exists :gcp_clusters, :projects, column: :project_id, on_delete: :cascade + add_foreign_key_if_not_exists :gcp_clusters, :users, column: :user_id, on_delete: :nullify + add_foreign_key_if_not_exists :gcp_clusters, :services, column: :service_id, on_delete: :nullify + end + + private + + def add_foreign_key_if_not_exists(source, target, column:, on_delete:) + return unless table_exists?(source) + return if foreign_key_exists?(source, target, column: column) + + add_concurrent_foreign_key(source, target, column: column, on_delete: on_delete) + end + + def remove_foreign_key_if_exists(table, column:) + return unless table_exists?(table) + return unless foreign_key_exists?(table, column: column) + + remove_foreign_key(table, column: column) + end +end diff --git a/db/migrate/20181031190559_drop_gcp_clusters_table.rb b/db/migrate/20181031190559_drop_gcp_clusters_table.rb new file mode 100644 index 00000000000..808d474b4fc --- /dev/null +++ b/db/migrate/20181031190559_drop_gcp_clusters_table.rb @@ -0,0 +1,53 @@ +# frozen_string_literal: true + +class DropGcpClustersTable < ActiveRecord::Migration + include Gitlab::Database::MigrationHelpers + + DOWNTIME = false + + def up + drop_table :gcp_clusters + end + + def down + create_table :gcp_clusters do |t| + # Order columns by best align scheme + t.references :project, null: false, index: { unique: true }, foreign_key: { on_delete: :cascade } + t.references :user, foreign_key: { on_delete: :nullify } + t.references :service, foreign_key: { on_delete: :nullify } + t.integer :status + t.integer :gcp_cluster_size, null: false + + # Timestamps + t.datetime_with_timezone :created_at, null: false + t.datetime_with_timezone :updated_at, null: false + + # Enable/disable + t.boolean :enabled, default: true + + # General + t.text :status_reason + + # k8s integration specific + t.string :project_namespace + + # Cluster details + t.string :endpoint + t.text :ca_cert + t.text :encrypted_kubernetes_token + t.string :encrypted_kubernetes_token_iv + t.string :username + t.text :encrypted_password + t.string :encrypted_password_iv + + # GKE + t.string :gcp_project_id, null: false + t.string :gcp_cluster_zone, null: false + t.string :gcp_cluster_name, null: false + t.string :gcp_machine_type + t.string :gcp_operation_id + t.text :encrypted_gcp_token + t.string :encrypted_gcp_token_iv + end + end +end diff --git a/db/schema.rb b/db/schema.rb index 
56137caf1d7..deaa2d30b26 100644 --- a/db/schema.rb +++ b/db/schema.rb @@ -1,4 +1,3 @@ -# encoding: UTF-8 # This file is auto-generated from the current state of the database. Instead # of editing this file, please use the migrations feature of Active Record to # incrementally modify your database, and then regenerate this schema definition. @@ -126,7 +125,7 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.boolean "unique_ips_limit_enabled", default: false, null: false t.string "default_artifacts_expire_in", default: "0", null: false t.string "uuid" - t.decimal "polling_interval_multiplier", default: 1.0, null: false + t.decimal "polling_interval_multiplier", default: "1.0", null: false t.integer "cached_markdown_version" t.boolean "clientside_sentry_enabled", default: false, null: false t.string "clientside_sentry_dsn" @@ -177,10 +176,9 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.text "details" t.datetime "created_at" t.datetime "updated_at" + t.index ["entity_id", "entity_type"], name: "index_audit_events_on_entity_id_and_entity_type", using: :btree end - add_index "audit_events", ["entity_id", "entity_type"], name: "index_audit_events_on_entity_id_and_entity_type", using: :btree - create_table "award_emoji", force: :cascade do |t| t.string "name" t.integer "user_id" @@ -188,11 +186,10 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.string "awardable_type" t.datetime "created_at" t.datetime "updated_at" + t.index ["awardable_type", "awardable_id"], name: "index_award_emoji_on_awardable_type_and_awardable_id", using: :btree + t.index ["user_id", "name"], name: "index_award_emoji_on_user_id_and_name", using: :btree end - add_index "award_emoji", ["awardable_type", "awardable_id"], name: "index_award_emoji_on_awardable_type_and_awardable_id", using: :btree - add_index "award_emoji", ["user_id", "name"], name: "index_award_emoji_on_user_id_and_name", using: :btree - create_table "badges", force: :cascade do |t| t.string "link_url", null: false t.string "image_url", null: false @@ -201,47 +198,43 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.string "type", null: false t.datetime_with_timezone "created_at", null: false t.datetime_with_timezone "updated_at", null: false + t.index ["group_id"], name: "index_badges_on_group_id", using: :btree + t.index ["project_id"], name: "index_badges_on_project_id", using: :btree end - add_index "badges", ["group_id"], name: "index_badges_on_group_id", using: :btree - add_index "badges", ["project_id"], name: "index_badges_on_project_id", using: :btree - create_table "board_group_recent_visits", id: :bigserial, force: :cascade do |t| t.datetime_with_timezone "created_at", null: false t.datetime_with_timezone "updated_at", null: false t.integer "user_id" t.integer "board_id" t.integer "group_id" + t.index ["board_id"], name: "index_board_group_recent_visits_on_board_id", using: :btree + t.index ["group_id"], name: "index_board_group_recent_visits_on_group_id", using: :btree + t.index ["user_id", "group_id", "board_id"], name: "index_board_group_recent_visits_on_user_group_and_board", unique: true, using: :btree + t.index ["user_id"], name: "index_board_group_recent_visits_on_user_id", using: :btree end - add_index "board_group_recent_visits", ["board_id"], name: "index_board_group_recent_visits_on_board_id", using: :btree - add_index "board_group_recent_visits", ["group_id"], name: "index_board_group_recent_visits_on_group_id", using: :btree - add_index "board_group_recent_visits", ["user_id", "group_id", 
"board_id"], name: "index_board_group_recent_visits_on_user_group_and_board", unique: true, using: :btree - add_index "board_group_recent_visits", ["user_id"], name: "index_board_group_recent_visits_on_user_id", using: :btree - create_table "board_project_recent_visits", id: :bigserial, force: :cascade do |t| t.datetime_with_timezone "created_at", null: false t.datetime_with_timezone "updated_at", null: false t.integer "user_id" t.integer "project_id" t.integer "board_id" + t.index ["board_id"], name: "index_board_project_recent_visits_on_board_id", using: :btree + t.index ["project_id"], name: "index_board_project_recent_visits_on_project_id", using: :btree + t.index ["user_id", "project_id", "board_id"], name: "index_board_project_recent_visits_on_user_project_and_board", unique: true, using: :btree + t.index ["user_id"], name: "index_board_project_recent_visits_on_user_id", using: :btree end - add_index "board_project_recent_visits", ["board_id"], name: "index_board_project_recent_visits_on_board_id", using: :btree - add_index "board_project_recent_visits", ["project_id"], name: "index_board_project_recent_visits_on_project_id", using: :btree - add_index "board_project_recent_visits", ["user_id", "project_id", "board_id"], name: "index_board_project_recent_visits_on_user_project_and_board", unique: true, using: :btree - add_index "board_project_recent_visits", ["user_id"], name: "index_board_project_recent_visits_on_user_id", using: :btree - create_table "boards", force: :cascade do |t| t.integer "project_id" t.datetime "created_at", null: false t.datetime "updated_at", null: false t.integer "group_id" + t.index ["group_id"], name: "index_boards_on_group_id", using: :btree + t.index ["project_id"], name: "index_boards_on_project_id", using: :btree end - add_index "boards", ["group_id"], name: "index_boards_on_group_id", using: :btree - add_index "boards", ["project_id"], name: "index_boards_on_project_id", using: :btree - create_table "broadcast_messages", force: :cascade do |t| t.text "message", null: false t.datetime "starts_at", null: false @@ -252,10 +245,9 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.string "font" t.text "message_html", null: false t.integer "cached_markdown_version" + t.index ["starts_at", "ends_at", "id"], name: "index_broadcast_messages_on_starts_at_and_ends_at_and_id", using: :btree end - add_index "broadcast_messages", ["starts_at", "ends_at", "id"], name: "index_broadcast_messages_on_starts_at_and_ends_at_and_id", using: :btree - create_table "chat_names", force: :cascade do |t| t.integer "user_id", null: false t.integer "service_id", null: false @@ -266,51 +258,46 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.datetime "last_used_at" t.datetime "created_at", null: false t.datetime "updated_at", null: false + t.index ["service_id", "team_id", "chat_id"], name: "index_chat_names_on_service_id_and_team_id_and_chat_id", unique: true, using: :btree + t.index ["user_id", "service_id"], name: "index_chat_names_on_user_id_and_service_id", unique: true, using: :btree end - add_index "chat_names", ["service_id", "team_id", "chat_id"], name: "index_chat_names_on_service_id_and_team_id_and_chat_id", unique: true, using: :btree - add_index "chat_names", ["user_id", "service_id"], name: "index_chat_names_on_user_id_and_service_id", unique: true, using: :btree - create_table "chat_teams", force: :cascade do |t| t.integer "namespace_id", null: false t.string "team_id" t.string "name" t.datetime "created_at", null: false t.datetime "updated_at", 
null: false + t.index ["namespace_id"], name: "index_chat_teams_on_namespace_id", unique: true, using: :btree end - add_index "chat_teams", ["namespace_id"], name: "index_chat_teams_on_namespace_id", unique: true, using: :btree - create_table "ci_build_trace_chunks", id: :bigserial, force: :cascade do |t| t.integer "build_id", null: false t.integer "chunk_index", null: false t.integer "data_store", null: false t.binary "raw_data" + t.index ["build_id", "chunk_index"], name: "index_ci_build_trace_chunks_on_build_id_and_chunk_index", unique: true, using: :btree end - add_index "ci_build_trace_chunks", ["build_id", "chunk_index"], name: "index_ci_build_trace_chunks_on_build_id_and_chunk_index", unique: true, using: :btree - create_table "ci_build_trace_section_names", force: :cascade do |t| t.integer "project_id", null: false t.string "name", null: false + t.index ["project_id", "name"], name: "index_ci_build_trace_section_names_on_project_id_and_name", unique: true, using: :btree end - add_index "ci_build_trace_section_names", ["project_id", "name"], name: "index_ci_build_trace_section_names_on_project_id_and_name", unique: true, using: :btree - create_table "ci_build_trace_sections", force: :cascade do |t| t.integer "project_id", null: false t.datetime_with_timezone "date_start", null: false t.datetime_with_timezone "date_end", null: false - t.integer "byte_start", limit: 8, null: false - t.integer "byte_end", limit: 8, null: false + t.bigint "byte_start", null: false + t.bigint "byte_end", null: false t.integer "build_id", null: false t.integer "section_name_id", null: false + t.index ["build_id", "section_name_id"], name: "index_ci_build_trace_sections_on_build_id_and_section_name_id", unique: true, using: :btree + t.index ["project_id"], name: "index_ci_build_trace_sections_on_project_id", using: :btree + t.index ["section_name_id"], name: "index_ci_build_trace_sections_on_section_name_id", using: :btree end - add_index "ci_build_trace_sections", ["build_id", "section_name_id"], name: "index_ci_build_trace_sections_on_build_id_and_section_name_id", unique: true, using: :btree - add_index "ci_build_trace_sections", ["project_id"], name: "index_ci_build_trace_sections_on_project_id", using: :btree - add_index "ci_build_trace_sections", ["section_name_id"], name: "index_ci_build_trace_sections_on_section_name_id", using: :btree - create_table "ci_builds", force: :cascade do |t| t.string "status" t.datetime "finished_at" @@ -341,7 +328,7 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.datetime "erased_at" t.datetime "artifacts_expire_at" t.string "environment" - t.integer "artifacts_size", limit: 8 + t.bigint "artifacts_size" t.string "when" t.text "yaml_variables" t.datetime "queued_at" @@ -356,45 +343,42 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.boolean "protected" t.integer "failure_reason" t.datetime_with_timezone "scheduled_at" + t.index ["artifacts_expire_at"], name: "index_ci_builds_on_artifacts_expire_at", where: "(artifacts_file <> ''::text)", using: :btree + t.index ["auto_canceled_by_id"], name: "index_ci_builds_on_auto_canceled_by_id", using: :btree + t.index ["commit_id", "stage_idx", "created_at"], name: "index_ci_builds_on_commit_id_and_stage_idx_and_created_at", using: :btree + t.index ["commit_id", "status", "type"], name: "index_ci_builds_on_commit_id_and_status_and_type", using: :btree + t.index ["commit_id", "type", "name", "ref"], name: "index_ci_builds_on_commit_id_and_type_and_name_and_ref", using: :btree + t.index ["commit_id", "type", 
"ref"], name: "index_ci_builds_on_commit_id_and_type_and_ref", using: :btree + t.index ["id"], name: "partial_index_ci_builds_on_id_with_legacy_artifacts", where: "(artifacts_file <> ''::text)", using: :btree + t.index ["project_id", "id"], name: "index_ci_builds_on_project_id_and_id", using: :btree + t.index ["protected"], name: "index_ci_builds_on_protected", using: :btree + t.index ["runner_id"], name: "index_ci_builds_on_runner_id", using: :btree + t.index ["scheduled_at"], name: "partial_index_ci_builds_on_scheduled_at_with_scheduled_jobs", where: "((scheduled_at IS NOT NULL) AND ((type)::text = 'Ci::Build'::text) AND ((status)::text = 'scheduled'::text))", using: :btree + t.index ["stage_id", "stage_idx"], name: "tmp_build_stage_position_index", where: "(stage_idx IS NOT NULL)", using: :btree + t.index ["stage_id"], name: "index_ci_builds_on_stage_id", using: :btree + t.index ["status", "type", "runner_id"], name: "index_ci_builds_on_status_and_type_and_runner_id", using: :btree + t.index ["token"], name: "index_ci_builds_on_token", unique: true, using: :btree + t.index ["updated_at"], name: "index_ci_builds_on_updated_at", using: :btree + t.index ["user_id"], name: "index_ci_builds_on_user_id", using: :btree end - add_index "ci_builds", ["artifacts_expire_at"], name: "index_ci_builds_on_artifacts_expire_at", where: "(artifacts_file <> ''::text)", using: :btree - add_index "ci_builds", ["auto_canceled_by_id"], name: "index_ci_builds_on_auto_canceled_by_id", using: :btree - add_index "ci_builds", ["commit_id", "stage_idx", "created_at"], name: "index_ci_builds_on_commit_id_and_stage_idx_and_created_at", using: :btree - add_index "ci_builds", ["commit_id", "status", "type"], name: "index_ci_builds_on_commit_id_and_status_and_type", using: :btree - add_index "ci_builds", ["commit_id", "type", "name", "ref"], name: "index_ci_builds_on_commit_id_and_type_and_name_and_ref", using: :btree - add_index "ci_builds", ["commit_id", "type", "ref"], name: "index_ci_builds_on_commit_id_and_type_and_ref", using: :btree - add_index "ci_builds", ["id"], name: "partial_index_ci_builds_on_id_with_legacy_artifacts", where: "(artifacts_file <> ''::text)", using: :btree - add_index "ci_builds", ["project_id", "id"], name: "index_ci_builds_on_project_id_and_id", using: :btree - add_index "ci_builds", ["protected"], name: "index_ci_builds_on_protected", using: :btree - add_index "ci_builds", ["runner_id"], name: "index_ci_builds_on_runner_id", using: :btree - add_index "ci_builds", ["scheduled_at"], name: "partial_index_ci_builds_on_scheduled_at_with_scheduled_jobs", where: "((scheduled_at IS NOT NULL) AND ((type)::text = 'Ci::Build'::text) AND ((status)::text = 'scheduled'::text))", using: :btree - add_index "ci_builds", ["stage_id", "stage_idx"], name: "tmp_build_stage_position_index", where: "(stage_idx IS NOT NULL)", using: :btree - add_index "ci_builds", ["stage_id"], name: "index_ci_builds_on_stage_id", using: :btree - add_index "ci_builds", ["status", "type", "runner_id"], name: "index_ci_builds_on_status_and_type_and_runner_id", using: :btree - add_index "ci_builds", ["token"], name: "index_ci_builds_on_token", unique: true, using: :btree - add_index "ci_builds", ["updated_at"], name: "index_ci_builds_on_updated_at", using: :btree - add_index "ci_builds", ["user_id"], name: "index_ci_builds_on_user_id", using: :btree - create_table "ci_builds_metadata", force: :cascade do |t| t.integer "build_id", null: false t.integer "project_id", null: false t.integer "timeout" t.integer "timeout_source", default: 
1, null: false + t.index ["build_id"], name: "index_ci_builds_metadata_on_build_id", unique: true, using: :btree + t.index ["project_id"], name: "index_ci_builds_metadata_on_project_id", using: :btree end - add_index "ci_builds_metadata", ["build_id"], name: "index_ci_builds_metadata_on_build_id", unique: true, using: :btree - add_index "ci_builds_metadata", ["project_id"], name: "index_ci_builds_metadata_on_project_id", using: :btree - create_table "ci_builds_runner_session", id: :bigserial, force: :cascade do |t| t.integer "build_id", null: false t.string "url", null: false t.string "certificate" t.string "authorization" + t.index ["build_id"], name: "index_ci_builds_runner_session_on_build_id", unique: true, using: :btree end - add_index "ci_builds_runner_session", ["build_id"], name: "index_ci_builds_runner_session_on_build_id", unique: true, using: :btree - create_table "ci_group_variables", force: :cascade do |t| t.string "key", null: false t.text "value" @@ -405,16 +389,15 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.boolean "protected", default: false, null: false t.datetime_with_timezone "created_at", null: false t.datetime_with_timezone "updated_at", null: false + t.index ["group_id", "key"], name: "index_ci_group_variables_on_group_id_and_key", unique: true, using: :btree end - add_index "ci_group_variables", ["group_id", "key"], name: "index_ci_group_variables_on_group_id_and_key", unique: true, using: :btree - create_table "ci_job_artifacts", force: :cascade do |t| t.integer "project_id", null: false t.integer "job_id", null: false t.integer "file_type", null: false t.integer "file_store" - t.integer "size", limit: 8 + t.bigint "size" t.datetime_with_timezone "created_at", null: false t.datetime_with_timezone "updated_at", null: false t.datetime_with_timezone "expire_at" @@ -422,13 +405,12 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.binary "file_sha256" t.integer "file_format", limit: 2 t.integer "file_location", limit: 2 + t.index ["expire_at", "job_id"], name: "index_ci_job_artifacts_on_expire_at_and_job_id", using: :btree + t.index ["file_store"], name: "index_ci_job_artifacts_on_file_store", using: :btree + t.index ["job_id", "file_type"], name: "index_ci_job_artifacts_on_job_id_and_file_type", unique: true, using: :btree + t.index ["project_id"], name: "index_ci_job_artifacts_on_project_id", using: :btree end - add_index "ci_job_artifacts", ["expire_at", "job_id"], name: "index_ci_job_artifacts_on_expire_at_and_job_id", using: :btree - add_index "ci_job_artifacts", ["file_store"], name: "index_ci_job_artifacts_on_file_store", using: :btree - add_index "ci_job_artifacts", ["job_id", "file_type"], name: "index_ci_job_artifacts_on_job_id_and_file_type", unique: true, using: :btree - add_index "ci_job_artifacts", ["project_id"], name: "index_ci_job_artifacts_on_project_id", using: :btree - create_table "ci_pipeline_schedule_variables", force: :cascade do |t| t.string "key", null: false t.text "value" @@ -438,10 +420,9 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.integer "pipeline_schedule_id", null: false t.datetime_with_timezone "created_at" t.datetime_with_timezone "updated_at" + t.index ["pipeline_schedule_id", "key"], name: "index_ci_pipeline_schedule_variables_on_schedule_id_and_key", unique: true, using: :btree end - add_index "ci_pipeline_schedule_variables", ["pipeline_schedule_id", "key"], name: "index_ci_pipeline_schedule_variables_on_schedule_id_and_key", unique: true, using: :btree - create_table 
"ci_pipeline_schedules", force: :cascade do |t| t.string "description" t.string "ref" @@ -453,11 +434,10 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.boolean "active", default: true t.datetime "created_at" t.datetime "updated_at" + t.index ["next_run_at", "active"], name: "index_ci_pipeline_schedules_on_next_run_at_and_active", using: :btree + t.index ["project_id"], name: "index_ci_pipeline_schedules_on_project_id", using: :btree end - add_index "ci_pipeline_schedules", ["next_run_at", "active"], name: "index_ci_pipeline_schedules_on_next_run_at_and_active", using: :btree - add_index "ci_pipeline_schedules", ["project_id"], name: "index_ci_pipeline_schedules_on_project_id", using: :btree - create_table "ci_pipeline_variables", force: :cascade do |t| t.string "key", null: false t.text "value" @@ -465,10 +445,9 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.string "encrypted_value_salt" t.string "encrypted_value_iv" t.integer "pipeline_id", null: false + t.index ["pipeline_id", "key"], name: "index_ci_pipeline_variables_on_pipeline_id_and_key", unique: true, using: :btree end - add_index "ci_pipeline_variables", ["pipeline_id", "key"], name: "index_ci_pipeline_variables_on_pipeline_id_and_key", unique: true, using: :btree - create_table "ci_pipelines", force: :cascade do |t| t.string "ref" t.string "sha" @@ -492,37 +471,34 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.boolean "protected" t.integer "failure_reason" t.integer "iid" + t.index ["auto_canceled_by_id"], name: "index_ci_pipelines_on_auto_canceled_by_id", using: :btree + t.index ["pipeline_schedule_id"], name: "index_ci_pipelines_on_pipeline_schedule_id", using: :btree + t.index ["project_id", "iid"], name: "index_ci_pipelines_on_project_id_and_iid", unique: true, where: "(iid IS NOT NULL)", using: :btree + t.index ["project_id", "ref", "status", "id"], name: "index_ci_pipelines_on_project_id_and_ref_and_status_and_id", using: :btree + t.index ["project_id", "sha"], name: "index_ci_pipelines_on_project_id_and_sha", using: :btree + t.index ["project_id", "source"], name: "index_ci_pipelines_on_project_id_and_source", using: :btree + t.index ["project_id", "status", "config_source"], name: "index_ci_pipelines_on_project_id_and_status_and_config_source", using: :btree + t.index ["project_id"], name: "index_ci_pipelines_on_project_id", using: :btree + t.index ["status"], name: "index_ci_pipelines_on_status", using: :btree + t.index ["user_id"], name: "index_ci_pipelines_on_user_id", using: :btree end - add_index "ci_pipelines", ["auto_canceled_by_id"], name: "index_ci_pipelines_on_auto_canceled_by_id", using: :btree - add_index "ci_pipelines", ["pipeline_schedule_id"], name: "index_ci_pipelines_on_pipeline_schedule_id", using: :btree - add_index "ci_pipelines", ["project_id", "iid"], name: "index_ci_pipelines_on_project_id_and_iid", unique: true, where: "(iid IS NOT NULL)", using: :btree - add_index "ci_pipelines", ["project_id", "ref", "status", "id"], name: "index_ci_pipelines_on_project_id_and_ref_and_status_and_id", using: :btree - add_index "ci_pipelines", ["project_id", "sha"], name: "index_ci_pipelines_on_project_id_and_sha", using: :btree - add_index "ci_pipelines", ["project_id", "source"], name: "index_ci_pipelines_on_project_id_and_source", using: :btree - add_index "ci_pipelines", ["project_id", "status", "config_source"], name: "index_ci_pipelines_on_project_id_and_status_and_config_source", using: :btree - add_index "ci_pipelines", ["project_id"], name: 
"index_ci_pipelines_on_project_id", using: :btree - add_index "ci_pipelines", ["status"], name: "index_ci_pipelines_on_status", using: :btree - add_index "ci_pipelines", ["user_id"], name: "index_ci_pipelines_on_user_id", using: :btree - create_table "ci_runner_namespaces", force: :cascade do |t| t.integer "runner_id" t.integer "namespace_id" + t.index ["namespace_id"], name: "index_ci_runner_namespaces_on_namespace_id", using: :btree + t.index ["runner_id", "namespace_id"], name: "index_ci_runner_namespaces_on_runner_id_and_namespace_id", unique: true, using: :btree end - add_index "ci_runner_namespaces", ["namespace_id"], name: "index_ci_runner_namespaces_on_namespace_id", using: :btree - add_index "ci_runner_namespaces", ["runner_id", "namespace_id"], name: "index_ci_runner_namespaces_on_runner_id_and_namespace_id", unique: true, using: :btree - create_table "ci_runner_projects", force: :cascade do |t| t.integer "runner_id", null: false t.datetime "created_at" t.datetime "updated_at" t.integer "project_id" + t.index ["project_id"], name: "index_ci_runner_projects_on_project_id", using: :btree + t.index ["runner_id"], name: "index_ci_runner_projects_on_runner_id", using: :btree end - add_index "ci_runner_projects", ["project_id"], name: "index_ci_runner_projects_on_project_id", using: :btree - add_index "ci_runner_projects", ["runner_id"], name: "index_ci_runner_projects_on_runner_id", using: :btree - create_table "ci_runners", force: :cascade do |t| t.string "token" t.datetime "created_at" @@ -542,14 +518,13 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.string "ip_address" t.integer "maximum_timeout" t.integer "runner_type", limit: 2, null: false + t.index ["contacted_at"], name: "index_ci_runners_on_contacted_at", using: :btree + t.index ["is_shared"], name: "index_ci_runners_on_is_shared", using: :btree + t.index ["locked"], name: "index_ci_runners_on_locked", using: :btree + t.index ["runner_type"], name: "index_ci_runners_on_runner_type", using: :btree + t.index ["token"], name: "index_ci_runners_on_token", using: :btree end - add_index "ci_runners", ["contacted_at"], name: "index_ci_runners_on_contacted_at", using: :btree - add_index "ci_runners", ["is_shared"], name: "index_ci_runners_on_is_shared", using: :btree - add_index "ci_runners", ["locked"], name: "index_ci_runners_on_locked", using: :btree - add_index "ci_runners", ["runner_type"], name: "index_ci_runners_on_runner_type", using: :btree - add_index "ci_runners", ["token"], name: "index_ci_runners_on_token", using: :btree - create_table "ci_stages", force: :cascade do |t| t.integer "project_id" t.integer "pipeline_id" @@ -559,23 +534,21 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.integer "status" t.integer "lock_version" t.integer "position" + t.index ["pipeline_id", "name"], name: "index_ci_stages_on_pipeline_id_and_name", unique: true, using: :btree + t.index ["pipeline_id", "position"], name: "index_ci_stages_on_pipeline_id_and_position", using: :btree + t.index ["pipeline_id"], name: "index_ci_stages_on_pipeline_id", using: :btree + t.index ["project_id"], name: "index_ci_stages_on_project_id", using: :btree end - add_index "ci_stages", ["pipeline_id", "name"], name: "index_ci_stages_on_pipeline_id_and_name", unique: true, using: :btree - add_index "ci_stages", ["pipeline_id", "position"], name: "index_ci_stages_on_pipeline_id_and_position", using: :btree - add_index "ci_stages", ["pipeline_id"], name: "index_ci_stages_on_pipeline_id", using: :btree - add_index "ci_stages", 
["project_id"], name: "index_ci_stages_on_project_id", using: :btree - create_table "ci_trigger_requests", force: :cascade do |t| t.integer "trigger_id", null: false t.text "variables" t.datetime "created_at" t.datetime "updated_at" t.integer "commit_id" + t.index ["commit_id"], name: "index_ci_trigger_requests_on_commit_id", using: :btree end - add_index "ci_trigger_requests", ["commit_id"], name: "index_ci_trigger_requests_on_commit_id", using: :btree - create_table "ci_triggers", force: :cascade do |t| t.string "token" t.datetime "created_at" @@ -584,10 +557,9 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.integer "owner_id" t.string "description" t.string "ref" + t.index ["project_id"], name: "index_ci_triggers_on_project_id", using: :btree end - add_index "ci_triggers", ["project_id"], name: "index_ci_triggers_on_project_id", using: :btree - create_table "ci_variables", force: :cascade do |t| t.string "key", null: false t.text "value" @@ -597,18 +569,16 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.integer "project_id", null: false t.boolean "protected", default: false, null: false t.string "environment_scope", default: "*", null: false + t.index ["project_id", "key", "environment_scope"], name: "index_ci_variables_on_project_id_and_key_and_environment_scope", unique: true, using: :btree end - add_index "ci_variables", ["project_id", "key", "environment_scope"], name: "index_ci_variables_on_project_id_and_key_and_environment_scope", unique: true, using: :btree - create_table "cluster_groups", force: :cascade do |t| t.integer "cluster_id", null: false t.integer "group_id", null: false + t.index ["cluster_id", "group_id"], name: "index_cluster_groups_on_cluster_id_and_group_id", unique: true, using: :btree + t.index ["group_id"], name: "index_cluster_groups_on_group_id", using: :btree end - add_index "cluster_groups", ["cluster_id", "group_id"], name: "index_cluster_groups_on_cluster_id_and_group_id", unique: true, using: :btree - add_index "cluster_groups", ["group_id"], name: "index_cluster_groups_on_group_id", using: :btree - create_table "cluster_platforms_kubernetes", force: :cascade do |t| t.integer "cluster_id", null: false t.datetime_with_timezone "created_at", null: false @@ -622,20 +592,18 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.text "encrypted_token" t.string "encrypted_token_iv" t.integer "authorization_type", limit: 2 + t.index ["cluster_id"], name: "index_cluster_platforms_kubernetes_on_cluster_id", unique: true, using: :btree end - add_index "cluster_platforms_kubernetes", ["cluster_id"], name: "index_cluster_platforms_kubernetes_on_cluster_id", unique: true, using: :btree - create_table "cluster_projects", force: :cascade do |t| t.integer "project_id", null: false t.integer "cluster_id", null: false t.datetime_with_timezone "created_at", null: false t.datetime_with_timezone "updated_at", null: false + t.index ["cluster_id"], name: "index_cluster_projects_on_cluster_id", using: :btree + t.index ["project_id"], name: "index_cluster_projects_on_project_id", using: :btree end - add_index "cluster_projects", ["cluster_id"], name: "index_cluster_projects_on_cluster_id", using: :btree - add_index "cluster_projects", ["project_id"], name: "index_cluster_projects_on_project_id", using: :btree - create_table "cluster_providers_gcp", force: :cascade do |t| t.integer "cluster_id", null: false t.integer "status" @@ -651,10 +619,9 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.text "encrypted_access_token" t.string 
"encrypted_access_token_iv" t.boolean "legacy_abac", default: true, null: false + t.index ["cluster_id"], name: "index_cluster_providers_gcp_on_cluster_id", unique: true, using: :btree end - add_index "cluster_providers_gcp", ["cluster_id"], name: "index_cluster_providers_gcp_on_cluster_id", unique: true, using: :btree - create_table "clusters", force: :cascade do |t| t.integer "user_id" t.integer "provider_type" @@ -665,11 +632,10 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.string "name", null: false t.string "environment_scope", default: "*", null: false t.integer "cluster_type", limit: 2, default: 3, null: false + t.index ["enabled"], name: "index_clusters_on_enabled", using: :btree + t.index ["user_id"], name: "index_clusters_on_user_id", using: :btree end - add_index "clusters", ["enabled"], name: "index_clusters_on_enabled", using: :btree - add_index "clusters", ["user_id"], name: "index_clusters_on_user_id", using: :btree - create_table "clusters_applications_helm", force: :cascade do |t| t.integer "cluster_id", null: false t.datetime_with_timezone "created_at", null: false @@ -733,11 +699,10 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.string "version", null: false t.text "status_reason" t.boolean "privileged", default: true, null: false + t.index ["cluster_id"], name: "index_clusters_applications_runners_on_cluster_id", unique: true, using: :btree + t.index ["runner_id"], name: "index_clusters_applications_runners_on_runner_id", using: :btree end - add_index "clusters_applications_runners", ["cluster_id"], name: "index_clusters_applications_runners_on_cluster_id", unique: true, using: :btree - add_index "clusters_applications_runners", ["runner_id"], name: "index_clusters_applications_runners_on_runner_id", using: :btree - create_table "clusters_kubernetes_namespaces", id: :bigserial, force: :cascade do |t| t.integer "cluster_id", null: false t.integer "project_id" @@ -748,23 +713,21 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.string "encrypted_service_account_token_iv" t.string "namespace", null: false t.string "service_account_name" + t.index ["cluster_id", "namespace"], name: "kubernetes_namespaces_cluster_and_namespace", unique: true, using: :btree + t.index ["cluster_id"], name: "index_clusters_kubernetes_namespaces_on_cluster_id", using: :btree + t.index ["cluster_project_id"], name: "index_clusters_kubernetes_namespaces_on_cluster_project_id", using: :btree + t.index ["project_id"], name: "index_clusters_kubernetes_namespaces_on_project_id", using: :btree end - add_index "clusters_kubernetes_namespaces", ["cluster_id", "namespace"], name: "kubernetes_namespaces_cluster_and_namespace", unique: true, using: :btree - add_index "clusters_kubernetes_namespaces", ["cluster_id"], name: "index_clusters_kubernetes_namespaces_on_cluster_id", using: :btree - add_index "clusters_kubernetes_namespaces", ["cluster_project_id"], name: "index_clusters_kubernetes_namespaces_on_cluster_project_id", using: :btree - add_index "clusters_kubernetes_namespaces", ["project_id"], name: "index_clusters_kubernetes_namespaces_on_project_id", using: :btree - create_table "container_repositories", force: :cascade do |t| t.integer "project_id", null: false t.string "name", null: false t.datetime "created_at", null: false t.datetime "updated_at", null: false + t.index ["project_id", "name"], name: "index_container_repositories_on_project_id_and_name", unique: true, using: :btree + t.index ["project_id"], name: "index_container_repositories_on_project_id", 
using: :btree end - add_index "container_repositories", ["project_id", "name"], name: "index_container_repositories_on_project_id_and_name", unique: true, using: :btree - add_index "container_repositories", ["project_id"], name: "index_container_repositories_on_project_id", using: :btree - create_table "conversational_development_index_metrics", force: :cascade do |t| t.float "leader_issues", null: false t.float "instance_issues", null: false @@ -806,10 +769,9 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.datetime "created_at" t.datetime "updated_at" t.boolean "can_push", default: false, null: false + t.index ["project_id"], name: "index_deploy_keys_projects_on_project_id", using: :btree end - add_index "deploy_keys_projects", ["project_id"], name: "index_deploy_keys_projects_on_project_id", using: :btree - create_table "deploy_tokens", force: :cascade do |t| t.boolean "revoked", default: false t.boolean "read_repository", default: false, null: false @@ -818,11 +780,10 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.datetime_with_timezone "created_at", null: false t.string "name", null: false t.string "token", null: false + t.index ["token", "expires_at", "id"], name: "index_deploy_tokens_on_token_and_expires_at_and_id", where: "(revoked IS FALSE)", using: :btree + t.index ["token"], name: "index_deploy_tokens_on_token", unique: true, using: :btree end - add_index "deploy_tokens", ["token", "expires_at", "id"], name: "index_deploy_tokens_on_token_and_expires_at_and_id", where: "(revoked IS FALSE)", using: :btree - add_index "deploy_tokens", ["token"], name: "index_deploy_tokens_on_token", unique: true, using: :btree - create_table "deployments", force: :cascade do |t| t.integer "iid", null: false t.integer "project_id", null: false @@ -838,18 +799,17 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.string "on_stop" t.integer "status", limit: 2, default: 2, null: false t.datetime_with_timezone "finished_at" + t.index ["created_at"], name: "index_deployments_on_created_at", using: :btree + t.index ["deployable_type", "deployable_id"], name: "index_deployments_on_deployable_type_and_deployable_id", using: :btree + t.index ["environment_id", "id"], name: "index_deployments_on_environment_id_and_id", using: :btree + t.index ["environment_id", "iid", "project_id"], name: "index_deployments_on_environment_id_and_iid_and_project_id", using: :btree + t.index ["environment_id", "status"], name: "index_deployments_on_environment_id_and_status", using: :btree + t.index ["id"], name: "partial_index_deployments_for_legacy_successful_deployments", where: "((finished_at IS NULL) AND (status = 2))", using: :btree + t.index ["project_id", "iid"], name: "index_deployments_on_project_id_and_iid", unique: true, using: :btree + t.index ["project_id", "status", "created_at"], name: "index_deployments_on_project_id_and_status_and_created_at", using: :btree + t.index ["project_id", "status"], name: "index_deployments_on_project_id_and_status", using: :btree end - add_index "deployments", ["created_at"], name: "index_deployments_on_created_at", using: :btree - add_index "deployments", ["deployable_type", "deployable_id"], name: "index_deployments_on_deployable_type_and_deployable_id", using: :btree - add_index "deployments", ["environment_id", "id"], name: "index_deployments_on_environment_id_and_id", using: :btree - add_index "deployments", ["environment_id", "iid", "project_id"], name: "index_deployments_on_environment_id_and_iid_and_project_id", using: :btree - add_index 
"deployments", ["environment_id", "status"], name: "index_deployments_on_environment_id_and_status", using: :btree - add_index "deployments", ["id"], name: "partial_index_deployments_for_legacy_successful_deployments", where: "((finished_at IS NULL) AND (status = 2))", using: :btree - add_index "deployments", ["project_id", "iid"], name: "index_deployments_on_project_id_and_iid", unique: true, using: :btree - add_index "deployments", ["project_id", "status", "created_at"], name: "index_deployments_on_project_id_and_status_and_created_at", using: :btree - add_index "deployments", ["project_id", "status"], name: "index_deployments_on_project_id_and_status", using: :btree - create_table "emails", force: :cascade do |t| t.integer "user_id", null: false t.string "email", null: false @@ -858,12 +818,11 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.string "confirmation_token" t.datetime_with_timezone "confirmed_at" t.datetime_with_timezone "confirmation_sent_at" + t.index ["confirmation_token"], name: "index_emails_on_confirmation_token", unique: true, using: :btree + t.index ["email"], name: "index_emails_on_email", unique: true, using: :btree + t.index ["user_id"], name: "index_emails_on_user_id", using: :btree end - add_index "emails", ["confirmation_token"], name: "index_emails_on_confirmation_token", unique: true, using: :btree - add_index "emails", ["email"], name: "index_emails_on_email", unique: true, using: :btree - add_index "emails", ["user_id"], name: "index_emails_on_user_id", using: :btree - create_table "environments", force: :cascade do |t| t.integer "project_id", null: false t.string "name", null: false @@ -873,11 +832,10 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.string "environment_type" t.string "state", default: "available", null: false t.string "slug", null: false + t.index ["project_id", "name"], name: "index_environments_on_project_id_and_name", unique: true, using: :btree + t.index ["project_id", "slug"], name: "index_environments_on_project_id_and_slug", unique: true, using: :btree end - add_index "environments", ["project_id", "name"], name: "index_environments_on_project_id_and_name", unique: true, using: :btree - add_index "environments", ["project_id", "slug"], name: "index_environments_on_project_id_and_slug", unique: true, using: :btree - create_table "events", force: :cascade do |t| t.integer "project_id" t.integer "author_id", null: false @@ -886,95 +844,59 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.datetime_with_timezone "updated_at", null: false t.integer "action", limit: 2, null: false t.string "target_type" + t.index ["action"], name: "index_events_on_action", using: :btree + t.index ["author_id", "project_id"], name: "index_events_on_author_id_and_project_id", using: :btree + t.index ["project_id", "id"], name: "index_events_on_project_id_and_id", using: :btree + t.index ["target_type", "target_id"], name: "index_events_on_target_type_and_target_id", using: :btree end - add_index "events", ["action"], name: "index_events_on_action", using: :btree - add_index "events", ["author_id", "project_id"], name: "index_events_on_author_id_and_project_id", using: :btree - add_index "events", ["project_id", "id"], name: "index_events_on_project_id_and_id", using: :btree - add_index "events", ["target_type", "target_id"], name: "index_events_on_target_type_and_target_id", using: :btree - create_table "feature_gates", force: :cascade do |t| t.string "feature_key", null: false t.string "key", null: false t.string "value" 
t.datetime "created_at", null: false t.datetime "updated_at", null: false + t.index ["feature_key", "key", "value"], name: "index_feature_gates_on_feature_key_and_key_and_value", unique: true, using: :btree end - add_index "feature_gates", ["feature_key", "key", "value"], name: "index_feature_gates_on_feature_key_and_key_and_value", unique: true, using: :btree - create_table "features", force: :cascade do |t| t.string "key", null: false t.datetime "created_at", null: false t.datetime "updated_at", null: false + t.index ["key"], name: "index_features_on_key", unique: true, using: :btree end - add_index "features", ["key"], name: "index_features_on_key", unique: true, using: :btree - create_table "fork_network_members", force: :cascade do |t| t.integer "fork_network_id", null: false t.integer "project_id", null: false t.integer "forked_from_project_id" + t.index ["fork_network_id"], name: "index_fork_network_members_on_fork_network_id", using: :btree + t.index ["project_id"], name: "index_fork_network_members_on_project_id", unique: true, using: :btree end - add_index "fork_network_members", ["fork_network_id"], name: "index_fork_network_members_on_fork_network_id", using: :btree - add_index "fork_network_members", ["project_id"], name: "index_fork_network_members_on_project_id", unique: true, using: :btree - create_table "fork_networks", force: :cascade do |t| t.integer "root_project_id" t.string "deleted_root_project_name" + t.index ["root_project_id"], name: "index_fork_networks_on_root_project_id", unique: true, using: :btree end - add_index "fork_networks", ["root_project_id"], name: "index_fork_networks_on_root_project_id", unique: true, using: :btree - create_table "forked_project_links", force: :cascade do |t| t.integer "forked_to_project_id", null: false t.integer "forked_from_project_id", null: false t.datetime "created_at" t.datetime "updated_at" + t.index ["forked_to_project_id"], name: "index_forked_project_links_on_forked_to_project_id", unique: true, using: :btree end - add_index "forked_project_links", ["forked_to_project_id"], name: "index_forked_project_links_on_forked_to_project_id", unique: true, using: :btree - - create_table "gcp_clusters", force: :cascade do |t| - t.integer "project_id", null: false - t.integer "user_id" - t.integer "service_id" - t.integer "status" - t.integer "gcp_cluster_size", null: false - t.datetime_with_timezone "created_at", null: false - t.datetime_with_timezone "updated_at", null: false - t.boolean "enabled", default: true - t.text "status_reason" - t.string "project_namespace" - t.string "endpoint" - t.text "ca_cert" - t.text "encrypted_kubernetes_token" - t.string "encrypted_kubernetes_token_iv" - t.string "username" - t.text "encrypted_password" - t.string "encrypted_password_iv" - t.string "gcp_project_id", null: false - t.string "gcp_cluster_zone", null: false - t.string "gcp_cluster_name", null: false - t.string "gcp_machine_type" - t.string "gcp_operation_id" - t.text "encrypted_gcp_token" - t.string "encrypted_gcp_token_iv" - end - - add_index "gcp_clusters", ["project_id"], name: "index_gcp_clusters_on_project_id", unique: true, using: :btree - create_table "gpg_key_subkeys", force: :cascade do |t| t.integer "gpg_key_id", null: false t.binary "keyid" t.binary "fingerprint" + t.index ["fingerprint"], name: "index_gpg_key_subkeys_on_fingerprint", unique: true, using: :btree + t.index ["gpg_key_id"], name: "index_gpg_key_subkeys_on_gpg_key_id", using: :btree + t.index ["keyid"], name: "index_gpg_key_subkeys_on_keyid", unique: true, 
using: :btree end - add_index "gpg_key_subkeys", ["fingerprint"], name: "index_gpg_key_subkeys_on_fingerprint", unique: true, using: :btree - add_index "gpg_key_subkeys", ["gpg_key_id"], name: "index_gpg_key_subkeys_on_gpg_key_id", using: :btree - add_index "gpg_key_subkeys", ["keyid"], name: "index_gpg_key_subkeys_on_keyid", unique: true, using: :btree - create_table "gpg_keys", force: :cascade do |t| t.datetime_with_timezone "created_at", null: false t.datetime_with_timezone "updated_at", null: false @@ -982,12 +904,11 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.binary "primary_keyid" t.binary "fingerprint" t.text "key" + t.index ["fingerprint"], name: "index_gpg_keys_on_fingerprint", unique: true, using: :btree + t.index ["primary_keyid"], name: "index_gpg_keys_on_primary_keyid", unique: true, using: :btree + t.index ["user_id"], name: "index_gpg_keys_on_user_id", using: :btree end - add_index "gpg_keys", ["fingerprint"], name: "index_gpg_keys_on_fingerprint", unique: true, using: :btree - add_index "gpg_keys", ["primary_keyid"], name: "index_gpg_keys_on_primary_keyid", unique: true, using: :btree - add_index "gpg_keys", ["user_id"], name: "index_gpg_keys_on_user_id", using: :btree - create_table "gpg_signatures", force: :cascade do |t| t.datetime_with_timezone "created_at", null: false t.datetime_with_timezone "updated_at", null: false @@ -999,63 +920,57 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.text "gpg_key_user_email" t.integer "verification_status", limit: 2, default: 0, null: false t.integer "gpg_key_subkey_id" + t.index ["commit_sha"], name: "index_gpg_signatures_on_commit_sha", unique: true, using: :btree + t.index ["gpg_key_id"], name: "index_gpg_signatures_on_gpg_key_id", using: :btree + t.index ["gpg_key_primary_keyid"], name: "index_gpg_signatures_on_gpg_key_primary_keyid", using: :btree + t.index ["gpg_key_subkey_id"], name: "index_gpg_signatures_on_gpg_key_subkey_id", using: :btree + t.index ["project_id"], name: "index_gpg_signatures_on_project_id", using: :btree end - add_index "gpg_signatures", ["commit_sha"], name: "index_gpg_signatures_on_commit_sha", unique: true, using: :btree - add_index "gpg_signatures", ["gpg_key_id"], name: "index_gpg_signatures_on_gpg_key_id", using: :btree - add_index "gpg_signatures", ["gpg_key_primary_keyid"], name: "index_gpg_signatures_on_gpg_key_primary_keyid", using: :btree - add_index "gpg_signatures", ["gpg_key_subkey_id"], name: "index_gpg_signatures_on_gpg_key_subkey_id", using: :btree - add_index "gpg_signatures", ["project_id"], name: "index_gpg_signatures_on_project_id", using: :btree - create_table "group_custom_attributes", force: :cascade do |t| t.datetime_with_timezone "created_at", null: false t.datetime_with_timezone "updated_at", null: false t.integer "group_id", null: false t.string "key", null: false t.string "value", null: false + t.index ["group_id", "key"], name: "index_group_custom_attributes_on_group_id_and_key", unique: true, using: :btree + t.index ["key", "value"], name: "index_group_custom_attributes_on_key_and_value", using: :btree end - add_index "group_custom_attributes", ["group_id", "key"], name: "index_group_custom_attributes_on_group_id_and_key", unique: true, using: :btree - add_index "group_custom_attributes", ["key", "value"], name: "index_group_custom_attributes_on_key_and_value", using: :btree - create_table "identities", force: :cascade do |t| t.string "extern_uid" t.string "provider" t.integer "user_id" t.datetime "created_at" t.datetime "updated_at" + t.index 
["user_id"], name: "index_identities_on_user_id", using: :btree end - add_index "identities", ["user_id"], name: "index_identities_on_user_id", using: :btree - create_table "import_export_uploads", force: :cascade do |t| t.datetime_with_timezone "updated_at", null: false t.integer "project_id" t.text "import_file" t.text "export_file" + t.index ["project_id"], name: "index_import_export_uploads_on_project_id", using: :btree + t.index ["updated_at"], name: "index_import_export_uploads_on_updated_at", using: :btree end - add_index "import_export_uploads", ["project_id"], name: "index_import_export_uploads_on_project_id", using: :btree - add_index "import_export_uploads", ["updated_at"], name: "index_import_export_uploads_on_updated_at", using: :btree - create_table "internal_ids", id: :bigserial, force: :cascade do |t| t.integer "project_id" t.integer "usage", null: false t.integer "last_value", null: false t.integer "namespace_id" + t.index ["usage", "namespace_id"], name: "index_internal_ids_on_usage_and_namespace_id", unique: true, where: "(namespace_id IS NOT NULL)", using: :btree + t.index ["usage", "project_id"], name: "index_internal_ids_on_usage_and_project_id", unique: true, where: "(project_id IS NOT NULL)", using: :btree end - add_index "internal_ids", ["usage", "namespace_id"], name: "index_internal_ids_on_usage_and_namespace_id", unique: true, where: "(namespace_id IS NOT NULL)", using: :btree - add_index "internal_ids", ["usage", "project_id"], name: "index_internal_ids_on_usage_and_project_id", unique: true, where: "(project_id IS NOT NULL)", using: :btree - create_table "issue_assignees", id: false, force: :cascade do |t| t.integer "user_id", null: false t.integer "issue_id", null: false + t.index ["issue_id", "user_id"], name: "index_issue_assignees_on_issue_id_and_user_id", unique: true, using: :btree + t.index ["user_id"], name: "index_issue_assignees_on_user_id", using: :btree end - add_index "issue_assignees", ["issue_id", "user_id"], name: "index_issue_assignees_on_issue_id_and_user_id", unique: true, using: :btree - add_index "issue_assignees", ["user_id"], name: "index_issue_assignees_on_user_id", using: :btree - create_table "issue_metrics", force: :cascade do |t| t.integer "issue_id", null: false t.datetime "first_mentioned_in_commit_at" @@ -1063,10 +978,9 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.datetime "first_added_to_board_at" t.datetime "created_at", null: false t.datetime "updated_at", null: false + t.index ["issue_id"], name: "index_issue_metrics", using: :btree end - add_index "issue_metrics", ["issue_id"], name: "index_issue_metrics", using: :btree - create_table "issues", force: :cascade do |t| t.string "title" t.integer "author_id" @@ -1092,23 +1006,22 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.boolean "discussion_locked" t.datetime_with_timezone "closed_at" t.integer "closed_by_id" + t.index ["author_id"], name: "index_issues_on_author_id", using: :btree + t.index ["confidential"], name: "index_issues_on_confidential", using: :btree + t.index ["description"], name: "index_issues_on_description_trigram", using: :gin, opclasses: {"description"=>"gin_trgm_ops"} + t.index ["milestone_id"], name: "index_issues_on_milestone_id", using: :btree + t.index ["moved_to_id"], name: "index_issues_on_moved_to_id", where: "(moved_to_id IS NOT NULL)", using: :btree + t.index ["project_id", "created_at", "id", "state"], name: "index_issues_on_project_id_and_created_at_and_id_and_state", using: :btree + t.index ["project_id", 
"due_date", "id", "state"], name: "idx_issues_on_project_id_and_due_date_and_id_and_state_partial", where: "(due_date IS NOT NULL)", using: :btree + t.index ["project_id", "iid"], name: "index_issues_on_project_id_and_iid", unique: true, using: :btree + t.index ["project_id", "updated_at", "id", "state"], name: "index_issues_on_project_id_and_updated_at_and_id_and_state", using: :btree + t.index ["relative_position"], name: "index_issues_on_relative_position", using: :btree + t.index ["state"], name: "index_issues_on_state", using: :btree + t.index ["title"], name: "index_issues_on_title_trigram", using: :gin, opclasses: {"title"=>"gin_trgm_ops"} + t.index ["updated_at"], name: "index_issues_on_updated_at", using: :btree + t.index ["updated_by_id"], name: "index_issues_on_updated_by_id", where: "(updated_by_id IS NOT NULL)", using: :btree end - add_index "issues", ["author_id"], name: "index_issues_on_author_id", using: :btree - add_index "issues", ["confidential"], name: "index_issues_on_confidential", using: :btree - add_index "issues", ["description"], name: "index_issues_on_description_trigram", using: :gin, opclasses: {"description"=>"gin_trgm_ops"} - add_index "issues", ["milestone_id"], name: "index_issues_on_milestone_id", using: :btree - add_index "issues", ["moved_to_id"], name: "index_issues_on_moved_to_id", where: "(moved_to_id IS NOT NULL)", using: :btree - add_index "issues", ["project_id", "created_at", "id", "state"], name: "index_issues_on_project_id_and_created_at_and_id_and_state", using: :btree - add_index "issues", ["project_id", "due_date", "id", "state"], name: "idx_issues_on_project_id_and_due_date_and_id_and_state_partial", where: "(due_date IS NOT NULL)", using: :btree - add_index "issues", ["project_id", "iid"], name: "index_issues_on_project_id_and_iid", unique: true, using: :btree - add_index "issues", ["project_id", "updated_at", "id", "state"], name: "index_issues_on_project_id_and_updated_at_and_id_and_state", using: :btree - add_index "issues", ["relative_position"], name: "index_issues_on_relative_position", using: :btree - add_index "issues", ["state"], name: "index_issues_on_state", using: :btree - add_index "issues", ["title"], name: "index_issues_on_title_trigram", using: :gin, opclasses: {"title"=>"gin_trgm_ops"} - add_index "issues", ["updated_at"], name: "index_issues_on_updated_at", using: :btree - add_index "issues", ["updated_by_id"], name: "index_issues_on_updated_by_id", where: "(updated_by_id IS NOT NULL)", using: :btree - create_table "keys", force: :cascade do |t| t.integer "user_id" t.datetime "created_at" @@ -1119,33 +1032,30 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.string "fingerprint" t.boolean "public", default: false, null: false t.datetime "last_used_at" + t.index ["fingerprint"], name: "index_keys_on_fingerprint", unique: true, using: :btree + t.index ["user_id"], name: "index_keys_on_user_id", using: :btree end - add_index "keys", ["fingerprint"], name: "index_keys_on_fingerprint", unique: true, using: :btree - add_index "keys", ["user_id"], name: "index_keys_on_user_id", using: :btree - create_table "label_links", force: :cascade do |t| t.integer "label_id" t.integer "target_id" t.string "target_type" t.datetime "created_at" t.datetime "updated_at" + t.index ["label_id"], name: "index_label_links_on_label_id", using: :btree + t.index ["target_id", "target_type"], name: "index_label_links_on_target_id_and_target_type", using: :btree end - add_index "label_links", ["label_id"], name: 
"index_label_links_on_label_id", using: :btree - add_index "label_links", ["target_id", "target_type"], name: "index_label_links_on_target_id_and_target_type", using: :btree - create_table "label_priorities", force: :cascade do |t| t.integer "project_id", null: false t.integer "label_id", null: false t.integer "priority", null: false t.datetime "created_at", null: false t.datetime "updated_at", null: false + t.index ["priority"], name: "index_label_priorities_on_priority", using: :btree + t.index ["project_id", "label_id"], name: "index_label_priorities_on_project_id_and_label_id", unique: true, using: :btree end - add_index "label_priorities", ["priority"], name: "index_label_priorities_on_priority", using: :btree - add_index "label_priorities", ["project_id", "label_id"], name: "index_label_priorities_on_project_id_and_label_id", unique: true, using: :btree - create_table "labels", force: :cascade do |t| t.string "title" t.string "color" @@ -1158,45 +1068,41 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.string "type" t.integer "group_id" t.integer "cached_markdown_version" + t.index ["group_id", "project_id", "title"], name: "index_labels_on_group_id_and_project_id_and_title", unique: true, using: :btree + t.index ["project_id"], name: "index_labels_on_project_id", using: :btree + t.index ["template"], name: "index_labels_on_template", where: "template", using: :btree + t.index ["title"], name: "index_labels_on_title", using: :btree + t.index ["type", "project_id"], name: "index_labels_on_type_and_project_id", using: :btree end - add_index "labels", ["group_id", "project_id", "title"], name: "index_labels_on_group_id_and_project_id_and_title", unique: true, using: :btree - add_index "labels", ["project_id"], name: "index_labels_on_project_id", using: :btree - add_index "labels", ["template"], name: "index_labels_on_template", where: "template", using: :btree - add_index "labels", ["title"], name: "index_labels_on_title", using: :btree - add_index "labels", ["type", "project_id"], name: "index_labels_on_type_and_project_id", using: :btree - create_table "lfs_file_locks", force: :cascade do |t| t.integer "project_id", null: false t.integer "user_id", null: false t.datetime "created_at", null: false t.string "path", limit: 511 + t.index ["project_id", "path"], name: "index_lfs_file_locks_on_project_id_and_path", unique: true, using: :btree + t.index ["user_id"], name: "index_lfs_file_locks_on_user_id", using: :btree end - add_index "lfs_file_locks", ["project_id", "path"], name: "index_lfs_file_locks_on_project_id_and_path", unique: true, using: :btree - add_index "lfs_file_locks", ["user_id"], name: "index_lfs_file_locks_on_user_id", using: :btree - create_table "lfs_objects", force: :cascade do |t| t.string "oid", null: false - t.integer "size", limit: 8, null: false + t.bigint "size", null: false t.datetime "created_at" t.datetime "updated_at" t.string "file" t.integer "file_store" + t.index ["file_store"], name: "index_lfs_objects_on_file_store", using: :btree + t.index ["oid"], name: "index_lfs_objects_on_oid", unique: true, using: :btree end - add_index "lfs_objects", ["file_store"], name: "index_lfs_objects_on_file_store", using: :btree - add_index "lfs_objects", ["oid"], name: "index_lfs_objects_on_oid", unique: true, using: :btree - create_table "lfs_objects_projects", force: :cascade do |t| t.integer "lfs_object_id", null: false t.integer "project_id", null: false t.datetime "created_at" t.datetime "updated_at" + t.index ["project_id"], name: 
"index_lfs_objects_projects_on_project_id", using: :btree end - add_index "lfs_objects_projects", ["project_id"], name: "index_lfs_objects_projects_on_project_id", using: :btree - create_table "lists", force: :cascade do |t| t.integer "board_id", null: false t.integer "label_id" @@ -1204,12 +1110,11 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.integer "position" t.datetime "created_at", null: false t.datetime "updated_at", null: false + t.index ["board_id", "label_id"], name: "index_lists_on_board_id_and_label_id", unique: true, using: :btree + t.index ["label_id"], name: "index_lists_on_label_id", using: :btree + t.index ["list_type"], name: "index_lists_on_list_type", using: :btree end - add_index "lists", ["board_id", "label_id"], name: "index_lists_on_board_id_and_label_id", unique: true, using: :btree - add_index "lists", ["label_id"], name: "index_lists_on_label_id", using: :btree - add_index "lists", ["list_type"], name: "index_lists_on_list_type", using: :btree - create_table "members", force: :cascade do |t| t.integer "access_level", null: false t.integer "source_id", null: false @@ -1225,14 +1130,13 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.datetime "invite_accepted_at" t.datetime "requested_at" t.date "expires_at" + t.index ["access_level"], name: "index_members_on_access_level", using: :btree + t.index ["invite_token"], name: "index_members_on_invite_token", unique: true, using: :btree + t.index ["requested_at"], name: "index_members_on_requested_at", using: :btree + t.index ["source_id", "source_type"], name: "index_members_on_source_id_and_source_type", using: :btree + t.index ["user_id"], name: "index_members_on_user_id", using: :btree end - add_index "members", ["access_level"], name: "index_members_on_access_level", using: :btree - add_index "members", ["invite_token"], name: "index_members_on_invite_token", unique: true, using: :btree - add_index "members", ["requested_at"], name: "index_members_on_requested_at", using: :btree - add_index "members", ["source_id", "source_type"], name: "index_members_on_source_id_and_source_type", using: :btree - add_index "members", ["user_id"], name: "index_members_on_user_id", using: :btree - create_table "merge_request_diff_commits", id: false, force: :cascade do |t| t.datetime_with_timezone "authored_date" t.datetime_with_timezone "committed_date" @@ -1244,11 +1148,10 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.text "committer_name" t.text "committer_email" t.text "message" + t.index ["merge_request_diff_id", "relative_order"], name: "index_merge_request_diff_commits_on_mr_diff_id_and_order", unique: true, using: :btree + t.index ["sha"], name: "index_merge_request_diff_commits_on_sha", using: :btree end - add_index "merge_request_diff_commits", ["merge_request_diff_id", "relative_order"], name: "index_merge_request_diff_commits_on_mr_diff_id_and_order", unique: true, using: :btree - add_index "merge_request_diff_commits", ["sha"], name: "index_merge_request_diff_commits_on_sha", using: :btree - create_table "merge_request_diff_files", id: false, force: :cascade do |t| t.integer "merge_request_diff_id", null: false t.integer "relative_order", null: false @@ -1262,10 +1165,9 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.text "old_path", null: false t.text "diff", null: false t.boolean "binary" + t.index ["merge_request_diff_id", "relative_order"], name: "index_merge_request_diff_files_on_mr_diff_id_and_order", unique: true, using: :btree end - add_index 
"merge_request_diff_files", ["merge_request_diff_id", "relative_order"], name: "index_merge_request_diff_files_on_mr_diff_id_and_order", unique: true, using: :btree - create_table "merge_request_diffs", force: :cascade do |t| t.string "state" t.integer "merge_request_id", null: false @@ -1276,10 +1178,9 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.string "head_commit_sha" t.string "start_commit_sha" t.integer "commits_count" + t.index ["merge_request_id", "id"], name: "index_merge_request_diffs_on_merge_request_id_and_id", using: :btree end - add_index "merge_request_diffs", ["merge_request_id", "id"], name: "index_merge_request_diffs_on_merge_request_id_and_id", using: :btree - create_table "merge_request_metrics", force: :cascade do |t| t.integer "merge_request_id", null: false t.datetime "latest_build_started_at" @@ -1292,12 +1193,11 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.integer "merged_by_id" t.integer "latest_closed_by_id" t.datetime_with_timezone "latest_closed_at" + t.index ["first_deployed_to_production_at"], name: "index_merge_request_metrics_on_first_deployed_to_production_at", using: :btree + t.index ["merge_request_id"], name: "index_merge_request_metrics", using: :btree + t.index ["pipeline_id"], name: "index_merge_request_metrics_on_pipeline_id", using: :btree end - add_index "merge_request_metrics", ["first_deployed_to_production_at"], name: "index_merge_request_metrics_on_first_deployed_to_production_at", using: :btree - add_index "merge_request_metrics", ["merge_request_id"], name: "index_merge_request_metrics", using: :btree - add_index "merge_request_metrics", ["pipeline_id"], name: "index_merge_request_metrics_on_pipeline_id", using: :btree - create_table "merge_requests", force: :cascade do |t| t.string "target_branch", null: false t.string "source_branch", null: false @@ -1334,38 +1234,36 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.string "rebase_commit_sha" t.boolean "squash", default: false, null: false t.boolean "allow_maintainer_to_push" + t.index ["assignee_id"], name: "index_merge_requests_on_assignee_id", using: :btree + t.index ["author_id"], name: "index_merge_requests_on_author_id", using: :btree + t.index ["created_at"], name: "index_merge_requests_on_created_at", using: :btree + t.index ["description"], name: "index_merge_requests_on_description_trigram", using: :gin, opclasses: {"description"=>"gin_trgm_ops"} + t.index ["head_pipeline_id"], name: "index_merge_requests_on_head_pipeline_id", using: :btree + t.index ["id", "merge_jid"], name: "index_merge_requests_on_id_and_merge_jid", where: "((merge_jid IS NOT NULL) AND ((state)::text = 'locked'::text))", using: :btree + t.index ["latest_merge_request_diff_id"], name: "index_merge_requests_on_latest_merge_request_diff_id", using: :btree + t.index ["merge_user_id"], name: "index_merge_requests_on_merge_user_id", where: "(merge_user_id IS NOT NULL)", using: :btree + t.index ["milestone_id"], name: "index_merge_requests_on_milestone_id", using: :btree + t.index ["source_branch"], name: "index_merge_requests_on_source_branch", using: :btree + t.index ["source_project_id", "source_branch"], name: "index_merge_requests_on_source_project_and_branch_state_opened", where: "((state)::text = 'opened'::text)", using: :btree + t.index ["source_project_id", "source_branch"], name: "index_merge_requests_on_source_project_id_and_source_branch", using: :btree + t.index ["target_branch"], name: "index_merge_requests_on_target_branch", using: :btree + t.index 
["target_project_id", "iid"], name: "index_merge_requests_on_target_project_id_and_iid", unique: true, using: :btree + t.index ["target_project_id", "iid"], name: "index_merge_requests_on_target_project_id_and_iid_opened", where: "((state)::text = 'opened'::text)", using: :btree + t.index ["target_project_id", "merge_commit_sha", "id"], name: "index_merge_requests_on_tp_id_and_merge_commit_sha_and_id", using: :btree + t.index ["title"], name: "index_merge_requests_on_title", using: :btree + t.index ["title"], name: "index_merge_requests_on_title_trigram", using: :gin, opclasses: {"title"=>"gin_trgm_ops"} + t.index ["updated_by_id"], name: "index_merge_requests_on_updated_by_id", where: "(updated_by_id IS NOT NULL)", using: :btree end - add_index "merge_requests", ["assignee_id"], name: "index_merge_requests_on_assignee_id", using: :btree - add_index "merge_requests", ["author_id"], name: "index_merge_requests_on_author_id", using: :btree - add_index "merge_requests", ["created_at"], name: "index_merge_requests_on_created_at", using: :btree - add_index "merge_requests", ["description"], name: "index_merge_requests_on_description_trigram", using: :gin, opclasses: {"description"=>"gin_trgm_ops"} - add_index "merge_requests", ["head_pipeline_id"], name: "index_merge_requests_on_head_pipeline_id", using: :btree - add_index "merge_requests", ["id", "merge_jid"], name: "index_merge_requests_on_id_and_merge_jid", where: "((merge_jid IS NOT NULL) AND ((state)::text = 'locked'::text))", using: :btree - add_index "merge_requests", ["latest_merge_request_diff_id"], name: "index_merge_requests_on_latest_merge_request_diff_id", using: :btree - add_index "merge_requests", ["merge_user_id"], name: "index_merge_requests_on_merge_user_id", where: "(merge_user_id IS NOT NULL)", using: :btree - add_index "merge_requests", ["milestone_id"], name: "index_merge_requests_on_milestone_id", using: :btree - add_index "merge_requests", ["source_branch"], name: "index_merge_requests_on_source_branch", using: :btree - add_index "merge_requests", ["source_project_id", "source_branch"], name: "index_merge_requests_on_source_project_and_branch_state_opened", where: "((state)::text = 'opened'::text)", using: :btree - add_index "merge_requests", ["source_project_id", "source_branch"], name: "index_merge_requests_on_source_project_id_and_source_branch", using: :btree - add_index "merge_requests", ["target_branch"], name: "index_merge_requests_on_target_branch", using: :btree - add_index "merge_requests", ["target_project_id", "iid"], name: "index_merge_requests_on_target_project_id_and_iid", unique: true, using: :btree - add_index "merge_requests", ["target_project_id", "iid"], name: "index_merge_requests_on_target_project_id_and_iid_opened", where: "((state)::text = 'opened'::text)", using: :btree - add_index "merge_requests", ["target_project_id", "merge_commit_sha", "id"], name: "index_merge_requests_on_tp_id_and_merge_commit_sha_and_id", using: :btree - add_index "merge_requests", ["title"], name: "index_merge_requests_on_title", using: :btree - add_index "merge_requests", ["title"], name: "index_merge_requests_on_title_trigram", using: :gin, opclasses: {"title"=>"gin_trgm_ops"} - add_index "merge_requests", ["updated_by_id"], name: "index_merge_requests_on_updated_by_id", where: "(updated_by_id IS NOT NULL)", using: :btree - create_table "merge_requests_closing_issues", force: :cascade do |t| t.integer "merge_request_id", null: false t.integer "issue_id", null: false t.datetime "created_at", null: false t.datetime 
"updated_at", null: false + t.index ["issue_id"], name: "index_merge_requests_closing_issues_on_issue_id", using: :btree + t.index ["merge_request_id"], name: "index_merge_requests_closing_issues_on_merge_request_id", using: :btree end - add_index "merge_requests_closing_issues", ["issue_id"], name: "index_merge_requests_closing_issues_on_issue_id", using: :btree - add_index "merge_requests_closing_issues", ["merge_request_id"], name: "index_merge_requests_closing_issues_on_merge_request_id", using: :btree - create_table "milestones", force: :cascade do |t| t.string "title", null: false t.integer "project_id" @@ -1380,15 +1278,14 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.date "start_date" t.integer "cached_markdown_version" t.integer "group_id" + t.index ["description"], name: "index_milestones_on_description_trigram", using: :gin, opclasses: {"description"=>"gin_trgm_ops"} + t.index ["due_date"], name: "index_milestones_on_due_date", using: :btree + t.index ["group_id"], name: "index_milestones_on_group_id", using: :btree + t.index ["project_id", "iid"], name: "index_milestones_on_project_id_and_iid", unique: true, using: :btree + t.index ["title"], name: "index_milestones_on_title", using: :btree + t.index ["title"], name: "index_milestones_on_title_trigram", using: :gin, opclasses: {"title"=>"gin_trgm_ops"} end - add_index "milestones", ["description"], name: "index_milestones_on_description_trigram", using: :gin, opclasses: {"description"=>"gin_trgm_ops"} - add_index "milestones", ["due_date"], name: "index_milestones_on_due_date", using: :btree - add_index "milestones", ["group_id"], name: "index_milestones_on_group_id", using: :btree - add_index "milestones", ["project_id", "iid"], name: "index_milestones_on_project_id_and_iid", unique: true, using: :btree - add_index "milestones", ["title"], name: "index_milestones_on_title", using: :btree - add_index "milestones", ["title"], name: "index_milestones_on_title_trigram", using: :gin, opclasses: {"title"=>"gin_trgm_ops"} - create_table "namespaces", force: :cascade do |t| t.string "name", null: false t.string "path", null: false @@ -1408,19 +1305,18 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.integer "two_factor_grace_period", default: 48, null: false t.integer "cached_markdown_version" t.string "runners_token" + t.index ["created_at"], name: "index_namespaces_on_created_at", using: :btree + t.index ["name", "parent_id"], name: "index_namespaces_on_name_and_parent_id", unique: true, using: :btree + t.index ["name"], name: "index_namespaces_on_name_trigram", using: :gin, opclasses: {"name"=>"gin_trgm_ops"} + t.index ["owner_id"], name: "index_namespaces_on_owner_id", using: :btree + t.index ["parent_id", "id"], name: "index_namespaces_on_parent_id_and_id", unique: true, using: :btree + t.index ["path"], name: "index_namespaces_on_path", using: :btree + t.index ["path"], name: "index_namespaces_on_path_trigram", using: :gin, opclasses: {"path"=>"gin_trgm_ops"} + t.index ["require_two_factor_authentication"], name: "index_namespaces_on_require_two_factor_authentication", using: :btree + t.index ["runners_token"], name: "index_namespaces_on_runners_token", unique: true, using: :btree + t.index ["type"], name: "index_namespaces_on_type", using: :btree end - add_index "namespaces", ["created_at"], name: "index_namespaces_on_created_at", using: :btree - add_index "namespaces", ["name", "parent_id"], name: "index_namespaces_on_name_and_parent_id", unique: true, using: :btree - add_index "namespaces", 
["name"], name: "index_namespaces_on_name_trigram", using: :gin, opclasses: {"name"=>"gin_trgm_ops"} - add_index "namespaces", ["owner_id"], name: "index_namespaces_on_owner_id", using: :btree - add_index "namespaces", ["parent_id", "id"], name: "index_namespaces_on_parent_id_and_id", unique: true, using: :btree - add_index "namespaces", ["path"], name: "index_namespaces_on_path", using: :btree - add_index "namespaces", ["path"], name: "index_namespaces_on_path_trigram", using: :gin, opclasses: {"path"=>"gin_trgm_ops"} - add_index "namespaces", ["require_two_factor_authentication"], name: "index_namespaces_on_require_two_factor_authentication", using: :btree - add_index "namespaces", ["runners_token"], name: "index_namespaces_on_runners_token", unique: true, using: :btree - add_index "namespaces", ["type"], name: "index_namespaces_on_type", using: :btree - create_table "note_diff_files", force: :cascade do |t| t.integer "diff_note_id", null: false t.text "diff", null: false @@ -1431,10 +1327,9 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.string "b_mode", null: false t.text "new_path", null: false t.text "old_path", null: false + t.index ["diff_note_id"], name: "index_note_diff_files_on_diff_note_id", unique: true, using: :btree end - add_index "note_diff_files", ["diff_note_id"], name: "index_note_diff_files_on_diff_note_id", unique: true, using: :btree - create_table "notes", force: :cascade do |t| t.text "note" t.string "noteable_type" @@ -1459,19 +1354,18 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.integer "cached_markdown_version" t.text "change_position" t.boolean "resolved_by_push" + t.index ["author_id"], name: "index_notes_on_author_id", using: :btree + t.index ["commit_id"], name: "index_notes_on_commit_id", using: :btree + t.index ["created_at"], name: "index_notes_on_created_at", using: :btree + t.index ["discussion_id"], name: "index_notes_on_discussion_id", using: :btree + t.index ["line_code"], name: "index_notes_on_line_code", using: :btree + t.index ["note"], name: "index_notes_on_note_trigram", using: :gin, opclasses: {"note"=>"gin_trgm_ops"} + t.index ["noteable_id", "noteable_type"], name: "index_notes_on_noteable_id_and_noteable_type", using: :btree + t.index ["noteable_type"], name: "index_notes_on_noteable_type", using: :btree + t.index ["project_id", "noteable_type"], name: "index_notes_on_project_id_and_noteable_type", using: :btree + t.index ["updated_at"], name: "index_notes_on_updated_at", using: :btree end - add_index "notes", ["author_id"], name: "index_notes_on_author_id", using: :btree - add_index "notes", ["commit_id"], name: "index_notes_on_commit_id", using: :btree - add_index "notes", ["created_at"], name: "index_notes_on_created_at", using: :btree - add_index "notes", ["discussion_id"], name: "index_notes_on_discussion_id", using: :btree - add_index "notes", ["line_code"], name: "index_notes_on_line_code", using: :btree - add_index "notes", ["note"], name: "index_notes_on_note_trigram", using: :gin, opclasses: {"note"=>"gin_trgm_ops"} - add_index "notes", ["noteable_id", "noteable_type"], name: "index_notes_on_noteable_id_and_noteable_type", using: :btree - add_index "notes", ["noteable_type"], name: "index_notes_on_noteable_type", using: :btree - add_index "notes", ["project_id", "noteable_type"], name: "index_notes_on_project_id_and_noteable_type", using: :btree - add_index "notes", ["updated_at"], name: "index_notes_on_updated_at", using: :btree - create_table "notification_settings", force: :cascade do |t| t.integer 
"user_id", null: false t.integer "source_id" @@ -1493,12 +1387,11 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.boolean "success_pipeline" t.boolean "push_to_merge_request" t.boolean "issue_due" + t.index ["source_id", "source_type"], name: "index_notification_settings_on_source_id_and_source_type", using: :btree + t.index ["user_id", "source_id", "source_type"], name: "index_notifications_on_user_id_and_source_id_and_source_type", unique: true, using: :btree + t.index ["user_id"], name: "index_notification_settings_on_user_id", using: :btree end - add_index "notification_settings", ["source_id", "source_type"], name: "index_notification_settings_on_source_id_and_source_type", using: :btree - add_index "notification_settings", ["user_id", "source_id", "source_type"], name: "index_notifications_on_user_id_and_source_id_and_source_type", unique: true, using: :btree - add_index "notification_settings", ["user_id"], name: "index_notification_settings_on_user_id", using: :btree - create_table "oauth_access_grants", force: :cascade do |t| t.integer "resource_owner_id", null: false t.integer "application_id", null: false @@ -1508,10 +1401,9 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.datetime "created_at", null: false t.datetime "revoked_at" t.string "scopes" + t.index ["token"], name: "index_oauth_access_grants_on_token", unique: true, using: :btree end - add_index "oauth_access_grants", ["token"], name: "index_oauth_access_grants_on_token", unique: true, using: :btree - create_table "oauth_access_tokens", force: :cascade do |t| t.integer "resource_owner_id" t.integer "application_id" @@ -1521,12 +1413,11 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.datetime "revoked_at" t.datetime "created_at", null: false t.string "scopes" + t.index ["refresh_token"], name: "index_oauth_access_tokens_on_refresh_token", unique: true, using: :btree + t.index ["resource_owner_id"], name: "index_oauth_access_tokens_on_resource_owner_id", using: :btree + t.index ["token"], name: "index_oauth_access_tokens_on_token", unique: true, using: :btree end - add_index "oauth_access_tokens", ["refresh_token"], name: "index_oauth_access_tokens_on_refresh_token", unique: true, using: :btree - add_index "oauth_access_tokens", ["resource_owner_id"], name: "index_oauth_access_tokens_on_resource_owner_id", using: :btree - add_index "oauth_access_tokens", ["token"], name: "index_oauth_access_tokens_on_token", unique: true, using: :btree - create_table "oauth_applications", force: :cascade do |t| t.string "name", null: false t.string "uid", null: false @@ -1538,11 +1429,10 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.integer "owner_id" t.string "owner_type" t.boolean "trusted", default: false, null: false + t.index ["owner_id", "owner_type"], name: "index_oauth_applications_on_owner_id_and_owner_type", using: :btree + t.index ["uid"], name: "index_oauth_applications_on_uid", unique: true, using: :btree end - add_index "oauth_applications", ["owner_id", "owner_type"], name: "index_oauth_applications_on_owner_id_and_owner_type", using: :btree - add_index "oauth_applications", ["uid"], name: "index_oauth_applications_on_uid", unique: true, using: :btree - create_table "oauth_openid_requests", force: :cascade do |t| t.integer "access_grant_id", null: false t.string "nonce", null: false @@ -1558,14 +1448,13 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.datetime_with_timezone "verified_at" t.string "verification_code", null: false t.datetime_with_timezone 
"enabled_until" + t.index ["domain"], name: "index_pages_domains_on_domain", unique: true, using: :btree + t.index ["project_id", "enabled_until"], name: "index_pages_domains_on_project_id_and_enabled_until", using: :btree + t.index ["project_id"], name: "index_pages_domains_on_project_id", using: :btree + t.index ["verified_at", "enabled_until"], name: "index_pages_domains_on_verified_at_and_enabled_until", using: :btree + t.index ["verified_at"], name: "index_pages_domains_on_verified_at", using: :btree end - add_index "pages_domains", ["domain"], name: "index_pages_domains_on_domain", unique: true, using: :btree - add_index "pages_domains", ["project_id", "enabled_until"], name: "index_pages_domains_on_project_id_and_enabled_until", using: :btree - add_index "pages_domains", ["project_id"], name: "index_pages_domains_on_project_id", using: :btree - add_index "pages_domains", ["verified_at", "enabled_until"], name: "index_pages_domains_on_verified_at_and_enabled_until", using: :btree - add_index "pages_domains", ["verified_at"], name: "index_pages_domains_on_verified_at", using: :btree - create_table "personal_access_tokens", force: :cascade do |t| t.integer "user_id", null: false t.string "token" @@ -1577,29 +1466,26 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.string "scopes", default: "--- []\n", null: false t.boolean "impersonation", default: false, null: false t.string "token_digest" + t.index ["token"], name: "index_personal_access_tokens_on_token", unique: true, using: :btree + t.index ["token_digest"], name: "index_personal_access_tokens_on_token_digest", unique: true, using: :btree + t.index ["user_id"], name: "index_personal_access_tokens_on_user_id", using: :btree end - add_index "personal_access_tokens", ["token"], name: "index_personal_access_tokens_on_token", unique: true, using: :btree - add_index "personal_access_tokens", ["token_digest"], name: "index_personal_access_tokens_on_token_digest", unique: true, using: :btree - add_index "personal_access_tokens", ["user_id"], name: "index_personal_access_tokens_on_user_id", using: :btree - create_table "programming_languages", force: :cascade do |t| t.string "name", null: false t.string "color", null: false t.datetime_with_timezone "created_at", null: false + t.index ["name"], name: "index_programming_languages_on_name", unique: true, using: :btree end - add_index "programming_languages", ["name"], name: "index_programming_languages_on_name", unique: true, using: :btree - create_table "project_authorizations", id: false, force: :cascade do |t| t.integer "user_id", null: false t.integer "project_id", null: false t.integer "access_level", null: false + t.index ["project_id"], name: "index_project_authorizations_on_project_id", using: :btree + t.index ["user_id", "project_id", "access_level"], name: "index_project_authorizations_on_user_id_project_id_access_level", unique: true, using: :btree end - add_index "project_authorizations", ["project_id"], name: "index_project_authorizations_on_project_id", using: :btree - add_index "project_authorizations", ["user_id", "project_id", "access_level"], name: "index_project_authorizations_on_user_id_project_id_access_level", unique: true, using: :btree - create_table "project_auto_devops", force: :cascade do |t| t.integer "project_id", null: false t.datetime_with_timezone "created_at", null: false @@ -1607,37 +1493,33 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.boolean "enabled" t.string "domain" t.integer "deploy_strategy", default: 0, null: false + t.index 
["project_id"], name: "index_project_auto_devops_on_project_id", unique: true, using: :btree end - add_index "project_auto_devops", ["project_id"], name: "index_project_auto_devops_on_project_id", unique: true, using: :btree - create_table "project_ci_cd_settings", force: :cascade do |t| t.integer "project_id", null: false t.boolean "group_runners_enabled", default: true, null: false + t.index ["project_id"], name: "index_project_ci_cd_settings_on_project_id", unique: true, using: :btree end - add_index "project_ci_cd_settings", ["project_id"], name: "index_project_ci_cd_settings_on_project_id", unique: true, using: :btree - create_table "project_custom_attributes", force: :cascade do |t| t.datetime_with_timezone "created_at", null: false t.datetime_with_timezone "updated_at", null: false t.integer "project_id", null: false t.string "key", null: false t.string "value", null: false + t.index ["key", "value"], name: "index_project_custom_attributes_on_key_and_value", using: :btree + t.index ["project_id", "key"], name: "index_project_custom_attributes_on_project_id_and_key", unique: true, using: :btree end - add_index "project_custom_attributes", ["key", "value"], name: "index_project_custom_attributes_on_key_and_value", using: :btree - add_index "project_custom_attributes", ["project_id", "key"], name: "index_project_custom_attributes_on_project_id_and_key", unique: true, using: :btree - create_table "project_deploy_tokens", force: :cascade do |t| t.integer "project_id", null: false t.integer "deploy_token_id", null: false t.datetime_with_timezone "created_at", null: false + t.index ["deploy_token_id"], name: "index_project_deploy_tokens_on_deploy_token_id", using: :btree + t.index ["project_id", "deploy_token_id"], name: "index_project_deploy_tokens_on_project_id_and_deploy_token_id", unique: true, using: :btree end - add_index "project_deploy_tokens", ["deploy_token_id"], name: "index_project_deploy_tokens_on_deploy_token_id", using: :btree - add_index "project_deploy_tokens", ["project_id", "deploy_token_id"], name: "index_project_deploy_tokens_on_project_id_and_deploy_token_id", unique: true, using: :btree - create_table "project_features", force: :cascade do |t| t.integer "project_id", null: false t.integer "merge_requests_access_level" @@ -1649,10 +1531,9 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.datetime "updated_at" t.integer "repository_access_level", default: 20, null: false t.integer "pages_access_level", default: 20, null: false + t.index ["project_id"], name: "index_project_features_on_project_id", unique: true, using: :btree end - add_index "project_features", ["project_id"], name: "index_project_features_on_project_id", unique: true, using: :btree - create_table "project_group_links", force: :cascade do |t| t.integer "project_id", null: false t.integer "group_id", null: false @@ -1660,45 +1541,41 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.datetime "updated_at" t.integer "group_access", default: 30, null: false t.date "expires_at" + t.index ["group_id"], name: "index_project_group_links_on_group_id", using: :btree + t.index ["project_id"], name: "index_project_group_links_on_project_id", using: :btree end - add_index "project_group_links", ["group_id"], name: "index_project_group_links_on_group_id", using: :btree - add_index "project_group_links", ["project_id"], name: "index_project_group_links_on_project_id", using: :btree - create_table "project_import_data", force: :cascade do |t| t.integer "project_id" t.text "data" t.text 
"encrypted_credentials" t.string "encrypted_credentials_iv" t.string "encrypted_credentials_salt" + t.index ["project_id"], name: "index_project_import_data_on_project_id", using: :btree end - add_index "project_import_data", ["project_id"], name: "index_project_import_data_on_project_id", using: :btree - create_table "project_mirror_data", force: :cascade do |t| t.integer "project_id", null: false t.string "status" t.string "jid" t.text "last_error" + t.index ["jid"], name: "index_project_mirror_data_on_jid", using: :btree + t.index ["project_id"], name: "index_project_mirror_data_on_project_id", unique: true, using: :btree + t.index ["status"], name: "index_project_mirror_data_on_status", using: :btree end - add_index "project_mirror_data", ["jid"], name: "index_project_mirror_data_on_jid", using: :btree - add_index "project_mirror_data", ["project_id"], name: "index_project_mirror_data_on_project_id", unique: true, using: :btree - add_index "project_mirror_data", ["status"], name: "index_project_mirror_data_on_status", using: :btree - create_table "project_statistics", force: :cascade do |t| t.integer "project_id", null: false t.integer "namespace_id", null: false - t.integer "commit_count", limit: 8, default: 0, null: false - t.integer "storage_size", limit: 8, default: 0, null: false - t.integer "repository_size", limit: 8, default: 0, null: false - t.integer "lfs_objects_size", limit: 8, default: 0, null: false - t.integer "build_artifacts_size", limit: 8, default: 0, null: false + t.bigint "commit_count", default: 0, null: false + t.bigint "storage_size", default: 0, null: false + t.bigint "repository_size", default: 0, null: false + t.bigint "lfs_objects_size", default: 0, null: false + t.bigint "build_artifacts_size", default: 0, null: false + t.index ["namespace_id"], name: "index_project_statistics_on_namespace_id", using: :btree + t.index ["project_id"], name: "index_project_statistics_on_project_id", unique: true, using: :btree end - add_index "project_statistics", ["namespace_id"], name: "index_project_statistics_on_namespace_id", using: :btree - add_index "project_statistics", ["project_id"], name: "index_project_statistics_on_project_id", unique: true, using: :btree - create_table "projects", force: :cascade do |t| t.string "name" t.string "path" @@ -1751,29 +1628,28 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.integer "jobs_cache_index" t.boolean "pages_https_only", default: true t.boolean "remote_mirror_available_overridden" - t.integer "pool_repository_id", limit: 8 - end - - add_index "projects", ["ci_id"], name: "index_projects_on_ci_id", using: :btree - add_index "projects", ["created_at"], name: "index_projects_on_created_at", using: :btree - add_index "projects", ["creator_id"], name: "index_projects_on_creator_id", using: :btree - add_index "projects", ["description"], name: "index_projects_on_description_trigram", using: :gin, opclasses: {"description"=>"gin_trgm_ops"} - add_index "projects", ["id"], name: "index_projects_on_id_partial_for_visibility", unique: true, where: "(visibility_level = ANY (ARRAY[10, 20]))", using: :btree - add_index "projects", ["last_activity_at"], name: "index_projects_on_last_activity_at", using: :btree - add_index "projects", ["last_repository_check_at"], name: "index_projects_on_last_repository_check_at", where: "(last_repository_check_at IS NOT NULL)", using: :btree - add_index "projects", ["last_repository_check_failed"], name: "index_projects_on_last_repository_check_failed", using: :btree - add_index "projects", 
["last_repository_updated_at"], name: "index_projects_on_last_repository_updated_at", using: :btree - add_index "projects", ["name"], name: "index_projects_on_name_trigram", using: :gin, opclasses: {"name"=>"gin_trgm_ops"} - add_index "projects", ["namespace_id"], name: "index_projects_on_namespace_id", using: :btree - add_index "projects", ["path"], name: "index_projects_on_path", using: :btree - add_index "projects", ["path"], name: "index_projects_on_path_trigram", using: :gin, opclasses: {"path"=>"gin_trgm_ops"} - add_index "projects", ["pending_delete"], name: "index_projects_on_pending_delete", using: :btree - add_index "projects", ["pool_repository_id"], name: "index_projects_on_pool_repository_id", where: "(pool_repository_id IS NOT NULL)", using: :btree - add_index "projects", ["repository_storage", "created_at"], name: "idx_project_repository_check_partial", where: "(last_repository_check_at IS NULL)", using: :btree - add_index "projects", ["repository_storage"], name: "index_projects_on_repository_storage", using: :btree - add_index "projects", ["runners_token"], name: "index_projects_on_runners_token", using: :btree - add_index "projects", ["star_count"], name: "index_projects_on_star_count", using: :btree - add_index "projects", ["visibility_level"], name: "index_projects_on_visibility_level", using: :btree + t.bigint "pool_repository_id" + t.index ["ci_id"], name: "index_projects_on_ci_id", using: :btree + t.index ["created_at"], name: "index_projects_on_created_at", using: :btree + t.index ["creator_id"], name: "index_projects_on_creator_id", using: :btree + t.index ["description"], name: "index_projects_on_description_trigram", using: :gin, opclasses: {"description"=>"gin_trgm_ops"} + t.index ["id"], name: "index_projects_on_id_partial_for_visibility", unique: true, where: "(visibility_level = ANY (ARRAY[10, 20]))", using: :btree + t.index ["last_activity_at"], name: "index_projects_on_last_activity_at", using: :btree + t.index ["last_repository_check_at"], name: "index_projects_on_last_repository_check_at", where: "(last_repository_check_at IS NOT NULL)", using: :btree + t.index ["last_repository_check_failed"], name: "index_projects_on_last_repository_check_failed", using: :btree + t.index ["last_repository_updated_at"], name: "index_projects_on_last_repository_updated_at", using: :btree + t.index ["name"], name: "index_projects_on_name_trigram", using: :gin, opclasses: {"name"=>"gin_trgm_ops"} + t.index ["namespace_id"], name: "index_projects_on_namespace_id", using: :btree + t.index ["path"], name: "index_projects_on_path", using: :btree + t.index ["path"], name: "index_projects_on_path_trigram", using: :gin, opclasses: {"path"=>"gin_trgm_ops"} + t.index ["pending_delete"], name: "index_projects_on_pending_delete", using: :btree + t.index ["pool_repository_id"], name: "index_projects_on_pool_repository_id", where: "(pool_repository_id IS NOT NULL)", using: :btree + t.index ["repository_storage", "created_at"], name: "idx_project_repository_check_partial", where: "(last_repository_check_at IS NULL)", using: :btree + t.index ["repository_storage"], name: "index_projects_on_repository_storage", using: :btree + t.index ["runners_token"], name: "index_projects_on_runners_token", using: :btree + t.index ["star_count"], name: "index_projects_on_star_count", using: :btree + t.index ["visibility_level"], name: "index_projects_on_visibility_level", using: :btree + end create_table "prometheus_metrics", force: :cascade do |t| t.integer "project_id" @@ -1787,40 +1663,36 @@ 
ActiveRecord::Schema.define(version: 20181107054254) do t.datetime_with_timezone "updated_at", null: false t.boolean "common", default: false, null: false t.string "identifier" + t.index ["common"], name: "index_prometheus_metrics_on_common", using: :btree + t.index ["group"], name: "index_prometheus_metrics_on_group", using: :btree + t.index ["identifier"], name: "index_prometheus_metrics_on_identifier", unique: true, using: :btree + t.index ["project_id"], name: "index_prometheus_metrics_on_project_id", using: :btree end - add_index "prometheus_metrics", ["common"], name: "index_prometheus_metrics_on_common", using: :btree - add_index "prometheus_metrics", ["group"], name: "index_prometheus_metrics_on_group", using: :btree - add_index "prometheus_metrics", ["identifier"], name: "index_prometheus_metrics_on_identifier", unique: true, using: :btree - add_index "prometheus_metrics", ["project_id"], name: "index_prometheus_metrics_on_project_id", using: :btree - create_table "protected_branch_merge_access_levels", force: :cascade do |t| t.integer "protected_branch_id", null: false t.integer "access_level", default: 40, null: false t.datetime "created_at", null: false t.datetime "updated_at", null: false + t.index ["protected_branch_id"], name: "index_protected_branch_merge_access", using: :btree end - add_index "protected_branch_merge_access_levels", ["protected_branch_id"], name: "index_protected_branch_merge_access", using: :btree - create_table "protected_branch_push_access_levels", force: :cascade do |t| t.integer "protected_branch_id", null: false t.integer "access_level", default: 40, null: false t.datetime "created_at", null: false t.datetime "updated_at", null: false + t.index ["protected_branch_id"], name: "index_protected_branch_push_access", using: :btree end - add_index "protected_branch_push_access_levels", ["protected_branch_id"], name: "index_protected_branch_push_access", using: :btree - create_table "protected_branches", force: :cascade do |t| t.integer "project_id", null: false t.string "name", null: false t.datetime "created_at" t.datetime "updated_at" + t.index ["project_id"], name: "index_protected_branches_on_project_id", using: :btree end - add_index "protected_branches", ["project_id"], name: "index_protected_branches_on_project_id", using: :btree - create_table "protected_tag_create_access_levels", force: :cascade do |t| t.integer "protected_tag_id", null: false t.integer "access_level", default: 40 @@ -1828,23 +1700,21 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.integer "group_id" t.datetime "created_at", null: false t.datetime "updated_at", null: false + t.index ["protected_tag_id"], name: "index_protected_tag_create_access", using: :btree + t.index ["user_id"], name: "index_protected_tag_create_access_levels_on_user_id", using: :btree end - add_index "protected_tag_create_access_levels", ["protected_tag_id"], name: "index_protected_tag_create_access", using: :btree - add_index "protected_tag_create_access_levels", ["user_id"], name: "index_protected_tag_create_access_levels_on_user_id", using: :btree - create_table "protected_tags", force: :cascade do |t| t.integer "project_id", null: false t.string "name", null: false t.datetime "created_at", null: false t.datetime "updated_at", null: false + t.index ["project_id", "name"], name: "index_protected_tags_on_project_id_and_name", unique: true, using: :btree + t.index ["project_id"], name: "index_protected_tags_on_project_id", using: :btree end - add_index "protected_tags", ["project_id", "name"], 
name: "index_protected_tags_on_project_id_and_name", unique: true, using: :btree - add_index "protected_tags", ["project_id"], name: "index_protected_tags_on_project_id", using: :btree - create_table "push_event_payloads", id: false, force: :cascade do |t| - t.integer "commit_count", limit: 8, null: false + t.bigint "commit_count", null: false t.integer "event_id", null: false t.integer "action", limit: 2, null: false t.integer "ref_type", limit: 2, null: false @@ -1852,21 +1722,19 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.binary "commit_to" t.text "ref" t.string "commit_title", limit: 70 + t.index ["event_id"], name: "index_push_event_payloads_on_event_id", unique: true, using: :btree end - add_index "push_event_payloads", ["event_id"], name: "index_push_event_payloads_on_event_id", unique: true, using: :btree - create_table "redirect_routes", force: :cascade do |t| t.integer "source_id", null: false t.string "source_type", null: false t.string "path", null: false t.datetime "created_at", null: false t.datetime "updated_at", null: false + t.index ["path"], name: "index_redirect_routes_on_path", unique: true, using: :btree + t.index ["source_type", "source_id"], name: "index_redirect_routes_on_source_type_and_source_id", using: :btree end - add_index "redirect_routes", ["path"], name: "index_redirect_routes_on_path", unique: true, using: :btree - add_index "redirect_routes", ["source_type", "source_id"], name: "index_redirect_routes_on_source_type_and_source_id", using: :btree - create_table "releases", force: :cascade do |t| t.string "tag" t.text "description" @@ -1875,11 +1743,10 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.datetime "updated_at" t.text "description_html" t.integer "cached_markdown_version" + t.index ["project_id", "tag"], name: "index_releases_on_project_id_and_tag", using: :btree + t.index ["project_id"], name: "index_releases_on_project_id", using: :btree end - add_index "releases", ["project_id", "tag"], name: "index_releases_on_project_id_and_tag", using: :btree - add_index "releases", ["project_id"], name: "index_releases_on_project_id", using: :btree - create_table "remote_mirrors", force: :cascade do |t| t.integer "project_id" t.string "url" @@ -1896,27 +1763,24 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.string "encrypted_credentials_salt" t.datetime "created_at", null: false t.datetime "updated_at", null: false + t.index ["last_successful_update_at"], name: "index_remote_mirrors_on_last_successful_update_at", using: :btree + t.index ["project_id"], name: "index_remote_mirrors_on_project_id", using: :btree end - add_index "remote_mirrors", ["last_successful_update_at"], name: "index_remote_mirrors_on_last_successful_update_at", using: :btree - add_index "remote_mirrors", ["project_id"], name: "index_remote_mirrors_on_project_id", using: :btree - create_table "repositories", id: :bigserial, force: :cascade do |t| t.integer "shard_id", null: false t.string "disk_path", null: false + t.index ["disk_path"], name: "index_repositories_on_disk_path", unique: true, using: :btree + t.index ["shard_id"], name: "index_repositories_on_shard_id", using: :btree end - add_index "repositories", ["disk_path"], name: "index_repositories_on_disk_path", unique: true, using: :btree - add_index "repositories", ["shard_id"], name: "index_repositories_on_shard_id", using: :btree - create_table "repository_languages", id: false, force: :cascade do |t| t.integer "project_id", null: false t.integer "programming_language_id", null: false 
t.float "share", null: false + t.index ["project_id", "programming_language_id"], name: "index_repository_languages_on_project_and_languages_id", unique: true, using: :btree end - add_index "repository_languages", ["project_id", "programming_language_id"], name: "index_repository_languages_on_project_and_languages_id", unique: true, using: :btree - create_table "resource_label_events", id: :bigserial, force: :cascade do |t| t.integer "action", null: false t.integer "issue_id" @@ -1927,13 +1791,12 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.integer "cached_markdown_version" t.text "reference" t.text "reference_html" + t.index ["issue_id"], name: "index_resource_label_events_on_issue_id", using: :btree + t.index ["label_id"], name: "index_resource_label_events_on_label_id", using: :btree + t.index ["merge_request_id"], name: "index_resource_label_events_on_merge_request_id", using: :btree + t.index ["user_id"], name: "index_resource_label_events_on_user_id", using: :btree end - add_index "resource_label_events", ["issue_id"], name: "index_resource_label_events_on_issue_id", using: :btree - add_index "resource_label_events", ["label_id"], name: "index_resource_label_events_on_label_id", using: :btree - add_index "resource_label_events", ["merge_request_id"], name: "index_resource_label_events_on_merge_request_id", using: :btree - add_index "resource_label_events", ["user_id"], name: "index_resource_label_events_on_user_id", using: :btree - create_table "routes", force: :cascade do |t| t.integer "source_id", null: false t.string "source_type", null: false @@ -1941,12 +1804,11 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.datetime "created_at" t.datetime "updated_at" t.string "name" + t.index ["path"], name: "index_routes_on_path", unique: true, using: :btree + t.index ["path"], name: "index_routes_on_path_text_pattern_ops", using: :btree, opclasses: {"path"=>"varchar_pattern_ops"} + t.index ["source_type", "source_id"], name: "index_routes_on_source_type_and_source_id", unique: true, using: :btree end - add_index "routes", ["path"], name: "index_routes_on_path", unique: true, using: :btree - add_index "routes", ["path"], name: "index_routes_on_path_text_pattern_ops", using: :btree, opclasses: {"path"=>"varchar_pattern_ops"} - add_index "routes", ["source_type", "source_id"], name: "index_routes_on_source_type_and_source_id", unique: true, using: :btree - create_table "sent_notifications", force: :cascade do |t| t.integer "project_id" t.integer "noteable_id" @@ -1958,10 +1820,9 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.string "note_type" t.text "position" t.string "in_reply_to_discussion_id" + t.index ["reply_key"], name: "index_sent_notifications_on_reply_key", unique: true, using: :btree end - add_index "sent_notifications", ["reply_key"], name: "index_sent_notifications_on_reply_key", unique: true, using: :btree - create_table "services", force: :cascade do |t| t.string "type" t.string "title" @@ -1984,17 +1845,15 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.boolean "commit_events", default: true, null: false t.boolean "job_events", default: false, null: false t.boolean "confidential_note_events", default: true + t.index ["project_id"], name: "index_services_on_project_id", using: :btree + t.index ["template"], name: "index_services_on_template", using: :btree end - add_index "services", ["project_id"], name: "index_services_on_project_id", using: :btree - add_index "services", ["template"], name: 
"index_services_on_template", using: :btree - create_table "shards", force: :cascade do |t| t.string "name", null: false + t.index ["name"], name: "index_shards_on_name", unique: true, using: :btree end - add_index "shards", ["name"], name: "index_shards_on_name", unique: true, using: :btree - create_table "site_statistics", force: :cascade do |t| t.integer "repositories_count", default: 0, null: false end @@ -2014,15 +1873,14 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.integer "cached_markdown_version" t.text "description" t.text "description_html" + t.index ["author_id"], name: "index_snippets_on_author_id", using: :btree + t.index ["file_name"], name: "index_snippets_on_file_name_trigram", using: :gin, opclasses: {"file_name"=>"gin_trgm_ops"} + t.index ["project_id"], name: "index_snippets_on_project_id", using: :btree + t.index ["title"], name: "index_snippets_on_title_trigram", using: :gin, opclasses: {"title"=>"gin_trgm_ops"} + t.index ["updated_at"], name: "index_snippets_on_updated_at", using: :btree + t.index ["visibility_level"], name: "index_snippets_on_visibility_level", using: :btree end - add_index "snippets", ["author_id"], name: "index_snippets_on_author_id", using: :btree - add_index "snippets", ["file_name"], name: "index_snippets_on_file_name_trigram", using: :gin, opclasses: {"file_name"=>"gin_trgm_ops"} - add_index "snippets", ["project_id"], name: "index_snippets_on_project_id", using: :btree - add_index "snippets", ["title"], name: "index_snippets_on_title_trigram", using: :gin, opclasses: {"title"=>"gin_trgm_ops"} - add_index "snippets", ["updated_at"], name: "index_snippets_on_updated_at", using: :btree - add_index "snippets", ["visibility_level"], name: "index_snippets_on_visibility_level", using: :btree - create_table "spam_logs", force: :cascade do |t| t.integer "user_id" t.string "source_ip" @@ -2045,20 +1903,18 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.datetime "created_at" t.datetime "updated_at" t.integer "project_id" + t.index ["subscribable_id", "subscribable_type", "user_id", "project_id"], name: "index_subscriptions_on_subscribable_and_user_id_and_project_id", unique: true, using: :btree end - add_index "subscriptions", ["subscribable_id", "subscribable_type", "user_id", "project_id"], name: "index_subscriptions_on_subscribable_and_user_id_and_project_id", unique: true, using: :btree - create_table "system_note_metadata", force: :cascade do |t| t.integer "note_id", null: false t.integer "commit_count" t.string "action" t.datetime "created_at", null: false t.datetime "updated_at", null: false + t.index ["note_id"], name: "index_system_note_metadata_on_note_id", unique: true, using: :btree end - add_index "system_note_metadata", ["note_id"], name: "index_system_note_metadata_on_note_id", unique: true, using: :btree - create_table "taggings", force: :cascade do |t| t.integer "tag_id" t.integer "taggable_id" @@ -2067,32 +1923,29 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.string "tagger_type" t.string "context" t.datetime "created_at" + t.index ["tag_id", "taggable_id", "taggable_type", "context", "tagger_id", "tagger_type"], name: "taggings_idx", unique: true, using: :btree + t.index ["tag_id"], name: "index_taggings_on_tag_id", using: :btree + t.index ["taggable_id", "taggable_type", "context"], name: "index_taggings_on_taggable_id_and_taggable_type_and_context", using: :btree + t.index ["taggable_id", "taggable_type"], name: "index_taggings_on_taggable_id_and_taggable_type", using: :btree end - 
add_index "taggings", ["tag_id", "taggable_id", "taggable_type", "context", "tagger_id", "tagger_type"], name: "taggings_idx", unique: true, using: :btree - add_index "taggings", ["tag_id"], name: "index_taggings_on_tag_id", using: :btree - add_index "taggings", ["taggable_id", "taggable_type", "context"], name: "index_taggings_on_taggable_id_and_taggable_type_and_context", using: :btree - add_index "taggings", ["taggable_id", "taggable_type"], name: "index_taggings_on_taggable_id_and_taggable_type", using: :btree - create_table "tags", force: :cascade do |t| t.string "name" t.integer "taggings_count", default: 0 + t.index ["name"], name: "index_tags_on_name", unique: true, using: :btree end - add_index "tags", ["name"], name: "index_tags_on_name", unique: true, using: :btree - create_table "term_agreements", force: :cascade do |t| t.integer "term_id", null: false t.integer "user_id", null: false t.boolean "accepted", default: false, null: false t.datetime_with_timezone "created_at", null: false t.datetime_with_timezone "updated_at", null: false + t.index ["term_id"], name: "index_term_agreements_on_term_id", using: :btree + t.index ["user_id", "term_id"], name: "term_agreements_unique_index", unique: true, using: :btree + t.index ["user_id"], name: "index_term_agreements_on_user_id", using: :btree end - add_index "term_agreements", ["term_id"], name: "index_term_agreements_on_term_id", using: :btree - add_index "term_agreements", ["user_id", "term_id"], name: "term_agreements_unique_index", unique: true, using: :btree - add_index "term_agreements", ["user_id"], name: "index_term_agreements_on_user_id", using: :btree - create_table "timelogs", force: :cascade do |t| t.integer "time_spent", null: false t.integer "user_id" @@ -2101,12 +1954,11 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.integer "issue_id" t.integer "merge_request_id" t.datetime_with_timezone "spent_at" + t.index ["issue_id"], name: "index_timelogs_on_issue_id", using: :btree + t.index ["merge_request_id"], name: "index_timelogs_on_merge_request_id", using: :btree + t.index ["user_id"], name: "index_timelogs_on_user_id", using: :btree end - add_index "timelogs", ["issue_id"], name: "index_timelogs_on_issue_id", using: :btree - add_index "timelogs", ["merge_request_id"], name: "index_timelogs_on_merge_request_id", using: :btree - add_index "timelogs", ["user_id"], name: "index_timelogs_on_user_id", using: :btree - create_table "todos", force: :cascade do |t| t.integer "user_id", null: false t.integer "project_id" @@ -2120,24 +1972,22 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.integer "note_id" t.string "commit_id" t.integer "group_id" + t.index ["author_id"], name: "index_todos_on_author_id", using: :btree + t.index ["commit_id"], name: "index_todos_on_commit_id", using: :btree + t.index ["group_id"], name: "index_todos_on_group_id", using: :btree + t.index ["note_id"], name: "index_todos_on_note_id", using: :btree + t.index ["project_id"], name: "index_todos_on_project_id", using: :btree + t.index ["target_type", "target_id"], name: "index_todos_on_target_type_and_target_id", using: :btree + t.index ["user_id", "id"], name: "index_todos_on_user_id_and_id_done", where: "((state)::text = 'done'::text)", using: :btree + t.index ["user_id", "id"], name: "index_todos_on_user_id_and_id_pending", where: "((state)::text = 'pending'::text)", using: :btree + t.index ["user_id"], name: "index_todos_on_user_id", using: :btree end - add_index "todos", ["author_id"], name: "index_todos_on_author_id", 
using: :btree - add_index "todos", ["commit_id"], name: "index_todos_on_commit_id", using: :btree - add_index "todos", ["group_id"], name: "index_todos_on_group_id", using: :btree - add_index "todos", ["note_id"], name: "index_todos_on_note_id", using: :btree - add_index "todos", ["project_id"], name: "index_todos_on_project_id", using: :btree - add_index "todos", ["target_type", "target_id"], name: "index_todos_on_target_type_and_target_id", using: :btree - add_index "todos", ["user_id", "id"], name: "index_todos_on_user_id_and_id_done", where: "((state)::text = 'done'::text)", using: :btree - add_index "todos", ["user_id", "id"], name: "index_todos_on_user_id_and_id_pending", where: "((state)::text = 'pending'::text)", using: :btree - add_index "todos", ["user_id"], name: "index_todos_on_user_id", using: :btree - create_table "trending_projects", force: :cascade do |t| t.integer "project_id", null: false + t.index ["project_id"], name: "index_trending_projects_on_project_id", unique: true, using: :btree end - add_index "trending_projects", ["project_id"], name: "index_trending_projects_on_project_id", unique: true, using: :btree - create_table "u2f_registrations", force: :cascade do |t| t.text "certificate" t.string "key_handle" @@ -2147,13 +1997,12 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.datetime "created_at", null: false t.datetime "updated_at", null: false t.string "name" + t.index ["key_handle"], name: "index_u2f_registrations_on_key_handle", using: :btree + t.index ["user_id"], name: "index_u2f_registrations_on_user_id", using: :btree end - add_index "u2f_registrations", ["key_handle"], name: "index_u2f_registrations_on_key_handle", using: :btree - add_index "u2f_registrations", ["user_id"], name: "index_u2f_registrations_on_user_id", using: :btree - create_table "uploads", force: :cascade do |t| - t.integer "size", limit: 8, null: false + t.bigint "size", null: false t.string "path", limit: 511, null: false t.string "checksum", limit: 64 t.integer "model_id" @@ -2163,13 +2012,12 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.string "mount_point" t.string "secret" t.integer "store" + t.index ["checksum"], name: "index_uploads_on_checksum", using: :btree + t.index ["model_id", "model_type"], name: "index_uploads_on_model_id_and_model_type", using: :btree + t.index ["store"], name: "index_uploads_on_store", using: :btree + t.index ["uploader", "path"], name: "index_uploads_on_uploader_and_path", using: :btree end - add_index "uploads", ["checksum"], name: "index_uploads_on_checksum", using: :btree - add_index "uploads", ["model_id", "model_type"], name: "index_uploads_on_model_id_and_model_type", using: :btree - add_index "uploads", ["store"], name: "index_uploads_on_store", using: :btree - add_index "uploads", ["uploader", "path"], name: "index_uploads_on_uploader_and_path", using: :btree - create_table "user_agent_details", force: :cascade do |t| t.string "user_agent", null: false t.string "ip_address", null: false @@ -2178,47 +2026,42 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.boolean "submitted", default: false, null: false t.datetime "created_at", null: false t.datetime "updated_at", null: false + t.index ["subject_id", "subject_type"], name: "index_user_agent_details_on_subject_id_and_subject_type", using: :btree end - add_index "user_agent_details", ["subject_id", "subject_type"], name: "index_user_agent_details_on_subject_id_and_subject_type", using: :btree - create_table "user_callouts", force: :cascade do |t| t.integer 
"feature_name", null: false t.integer "user_id", null: false + t.index ["user_id", "feature_name"], name: "index_user_callouts_on_user_id_and_feature_name", unique: true, using: :btree + t.index ["user_id"], name: "index_user_callouts_on_user_id", using: :btree end - add_index "user_callouts", ["user_id", "feature_name"], name: "index_user_callouts_on_user_id_and_feature_name", unique: true, using: :btree - add_index "user_callouts", ["user_id"], name: "index_user_callouts_on_user_id", using: :btree - create_table "user_custom_attributes", force: :cascade do |t| t.datetime_with_timezone "created_at", null: false t.datetime_with_timezone "updated_at", null: false t.integer "user_id", null: false t.string "key", null: false t.string "value", null: false + t.index ["key", "value"], name: "index_user_custom_attributes_on_key_and_value", using: :btree + t.index ["user_id", "key"], name: "index_user_custom_attributes_on_user_id_and_key", unique: true, using: :btree end - add_index "user_custom_attributes", ["key", "value"], name: "index_user_custom_attributes_on_key_and_value", using: :btree - add_index "user_custom_attributes", ["user_id", "key"], name: "index_user_custom_attributes_on_user_id_and_key", unique: true, using: :btree - create_table "user_interacted_projects", id: false, force: :cascade do |t| t.integer "user_id", null: false t.integer "project_id", null: false + t.index ["project_id", "user_id"], name: "index_user_interacted_projects_on_project_id_and_user_id", unique: true, using: :btree + t.index ["user_id"], name: "index_user_interacted_projects_on_user_id", using: :btree end - add_index "user_interacted_projects", ["project_id", "user_id"], name: "index_user_interacted_projects_on_project_id_and_user_id", unique: true, using: :btree - add_index "user_interacted_projects", ["user_id"], name: "index_user_interacted_projects_on_user_id", using: :btree - create_table "user_preferences", force: :cascade do |t| t.integer "user_id", null: false t.integer "issue_notes_filter", limit: 2, default: 0, null: false t.integer "merge_request_notes_filter", limit: 2, default: 0, null: false t.datetime_with_timezone "created_at", null: false t.datetime_with_timezone "updated_at", null: false + t.index ["user_id"], name: "index_user_preferences_on_user_id", unique: true, using: :btree end - add_index "user_preferences", ["user_id"], name: "index_user_preferences_on_user_id", unique: true, using: :btree - create_table "user_statuses", primary_key: "user_id", force: :cascade do |t| t.integer "cached_markdown_version" t.string "emoji", default: "speech_balloon", null: false @@ -2232,10 +2075,9 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.boolean "location_synced", default: false t.integer "user_id", null: false t.string "provider" + t.index ["user_id"], name: "index_user_synced_attributes_metadata_on_user_id", unique: true, using: :btree end - add_index "user_synced_attributes_metadata", ["user_id"], name: "index_user_synced_attributes_metadata_on_user_id", unique: true, using: :btree - create_table "users", force: :cascade do |t| t.string "email", default: "", null: false t.string "encrypted_password", default: "", null: false @@ -2305,33 +2147,31 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.boolean "private_profile" t.boolean "include_private_contributions" t.string "commit_email" + t.index ["admin"], name: "index_users_on_admin", using: :btree + t.index ["confirmation_token"], name: "index_users_on_confirmation_token", unique: true, using: :btree + t.index 
["created_at"], name: "index_users_on_created_at", using: :btree + t.index ["email"], name: "index_users_on_email", unique: true, using: :btree + t.index ["email"], name: "index_users_on_email_trigram", using: :gin, opclasses: {"email"=>"gin_trgm_ops"} + t.index ["feed_token"], name: "index_users_on_feed_token", using: :btree + t.index ["ghost"], name: "index_users_on_ghost", using: :btree + t.index ["incoming_email_token"], name: "index_users_on_incoming_email_token", using: :btree + t.index ["name"], name: "index_users_on_name", using: :btree + t.index ["name"], name: "index_users_on_name_trigram", using: :gin, opclasses: {"name"=>"gin_trgm_ops"} + t.index ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true, using: :btree + t.index ["state"], name: "index_users_on_state", using: :btree + t.index ["username"], name: "index_users_on_username", using: :btree + t.index ["username"], name: "index_users_on_username_trigram", using: :gin, opclasses: {"username"=>"gin_trgm_ops"} end - add_index "users", ["admin"], name: "index_users_on_admin", using: :btree - add_index "users", ["confirmation_token"], name: "index_users_on_confirmation_token", unique: true, using: :btree - add_index "users", ["created_at"], name: "index_users_on_created_at", using: :btree - add_index "users", ["email"], name: "index_users_on_email", unique: true, using: :btree - add_index "users", ["email"], name: "index_users_on_email_trigram", using: :gin, opclasses: {"email"=>"gin_trgm_ops"} - add_index "users", ["feed_token"], name: "index_users_on_feed_token", using: :btree - add_index "users", ["ghost"], name: "index_users_on_ghost", using: :btree - add_index "users", ["incoming_email_token"], name: "index_users_on_incoming_email_token", using: :btree - add_index "users", ["name"], name: "index_users_on_name", using: :btree - add_index "users", ["name"], name: "index_users_on_name_trigram", using: :gin, opclasses: {"name"=>"gin_trgm_ops"} - add_index "users", ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true, using: :btree - add_index "users", ["state"], name: "index_users_on_state", using: :btree - add_index "users", ["username"], name: "index_users_on_username", using: :btree - add_index "users", ["username"], name: "index_users_on_username_trigram", using: :gin, opclasses: {"username"=>"gin_trgm_ops"} - create_table "users_star_projects", force: :cascade do |t| t.integer "project_id", null: false t.integer "user_id", null: false t.datetime "created_at" t.datetime "updated_at" + t.index ["project_id"], name: "index_users_star_projects_on_project_id", using: :btree + t.index ["user_id", "project_id"], name: "index_users_star_projects_on_user_id_and_project_id", unique: true, using: :btree end - add_index "users_star_projects", ["project_id"], name: "index_users_star_projects_on_project_id", using: :btree - add_index "users_star_projects", ["user_id", "project_id"], name: "index_users_star_projects_on_user_id_and_project_id", unique: true, using: :btree - create_table "web_hook_logs", force: :cascade do |t| t.integer "web_hook_id", null: false t.string "trigger" @@ -2345,11 +2185,10 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.string "internal_error_message" t.datetime "created_at", null: false t.datetime "updated_at", null: false + t.index ["created_at", "web_hook_id"], name: "index_web_hook_logs_on_created_at_and_web_hook_id", using: :btree + t.index ["web_hook_id"], name: "index_web_hook_logs_on_web_hook_id", using: :btree end - add_index 
"web_hook_logs", ["created_at", "web_hook_id"], name: "index_web_hook_logs_on_created_at_and_web_hook_id", using: :btree - add_index "web_hook_logs", ["web_hook_id"], name: "index_web_hook_logs_on_web_hook_id", using: :btree - create_table "web_hooks", force: :cascade do |t| t.integer "project_id" t.datetime "created_at" @@ -2373,11 +2212,10 @@ ActiveRecord::Schema.define(version: 20181107054254) do t.string "encrypted_token_iv" t.string "encrypted_url" t.string "encrypted_url_iv" + t.index ["project_id"], name: "index_web_hooks_on_project_id", using: :btree + t.index ["type"], name: "index_web_hooks_on_type", using: :btree end - add_index "web_hooks", ["project_id"], name: "index_web_hooks_on_project_id", using: :btree - add_index "web_hooks", ["type"], name: "index_web_hooks_on_type", using: :btree - add_foreign_key "application_settings", "users", column: "usage_stats_set_by_user_id", name: "fk_964370041d", on_delete: :nullify add_foreign_key "badges", "namespaces", column: "group_id", on_delete: :cascade add_foreign_key "badges", "projects", on_delete: :cascade @@ -2450,9 +2288,6 @@ ActiveRecord::Schema.define(version: 20181107054254) do add_foreign_key "fork_network_members", "projects", on_delete: :cascade add_foreign_key "fork_networks", "projects", column: "root_project_id", name: "fk_e7b436b2b5", on_delete: :nullify add_foreign_key "forked_project_links", "projects", column: "forked_to_project_id", name: "fk_434510edb0", on_delete: :cascade - add_foreign_key "gcp_clusters", "projects", on_delete: :cascade - add_foreign_key "gcp_clusters", "services", on_delete: :nullify - add_foreign_key "gcp_clusters", "users", on_delete: :nullify add_foreign_key "gpg_key_subkeys", "gpg_keys", on_delete: :cascade add_foreign_key "gpg_keys", "users", on_delete: :cascade add_foreign_key "gpg_signatures", "gpg_key_subkeys", on_delete: :nullify diff --git a/doc/administration/auth/authentiq.md b/doc/administration/auth/authentiq.md index 252ff1f4b15..772e55cef07 100644 --- a/doc/administration/auth/authentiq.md +++ b/doc/administration/auth/authentiq.md @@ -6,7 +6,7 @@ Authentiq will generate a Client ID and the accompanying Client Secret for you t 1. Get your Client credentials (Client ID and Client Secret) at [Authentiq](https://www.authentiq.com/developers). -2. On your GitLab server, open the configuration file: +1. On your GitLab server, open the configuration file: For omnibus installation ```sh @@ -18,11 +18,11 @@ Authentiq will generate a Client ID and the accompanying Client Secret for you t ```sh sudo -u git -H editor /home/git/gitlab/config/gitlab.yml ``` - -3. See [Initial OmniAuth Configuration](../../integration/omniauth.md#initial-omniauth-configuration) for initial settings to enable single sign-on and add Authentiq as an OAuth provider. -4. Add the provider configuration for Authentiq: - +1. See [Initial OmniAuth Configuration](../../integration/omniauth.md#initial-omniauth-configuration) for initial settings to enable single sign-on and add Authentiq as an OAuth provider. + +1. 
Add the provider configuration for Authentiq: + For Omnibus packages: ```ruby @@ -31,15 +31,15 @@ Authentiq will generate a Client ID and the accompanying Client Secret for you t "name" => "authentiq", "app_id" => "YOUR_CLIENT_ID", "app_secret" => "YOUR_CLIENT_SECRET", - "args" => { + "args" => { "scope": 'aq:name email~rs address aq:push' } } ] ``` - + For installations from source: - + ```yaml - { name: 'authentiq', app_id: 'YOUR_CLIENT_ID', @@ -49,20 +49,20 @@ Authentiq will generate a Client ID and the accompanying Client Secret for you t } } ``` - - -5. The `scope` is set to request the user's name, email (required and signed), and permission to send push notifications to sign in on subsequent visits. + + +1. The `scope` is set to request the user's name, email (required and signed), and permission to send push notifications to sign in on subsequent visits. See [OmniAuth Authentiq strategy](https://github.com/AuthentiqID/omniauth-authentiq/wiki/Scopes,-callback-url-configuration-and-responses) for more information on scopes and modifiers. -6. Change `YOUR_CLIENT_ID` and `YOUR_CLIENT_SECRET` to the Client credentials you received in step 1. +1. Change `YOUR_CLIENT_ID` and `YOUR_CLIENT_SECRET` to the Client credentials you received in step 1. -7. Save the configuration file. +1. Save the configuration file. -8. [Reconfigure](../restart_gitlab.md#omnibus-gitlab-reconfigure) or [restart GitLab](../restart_gitlab.md#installations-from-source) for the changes to take effect if you installed GitLab via Omnibus or from source respectively. +1. [Reconfigure](../restart_gitlab.md#omnibus-gitlab-reconfigure) or [restart GitLab](../restart_gitlab.md#installations-from-source) for the changes to take effect if you installed GitLab via Omnibus or from source respectively. -On the sign in page there should now be an Authentiq icon below the regular sign in form. Click the icon to begin the authentication process. +On the sign in page there should now be an Authentiq icon below the regular sign in form. Click the icon to begin the authentication process. -- If the user has the Authentiq ID app installed in their iOS or Android device, they can scan the QR code, decide what personal details to share and sign in to your GitLab installation. -- If not they will be prompted to download the app and then follow the procedure above. +- If the user has the Authentiq ID app installed in their iOS or Android device, they can scan the QR code, decide what personal details to share and sign in to your GitLab installation. +- If not they will be prompted to download the app and then follow the procedure above. -If everything goes right, the user will be returned to GitLab and will be signed in.
\ No newline at end of file +If everything goes right, the user will be returned to GitLab and will be signed in. diff --git a/doc/administration/high_availability/nfs.md b/doc/administration/high_availability/nfs.md index 481eb692674..74b0e2c8184 100644 --- a/doc/administration/high_availability/nfs.md +++ b/doc/administration/high_availability/nfs.md @@ -59,7 +59,7 @@ on an Linux NFS server, do the following: sysctl -w fs.leases-enable=0 ``` -2. Restart the NFS server process. For example, on CentOS run `service nfs restart`. +1. Restart the NFS server process. For example, on CentOS run `service nfs restart`. ## Avoid using AWS's Elastic File System (EFS) @@ -87,12 +87,12 @@ this configuration. Additionally, this configuration is specifically warned against in the [Postgres Documentation](https://www.postgresql.org/docs/current/static/creating-cluster.html#CREATING-CLUSTER-NFS): ->PostgreSQL does nothing special for NFS file systems, meaning it assumes NFS behaves exactly like ->locally-connected drives. If the client or server NFS implementation does not provide standard file ->system semantics, this can cause reliability problems. Specifically, delayed (asynchronous) writes +>PostgreSQL does nothing special for NFS file systems, meaning it assumes NFS behaves exactly like +>locally-connected drives. If the client or server NFS implementation does not provide standard file +>system semantics, this can cause reliability problems. Specifically, delayed (asynchronous) writes >to the NFS server can cause data corruption problems. -For supported database architecture, please see our documentation on +For supported database architecture, please see our documentation on [Configuring a Database for GitLab HA](https://docs.gitlab.com/ee/administration/high_availability/database.html). ## NFS Client mount options diff --git a/doc/administration/high_availability/redis.md b/doc/administration/high_availability/redis.md index dcee57def74..7c1ef43499d 100644 --- a/doc/administration/high_availability/redis.md +++ b/doc/administration/high_availability/redis.md @@ -665,7 +665,7 @@ cache, queues, and shared_state. To make this work with Sentinel: **Note**: Redis URLs should be in the format: `redis://:PASSWORD@SENTINEL_MASTER_NAME` 1. PASSWORD is the plaintext password for the Redis instance - 2. SENTINEL_MASTER_NAME is the Sentinel master name (e.g. `gitlab-redis-cache`) + 1. SENTINEL_MASTER_NAME is the Sentinel master name (e.g. `gitlab-redis-cache`) 1. Include an array of hashes with host/port combinations, such as the following: ```ruby diff --git a/doc/administration/logs.md b/doc/administration/logs.md index 038e043281c..7e5a3eb9ccd 100644 --- a/doc/administration/logs.md +++ b/doc/administration/logs.md @@ -29,9 +29,9 @@ Each line contains a JSON line that can be ingested by Elasticsearch, Splunk, et In this example, you can see this was a GET request for a specific issue. Notice each line also contains performance data: 1. `duration`: the total time taken to retrieve the request -2. `view`: total time taken inside the Rails views -3. `db`: total time to retrieve data from the database -4. `gitaly_calls`: total number of calls made to Gitaly +1. `view`: total time taken inside the Rails views +1. `db`: total time to retrieve data from the database +1. `gitaly_calls`: total number of calls made to Gitaly User clone/fetch activity using http transport appears in this log as `action: git_upload_pack`. 
@@ -119,7 +119,7 @@ This file lives in `/var/log/gitlab/gitlab-rails/integrations_json.log` for Omnibus GitLab packages or in `/home/git/gitlab/log/integrations_json.log` for installations from source. -It contains information about [integrations](../user/project/integrations/project_services.md) activities such as JIRA, Asana and Irker services. It uses JSON format like the example below: +It contains information about [integrations](../user/project/integrations/project_services.md) activities such as JIRA, Asana and Irker services. It uses JSON format like the example below: ``` json {"severity":"ERROR","time":"2018-09-06T14:56:20.439Z","service_class":"JiraService","project_id":8,"project_path":"h5bp/html5-boilerplate","message":"Error sending message","client_url":"http://jira.gitlap.com:8080","error":"execution expired"} @@ -257,8 +257,8 @@ importer. Future importers may use this file. ## Reconfigure Logs -Reconfigure log files live in `/var/log/gitlab/reconfigure` for Omnibus GitLab -packages. Installations from source don't have reconfigure logs. A reconfigure log +Reconfigure log files live in `/var/log/gitlab/reconfigure` for Omnibus GitLab +packages. Installations from source don't have reconfigure logs. A reconfigure log is populated whenever `gitlab-ctl reconfigure` is run manually or as part of an upgrade. Reconfigure logs files are named according to the UNIX timestamp of when the reconfigure diff --git a/doc/administration/monitoring/performance/influxdb_configuration.md b/doc/administration/monitoring/performance/influxdb_configuration.md index c30cd2950d8..fa281f47ed8 100644 --- a/doc/administration/monitoring/performance/influxdb_configuration.md +++ b/doc/administration/monitoring/performance/influxdb_configuration.md @@ -95,10 +95,10 @@ UDP can be done using the following settings: This does the following: 1. Enable UDP and bind it to port 8089 for all addresses. -2. Store any data received in the "gitlab" database. -3. Define a batch of points to be 1000 points in size and allow a maximum of +1. Store any data received in the "gitlab" database. +1. Define a batch of points to be 1000 points in size and allow a maximum of 5 batches _or_ flush them automatically after 1 second. -4. Define a UDP read buffer size of 200 MB. +1. Define a UDP read buffer size of 200 MB. One of the most important settings here is the UDP read buffer size as if this value is set too low, packets will be dropped. You must also make sure the OS diff --git a/doc/administration/monitoring/prometheus/index.md b/doc/administration/monitoring/prometheus/index.md index b1b670c3b42..33611c5efc3 100644 --- a/doc/administration/monitoring/prometheus/index.md +++ b/doc/administration/monitoring/prometheus/index.md @@ -1,4 +1,4 @@ -# GitLab Prometheus +# Monitoring GitLab with Prometheus > **Notes:** > - Prometheus and the various exporters listed in this page are bundled in the @@ -24,7 +24,7 @@ dashboard tool like [Grafana]. ## Configuring Prometheus ->**Note:** +NOTE: **Note:** For installations from source you'll have to install and configure it yourself. Prometheus and it's exporters are on by default, starting with GitLab 9.0. @@ -43,17 +43,17 @@ To disable Prometheus and all of its exporters, as well as any added in the futu ``` 1. Save the file and [reconfigure GitLab][reconfigure] for the changes to - take effect + take effect. 
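For reference, the `gitlab.rb` change that the list above refers to is normally a single setting. A minimal sketch, assuming the `prometheus_monitoring['enable']` key used by Omnibus GitLab (verify the exact key against the full document for your version):

```ruby
# /etc/gitlab/gitlab.rb
# Sketch only: disables the bundled Prometheus and all bundled exporters.
prometheus_monitoring['enable'] = false
```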
-## Changing the port and address Prometheus listens on +### Changing the port and address Prometheus listens on ->**Note:** +NOTE: **Note:** The following change was added in [GitLab Omnibus 8.17][1261]. Although possible, it's not recommended to change the port Prometheus listens on as this might affect or conflict with other services running on the GitLab server. Proceed at your own risk. -In order to access Prometheus from outside the GitLab server you will need to +In order to access Prometheus from outside the GitLab server you will need to set a FQDN or IP in `prometheus['listen_address']`. To change the address/port that Prometheus listens on: @@ -77,6 +77,60 @@ To change the address/port that Prometheus listens on: 1. Save the file and [reconfigure GitLab][reconfigure] for the changes to take effect +### Using an external Prometheus server + +NOTE: **Note:** +Prometheus and most exporters do not support authentication. We do not recommend exposing them outside the local network. + +A few configuration changes are required to allow GitLab to be monitored by an external Prometheus server. External servers are recommended for highly available deployments of GitLab with multiple nodes. + +To use an external Prometheus server: + +1. Edit `/etc/gitlab/gitlab.rb`. +1. Disable the bundled Prometheus: + + ```ruby + prometheus['enable'] = false + ``` + +1. Set each bundled service's [exporter](#bundled-software-metrics) to listen on a network address, for example: + + ```ruby + gitlab_monitor['listen_address'] = '0.0.0.0' + gitlab_monitor['listen_port'] = '9168' + gitaly['prometheus_listen_addr'] = "0.0.0.0:9236" + node_exporter['listen_address'] = '0.0.0.0:9100' + redis_exporter['listen_address'] = '0.0.0.0:9121' + postgres_exporter['listen_address'] = '0.0.0.0:9187' + ``` + +1. Install and set up a dedicated Prometheus instance, if necessary, using the [official installation instructions](https://prometheus.io/docs/prometheus/latest/installation/). +1. Add the Prometheus server IP address to the [monitoring IP whitelist](../ip_whitelist.html). For example: + + ```ruby + gitlab_rails['monitoring_whitelist'] = ['127.0.0.0/8', '192.168.0.1'] + ``` + +1. [Reconfigure GitLab][reconfigure] to apply the changes +1. Edit the Prometheus server's configuration file. +1. Add each node's exporters to the Prometheus server's + [scrape target configuration](https://prometheus.io/docs/prometheus/latest/configuration/configuration/#%3Cscrape_config%3E). + For example, a sample snippet using `static_configs`: + + ```yaml + scrape_configs: + - job_name: 'gitlab_exporters' + static_configs: + - targets: ['1.1.1.1:9168', '1.1.1.1:9236', '1.1.1.1:9236', '1.1.1.1:9100', '1.1.1.1:9121', '1.1.1.1:9187'] + + - job_name: 'gitlab_metrics' + metrics_path: /-/metrics + static_configs: + - targets: ['1.1.1.1:443'] + ``` + +1. Restart the Prometheus server. + ## Viewing performance metrics You can visit `http://localhost:9090` for the dashboard that Prometheus offers by default. @@ -86,7 +140,7 @@ If SSL has been enabled on your GitLab instance, you may not be able to access Prometheus on the same browser as GitLab if using the same FQDN due to [HSTS][hsts]. We plan to [provide access via GitLab][multi-user-prometheus], but in the interim there are some workarounds: using a separate FQDN, using server IP, using a separate browser for Prometheus, resetting HSTS, or -having [Nginx proxy it][nginx-custom-config]. +having [NGINX proxy it][nginx-custom-config]. 
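As a companion to the `prometheus['listen_address']` setting mentioned above, and to the "using server IP" workaround, a minimal sketch of the relevant `gitlab.rb` line; the address and port are example values only:

```ruby
# /etc/gitlab/gitlab.rb
# Example values only: listen on all interfaces on the default Prometheus port.
prometheus['listen_address'] = '0.0.0.0:9090'
```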
The performance data collected by Prometheus can be viewed directly in the Prometheus console or through a compatible dashboard tool. @@ -102,26 +156,7 @@ Sample Prometheus queries: - **Data transmitted:** `rate(node_network_transmit_bytes_total{device!="lo"}[5m])` - **Data received:** `rate(node_network_receive_bytes_total{device!="lo"}[5m])` -## Configuring Prometheus to monitor Kubernetes - -> Introduced in GitLab 9.0. -> Pod monitoring introduced in GitLab 9.4. - -If your GitLab server is running within Kubernetes, Prometheus will collect metrics from the Nodes and [annotated Pods](https://prometheus.io/docs/operating/configuration/#kubernetes_sd_config) in the cluster, including performance data on each container. This is particularly helpful if your CI/CD environments run in the same cluster, as you can use the [Prometheus project integration][] to monitor them. - -To disable the monitoring of Kubernetes: - -1. Edit `/etc/gitlab/gitlab.rb` -1. Add or find and uncomment the following line and set it to `false`: - - ```ruby - prometheus['monitor_kubernetes'] = false - ``` - -1. Save the file and [reconfigure GitLab][reconfigure] for the changes to - take effect - -## GitLab Prometheus metrics +## GitLab metrics > Introduced in GitLab 9.3. @@ -129,17 +164,10 @@ GitLab monitors its own internal service metrics, and makes them available at th [➔ Read more about the GitLab Metrics.](gitlab_metrics.md) -## Prometheus exporters - -There are a number of libraries and servers which help in exporting existing -metrics from third-party systems as Prometheus metrics. This is useful for cases -where it is not feasible to instrument a given system with Prometheus metrics -directly (for example, HAProxy or Linux system stats). You can read more in the -[Prometheus exporters and integrations upstream documentation][prom-exporters]. +## Bundled software metrics -While you can use any exporter you like with your GitLab installation, the -following ones documented here are bundled in the Omnibus GitLab packages -making it easy to configure and use. +Many of the GitLab dependencies bundled in Omnibus GitLab are preconfigured to +export Prometheus metrics. ### Node exporter @@ -166,6 +194,25 @@ The GitLab monitor exporter allows you to measure various GitLab metrics, pulled [➔ Read more about the GitLab monitor exporter.](gitlab_monitor_exporter.md) +## Configuring Prometheus to monitor Kubernetes + +> Introduced in GitLab 9.0. +> Pod monitoring introduced in GitLab 9.4. + +If your GitLab server is running within Kubernetes, Prometheus will collect metrics from the Nodes and [annotated Pods](https://prometheus.io/docs/operating/configuration/#kubernetes_sd_config) in the cluster, including performance data on each container. This is particularly helpful if your CI/CD environments run in the same cluster, as you can use the [Prometheus project integration][prometheus integration] to monitor them. + +To disable the monitoring of Kubernetes: + +1. Edit `/etc/gitlab/gitlab.rb`. +1. Add or find and uncomment the following line and set it to `false`: + + ```ruby + prometheus['monitor_kubernetes'] = false + ``` + +1. Save the file and [reconfigure GitLab][reconfigure] for the changes to + take effect. 
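If Kubernetes monitoring is left enabled, Pods are typically discovered through annotations. The keys below follow the common `prometheus.io/*` convention and are shown as an assumption; the annotations actually honored depend on the bundled scrape configuration:

```yaml
# Hypothetical Pod metadata; annotation keys follow the usual prometheus.io convention.
metadata:
  annotations:
    prometheus.io/scrape: "true"
    prometheus.io/port: "9121"
    prometheus.io/path: "/metrics"
```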
+ [grafana]: https://grafana.net [hsts]: https://en.wikipedia.org/wiki/HTTP_Strict_Transport_Security [multi-user-prometheus]: https://gitlab.com/gitlab-org/multi-user-prometheus diff --git a/doc/administration/operations/fast_ssh_key_lookup.md b/doc/administration/operations/fast_ssh_key_lookup.md index eada7b19dcd..c293df3fc57 100644 --- a/doc/administration/operations/fast_ssh_key_lookup.md +++ b/doc/administration/operations/fast_ssh_key_lookup.md @@ -5,7 +5,7 @@ NOTE: **Note:** This document describes a drop-in replacement for the using [ssh certificates](ssh_certificates.md), they are even faster, but are not a drop-in replacement. -> [Introduced](https://gitlab.com/gitlab-org/gitlab-ee/issues/1631) in +> [Introduced](https://gitlab.com/gitlab-org/gitlab-ee/issues/1631) in > [GitLab Starter](https://about.gitlab.com/gitlab-ee) 9.3. > > [Available in](https://gitlab.com/gitlab-org/gitlab-ee/issues/3953) GitLab @@ -109,7 +109,7 @@ the database. The following instructions can be used to build OpenSSH 7.5: yum install rpm-build gcc make wget openssl-devel krb5-devel pam-devel libX11-devel xmkmf libXt-devel ``` -3. Prepare the build by copying files to the right place: +1. Prepare the build by copying files to the right place: ``` mkdir -p /root/rpmbuild/{SOURCES,SPECS} @@ -118,7 +118,7 @@ the database. The following instructions can be used to build OpenSSH 7.5: cd /root/rpmbuild/SPECS ``` -3. Next, set the spec settings properly: +1. Next, set the spec settings properly: ``` sed -i -e "s/%define no_gnome_askpass 0/%define no_gnome_askpass 1/g" openssh.spec @@ -126,13 +126,13 @@ the database. The following instructions can be used to build OpenSSH 7.5: sed -i -e "s/BuildPreReq/BuildRequires/g" openssh.spec ``` -3. Build the RPMs: +1. Build the RPMs: ``` rpmbuild -bb openssh.spec ``` -4. Ensure the RPMs were built: +1. Ensure the RPMs were built: ``` ls -al /root/rpmbuild/RPMS/x86_64/ @@ -150,7 +150,7 @@ the database. The following instructions can be used to build OpenSSH 7.5: -rw-r--r--. 1 root root 367516 Jun 20 19:37 openssh-server-7.5p1-1.x86_64.rpm ``` -5. Install the packages. OpenSSH packages will replace `/etc/pam.d/sshd` +1. Install the packages. OpenSSH packages will replace `/etc/pam.d/sshd` with its own version, which may prevent users from logging in, so be sure that the file is backed up and restored after installation: @@ -161,7 +161,7 @@ the database. The following instructions can be used to build OpenSSH 7.5: yes | cp pam-ssh-conf-$timestamp /etc/pam.d/sshd ``` -6. Verify the installed version. In another window, attempt to login to the server: +1. Verify the installed version. In another window, attempt to login to the server: ``` ssh -v <your-centos-machine> @@ -171,7 +171,7 @@ the database. The following instructions can be used to build OpenSSH 7.5: If not, you may need to restart sshd (e.g. `systemctl restart sshd.service`). -7. *IMPORTANT!* Open a new SSH session to your server before exiting to make +1. *IMPORTANT!* Open a new SSH session to your server before exiting to make sure everything is working! If you need to downgrade, simple install the older package: diff --git a/doc/administration/reply_by_email_postfix_setup.md b/doc/administration/reply_by_email_postfix_setup.md index d1a03219542..4c42cb7756a 100644 --- a/doc/administration/reply_by_email_postfix_setup.md +++ b/doc/administration/reply_by_email_postfix_setup.md @@ -8,7 +8,7 @@ The instructions make the assumption that you will be using the email address `i ## Configure your server firewall 1. 
Open up port 25 on your server so that people can send email into the server over SMTP. -2. If the mail server is different from the server running GitLab, open up port 143 on your server so that GitLab can read email from the server over IMAP. +1. If the mail server is different from the server running GitLab, open up port 143 on your server so that GitLab can read email from the server over IMAP. ## Install packages diff --git a/doc/administration/troubleshooting/debug.md b/doc/administration/troubleshooting/debug.md index 2902af8c782..643c5b9fe80 100644 --- a/doc/administration/troubleshooting/debug.md +++ b/doc/administration/troubleshooting/debug.md @@ -20,7 +20,7 @@ an SMTP server, but you're not seeing mail delivered. Here's how to check the se bundle exec rails console production ``` -2. Look at the ActionMailer `delivery_method` to make sure it matches what you +1. Look at the ActionMailer `delivery_method` to make sure it matches what you intended. If you configured SMTP, it should say `:smtp`. If you're using Sendmail, it should say `:sendmail`: @@ -29,7 +29,7 @@ an SMTP server, but you're not seeing mail delivered. Here's how to check the se => :smtp ``` -3. If you're using SMTP, check the mail settings: +1. If you're using SMTP, check the mail settings: ```ruby irb(main):002:0> ActionMailer::Base.smtp_settings @@ -39,7 +39,7 @@ an SMTP server, but you're not seeing mail delivered. Here's how to check the se In the example above, the SMTP server is configured for the local machine. If this is intended, you may need to check your local mail logs (e.g. `/var/log/mail.log`) for more details. -4. Send a test message via the console. +1. Send a test message via the console. ```ruby irb(main):003:0> Notify.test_email('youremail@email.com', 'Hello World', 'This is a test message').deliver_now diff --git a/doc/api/avatar.md b/doc/api/avatar.md index 7faed893066..aa6f7c185ae 100644 --- a/doc/api/avatar.md +++ b/doc/api/avatar.md @@ -4,7 +4,7 @@ ## Get a single avatar URL -Get a single avatar URL for a given email addres. If user with matching public +Get a single avatar URL for a given email address. If user with matching public email address is not found, results from external avatar services are returned. This endpoint can be accessed without authentication. In case public visibility is restricted, response will be `403 Forbidden` when unauthenticated. diff --git a/doc/api/commits.md b/doc/api/commits.md index 994eefa423f..7d9b52ec24f 100644 --- a/doc/api/commits.md +++ b/doc/api/commits.md @@ -290,7 +290,7 @@ Example response: ## Revert a commit -> [Introduced][ce-22919] in GitLab 11.6. +> [Introduced][ce-22919] in GitLab 11.5. Reverts a commit in a given branch. 
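The revert request itself is not shown in this excerpt; a hedged sketch of what it typically looks like, with placeholder project ID, commit SHA, and token:

```sh
# Hypothetical example: revert a commit on the master branch of project 5.
curl --request POST \
     --header "PRIVATE-TOKEN: <your_access_token>" \
     --form "branch=master" \
     "https://gitlab.example.com/api/v4/projects/5/repository/commits/6104942438c14ec7bd21c6cd5bd995272b3faff6/revert"
```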
diff --git a/doc/api/events.md b/doc/api/events.md index ccac5b8bb60..e1c6b801a77 100644 --- a/doc/api/events.md +++ b/doc/api/events.md @@ -71,7 +71,7 @@ Parameters: Example request: ``` -curl --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" https://gitlab.example.com/api/v4/events&target_type=issue&action=created&after=2017-01-31&before=2017-03-01 +curl --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" https://gitlab.example.com/api/v4/events?target_type=issue&action=created&after=2017-01-31&before=2017-03-01 ``` Example response: @@ -276,7 +276,7 @@ Parameters: Example request: ``` -curl --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" https://gitlab.example.com/api/v4/projects/:project_id/events&target_type=issue&action=created&after=2017-01-31&before=2017-03-01 +curl --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" https://gitlab.example.com/api/v4/projects/:project_id/events?target_type=issue&action=created&after=2017-01-31&before=2017-03-01 ``` Example response: diff --git a/doc/api/users.md b/doc/api/users.md index ee24aa09156..e3633c46041 100644 --- a/doc/api/users.md +++ b/doc/api/users.md @@ -1072,7 +1072,6 @@ Example response: [ { "active" : true, - "token" : "EsMo-vhKfXGwX9RKrwiy", "scopes" : [ "api" ], @@ -1089,7 +1088,6 @@ Example response: "read_user" ], "revoked" : true, - "token" : "ZcZRpLeEuQRprkRjYydY", "name" : "mytoken2", "created_at" : "2017-03-17T17:19:28.697Z", "id" : 3, @@ -1125,7 +1123,6 @@ Example response: ```json { "active" : true, - "token" : "EsMo-vhKfXGwX9RKrwiy", "scopes" : [ "api" ], @@ -1142,6 +1139,8 @@ Example response: > Requires admin permissions. +> Token values are returned once. Make sure you save it - you won't be able to access it again. + It creates a new impersonation token. Note that only administrators can do this. You are only able to create impersonation tokens to impersonate the user and perform both API calls and Git reads and writes. The user will not see these tokens in their profile diff --git a/doc/ci/autodeploy/quick_start_guide.md b/doc/ci/autodeploy/quick_start_guide.md index 3836216e951..1473703542d 100644 --- a/doc/ci/autodeploy/quick_start_guide.md +++ b/doc/ci/autodeploy/quick_start_guide.md @@ -23,9 +23,9 @@ You need to have the Google Cloud SDK installed. e.g. On OSX, install [homebrew](https://brew.sh): 1. Install Brew Caskroom: `brew install caskroom/cask/brew-cask` -2. Install Google Cloud SDK: `brew cask install google-cloud-sdk` -3. Add `kubectl`: `gcloud components install kubectl` -4. Log in: `gcloud auth login` +1. Install Google Cloud SDK: `brew cask install google-cloud-sdk` +1. Add `kubectl`: `gcloud components install kubectl` +1. Log in: `gcloud auth login` Now go back to the Google interface, find your cluster, and follow the instructions under `Connect to the cluster` and open the Kubernetes Dashboard. It will look something like `gcloud container clusters get-credentials ruby-autodeploy \ --zone europe-west2-c --project api-project-XXXXXXX` and then `kubectl proxy`. diff --git a/doc/ci/docker/using_docker_build.md b/doc/ci/docker/using_docker_build.md index 3b41036cd14..fef367051bf 100644 --- a/doc/ci/docker/using_docker_build.md +++ b/doc/ci/docker/using_docker_build.md @@ -46,18 +46,18 @@ GitLab Runner then executes job scripts as the `gitlab-runner` user. --description "My Runner" ``` -2. Install Docker Engine on server. +1. Install Docker Engine on server. For more information how to install Docker Engine on different systems checkout the [Supported installations](https://docs.docker.com/engine/installation/). -3. 
Add `gitlab-runner` user to `docker` group: +1. Add `gitlab-runner` user to `docker` group: ```bash sudo usermod -aG docker gitlab-runner ``` -4. Verify that `gitlab-runner` has access to Docker: +1. Verify that `gitlab-runner` has access to Docker: ```bash sudo -u gitlab-runner -H docker info @@ -75,7 +75,7 @@ GitLab Runner then executes job scripts as the `gitlab-runner` user. - docker run my-docker-image /script/to/run/tests ``` -5. You can now use `docker` command and install `docker-compose` if needed. +1. You can now use `docker` command and install `docker-compose` if needed. NOTE: **Note:** By adding `gitlab-runner` to the `docker` group you are effectively granting `gitlab-runner` full root permissions. diff --git a/doc/ci/examples/browser_performance.md b/doc/ci/examples/browser_performance.md index d36e97ebfd3..7c3b3a65675 100644 --- a/doc/ci/examples/browser_performance.md +++ b/doc/ci/examples/browser_performance.md @@ -1,14 +1,20 @@ # Browser Performance Testing with the Sitespeed.io container +CAUTION: **Caution:** +The job definition shown below is supported on GitLab 11.5 and later versions. +It also requires the GitLab Runner 11.5 or later. +For earlier versions, use the [previous job definitions](#previous-job-definitions). + This example shows how to run the [Sitespeed.io container](https://hub.docker.com/r/sitespeedio/sitespeed.io/) on your code by using GitLab CI/CD and [Sitespeed.io](https://www.sitespeed.io) using Docker-in-Docker. -First, you need a GitLab Runner with the +First, you need GitLab Runner with [docker-in-docker executor](../docker/using_docker_build.md#use-docker-in-docker-executor). -Once you set up the Runner, add a new job to `.gitlab-ci.yml`, called -`performance`: + +Once you set up the Runner, add a new job to `.gitlab-ci.yml` that +generates the expected report: ```yaml performance: @@ -26,19 +32,22 @@ performance: - mv sitespeed-results/data/performance.json performance.json artifacts: paths: - - performance.json - - sitespeed-results/ + - sitespeed-results/ + reports: + performance: performance.json ``` -The above example will: +The above example will create a `performance` job in your CI/CD pipeline and will run +Sitespeed.io against the webpage you defined in `URL` to gather key metrics. +The [GitLab plugin](https://gitlab.com/gitlab-org/gl-performance) for +Sitespeed.io is downloaded in order to save the report as a +[Performance report artifact](https://docs.gitlab.com/ee//ci/yaml/README.html#artifactsreportsperformance) +that you can later download and analyze. +Due to implementation limitations we always take the latest Performance artifact available. -1. Create a `performance` job in your CI/CD pipeline and will run - Sitespeed.io against the webpage you defined in `URL`. -1. The [GitLab plugin](https://gitlab.com/gitlab-org/gl-performance) for - Sitespeed.io is downloaded in order to export key metrics to JSON. The full - HTML Sitespeed.io report will also be saved as an artifact, and if you have - [GitLab Pages](../../user/project/pages/index.md) enabled, it can be viewed - directly in your browser. +The full HTML Sitespeed.io report will also be saved as an artifact, and if you have +[GitLab Pages](../../user/project/pages/index.md) enabled, it can be viewed +directly in your browser. 
For further customization options of Sitespeed.io, including the ability to provide a list of URLs to test, please consult @@ -46,8 +55,8 @@ provide a list of URLs to test, please consult TIP: **Tip:** For [GitLab Premium](https://about.gitlab.com/pricing/) users, key metrics are automatically -extracted and shown right in the merge request widget. Learn more about -[Browser Performance Testing](https://docs.gitlab.com/ee/user/project/merge_requests/browser_performance_testing.html). +extracted and shown right in the merge request widget. +[Learn more on Browser Performance Testing in merge requests](https://docs.gitlab.com/ee//user/project/merge_requests/browser_performance_testing.html). ## Performance testing on Review Apps @@ -106,8 +115,40 @@ performance: - mv sitespeed-results/data/performance.json performance.json artifacts: paths: - - performance.json - sitespeed-results/ + reports: + performance: performance.json ``` A complete example can be found in our [Auto DevOps CI YML](https://gitlab.com/gitlab-org/gitlab-ce/blob/master/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml). + +## Previous job definitions + +CAUTION: **Caution:** +Before GitLab 11.5, Performance job and artifact had to be named specifically +to automatically extract report data and show it in the merge request widget. +While these old job definitions are still maintained they have been deprecated +and may be removed in next major release, GitLab 12.0. +You are advised to update your current `.gitlab-ci.yml` configuration to reflect that change. + +For GitLab 11.4 and earlier, the job should look like: + +```yaml +performance: + stage: performance + image: docker:git + variables: + URL: https://example.com + services: + - docker:stable-dind + script: + - mkdir gitlab-exporter + - wget -O ./gitlab-exporter/index.js https://gitlab.com/gitlab-org/gl-performance/raw/master/index.js + - mkdir sitespeed-results + - docker run --shm-size=1g --rm -v "$(pwd)":/sitespeed.io sitespeedio/sitespeed.io:6.3.1 --plugins.add ./gitlab-exporter --outputFolder sitespeed-results $URL + - mv sitespeed-results/data/performance.json performance.json + artifacts: + paths: + - performance.json + - sitespeed-results/ +```
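The report artifacts saved by these jobs can also be fetched after the fact through the jobs API; a sketch assuming the artifacts download endpoint, with a placeholder project ID and token and the job named `performance`:

```sh
# Hypothetical example: download the artifacts archive of the latest successful
# "performance" job on the master branch.
curl --header "PRIVATE-TOKEN: <your_access_token>" \
     --output artifacts.zip \
     "https://gitlab.example.com/api/v4/projects/42/jobs/artifacts/master/download?job=performance"
```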
\ No newline at end of file diff --git a/doc/ci/examples/code_quality.md b/doc/ci/examples/code_quality.md index 2a7040ecdeb..ae000b9d30d 100644 --- a/doc/ci/examples/code_quality.md +++ b/doc/ci/examples/code_quality.md @@ -1,11 +1,18 @@ # Analyze your project's Code Quality +CAUTION: **Caution:** +The job definition shown below is supported on GitLab 11.5 and later versions. +It also requires the GitLab Runner 11.5 or later. +For earlier versions, use the [previous job definitions](#previous-job-definitions). + This example shows how to run Code Quality on your code by using GitLab CI/CD and Docker. -First, you need GitLab Runner with [docker-in-docker executor][dind]. +First, you need GitLab Runner with +[docker-in-docker executor](../docker/using_docker_build.md#use-docker-in-docker-executor). -Once you set up the Runner, add a new job to `.gitlab-ci.yml`, called `code_quality`: +Once you set up the Runner, add a new job to `.gitlab-ci.yml` that +generates the expected report: ```yaml code_quality: @@ -23,27 +30,72 @@ code_quality: --volume /var/run/docker.sock:/var/run/docker.sock "registry.gitlab.com/gitlab-org/security-products/codequality:$SP_VERSION" /code artifacts: - paths: [gl-code-quality-report.json] + reports: + codequality: gl-code-quality-report.json ``` The above example will create a `code_quality` job in your CI/CD pipeline which -will scan your source code for code quality issues. The report will be saved -as an artifact that you can later download and analyze. +will scan your source code for code quality issues. The report will be saved as a +[Code Quality report artifact](../../ci/yaml/README.md#artifactsreportscodequality) +that you can later download and analyze. +Due to implementation limitations we always take the latest Code Quality artifact available. TIP: **Tip:** -Starting with [GitLab Starter][ee] 9.3, this information will -be automatically extracted and shown right in the merge request widget. To do -so, the CI/CD job must be named `code_quality` and the artifact path must be -`gl-code-quality-report.json`. +For [GitLab Starter][ee] users, this information will be automatically +extracted and shown right in the merge request widget. [Learn more on Code Quality in merge requests](https://docs.gitlab.com/ee/user/project/merge_requests/code_quality.html). +## Previous job definitions + CAUTION: **Caution:** -Code Quality was previously using `codeclimate` and `codequality` for job name and -`codeclimate.json` for the artifact name. While these old names -are still maintained they have been deprecated with GitLab 11.0 and may be removed -in next major release, GitLab 12.0. You are advised to update your current `.gitlab-ci.yml` -configuration to reflect that change. +Before GitLab 11.5, Code Quality job and artifact had to be named specifically +to automatically extract report data and show it in the merge request widget. +While these old job definitions are still maintained they have been deprecated +and may be removed in next major release, GitLab 12.0. +You are advised to update your current `.gitlab-ci.yml` configuration to reflect that change. 
+ +For GitLab 11.4 and earlier, the job should look like: + +```yaml +code_quality: + image: docker:stable + variables: + DOCKER_DRIVER: overlay2 + allow_failure: true + services: + - docker:stable-dind + script: + - export SP_VERSION=$(echo "$CI_SERVER_VERSION" | sed 's/^\([0-9]*\)\.\([0-9]*\).*/\1-\2-stable/') + - docker run + --env SOURCE_CODE="$PWD" + --volume "$PWD":/code + --volume /var/run/docker.sock:/var/run/docker.sock + "registry.gitlab.com/gitlab-org/security-products/codequality:$SP_VERSION" /code + artifacts: + paths: [gl-code-quality-report.json] +``` + +Alternatively the job name could be `codeclimate` or `codequality` +and the artifact name could be `codeclimate.json`. +These names have been deprecated with GitLab 11.0 +and may be removed in next major release, GitLab 12.0. + +For GitLab 10.3 and earlier, the job should look like: + +```yaml +codequality: + image: docker:latest + variables: + DOCKER_DRIVER: overlay + services: + - docker:dind + script: + - docker pull codeclimate/codeclimate:0.69.0 + - docker run --env CODECLIMATE_CODE="$PWD" --volume "$PWD":/code --volume /var/run/docker.sock:/var/run/docker.sock --volume /tmp/cc:/tmp/cc codeclimate/codeclimate:0.69.0 init + - docker run --env CODECLIMATE_CODE="$PWD" --volume "$PWD":/code --volume /var/run/docker.sock:/var/run/docker.sock --volume /tmp/cc:/tmp/cc codeclimate/codeclimate:0.69.0 analyze -f json > codeclimate.json || true + artifacts: + paths: [codeclimate.json] +``` [cli]: https://github.com/codeclimate/codeclimate -[dind]: ../docker/using_docker_build.md#use-docker-in-docker-executor [ee]: https://about.gitlab.com/pricing/ diff --git a/doc/ci/examples/container_scanning.md b/doc/ci/examples/container_scanning.md index bc948dc6ea9..68330261910 100644 --- a/doc/ci/examples/container_scanning.md +++ b/doc/ci/examples/container_scanning.md @@ -1,13 +1,20 @@ # Container Scanning with GitLab CI/CD +CAUTION: **Caution:** +The job definition shown below is supported on GitLab 11.5 and later versions. +It also requires the GitLab Runner 11.5 or later. +For earlier versions, use the [previous job definitions](#previous-job-definitions). + You can check your Docker images (or more precisely the containers) for known vulnerabilities by using [Clair](https://github.com/coreos/clair) and [clair-scanner](https://github.com/arminc/clair-scanner), two open source tools for Vulnerability Static Analysis for containers. -All you need is a GitLab Runner with the Docker executor (the shared Runners on -GitLab.com will work fine). You can then add a new job to `.gitlab-ci.yml`, -called `container_scanning`: +First, you need GitLab Runner with +[docker-in-docker executor](../docker/using_docker_build.md#use-docker-in-docker-executor). + +Once you set up the Runner, add a new job to `.gitlab-ci.yml` that +generates the expected report: ```yaml container_scanning: @@ -36,32 +43,26 @@ container_scanning: - while( ! wget -T 10 -q -O /dev/null http://docker:6060/v1/namespaces ) ; do sleep 1 ; echo -n "." ; if [ $retries -eq 10 ] ; then echo " Timeout, aborting." 
; exit 1 ; fi ; retries=$(($retries+1)) ; done - ./clair-scanner -c http://docker:6060 --ip $(hostname -i) -r gl-container-scanning-report.json -l clair.log -w clair-whitelist.yml ${CI_APPLICATION_REPOSITORY}:${CI_APPLICATION_TAG} || true artifacts: - paths: [gl-container-scanning-report.json] + reports: + container_scanning: gl-container-scanning-report.json ``` The above example will create a `container_scanning` job in your CI/CD pipeline, pull the image from the [Container Registry](../../user/project/container_registry.md) (whose name is defined from the two `CI_APPLICATION_` variables) and scan it -for possible vulnerabilities. The report will be saved as an artifact that you -can later download and analyze. +for possible vulnerabilities. The report will be saved as a +[Container Scanning report artifact](https://docs.gitlab.com/ee//ci/yaml/README.html#artifactsreportscontainer_scanning) +that you can later download and analyze. +Due to implementation limitations we always take the latest Container Scanning artifact available. If you want to whitelist some specific vulnerabilities, you can do so by defining them in a [YAML file](https://github.com/arminc/clair-scanner/blob/master/README.md#example-whitelist-yaml-file), in our case its named `clair-whitelist.yml`. TIP: **Tip:** -Starting with [GitLab Ultimate][ee] 10.4, this information will -be automatically extracted and shown right in the merge request widget. To do -so, the CI/CD job must be named `container_scanning` and the artifact path must be -`gl-container-scanning-report.json`. -[Learn more on container scanning results shown in merge requests](https://docs.gitlab.com/ee/user/project/merge_requests/container_scanning.html). - -CAUTION: **Caution:** -Before GitLab 11.0, Container Scanning was previously using `sast:container` for job name and -`gl-sast-container-report.json` for the artifact name. While these old names -are still maintained, they have been deprecated with GitLab 11.0 and may be removed -in next major release, GitLab 12.0. You are advised to update your current `.gitlab-ci.yml` -configuration to reflect that change. +For [GitLab Ultimate][ee] users, this information will +be automatically extracted and shown right in the merge request widget. +[Learn more on Container Scanning in merge requests](https://docs.gitlab.com/ee/user/project/merge_requests/container_scanning.html). CAUTION: **Caution:** Starting with GitLab 11.5, Container Scanning feature is licensed under the name `container_scanning`. @@ -69,4 +70,50 @@ While the old name `sast_container` is still maintained, it has been deprecated may be removed in next major release, GitLab 12.0. You are advised to update your current `.gitlab-ci.yml` configuration to reflect that change if you are using the `$GITLAB_FEATURES` environment variable. +## Previous job definitions + +CAUTION: **Caution:** +Before GitLab 11.5, Container Scanning job and artifact had to be named specifically +to automatically extract report data and show it in the merge request widget. +While these old job definitions are still maintained they have been deprecated +and may be removed in next major release, GitLab 12.0. +You are advised to update your current `.gitlab-ci.yml` configuration to reflect that change. 
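When updating an older definition, the piece that needs to change is the `artifacts` section; a minimal sketch, assuming the image, variables, services, and script stay as in the example above:

```yaml
container_scanning:
  # ...image, variables, services, and script unchanged from the example above...
  artifacts:
    reports:
      container_scanning: gl-container-scanning-report.json
```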
+ +For GitLab 11.4 and earlier, the job should look like: + +```yaml +container_scanning: + image: docker:stable + variables: + DOCKER_DRIVER: overlay2 + ## Define two new variables based on GitLab's CI/CD predefined variables + ## https://docs.gitlab.com/ee/ci/variables/#predefined-variables-environment-variables + CI_APPLICATION_REPOSITORY: $CI_REGISTRY_IMAGE/$CI_COMMIT_REF_SLUG + CI_APPLICATION_TAG: $CI_COMMIT_SHA + allow_failure: true + services: + - docker:stable-dind + script: + - docker run -d --name db arminc/clair-db:latest + - docker run -p 6060:6060 --link db:postgres -d --name clair --restart on-failure arminc/clair-local-scan:v2.0.1 + - apk add -U wget ca-certificates + - docker pull ${CI_APPLICATION_REPOSITORY}:${CI_APPLICATION_TAG} + - wget https://github.com/arminc/clair-scanner/releases/download/v8/clair-scanner_linux_amd64 + - mv clair-scanner_linux_amd64 clair-scanner + - chmod +x clair-scanner + - touch clair-whitelist.yml + - while( ! wget -q -O /dev/null http://docker:6060/v1/namespaces ) ; do sleep 1 ; done + - retries=0 + - echo "Waiting for clair daemon to start" + - while( ! wget -T 10 -q -O /dev/null http://docker:6060/v1/namespaces ) ; do sleep 1 ; echo -n "." ; if [ $retries -eq 10 ] ; then echo " Timeout, aborting." ; exit 1 ; fi ; retries=$(($retries+1)) ; done + - ./clair-scanner -c http://docker:6060 --ip $(hostname -i) -r gl-container-scanning-report.json -l clair.log -w clair-whitelist.yml ${CI_APPLICATION_REPOSITORY}:${CI_APPLICATION_TAG} || true + artifacts: + paths: [gl-container-scanning-report.json] +``` + +Alternatively the job name could be `sast:container` +and the artifact name could be `gl-sast-container-report.json`. +These names have been deprecated with GitLab 11.0 +and may be removed in next major release, GitLab 12.0. + [ee]: https://about.gitlab.com/pricing/ diff --git a/doc/ci/examples/dast.md b/doc/ci/examples/dast.md index ff20f0b3b5e..0ca89eb6700 100644 --- a/doc/ci/examples/dast.md +++ b/doc/ci/examples/dast.md @@ -1,16 +1,26 @@ # Dynamic Application Security Testing with GitLab CI/CD +CAUTION: **Caution:** +The job definition shown below is supported on GitLab 11.5 and later versions. +It also requires the GitLab Runner 11.5 or later. +For earlier versions, use the [previous job definitions](#previous-job-definitions). + [Dynamic Application Security Testing (DAST)](https://en.wikipedia.org/wiki/Dynamic_program_analysis) is using the popular open source tool [OWASP ZAProxy](https://github.com/zaproxy/zaproxy) to perform an analysis on your running web application. +Since it is based on [ZAP Baseline](https://github.com/zaproxy/zaproxy/wiki/ZAP-Baseline-Scan) +DAST will perform passive scanning only; +it will not actively attack your application. It can be very useful combined with [Review Apps](../review_apps/index.md). ## Example -All you need is a GitLab Runner with the Docker executor (the shared Runners on -GitLab.com will work fine). You can then add a new job to `.gitlab-ci.yml`, -called `dast`: +First, you need GitLab Runner with +[docker-in-docker executor](../docker/using_docker_build.md#use-docker-in-docker-executor). + +Once you set up the Runner, add a new job to `.gitlab-ci.yml` that +generates the expected report: ```yaml dast: @@ -23,13 +33,16 @@ dast: - /zap/zap-baseline.py -J gl-dast-report.json -t $website || true - cp /zap/wrk/gl-dast-report.json . 
artifacts: - paths: [gl-dast-report.json] + reports: + dast: gl-dast-report.json ``` The above example will create a `dast` job in your CI/CD pipeline which will run the tests on the URL defined in the `website` variable (change it to use your -own) and finally write the results in the `gl-dast-report.json` file. You can -then download and analyze the report artifact in JSON format. +own) and scan it for possible vulnerabilities. The report will be saved as a +[DAST report artifact](https://docs.gitlab.com/ee//ci/yaml/README.html#artifactsreportsdast) +that you can later download and analyze. +Due to implementation limitations we always take the latest DAST artifact available. It's also possible to authenticate the user before performing DAST checks: @@ -39,25 +52,51 @@ dast: variables: website: "https://example.com" login_url: "https://example.com/sign-in" + username: "john.doe@example.com" + password: "john-doe-password" allow_failure: true script: - mkdir /zap/wrk/ - /zap/zap-baseline.py -J gl-dast-report.json -t $website --auth-url $login_url - --auth-username "john.doe@example.com" - --auth-password "john-doe-password" || true + --auth-username $username + --auth-password $password || true - cp /zap/wrk/gl-dast-report.json . artifacts: - paths: [gl-dast-report.json] + reports: + dast: gl-dast-report.json ``` See [zaproxy documentation](https://gitlab.com/gitlab-org/security-products/zaproxy) to learn more about authentication settings. TIP: **Tip:** -Starting with [GitLab Ultimate][ee] 10.4, this information will -be automatically extracted and shown right in the merge request widget. To do -so, the CI job must be named `dast` and the artifact path must be -`gl-dast-report.json`. -[Learn more about DAST results shown in merge requests](https://docs.gitlab.com/ee/user/project/merge_requests/dast.html). +For [GitLab Ultimate][ee] users, this information will +be automatically extracted and shown right in the merge request widget. +[Learn more on DAST in merge requests](https://docs.gitlab.com/ee/user/project/merge_requests/dast.html). + +## Previous job definitions + +CAUTION: **Caution:** +Before GitLab 11.5, DAST job and artifact had to be named specifically +to automatically extract report data and show it in the merge request widget. +While these old job definitions are still maintained they have been deprecated +and may be removed in next major release, GitLab 12.0. +You are advised to update your current `.gitlab-ci.yml` configuration to reflect that change. + +For GitLab 11.4 and earlier, the job should look like: + +```yaml +dast: + image: registry.gitlab.com/gitlab-org/security-products/zaproxy + variables: + website: "https://example.com" + allow_failure: true + script: + - mkdir /zap/wrk/ + - /zap/zap-baseline.py -J gl-dast-report.json -t $website || true + - cp /zap/wrk/gl-dast-report.json . + artifacts: + paths: [gl-dast-report.json] +``` [ee]: https://about.gitlab.com/pricing/ diff --git a/doc/ci/examples/deployment/composer-npm-deploy.md b/doc/ci/examples/deployment/composer-npm-deploy.md index 55ff131efaa..36358515b84 100644 --- a/doc/ci/examples/deployment/composer-npm-deploy.md +++ b/doc/ci/examples/deployment/composer-npm-deploy.md @@ -33,9 +33,9 @@ before_script: In this particular case, the `npm deploy` script is a Gulp script that does the following: 1. Compile CSS & JS -2. Create sprites -3. Copy various assets (images, fonts) around -4. Replace some strings +1. Create sprites +1. Copy various assets (images, fonts) around +1. 
Replace some strings All these operations will put all files into a `build` folder, which is ready to be deployed to a live server. @@ -62,10 +62,10 @@ before_script: In order, this means that: -1. We check if the `ssh-agent` is available and we install it if it's not; -2. We create the `~/.ssh` folder; -3. We make sure we're running bash; -4. We disable host checking (we don't ask for user accept when we first connect to a server; and since every job will equal a first connect, we kind of need this) +1. We check if the `ssh-agent` is available and we install it if it's not. +1. We create the `~/.ssh` folder. +1. We make sure we're running bash. +1. We disable host checking (we don't ask for user accept when we first connect to a server and since every job will equal a first connect, we kind of need this). And this is basically all you need in the `before_script` section. @@ -91,11 +91,11 @@ stage_deploy: Here's the breakdown: 1. `only:dev` means that this build will run only when something is pushed to the `dev` branch. You can remove this block completely and have everything be ran on every push (but probably this is something you don't want) -2. `ssh-add ...` we will add that private key you added on the web UI to the docker container -3. We will connect via `ssh` and create a new `_tmp` folder -4. We will connect via `scp` and upload the `build` folder (which was generated by a `npm` script) to our previously created `_tmp` folder -5. We will connect again to `ssh` and move the `live` folder to an `_old` folder, then move `_tmp` to `live`. -6. We connect to ssh and remove the `_old` folder +1. `ssh-add ...` we will add that private key you added on the web UI to the docker container +1. We will connect via `ssh` and create a new `_tmp` folder +1. We will connect via `scp` and upload the `build` folder (which was generated by a `npm` script) to our previously created `_tmp` folder +1. We will connect again to `ssh` and move the `live` folder to an `_old` folder, then move `_tmp` to `live`. +1. We connect to ssh and remove the `_old` folder What's the deal with the artifacts? We just tell GitLab CI to keep the `build` directory (later on, you can download that as needed). diff --git a/doc/ci/examples/laravel_with_gitlab_and_envoy/img/container_registry_checkbox.png b/doc/ci/examples/laravel_with_gitlab_and_envoy/img/container_registry_checkbox.png Binary files differdeleted file mode 100644 index a56c07a0da7..00000000000 --- a/doc/ci/examples/laravel_with_gitlab_and_envoy/img/container_registry_checkbox.png +++ /dev/null diff --git a/doc/ci/examples/laravel_with_gitlab_and_envoy/index.md b/doc/ci/examples/laravel_with_gitlab_and_envoy/index.md index b6989d229d1..b090ea014dc 100644 --- a/doc/ci/examples/laravel_with_gitlab_and_envoy/index.md +++ b/doc/ci/examples/laravel_with_gitlab_and_envoy/index.md @@ -444,9 +444,7 @@ On your GitLab project repository navigate to the **Registry** tab.  -You may need to [enable Container Registry](../../../user/project/container_registry.md#enable-the-container-registry-for-your-project) to your project to see this tab. You'll find it under your project's **Settings > General > Sharing and permissions**. - - +You may need to [enable Container Registry](../../../user/project/container_registry.md#enable-the-container-registry-for-your-project) to your project to see this tab. You'll find it under your project's **Settings > General > Permissions**. 
To start using Container Registry on our machine, we first need to login to the GitLab registry using our GitLab username and password: diff --git a/doc/ci/examples/test-and-deploy-python-application-to-heroku.md b/doc/ci/examples/test-and-deploy-python-application-to-heroku.md index 087b317ab73..ec0b5aaed09 100644 --- a/doc/ci/examples/test-and-deploy-python-application-to-heroku.md +++ b/doc/ci/examples/test-and-deploy-python-application-to-heroku.md @@ -40,15 +40,17 @@ production: ``` This project has three jobs: -1. `test` - used to test Django application, -2. `staging` - used to automatically deploy staging environment every push to `master` branch -3. `production` - used to automatically deploy production environment for every created tag + +- `test` - used to test Django application, +- `staging` - used to automatically deploy staging environment every push to `master` branch +- `production` - used to automatically deploy production environment for every created tag ## Store API keys You'll need to create two variables in `Settings > CI/CD > Variables` on your GitLab project settings: -1. `HEROKU_STAGING_API_KEY` - Heroku API key used to deploy staging app, -2. `HEROKU_PRODUCTION_API_KEY` - Heroku API key used to deploy production app. + +- `HEROKU_STAGING_API_KEY` - Heroku API key used to deploy staging app. +- `HEROKU_PRODUCTION_API_KEY` - Heroku API key used to deploy production app. Find your Heroku API key in [Manage Account](https://dashboard.heroku.com/account). diff --git a/doc/ci/examples/test-and-deploy-ruby-application-to-heroku.md b/doc/ci/examples/test-and-deploy-ruby-application-to-heroku.md index 7f9ab1f3a5e..33a353f17f5 100644 --- a/doc/ci/examples/test-and-deploy-ruby-application-to-heroku.md +++ b/doc/ci/examples/test-and-deploy-ruby-application-to-heroku.md @@ -36,16 +36,17 @@ production: ``` This project has three jobs: -1. `test` - used to test Rails application, -2. `staging` - used to automatically deploy staging environment every push to `master` branch -3. `production` - used to automatically deploy production environment for every created tag + +- `test` - used to test Rails application. +- `staging` - used to automatically deploy staging environment every push to `master` branch. +- `production` - used to automatically deploy production environment for every created tag. ## Store API keys You'll need to create two variables in your project's **Settings > CI/CD > Variables**: -1. `HEROKU_STAGING_API_KEY` - Heroku API key used to deploy staging app, -2. `HEROKU_PRODUCTION_API_KEY` - Heroku API key used to deploy production app. +- `HEROKU_STAGING_API_KEY` - Heroku API key used to deploy staging app. +- `HEROKU_PRODUCTION_API_KEY` - Heroku API key used to deploy production app. Find your Heroku API key in [Manage Account](https://dashboard.heroku.com/account). diff --git a/doc/ci/quick_start/README.md b/doc/ci/quick_start/README.md index 636117536a2..bdc593493ea 100644 --- a/doc/ci/quick_start/README.md +++ b/doc/ci/quick_start/README.md @@ -168,7 +168,7 @@ can be found at <https://docs.gitlab.com/runner/>. In order to have a functional Runner you need to follow two steps: 1. [Install it][runner-install] -2. [Configure it](../runners/README.md#registering-a-specific-runner) +1. [Configure it](../runners/README.md#registering-a-specific-runner) Follow the links above to set up your own Runner or use a Shared Runner as described in the next section. 
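Once a specific Runner is registered, a job can opt into it from `.gitlab-ci.yml` by using tags. A minimal sketch, assuming the Runner was registered with a hypothetical `my-runner` tag:

```yaml
build:
  stage: build
  tags:
    - my-runner  # hypothetical tag given to the Runner during registration
  script:
    - echo "This job is picked up only by Runners tagged 'my-runner'"
```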
diff --git a/doc/ci/runners/README.md b/doc/ci/runners/README.md index 2a179bfbbf0..9c9ea651678 100644 --- a/doc/ci/runners/README.md +++ b/doc/ci/runners/README.md @@ -138,9 +138,9 @@ project without requiring your authorization, so use it with caution. An admin can enable/disable a specific Runner for projects: 1. Navigate to **Admin > Runners** -2. Find the Runner you wish to enable/disable -3. Click edit on the Runner -4. Click **Enable** or **Disable** on the project +1. Find the Runner you wish to enable/disable +1. Click edit on the Runner +1. Click **Enable** or **Disable** on the project ## Protected Runners diff --git a/doc/ci/yaml/README.md b/doc/ci/yaml/README.md index 5deeb2b0133..2a667c985da 100644 --- a/doc/ci/yaml/README.md +++ b/doc/ci/yaml/README.md @@ -103,7 +103,7 @@ rspec: - $RSPEC ``` -In the example above, the `rspec` job inherits from the `.tests` template job. +In the example above, the `rspec` job inherits from the `.tests` template job. GitLab will perform a reverse deep merge based on the keys. GitLab will: - Merge the `rspec` contents into `.tests` recursively. @@ -476,6 +476,7 @@ docker build: - Dockerfile - docker/scripts/* - dockerfiles/**/* + - more_scripts/*.{rb,py,sh} ``` In the scenario above, if you are pushing multiple commits to GitLab to an @@ -485,6 +486,7 @@ one of the commits contains changes to either: - The `Dockerfile` file. - Any of the files inside `docker/scripts/` directory. - Any of the files and subfolders inside `dockerfiles` directory. +- Any of the files with `rb`, `py`, `sh` extensions inside `more_scripts` directory. CAUTION: **Warning:** There are some caveats when using this feature with new branches and tags. See @@ -1337,6 +1339,81 @@ concatenated into a single file. Use a filename pattern (`junit: rspec-*.xml`), an array of filenames (`junit: [rspec-1.xml, rspec-2.xml, rspec-3.xml]`), or a combination thereof (`junit: [rspec.xml, test-results/TEST-*.xml]`). +#### `artifacts:reports:codequality` **[STARTER]** + +> Introduced in GitLab 11.5. Requires GitLab Runner 11.5 and above. + +The `codequality` report collects [CodeQuality issues](https://docs.gitlab.com/ee/user/project/merge_requests/code_quality.html) +as artifacts. + +The collected Code Quality report will be uploaded to GitLab as an artifact and will +be automatically shown in merge requests. + +#### `artifacts:reports:sast` **[ULTIMATE]** + +> Introduced in GitLab 11.5. Requires GitLab Runner 11.5 and above. + +The `sast` report collects [SAST vulnerabilities](https://docs.gitlab.com/ee/user/project/merge_requests/sast.html) +as artifacts. + +The collected SAST report will be uploaded to GitLab as an artifact and will +be automatically shown in merge requests, pipeline view and provide data for security +dashboards. + +#### `artifacts:reports:dependency_scanning` **[ULTIMATE]** + +> Introduced in GitLab 11.5. Requires GitLab Runner 11.5 and above. + +The `dependency_scanning` report collects [Dependency Scanning vulnerabilities](https://docs.gitlab.com/ee/user/project/merge_requests/dependency_scanning.html) +as artifacts. + +The collected Dependency Scanning report will be uploaded to GitLab as an artifact and will +be automatically shown in merge requests, pipeline view and provide data for security +dashboards. + +#### `artifacts:reports:container_scanning` **[ULTIMATE]** + +> Introduced in GitLab 11.5. Requires GitLab Runner 11.5 and above. 
+ +The `container_scanning` report collects [Container Scanning vulnerabilities](https://docs.gitlab.com/ee/user/project/merge_requests/container_scanning.html) +as artifacts. + +The collected Container Scanning report will be uploaded to GitLab as an artifact and will +be automatically shown in merge requests, pipeline view and provide data for security +dashboards. + +#### `artifacts:reports:dast` **[ULTIMATE]** + +> Introduced in GitLab 11.5. Requires GitLab Runner 11.5 and above. + +The `dast` report collects [DAST vulnerabilities](https://docs.gitlab.com/ee/user/project/merge_requests/dast.html) +as artifacts. + +The collected DAST report will be uploaded to GitLab as an artifact and will +be automatically shown in merge requests, pipeline view and provide data for security +dashboards. + +#### `artifacts:reports:license_management` **[ULTIMATE]** + +> Introduced in GitLab 11.5. Requires GitLab Runner 11.5 and above. + +The `license_management` report collects [Licenses](https://docs.gitlab.com/ee/user/project/merge_requests/license_management.html) +as artifacts. + +The collected License Management report will be uploaded to GitLab as an artifact and will +be automatically shown in merge requests, pipeline view and provide data for security +dashboards. + +#### `artifacts:reports:performance` **[PREMIUM]** + +> Introduced in GitLab 11.5. Requires GitLab Runner 11.5 and above. + +The `performance` report collects [Performance metrics](https://docs.gitlab.com/ee//user/project/merge_requests/browser_performance_testing.html) +as artifacts. + +The collected Performance report will be uploaded to GitLab as an artifact and will +be automatically shown in merge requests. + ## `dependencies` > Introduced in GitLab 8.6 and GitLab Runner v1.1.1. diff --git a/doc/development/README.md b/doc/development/README.md index 14dfe8eb1f3..d2dd62ecac5 100644 --- a/doc/development/README.md +++ b/doc/development/README.md @@ -30,6 +30,7 @@ description: 'Learn how to contribute to GitLab.' ## Backend guides - [GitLab utilities](utilities.md) +- [Logging](logging.md) - [API styleguide](api_styleguide.md) Use this styleguide if you are contributing to the API. - [GraphQL API styleguide](api_graphql_styleguide.md) Use this @@ -51,6 +52,7 @@ description: 'Learn how to contribute to GitLab.' - [Prometheus metrics](prometheus_metrics.md) - [Guidelines for reusing abstractions](reusing_abstractions.md) - [DeclarativePolicy framework](policies.md) +- [Switching to Rails 5](switching_to_rails5.md) ## Performance guides diff --git a/doc/development/adding_database_indexes.md b/doc/development/adding_database_indexes.md index ea6f14da3b9..d1d2b8c4907 100644 --- a/doc/development/adding_database_indexes.md +++ b/doc/development/adding_database_indexes.md @@ -28,9 +28,9 @@ to filter data by. Instead one should ask themselves the following questions: 1. Can I write my query in such a way that it re-uses as many existing indexes as possible? -2. Is the data going to be large enough that using an index will actually be +1. Is the data going to be large enough that using an index will actually be faster than just iterating over the rows in the table? -3. Is the overhead of maintaining the index worth the reduction in query +1. Is the overhead of maintaining the index worth the reduction in query timings? We'll explore every question in detail below. @@ -62,7 +62,7 @@ In short: 1. Try to write your query in such a way that it re-uses as many existing indexes as possible. -2. 
Run the query using `EXPLAIN ANALYZE` and study the output to find the most +1. Run the query using `EXPLAIN ANALYZE` and study the output to find the most ideal query. ## Data Size diff --git a/doc/development/build_test_package.md b/doc/development/build_test_package.md index 439d228baef..c5f6adfeaeb 100644 --- a/doc/development/build_test_package.md +++ b/doc/development/build_test_package.md @@ -4,12 +4,13 @@ While developing a new feature or modifying an existing one, it is helpful if an installable package (or a docker image) containing those changes is available for testing. For this very purpose, a manual job is provided in the GitLab CI/CD pipeline that can be used to trigger a pipeline in the omnibus-gitlab repository -that will create -1. A deb package for Ubuntu 16.04, available as a build artifact, and -2. A docker image, which is pushed to [Omnibus GitLab's container -registry](https://gitlab.com/gitlab-org/omnibus-gitlab/container_registry) -(images titled `gitlab-ce` and `gitlab-ee` respectively and image tag is the -commit which triggered the pipeline). +that will create: + +- A deb package for Ubuntu 16.04, available as a build artifact, and +- A docker image, which is pushed to [Omnibus GitLab's container + registry](https://gitlab.com/gitlab-org/omnibus-gitlab/container_registry) + (images titled `gitlab-ce` and `gitlab-ee` respectively and image tag is the + commit which triggered the pipeline). When you push a commit to either the gitlab-ce or gitlab-ee project, the pipeline for that commit will have a `build-package` manual action you can diff --git a/doc/development/code_review.md b/doc/development/code_review.md index 96f3861f8d7..52710e54e86 100644 --- a/doc/development/code_review.md +++ b/doc/development/code_review.md @@ -9,11 +9,11 @@ code is effective, understandable, and maintainable. ## Getting your merge request reviewed, approved, and merged -You are strongly encouraged to get your code **reviewed** by a +You are strongly encouraged to get your code **reviewed** by a [reviewer](https://about.gitlab.com/handbook/engineering/#reviewer) as soon as there is any code to review, to get a second opinion on the chosen solution and implementation, and an extra pair of eyes looking for bugs, logic problems, or -uncovered edge cases. The reviewer can be from a different team, but it is +uncovered edge cases. The reviewer can be from a different team, but it is recommended to pick someone who knows the domain well. You can read more about the importance of involving reviewer(s) in the section on the responsibility of the author below. @@ -23,7 +23,7 @@ one of the [Merge request coaches][team]. Depending on the areas your merge request touches, it must be **approved** by one or more [maintainers](https://about.gitlab.com/handbook/engineering/#maintainer): -For approvals, we use the approval functionality found in the merge request +For approvals, we use the approval functionality found in the merge request widget. Reviewers can add their approval by [approving additionally](https://docs.gitlab.com/ee/user/project/merge_requests/merge_request_approvals.html#adding-or-removing-an-approval). 1. If your merge request includes backend changes [^1], it must be @@ -42,43 +42,43 @@ widget. Reviewers can add their approval by [approving additionally](https://doc Getting your merge request **merged** also requires a maintainer. If it requires more than one approval, the last maintainer to review and approve it will also merge it. 
-As described in the section on the responsibility of the maintainer below, you -are recommended to get your merge request approved and merged by maintainer(s) +As described in the section on the responsibility of the maintainer below, you +are recommended to get your merge request approved and merged by maintainer(s) from other teams than your own. ### The responsibility of the merge request author The responsibility to find the best solution and implement it lies with the -merge request author. +merge request author. -Before assigning a merge request to a maintainer for approval and merge, they -should be confident that it actually solves the problem it was meant to solve, -that it does so in the most appropriate way, that it satisfies all requirements, -and that there are no remaining bugs, logical problems, or uncovered edge cases. -The merge request should also have a completed task list in its description and +Before assigning a merge request to a maintainer for approval and merge, they +should be confident that it actually solves the problem it was meant to solve, +that it does so in the most appropriate way, that it satisfies all requirements, +and that there are no remaining bugs, logical problems, or uncovered edge cases. +The merge request should also have a completed task list in its description and a passing CI pipeline to avoid unnecessary back and forth. To reach the required level of confidence in their solution, an author is expected -to involve other people in the investigation and implementation processes as +to involve other people in the investigation and implementation processes as appropriate. -They are encouraged to reach out to domain experts to discuss different solutions -or get an implementation reviewed, to product managers and UX designers to clear -up confusion or verify that the end result matches what they had in mind, to +They are encouraged to reach out to domain experts to discuss different solutions +or get an implementation reviewed, to product managers and UX designers to clear +up confusion or verify that the end result matches what they had in mind, to database specialists to get input on the data model or specific queries, or to any other developer to get an in-depth review of the solution. If an author is unsure if a merge request needs a domain expert's opinion, that's -usually a pretty good sign that it does, since without it the required level of +usually a pretty good sign that it does, since without it the required level of confidence in their solution will not have been reached. ### The responsibility of the maintainer Maintainers are responsible for the overall health, quality, and consistency of -the GitLab codebase, across domains and product areas. +the GitLab codebase, across domains and product areas. -Consequently, their reviews will focus primarily on things like overall -architecture, code organization, separation of concerns, tests, DRYness, +Consequently, their reviews will focus primarily on things like overall +architecture, code organization, separation of concerns, tests, DRYness, consistency, and readability. Since a maintainer's job only depends on their knowledge of the overall GitLab @@ -87,12 +87,12 @@ merge requests from any team and in any product area. 
In fact, authors are recommended to get their merge requests merged by maintainers from other teams than their own, to ensure that all code across GitLab is consistent -and can be easily understood by all contributors, from both inside and outside the +and can be easily understood by all contributors, from both inside and outside the company, without requiring team-specific expertise. Maintainers will do their best to also review the specifics of the chosen solution before merging, but as they are not necessarily domain experts, they may be poorly -placed to do so without an unreasonable investment of time. In those cases, they +placed to do so without an unreasonable investment of time. In those cases, they will defer to the judgment of the author and earlier reviewers and involved domain experts, in favor of focusing on their primary responsibilities. @@ -100,7 +100,7 @@ If a developer who happens to also be a maintainer was involved in a merge reque as a domain expert and/or reviewer, it is recommended that they are not also picked as the maintainer to ultimately approve and merge it. -Maintainers should check before merging if the merge request is approved by the +Maintainers should check before merging if the merge request is approved by the required approvers. ## Best practices @@ -230,41 +230,41 @@ Enterprise Edition instance. This has some implications: 1. **Query changes** should be tested to ensure that they don't result in worse performance at the scale of GitLab.com: 1. Generating large quantities of data locally can help. - 2. Asking for query plans from GitLab.com is the most reliable way to validate + 1. Asking for query plans from GitLab.com is the most reliable way to validate these. -2. **Database migrations** must be: +1. **Database migrations** must be: 1. Reversible. - 2. Performant at the scale of GitLab.com - ask a maintainer to test the + 1. Performant at the scale of GitLab.com - ask a maintainer to test the migration on the staging environment if you aren't sure. - 3. Categorised correctly: + 1. Categorised correctly: - Regular migrations run before the new code is running on the instance. - [Post-deployment migrations](post_deployment_migrations.md) run _after_ the new code is deployed, when the instance is configured to do that. - [Background migrations](background_migrations.md) run in Sidekiq, and should only be done for migrations that would take an extreme amount of time at GitLab.com scale. -3. **Sidekiq workers** +1. **Sidekiq workers** [cannot change in a backwards-incompatible way](sidekiq_style_guide.md#removing-or-renaming-queues): 1. Sidekiq queues are not drained before a deploy happens, so there will be workers in the queue from the previous version of GitLab. - 2. If you need to change a method signature, try to do so across two releases, + 1. If you need to change a method signature, try to do so across two releases, and accept both the old and new arguments in the first of those. - 3. Similarly, if you need to remove a worker, stop it from being scheduled in + 1. Similarly, if you need to remove a worker, stop it from being scheduled in one release, then remove it in the next. This will allow existing jobs to execute. - 4. Don't forget, not every instance will upgrade to every intermediate version + 1. Don't forget, not every instance will upgrade to every intermediate version (some people may go from X.1.0 to X.10.0, or even try bigger upgrades!), so try to be liberal in accepting the old format if it is cheap to do so. -4. 
**Cached values** may persist across releases. If you are changing the type a +1. **Cached values** may persist across releases. If you are changing the type a cached value returns (say, from a string or nil to an array), change the cache key at the same time. -5. **Settings** should be added as a +1. **Settings** should be added as a [last resort](https://about.gitlab.com/handbook/product/#convention-over-configuration). If you're adding a new setting in `gitlab.yml`: 1. Try to avoid that, and add to `ApplicationSetting` instead. - 2. Ensure that it is also + 1. Ensure that it is also [added to Omnibus](https://docs.gitlab.com/omnibus/settings/gitlab.yml.html#adding-a-new-setting-to-gitlab-yml). -6. **Filesystem access** can be slow, so try to avoid +1. **Filesystem access** can be slow, so try to avoid [shared files](shared_files.md) when an alternative solution is available. ### Credits diff --git a/doc/development/diffs.md b/doc/development/diffs.md index 4adae5dabe2..43fc125c21d 100644 --- a/doc/development/diffs.md +++ b/doc/development/diffs.md @@ -17,20 +17,20 @@ The diffs fetching process _limits_ single file diff sizes and the overall size then persisted on `merge_request_diff_files` table. Even though diffs larger than 10% of the value of `ApplicationSettings#diff_max_patch_bytes` are collapsed, -we still keep them on Postgres. However, diff files larger than defined _safety limits_ +we still keep them on Postgres. However, diff files larger than defined _safety limits_ (see the [Diff limits section](#diff-limits)) are _not_ persisted in the database. In order to present diffs information on the Merge Request diffs page, we: 1. Fetch all diff files from database `merge_request_diff_files` -2. Fetch the _old_ and _new_ file blobs in batch to: - 1. Highlight old and new file content - 2. Know which viewer it should use for each file (text, image, deleted, etc) - 3. Know if the file content changed - 4. Know if it was stored externally - 5. Know if it had storage errors -3. If the diff file is cacheable (text-based), it's cached on Redis -using `Gitlab::Diff::FileCollection::MergeRequestDiff` +1. Fetch the _old_ and _new_ file blobs in batch to: + - Highlight old and new file content + - Know which viewer it should use for each file (text, image, deleted, etc) + - Know if the file content changed + - Know if it was stored externally + - Know if it had storage errors +1. If the diff file is cacheable (text-based), it's cached on Redis + using `Gitlab::Diff::FileCollection::MergeRequestDiff` ### Note diffs @@ -39,9 +39,9 @@ on `NoteDiffFile` (which is associated with the actual `DiffNote`). So instead of hitting the repository every time we need the diff of the file, we: 1. Check whether we have the `NoteDiffFile#diff` persisted and use it -2. Otherwise, if it's a current MR revision, use the persisted -`MergeRequestDiffFile#diff` -3. In the last scenario, go the the repository and fetch the diff +1. Otherwise, if it's a current MR revision, use the persisted + `MergeRequestDiffFile#diff` +1. In the last scenario, go the repository and fetch the diff ## Diff limits diff --git a/doc/development/documentation/index.md b/doc/development/documentation/index.md index 154ede087cc..b8b86ac1bf5 100644 --- a/doc/development/documentation/index.md +++ b/doc/development/documentation/index.md @@ -41,9 +41,11 @@ how to structure GitLab docs. 
## Markdown and styles -Currently GitLab docs use [Kramdown](https://gitlab.com/gitlab-com/gitlab-docs/issues/50) as the [markdown](../../user/markdown.md) engine. +[GitLab docs](https://gitlab.com/gitlab-com/gitlab-docs) uses [GitLab Kramdown](https://gitlab.com/gitlab-org/gitlab_kramdown) +as markdown engine. Check the [GitLab Markdown Kramdown Guide](https://about.gitlab.com/handbook/product/technical-writing/markdown-guide/) +for a complete Kramdown reference. -All the docs follow the [documentation style guidelines](styleguide.md). See [Linting](#linting) for help to follow the guidelines. +Follow the [documentation style guidelines](styleguide.md) strictly. ## Documentation directory structure @@ -198,6 +200,11 @@ redirect_to: '../path/to/file/README.md' It supports both full and relative URLs, e.g. `https://docs.gitlab.com/ee/path/to/file.html`, `../path/to/file.html`, `path/to/file.md`. Note that any `*.md` paths will be compiled to `*.html`. +NOTE: **Note:** +This redirection method will not provide a redirect fallback on GitLab `/help`. When using +it, make sure to add a link to the new page on the doc, otherwise it's a dead end for users that +land on the doc via `/help`. + ### Redirections for pages with Disqus comments If the documentation page being relocated already has any Disqus comments, @@ -223,145 +230,6 @@ redirect_from: 'https://docs.gitlab.com/my-old-location/README.html' Note: it is necessary to include the file name in the `redirect_from` URL, even if it's `index.html` or `README.html`. -## Linting - -To help adhere to the [documentation style guidelines](styleguide.md), and to improve the content -added to documentation, consider locally installing and running documentation linters. This will -help you catch common issues before raising merge requests for review of documentation. - -The following are some suggested linters you can install locally and sample configuration: - -- [`proselint`](#proselint) -- [`markdownlint`](#markdownlint) - -NOTE: **Note:** -This list does not limit what other linters you can add to your local documentation writing toolchain. - -### `proselint` - -`proselint` checks for common problems with English prose. It provides a - [plethora of checks](http://proselint.com/checks/) that are helpful for technical writing. - -`proselint` can be used [on the command line](http://proselint.com/utility/), either on a single - Markdown file or on all Markdown files in a project. For example, to run `proselint` on all - documentation in the [`gitlab-ce` project](https://gitlab.com/gitlab-org/gitlab-ce), run the - following commands from within the `gitlab-ce` project: - -```sh -cd doc -proselint **/*.md -``` - -`proselint` can also be run from within editors using plugins. For example, the following plugins - are available: - -- [Sublime Text](https://packagecontrol.io/packages/SublimeLinter-contrib-proselint) -- [Visual Studio Code](https://marketplace.visualstudio.com/items?itemName=PatrykPeszko.vscode-proselint) -- [Others](https://github.com/amperser/proselint#plugins-for-other-software) - -#### Sample `proselint` configuration - -All of the checks are good to use. However, excluding the `typography.symbols` and `misc.phrasal_adjectives` checks will reduce -noise. 
The following sample `proselint` configuration disables these checks: - -```json -{ - "checks": { - "typography.symbols": false, - "misc.phrasal_adjectives": false - } -} -``` - -A file with `proselint` configuration must be placed in a -[valid location](https://github.com/amperser/proselint#checks). For example, `~/.config/proselint/config`. - -### `markdownlint` - -`markdownlint` checks that certain rules ([example](https://github.com/DavidAnson/markdownlint/blob/master/README.md#rules--aliases)) - are followed for Markdown syntax. Our [style guidelines](styleguide.md) elaborate on which choices - must be made when selecting Markdown syntax for GitLab documentation and this tool helps - catch deviations from those guidelines. - -`markdownlint` can be used [on the command line](https://github.com/igorshubovych/markdownlint-cli#markdownlint-cli--), - either on a single Markdown file or on all Markdown files in a project. For example, to run - `markdownlint` on all documentation in the [`gitlab-ce` project](https://gitlab.com/gitlab-org/gitlab-ce), - run the following commands from within the `gitlab-ce` project: - -```sh -cd doc -markdownlint **/*.md -``` - -`markdownlint` can also be run from within editors using plugins. For example, the following plugins - are available: - -- [Sublime Text](https://packagecontrol.io/packages/SublimeLinter-contrib-markdownlint) -- [Visual Studio Code](https://marketplace.visualstudio.com/items?itemName=DavidAnson.vscode-markdownlint) -- [Others](https://github.com/DavidAnson/markdownlint#related) - -#### Sample `markdownlint` configuration - -The following sample `markdownlint` configuration modifies the available default rules to: - -- Adhere to the [style guidelines](styleguide.md). -- Apply conventions found in the GitLab documentation. -- Allow the flexibility of using some inline HTML. - -```json -{ - "default": true, - "header-style": { "style": "atx" }, - "ul-style": { "style": "dash" }, - "line-length": false, - "no-trailing-punctuation": false, - "ol-prefix": { "style": "one" }, - "blanks-around-fences": false, - "no-inline-html": { - "allowed_elements": [ - "table", - "tbody", - "tr", - "td", - "ul", - "ol", - "li", - "br", - "img", - "a", - "strong", - "i", - "div" - ] - }, - "hr-style": { "style": "---" }, - "fenced-code-language": false -} -``` - -For [`markdownlint`](https://github.com/DavidAnson/markdownlint/), this configuration must be -placed in a [valid location](https://github.com/igorshubovych/markdownlint-cli#configuration). For -example, `~/.markdownlintrc`. - -## Testing - -We treat documentation as code, thus have implemented some testing. -Currently, the following tests are in place: - -1. `docs lint`: Check that all internal (relative) links work correctly and - that all cURL examples in API docs use the full switches. It's recommended - to [check locally](#previewing-locally) before pushing to GitLab by executing the command - `bundle exec nanoc check internal_links` on your local - [`gitlab-docs`](https://gitlab.com/gitlab-com/gitlab-docs) directory. -1. [`ee_compat_check`](../automatic_ce_ee_merge.md#avoiding-ce-gt-ee-merge-conflicts-beforehand) (runs on CE only): - When you submit a merge request to GitLab Community Edition (CE), - there is this additional job that runs against Enterprise Edition (EE) - and checks if your changes can apply cleanly to the EE codebase. - If that job fails, read the instructions in the job log for what to do next. - As CE is merged into EE once a day, it's important to avoid merge conflicts. 
- Submitting an EE-equivalent merge request cherry-picking all commits from CE to EE is - essential to avoid them. - ## Branch naming If your contribution contains **only** documentation changes, you can speed up @@ -377,15 +245,6 @@ choices: If your branch name matches any of the above, it will run only the docs tests. If it doesn't, the whole test suite will run (including docs). -## Danger bot - -GitLab uses [danger bot](https://github.com/danger/danger) for some elements in -code review. For docs changes in merge requests, the following actions are taken: - -1. Whenever a change under `/doc` is made, the bot leaves a comment for the - author to mention `@gl-docsteam`, so that the docs can be properly - reviewed. - ## Merge requests for GitLab documentation Before getting started, make sure you read the introductory section @@ -428,105 +287,6 @@ Follow this [method for cherry-picking from CE to EE](../automatic_ce_ee_merge.m additionally to the CE MR. If there are many EE-only changes though, start a new MR to EE only. -## Previewing the changes live - -NOTE: **Note:** -To preview your changes to documentation locally, follow this -[development guide](https://gitlab.com/gitlab-com/gitlab-docs/blob/master/README.md#development-when-contributing-to-gitlab-documentation) or [these instructions for GDK](https://gitlab.com/gitlab-org/gitlab-development-kit/blob/master/doc/howto/gitlab_docs.md). - -The live preview is currently enabled for the following projects: - -- <https://gitlab.com/gitlab-org/gitlab-ce> -- <https://gitlab.com/gitlab-org/gitlab-ee> -- <https://gitlab.com/gitlab-org/gitlab-runner> - -If your branch contains only documentation changes, you can use -[special branch names](#branch-naming) to avoid long running pipelines. - -For [docs-only changes](#branch-naming), the review app is run automatically. -For all other branches, you can use the manual `review-docs-deploy-manual` job -in your merge request. You will need at least Maintainer permissions to be able -to run it. In the mini pipeline graph, you should see an `>>` icon. Clicking on it will -reveal the `review-docs-deploy-manual` job. Hit the play button for the job to start. - - - -NOTE: **Note:** -You will need to push a branch to those repositories, it doesn't work for forks. - -The `review-docs-deploy*` job will: - -1. Create a new branch in the [gitlab-docs](https://gitlab.com/gitlab-com/gitlab-docs) - project named after the scheme: `$DOCS_GITLAB_REPO_SUFFIX-$CI_ENVIRONMENT_SLUG`, - where `DOCS_GITLAB_REPO_SUFFIX` is the suffix for each product, e.g, `ce` for - CE, etc. -1. Trigger a cross project pipeline and build the docs site with your changes - -After a few minutes, the Review App will be deployed and you will be able to -preview the changes. The docs URL can be found in two places: - -- In the merge request widget -- In the output of the `review-docs-deploy*` job, which also includes the - triggered pipeline so that you can investigate whether something went wrong - -TIP: **Tip:** -Someone that has no merge rights to the CE/EE projects (think of forks from -contributors) will not be able to run the manual job. In that case, you can -ask someone from the GitLab team who has the permissions to do that for you. - -NOTE: **Note:** -Make sure that you always delete the branch of the merge request you were -working on. If you don't, the remote docs branch won't be removed either, -and the server where the Review Apps are hosted will eventually be out of -disk space. 
- -### Troubleshooting review apps - -In case the review app URL returns 404, follow these steps to debug: - -1. **Did you follow the URL from the merge request widget?** If yes, then check if - the link is the same as the one in the job output. -1. **Did you follow the URL from the job output?** If yes, then it means that - either the site is not yet deployed or something went wrong with the remote - pipeline. Give it a few minutes and it should appear online, otherwise you - can check the status of the remote pipeline from the link in the job output. - If the pipeline failed or got stuck, drop a line in the `#docs` chat channel. - -### Technical aspects - -If you want to know the hot details, here's what's really happening: - -1. You manually run the `review-docs-deploy` job in a CE/EE merge request. -1. The job runs the [`scripts/trigger-build-docs`](https://gitlab.com/gitlab-org/gitlab-ce/blob/master/scripts/trigger-build-docs) - script with the `deploy` flag, which in turn: - 1. Takes your branch name and applies the following: - - The slug of the branch name is used to avoid special characters since - ultimately this will be used by NGINX. - - The `preview-` prefix is added to avoid conflicts if there's a remote branch - with the same name that you created in the merge request. - - The final branch name is truncated to 42 characters to avoid filesystem - limitations with long branch names (> 63 chars). - 1. The remote branch is then created if it doesn't exist (meaning you can - re-run the manual job as many times as you want and this step will be skipped). - 1. A new cross-project pipeline is triggered in the docs project. - 1. The preview URL is shown both at the job output and in the merge request - widget. You also get the link to the remote pipeline. -1. In the docs project, the pipeline is created and it - [skips the test jobs](https://gitlab.com/gitlab-com/gitlab-docs/blob/8d5d5c750c602a835614b02f9db42ead1c4b2f5e/.gitlab-ci.yml#L50-55) - to lower the build time. -1. Once the docs site is built, the HTML files are uploaded as artifacts. -1. A specific Runner tied only to the docs project, runs the Review App job - that downloads the artifacts and uses `rsync` to transfer the files over - to a location where NGINX serves them. - -The following GitLab features are used among others: - -- [Manual actions](../../ci/yaml/README.md#manual-actions) -- [Multi project pipelines](https://docs.gitlab.com/ee/ci/multi_project_pipeline_graphs.html) -- [Review Apps](../../ci/review_apps/index.md) -- [Artifacts](../../ci/yaml/README.md#artifacts) -- [Specific Runner](../../ci/runners/README.md#locking-a-specific-runner-from-being-enabled-for-other-projects) - ## GitLab `/help` Every GitLab instance includes the documentation, which is available from `/help` @@ -678,5 +438,250 @@ date: 2017-02-01 Use the [writing method](https://about.gitlab.com/handbook/product/technical-writing/#writing-method) defined by the Technical Writing team. +## Previewing the changes live + +NOTE: **Note:** +To preview your changes to documentation locally, follow this +[development guide](https://gitlab.com/gitlab-com/gitlab-docs/blob/master/README.md#development-when-contributing-to-gitlab-documentation) or [these instructions for GDK](https://gitlab.com/gitlab-org/gitlab-development-kit/blob/master/doc/howto/gitlab_docs.md). 
+ +The live preview is currently enabled for the following projects: + +- <https://gitlab.com/gitlab-org/gitlab-ce> +- <https://gitlab.com/gitlab-org/gitlab-ee> +- <https://gitlab.com/gitlab-org/gitlab-runner> + +If your branch contains only documentation changes, you can use +[special branch names](#branch-naming) to avoid long running pipelines. + +For [docs-only changes](#branch-naming), the review app is run automatically. +For all other branches, you can use the manual `review-docs-deploy-manual` job +in your merge request. You will need at least Maintainer permissions to be able +to run it. In the mini pipeline graph, you should see an `>>` icon. Clicking on it will +reveal the `review-docs-deploy-manual` job. Hit the play button for the job to start. + + + +NOTE: **Note:** +You will need to push a branch to those repositories, it doesn't work for forks. + +The `review-docs-deploy*` job will: + +1. Create a new branch in the [gitlab-docs](https://gitlab.com/gitlab-com/gitlab-docs) + project named after the scheme: `$DOCS_GITLAB_REPO_SUFFIX-$CI_ENVIRONMENT_SLUG`, + where `DOCS_GITLAB_REPO_SUFFIX` is the suffix for each product, e.g, `ce` for + CE, etc. +1. Trigger a cross project pipeline and build the docs site with your changes + +After a few minutes, the Review App will be deployed and you will be able to +preview the changes. The docs URL can be found in two places: + +- In the merge request widget +- In the output of the `review-docs-deploy*` job, which also includes the + triggered pipeline so that you can investigate whether something went wrong + +TIP: **Tip:** +Someone that has no merge rights to the CE/EE projects (think of forks from +contributors) will not be able to run the manual job. In that case, you can +ask someone from the GitLab team who has the permissions to do that for you. + +NOTE: **Note:** +Make sure that you always delete the branch of the merge request you were +working on. If you don't, the remote docs branch won't be removed either, +and the server where the Review Apps are hosted will eventually be out of +disk space. + +### Troubleshooting review apps + +In case the review app URL returns 404, follow these steps to debug: + +1. **Did you follow the URL from the merge request widget?** If yes, then check if + the link is the same as the one in the job output. +1. **Did you follow the URL from the job output?** If yes, then it means that + either the site is not yet deployed or something went wrong with the remote + pipeline. Give it a few minutes and it should appear online, otherwise you + can check the status of the remote pipeline from the link in the job output. + If the pipeline failed or got stuck, drop a line in the `#docs` chat channel. + +### Technical aspects + +If you want to know the in-depth details, here's what's really happening: + +1. You manually run the `review-docs-deploy` job in a CE/EE merge request. +1. The job runs the [`scripts/trigger-build-docs`](https://gitlab.com/gitlab-org/gitlab-ce/blob/master/scripts/trigger-build-docs) + script with the `deploy` flag, which in turn: + 1. Takes your branch name and applies the following: + - The slug of the branch name is used to avoid special characters since + ultimately this will be used by NGINX. + - The `preview-` prefix is added to avoid conflicts if there's a remote branch + with the same name that you created in the merge request. + - The final branch name is truncated to 42 characters to avoid filesystem + limitations with long branch names (> 63 chars). + 1. 
The remote branch is then created if it doesn't exist (meaning you can + re-run the manual job as many times as you want and this step will be skipped). + 1. A new cross-project pipeline is triggered in the docs project. + 1. The preview URL is shown both at the job output and in the merge request + widget. You also get the link to the remote pipeline. +1. In the docs project, the pipeline is created and it + [skips the test jobs](https://gitlab.com/gitlab-com/gitlab-docs/blob/8d5d5c750c602a835614b02f9db42ead1c4b2f5e/.gitlab-ci.yml#L50-55) + to lower the build time. +1. Once the docs site is built, the HTML files are uploaded as artifacts. +1. A specific Runner tied only to the docs project, runs the Review App job + that downloads the artifacts and uses `rsync` to transfer the files over + to a location where NGINX serves them. + +The following GitLab features are used among others: + +- [Manual actions](../../ci/yaml/README.md#manual-actions) +- [Multi project pipelines](https://docs.gitlab.com/ee/ci/multi_project_pipeline_graphs.html) +- [Review Apps](../../ci/review_apps/index.md) +- [Artifacts](../../ci/yaml/README.md#artifacts) +- [Specific Runner](../../ci/runners/README.md#locking-a-specific-runner-from-being-enabled-for-other-projects) + +## Testing + +We treat documentation as code, thus have implemented some testing. +Currently, the following tests are in place: + +1. `docs lint`: Check that all internal (relative) links work correctly and + that all cURL examples in API docs use the full switches. It's recommended + to [check locally](#previewing-locally) before pushing to GitLab by executing the command + `bundle exec nanoc check internal_links` on your local + [`gitlab-docs`](https://gitlab.com/gitlab-com/gitlab-docs) directory. +1. [`ee_compat_check`](../automatic_ce_ee_merge.md#avoiding-ce-gt-ee-merge-conflicts-beforehand) (runs on CE only): + When you submit a merge request to GitLab Community Edition (CE), + there is this additional job that runs against Enterprise Edition (EE) + and checks if your changes can apply cleanly to the EE codebase. + If that job fails, read the instructions in the job log for what to do next. + As CE is merged into EE once a day, it's important to avoid merge conflicts. + Submitting an EE-equivalent merge request cherry-picking all commits from CE to EE is + essential to avoid them. + +### Linting + +To help adhere to the [documentation style guidelines](styleguide.md), and to improve the content +added to documentation, consider locally installing and running documentation linters. This will +help you catch common issues before raising merge requests for review of documentation. + +The following are some suggested linters you can install locally and sample configuration: + +- [`proselint`](#proselint) +- [`markdownlint`](#markdownlint) + +NOTE: **Note:** +This list does not limit what other linters you can add to your local documentation writing toolchain. + +#### `proselint` + +`proselint` checks for common problems with English prose. It provides a + [plethora of checks](http://proselint.com/checks/) that are helpful for technical writing. + +`proselint` can be used [on the command line](http://proselint.com/utility/), either on a single + Markdown file or on all Markdown files in a project. 
For example, to run `proselint` on all + documentation in the [`gitlab-ce` project](https://gitlab.com/gitlab-org/gitlab-ce), run the + following commands from within the `gitlab-ce` project: + +```sh +cd doc +proselint **/*.md +``` + +`proselint` can also be run from within editors using plugins. For example, the following plugins + are available: + +- [Sublime Text](https://packagecontrol.io/packages/SublimeLinter-contrib-proselint) +- [Visual Studio Code](https://marketplace.visualstudio.com/items?itemName=PatrykPeszko.vscode-proselint) +- [Others](https://github.com/amperser/proselint#plugins-for-other-software) + +##### Sample `proselint` configuration + +All of the checks are good to use. However, excluding the `typography.symbols` and `misc.phrasal_adjectives` checks will reduce +noise. The following sample `proselint` configuration disables these checks: + +```json +{ + "checks": { + "typography.symbols": false, + "misc.phrasal_adjectives": false + } +} +``` + +A file with `proselint` configuration must be placed in a +[valid location](https://github.com/amperser/proselint#checks). For example, `~/.config/proselint/config`. + +#### `markdownlint` + +`markdownlint` checks that certain rules ([example](https://github.com/DavidAnson/markdownlint/blob/master/README.md#rules--aliases)) + are followed for Markdown syntax. Our [style guidelines](styleguide.md) elaborate on which choices + must be made when selecting Markdown syntax for GitLab documentation and this tool helps + catch deviations from those guidelines. + +`markdownlint` can be used [on the command line](https://github.com/igorshubovych/markdownlint-cli#markdownlint-cli--), + either on a single Markdown file or on all Markdown files in a project. For example, to run + `markdownlint` on all documentation in the [`gitlab-ce` project](https://gitlab.com/gitlab-org/gitlab-ce), + run the following commands from within the `gitlab-ce` project: + +```sh +cd doc +markdownlint **/*.md +``` + +`markdownlint` can also be run from within editors using plugins. For example, the following plugins + are available: + +- [Sublime Text](https://packagecontrol.io/packages/SublimeLinter-contrib-markdownlint) +- [Visual Studio Code](https://marketplace.visualstudio.com/items?itemName=DavidAnson.vscode-markdownlint) +- [Others](https://github.com/DavidAnson/markdownlint#related) + +##### Sample `markdownlint` configuration + +The following sample `markdownlint` configuration modifies the available default rules to: + +- Adhere to the [style guidelines](styleguide.md). +- Apply conventions found in the GitLab documentation. +- Allow the flexibility of using some inline HTML. + +```json +{ + "default": true, + "header-style": { "style": "atx" }, + "ul-style": { "style": "dash" }, + "line-length": false, + "no-trailing-punctuation": false, + "ol-prefix": { "style": "one" }, + "blanks-around-fences": false, + "no-inline-html": { + "allowed_elements": [ + "table", + "tbody", + "tr", + "td", + "ul", + "ol", + "li", + "br", + "img", + "a", + "strong", + "i", + "div" + ] + }, + "hr-style": { "style": "---" }, + "fenced-code-language": false +} +``` + +For [`markdownlint`](https://github.com/DavidAnson/markdownlint/), this configuration must be +placed in a [valid location](https://github.com/igorshubovych/markdownlint-cli#configuration). For +example, `~/.markdownlintrc`. + +## Danger bot + +GitLab uses [danger bot](https://github.com/danger/danger) for some elements in +code review. 
For docs changes in merge requests, whenever a change under `/doc` +is made, the bot leaves a comment for the author to mention `@gl-docsteam`, so +that the docs can be properly reviewed. + [gitlab-map]: https://gitlab.com/gitlab-org/gitlab-design/raw/master/production/resources/gitlab-map.png [graffle]: https://gitlab.com/gitlab-org/gitlab-design/blob/d8d39f4a87b90fb9ae89ca12dc565347b4900d5e/production/resources/gitlab-map.graffle diff --git a/doc/development/documentation/styleguide.md b/doc/development/documentation/styleguide.md index 97b1b890836..8309ba9a72c 100644 --- a/doc/development/documentation/styleguide.md +++ b/doc/development/documentation/styleguide.md @@ -10,17 +10,15 @@ GitLab documentation. Check the Check the GitLab handbook for the [writing styles guidelines](https://about.gitlab.com/handbook/communication/#writing-style-guidelines). -For help adhering to the guidelines, see [Linting](index.md#linting). +For help adhering to the guidelines, see [linting](index.md#linting). ## Files - [Directory structure](index.md#location-and-naming-documents): place the docs - in the correct location. +in the correct location. - [Documentation files](index.md#documentation-files): name the files accordingly. -- [Markdown](../../user/markdown.md): use the GitLab Flavored Markdown in the - documentation. -NOTE: **Note:** +DANGER: **Attention:** **Do not** use capital letters, spaces, or special chars in file names, branch names, directory names, headings, or in anything that generates a path. @@ -28,65 +26,144 @@ NOTE: **Note:** **Do not** create new `README.md` files, name them `index.md` instead. There's a test that will fail if it spots a new `README.md` file. -## Text +### Markdown + +The [documentation website](https://docs.gitlab.com) had its markdown engine migrated from [Redcarpet to GitLab Kramdown](https://gitlab.com/gitlab-com/gitlab-docs/merge_requests/108) +in October, 2018. + +The [`gitlab-kramdown`](https://gitlab.com/gitlab-org/gitlab_kramdown) +gem will support all [GFM markup](../../user/markdown.md) in the future. For now, +use regular markdown markup, following the rules on this style guide. For a complete +Kramdown reference, check the [GiLab Markdown Kramdown Guide](https://about.gitlab.com/handbook/product/technical-writing/markdown-guide/). +Use Kramdown markup wisely: do not overuse its specific markup (e.g., `{:.class}`) as it will not render properly in +[`/help`](#gitlab-help). + +## Content -- Split up long lines (wrap text), this makes it much easier to review and edit. Only - double line breaks are shown as a full line break in [GitLab markdown][gfm]. - 80-100 characters is a good line length. - Make sure that the documentation is added in the correct - [directory](index.md#documentation-directory-structure) and that - there's a link to it somewhere useful. + [directory](index.md#documentation-directory-structure), linked from its + higher-level index, and linked from other related pages. - Do not duplicate information. - Be brief and clear. -- Unless there's a logical reason not to, add documents in alphabetical order. +- Unless there's a logical reason not to, structure the document in alphabetical order +(headings, tables, and lists). - Write in US English. -- Use [single spaces][] instead of double spaces. -- Jump a line between different markups (e.g., after every paragraph, header, list, etc) - Capitalize "G" and "L" in GitLab. -- Use sentence case for titles, headings, labels, menu items, and buttons. 
 - Use title case when referring to [features](https://about.gitlab.com/features/) or
-  [products](https://about.gitlab.com/pricing/) (e.g., GitLab Runner, Geo,
-  Issue Boards, GitLab Core, Git, Prometheus, Kubernetes, etc), and methods or methodologies
-  (e.g., Continuous Integration, Continuous Deployment, Scrum, Agile, etc). Note that
-  some features are also objects (e.g. "Merge Requests" and "merge requests").
+[products](https://about.gitlab.com/pricing/) (e.g., GitLab Runner, Geo,
+Issue Boards, GitLab Core, Git, Prometheus, Kubernetes, etc), and methods or methodologies
+(e.g., Continuous Integration, Continuous Deployment, Scrum, Agile, etc). Note that
+some features are also objects (e.g. "GitLab's Merge Requests support X." and "Create a new merge request for Z.").
 
-## Formatting
+## Text
+
+- Split up long lines (wrap text), this makes it much easier to review and edit. Only
+  double line breaks are shown as a full line break by creating new paragraphs.
+  80-100 characters is the recommended line length.
+- Use sentence case for titles, headings, labels, menu items, and buttons.
+- Jump a line between different markups (e.g., after every paragraph, header, list, etc). Example:
 
-- Use double asterisks (`**`) to mark a word or text in bold (`**bold**`).
-- Use undescore (`_`) for text in italics (`_italic_`).
-- Put an empty line between different markups. For example:
   ```md
   ## Header
 
   Paragraph.
 
-  - List item
-  - List item
+  - List item 1
+  - List item 2
   ```
 
-### Punctuation
+## Emphasis
 
-For punctuation rules, please refer to the [GitLab UX guide](https://design.gitlab.com/content/punctuation/).
+- Use double asterisks (`**`) to mark a word or text in bold (`**bold**`).
+- Use underscore (`_`) for text in italics (`_italic_`).
+- Use greater than (`>`) for blockquotes.
+
+## Punctuation
+
+Check the general punctuation rules for the GitLab documentation in the table below.
+Check specific punctuation rules for [list items](#list-items) below.
+
+| Rule | Example |
+| ---- | ------- |
+| Always end full sentences with a period. | _For a complete overview, read through this document._ |
+| Always add a space after a period when beginning a new sentence. | _For a complete overview, check this doc. For other references, check out this guide._ |
+| Do not use double spaces. | --- |
+| Do not use tabs for indentation. Use spaces instead. You can configure your code editor to output spaces instead of tabs when pressing the tab key. | --- |
+| Use serial commas ("Oxford commas") before the final 'and/or' in a list. | _You can create new issues, merge requests, and milestones._ |
+| Always add a space before and after dashes when using them in a sentence (when replacing a comma, for example). | _You should try this - or not._ |
+| Always use lowercase after a colon. | _Related Issues: a way to create a relationship between issues._ |
+
+## List items
+
+- Always start list items with a capital letter.
+- Always leave a blank line before and after a list.
+- Begin a line with spaces (not tabs) to denote a subitem.
+- To nest subitems, indent them with two spaces.
+- To nest code blocks, indent them with four spaces.
+- Only use ordered lists when their items describe a sequence of steps to follow.
 
-### Ordered and unordered lists
+**Markup:**
 
-- Use dashes (`-`) for unordered lists instead of asterisks (`*`).
-- Use the number one (`1`) for ordered lists.
+- Use dashes (`- `) for unordered lists instead of asterisks (`* `).
+- Use the number one (`1`) for each item in an ordered list.
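+
+For example, an ordered list written in this style might look like this
+(illustrative snippet):
+
+```md
+1. First list item
+1. Second list item
+1. Third list item
+```
+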
+When rendered, the list items will appear with sequential numbering. + +**Punctuation:** + +- Do not add commas (`,`) or semicolons (`;`) to the end of a list item. +- Only add periods to the end of a list item if the item consists of a complete sentence. The [definition of full sentence](https://www2.le.ac.uk/offices/ld/resources/writing/grammar/grammar-guides/sentence) is: _"a complete sentence always contains a verb, expresses a complete idea, and makes sense standing alone"_. +- Be consistent throughout the list: if the majority of the items do not end in a period, do not end any of the items in a period, even if they consist of a complete sentence. The opposite is also valid: if the majority of the items end with a period, end all with a period. - Separate list items from explanatory text with a colon (`:`). For example: + ```md The list is as follows: - - First item: This explains the first item. - - Second item: This explains the second item. + - First item: this explains the first item. + - Second item: this explains the second item. ``` -- For further guidance on punctuation in bullet lists, please refer to the [GitLab UX guide](https://design.gitlab.com/content/punctuation/). + +**Examples:** + +Do: + +- First list item +- Second list item +- Third list item + +Don't: + +- First list item +- Second list item +- Third list item. + +Do: + +- Let's say this is a complete sentence. +- Let's say this is also a complete sentence. +- Not a complete sentence. + +Don't: + +- Let's say this is a complete sentence. +- Let's say this is also a complete sentence. +- Not a complete sentence + +## Quotes + +Valid for markdown content only, not for frontmatter entries: + +- Standard quotes: double quotes (`"`). Example: "This is wrapped in double quotes". +- Quote within a quote: double quotes (`"`) wrap single quotes (`'`). Example: "I am 'quoting' something within a quote". + +For other punctuation rules, please refer to the +[GitLab UX guide](https://design.gitlab.com/content/punctuation/). ## Headings - Add **only one H1** in each document, by adding `#` at the beginning of it (when using markdown). The `h1` will be the document `<title>`. -- Start with an h2 (`##`), and respect the order h2 > h3 > h4 > h5 > h6. - Never skip the hierarchy level, such as h2 > h4 +- Start with an `h2` (`##`), and respect the order `h2` > `h3` > `h4` > `h5` > `h6`. + Never skip the hierarchy level, such as `h2` > `h4` - Avoid putting numbers in headings. Numbers shift, hence documentation anchor links shift too, which eventually leads to dead links. If you think it is compelling to add numbers in headings, make sure to at least discuss it with @@ -96,21 +173,19 @@ For punctuation rules, please refer to the [GitLab UX guide](https://design.gitl - Avoid adding things that show ephemeral statuses. For example, if a feature is considered beta or experimental, put this info in a note, not in the heading. - When introducing a new document, be careful for the headings to be - grammatically and syntactically correct. Mention one or all - of the following GitLab members for a review: `@axil` or `@marcia`. + grammatically and syntactically correct. Mention an [assigned technical writer (TW)](https://about.gitlab.com/handbook/product/categories/) + for review. This is to ensure that no document with wrong heading is going live without an audit, thus preventing dead links and redirection issues when corrected. - Leave exactly one new line after a heading. +- Do not use links in headings. 
+- Add the corresponding [product badge](#product-badges) according to the tier the feature belongs. ## Links -- Use the regular inline link markdown markup `[Text](https://example.com)`. - It's easier to read, review, and maintain. -- If there's a link that repeats several times through the same document, - you can use `[Text][identifier]` and at the bottom of the section or the - document add: `[identifier]: https://example.com`, in which case, we do - encourage you to also add an alternative text: `[identifier]: https://example.com "Alternative text"` that appears when hovering your mouse on a link. +- Use inline link markdown markup `[Text](https://example.com)`. + It's easier to read, review, and maintain. **Do not** use `[Text][identifier]`. - To link to internal documentation, use relative links, not full URLs. Use `../` to navigate tp high-level directories, and always add the file name `file.md` at the end of the link with the `.md` extension, not `.html`. @@ -128,11 +203,12 @@ For punctuation rules, please refer to the [GitLab UX guide](https://design.gitl To indicate the steps of navigation through the UI: + - Use the exact word as shown in the UI, including any capital letters as-is. -- Use bold text for navigation items and the char `>` as separator - (e.g., `Navigate to your project's **Settings > CI/CD**` ). +- Use bold text for navigation items and the char "greater than" (`>`) as separator +(e.g., `Navigate to your project's **Settings > CI/CD**` ). - If there are any expandable menus, make sure to mention that the user - needs to expand the tab to find the settings you're referring to. +needs to expand the tab to find the settings you're referring to (e.g., `Navigate to your project's **Settings > CI/CD** and expand **General pipelines**`). ## Images @@ -141,13 +217,13 @@ To indicate the steps of navigation through the UI: names with the name of the document that they will be included in. For example, if there is a document called `twitter.md`, then a valid image name could be `twitter_login_screen.png`. -- Images should have a specific, non-generic name that will differentiate them. +- Images should have a specific, non-generic name that will differentiate and describe them properly. - Keep all file names in lower case. - Consider using PNG images instead of JPEG. - Compress all images with <https://tinypng.com/> or similar tool. - Compress gifs with <https://ezgif.com/optimize> or similar tool. - Images should be used (only when necessary) to _illustrate_ the description - of a process, not to _replace_ it. +of a process, not to _replace_ it. - Max image size: 100KB (gifs included). - The GitLab docs do not support videos yet. @@ -164,13 +240,39 @@ Inside the document: - If a heading is placed right after an image, always add three dashes (`---`) between the image and the heading. +## Code blocks + +- Always wrap code added to a sentence in inline code blocks (``` ` ```). +E.g., `.gitlab-ci.yml`, `git add .`, `CODEOWNERS`, `only: master`. +File names, commands, entries, and anything that refers to code should be added to code blocks. +To make things easier for the user, always add a full code block for things that can be +useful to copy and paste, as they can easily do it with the button on code blocks. +- For regular code blocks, always use a highlighting class corresponding to the +language for better readability. 
Examples: + + ```md + ```ruby + Ruby code + ``` + + ```js + JavaScript code + ``` + + ```md + Markdown code + ``` + ``` + +- For a complete reference on code blocks, check the [Kramdown guide](https://about.gitlab.com/handbook/product/technical-writing/markdown-guide/#code-blocks). + ## Alert boxes Whenever you want to call the attention to a particular sentence, use the following markup for highlighting. _Note that the alert boxes only work for one paragraph only. Multiple paragraphs, -lists, headers, etc will not render correctly._ +lists, headers, etc will not render correctly. For multiple lines, use blockquotes instead._ ### Note @@ -234,6 +336,31 @@ which renders in docs.gitlab.com to: If the text spans across multiple lines it's OK to split the line. +For multiple paragraphs, use the symbol `>` before every line: + +```md +> This is the first paragraph. +> +> This is the second paragraph. +> +> - This is a list item +> - Second item in the list +> +> ### This is an `h3` +``` + +Which renders to: + +> This is the first paragraph. +> +> This is the second paragraph. +> +> - This is a list item +> - Second item in the list +> +> ### This is an `h3` +>{:.no_toc} + ## Specific sections and terms To mention and/or reference specific terms in GitLab, please follow the styles @@ -290,18 +417,18 @@ feature availability: To exclude GitLab.com tiers (when the feature is not available in GitLab.com), add the keyword "only": +- For GitLab Core: `**[CORE ONLY]**`. - For GitLab Starter: `**[STARTER ONLY]**`. - For GitLab Premium: `**[PREMIUM ONLY]**`. - For GitLab Ultimate: `**[ULTIMATE ONLY]**`. -- For GitLab Core: `**[CORE ONLY]**`. The tier should be ideally added to headers, so that the full badge will be displayed. However, it can be also mentioned from paragraphs, list items, and table cells. For these cases, -the tier mention will be represented by an orange question mark. +the tier mention will be represented by an orange question mark that will show the tiers on hover. E.g., `**[STARTER]**` renders **[STARTER]**, `**[STARTER ONLY]**` renders **[STARTER ONLY]**. The absence of tiers' mentions mean that the feature is available in GitLab Core, -GitLab.com Free, and higher tiers. +GitLab.com Free, and all higher tiers. #### How it works @@ -348,8 +475,8 @@ prefer to document it in the CE docs to avoid duplication. Configuration settings include: -- Settings that touch configuration files in `config/`. -- NGINX settings and settings in `lib/support/` in general. +1. Settings that touch configuration files in `config/`. +1. NGINX settings and settings in `lib/support/` in general. When there is a list of steps to perform, usually that entails editing the configuration file and reconfiguring/restarting GitLab. 
In such case, follow @@ -386,13 +513,13 @@ the style below as a guide: In this case: -- before each step list the installation method is declared in bold -- three dashes (`---`) are used to create a horizontal line and separate the +- Before each step list the installation method is declared in bold +- Three dashes (`---`) are used to create a horizontal line and separate the two methods -- the code blocks are indented one or more spaces under the list item to render +- The code blocks are indented one or more spaces under the list item to render correctly -- different highlighting languages are used for each config in the code block -- the [references](#references) guide is used for reconfigure/restart +- Different highlighting languages are used for each config in the code block +- The [references](#references) guide is used for reconfigure/restart ### Fake tokens diff --git a/doc/development/ee_features.md b/doc/development/ee_features.md index b6f053ff0e9..9aea03139ee 100644 --- a/doc/development/ee_features.md +++ b/doc/development/ee_features.md @@ -119,10 +119,20 @@ This also applies to views. ### EE features based on CE features -For features that build on existing CE features, write a module in the -`EE` namespace and `prepend` it in the CE class. This makes conflicts -less likely to happen during CE to EE merges because only one line is -added to the CE class - the `prepend` line. +For features that build on existing CE features, write a module in the `EE` +namespace and `prepend` it in the CE class, on the last line of the file that +the class resides in. This makes conflicts less likely to happen during CE to EE +merges because only one line is added to the CE class - the `prepend` line. For +example, to prepend a module into the `User` class you would use the following +approach: + +```ruby +class User < ActiveRecord::Base + # ... lots of code here ... +end + +User.prepend(EE::User) +``` Since the module would require an `EE` namespace, the file should also be put in an `ee/` sub-directory. For example, we want to extend the user model @@ -231,7 +241,6 @@ the existing file: ```ruby class ApplicationController < ActionController::Base - prepend EE::ApplicationController # ... def after_sign_out_path_for(resource) @@ -240,6 +249,8 @@ class ApplicationController < ActionController::Base # ... end + +ApplicationController.prepend(EE::ApplicationController) ``` And create a new file in the `ee/` sub-directory with the altered @@ -533,8 +544,6 @@ module API end end - prepend EE::API::MergeRequests - params :optional_params do # CE specific params go here... @@ -542,6 +551,8 @@ module API end end end + +API::MergeRequests.prepend(EE::API::MergeRequests) ``` And then we could override it in EE module: @@ -582,10 +593,10 @@ module API authorize_read_builds! end end - - prepend EE::API::JobArtifacts end end + +API::JobArtifacts.prepend(EE::API::JobArtifacts) ``` And then we can follow regular object-oriented practices to override it: @@ -626,8 +637,6 @@ module API end end - prepend EE::API::MergeRequests - put ':id/merge_requests/:merge_request_iid/merge' do merge_request = find_project_merge_request(params[:merge_request_iid]) @@ -639,6 +648,8 @@ module API end end end + +API::MergeRequests.prepend(EE::API::MergeRequests) ``` Note that `update_merge_request_ee` doesn't do anything in CE, but @@ -676,27 +687,37 @@ or not we really need to extend it from EE. For now we're not using it much. 
Sometimes we need to use different arguments for a particular API route, and we can't easily extend it with an EE module because Grape has different context in -different blocks. In order to overcome this, we could use class methods from the -API class. +different blocks. In order to overcome this, we need to move the data to a class +method that resides in a separate module or class. This allows us to extend that +module or class before its data is used, without having to place a `prepend` in +the middle of CE code. For example, in one place we need to pass an extra argument to `at_least_one_of` so that the API could consider an EE-only argument as the -least argument. This is not quite beautiful but it's working: +least argument. We would approach this as follows: ```ruby +# api/merge_requests/parameters.rb module API class MergeRequests < Grape::API - def self.update_params_at_least_one_of - %i[ - assignee_id - description - ] + module Parameters + def self.update_params_at_least_one_of + %i[ + assignee_id + description + ] + end end + end +end - prepend EE::API::MergeRequests +API::MergeRequests::Parameters.prepend(EE::API::MergeRequests::Parameters) +# api/merge_requests.rb +module API + class MergeRequests < Grape::API params do - at_least_one_of(*::API::MergeRequests.update_params_at_least_one_of) + at_least_one_of(*Parameters.update_params_at_least_one_of) end end end @@ -708,16 +729,18 @@ And then we could easily extend that argument in the EE class method: module EE module API module MergeRequests - extend ActiveSupport::Concern + module Parameters + extend ActiveSupport::Concern - class_methods do - extend ::Gitlab::Utils::Override + class_methods do + extend ::Gitlab::Utils::Override - override :update_params_at_least_one_of - def update_params_at_least_one_of - super.push(*%i[ - squash - ]) + override :update_params_at_least_one_of + def update_params_at_least_one_of + super.push(*%i[ + squash + ]) + end end end end @@ -728,6 +751,78 @@ end It could be annoying if we need this for a lot of routes, but it might be the simplest solution right now. +This approach can also be used when models define validations that depend on +class methods. 
For example: + +```ruby +# app/models/identity.rb +class Identity < ActiveRecord::Base + def self.uniqueness_scope + [:provider] + end + + prepend EE::Identity + + validates :extern_uid, + allow_blank: true, + uniqueness: { scope: uniqueness_scope, case_sensitive: false } +end + +# ee/app/models/ee/identity.rb +module EE + module Identity + extend ActiveSupport::Concern + + class_methods do + extend ::Gitlab::Utils::Override + + def uniqueness_scope + [*super, :saml_provider_id] + end + end + end +end +``` + +Instead of taking this approach, we would refactor our code into the following: + +```ruby +# ee/app/models/ee/identity/uniqueness_scopes.rb +module EE + module Identity + module UniquenessScopes + extend ActiveSupport::Concern + + class_methods do + extend ::Gitlab::Utils::Override + + def uniqueness_scope + [*super, :saml_provider_id] + end + end + end + end +end + +# app/models/identity/uniqueness_scopes.rb +class Identity < ActiveRecord::Base + module UniquenessScopes + def self.uniqueness_scope + [:provider] + end + end +end + +Identity::UniquenessScopes.prepend(EE::Identity::UniquenessScopes) + +# app/models/identity.rb +class Identity < ActiveRecord::Base + validates :extern_uid, + allow_blank: true, + uniqueness: { scope: Identity::UniquenessScopes.scopes, case_sensitive: false } +end +``` + ### Code in `spec/` When you're testing EE-only features, avoid adding examples to the diff --git a/doc/development/fe_guide/style_guide_scss.md b/doc/development/fe_guide/style_guide_scss.md index 48eb6d0a7d6..b09243598d5 100644 --- a/doc/development/fe_guide/style_guide_scss.md +++ b/doc/development/fe_guide/style_guide_scss.md @@ -183,9 +183,11 @@ Don't use ID selectors in CSS. ``` ### Variables + Before adding a new variable for a color or a size, guarantee: -1. There isn't already one -2. There isn't a similar one we can use instead. + +- There isn't already one +- There isn't a similar one we can use instead. ## Linting diff --git a/doc/development/fe_guide/vue.md b/doc/development/fe_guide/vue.md index f6cbd11042c..ccfd465531a 100644 --- a/doc/development/fe_guide/vue.md +++ b/doc/development/fe_guide/vue.md @@ -221,6 +221,14 @@ const vm = mountComponent(Component, data); The main return value of a Vue component is the rendered output. In order to test the component we need to test the rendered output. [Vue][vue-test] guide's to unit test show us exactly that: +## Vue.js Expert Role +One should apply to be a Vue.js expert by opening an MR when the Merge Request's they create and review show: +- Deep understanding of Vue and Vuex reactivy +- Vue and Vuex code are structured according to both official and our guidelines +- Full understanding of testing a Vue and Vuex application +- Vuex code follows the [documented pattern](./vuex.md#actions-pattern-request-and-receive-namespaces) +- Knowledge about the existing Vue and Vuex applications and existing reusable components + [vue-docs]: http://vuejs.org/guide/index.html [issue-boards]: https://gitlab.com/gitlab-org/gitlab-ce/tree/master/app/assets/javascripts/boards diff --git a/doc/development/fe_guide/vuex.md b/doc/development/fe_guide/vuex.md index f582f5da323..0f57835fb87 100644 --- a/doc/development/fe_guide/vuex.md +++ b/doc/development/fe_guide/vuex.md @@ -114,19 +114,21 @@ When a request is made we often want to show a loading state to the user. Instead of creating an action to toggle the loading state and dispatch it in the component, create: + 1. An action `requestSomething`, to toggle the loading state 1. 
An action `receiveSomethingSuccess`, to handle the success callback 1. An action `receiveSomethingError`, to handle the error callback 1. An action `fetchSomething` to make the request. 1. In case your application does more than a `GET` request you can use these as examples: - 1. `PUT`: `createSomething` - 2. `POST`: `updateSomething` - 3. `DELETE`: `deleteSomething` + - `PUT`: `createSomething` + - `POST`: `updateSomething` + - `DELETE`: `deleteSomething` The component MUST only dispatch the `fetchNamespace` action. Actions namespaced with `request` or `receive` should not be called from the component The `fetch` action will be responsible to dispatch `requestNamespace`, `receiveNamespaceSuccess` and `receiveNamespaceError` By following this pattern we guarantee: + 1. All applications follow the same pattern, making it easier for anyone to maintain the code 1. All data in the application follows the same lifecycle pattern 1. Actions are contained and human friendly @@ -297,12 +299,12 @@ export default { ```javascript // component.vue - + // bad created() { this.$store.commit('mutation'); } - + // good created() { this.$store.dispatch('action'); diff --git a/doc/development/github_importer.md b/doc/development/github_importer.md index 0d558583bb8..e860bde48dc 100644 --- a/doc/development/github_importer.md +++ b/doc/development/github_importer.md @@ -99,8 +99,8 @@ This worker will wrap up the import process by performing some housekeeping Advancing stages is done in one of two ways: -1. Scheduling the worker for the next stage directly. -2. Scheduling a job for `Gitlab::GithubImport::AdvanceStageWorker` which will +- Scheduling the worker for the next stage directly. +- Scheduling a job for `Gitlab::GithubImport::AdvanceStageWorker` which will advance the stage when all work of the current stage has been completed. The first approach should only be used by workers that perform all their work in @@ -147,7 +147,7 @@ We handle this by doing the following: 1. Once we hit the rate limit all jobs will automatically reschedule themselves in such a way that they are not executed until the rate limit has been reset. -2. We cache the mapping of GitHub users to GitLab users in Redis. +1. We cache the mapping of GitHub users to GitLab users in Redis. More information on user caching can be found below. @@ -157,21 +157,21 @@ When mapping GitHub users to GitLab users we need to (in the worst case) perform: 1. One API call to get the user's Email address. -2. Two database queries to see if a corresponding GitLab user exists. One query +1. Two database queries to see if a corresponding GitLab user exists. One query will try to find the user based on the GitHub user ID, while the second query is used to find the user using their GitHub Email address. Because this process is quite expensive we cache the result of these lookups in Redis. For every user looked up we store three keys: -1. A Redis key mapping GitHub usernames to their Email addresses. -2. A Redis key mapping a GitHub Email addresses to a GitLab user ID. -3. A Redis key mapping a GitHub user ID to GitLab user ID. +- A Redis key mapping GitHub usernames to their Email addresses. +- A Redis key mapping a GitHub Email addresses to a GitLab user ID. +- A Redis key mapping a GitHub user ID to GitLab user ID. There are two types of lookups we cache: -1. A positive lookup, meaning we found a GitLab user ID. -2. A negative lookup, meaning we didn't find a GitLab user ID. Caching this +- A positive lookup, meaning we found a GitLab user ID. 
+- A negative lookup, meaning we didn't find a GitLab user ID. Caching this prevents us from performing the same work for users that we know don't exist in our GitLab database. diff --git a/doc/development/i18n/proofreader.md b/doc/development/i18n/proofreader.md index f58d79fccf1..c4ac53f45ac 100644 --- a/doc/development/i18n/proofreader.md +++ b/doc/development/i18n/proofreader.md @@ -41,6 +41,7 @@ are very appreciative of the work done by translators and proofreaders! - Nikita Grylov - [GitLab](https://gitlab.com/nixel2007), [Crowdin](https://crowdin.com/profile/nixel2007) - Alexy Lustin - [GitLab](https://gitlab.com/allustin), [Crowdin](https://crowdin.com/profile/lustin) - Spanish + - Pedro Garcia - [GitLab](https://gitlab.com/pedgarrod), [Crowdin](https://crowdin.com/profile/breaking_pitt) - Ukrainian - Volodymyr Sobotovych - [GitLab](https://gitlab.com/wheleph), [Crowdin](https://crowdin.com/profile/wheleph) - Andrew Vityuk - [GitLab](https://gitlab.com/3_1_3_u), [Crowdin](https://crowdin.com/profile/andruwa13) diff --git a/doc/development/instrumentation.md b/doc/development/instrumentation.md index 7761f65d78a..bef166f2aec 100644 --- a/doc/development/instrumentation.md +++ b/doc/development/instrumentation.md @@ -117,11 +117,11 @@ The block is executed and the execution time is stored as a set of fields in the currently running transaction. If no transaction is present the block is yielded without measuring anything. -3 values are measured for a block: +Three values are measured for a block: -1. The real time elapsed, stored in NAME_real_time. -2. The CPU time elapsed, stored in NAME_cpu_time. -3. The call count, stored in NAME_call_count. +- The real time elapsed, stored in NAME_real_time. +- The CPU time elapsed, stored in NAME_cpu_time. +- The call count, stored in NAME_call_count. Both the real and CPU timings are measured in milliseconds. diff --git a/doc/development/logging.md b/doc/development/logging.md new file mode 100644 index 00000000000..abd08c420da --- /dev/null +++ b/doc/development/logging.md @@ -0,0 +1,144 @@ +# GitLab Developers Guide to Logging + +[GitLab Logs](../administration/logs.md) play a critical role for both +administrators and GitLab team members to diagnose problems in the field. + +## Don't use `Rails.logger` + +Currently `Rails.logger` calls all get saved into `production.log`, which contains +a mix of Rails' logs and other calls developers have inserted in the code base. +For example: + +``` +Started GET "/gitlabhq/yaml_db/tree/master" for 168.111.56.1 at 2015-02-12 19:34:53 +0200 +Processing by Projects::TreeController#show as HTML + Parameters: {"project_id"=>"gitlabhq/yaml_db", "id"=>"master"} + + ... + + Namespaces"."created_at" DESC, "namespaces"."id" DESC LIMIT 1 [["id", 26]] + CACHE (0.0ms) SELECT "members".* FROM "members" WHERE "members"."source_type" = 'Project' AND "members"."type" IN ('ProjectMember') AND "members"."source_id" = $1 AND "members"."source_type" = $2 AND "members"."user_id" = 1 ORDER BY "members"."created_at" DESC, "members"."id" DESC LIMIT 1 [["source_id", 18], ["source_type", "Project"]] + CACHE (0.0ms) SELECT "members".* FROM "members" WHERE "members"."source_type" = 'Project' AND "members". 
+ (1.4ms) SELECT COUNT(*) FROM "merge_requests" WHERE "merge_requests"."target_project_id" = $1 AND ("merge_requests"."state" IN ('opened','reopened')) [["target_project_id", 18]] + Rendered layouts/nav/_project.html.haml (28.0ms) + Rendered layouts/_collapse_button.html.haml (0.2ms) + Rendered layouts/_flash.html.haml (0.1ms) + Rendered layouts/_page.html.haml (32.9ms) +Completed 200 OK in 166ms (Views: 117.4ms | ActiveRecord: 27.2ms) +``` + +These logs suffer from a number of problems: + +1. They often lack timestamps or other contextual information (e.g. project ID, user) +2. They may span multiple lines, which make them hard to find via Elasticsearch. +3. They lack a common structure, which make them hard to parse by log +forwarders, such as Logstash or Fluentd. This also makes them hard to +search. + +Note that currently on GitLab.com, any messages in `production.log` will +NOT get indexed by Elasticsearch due to the sheer volume and noise. They +do end up in Google Stackdriver, but it is still harder to search for +logs there. See the [GitLab.com logging +documentation](https://gitlab.com/gitlab-com/runbooks/blob/master/howto/logging.md) +for more details. + +## Use structured (JSON) logging + +Structured logging solves these problems. Consider the example from an API request: + +```json +{"time":"2018-10-29T12:49:42.123Z","severity":"INFO","duration":709.08,"db":14.59,"view":694.49,"status":200,"method":"GET","path":"/api/v4/projects","params":[{"key":"action","value":"git-upload-pack"},{"key":"changes","value":"_any"},{"key":"key_id","value":"secret"},{"key":"secret_token","value":"[FILTERED]"}],"host":"localhost","ip":"::1","ua":"Ruby","route":"/api/:version/projects","user_id":1,"username":"root","queue_duration":100.31,"gitaly_calls":30} +``` + +In a single line, we've included all the information that a user needs +to understand what happened: the timestamp, HTTP method and path, user +ID, etc. + +### How to use JSON logging + +Suppose you want to log the events that happen in a project +importer. You want to log issues created, merge requests, etc. as the +importer progresses. Here's what to do: + +1. Look at [the list of GitLab Logs](../administration/logs.md) to see +if your log message might belong with one of the existing log files. +1. If there isn't a good place, consider creating a new filename, but +check with a maintainer if it makes sense to do so. A log file should +make it easy for people to search pertinent logs in one place. For +example, `geo.log` contains all logs pertaining to GitLab Geo. +To create a new file: + 1. Choose a filename (e.g. `importer_json.log`). + 1. Create a new subclass of `Gitlab::JsonLogger`: + + ```ruby + module Gitlab + module Import + class Logger < ::Gitlab::JsonLogger + def self.file_name_noext + 'importer_json' + end + end + end + end + ``` + + 1. In your class where you want to log, you might initialize the logger as an instance variable: + + ```ruby + attr_accessor :logger + + def initialize + @logger = Gitlab::Import::Logger.build + end + ``` + + Note that it's useful to memoize this because creating a new logger + each time you log will open a file, adding unnecessary overhead. + +1. Now insert log messages into your code. When adding logs, + make sure to include all the context as key-value pairs: + + ```ruby + # BAD + logger.info("Unable to create project #{project.id}") + ``` + + ```ruby + # GOOD + logger.info("Unable to create project", project_id: project.id) + ``` + +1. 
Be sure to create a common base structure of your log messages. For example, + all messages might have `current_user_id` and `project_id` to make it easier + to search for activities by user for a given time. + +1. Do NOT mix and match types. Elasticsearch won't be able to index your + logs properly if you [mix integer and string + types](https://www.elastic.co/guide/en/elasticsearch/guide/current/mapping.html#_avoiding_type_gotchas): + + ```ruby + # BAD + logger.info("Import error", error: 1) + logger.info("Import error", error: "I/O failure") + ``` + + ```ruby + # GOOD + logger.info("Import error", error_code: 1, error: "I/O failure") + ``` + +## Additional steps with new log files + +1. Consider log retention settings. By default, Omnibus will rotate any +logs in `/var/log/gitlab/gitlab-rails/*.log` every hour and [keep at +most 30 compressed files](https://docs.gitlab.com/omnibus/settings/logs.html#logrotate). +On GitLab.com, that setting is only 6 compressed files. These settings should suffice +for most users, but you may need to tweak them in [omnibus-gitlab](https://gitlab.com/gitlab-org/omnibus-gitlab). + +1. If you add a new file, submit an issue to the [production +tracker](https://gitlab.com/gitlab-com/gl-infra/production/issues) or +a merge request to the [gitlab_fluentd](https://gitlab.com/gitlab-cookbooks/gitlab_fluentd) +project. See [this example](https://gitlab.com/gitlab-cookbooks/gitlab_fluentd/merge_requests/51/diffs). + +1. Be sure to update the [GitLab CE/EE documentation](../administration/logs.md) and the [GitLab.com +runbooks](https://gitlab.com/gitlab-com/runbooks/blob/master/howto/logging.md). diff --git a/doc/development/new_fe_guide/development/performance.md b/doc/development/new_fe_guide/development/performance.md index 244dfb3756f..5ccd5357314 100644 --- a/doc/development/new_fe_guide/development/performance.md +++ b/doc/development/new_fe_guide/development/performance.md @@ -2,7 +2,7 @@ ## Monitoring -We have a performance dashboard available in one of our [grafana instances](https://performance.gprd.gitlab.com/dashboard/db/sitespeed-page-summary?orgId=1). This dashboard automatically aggregates metric data from [sitespeed.io](https://sitespeed.io) every 6 hours. These changes are displayed after a set number of pages are aggregated. +We have a performance dashboard available in one of our [grafana instances](https://dashboards.gitlab.net/d/1EBTz3Dmz/sitespeed-page-summary?orgId=1). This dashboard automatically aggregates metric data from [sitespeed.io](https://sitespeed.io) every 6 hours. These changes are displayed after a set number of pages are aggregated. These pages can be found inside a text file in the gitlab-build-images [repository](https://gitlab.com/gitlab-org/gitlab-build-images) called [gitlab.txt](https://gitlab.com/gitlab-org/gitlab-build-images/blob/master/scripts/gitlab.txt) Any frontend engineer can contribute to this dashboard. They can contribute by adding or removing urls of pages from this text file. Please have a [frontend monitoring expert](https://about.gitlab.com/team) review your changes before assigning to a maintainer of the `gitlab-build-images` project. The changes will go live on the next scheduled run after the changes are merged into `master`. 
diff --git a/doc/development/new_fe_guide/development/testing.md b/doc/development/new_fe_guide/development/testing.md index a9223ac6b0f..082acbedcd2 100644 --- a/doc/development/new_fe_guide/development/testing.md +++ b/doc/development/new_fe_guide/development/testing.md @@ -1,30 +1,247 @@ # Overview of Frontend Testing -## Types of tests in our codebase +Tests relevant for frontend development can be found at two places: + +- `spec/javascripts/` which are run by Karma and contain + - [frontend unit tests](#frontend-unit-tests) + - [frontend component tests](#frontend-component-tests) + - [frontend integration tests](#frontend-integration-tests) +- `spec/features/` which are run by RSpec and contain + - [feature tests](#feature-tests) + +In addition there were feature tests in `features/` run by Spinach in the past. +These have been removed from our codebase in May 2018 ([#23036](https://gitlab.com/gitlab-org/gitlab-ce/issues/23036)). + +See also: -* **RSpec** - * **[Ruby unit tests](#ruby-unit-tests-spec-rb)** for models, controllers, helpers, etc. (`/spec/**/*.rb`) - * **[Full feature tests](#full-feature-tests-spec-features-rb)** (`/spec/features/**/*.rb`) -* **[Karma](#karma-tests-spec-javascripts-js)** (`/spec/javascripts/**/*.js`) -* <s>Spinach</s> — These have been removed from our codebase in May 2018. (`/features/`) +- [old testing guide](../../testing_guide/frontend_testing.html) +- [notes on testing Vue components](../../fe_guide/vue.html#testing-vue-components) -## RSpec: Ruby unit tests `/spec/**/*.rb` +## Frontend unit tests -These tests are meant to unit test the ruby models, controllers and helpers. +Unit tests are on the lowest abstraction level and typically test functionality that is not directly perceivable by a user. -### When do we write/update these tests? +### When to use unit tests -Whenever we create or modify any Ruby models, controllers or helpers we add/update corresponding tests. +<details> + <summary>exported functions and classes</summary> + Anything that is exported can be reused at various places in a way you have no control over. + Therefore it is necessary to document the expected behavior of the public interface with tests. +</details> ---- +<details> + <summary>Vuex actions</summary> + Any Vuex action needs to work in a consistent way independent of the component it is triggered from. +</details> -## RSpec: Full feature tests `/spec/features/**/*.rb` +<details> + <summary>Vuex mutations</summary> + For complex Vuex mutations it helps to identify the source of a problem by separating the tests from other parts of the Vuex store. +</details> -Full feature tests will load a full app environment and allow us to test things like rendering DOM, interacting with links and buttons, testing the outcome of those interactions through multiple pages if necessary. These are also called end-to-end tests but should not be confused with QA end-to-end tests (`package-and-qa` manual pipeline job). +### When *not* to use unit tests -### When do we write/update these tests? +<details> + <summary>non-exported functions or classes</summary> + Anything that is not exported from a module can be considered private or an implementation detail and doesn't need to be tested. +</details> -When we add a new feature, we write at least two tests covering the success and the failure scenarios. +<details> + <summary>constants</summary> + Testing the value of a constant would mean to copy it. + This results in extra effort without additional confidence that the value is correct. 
+</details> + +<details> + <summary>Vue components</summary> + Computed properties, methods, and lifecycle hooks can be considered an implementation detail of components and don't need to be tested. + They are implicitly covered by component tests. + The <a href="https://vue-test-utils.vuejs.org/guides/#getting-started">official Vue guidelines</a> suggest the same. +</details> + +### What to mock in unit tests + +<details> + <summary>state of the class under test</summary> + Modifying the state of the class under test directly rather than using methods of the class avoids side-effects in test setup. +</details> + +<details> + <summary>other exported classes</summary> + Every class needs to be tested in isolation to prevent test scenarios from growing exponentially. +</details> + +<details> + <summary>single DOM elements if passed as parameters</summary> + For tests that only operate on single DOM elements rather than a whole page, creating these elements is cheaper than loading a whole HTML fixture. +</details> + +<details> + <summary>all server requests</summary> + When running frontend unit tests, the backend may not be reachable. + Therefore all outgoing requests need to be mocked. +</details> + +<details> + <summary>asynchronous background operations</summary> + Background operations cannot be stopped or waited on, so they will continue running in the following tests and cause side effects. +</details> + +### What *not* to mock in unit tests + +<details> + <summary>non-exported functions or classes</summary> + Everything that is not exported can be considered private to the module and will be implicitly tested via the exported classes / functions. +</details> + +<details> + <summary>methods of the class under test</summary> + By mocking methods of the class under test, the mocks will be tested and not the real methods. +</details> + +<details> + <summary>utility functions (pure functions, or those that only modify parameters)</summary> + If a function has no side effects because it has no state, it is safe to not mock it in tests. +</details> + +<details> + <summary>full HTML pages</summary> + Loading the HTML of a full page slows down tests, so it should be avoided in unit tests. +</details> + +## Frontend component tests + +Component tests cover the state of a single component that is perceivable by a user depending on external signals such as user input, events fired from other components, or application state. + +### When to use component tests + +- Vue components + +### When *not* to use component tests + +<details> + <summary>Vue applications</summary> + Vue applications may contain many components. + Testing them on a component level requires too much effort. + Therefore they are tested on frontend integration level. +</details> + +<details> + <summary>HAML templates</summary> + HAML templates contain only Markup and no frontend-side logic. + Therefore they are not complete components. +</details> + +### What to mock in component tests + +<details> + <summary>DOM</summary> + Operating on the real DOM is significantly slower than on the virtual DOM. +</details> + +<details> + <summary>properties and state of the component under test</summary> + Similarly to testing classes, modifying the properties directly (rather than relying on methods of the component) avoids side-effects. +</details> + +<details> + <summary>Vuex store</summary> + To avoid side effects and keep component tests simple, Vuex stores are replaced with mocks. 
+</details> + +<details> + <summary>all server requests</summary> + Similar to unit tests, when running component tests, the backend may not be reachable. + Therefore all outgoing requests need to be mocked. +</details> + +<details> + <summary>asynchronous background operations</summary> + Similar to unit tests, background operations cannot be stopped or waited on, so they will continue running in the following tests and cause side effects. +</details> + +<details> + <summary>child components</summary> + Every component is tested individually, so child components are mocked. + See also <a href="https://vue-test-utils.vuejs.org/api/#shallowmount">shallowMount()</a> +</details> + +### What *not* to mock in component tests + +<details> + <summary>methods or computed properties of the component under test</summary> + By mocking part of the component under test, the mocks will be tested and not the real component. +</details> + +<details> + <summary>functions and classes independent from Vue</summary> + All plain JavaScript code is already covered by unit tests and needs not to be mocked in component tests. +</details> + +## Frontend integration tests + +Integration tests cover the interaction between all components on a single page. +Their abstraction level is comparable to how a user would interact with the UI. + +### When to use integration tests + +<details> + <summary>page bundles (<code>index.js</code> files in <code>app/assets/javascripts/pages/</code>)</summary> + Testing the page bundles ensures the corresponding frontend components integrate well. +</details> + +<details> + <summary>Vue applications outside of page bundles</summary> + Testing Vue applications as a whole ensures the corresponding frontend components integrate well. +</details> + +### What to mock in integration tests + +<details> + <summary>HAML views (use fixtures instead)</summary> + Rendering HAML views requires a Rails environment including a running database which we cannot rely on in frontend tests. +</details> + +<details> + <summary>all server requests</summary> + Similar to unit and component tests, when running component tests, the backend may not be reachable. + Therefore all outgoing requests need to be mocked. +</details> + +<details> + <summary>asynchronous background operations that are not perceivable on the page</summary> + Background operations that affect the page need to be tested on this level. + All other background operations cannot be stopped or waited on, so they will continue running in the following tests and cause side effects. +</details> + +### What *not* to mock in integration tests + +<details> + <summary>DOM</summary> + Testing on the real DOM ensures our components work in the environment they are meant for. + Part of this will be delegated to <a href="https://gitlab.com/gitlab-org/quality/team-tasks/issues/45">cross-browser testing</a>. +</details> + +<details> + <summary>properties or state of components</summary> + On this level, all tests can only perform actions a user would do. + For example to change the state of a component, a click event would be fired. +</details> + +<details> + <summary>Vuex stores</summary> + When testing the frontend code of a page as a whole, the interaction between Vue components and Vuex stores is covered as well. +</details> + +## Feature tests + +In contrast to [frontend integration tests](#frontend-integration-tests), feature tests make requests against the real backend instead of using fixtures. 
+This also implies that database queries are executed which makes this category significantly slower. + +### When to use feature tests + +- use cases that require a backend and cannot be tested using fixtures +- behavior that is not part of a page bundle but defined globally ### Relevant notes @@ -48,33 +265,9 @@ wait_for_requests expect(page).not_to have_selector('.card') ``` ---- - -## Karma tests `/spec/javascripts/**/*.js` - -These are the more frontend-focused, at the moment. They're **faster** than `rspec` and make for very quick testing of frontend components. - -### When do we write/update these tests? - -When we add/update a method/action/mutation to Vue or Vuex, we write karma tests to ensure the logic we wrote doesn't break. We should, however, refrain from writing tests that double-test Vue's internal features. - -### Relevant notes - -Karma tests are run against a virtual DOM. +## Test helpers -To populate the DOM, we can use fixtures to fake the generation of HTML instead of having Rails do that. - -Be sure to check the [best practices for karma tests](../../testing_guide/frontend_testing.html#best-practices). - -### Vue and Vuex - -Test as much as possible without double-testing Vue's internal features, as mentioned above. - -Make sure to test computedProperties, mutations, actions. Run the action and test that the proper mutations are committed. - -Also check these [notes on testing Vue components](../../fe_guide/vue.html#testing-vue-components). - -#### Vuex Helper: `testAction` +### Vuex Helper: `testAction` We have a helper available to make testing actions easier, as per [official documentation](https://vuex.vuejs.org/en/testing.html): @@ -97,7 +290,7 @@ testAction( Check an example in [spec/javascripts/ide/stores/actions_spec.jsspec/javascripts/ide/stores/actions_spec.js](https://gitlab.com/gitlab-org/gitlab-ce/blob/master/spec/javascripts/ide/stores/actions_spec.js). -#### Vue Helper: `mountComponent` +### Vue Helper: `mountComponent` To make mounting a Vue component easier and more readable, we have a few helpers available in `spec/helpers/vue_mount_component_helper`. @@ -133,6 +326,7 @@ afterEach(() => { vm.$destroy(); }); ``` + ## Testing with older browsers Some regressions only affect a specific browser version. We can install and test in particular browsers with either Firefox or Browserstack using the following steps: @@ -140,20 +334,21 @@ Some regressions only affect a specific browser version. We can install and test ### Browserstack -[Browserstack](https://www.browserstack.com/) allows you to test more than 1200 mobile devices and browsers. +[Browserstack](https://www.browserstack.com/) allows you to test more than 1200 mobile devices and browsers. You can use it directly through the [live app](https://www.browserstack.com/live) or you can install the [chrome extension](https://chrome.google.com/webstore/detail/browserstack/nkihdmlheodkdfojglpcjjmioefjahjb) for easy access. You can find the credentials on 1Password, under `frontendteam@gitlab.com`. ### Firefox #### macOS + You can download any older version of Firefox from the releases FTP server, https://ftp.mozilla.org/pub/firefox/releases/ 1. From the website, select a version, in this case `50.0.1`. -2. Go to the mac folder. -3. Select your preferred language, you will find the dmg package inside, download it. -4. Drag and drop the application to any other folder but the `Applications` folder. -5. Rename the application to something like `Firefox_Old`. -6. 
Move the application to the `Applications` folder. -7. Open up a terminal and run `/Applications/Firefox_Old.app/Contents/MacOS/firefox-bin -profilemanager` to create a new profile specific to that Firefox version. -8. Once the profile has been created, quit the app, and run it again like normal. You now have a working older Firefox version. +1. Go to the mac folder. +1. Select your preferred language, you will find the dmg package inside, download it. +1. Drag and drop the application to any other folder but the `Applications` folder. +1. Rename the application to something like `Firefox_Old`. +1. Move the application to the `Applications` folder. +1. Open up a terminal and run `/Applications/Firefox_Old.app/Contents/MacOS/firefox-bin -profilemanager` to create a new profile specific to that Firefox version. +1. Once the profile has been created, quit the app, and run it again like normal. You now have a working older Firefox version. diff --git a/doc/development/performance.md b/doc/development/performance.md index e738f2b4b66..4cc2fdc9a58 100644 --- a/doc/development/performance.md +++ b/doc/development/performance.md @@ -9,17 +9,17 @@ The process of solving performance problems is roughly as follows: 1. Make sure there's an issue open somewhere (e.g., on the GitLab CE issue tracker), create one if there isn't. See [#15607][#15607] for an example. -2. Measure the performance of the code in a production environment such as +1. Measure the performance of the code in a production environment such as GitLab.com (see the [Tooling](#tooling) section below). Performance should be measured over a period of _at least_ 24 hours. -3. Add your findings based on the measurement period (screenshots of graphs, +1. Add your findings based on the measurement period (screenshots of graphs, timings, etc) to the issue mentioned in step 1. -4. Solve the problem. -5. Create a merge request, assign the "Performance" label and assign it to +1. Solve the problem. +1. Create a merge request, assign the "Performance" label and assign it to [@yorickpeterse][yorickpeterse] for reviewing. -6. Once a change has been deployed make sure to _again_ measure for at least 24 +1. Once a change has been deployed make sure to _again_ measure for at least 24 hours to see if your changes have any impact on the production environment. -7. Repeat until you're done. +1. Repeat until you're done. When providing timings make sure to provide: @@ -94,14 +94,14 @@ result of this should be used instead of the `Benchmark` module. In short: -1. Don't trust benchmarks you find on the internet. -2. Never make claims based on just benchmarks, always measure in production to +- Don't trust benchmarks you find on the internet. +- Never make claims based on just benchmarks, always measure in production to confirm your findings. -3. X being N times faster than Y is meaningless if you don't know what impact it +- X being N times faster than Y is meaningless if you don't know what impact it will actually have on your production environment. -4. A production environment is the _only_ benchmark that always tells the truth +- A production environment is the _only_ benchmark that always tells the truth (unless your performance monitoring systems are not set up correctly). -5. If you must write a benchmark use the benchmark-ips Gem instead of Ruby's +- If you must write a benchmark use the benchmark-ips Gem instead of Ruby's `Benchmark` module. 
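If you do end up writing a benchmark, a minimal sketch using benchmark-ips might look like the following. The compared snippets are purely illustrative, and the `benchmark-ips` gem is assumed to be available in the bundle:

```sh
# Minimal benchmark-ips sketch (illustrative snippets only; assumes the
# benchmark-ips gem is available in the bundle):
bundle exec ruby -e '
  require "benchmark/ips"

  Benchmark.ips do |x|
    x.report("String#gsub") { "foo-bar".gsub("-", "_") }
    x.report("String#tr")   { "foo-bar".tr("-", "_") }
    x.compare!
  end
'
```

Numbers from a sketch like this are only a starting point; as noted above, production measurements still have the final say.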
## Profiling diff --git a/doc/development/post_deployment_migrations.md b/doc/development/post_deployment_migrations.md index cfc91539bee..5986efa9974 100644 --- a/doc/development/post_deployment_migrations.md +++ b/doc/development/post_deployment_migrations.md @@ -57,13 +57,13 @@ depends on this column being present while it's running. Normally you'd follow these steps in such a case: 1. Stop the GitLab instance -2. Run the migration removing the column -3. Start the GitLab instance again +1. Run the migration removing the column +1. Start the GitLab instance again Using post deployment migrations we can instead follow these steps: 1. Deploy a new version of GitLab while ignoring post deployment migrations -2. Re-run `rake db:migrate` but without the environment variable set +1. Re-run `rake db:migrate` but without the environment variable set Here we don't need any downtime as the migration takes place _after_ a new version (which doesn't depend on the column anymore) has been deployed. diff --git a/doc/development/query_count_limits.md b/doc/development/query_count_limits.md index 310e3faf61b..b3ecaf30d8a 100644 --- a/doc/development/query_count_limits.md +++ b/doc/development/query_count_limits.md @@ -8,8 +8,8 @@ in test environments we'll raise an error when this threshold is exceeded. When a test fails because it executes more than 100 SQL queries there are two solutions to this problem: -1. Reduce the number of SQL queries that are executed. -2. Whitelist the controller or API endpoint. +- Reduce the number of SQL queries that are executed. +- Whitelist the controller or API endpoint. You should only resort to whitelisting when an existing controller or endpoint is to blame as in this case reducing the number of SQL queries can take a lot of diff --git a/doc/development/swapping_tables.md b/doc/development/swapping_tables.md index 6b990ece72c..29cd6a43aff 100644 --- a/doc/development/swapping_tables.md +++ b/doc/development/swapping_tables.md @@ -8,8 +8,8 @@ Let's say you want to swap the table "events" with "events_for_migration". In this case you need to follow 3 steps: 1. Rename "events" to "events_temporary" -2. Rename "events_for_migration" to "events" -3. Rename "events_temporary" to "events_for_migration" +1. Rename "events_for_migration" to "events" +1. Rename "events_temporary" to "events_for_migration" Rails allows you to do this using the `rename_table` method: diff --git a/doc/development/switching_to_rails5.md b/doc/development/switching_to_rails5.md new file mode 100644 index 00000000000..c9a4ce1a1d1 --- /dev/null +++ b/doc/development/switching_to_rails5.md @@ -0,0 +1,27 @@ +# Switching to Rails 5 + +GitLab recently switched to Rails 5. This is a big change (especially for backend development) and it introduces a couple of temporary inconveniences. + +## After the switch, I found a broken feature. What do I do? + +Many fixes and tweaks were done to make our codebase compatible with Rails 5, but it's possible that not all issues were found. If you find a bug, please create an issue and assign it the ~rails5 label. + +## It takes much longer to run CI pipelines that build GitLab. Why? + +We are temporarily running CI pipelines with both Rails 4 and Rails 5 to ensure that we remain compatible with Rails 4 in case we must revert from Rails 5 (this can double the duration of CI pipelines). + +We might revert to Rails 4 if we find a major issue we are unable to fix quickly. 
+ +Once we are sure we can stay with Rails 5, we will stop running CI pipelines with Rails 4. + +## Can I skip running Rails 4 tests? + +If you are sure that your merge request doesn't introduce any incompatibility, you can just include `norails4` anywhere in your branch name and Rails 4 tests will be skipped. + +## CI is failing on my test with Rails 4. How can I debug it? + +You can run specs locally with Rails 4 using the following command: + +```sh +BUNDLE_GEMFILE=Gemfile.rails4 RAILS5=0 bundle exec rspec ... +``` diff --git a/doc/development/testing_guide/end_to_end_tests.md b/doc/development/testing_guide/end_to_end_tests.md index 21ec926414d..e9f236c6b3a 100644 --- a/doc/development/testing_guide/end_to_end_tests.md +++ b/doc/development/testing_guide/end_to_end_tests.md @@ -1,32 +1,37 @@ -# End-to-End Testing +# End-to-end Testing -## What is End-to-End testing? +## What is end-to-end testing? -End-to-End testing is a strategy used to check whether your application works -as expected across entire software stack and architecture, including -integration of all microservices and components that are supposed to work +End-to-end testing is a strategy used to check whether your application works +as expected across the entire software stack and architecture, including +integration of all micro-services and components that are supposed to work together. ## How do we test GitLab? We use [Omnibus GitLab][omnibus-gitlab] to build GitLab packages and then we -test these packages using [GitLab QA][gitlab-qa] project, which is entirely -black-box, click-driven testing framework. +test these packages using the [GitLab QA orchestrator][gitlab-qa] tool, which is +a black-box testing framework for the API and the UI. ### Testing nightly builds We run scheduled pipeline each night to test nightly builds created by Omnibus. -You can find these nightly pipelines at [GitLab QA pipelines page][gitlab-qa-pipelines]. +You can find these nightly pipelines at [gitlab-org/quality/nightly/pipelines][quality-nightly-pipelines]. + +### Testing staging + +We run scheduled pipeline each night to test staging. +You can find these nightly pipelines at [gitlab-org/quality/staging/pipelines][quality-staging-pipelines]. ### Testing code in merge requests It is possible to run end-to-end tests (eventually being run within a [GitLab QA pipeline][gitlab-qa-pipelines]) for a merge request by triggering -the `package-and-qa` manual action, that should be present in a merge request -widget. +the `package-and-qa` manual action in the `test` stage, that should be present +in a merge request widget (unless the merge request is from a fork). Manual action that starts end-to-end tests is also available in merge requests -in Omnibus GitLab project. +in [Omnibus GitLab][omnibus-gitlab]. Below you can read more about how to use it and how does it work. @@ -35,46 +40,56 @@ Below you can read more about how to use it and how does it work. Currently, we are using _multi-project pipeline_-like approach to run QA pipelines. -1. Developer triggers a manual action, that can be found in CE and EE merge +1. Developer triggers a manual action, that can be found in CE / EE merge requests. This starts a chain of pipelines in multiple projects. -1. The script being executed triggers a pipeline in GitLab Omnibus and waits -for the resulting status. We call this a _status attribution_. +1. The script being executed triggers a pipeline in [Omnibus GitLab][omnibus-gitlab] +and waits for the resulting status. We call this a _status attribution_. 
-1. GitLab packages are being built in Omnibus pipeline. Packages are going to be -pushed to Container Registry. +1. GitLab packages are being built in the [Omnibus GitLab][omnibus-gitlab] +pipeline. Packages are then pushed to its Container Registry. 1. When packages are ready, and available in the registry, a final step in the -pipeline, that is now running in Omnibus, triggers a new pipeline in the GitLab -QA project. It also waits for a resulting status. +[Omnibus GitLab][omnibus-gitlab] pipeline, triggers a new +[GitLab QA pipeline][gitlab-qa-pipelines]. It also waits for a resulting status. 1. GitLab QA pulls images from the registry, spins-up containers and runs tests against a test environment that has been just orchestrated by the `gitlab-qa` tool. -1. The result of the GitLab QA pipeline is being propagated upstream, through -Omnibus, back to CE / EE merge request. +1. The result of the [GitLab QA pipeline][gitlab-qa-pipelines] is being +propagated upstream, through Omnibus, back to the CE / EE merge request. #### How do I write tests? In order to write new tests, you first need to learn more about GitLab QA -architecture. See the [documentation about it][gitlab-qa-architecture] in -GitLab QA project. +architecture. See the [documentation about it][gitlab-qa-architecture]. -Once you decided where to put test environment orchestration scenarios and -instance specs, take a look at the [relevant documentation][instance-qa-readme] -and examples in [the `qa/` directory][instance-qa-examples]. +Once you decided where to put [test environment orchestration scenarios] and +[instance-level scenarios], take a look at the [GitLab QA README][instance-qa-readme], +the [GitLab QA orchestrator README][gitlab-qa-readme], and [the already existing +instance-level scenarios][instance-level scenarios]. ## Where can I ask for help? You can ask question in the `#quality` channel on Slack (GitLab internal) or you can find an issue you would like to work on in -[the issue tracker][gitlab-qa-issues] and start a new discussion there. +[the `gitlab-ce` issue tracker][gitlab-ce-issues], +[the `gitlab-ee` issue tracker][gitlab-ce-issues], or +[the `gitlab-qa` issue tracker][gitlab-qa-issues]. 
[omnibus-gitlab]: https://gitlab.com/gitlab-org/omnibus-gitlab [gitlab-qa]: https://gitlab.com/gitlab-org/gitlab-qa +[gitlab-qa-readme]: https://gitlab.com/gitlab-org/gitlab-qa/tree/master/README.md [gitlab-qa-pipelines]: https://gitlab.com/gitlab-org/gitlab-qa/pipelines +[quality-nightly-pipelines]: https://gitlab.com/gitlab-org/quality/nightly/pipelines +[quality-staging-pipelines]: https://gitlab.com/gitlab-org/quality/staging/pipelines [gitlab-qa-architecture]: https://gitlab.com/gitlab-org/gitlab-qa/blob/master/docs/architecture.md -[gitlab-qa-issues]: https://gitlab.com/gitlab-org/gitlab-qa/issues +[gitlab-qa-issues]: https://gitlab.com/gitlab-org/gitlab-qa/issues?label_name%5B%5D=new+scenario +[gitlab-ce-issues]: https://gitlab.com/gitlab-org/gitlab-ce/issues?label_name[]=QA&label_name[]=test +[gitlab-ee-issues]: https://gitlab.com/gitlab-org/gitlab-ee/issues?label_name[]=QA&label_name[]=test +[test environment orchestration scenarios]: https://gitlab.com/gitlab-org/gitlab-qa/tree/master/lib/gitlab/qa/scenario +[instance-level scenarios]: https://gitlab.com/gitlab-org/gitlab-ce/tree/master/qa/qa/specs/features +[Page objects documentation]: https://gitlab.com/gitlab-org/gitlab-ce/tree/master/qa/qa/page/README.md [instance-qa-readme]: https://gitlab.com/gitlab-org/gitlab-ce/tree/master/qa/README.md [instance-qa-examples]: https://gitlab.com/gitlab-org/gitlab-ce/tree/master/qa/qa diff --git a/doc/development/testing_guide/smoke.md b/doc/development/testing_guide/smoke.md index 3f2843cba6e..3360031c220 100644 --- a/doc/development/testing_guide/smoke.md +++ b/doc/development/testing_guide/smoke.md @@ -1,8 +1,9 @@ # Smoke Tests -It is imperative in any testing suite that we have Smoke Tests. In short, smoke tests are will run quick sanity -end-to-end functional tests from GitLab QA and are designed to run against the specified environment to ensure that -basic functionality is working. +It is imperative in any testing suite that we have Smoke Tests. In short, smoke +tests will run quick sanity end-to-end functional tests from GitLab QA and are +designed to run against the specified environment to ensure that basic +functionality is working. Currently, our suite consists of this basic functionality coverage: @@ -11,6 +12,8 @@ Currently, our suite consists of this basic functionality coverage: - Issue Creation - Merge Request Creation +Smoke tests have the `:smoke` RSpec metadata. + --- [Return to Testing documentation](index.md) diff --git a/doc/development/testing_guide/testing_levels.md b/doc/development/testing_guide/testing_levels.md index 32ed22ca3ed..a8671fc3aa3 100644 --- a/doc/development/testing_guide/testing_levels.md +++ b/doc/development/testing_guide/testing_levels.md @@ -34,7 +34,11 @@ records should use stubs/doubles as much as possible. Formal definition: https://en.wikipedia.org/wiki/Integration_testing -These kind of tests ensure that individual parts of the application work well together, without the overhead of the actual app environment (i.e. the browser). These tests should assert at the request/response level: status code, headers, body. They're useful to test permissions, redirections, what view is rendered etc. +These kind of tests ensure that individual parts of the application work well +together, without the overhead of the actual app environment (i.e. the browser). +These tests should assert at the request/response level: status code, headers, +body. +They're useful to test permissions, redirections, what view is rendered etc. 
| Code path | Tests path | Testing engine | Notes | | --------- | ---------- | -------------- | ----- | @@ -67,20 +71,40 @@ run JavaScript tests, so you can either run unit tests (e.g. test a single JavaScript method), or integration tests (e.g. test a component that is composed of multiple components). -## System tests or feature tests +## White-box tests at the system level (formerly known as System / Feature tests) -Formal definition: https://en.wikipedia.org/wiki/System_testing. +Formal definitions: -These kind of tests ensure the application works as expected from a user point -of view (aka black-box testing). These tests should test a happy path for a -given page or set of pages, and a test case should be added for any regression +- https://en.wikipedia.org/wiki/System_testing +- https://en.wikipedia.org/wiki/White-box_testing + +These kinds of tests ensure the GitLab *Rails* application (i.e. +`gitlab-ce`/`gitlab-ee`) works as expected from a *browser* point of view. + +Note that: + +- knowledge of the internals of the application is still required +- data needed for the tests are usually created directly using RSpec factories +- expectations are often set on the database or object state + +These tests should only be used when: + +- the functionality/component being tested is small +- the internal state of the objects/database *needs* to be tested +- it cannot be tested at a lower level + +For instance, to test the breadcrumbs on a given page, writing a system test +makes sense since it's a small component, which cannot be tested at the unit or +controller level. + +Only test the happy path, but make sure to add a test case for any regression that couldn't have been caught at lower levels with better tests (i.e. if a regression is found, regression tests should be added at the lowest-level possible). | Tests path | Testing engine | Notes | | ---------- | -------------- | ----- | -| `spec/features/` | [Capybara] + [RSpec] | If your spec has the `:js` metadata, the browser driver will be [Poltergeist], otherwise it's using [RackTest]. | +| `spec/features/` | [Capybara] + [RSpec] | If your test has the `:js` metadata, the browser driver will be [Poltergeist], otherwise it's using [RackTest]. | ### Consider **not** writing a system test! @@ -89,7 +113,7 @@ we have enough Unit & Integration tests), we shouldn't need to duplicate their thorough testing at the System test level. It's very easy to add tests, but a lot harder to remove or improve tests, so one -should take care of not introducing too many (slow and duplicated) specs. +should take care of not introducing too many (slow and duplicated) tests. The reasons why we should follow these best practices are as follows: @@ -107,29 +131,33 @@ The reasons why we should follow these best practices are as follows: [Poltergeist]: https://github.com/teamcapybara/capybara#poltergeist [RackTest]: https://github.com/teamcapybara/capybara#racktest -## Black-box tests or end-to-end tests +## Black-box tests at the system level, aka end-to-end tests + +Formal definitions: + +- https://en.wikipedia.org/wiki/System_testing +- https://en.wikipedia.org/wiki/Black-box_testing GitLab consists of [multiple pieces] such as [GitLab Shell], [GitLab Workhorse], [Gitaly], [GitLab Pages], [GitLab Runner], and GitLab Rails. All these pieces are configured and packaged by [GitLab Omnibus]. 
-[GitLab QA] is a tool that allows to test that all these pieces integrate well -together by building a Docker image for a given version of GitLab Rails and -running feature tests (i.e. using Capybara) against it. +The QA framework and instance-level scenarios are [part of GitLab Rails] so that +they're always in-sync with the codebase (especially the views). -The actual test scenarios and steps are [part of GitLab Rails] so that they're -always in-sync with the codebase. +Note that: -### Smoke tests - -Smoke tests are quick tests that may be run at any time (especially after the pre-deployment migrations). +- knowledge of the internals of the application are not required +- data needed for the tests can only be created using the GUI or the API +- expectations can only be made against the browser page and API responses -Much like feature tests - these tests run against the UI and ensure that basic functionality is working. +Every new feature should come with a [test plan]. -> See [Smoke Tests](smoke.md) for more information. +| Tests path | Testing engine | Notes | +| ---------- | -------------- | ----- | +| `qa/qa/specs/features/` | [Capybara] + [RSpec] + Custom QA framework | Tests should be placed under their corresponding [Product category] | -Read a separate document about [end-to-end tests](end_to_end_tests.md) to -learn more. +> See [end-to-end tests](end_to_end_tests.md) for more information. [multiple pieces]: ../architecture.md#components [GitLab Shell]: https://gitlab.com/gitlab-org/gitlab-shell @@ -138,8 +166,29 @@ learn more. [GitLab Pages]: https://gitlab.com/gitlab-org/gitlab-pages [GitLab Runner]: https://gitlab.com/gitlab-org/gitlab-runner [GitLab Omnibus]: https://gitlab.com/gitlab-org/omnibus-gitlab -[GitLab QA]: https://gitlab.com/gitlab-org/gitlab-qa [part of GitLab Rails]: https://gitlab.com/gitlab-org/gitlab-ce/tree/master/qa +[test plan]: https://gitlab.com/gitlab-org/gitlab-ce/tree/master/.gitlab/issue_templates/Test%20plan.md +[Product category]: https://about.gitlab.com/handbook/product/categories/ + +### Smoke tests + +Smoke tests are quick tests that may be run at any time (especially after the +pre-deployment migrations). + +These tests run against the UI and ensure that basic functionality is working. + +> See [Smoke Tests](smoke.md) for more information. + +### GitLab QA orchestrator + +[GitLab QA orchestrator] is a tool that allows to test that all these pieces +integrate well together by building a Docker image for a given version of GitLab +Rails and running end-to-end tests (i.e. using Capybara) against it. + +Learn more in the [GitLab QA orchestrator README][gitlab-qa-readme]. + +[GitLab QA orchestrator]: https://gitlab.com/gitlab-org/gitlab-qa +[gitlab-qa-readme]: https://gitlab.com/gitlab-org/gitlab-qa/tree/master/README.md ## EE-specific tests diff --git a/doc/development/ui_guide.md b/doc/development/ui_guide.md index df6ac452300..dd206bb2ae9 100644 --- a/doc/development/ui_guide.md +++ b/doc/development/ui_guide.md @@ -54,12 +54,12 @@ information from database or file system When exporting SVGs, be sure to follow the following guidelines: -1. Convert all strokes to outlines. -2. Use pathfinder tools to combine overlapping paths and create compound paths. -3. SVGs that are limited to one color should be exported without a fill color so the color can be set using CSS. -4. Ensure that exported SVGs have been run through an [SVG cleaner](https://github.com/RazrFalcon/SVGCleaner) to remove unused elements and attributes. 
+- Convert all strokes to outlines. +- Use pathfinder tools to combine overlapping paths and create compound paths. +- SVGs that are limited to one color should be exported without a fill color so the color can be set using CSS. +- Ensure that exported SVGs have been run through an [SVG cleaner](https://github.com/RazrFalcon/SVGCleaner) to remove unused elements and attributes. -You can open your svg in a text editor to ensure that it is clean. +You can open your svg in a text editor to ensure that it is clean. Incorrect files will look like this: ```xml diff --git a/doc/development/ux_guide/users.md b/doc/development/ux_guide/users.md index f9c395b2dff..30386e728c4 100644 --- a/doc/development/ux_guide/users.md +++ b/doc/development/ux_guide/users.md @@ -101,7 +101,7 @@ GitLab's interface initially attracted Nazim when he was comparing version contr ### Demographics **Age** - + 42 years old **Location** @@ -148,11 +148,11 @@ Matthieu describes GitLab as: >"the only tool that offers the real feeling of having everything you need in one place." -He credits himself as being entirely responsible for moving his company to GitLab. +He credits himself as being entirely responsible for moving his company to GitLab. ### Frustrations #### Updating to the latest release -Matthieu introduced his company to GitLab. He is responsible for maintaining and managing the company's installation in addition to his day job. He feels updates are too frequent and he doesn't always have sufficient time to update GitLab. As a result, he's not up to date with releases. +Matthieu introduced his company to GitLab. He is responsible for maintaining and managing the company's installation in addition to his day job. He feels updates are too frequent and he doesn't always have sufficient time to update GitLab. As a result, he's not up to date with releases. Matthieu tried to set up automatic updates, however, as he isn't a Systems Administrator, he wasn't confident in his setup. He feels he should be able to "upgrade without users even noticing" but hasn't figured out how to do this yet. Matthieu would like the "update process to be triggered from the Admin Panel, perhaps accompanied with a changelog and the option to skip updates." @@ -173,11 +173,11 @@ It's Matthieu's responsibility to get teams across his organization up and runni He states that there has been: "a sluggishness of others to adapt" and it's "a low-effort adaptation at that." ### Goals -* To save time. One of the reasons Matthieu moved his company to GitLab was to reduce the effort it took him to manage and configure multiple tools, thus saving him time. He has to balance his day job in addition to managing the company's GitLab installation and onboarding new teams to GitLab. -* To use a platform which is easy to manage. Matthieu isn't a Systems Administrator, and when updating GitLab, creating backups, etc. He would prefer to work within GitLab's UI. Explanations / guided instructions when configuring settings in GitLab's interface would really help Matthieu. He needs reassurance that what he is about to change is +* To save time. One of the reasons Matthieu moved his company to GitLab was to reduce the effort it took him to manage and configure multiple tools, thus saving him time. He has to balance his day job in addition to managing the company's GitLab installation and onboarding new teams to GitLab. +* To use a platform which is easy to manage. Matthieu isn't a Systems Administrator, and when updating GitLab, creating backups, etc. 
He would prefer to work within GitLab's UI. Explanations / guided instructions when configuring settings in GitLab's interface would really help Matthieu. He needs reassurance that what he is about to change is -1. the right setting -2. will provide him with the desired result he wants. +- The right setting. +- Will provide him with the desired result he wants. * Matthieu needs to educate his colleagues about GitLab. Matthieu's colleagues won't adopt GitLab as they're unaware of its capabilities and the positive impact it could have on their work. Matthieu needs support in getting this message across to them. @@ -307,4 +307,4 @@ Karolina has an interest in UX and therefore has strong opinions about how GitLa ### Goals * To develop her programming experience and to learn from other developers. * To contribute to both her own and other open source projects. -* To use a fast and intuitive version control platform.
\ No newline at end of file +* To use a fast and intuitive version control platform. diff --git a/doc/development/what_requires_downtime.md b/doc/development/what_requires_downtime.md index b668c9de6a0..3630a28fae9 100644 --- a/doc/development/what_requires_downtime.md +++ b/doc/development/what_requires_downtime.md @@ -300,7 +300,7 @@ The same applies to `rename_column_using_background_migration`: 1. Create a migration using the helper, which will schedule background migrations to spread the writes over a longer period of time. -2. In the next monthly release, create a clean-up migration to steal from the +1. In the next monthly release, create a clean-up migration to steal from the Sidekiq queues, migrate any missing rows, and cleanup the rename. This migration should skip the steps after stealing from the Sidekiq queues if the column has already been renamed. diff --git a/doc/gitlab-basics/create-your-ssh-keys.md b/doc/gitlab-basics/create-your-ssh-keys.md index b6ebe374de3..881629c3bfd 100644 --- a/doc/gitlab-basics/create-your-ssh-keys.md +++ b/doc/gitlab-basics/create-your-ssh-keys.md @@ -12,7 +12,7 @@  -3. Paste your **public** key that you generated in the first step in the 'Key' +1. Paste your **public** key that you generated in the first step in the 'Key' box.  diff --git a/doc/install/installation.md b/doc/install/installation.md index 316411d1047..cac97b63d92 100644 --- a/doc/install/installation.md +++ b/doc/install/installation.md @@ -132,9 +132,9 @@ Remove the old Ruby 1.8 if present: Download Ruby and compile it: mkdir /tmp/ruby && cd /tmp/ruby - curl --remote-name --progress https://cache.ruby-lang.org/pub/ruby/2.4/ruby-2.4.5.tar.gz - echo '4d650f302f1ec00256450b112bb023644b6ab6dd ruby-2.4.5.tar.gz' | shasum -c - && tar xzf ruby-2.4.5.tar.gz - cd ruby-2.4.5 + curl --remote-name --progress https://cache.ruby-lang.org/pub/ruby/2.5/ruby-2.5.3.tar.gz + echo 'f919a9fbcdb7abecd887157b49833663c5c15fda ruby-2.5.3.tar.gz' | shasum -c - && tar xzf ruby-2.5.3.tar.gz + cd ruby-2.5.3 ./configure --disable-install-rdoc make diff --git a/doc/install/requirements.md b/doc/install/requirements.md index dcc6d75724e..97190544c3d 100644 --- a/doc/install/requirements.md +++ b/doc/install/requirements.md @@ -197,7 +197,13 @@ use the CI features. ## Supported web browsers -We support the current and the previous major release of Firefox, Chrome/Chromium, Safari and Microsoft browsers (Microsoft Edge and Internet Explorer 11). +We support the current and the previous major release of: + +- Firefox +- Chrome/Chromium +- Safari +- Microsoft Edge +- Internet Explorer 11 Each time a new browser version is released, we begin supporting that version and stop supporting the third most recent version. diff --git a/doc/integration/akismet.md b/doc/integration/akismet.md index a6436b5f926..200fe6f5206 100644 --- a/doc/integration/akismet.md +++ b/doc/integration/akismet.md @@ -20,17 +20,17 @@ To use Akismet: 1. Go to the URL: https://akismet.com/account/ -2. Sign-in or create a new account. +1. Sign-in or create a new account. -3. Click on **Show** to reveal the API key. +1. Click on **Show** to reveal the API key. -4. Go to Applications Settings on Admin Area (`admin/application_settings`) +1. Go to Applications Settings on Admin Area (`admin/application_settings`) -5. Check the **Enable Akismet** checkbox +1. Check the **Enable Akismet** checkbox -6. Fill in the API key from step 3. +1. Fill in the API key from step 3. -7. Save the configuration. +1. Save the configuration.  
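Before troubleshooting a rejected key any further, you can sanity-check the API key outside GitLab. Assuming standard Akismet API behavior, a request to the `verify-key` endpoint should return `valid`; the key and site URL below are placeholders you must replace with your own values:

```sh
# Should print "valid" if Akismet accepts the key; the key and blog URL are
# placeholders and must be replaced with your own values.
curl --data "key=YOUR_AKISMET_API_KEY&blog=https://gitlab.example.com" \
  https://rest.akismet.com/1.1/verify-key
```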
@@ -42,9 +42,9 @@ To use Akismet: As a way to better recognize between spam and ham, you can train the Akismet filter whenever there is a false positive or false negative. -When an entry is recognized as spam, it is rejected and added to the Spam Logs. +When an entry is recognized as spam, it is rejected and added to the Spam Logs. From here you can review if they are really spam. If one of them is not really -spam, you can use the **Submit as ham** button to tell Akismet that it falsely +spam, you can use the **Submit as ham** button to tell Akismet that it falsely recognized an entry as spam.  diff --git a/doc/integration/bitbucket.md b/doc/integration/bitbucket.md index bf587b5b296..a69db1d1a6e 100644 --- a/doc/integration/bitbucket.md +++ b/doc/integration/bitbucket.md @@ -54,6 +54,7 @@ you to use. ``` Account: Email, Read + Projects: Read Repositories: Read Pull Requests: Read Issues: Read diff --git a/doc/integration/google.md b/doc/integration/google.md index 73e2f5826ff..b91d40d4bd4 100644 --- a/doc/integration/google.md +++ b/doc/integration/google.md @@ -35,7 +35,7 @@ In Google's side: 1. You should now be able to see a Client ID and Client secret. Note them down or keep this page open as you will need them later. -1. From the **Dashboard** select **ENABLE APIS AND SERVICES > Compute > Google+ API > Enable** +1. From the **Dashboard** select **ENABLE APIS AND SERVICES > Social > Google+ API > Enable** 1. To enable projects to access [Google Kubernetes Engine](../user/project/clusters/index.md), you must also enable these APIs: - Google Kubernetes Engine API diff --git a/doc/integration/recaptcha.md b/doc/integration/recaptcha.md index 932cd479d56..8fdadb008ec 100644 --- a/doc/integration/recaptcha.md +++ b/doc/integration/recaptcha.md @@ -8,19 +8,13 @@ to confirm that a real user, not a bot, is attempting to create an account. To use reCAPTCHA, first you must create a site and private key. -1. Go to the URL: https://www.google.com/recaptcha/admin - -2. Fill out the form necessary to obtain reCAPTCHA keys. - -3. Login to your GitLab server, with administrator credentials. - -4. Go to Applications Settings on Admin Area (`admin/application_settings`) - -5. Fill all recaptcha fields with keys from previous steps - -6. Check the `Enable reCAPTCHA` checkbox - -7. Save the configuration. +1. Go to the URL: <https://www.google.com/recaptcha/admin>. +1. Fill out the form necessary to obtain reCAPTCHA keys. +1. Login to your GitLab server, with administrator credentials. +1. Go to Applications Settings on Admin Area (`admin/application_settings`). +1. Fill all recaptcha fields with keys from previous steps. +1. Check the `Enable reCAPTCHA` checkbox. +1. Save the configuration. ## Enabling reCAPTCHA for user logins via passwords diff --git a/doc/security/rack_attack.md b/doc/security/rack_attack.md index 3efb19c1526..07e7b3da13b 100644 --- a/doc/security/rack_attack.md +++ b/doc/security/rack_attack.md @@ -10,7 +10,7 @@ Rack Attack offers IP whitelisting, blacklisting, Fail2ban style filtering and tracking. **Note:** Starting with 11.2, Rack Attack is disabled by default. To continue -using this feature, please enable it in your `gitlab.rb` by setting +using this feature, please enable it in your `gitlab.rb` by setting `gitlab_rails['rack_attack_git_basic_auth'] = true`. By default, user sign-in, user sign-up (if enabled), and user password reset is @@ -41,7 +41,7 @@ For more information on how to use these options check out } ``` -3. Reconfigure GitLab: +1. 
Reconfigure GitLab: ``` sudo gitlab-ctl reconfigure @@ -98,26 +98,26 @@ In case you want to remove a blocked IP, follow these steps: grep "Rack_Attack" /var/log/gitlab/gitlab-rails/production.log ``` -2. Since the blacklist is stored in Redis, you need to open up `redis-cli`: +1. Since the blacklist is stored in Redis, you need to open up `redis-cli`: ```sh /opt/gitlab/embedded/bin/redis-cli -s /var/opt/gitlab/redis/redis.socket ``` -3. You can remove the block using the following syntax, replacing `<ip>` with +1. You can remove the block using the following syntax, replacing `<ip>` with the actual IP that is blacklisted: ``` del cache:gitlab:rack::attack:allow2ban:ban:<ip> ``` -4. Confirm that the key with the IP no longer shows up: +1. Confirm that the key with the IP no longer shows up: ``` keys *rack::attack* ``` -5. Optionally, add the IP to the whitelist to prevent it from being blacklisted +1. Optionally, add the IP to the whitelist to prevent it from being blacklisted again (see [settings](#settings)). ## Troubleshooting @@ -129,11 +129,11 @@ the load balancer. In that case, you will need to: 1. [Configure `nginx[real_ip_trusted_addresses]`](https://docs.gitlab.com/omnibus/settings/nginx.html#configuring-gitlab-trusted_proxies-and-the-nginx-real_ip-module). This will keep users' IPs from being listed as the load balancer IPs. -2. Whitelist the load balancer's IP address(es) in the Rack Attack [settings](#settings). -3. Reconfigure GitLab: +1. Whitelist the load balancer's IP address(es) in the Rack Attack [settings](#settings). +1. Reconfigure GitLab: ``` sudo gitlab-ctl reconfigure ``` -4. [Remove the block via Redis.](#remove-blocked-ips-from-rack-attack-via-redis) +1. [Remove the block via Redis.](#remove-blocked-ips-from-rack-attack-via-redis) diff --git a/doc/security/two_factor_authentication.md b/doc/security/two_factor_authentication.md index cd290a80314..b770f2544d2 100644 --- a/doc/security/two_factor_authentication.md +++ b/doc/security/two_factor_authentication.md @@ -13,8 +13,8 @@ You can read more about it here: Users on GitLab, can enable it without any admin's intervention. If you want to enforce everyone to set up 2FA, you can choose from two different ways: - 1. Enforce on next login - 2. Suggest on next login, but allow a grace period before enforcing. +- Enforce on next login. +- Suggest on next login, but allow a grace period before enforcing. In the Admin area under **Settings** (`/admin/application_settings`), look for the "Sign-in Restrictions" area, where you can configure both. diff --git a/doc/topics/git/troubleshooting_git.md b/doc/topics/git/troubleshooting_git.md index 8555c5e91ea..d1729d70158 100644 --- a/doc/topics/git/troubleshooting_git.md +++ b/doc/topics/git/troubleshooting_git.md @@ -78,5 +78,20 @@ git push In case you're running an older version of Git (< 2.9), consider upgrading to >= 2.9 (see [Broken pipe when pushing to Git repository][Broken-Pipe]). +## Timeout during git push/pull + +If pulling/pushing from/to your repository ends up taking more than 50 seconds, +a timeout will be issued with a log of the number of operations performed +and their respective timings, like the example below: + +``` +remote: Running checks for branch: master +remote: Scanning for LFS objects... (153ms) +remote: Calculating new repository size... (cancelled after 729ms) +``` + +This could be used to further investigate what operation is performing poorly +and provide GitLab with more information on how to improve the service. 
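When investigating such a timeout, client-side timings can complement the server-side log above. One option is Git's own tracing, which is a generic Git facility rather than anything GitLab-specific:

```sh
# GIT_TRACE and GIT_TRACE_PACKET print timestamped trace output for the push,
# which helps narrow down where time is spent on the client side.
GIT_TRACE=1 GIT_TRACE_PACKET=1 git push
```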
+ [SSH troubleshooting]: ../../ssh/README.md#troubleshooting "SSH Troubleshooting" [Broken-Pipe]: https://stackoverflow.com/questions/19120120/broken-pipe-when-pushing-to-git-repository/36971469#36971469 "StackOverflow: 'Broken pipe when pushing to Git repository'" diff --git a/doc/university/training/end-user/README.md b/doc/university/training/end-user/README.md index e5eb5d97e3b..701533358c8 100644 --- a/doc/university/training/end-user/README.md +++ b/doc/university/training/end-user/README.md @@ -78,7 +78,7 @@ Workshop Time! ```bash git config --global user.name "Your Name" git config --global user.email you@example.com -``` +``` - If you don't use the global flag you can set up a different author for each project @@ -107,14 +107,14 @@ cd ~/development -or- mkdir ~/workspace -cd ~/workspace +cd ~/workspace ``` --- ## Git Basics ---- +--- ### Git Workflow @@ -136,7 +136,7 @@ cd ~/workspace issue tracking, Merge Requests, and other features. - The hosted version of GitLab is gitlab.com ---- +--- ### New Project @@ -150,12 +150,12 @@ cd ~/workspace ### Git and GitLab basics 1. Edit `edit_this_file.rb` in `training-examples` -2. See it listed as a changed file (working area) -3. View the differences -4. Stage the file -5. Commit -6. Push the commit to the remote -7. View the git log +1. See it listed as a changed file (working area) +1. View the differences +1. Stage the file +1. Commit +1. Push the commit to the remote +1. View the git log --- @@ -169,14 +169,14 @@ git push origin master git log ``` ---- +--- ### Feature Branching 1. Create a new feature branch called `squash_some_bugs` -2. Edit `bugs.rb` and remove all the bugs. -3. Commit -4. Push +1. Edit `bugs.rb` and remove all the bugs. +1. Commit +1. Push --- @@ -250,16 +250,17 @@ git push origin squash_some_bugs --- ### Example Plan + 1. Checkout a new branch and edit conflicts.rb. Add 'Line4' and 'Line5'. -2. Commit and push -3. Checkout master and edit conflicts.rb. Add 'Line6' and 'Line7' below 'Line3'. -4. Commit and push to master -5. Create a merge request and watch it fail -6. Rebase our new branch with master -7. Fix conflicts on the conflicts.rb file. -8. Stage the file and continue rebasing -9. Force push the changes -10. Finally continue with the Merge Request +1. Commit and push +1. Checkout master and edit conflicts.rb. Add 'Line6' and 'Line7' below 'Line3'. +1. Commit and push to master +1. Create a merge request and watch it fail +1. Rebase our new branch with master +1. Fix conflicts on the conflicts.rb file. +1. Stage the file and continue rebasing +1. Force push the changes +1. Finally continue with the Merge Request --- @@ -362,15 +363,15 @@ Don't reset after pushing ### Reset Workflow 1. Edit file again 'edit_this_file.rb' -2. Check status -3. Add and commit with wrong message -4. Check log -5. Amend commit -6. Check log -7. Soft reset -8. Check log -9. Pull for updates -10. Push changes +1. Check status +1. Add and commit with wrong message +1. Check log +1. Amend commit +1. Check log +1. Soft reset +1. Check log +1. Pull for updates +1. 
Push changes ---- @@ -389,9 +390,9 @@ Don't reset after pushing ### Note -git revert vs git reset -Reset removes the commit while revert removes the changes but leaves the commit -Revert is safer considering we can revert a revert +git revert vs git reset +Reset removes the commit while revert removes the changes but leaves the commit +Revert is safer considering we can revert a revert # Changed file diff --git a/doc/university/training/topics/bisect.md b/doc/university/training/topics/bisect.md index 01e93e4dcb0..4848d0412c1 100644 --- a/doc/university/training/topics/bisect.md +++ b/doc/university/training/topics/bisect.md @@ -2,7 +2,7 @@ comments: false --- -# Bisect +# Bisect ---------- @@ -17,11 +17,11 @@ comments: false ## Bisect 1. Start the bisect process -2. Enter the bad revision (usually latest commit) -3. Enter a known good revision (commit/branch) -4. Run code to see if bug still exists -5. Tell bisect the result -6. Repeat the previous 2 items until you find the offending commit +1. Enter the bad revision (usually latest commit) +1. Enter a known good revision (commit/branch) +1. Run code to see if bug still exists +1. Tell bisect the result +1. Repeat the previous 2 items until you find the offending commit ---------- diff --git a/doc/university/training/topics/getting_started.md b/doc/university/training/topics/getting_started.md index 1441e4b89b2..66cb08feacb 100644 --- a/doc/university/training/topics/getting_started.md +++ b/doc/university/training/topics/getting_started.md @@ -35,11 +35,10 @@ comments: false ## Instantiate workflow with clone -1. Create a project in your user namespace - - Choose to import from 'Any Repo by URL' and use - https://gitlab.com/gitlab-org/training-examples.git -2. Create a '`Workspace`' directory in your home directory. -3. Clone the '`training-examples`' project +1. Create a project in your user namespace. + - Choose to import from 'Any Repo by URL' and use <https://gitlab.com/gitlab-org/training-examples.git>. +1. Create a '`Workspace`' directory in your home directory. +1. Clone the '`training-examples`' project. ---------- diff --git a/doc/university/training/topics/git_log.md b/doc/university/training/topics/git_log.md index 3e39ea5cc9a..6ba6f9eb69d 100644 --- a/doc/university/training/topics/git_log.md +++ b/doc/university/training/topics/git_log.md @@ -46,11 +46,11 @@ Git log lists commit history. It allows searching and filtering. ## Git Log Workflow 1. Change to workspace directory -2. Clone the multi runner projects -3. Change to project dir -4. Search by author -5. Search by date -6. Combine +1. Clone the multi runner projects +1. Change to project dir +1. Search by author +1. Search by date +1. Combine ---------- diff --git a/doc/university/training/topics/merge_conflicts.md b/doc/university/training/topics/merge_conflicts.md index 9a1ce550868..071baddf508 100644 --- a/doc/university/training/topics/merge_conflicts.md +++ b/doc/university/training/topics/merge_conflicts.md @@ -16,15 +16,15 @@ comments: false ## Merge conflicts 1. Checkout a new branch and edit `conflicts.rb`. Add 'Line4' and 'Line5'. -2. Commit and push -3. Checkout master and edit `conflicts.rb`. Add 'Line6' and 'Line7' below 'Line3'. -4. Commit and push to master -5. Create a merge request and watch it fail -6. Rebase our new branch with master -7. Fix conflicts on the `conflicts.rb` file. -8. Stage the file and continue rebasing -9. Force push the changes -10. Finally continue with the Merge Request +1. Commit and push. +1. 
Checkout master and edit `conflicts.rb`. Add 'Line6' and 'Line7' below 'Line3'. +1. Commit and push to master. +1. Create a merge request and watch it fail. +1. Rebase our new branch with master. +1. Fix conflicts on the `conflicts.rb` file. +1. Stage the file and continue rebasing. +1. Force push the changes. +1. Finally continue with the Merge Request. ---------- diff --git a/doc/university/training/topics/rollback_commits.md b/doc/university/training/topics/rollback_commits.md index 11cb557651f..44304634f36 100644 --- a/doc/university/training/topics/rollback_commits.md +++ b/doc/university/training/topics/rollback_commits.md @@ -41,15 +41,15 @@ comments: false ## Reset Workflow 1. Edit file again 'edit_this_file.rb' -2. Check status -3. Add and commit with wrong message -4. Check log -5. Amend commit -6. Check log -7. Soft reset -8. Check log -9. Pull for updates -10. Push changes +1. Check status +1. Add and commit with wrong message +1. Check log +1. Amend commit +1. Check log +1. Soft reset +1. Check log +1. Pull for updates +1. Push changes ---------- diff --git a/doc/university/training/topics/stash.md b/doc/university/training/topics/stash.md index 315ced1a196..42eedea14e5 100644 --- a/doc/university/training/topics/stash.md +++ b/doc/university/training/topics/stash.md @@ -66,12 +66,12 @@ stashes. ## Git Stash 1. Modify a file -2. Stage file -3. Stash it -4. View our stash list -5. Confirm no pending changes through status -5. Apply with pop -6. View list to confirm changes +1. Stage file +1. Stash it +1. View our stash list +1. Confirm no pending changes through status +1. Apply with pop +1. View list to confirm changes ---------- diff --git a/doc/university/training/topics/tags.md b/doc/university/training/topics/tags.md index 9526bcbfb82..14c39457838 100644 --- a/doc/university/training/topics/tags.md +++ b/doc/university/training/topics/tags.md @@ -22,7 +22,7 @@ comments: false **Additional resources** -<http://git-scm.com/book/en/Git-Basics-Tagging> +<https://git-scm.com/book/en/Git-Basics-Tagging> ---------- diff --git a/doc/update/README.md b/doc/update/README.md index 7d3c4c310a4..d4fc0cc91bf 100644 --- a/doc/update/README.md +++ b/doc/update/README.md @@ -38,12 +38,12 @@ Starting with GitLab 9.1.0 it's possible to upgrade to a newer major, minor, or patch version of GitLab without having to take your GitLab instance offline. However, for this to work there are the following requirements: -1. You can only upgrade 1 minor release at a time. So from 9.1 to 9.2, not to +- You can only upgrade 1 minor release at a time. So from 9.1 to 9.2, not to 9.3. -2. You have to use [post-deployment +- You have to use [post-deployment migrations](../development/post_deployment_migrations.md) (included in - zero downtime update steps below) -3. You are using PostgreSQL. If you are using MySQL please look at the release + zero downtime update steps below). +- You are using PostgreSQL. If you are using MySQL please look at the release post to see if downtime is required. Most of the time you can safely upgrade from a patch release to the next minor diff --git a/doc/user/admin_area/settings/continuous_integration.md b/doc/user/admin_area/settings/continuous_integration.md index 6025a5bbcda..d4853a5842e 100644 --- a/doc/user/admin_area/settings/continuous_integration.md +++ b/doc/user/admin_area/settings/continuous_integration.md @@ -49,3 +49,19 @@ and the default value is `30 days`. 
On GitLab.com they This setting is set per job and can be overridden in [`.gitlab-ci.yml`](../../../ci/yaml/README.md#artifacts-expire_in). To disable the expiration, set it to `0`. The default unit is in seconds. + +## Archive jobs **[CORE ONLY]** + +Archiving jobs is useful for reducing the CI/CD footprint on the system by +removing some of the capabilities of the jobs (metadata needed to run the job), +but persisting the traces and artifacts for auditing purposes. + +To set the duration after which jobs are considered old and expired: + +1. Go to **Admin area > Settings > CI/CD > Continuous Integration and Deployment**. +1. Change the value of "Archive jobs". +1. Hit **Save changes** for the changes to take effect. + +Once that time passes, the jobs are archived and can no longer be retried. +Leave the value empty to never archive jobs. The value must be no less than 1 day, +for example: <code>15 days</code>, <code>1 month</code>, <code>2 years</code>. diff --git a/doc/user/markdown.md b/doc/user/markdown.md index 93aa41e9a98..6c6119a2691 100644 --- a/doc/user/markdown.md +++ b/doc/user/markdown.md @@ -1,6 +1,11 @@ -# Markdown +# GitLab Markdown -This markdown guide is valid for GitLab's system markdown entries and files. +This markdown guide is **valid for GitLab's system markdown entries and files**. +It is not valid for the [GitLab documentation website](https://docs.gitlab.com) +nor [GitLab's main website](https://about.gitlab.com), as they both use +[Kramdown](https://kramdown.gettalong.org) as their markdown engine. +The documentation website uses an extended Kramdown gem, [GitLab Kramdown](https://gitlab.com/gitlab-org/gitlab_kramdown). +Consult the [GitLab Kramdown Guide](https://about.gitlab.com/handbook/product/technical-writing/markdown-guide/) for a complete Kramdown reference. ## GitLab Flavored Markdown (GFM) @@ -8,21 +13,21 @@ GitLab uses "GitLab Flavored Markdown" (GFM). It extends the [CommonMark specifi You can use GFM in the following areas: -- comments -- issues -- merge requests -- milestones -- snippets (the snippet must be named with a `.md` extension) -- wiki pages -- markdown documents inside the repository +- Comments +- Issues +- Merge requests +- Milestones +- Snippets (the snippet must be named with a `.md` extension) +- Wiki pages +- Markdown documents inside repositories You can also use other rich text files in GitLab. You might have to install a dependency to do so. Please see the [`github-markup` gem readme](https://github.com/gitlabhq/markup#markups) for more information. > **Notes:** > -> For the best result, we encourage you to check this document out as rendered -> by GitLab itself: [markdown.md] +> For the best result, we encourage you to check this document out as [rendered +> by GitLab itself](markdown.md). > > As of 11.1, GitLab uses the [CommonMark Ruby Library][commonmarker] for Markdown processing of all new issues, merge requests, comments, and other Markdown content in the GitLab system. As of 11.3, wiki pages and Markdown files (`.md`) in the repositories are also processed with CommonMark. Older content in issues/comments is still processed using the [Redcarpet Ruby library][redcarpet]. > +> The documentation website had its [markdown engine migrated from Redcarpet to Kramdown](https://gitlab.com/gitlab-com/gitlab-docs/merge_requests/108) +in October 2018. 
+> > _Where there are significant differences, we will try to call them out in this document._ ### Transitioning to CommonMark diff --git a/doc/user/profile/account/two_factor_authentication.md b/doc/user/profile/account/two_factor_authentication.md index 64219737d61..76f7e869ff7 100644 --- a/doc/user/profile/account/two_factor_authentication.md +++ b/doc/user/profile/account/two_factor_authentication.md @@ -158,7 +158,7 @@ authentication. If an SSH key is added to your GitLab account, you can generate a new set of recovery codes with SSH. 1. Run `ssh git@gitlab.example.com 2fa_recovery_codes`. -2. You are prompted to confirm that you want to generate new codes. Continuing this process invalidates previously saved codes. +1. You are prompted to confirm that you want to generate new codes. Continuing this process invalidates previously saved codes. ``` bash $ ssh git@gitlab.example.com 2fa_recovery_codes @@ -185,7 +185,7 @@ a new set of recovery codes with SSH. so you do not lose access to your account again. ``` -3. Go to the GitLab sign-in page and enter your username/email and password. +1. Go to the GitLab sign-in page and enter your username/email and password. When prompted for a two-factor code, enter one of the recovery codes obtained from the command-line output. diff --git a/doc/user/profile/index.md b/doc/user/profile/index.md index da7c30b6b39..2f989a26725 100644 --- a/doc/user/profile/index.md +++ b/doc/user/profile/index.md @@ -97,13 +97,13 @@ You and GitLab admins can see your the abovementioned information on your profil > [Introduced](https://gitlab.com/gitlab-org/gitlab-ce/issues/14078) in GitLab 11.3. -Enabling private contributions will include contributions to private projects, in the user contribution calendar graph and user recent activity. +Enabling private contributions will include contributions to private projects, in the user contribution calendar graph and user recent activity. To enable private contributions: 1. Navigate to your personal [profile settings](#profile-settings). -2. Check the "Private contributions" option. -3. Hit **Update profile settings**. +1. Check the "Private contributions" option. +1. Hit **Update profile settings**. ## Current status diff --git a/doc/user/project/clusters/eks_and_gitlab/img/new_project.png b/doc/user/project/clusters/eks_and_gitlab/img/new_project.png Binary files differdeleted file mode 100644 index 02afc099f10..00000000000 --- a/doc/user/project/clusters/eks_and_gitlab/img/new_project.png +++ /dev/null diff --git a/doc/user/project/clusters/eks_and_gitlab/img/rbac.png b/doc/user/project/clusters/eks_and_gitlab/img/rbac.png Binary files differnew file mode 100644 index 00000000000..517e4f7ca44 --- /dev/null +++ b/doc/user/project/clusters/eks_and_gitlab/img/rbac.png diff --git a/doc/user/project/clusters/eks_and_gitlab/index.md b/doc/user/project/clusters/eks_and_gitlab/index.md index ef19b05fb9e..fa2ed21f980 100644 --- a/doc/user/project/clusters/eks_and_gitlab/index.md +++ b/doc/user/project/clusters/eks_and_gitlab/index.md @@ -1,75 +1,171 @@ ---- -author: Joshua Lambert -author_gitlab: joshlambert -level: intermediate -article_type: tutorial -date: 2018-06-05 ---- - # Connecting and deploying to an Amazon EKS cluster -## Introduction +In this tutorial, we will show how to integrate an +[Amazon EKS](https://aws.amazon.com/eks/) cluster with GitLab and begin +deploying applications. 
-In this tutorial, we will show how easy it is to integrate an [Amazon EKS](https://aws.amazon.com/eks/) cluster with GitLab, and begin deploying applications. +## Introduction For an end-to-end walkthrough we will: -1. Start with a new project based on the sample Ruby on Rails template -1. Integrate an EKS cluster -1. Utilize [Auto DevOps](../../../../topics/autodevops/) to build, test, and deploy our application +1. Start with a new project based on the sample Ruby on Rails template. +1. Integrate an EKS cluster. +1. Utilize [Auto DevOps](../../../../topics/autodevops/) to build, test, and deploy our application. You will need: -1. An account on GitLab, like [GitLab.com](https://gitlab.com) -1. An Amazon EKS cluster -1. `kubectl` [installed and configured for access to the EKS cluster](https://docs.aws.amazon.com/eks/latest/userguide/getting-started.html#get-started-kubectl) +1. An account on GitLab, like [GitLab.com](https://gitlab.com). +1. An Amazon EKS cluster (with worker nodes properly configured). +1. `kubectl` [installed and configured for access to the EKS cluster](https://docs.aws.amazon.com/eks/latest/userguide/getting-started.html#get-started-kubectl). -If you don't have an Amazon EKS cluster, one can be created by following [the EKS getting started guide](https://docs.aws.amazon.com/eks/latest/userguide/getting-started.html). +If you don't have an Amazon EKS cluster, one can be created by following the +[EKS getting started guide](https://docs.aws.amazon.com/eks/latest/userguide/getting-started.html). ## Creating a new project -On GitLab, create a new project by clicking on the `+` icon in the top navigation bar, and selecting `New project`. - - +On GitLab, create a new project by clicking on the `+` icon in the top navigation +bar and selecting **New project**. -On the new project screen, click on the `Create from template` tab, and select `Use template` for the Ruby on Rails sample project. +On the new project screen, click on the **Create from template** tab, and select +"Use template" for the Ruby on Rails sample project. -Give the project a name, and then select `Create project`. +Give the project a name, and then select **Create project**.  -## Connecting the EKS cluster +## Configuring and connecting the EKS cluster + +From the left side bar, hover over **Operations > Kubernetes > Add Kubernetes cluster**, +then click **Add an existing Kubernetes cluster**. + +A few details from the EKS cluster will be required to connect it to GitLab: + +1. **Retrieve the certificate**: A valid Kubernetes certificate is needed to + authenticate to the EKS cluster. We will use the certificate created by default. + Open a shell and use `kubectl` to retrieve it: + + - List the secrets with `kubectl get secrets`, and one should named similar to + `default-token-xxxxx`. Copy that token name for use below. + - Get the certificate with: + + ```sh + kubectl get secret <secret name> -o jsonpath="{['data']['ca\.crt']}" | base64 -D + ``` + +1. **Create admin token**: A `cluster-admin` token is required to install and + manage Helm Tiller. GitLab establishes mutual SSL auth with Helm Tiller + and creates limited service accounts for each application. To create the + token we will create an admin service account as follows: + + 2.1. Create a file called `eks-admin-service-account.yaml` with contents: + + ```yaml + apiVersion: v1 + kind: ServiceAccount + metadata: + name: eks-admin + namespace: kube-system + ``` + + 2.2. 
Apply the service account to your cluster: + + ```bash + kubectl apply -f eks-admin-service-account.yaml + ``` + + Output: + + ```bash + serviceaccount "eks-admin" created + ``` -From the left side bar, hover over `Operations` and select `Kubernetes`, then click on `Add Kubernetes cluster`, and finally `Add an existing Kubernetes cluster`. + 2.3. Create a file called `eks-admin-cluster-role-binding.yaml` with contents: -A few details from the EKS cluster will be required to connect it to GitLab. + ```yaml + apiVersion: rbac.authorization.k8s.io/v1beta1 + kind: ClusterRoleBinding + metadata: + name: eks-admin + roleRef: + apiGroup: rbac.authorization.k8s.io + kind: ClusterRole + name: cluster-admin + subjects: + - kind: ServiceAccount + name: eks-admin + namespace: kube-system + ``` -1. A valid Kubernetes certificate and token are needed to authenticate to the EKS cluster. A pair is created by default, which can be used. Open a shell and use `kubectl` to retrieve them: - * List the secrets with `kubectl get secrets`, and one should named similar to `default-token-xxxxx`. Copy that token name for use below. - * Get the certificate with `kubectl get secret <secret name> -o jsonpath="{['data']['ca\.crt']}" | base64 -D` - * Retrieve the token with `kubectl get secret <secret name> -o jsonpath="{['data']['token']}" | base64 -D`. -1. The API server endpoint is also required, so GitLab can connect to the cluster. This is displayed on the AWS EKS console, when viewing the EKS cluster details. + 2.4. Apply the cluster role binding to your cluster: + + ```bash + kubectl apply -f eks-admin-cluster-role-binding.yaml + ``` + + Output: + + ```bash + clusterrolebinding "eks-admin" created + ``` + + 2.5. Retrieve the token for the `eks-admin` service account: + + ```bash + kubectl -n kube-system describe secret $(kubectl -n kube-system get secret | grep eks-admin | awk '{print $1}') + ``` + + Copy the `<authentication_token>` value from the output: + + ```yaml + Name: eks-admin-token-b5zv4 + Namespace: kube-system + Labels: <none> + Annotations: kubernetes.io/service-account.name=eks-admin + kubernetes.io/service-account.uid=bcfe66ac-39be-11e8-97e8-026dce96b6e8 + + Type: kubernetes.io/service-account-token + + Data + ==== + ca.crt: 1025 bytes + namespace: 11 bytes + token: <authentication_token> + ``` + +1. The API server endpoint is also required, so GitLab can connect to the cluster. + This is displayed on the AWS EKS console, when viewing the EKS cluster details. You now have all the information needed to connect the EKS cluster: -* Kubernetes cluster name: Provide a name for the cluster to identify it within GitLab. -* Environment scope: Leave this as `*` for now, since we are only connecting a single cluster. -* API URL: Paste in the API server endpoint retrieved above. -* CA Certificate: Paste the certificate data from the earlier step, as-is. -* Paste the token value. -* Project namespace: This can be left blank to accept the default namespace, based on the project name. +- Kubernetes cluster name: Provide a name for the cluster to identify it within GitLab. +- Environment scope: Leave this as `*` for now, since we are only connecting a single cluster. +- API URL: Paste in the API server endpoint retrieved above. +- CA Certificate: Paste the certificate data from the earlier step, as-is. +- Paste the admin token value. +- Project namespace: This can be left blank to accept the default namespace, based on the project name.  -Click on `Add Kubernetes cluster`, the cluster is now connected to GitLab. 
At this point, [Kubernetes deployment variables](../#deployment-variables) will automatically be available during CI jobs, making it easy to interact with the cluster. +Click on **Add Kubernetes cluster**, the cluster is now connected to GitLab. +At this point, [Kubernetes deployment variables](../#deployment-variables) will +automatically be available during CI/CD jobs, making it easy to interact with the cluster. If you would like to utilize your own CI/CD scripts to deploy to the cluster, you can stop here. -## Disable Role-Based Access Control (RBAC) +## Disable Role-Based Access Control (RBAC) (optional) -Presently, Auto DevOps and one-click app installs do not support [Kubernetes role-based access control](https://kubernetes.io/docs/reference/access-authn-authz/rbac/). Support is [being worked on](https://gitlab.com/groups/gitlab-org/-/epics/136), but in the interim RBAC must be disabled to utilize for these features. +When connecting a cluster via GitLab integration, you may specify whether the +cluster is RBAC-enabled or not. This will affect how GitLab interacts with the +cluster for certain operations. If you **did not** check the "RBAC-enabled cluster" +checkbox at creation time, GitLab will assume RBAC is disabled for your cluster +when interacting with it. If so, you must disable RBAC on your cluster for the +integration to work properly. -> **Note**: Disabling RBAC means that any application running in the cluster, or user who can authenticate to the cluster, has full API access. This is a [security concern](https://docs.gitlab.com/ee/user/project/clusters/#security-implications), and may not be desirable. + + +NOTE: **Note**: Disabling RBAC means that any application running in the cluster, +or user who can authenticate to the cluster, has full API access. This is a +[security concern](../index.md#security-implications), and may not be desirable. To effectively disable RBAC, global permissions can be applied granting full access: @@ -83,56 +179,100 @@ kubectl create clusterrolebinding permissive-binding \ ## Deploy services to the cluster -GitLab supports one-click deployment of helpful services to the cluster, many of which support Auto DevOps. Back on the Kubernetes cluster screen in GitLab, a list of applications is now available to deploy. +GitLab supports one-click deployment of helpful services to the cluster, many of +which support Auto DevOps. Back on the Kubernetes cluster screen in GitLab, a +list of applications is now available to deploy. -First install Helm Tiller, a package manager for Kubernetes. This enables deployment of the other applications. +First, install Helm Tiller, a package manager for Kubernetes. This enables +deployment of the other applications.  ### Deploying NGINX Ingress (optional) -Next, if you would like the deployed app to be reachable on the internet, deploy the Ingress. Note that this will also cause an [Elastic Load Balancer](https://aws.amazon.com/documentation/elastic-load-balancing/) to be created, which will incur additional AWS costs. +Next, if you would like the deployed app to be reachable on the internet, deploy +the Ingress. Note that this will also cause an +[Elastic Load Balancer](https://aws.amazon.com/documentation/elastic-load-balancing/) +to be created, which will incur additional AWS costs. -Once installed, you may see a `?` for `Ingress IP Address`. This is because the created ELB is available at a DNS name, not an IP address. 
To get the DNS name, run: `kubectl get service ingress-nginx-ingress-controller -n gitlab-managed-apps -o jsonpath="{.status.loadBalancer.ingress[0].hostname}"`. Note, you may see a trailing `%` on some Kubernetes versions, do not include it. +Once installed, you may see a `?` for "Ingress IP Address". This is because the +created ELB is available at a DNS name, not an IP address. To get the DNS name, +run: -The Ingress is now available at this address, and will route incoming requests to the proper service based on the DNS name in the request. To support this, a wildcard DNS CNAME record should be created for the desired domain name. For example `*.myekscluster.com` would point to the Ingress hostname obtained earlier. +```sh +kubectl get service ingress-nginx-ingress-controller -n gitlab-managed-apps -o jsonpath="{.status.loadBalancer.ingress[0].hostname}" +``` + +Note that you may see a trailing `%` on some Kubernetes versions, **do not include it**. + +The Ingress is now available at this address and will route incoming requests to +the proper service based on the DNS name in the request. To support this, a +wildcard DNS CNAME record should be created for the desired domain name. For example, +`*.myekscluster.com` would point to the Ingress hostname obtained earlier.  ### Deploying the GitLab Runner (optional) -If the project is on GitLab.com, free shared runners are available and you do not have to deploy one. If a project specific runner is desired, or there are no shared runners, it is easy to deploy one. +If the project is on GitLab.com, free shared Runners are available and you do +not have to deploy one. If a project specific Runner is desired, or there are no +shared Runners, it is easy to deploy one. -Simply click on the `Install` button for the GitLab Runner. It is important to note that the runner deployed is set as **privileged**, which means it essentially has root access to the underlying machine. This is required to build docker images, and so is on by default. +Simply click on the **Install** button for the GitLab Runner. It is important to +note that the Runner deployed is set as **privileged**, which means it essentially +has root access to the underlying machine. This is required to build docker images, +and so is on by default. ### Deploying Prometheus (optional) -GitLab is able to monitor applications automatically, utilizing [Prometheus](../../integrations/prometheus.html). Kubernetes container CPU and memory metrics are automatically collected, and response metrics are retrieved from NGINX Ingress as well. +GitLab is able to monitor applications automatically, utilizing +[Prometheus](../../integrations/prometheus.html). Kubernetes container CPU and +memory metrics are automatically collected, and response metrics are retrieved +from NGINX Ingress as well. -To enable monitoring, simply install Prometheus into the cluster with the `Install` button. +To enable monitoring, simply install Prometheus into the cluster with the +**Install** button. ## Create a default Storage Class -Amazon EKS does not have a default Storage Class out of the box, which means requests for persistent volumes will not be automatically fulfilled. As part of Auto DevOps, the deployed Postgres instance requests persistent storage, and without a default storage class it will fail to start. +Amazon EKS doesn't have a default Storage Class out of the box, which means +requests for persistent volumes will not be automatically fulfilled. 
As part +of Auto DevOps, the deployed Postgres instance requests persistent storage, +and without a default storage class it will fail to start. -If a default Storage Class does not already exist and is desired, follow Amazon's [short guide](https://docs.aws.amazon.com/eks/latest/userguide/storage-classes.html) to create one. +If a default Storage Class doesn't already exist and is desired, follow Amazon's +[guide on storage classes](https://docs.aws.amazon.com/eks/latest/userguide/storage-classes.html) +to create one. -Alternatively, disable Postgres by setting the project variable [`POSTGRES_ENABLED`](../../../../topics/autodevops/#environment-variables) to `false`. +Alternatively, disable Postgres by setting the project variable +[`POSTGRES_ENABLED`](../../../../topics/autodevops/#environment-variables) to `false`. ## Deploy the app to EKS -With RBAC disabled and services deployed, [Auto DevOps](https://docs.gitlab.com/ee/topics/autodevops/) can now be leveraged to build, test, and deploy the app. To enable, click on `Settings` in the left sidebar, then `CI/CD`. You will see a section for `Auto DevOps`, expand it. Click on the radio button to `Enable Auto DevOps`. +With RBAC disabled and services deployed, +[Auto DevOps](../../../../topics/autodevops/index.md) can now be leveraged +to build, test, and deploy the app. -If a wildcard DNS entry was created resolving to the Load Balancer, enter it in the `domain` field. Otherwise, the deployed app will not be externally available outside of the cluster. To save, click `Save changes`. +[Enable Auto DevOps](../../../../topics/autodevops/index.md##enablingdisabling-auto-devops-at-the-project-level) +if not already enabled. If a wildcard DNS entry was created resolving to the +Load Balancer, enter it in the `domain` field under the Auto DevOps settings. +Otherwise, the deployed app will not be externally available outside of the cluster.  -A new pipeline will automatically be created, which will begin to build, test, and deploy the app. +A new pipeline will automatically be created, which will begin to build, test, +and deploy the app. -After the pipeline has finished, your app will be running in EKS and available to users. Click on `CI/CD` tab in the left navigation bar, and choose `Environments`. +After the pipeline has finished, your app will be running in EKS and available +to users. Click on **CI/CD > Environments**.  -You will see a list of the environments and their deploy status, as well as options to browse to the app, view monitoring metrics, and even access a shell on the running pod. +You will see a list of the environments and their deploy status, as well as +options to browse to the app, view monitoring metrics, and even access a shell +on the running pod. + +## Learn more -To learn more about Auto DevOps, review our [documentation](../../../../topics/autodevops/). +To learn more on automatically deploying your applications, +read about [Auto DevOps](../../../../topics/autodevops/index.md). diff --git a/doc/user/project/clusters/index.md b/doc/user/project/clusters/index.md index 1d1de01c120..c8003d00e73 100644 --- a/doc/user/project/clusters/index.md +++ b/doc/user/project/clusters/index.md @@ -49,8 +49,8 @@ new Kubernetes cluster to your project: NOTE: **Note:** You need Maintainer [permissions] and above to access the Kubernetes page. -1. Click on **Add Kubernetes cluster**. -1. Click on **Create with Google Kubernetes Engine**. +1. Click **Add Kubernetes cluster**. +1. Click **Create with Google Kubernetes Engine**. 1. 
Connect your Google account if you haven't done already by clicking the **Sign in with Google** button. 1. From there on, choose your cluster's settings: @@ -78,8 +78,8 @@ To add an existing Kubernetes cluster to your project: NOTE: **Note:** You need Maintainer [permissions] and above to access the Kubernetes page. -1. Click on **Add Kubernetes cluster**. -1. Click on **Add an existing Kubernetes cluster** and fill in the details: +1. Click **Add Kubernetes cluster**. +1. Click **Add an existing Kubernetes cluster** and fill in the details: - **Kubernetes cluster name** (required) - The name you wish to give the cluster. - **Environment scope** (required)- The [associated environment](#setting-the-environment-scope) to this cluster. @@ -228,7 +228,7 @@ twice, which can lead to confusion during deployments. | [Prometheus](https://prometheus.io/docs/introduction/overview/) | 10.4+ | Prometheus is an open-source monitoring and alerting system useful to supervise your deployed applications. | [stable/prometheus](https://github.com/helm/charts/tree/master/stable/prometheus) | | [GitLab Runner](https://docs.gitlab.com/runner/) | 10.6+ | GitLab Runner is the open source project that is used to run your jobs and send the results back to GitLab. It is used in conjunction with [GitLab CI/CD](https://about.gitlab.com/features/gitlab-ci-cd/), the open-source continuous integration service included with GitLab that coordinates the jobs. When installing the GitLab Runner via the applications, it will run in **privileged mode** by default. Make sure you read the [security implications](#security-implications) before doing so. | [runner/gitlab-runner](https://gitlab.com/charts/gitlab-runner) | | [JupyterHub](http://jupyter.org/) | 11.0+ | [JupyterHub](https://jupyterhub.readthedocs.io/en/stable/) is a multi-user service for managing notebooks across a team. [Jupyter Notebooks](https://jupyter-notebook.readthedocs.io/en/latest/) provide a web-based interactive programming environment used for data analysis, visualization, and machine learning. We use [this](https://gitlab.com/gitlab-org/jupyterhub-user-image/blob/master/Dockerfile) custom Jupyter image that installs additional useful packages on top of the base Jupyter. You will also see ready-to-use DevOps Runbooks built with Nurtch's [Rubix library](https://github.com/amit1rrr/rubix). More information on creating executable runbooks can be found at [Nurtch Documentation](http://docs.nurtch.com/en/latest). **Note**: Authentication will be enabled for any user of the GitLab server via OAuth2. HTTPS will be supported in a future release. | [jupyter/jupyterhub](https://jupyterhub.github.io/helm-chart/) | -| [Knative](https://cloud.google.com/knative) | 0.1.2 | Knative provides a platform to create, deploy, and manage serverless workloads from a Kubernetes cluster. It is used in conjunction with, and includes [Istio](https://istio.io) to provide an external IP address for all programs hosted by Knative. You will be prompted to enter a wildcard domain where your applications will be exposed. Configure your DNS server to use the external IP address for that domain. For any application created and installed, they will be accessible as <program_name>.<kubernetes_namespace>.<domain_name>. **Note**: This will require your kubernetes cluster to have RBAC enabled. 
| [knative/knative](https://storage.googleapis.com/triggermesh-charts) +| [Knative](https://cloud.google.com/knative) | 0.1.2 | Knative provides a platform to create, deploy, and manage serverless workloads from a Kubernetes cluster. It is used in conjunction with, and includes [Istio](https://istio.io) to provide an external IP address for all programs hosted by Knative. You will be prompted to enter a wildcard domain where your applications will be exposed. Configure your DNS server to use the external IP address for that domain. For any application created and installed, they will be accessible as `<program_name>.<kubernetes_namespace>.<domain_name>`. **Note**: This will require your kubernetes cluster to have RBAC enabled. | [knative/knative](https://storage.googleapis.com/triggermesh-charts) NOTE: **Note:** As of GitLab 11.6 Helm Tiller will be upgraded to the latest version supported @@ -259,10 +259,10 @@ your ingress application in which case you should manually determine it. ### Manually determining the IP address -If the cluster is on GKE, click on the **Google Kubernetes Engine** link in the +If the cluster is on GKE, click the **Google Kubernetes Engine** link in the **Advanced settings**, or go directly to the [Google Kubernetes Engine dashboard](https://console.cloud.google.com/kubernetes/) -and select the proper project and cluster. Then click on **Connect** and execute +and select the proper project and cluster. Then click **Connect** and execute the `gcloud` command in a local terminal or using the **Cloud Shell**. If the cluster is not on GKE, follow the specific instructions for your @@ -276,7 +276,8 @@ kubectl get svc --namespace=gitlab-managed-apps ingress-nginx-ingress-controller ``` NOTE: **Note:** -For Istio/Knative, the command will be different: +For Istio/Knative, use the following command: + ```bash kubectl get svc --namespace=istio-system knative-ingressgateway -o jsonpath='{.status.loadBalancer.ingress[0].ip} ' ``` @@ -288,6 +289,7 @@ kubectl get svc --all-namespaces -o jsonpath='{range.items[?(@.status.loadBalanc ``` > **Note**: Some Kubernetes clusters return a hostname instead, like [Amazon EKS](https://aws.amazon.com/eks/). For these platforms, run: + > ```bash > kubectl get service ingress-nginx-ingress-controller -n gitlab-managed-apps -o jsonpath="{.status.loadBalancer.ingress[0].hostname}". > ``` @@ -304,7 +306,7 @@ your apps will not be able to be reached, and you'd have to change the DNS record again. In order to avoid that, you should change it into a static reserved IP. -[Read how to promote an ephemeral external IP address in GKE.](https://cloud.google.com/compute/docs/ip-addresses/reserve-static-external-ip-address#promote_ephemeral_ip) +Read how to [promote an ephemeral external IP address in GKE](https://cloud.google.com/compute/docs/ip-addresses/reserve-static-external-ip-address#promote_ephemeral_ip). ### Pointing your DNS at the cluster IP @@ -410,7 +412,7 @@ service account of the cluster integration. After you have successfully added your cluster information, you can enable the Kubernetes cluster integration: -1. Click the "Enabled/Disabled" switch +1. Click the **Enabled/Disabled** switch 1. Hit **Save** for the changes to take effect You can now start using your Kubernetes cluster for your deployments. @@ -427,7 +429,7 @@ When you remove a cluster, you only remove its relation to GitLab, not the cluster itself. To remove the cluster, you can do so by visiting the GKE dashboard or using `kubectl`. 
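For example, if the cluster was created on GKE and the `gcloud` CLI is configured locally, deleting the cluster itself might look like the following (the cluster name and zone are placeholders):

```sh
# Sketch only: this deletes the GKE cluster itself, not just the GitLab integration.
# "my-cluster" and "us-central1-a" are placeholders; adjust them to your environment.
gcloud container clusters delete my-cluster --zone us-central1-a
```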
-To remove the Kubernetes cluster integration from your project, simply click on the +To remove the Kubernetes cluster integration from your project, simply click the **Remove integration** button. You will then be able to follow the procedure and add a Kubernetes cluster again. @@ -490,7 +492,13 @@ the deployment variables above, ensuring any pods you create are labelled with ## Read more -- [Connecting and deploying to an Amazon EKS cluster](eks_and_gitlab/index.md) +### Integrating Amazon EKS cluster with GitLab + +- Learn how to [connect and deploy to an Amazon EKS cluster](eks_and_gitlab/index.md). + +### Serverless + +- [Run serverless workloads on Kubernetes with Knative.](serverless/index.md) [permissions]: ../../permissions.md [ee]: https://about.gitlab.com/pricing/ diff --git a/doc/user/project/clusters/serverless/img/deploy-stage.png b/doc/user/project/clusters/serverless/img/deploy-stage.png Binary files differnew file mode 100644 index 00000000000..dc2f8af9c63 --- /dev/null +++ b/doc/user/project/clusters/serverless/img/deploy-stage.png diff --git a/doc/user/project/clusters/serverless/img/dns-entry.png b/doc/user/project/clusters/serverless/img/dns-entry.png Binary files differnew file mode 100644 index 00000000000..2e7655c6041 --- /dev/null +++ b/doc/user/project/clusters/serverless/img/dns-entry.png diff --git a/doc/user/project/clusters/serverless/img/install-knative.png b/doc/user/project/clusters/serverless/img/install-knative.png Binary files differnew file mode 100644 index 00000000000..dd576a9df35 --- /dev/null +++ b/doc/user/project/clusters/serverless/img/install-knative.png diff --git a/doc/user/project/clusters/serverless/img/knative-app.png b/doc/user/project/clusters/serverless/img/knative-app.png Binary files differnew file mode 100644 index 00000000000..54301e1786f --- /dev/null +++ b/doc/user/project/clusters/serverless/img/knative-app.png diff --git a/doc/user/project/clusters/serverless/index.md b/doc/user/project/clusters/serverless/index.md new file mode 100644 index 00000000000..bdbc4f7f09d --- /dev/null +++ b/doc/user/project/clusters/serverless/index.md @@ -0,0 +1,137 @@ +# Serverless + +> Introduced in GitLab 11.5. + +Run serverless workloads on Kubernetes using [Knative](https://cloud.google.com/knative/). + +## Overview + +Knative extends Kubernetes to provide a set of middleware components that are useful to build modern, source-centric, container-based applications. Knative brings some significant benefits out of the box through its main components: + +- [Build:](https://github.com/knative/build) Source-to-container build orchestration +- [Eventing:](https://github.com/knative/eventing) Management and delivery of events +- [Serving:](https://github.com/knative/serving) Request-driven compute that can scale to zero + +For more information on Knative, visit the [Knative docs repo](https://github.com/knative/docs). + +## Requirements + +To run Knative on Gitlab, you will need: + +1. **Kubernetes:** An RBAC-enabled Kubernetes cluster is required to deploy Knative. + The simplest way to get started is to add a cluster using [GitLab's GKE integration](https://docs.gitlab.com/ee/user/project/clusters/#adding-and-creating-a-new-gke-cluster-via-gitlab). + GitLab recommends +1. **Helm Tiller:** Helm is a package manager for Kubernetes and is required to install + all the other applications. +1. **Domain Name:** Knative will provide its own load balancer using Istio. It will provide an + external IP address for all the applications served by Knative. 
You will be prompted to enter a + wildcard domain where your applications will be served. Configure your DNS server to use the + external IP address for that domain. +1. **Serverless `gitlab-ci.yml` Template:** GitLab uses [Kaniko](https://github.com/GoogleContainerTools/kaniko) + to build the application and the [TriggerMesh CLI](https://github.com/triggermesh/tm), to simplify the + deployment of knative services and functions. + + Add the following `.gitlab-ci.yml` to the root of your repository (you may skip this step if using the sample + [Knative Ruby App](https://gitlab.com/knative-examples/knative-ruby-app) mentioned below). + + ```yaml + stages: + - build + - deploy + + build: + stage: build + image: + name: gcr.io/kaniko-project/executor:debug + entrypoint: [""] + only: + - master + script: + - echo "{\"auths\":{\"$CI_REGISTRY\":{\"username\":\"$CI_REGISTRY_USER\",\"password\":\"$CI_REGISTRY_PASSWORD\"}}}" > /kaniko/.docker/config.json + - /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile --destination $CI_REGISTRY_IMAGE + + deploy: + stage: deploy + image: gcr.io/triggermesh/tm@sha256:e3ee74db94d215bd297738d93577481f3e4db38013326c90d57f873df7ab41d5 + only: + - master + environment: production + script: + - echo "$CI_REGISTRY_IMAGE" + - tm -n "$KUBE_NAMESPACE" --config "$KUBECONFIG" deploy service "$CI_PROJECT_NAME" --from-image "$CI_REGISTRY_IMAGE" --wait + ``` + +1. **Dockerfile:** Knative requires a Dockerfile in order to build your application. It should be included + at the root of your project's repo and expose port 8080. + +## Installing Knative via GitLab's Kubernetes integration + +NOTE: **Note:** +Minimum recommended cluster size to run Knative is 3-nodes, 6 vCPUs, and 22.50 GB memory. RBAC must be enabled. + +You may download the sample [Knative Ruby App](https://gitlab.com/knative-examples/knative-ruby-app) to get started. + +1. [Add a Kubernetes cluster](https://docs.gitlab.com/ce/user/project/clusters/) and install Helm. + +1. Once Helm has been successfully installed, on the Knative app section, enter the domain to be used with + your application and click "Install". + +  + +1. After the Knative installation has finished, retrieve the Istio Ingress IP address by running the following command: + + ```bash + kubectl get svc --namespace=istio-system knative-ingressgateway -o jsonpath='{.status.loadBalancer.ingress[0].ip} ' + ``` + + Output: + + ```bash + 35.161.143.124 my-machine-name:~ my-user$ + ``` + +1. The ingress is now available at this address and will route incoming requests to the proper service based on the DNS + name in the request. To support this, a wildcard DNS A record should be created for the desired domain name. For example, + if your Knative base domain is `knative.example.com` then you need to create an A record with domain `*.knative.example.com` + pointing the ip address of the ingress. + +  + +## Deploy the application with Knative + +With all the pieces in place, you can simply create a new CI pipeline to deploy the Knative application. Navigate to +**CI/CD >> Pipelines** and click the **Run Pipeline** button at the upper-right part of the screen. Then, on the +Pipelines page, click **Create pipeline**. + +## Obtain the URL for the Knative deployment + +Once all the stages of the pipeline finish, click the **deploy** stage. 
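The deploy job runs the TriggerMesh `tm` CLI from the `.gitlab-ci.yml` template above; as a sketch, the same CLI can also be used from a shell with access to the cluster to inspect the deployed service. The variables below are the ones used in the template and would need real values when run locally:

```sh
# Reuses the variable names from the CI template above; substitute real values when running locally.
tm -n "$KUBE_NAMESPACE" --config "$KUBECONFIG" describe service "$CI_PROJECT_NAME"
```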
+ + + +The output will look like this: + +```bash +Running with gitlab-runner 11.5.0~beta.844.g96d88322 (96d88322) + on docker-auto-scale 72989761 +Using Docker executor with image gcr.io/triggermesh/tm@sha256:e3ee74db94d215bd297738d93577481f3e4db38013326c90d57f873df7ab41d5 ... +Pulling docker image gcr.io/triggermesh/tm@sha256:e3ee74db94d215bd297738d93577481f3e4db38013326c90d57f873df7ab41d5 ... +Using docker image sha256:6b3f6590a9b30bd7aafb9573f047d930c70066e43955b4beb18a1eee175f6de1 for gcr.io/triggermesh/tm@sha256:e3ee74db94d215bd297738d93577481f3e4db38013326c90d57f873df7ab41d5 ... +Running on runner-72989761-project-4342902-concurrent-0 via runner-72989761-stg-srm-1541795796-27929c96... +Cloning repository... +Cloning into '/builds/danielgruesso/knative'... +Checking out 8671ad20 as master... +Skipping Git submodules setup +$ echo "$CI_REGISTRY_IMAGE" +registry.staging.gitlab.com/danielgruesso/knative +$ tm -n "$KUBE_NAMESPACE" --config "$KUBECONFIG" deploy service "$CI_PROJECT_NAME" --from-image "$CI_REGISTRY_IMAGE" --wait +Deployment started. Run "tm -n knative-4342902 describe service knative" to see the details +Waiting for ready state....... +Service domain: knative.knative-4342902.knative.info +Job succeeded +``` + +The second to last line, labeled **Service domain** contains the URL for the deployment. Copy and paste the domain into your +browser to see the app live. + +
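Alternatively, the deployment can be verified from a terminal by requesting the service domain directly. The domain below is the example value from the job output above; substitute your own:

```sh
# Example domain taken from the job output above; replace it with your own service domain.
curl http://knative.knative-4342902.knative.info
```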
\ No newline at end of file diff --git a/doc/user/project/container_registry.md b/doc/user/project/container_registry.md index 1b1827a2658..cac64fc0cb6 100644 --- a/doc/user/project/container_registry.md +++ b/doc/user/project/container_registry.md @@ -139,12 +139,12 @@ docker login registry.example.com -u <username> -p <token> 1. Check to make sure that the system clock on your Docker client and GitLab server have been synchronized (e.g. via NTP). -2. If you are using an S3-backed Registry, double check that the IAM +1. If you are using an S3-backed Registry, double check that the IAM permissions and the S3 credentials (including region) are correct. See [the sample IAM policy](https://docs.docker.com/registry/storage-drivers/s3/) for more details. -3. Check the Registry logs (e.g. `/var/log/gitlab/registry/current`) and the GitLab production logs +1. Check the Registry logs (e.g. `/var/log/gitlab/registry/current`) and the GitLab production logs for errors (e.g. `/var/log/gitlab/gitlab-rails/production.log`). You may be able to find clues there. diff --git a/doc/user/project/deploy_tokens/index.md b/doc/user/project/deploy_tokens/index.md index dc73194309c..7688508c6ac 100644 --- a/doc/user/project/deploy_tokens/index.md +++ b/doc/user/project/deploy_tokens/index.md @@ -13,8 +13,8 @@ You can create as many deploy tokens as you like from the settings of your proje 1. Log in to your GitLab account. 1. Go to the project you want to create Deploy Tokens for. -1. Go to **Settings** > **Repository** -1. Click on "Expand" on **Deploy Tokens** section +1. Go to **Settings** > **Repository**. +1. Click on "Expand" on **Deploy Tokens** section. 1. Choose a name and optionally an expiry date for the token. 1. Choose the [desired scopes](#limiting-scopes-of-a-deploy-token). 1. Click on **Create deploy token**. @@ -46,8 +46,8 @@ the following table. To download a repository using a Deploy Token, you just need to: 1. Create a Deploy Token with `read_repository` as a scope. -2. Take note of your `username` and `token` -3. `git clone` the project using the Deploy Token: +1. Take note of your `username` and `token`. +1. `git clone` the project using the Deploy Token: ```sh git clone http://<username>:<deploy_token>@gitlab.example.com/tanuki/awesome_project.git @@ -60,8 +60,8 @@ Replace `<username>` and `<deploy_token>` with the proper values. To read the container registry images, you'll need to: 1. Create a Deploy Token with `read_registry` as a scope. -2. Take note of your `username` and `token` -3. Log in to GitLab’s Container Registry using the deploy token: +1. Take note of your `username` and `token`. +1. Log in to GitLab’s Container Registry using the deploy token: ```sh docker login registry.example.com -u <username> -p <deploy_token> diff --git a/doc/user/project/import/github.md b/doc/user/project/import/github.md index fcd6192e82f..3e4be043199 100644 --- a/doc/user/project/import/github.md +++ b/doc/user/project/import/github.md @@ -65,9 +65,9 @@ developer documentation. Before you begin, ensure that any GitHub users who you want to map to GitLab users have either: -1. A GitLab account that has logged in using the GitHub icon +- A GitLab account that has logged in using the GitHub icon \- or - -2. 
A GitLab account with an email address that matches the [public email address](https://help.github.com/articles/setting-your-commit-email-address-on-github/) of the GitHub user +- A GitLab account with an email address that matches the [public email address](https://help.github.com/articles/setting-your-commit-email-address-on-github/) of the GitHub user User-matching attempts occur in that order, and if a user is not identified either way, the activity is associated with the user account that is performing the import. @@ -77,10 +77,10 @@ If you are using a self-hosted GitLab instance, this process requires that you h [GitHub integration][gh-import]. 1. From the top navigation bar, click **+** and select **New project**. -2. Select the **Import project** tab and then select **GitHub**. -3. Select the first button to **List your GitHub repositories**. You are redirected to a page on github.com to authorize the GitLab application. -4. Click **Authorize gitlabhq**. You are redirected back to GitLab's Import page and all of your GitHub repositories are listed. -5. Continue on to [selecting which repositories to import](#selecting-which-repositories-to-import). +1. Select the **Import project** tab and then select **GitHub**. +1. Select the first button to **List your GitHub repositories**. You are redirected to a page on github.com to authorize the GitLab application. +1. Click **Authorize gitlabhq**. You are redirected back to GitLab's Import page and all of your GitHub repositories are listed. +1. Continue on to [selecting which repositories to import](#selecting-which-repositories-to-import). ### Using a GitHub token @@ -92,12 +92,12 @@ integration enabled, that should be the preferred method to import your reposito If you are not using the GitHub integration, you can still perform an authorization with GitHub to grant GitLab access your repositories: 1. Go to https://github.com/settings/tokens/new -2. Enter a token description. -3. Select the repo scope. -4. Click **Generate token**. -5. Copy the token hash. -6. Go back to GitLab and provide the token to the GitHub importer. -7. Hit the **List Your GitHub Repositories** button and wait while GitLab reads your repositories' information. +1. Enter a token description. +1. Select the repo scope. +1. Click **Generate token**. +1. Copy the token hash. +1. Go back to GitLab and provide the token to the GitHub importer. +1. Hit the **List Your GitHub Repositories** button and wait while GitLab reads your repositories' information. Once done, you'll be taken to the importer page to select the repositories to import. ### Selecting which repositories to import @@ -107,10 +107,10 @@ your GitHub repositories are listed. 1. By default, the proposed repository namespaces match the names as they exist in GitHub, but based on your permissions, you can choose to edit these names before you proceed to import any of them. -2. Select the **Import** button next to any number of repositories, or select **Import all repositories**. -3. The **Status** column shows the import status of each repository. You can choose to leave the page open and it will +1. Select the **Import** button next to any number of repositories, or select **Import all repositories**. +1. The **Status** column shows the import status of each repository. You can choose to leave the page open and it will update in realtime or you can return to it later. -4. Once a repository has been imported, click its GitLab path to open its GitLab URL. +1. 
Once a repository has been imported, click its GitLab path to open its GitLab URL. ## Mirroring and pipeline status sharing diff --git a/doc/user/project/integrations/bugzilla.md b/doc/user/project/integrations/bugzilla.md index 671804035cc..040e80d529d 100644 --- a/doc/user/project/integrations/bugzilla.md +++ b/doc/user/project/integrations/bugzilla.md @@ -16,8 +16,9 @@ Once you have configured and enabled Bugzilla you'll see the Bugzilla link on th ## Referencing issues in Bugzilla Issues in Bugzilla can be referenced in two alternative ways: -1. `#<ID>` where `<ID>` is a number (example `#143`). -2. `<PROJECT>-<ID>` where `<PROJECT>` starts with a capital letter which is + +- `#<ID>` where `<ID>` is a number (example `#143`). +- `<PROJECT>-<ID>` where `<PROJECT>` starts with a capital letter which is then followed by capital letters, numbers or underscores, and `<ID>` is a number (example `API_32-143`). diff --git a/doc/user/project/integrations/jira_cloud_configuration.md b/doc/user/project/integrations/jira_cloud_configuration.md index 2e6e8278e64..cae66526175 100644 --- a/doc/user/project/integrations/jira_cloud_configuration.md +++ b/doc/user/project/integrations/jira_cloud_configuration.md @@ -4,16 +4,15 @@ An API token is needed when integrating with JIRA Cloud, follow the steps below to create one: 1. Log in to https://id.atlassian.com with your email. -2. **Click API tokens**, then **Create API token**. +1. **Click API tokens**, then **Create API token**.   -3. Make sure to write down your new API token as you will need it in the next [steps](jira.md#configuring-gitlab). +1. Make sure to write down your new API token as you will need it in the next [steps](jira.md#configuring-gitlab). NOTE: **Note** It is important that the user associated with this email has 'write' access to projects in JIRA. The JIRA configuration is complete. You are going to need this new created token and the email you used to log in when [configuring GitLab in the next section](jira.md#configuring-gitlab). - diff --git a/doc/user/project/integrations/jira_server_configuration.md b/doc/user/project/integrations/jira_server_configuration.md index 7d84ad0b07c..20036183187 100644 --- a/doc/user/project/integrations/jira_server_configuration.md +++ b/doc/user/project/integrations/jira_server_configuration.md @@ -17,17 +17,17 @@ We have split this stage in steps so it is easier to follow.  -2. The next step is to create a new user (e.g., `gitlab`) who has write access +1. The next step is to create a new user (e.g., `gitlab`) who has write access to projects in Jira. Enter the user's name and a _valid_ e-mail address since Jira sends a verification e-mail to set up the password. _**Note:** Jira creates the username automatically by using the e-mail - prefix. You can change it later, if needed. Our integration does not support SSO (such as SAML). You will need to create - an HTTP basic authentication password. You can do this by visiting the user + prefix. You can change it later, if needed. Our integration does not support SSO (such as SAML). You will need to create + an HTTP basic authentication password. You can do this by visiting the user profile, looking up the username, and setting a password._  -3. Create a `gitlab-developers` group which will have write access +1. Create a `gitlab-developers` group which will have write access to projects in Jira. Go to the **Groups** tab and select **Create group**.  @@ -36,13 +36,13 @@ We have split this stage in steps so it is easier to follow.  -4. 
To give the newly-created group 'write' access, go to +1. To give the newly-created group 'write' access, go to **Application access > View configuration** and add the `gitlab-developers` group to Jira Core.  -5. Add the `gitlab` user to the `gitlab-developers` group by going to +1. Add the `gitlab` user to the `gitlab-developers` group by going to **Users > GitLab user > Add group** and selecting the `gitlab-developers` group from the dropdown menu. Notice that the group says _Access_, which is intended as part of this process. diff --git a/doc/user/project/integrations/redmine.md b/doc/user/project/integrations/redmine.md index de2cf6d4647..76a2617125e 100644 --- a/doc/user/project/integrations/redmine.md +++ b/doc/user/project/integrations/redmine.md @@ -18,15 +18,16 @@ in the table below.  -2. To disable the internal issue tracking system in a project, navigate to the General page, expand [Permissions](../settings/index.md#sharing-and-permissions), and slide the Issues switch invalid. +1. To disable the internal issue tracking system in a project, navigate to the General page, expand [Permissions](../settings/index.md#sharing-and-permissions), and slide the Issues switch invalid.  ## Referencing issues in Redmine Issues in Redmine can be referenced in two alternative ways: -1. `#<ID>` where `<ID>` is a number (example `#143`) -2. `<PROJECT>-<ID>` where `<PROJECT>` starts with a capital letter which is + +- `#<ID>` where `<ID>` is a number (example `#143`). +- `<PROJECT>-<ID>` where `<PROJECT>` starts with a capital letter which is then followed by capital letters, numbers or underscores, and `<ID>` is a number (example `API_32-143`). diff --git a/doc/user/project/integrations/webhooks.md b/doc/user/project/integrations/webhooks.md index 7c63967c829..4d1d95da6f0 100644 --- a/doc/user/project/integrations/webhooks.md +++ b/doc/user/project/integrations/webhooks.md @@ -338,10 +338,10 @@ payload will also include information about the target of the comment. For examp a comment on an issue will include the specific issue information under the `issue` key. Valid target types: -1. `commit` -2. `merge_request` -3. `issue` -4. `snippet` +- `commit` +- `merge_request` +- `issue` +- `snippet` #### Comment on commit diff --git a/doc/user/project/issues/automatic_issue_closing.md b/doc/user/project/issues/automatic_issue_closing.md index b6570c777ae..afb7d9ada5f 100644 --- a/doc/user/project/issues/automatic_issue_closing.md +++ b/doc/user/project/issues/automatic_issue_closing.md @@ -27,10 +27,11 @@ used: Note that `%{issue_ref}` is a complex regular expression defined inside GitLab's source code that can match references to: -1. a local issue (`#123`), -2. a cross-project issue (`group/project#123`) -3. a link to an issue -(`https://gitlab.example.com/group/project/issues/123`). + +- A local issue (`#123`). +- A cross-project issue (`group/project#123`). +- A link to an issue + (`https://gitlab.example.com/group/project/issues/123`). 
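As an illustration, assuming the default closing pattern (which recognizes keywords such as `Closes` and `Fixes`), a commit message like the one below would close a local issue and a cross-project issue once it reaches the project's default branch. The issue numbers are hypothetical:

```sh
# Hypothetical issue references, following the %{issue_ref} forms listed above.
git commit -m "Fix avatar rendering

Closes #123, group/project#456"
```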
--- diff --git a/doc/user/project/merge_requests/img/comment-on-any-diff-line.png b/doc/user/project/merge_requests/img/comment-on-any-diff-line.png Binary files differnew file mode 100644 index 00000000000..856ede41527 --- /dev/null +++ b/doc/user/project/merge_requests/img/comment-on-any-diff-line.png diff --git a/doc/user/project/merge_requests/index.md b/doc/user/project/merge_requests/index.md index 6de2ab07fc4..f2f2497f0be 100644 --- a/doc/user/project/merge_requests/index.md +++ b/doc/user/project/merge_requests/index.md @@ -141,6 +141,15 @@ you hide discussions that are no longer relevant. [Read more about resolving discussion comments in merge requests reviews.](../../discussions/index.md) +## Commenting on any file line in merge requests + +> [Introduced](https://gitlab.com/gitlab-org/gitlab-ce/issues/13950) in GitLab 11.5. + +GitLab provides a way of leaving comments in any part of the file being changed +in a Merge Request. To do so, click the **...** button in the gutter of the Merge Request diff UI to expand the diff lines and leave a comment, just as you would for a changed line. + + + ## Resolve conflicts When a merge request has conflicts, GitLab may provide the option to resolve diff --git a/doc/user/project/new_ci_build_permissions_model.md b/doc/user/project/new_ci_build_permissions_model.md index 23d5b34504c..9a53036b4d1 100644 --- a/doc/user/project/new_ci_build_permissions_model.md +++ b/doc/user/project/new_ci_build_permissions_model.md @@ -60,7 +60,7 @@ Let's consider the following scenario: hosted in private repositories and you have multiple CI jobs that make use of these repositories. -2. You invite a new [external user][ext]. CI jobs created by that user do not +1. You invite a new [external user][ext]. CI jobs created by that user do not have access to internal repositories, because the user also doesn't have the access from within GitLab. You as an employee have to grant explicit access for this user. This allows us to prevent from accidental data leakage. diff --git a/doc/user/project/repository/index.md b/doc/user/project/repository/index.md index 6d822d3f7f2..1710bba2fd0 100644 --- a/doc/user/project/repository/index.md +++ b/doc/user/project/repository/index.md @@ -53,17 +53,35 @@ To get started with the command line, please read through the Use GitLab's [file finder](../../../workflow/file_finder.md) to search for files in a repository. +### Supported markup languages and extensions + +GitLab supports a number of markup languages (sometimes called [lightweight +markup languages](https://en.wikipedia.org/wiki/Lightweight_markup_language)) +that you can use for the content of your files in a repository. They are mostly +used for documentation purposes. + +Just pick the right extension for your files and GitLab will render them +according to the markup language. 
+ +| Markup language | Extensions | +| --------------- | ---------- | +| Plain text | `txt` | +| [Markdown](../../markdown.md) | `mdown`, `mkd`, `mkdn`, `md`, `markdown` | +| [reStructuredText](http://docutils.sourceforge.net/rst.html) | `rst` | +| [Asciidoc](https://asciidoctor.org/docs/what-is-asciidoc/) | `adoc`, `ad`, `asciidoc` | +| [Textile](https://txstyle.org/) | `textile` | +| [rdoc](http://rdoc.sourceforge.net/doc/index.html) | `rdoc` | +| [Orgmode](https://orgmode.org/) | `org` | +| [creole](http://www.wikicreole.org/) | `creole` | +| [Mediawiki](https://www.mediawiki.org/wiki/MediaWiki) | `wiki`, `mediawiki` | + ### Repository README and index files When a `README` or `index` file is present in a repository, its contents will be automatically pre-rendered by GitLab without opening it. -They can either be plain text or have an extension of a supported markup language: - -- Asciidoc: `README.adoc` or `index.adoc` -- Markdown: `README.md` or `index.md` -- reStructuredText: `README.rst` or `index.rst` -- Text: `README.txt` or `index.txt` +They can either be plain text or have an extension of a +[supported markup language](#supported-markup-languages-and-extensions): Some things to note about precedence: @@ -75,10 +93,6 @@ Some things to note about precedence: precedence over `README.md`, and `README.rst` will take precedence over `README`. -NOTE: **Note:** -`index` files without an extension will not automatically pre-render. You'll -have to explicitly open them to see their contents. - ### Jupyter Notebook files > [Introduced](https://gitlab.com/gitlab-org/gitlab-ce/issues/2508) in GitLab 9.1 diff --git a/doc/user/project/settings/index.md b/doc/user/project/settings/index.md index 084d1161633..d6754372816 100644 --- a/doc/user/project/settings/index.md +++ b/doc/user/project/settings/index.md @@ -18,6 +18,8 @@ Adjust your project's name, description, avatar, [default branch](../repository/  +The project description also partially supports [standard markdown](../../markdown.md#standard-markdown). You can use [emphasis](../../markdown.md#emphasis), [links](../../markdown.md#links), and [line-breaks](../../markdown.md#line-breaks) to add more context to the project description. + ### Sharing and permissions Set up your project's access, [visibility](../../../public_access/public_access.md), and enable [Container Registry](../container_registry.md) for your projects: diff --git a/doc/workflow/shortcuts.md b/doc/workflow/shortcuts.md index b2f1cbec204..7863dd8c242 100644 --- a/doc/workflow/shortcuts.md +++ b/doc/workflow/shortcuts.md @@ -93,4 +93,4 @@ You can see GitLab's keyboard shortcuts by using 'shift + ?' 
| Keyboard Shortcut | Description | | ----------------- | ----------- | -| <kbd>⌘</kbd> + <kbd>p</kbd> | Go to file | +| <kbd>Cmd</kbd>/<kbd>Ctrl</kbd> + <kbd>p</kbd> | Go to file | diff --git a/lib/api/commits.rb b/lib/api/commits.rb index 3b8f3fedccf..337b92a6183 100644 --- a/lib/api/commits.rb +++ b/lib/api/commits.rb @@ -207,7 +207,7 @@ module API end desc 'Revert a commit in a branch' do - detail 'This feature was introduced in GitLab 11.6' + detail 'This feature was introduced in GitLab 11.5' success Entities::Commit end params do diff --git a/lib/api/entities.rb b/lib/api/entities.rb index 61d57c643f0..5572e86985c 100644 --- a/lib/api/entities.rb +++ b/lib/api/entities.rb @@ -1263,7 +1263,11 @@ module API expose :token end - class ImpersonationToken < PersonalAccessTokenWithToken + class ImpersonationToken < PersonalAccessToken + expose :impersonation + end + + class ImpersonationTokenWithToken < PersonalAccessTokenWithToken expose :impersonation end diff --git a/lib/api/users.rb b/lib/api/users.rb index 2a56506f3a5..b41fce76df0 100644 --- a/lib/api/users.rb +++ b/lib/api/users.rb @@ -531,7 +531,7 @@ module API desc 'Create a impersonation token. Available only for admins.' do detail 'This feature was introduced in GitLab 9.0' - success Entities::ImpersonationToken + success Entities::ImpersonationTokenWithToken end params do requires :name, type: String, desc: 'The name of the impersonation token' @@ -542,7 +542,7 @@ module API impersonation_token = finder.build(declared_params(include_missing: false)) if impersonation_token.save - present impersonation_token, with: Entities::ImpersonationToken + present impersonation_token, with: Entities::ImpersonationTokenWithToken else render_validation_error!(impersonation_token) end diff --git a/lib/banzai/filter/absolute_link_filter.rb b/lib/banzai/filter/absolute_link_filter.rb index 04ec568eee3..a9bdb004c4b 100644 --- a/lib/banzai/filter/absolute_link_filter.rb +++ b/lib/banzai/filter/absolute_link_filter.rb @@ -29,6 +29,7 @@ module Banzai end def absolute_link_attr(uri) + # Here we really want to expand relative path to absolute path URI.join(Gitlab.config.gitlab.url, uri).to_s end end diff --git a/lib/bitbucket_server/connection.rb b/lib/bitbucket_server/connection.rb index 45a437844bd..7efcdcf8619 100644 --- a/lib/bitbucket_server/connection.rb +++ b/lib/bitbucket_server/connection.rb @@ -88,35 +88,19 @@ module BitbucketServer def build_url(path) return path if path.starts_with?(root_url) - url_join_paths(root_url, path) + Gitlab::Utils.append_path(root_url, path) end def root_url - url_join_paths(base_uri, "/rest/api/#{api_version}") + Gitlab::Utils.append_path(base_uri, "rest/api/#{api_version}") end def delete_url(resource, path) if resource == :branches - url_join_paths(base_uri, "/rest/branch-utils/#{api_version}#{path}") + Gitlab::Utils.append_path(base_uri, "rest/branch-utils/#{api_version}#{path}") else build_url(path) end end - - # URI.join is stupid in that slashes are important: - # - # # URI.join('http://example.com/subpath', 'hello') - # => http://example.com/hello - # - # We really want http://example.com/subpath/hello - # - def url_join_paths(*paths) - paths.map { |path| strip_slashes(path) }.join(SEPARATOR) - end - - def strip_slashes(path) - path = path[1..-1] if path.starts_with?(SEPARATOR) - path.chomp(SEPARATOR) - end end end diff --git a/lib/gitlab/checks/commit_check.rb b/lib/gitlab/checks/commit_check.rb index 6dd74e8fb74..58267b6752f 100644 --- a/lib/gitlab/checks/commit_check.rb +++ 
b/lib/gitlab/checks/commit_check.rb @@ -10,8 +10,8 @@ module Gitlab def initialize(project, user, newrev, oldrev) @project = project @user = user - @newrev = user - @oldrev = user + @newrev = newrev + @oldrev = oldrev @file_paths = [] end diff --git a/lib/gitlab/ci/build/policy/changes.rb b/lib/gitlab/ci/build/policy/changes.rb index 7bf51519752..1663c875426 100644 --- a/lib/gitlab/ci/build/policy/changes.rb +++ b/lib/gitlab/ci/build/policy/changes.rb @@ -14,7 +14,7 @@ module Gitlab pipeline.modified_paths.any? do |path| @globs.any? do |glob| - File.fnmatch?(glob, path, File::FNM_PATHNAME | File::FNM_DOTMATCH) + File.fnmatch?(glob, path, File::FNM_PATHNAME | File::FNM_DOTMATCH | File::FNM_EXTGLOB) end end end diff --git a/lib/gitlab/ci/parsers/test/junit.rb b/lib/gitlab/ci/parsers/test/junit.rb index ed5a79d9b9b..2791730fd26 100644 --- a/lib/gitlab/ci/parsers/test/junit.rb +++ b/lib/gitlab/ci/parsers/test/junit.rb @@ -14,7 +14,7 @@ module Gitlab test_case = create_test_case(test_case) test_suite.add_test_case(test_case) end - rescue REXML::ParseException + rescue Nokogiri::XML::SyntaxError raise JunitParserError, "XML parsing failed" rescue raise JunitParserError, "JUnit parsing failed" diff --git a/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml b/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml index c759bb7098e..149506ea498 100644 --- a/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml @@ -27,7 +27,7 @@ # Continuous deployment to production is enabled by default. # If you want to deploy to staging first, set STAGING_ENABLED environment variable. # If you want to enable incremental rollout, either manual or time based, -# set INCREMENTAL_ROLLOUT_TYPE environment variable to "manual" or "timed". +# set INCREMENTAL_ROLLOUT_MODE environment variable to "manual" or "timed". # If you want to use canary deployments, set CANARY_ENABLED environment variable. 
# # If Auto DevOps fails to detect the proper buildpack, or if you want to @@ -116,7 +116,9 @@ code_quality: license_management: stage: test - image: "registry.gitlab.com/gitlab-org/security-products/license-management:$CI_SERVER_VERSION_MAJOR-$CI_SERVER_VERSION_MINOR-stable" + image: + name: "registry.gitlab.com/gitlab-org/security-products/license-management:$CI_SERVER_VERSION_MAJOR-$CI_SERVER_VERSION_MINOR-stable" + entrypoint: [""] allow_failure: true script: - license_management @@ -147,10 +149,10 @@ performance: only: refs: - branches - kubernetes: active except: variables: - $PERFORMANCE_DISABLED + - $KUBECONFIG == null sast: stage: test @@ -225,7 +227,6 @@ dast: only: refs: - branches - kubernetes: active variables: - $GITLAB_FEATURES =~ /\bdast\b/ except: @@ -233,6 +234,7 @@ dast: - master variables: - $DAST_DISABLED + - $KUBECONFIG == null review: stage: review @@ -254,12 +256,12 @@ review: only: refs: - branches - kubernetes: active except: refs: - master variables: - $REVIEW_DISABLED + - $KUBECONFIG == null stop_review: stage: cleanup @@ -277,12 +279,12 @@ stop_review: only: refs: - branches - kubernetes: active except: refs: - master variables: - $REVIEW_DISABLED + - $KUBECONFIG == null # Staging deploys are disabled by default since # continuous deployment to production is enabled by default @@ -306,9 +308,11 @@ staging: only: refs: - master - kubernetes: active variables: - $STAGING_ENABLED + except: + variables: + - $KUBECONFIG == null # Canaries are also disabled by default, but if you want them, # and know what the downsides are, you can enable this by setting @@ -331,9 +335,11 @@ canary: only: refs: - master - kubernetes: active variables: - $CANARY_ENABLED + except: + variables: + - $KUBECONFIG == null .production: &production_template stage: production @@ -359,13 +365,13 @@ production: only: refs: - master - kubernetes: active except: variables: - $STAGING_ENABLED - $CANARY_ENABLED - $INCREMENTAL_ROLLOUT_ENABLED - $INCREMENTAL_ROLLOUT_MODE + - $KUBECONFIG == null production_manual: <<: *production_template @@ -374,7 +380,6 @@ production_manual: only: refs: - master - kubernetes: active variables: - $STAGING_ENABLED - $CANARY_ENABLED @@ -382,6 +387,7 @@ production_manual: variables: - $INCREMENTAL_ROLLOUT_ENABLED - $INCREMENTAL_ROLLOUT_MODE + - $KUBECONFIG == null # This job implements incremental rollout on for every push to `master`. @@ -411,13 +417,13 @@ production_manual: only: refs: - master - kubernetes: active variables: - $INCREMENTAL_ROLLOUT_MODE == "manual" - $INCREMENTAL_ROLLOUT_ENABLED except: variables: - $INCREMENTAL_ROLLOUT_MODE == "timed" + - $KUBECONFIG == null .timed_rollout_template: &timed_rollout_template <<: *rollout_template @@ -426,9 +432,11 @@ production_manual: only: refs: - master - kubernetes: active variables: - $INCREMENTAL_ROLLOUT_MODE == "timed" + except: + variables: + - $KUBECONFIG == null timed rollout 10%: <<: *timed_rollout_template diff --git a/lib/gitlab/diff/file.rb b/lib/gitlab/diff/file.rb index f3bd8b69869..8ba44dff06f 100644 --- a/lib/gitlab/diff/file.rb +++ b/lib/gitlab/diff/file.rb @@ -28,6 +28,7 @@ module Gitlab @repository = repository @diff_refs = diff_refs @fallback_diff_refs = fallback_diff_refs + @unfolded = false # Ensure items are collected in the the batch new_blob_lazy @@ -137,6 +138,24 @@ module Gitlab Gitlab::Diff::Parser.new.parse(raw_diff.each_line, diff_file: self).to_a end + # Changes diff_lines according to the given position. 
That is, + # it checks whether the position requires blob lines into the diff + # in order to be presented. + def unfold_diff_lines(position) + return unless position + + unfolder = Gitlab::Diff::LinesUnfolder.new(self, position) + + if unfolder.unfold_required? + @diff_lines = unfolder.unfolded_diff_lines + @unfolded = true + end + end + + def unfolded? + @unfolded + end + def highlighted_diff_lines @highlighted_diff_lines ||= Gitlab::Diff::Highlight.new(self, repository: self.repository).highlight diff --git a/lib/gitlab/diff/line.rb b/lib/gitlab/diff/line.rb index 74fed7c4b1b..f0c4977fc50 100644 --- a/lib/gitlab/diff/line.rb +++ b/lib/gitlab/diff/line.rb @@ -5,9 +5,9 @@ module Gitlab class Line SERIALIZE_KEYS = %i(line_code rich_text text type index old_pos new_pos).freeze - attr_reader :line_code, :type, :index, :old_pos, :new_pos + attr_reader :line_code, :type, :old_pos, :new_pos attr_writer :rich_text - attr_accessor :text + attr_accessor :text, :index def initialize(text, type, index, old_pos, new_pos, parent_file: nil, line_code: nil, rich_text: nil) @text, @type, @index = text, type, index @@ -21,7 +21,14 @@ module Gitlab end def self.init_from_hash(hash) - new(hash[:text], hash[:type], hash[:index], hash[:old_pos], hash[:new_pos], line_code: hash[:line_code], rich_text: hash[:rich_text]) + new(hash[:text], + hash[:type], + hash[:index], + hash[:old_pos], + hash[:new_pos], + parent_file: hash[:parent_file], + line_code: hash[:line_code], + rich_text: hash[:rich_text]) end def to_hash diff --git a/lib/gitlab/diff/lines_unfolder.rb b/lib/gitlab/diff/lines_unfolder.rb new file mode 100644 index 00000000000..9306b7e16a2 --- /dev/null +++ b/lib/gitlab/diff/lines_unfolder.rb @@ -0,0 +1,235 @@ +# frozen_string_literal: true + +# Given a position, calculates which Blob lines should be extracted, treated and +# injected in the current diff file lines in order to present a "unfolded" diff. +module Gitlab + module Diff + class LinesUnfolder + include Gitlab::Utils::StrongMemoize + + UNFOLD_CONTEXT_SIZE = 3 + + def initialize(diff_file, position) + @diff_file = diff_file + @blob = diff_file.old_blob + @position = position + @generate_top_match_line = true + @generate_bottom_match_line = true + + # These methods update `@generate_top_match_line` and + # `@generate_bottom_match_line`. + @from_blob_line = calculate_from_blob_line! + @to_blob_line = calculate_to_blob_line! + end + + # Returns merged diff lines with required blob lines with correct + # positions. + def unfolded_diff_lines + strong_memoize(:unfolded_diff_lines) do + next unless unfold_required? + + merged_diff_with_blob_lines + end + end + + # Returns the extracted lines from the old blob which should be merged + # with the current diff lines. + def blob_lines + strong_memoize(:blob_lines) do + # Blob lines, unlike diffs, doesn't start with an empty space for + # unchanged line, so the parsing and highlighting step can get fuzzy + # without the following change. + line_prefix = ' ' + blob_as_diff_lines = @blob.data.each_line.map { |line| "#{line_prefix}#{line}" } + + lines = Gitlab::Diff::Parser.new.parse(blob_as_diff_lines, diff_file: @diff_file).to_a + + from = from_blob_line - 1 + to = to_blob_line - 1 + + lines[from..to] + end + end + + def unfold_required? + strong_memoize(:unfold_required) do + next false unless @diff_file.text? + next false unless @position.unchanged? + next false if @diff_file.new_file? || @diff_file.deleted_file? 
+ next false unless @position.old_line + # Invalid position (MR import scenario) + next false if @position.old_line > @blob.lines.size + next false if @diff_file.diff_lines.empty? + next false if @diff_file.line_for_position(@position) + next false unless unfold_line + + true + end + end + + private + + attr_reader :from_blob_line, :to_blob_line + + def merged_diff_with_blob_lines + lines = @diff_file.diff_lines + match_line = unfold_line + insert_index = bottom? ? -1 : match_line.index + + lines -= [match_line] unless bottom? + + lines.insert(insert_index, *blob_lines_with_matches) + + # The inserted blob lines have invalid indexes, so we need + # to reindex them. + reindex(lines) + + lines + end + + # Returns 'unchanged' blob lines with recalculated `old_pos` and + # `new_pos` and the recalculated new match line (needed if we for instance + # we unfolded once, but there are still folded lines). + def blob_lines_with_matches + old_pos = from_blob_line + new_pos = from_blob_line + offset + + new_blob_lines = [] + + new_blob_lines.push(top_blob_match_line) if top_blob_match_line + + blob_lines.each do |line| + new_blob_lines << Gitlab::Diff::Line.new(line.text, line.type, nil, old_pos, new_pos, + parent_file: @diff_file) + + old_pos += 1 + new_pos += 1 + end + + new_blob_lines.push(bottom_blob_match_line) if bottom_blob_match_line + + new_blob_lines + end + + def reindex(lines) + lines.each_with_index { |line, i| line.index = i } + end + + def top_blob_match_line + strong_memoize(:top_blob_match_line) do + next unless @generate_top_match_line + + old_pos = from_blob_line + new_pos = from_blob_line + offset + + build_match_line(old_pos, new_pos) + end + end + + def bottom_blob_match_line + strong_memoize(:bottom_blob_match_line) do + # The bottom line match addition is already handled on + # Diff::File#diff_lines_for_serializer + next if bottom? + next unless @generate_bottom_match_line + + position = line_after_unfold_position.old_pos + + old_pos = position + new_pos = position + offset + + build_match_line(old_pos, new_pos) + end + end + + def build_match_line(old_pos, new_pos) + blob_lines_length = blob_lines.length + old_line_ref = [old_pos, blob_lines_length].join(',') + new_line_ref = [new_pos, blob_lines_length].join(',') + new_match_line_str = "@@ -#{old_line_ref}+#{new_line_ref} @@" + + Gitlab::Diff::Line.new(new_match_line_str, 'match', nil, old_pos, new_pos) + end + + # Returns the first line position that should be extracted + # from `blob_lines`. + def calculate_from_blob_line! + return unless unfold_required? + + from = comment_position - UNFOLD_CONTEXT_SIZE + + # There's no line before the match if it's in the top-most + # position. + prev_line_number = line_before_unfold_position&.old_pos || 0 + + if from <= prev_line_number + 1 + @generate_top_match_line = false + from = prev_line_number + 1 + end + + from + end + + # Returns the last line position that should be extracted + # from `blob_lines`. + def calculate_to_blob_line! + return unless unfold_required? + + to = comment_position + UNFOLD_CONTEXT_SIZE + + return to if bottom? 
+ + next_line_number = line_after_unfold_position.old_pos + + if to >= next_line_number - 1 + @generate_bottom_match_line = false + to = next_line_number - 1 + end + + to + end + + def offset + unfold_line.new_pos - unfold_line.old_pos + end + + def line_before_unfold_position + return unless index = unfold_line&.index + + @diff_file.diff_lines[index - 1] if index > 0 + end + + def line_after_unfold_position + return unless index = unfold_line&.index + + @diff_file.diff_lines[index + 1] if index >= 0 + end + + def bottom? + strong_memoize(:bottom) do + @position.old_line > last_line.old_pos + end + end + + # Returns the line which needed to be expanded in order to send a comment + # in `@position`. + def unfold_line + strong_memoize(:unfold_line) do + next last_line if bottom? + + @diff_file.diff_lines.find do |line| + line.old_pos > comment_position && line.type == 'match' + end + end + end + + def comment_position + @position.old_line + end + + def last_line + @diff_file.diff_lines.last + end + end + end +end diff --git a/lib/gitlab/diff/position.rb b/lib/gitlab/diff/position.rb index 9c4d9377593..e8f98f52111 100644 --- a/lib/gitlab/diff/position.rb +++ b/lib/gitlab/diff/position.rb @@ -103,6 +103,10 @@ module Gitlab @diff_refs ||= DiffRefs.new(base_sha: base_sha, start_sha: start_sha, head_sha: head_sha) end + def unfolded_diff?(repository) + diff_file(repository)&.unfolded? + end + def diff_file(repository) return @diff_file if defined?(@diff_file) @@ -136,7 +140,13 @@ module Gitlab return unless diff_refs.complete? return unless comparison = diff_refs.compare_in(repository.project) - comparison.diffs(diff_options).diff_files.first + file = comparison.diffs(diff_options).diff_files.first + + # We need to unfold diff lines according to the position in order + # to correctly calculate the line code and trace position changes. 
+ file&.unfold_diff_lines(self) + + file end def get_formatter_class(type) diff --git a/lib/gitlab/file_detector.rb b/lib/gitlab/file_detector.rb index d6338b09e3d..2770469ca9f 100644 --- a/lib/gitlab/file_detector.rb +++ b/lib/gitlab/file_detector.rb @@ -8,7 +8,7 @@ module Gitlab module FileDetector PATTERNS = { # Project files - readme: %r{\A(readme|index)[^/]*\z}i, + readme: /\A(#{Regexp.union(*Gitlab::MarkupHelper::PLAIN_FILENAMES).source})(\.(#{Regexp.union(*Gitlab::MarkupHelper::EXTENSIONS).source}))?\z/i, changelog: %r{\A(changelog|history|changes|news)[^/]*\z}i, license: %r{\A((un)?licen[sc]e|copying)(\.[^/]+)?\z}i, contributing: %r{\Acontributing[^/]*\z}i, diff --git a/lib/gitlab/fogbugz_import/client.rb b/lib/gitlab/fogbugz_import/client.rb index acb000e3e23..dd747a79673 100644 --- a/lib/gitlab/fogbugz_import/client.rb +++ b/lib/gitlab/fogbugz_import/client.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require 'fogbugz' module Gitlab diff --git a/lib/gitlab/fogbugz_import/importer.rb b/lib/gitlab/fogbugz_import/importer.rb index 98ea5b309a1..431911d1eee 100644 --- a/lib/gitlab/fogbugz_import/importer.rb +++ b/lib/gitlab/fogbugz_import/importer.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module FogbugzImport class Importer diff --git a/lib/gitlab/fogbugz_import/project_creator.rb b/lib/gitlab/fogbugz_import/project_creator.rb index 1918d5b208d..3c71031a8d9 100644 --- a/lib/gitlab/fogbugz_import/project_creator.rb +++ b/lib/gitlab/fogbugz_import/project_creator.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module FogbugzImport class ProjectCreator diff --git a/lib/gitlab/fogbugz_import/repository.rb b/lib/gitlab/fogbugz_import/repository.rb index d1dc63db2b2..b958dcf6cbf 100644 --- a/lib/gitlab/fogbugz_import/repository.rb +++ b/lib/gitlab/fogbugz_import/repository.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module FogbugzImport class Repository diff --git a/lib/gitlab/gfm/reference_rewriter.rb b/lib/gitlab/gfm/reference_rewriter.rb index 455814a9159..08d7db49ad7 100644 --- a/lib/gitlab/gfm/reference_rewriter.rb +++ b/lib/gitlab/gfm/reference_rewriter.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Gfm ## @@ -31,19 +33,19 @@ module Gitlab class ReferenceRewriter RewriteError = Class.new(StandardError) - def initialize(text, source_project, current_user) + def initialize(text, source_parent, current_user) @text = text - @source_project = source_project + @source_parent = source_parent @current_user = current_user @original_html = markdown(text) @pattern = Gitlab::ReferenceExtractor.references_pattern end - def rewrite(target_project) + def rewrite(target_parent) return @text unless needs_rewrite? @text.gsub(@pattern) do |reference| - unfold_reference(reference, Regexp.last_match, target_project) + unfold_reference(reference, Regexp.last_match, target_parent) end end @@ -53,14 +55,14 @@ module Gitlab private - def unfold_reference(reference, match, target_project) + def unfold_reference(reference, match, target_parent) before = @text[0...match.begin(0)] after = @text[match.end(0)..-1] referable = find_referable(reference) return reference unless referable - cross_reference = build_cross_reference(referable, target_project) + cross_reference = build_cross_reference(referable, target_parent) return reference if reference == cross_reference if cross_reference.nil? 
@@ -72,17 +74,17 @@ module Gitlab end def find_referable(reference) - extractor = Gitlab::ReferenceExtractor.new(@source_project, + extractor = Gitlab::ReferenceExtractor.new(@source_parent, @current_user) extractor.analyze(reference) extractor.all.first end - def build_cross_reference(referable, target_project) + def build_cross_reference(referable, target_parent) if referable.respond_to?(:project) - referable.to_reference(target_project) + referable.to_reference(target_parent) else - referable.to_reference(@source_project, target_project: target_project) + referable.to_reference(@source_parent, target_project: target_parent) end end @@ -91,7 +93,7 @@ module Gitlab end def markdown(text) - Banzai.render(text, project: @source_project, no_original_data: true) + Banzai.render(text, project: @source_parent, no_original_data: true) end end end diff --git a/lib/gitlab/gfm/uploads_rewriter.rb b/lib/gitlab/gfm/uploads_rewriter.rb index f7e66697da3..3f06badf5d9 100644 --- a/lib/gitlab/gfm/uploads_rewriter.rb +++ b/lib/gitlab/gfm/uploads_rewriter.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require 'fileutils' module Gitlab @@ -16,14 +18,15 @@ module Gitlab @pattern = FileUploader::MARKDOWN_PATTERN end - def rewrite(target_project) + def rewrite(target_parent) return @text unless needs_rewrite? @text.gsub(@pattern) do |markdown| file = find_file(@source_project, $~[:secret], $~[:file]) break markdown unless file.try(:exists?) - moved = FileUploader.copy_to(file, target_project) + klass = target_parent.is_a?(Namespace) ? NamespaceFileUploader : FileUploader + moved = klass.copy_to(file, target_parent) moved.markdown_link end end diff --git a/lib/gitlab/git/attributes_at_ref_parser.rb b/lib/gitlab/git/attributes_at_ref_parser.rb index 26b5bd520d5..cbddf836ce8 100644 --- a/lib/gitlab/git/attributes_at_ref_parser.rb +++ b/lib/gitlab/git/attributes_at_ref_parser.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Git # Parses root .gitattributes file at a given ref diff --git a/lib/gitlab/git/attributes_parser.rb b/lib/gitlab/git/attributes_parser.rb index 08f4d7d4f5c..8b9d74ae8e7 100644 --- a/lib/gitlab/git/attributes_parser.rb +++ b/lib/gitlab/git/attributes_parser.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Git # Class for parsing Git attribute files and extracting the attributes for diff --git a/lib/gitlab/git/blame.rb b/lib/gitlab/git/blame.rb index e25e15f5c80..b118eda37f8 100644 --- a/lib/gitlab/git/blame.rb +++ b/lib/gitlab/git/blame.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Git class Blame diff --git a/lib/gitlab/git/blob.rb b/lib/gitlab/git/blob.rb index 0bd1d3420a2..9dd1c484d59 100644 --- a/lib/gitlab/git/blob.rb +++ b/lib/gitlab/git/blob.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + # Gitaly note: JV: seems to be completely migrated (behind feature flags). 
module Gitlab diff --git a/lib/gitlab/git/branch.rb b/lib/gitlab/git/branch.rb index 6351cfb83e3..9447cfa0fb6 100644 --- a/lib/gitlab/git/branch.rb +++ b/lib/gitlab/git/branch.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Git class Branch < Ref diff --git a/lib/gitlab/git/commit.rb b/lib/gitlab/git/commit.rb index 2820491b65d..4f05c4b73a1 100644 --- a/lib/gitlab/git/commit.rb +++ b/lib/gitlab/git/commit.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + # Gitlab::Git::Commit is a wrapper around Gitaly::GitCommit module Gitlab module Git diff --git a/lib/gitlab/git/commit_stats.rb b/lib/gitlab/git/commit_stats.rb index 83a9fd5f81a..8815088d23c 100644 --- a/lib/gitlab/git/commit_stats.rb +++ b/lib/gitlab/git/commit_stats.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + # Gitlab::Git::CommitStats counts the additions, deletions, and total changes # in a commit. module Gitlab diff --git a/lib/gitlab/git/compare.rb b/lib/gitlab/git/compare.rb index 7cb842256d0..ab5245ba7cb 100644 --- a/lib/gitlab/git/compare.rb +++ b/lib/gitlab/git/compare.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + # Gitaly note: JV: no RPC's here. module Gitlab diff --git a/lib/gitlab/git/conflict/file.rb b/lib/gitlab/git/conflict/file.rb index f08dab59ce4..7ffe4a7ae81 100644 --- a/lib/gitlab/git/conflict/file.rb +++ b/lib/gitlab/git/conflict/file.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Git module Conflict diff --git a/lib/gitlab/git/conflict/parser.rb b/lib/gitlab/git/conflict/parser.rb index fb5717dd556..20de8ebde4e 100644 --- a/lib/gitlab/git/conflict/parser.rb +++ b/lib/gitlab/git/conflict/parser.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Git module Conflict diff --git a/lib/gitlab/git/conflict/resolution.rb b/lib/gitlab/git/conflict/resolution.rb index ab9be683e15..04299a2d10c 100644 --- a/lib/gitlab/git/conflict/resolution.rb +++ b/lib/gitlab/git/conflict/resolution.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Git module Conflict diff --git a/lib/gitlab/git/conflict/resolver.rb b/lib/gitlab/git/conflict/resolver.rb index 307f1b8cb66..26e82643a4c 100644 --- a/lib/gitlab/git/conflict/resolver.rb +++ b/lib/gitlab/git/conflict/resolver.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Git module Conflict diff --git a/lib/gitlab/git/diff.rb b/lib/gitlab/git/diff.rb index b2e2d49dd0b..74a4633424f 100644 --- a/lib/gitlab/git/diff.rb +++ b/lib/gitlab/git/diff.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Git class Diff diff --git a/lib/gitlab/git/diff_collection.rb b/lib/gitlab/git/diff_collection.rb index 47ebca7c4a2..5c70cb6c66c 100644 --- a/lib/gitlab/git/diff_collection.rb +++ b/lib/gitlab/git/diff_collection.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + # Gitaly note: JV: no RPC's here. module Gitlab diff --git a/lib/gitlab/git/gitmodules_parser.rb b/lib/gitlab/git/gitmodules_parser.rb index 4b505312f60..575e12390cd 100644 --- a/lib/gitlab/git/gitmodules_parser.rb +++ b/lib/gitlab/git/gitmodules_parser.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + # Gitaly note: JV: no RPC's here. module Gitlab diff --git a/lib/gitlab/git/hook_env.rb b/lib/gitlab/git/hook_env.rb index 620568d8817..892a069a3b7 100644 --- a/lib/gitlab/git/hook_env.rb +++ b/lib/gitlab/git/hook_env.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + # Gitaly note: JV: no RPC's here. 
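A note on the `# frozen_string_literal: true` magic comments being added across lib/gitlab/git and lib/gitlab/gitaly_client in this diff: with the comment, string literals are frozen, so a literal can no longer serve as a mutable buffer. That is why later hunks switch from `data = ''` with `<<` to an array that is joined once, or prefix literals with a unary plus. An illustrative sketch (not code from this diff):

    # frozen_string_literal: true

    frozen  = ''       # frozen under the magic comment
    mutable = +''      # unary plus returns an unfrozen copy

    frozen.frozen?     # => true
    mutable.frozen?    # => false
    mutable << 'data'  # fine; appending to `frozen` would raise FrozenError

    # Buffer pattern used in the Gitaly client hunks: collect chunks, join once.
    chunks = []
    chunks << 'first chunk'
    chunks << 'second chunk'
    data = chunks.join
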
module Gitlab diff --git a/lib/gitlab/git/index.rb b/lib/gitlab/git/index.rb index c2e4274e3ee..3b9b516308f 100644 --- a/lib/gitlab/git/index.rb +++ b/lib/gitlab/git/index.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Git class Index diff --git a/lib/gitlab/git/lfs_changes.rb b/lib/gitlab/git/lfs_changes.rb index d7148165408..8e2a925dfea 100644 --- a/lib/gitlab/git/lfs_changes.rb +++ b/lib/gitlab/git/lfs_changes.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Git class LfsChanges diff --git a/lib/gitlab/git/lfs_pointer_file.rb b/lib/gitlab/git/lfs_pointer_file.rb index 2ae0a889590..b7019a221ac 100644 --- a/lib/gitlab/git/lfs_pointer_file.rb +++ b/lib/gitlab/git/lfs_pointer_file.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Git class LfsPointerFile diff --git a/lib/gitlab/git/operation_service.rb b/lib/gitlab/git/operation_service.rb index 0584629ac84..8797d3dce24 100644 --- a/lib/gitlab/git/operation_service.rb +++ b/lib/gitlab/git/operation_service.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Git class OperationService diff --git a/lib/gitlab/git/path_helper.rb b/lib/gitlab/git/path_helper.rb index 57b82a37d6c..e3a2031eeca 100644 --- a/lib/gitlab/git/path_helper.rb +++ b/lib/gitlab/git/path_helper.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + # Gitaly note: JV: no RPC's here. module Gitlab diff --git a/lib/gitlab/git/pre_receive_error.rb b/lib/gitlab/git/pre_receive_error.rb index ac1ab7c39d5..03caace6fce 100644 --- a/lib/gitlab/git/pre_receive_error.rb +++ b/lib/gitlab/git/pre_receive_error.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Git # diff --git a/lib/gitlab/git/raw_diff_change.rb b/lib/gitlab/git/raw_diff_change.rb index 98de9328071..e1002af40f6 100644 --- a/lib/gitlab/git/raw_diff_change.rb +++ b/lib/gitlab/git/raw_diff_change.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Git # This class behaves like a struct with fields :blob_id, :blob_size, :operation, :old_path, :new_path diff --git a/lib/gitlab/git/ref.rb b/lib/gitlab/git/ref.rb index 31a280155bd..eec91194949 100644 --- a/lib/gitlab/git/ref.rb +++ b/lib/gitlab/git/ref.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Git class Ref diff --git a/lib/gitlab/git/remote_mirror.rb b/lib/gitlab/git/remote_mirror.rb index 7f9520de5ce..e992d522e7f 100644 --- a/lib/gitlab/git/remote_mirror.rb +++ b/lib/gitlab/git/remote_mirror.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Git class RemoteMirror diff --git a/lib/gitlab/git/remote_repository.rb b/lib/gitlab/git/remote_repository.rb index f40e59a8dd0..234541d8145 100644 --- a/lib/gitlab/git/remote_repository.rb +++ b/lib/gitlab/git/remote_repository.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Git # diff --git a/lib/gitlab/git/repository.rb b/lib/gitlab/git/repository.rb index 1642c4c5687..993955d1a6b 100644 --- a/lib/gitlab/git/repository.rb +++ b/lib/gitlab/git/repository.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require 'tempfile' require 'forwardable' require "rubygems/package" @@ -419,13 +421,17 @@ module Gitlab end def diff_stats(left_id, right_id) + if [left_id, right_id].any? { |ref| ref.blank? 
|| Gitlab::Git.blank_ref?(ref) } + return empty_diff_stats + end + stats = wrapped_gitaly_errors do gitaly_commit_client.diff_stats(left_id, right_id) end Gitlab::Git::DiffStatsCollection.new(stats) rescue CommandError, TypeError - Gitlab::Git::DiffStatsCollection.new([]) + empty_diff_stats end # Returns a RefName for a given SHA @@ -962,6 +968,10 @@ module Gitlab private + def empty_diff_stats + Gitlab::Git::DiffStatsCollection.new([]) + end + def uncached_has_local_branches? wrapped_gitaly_errors do gitaly_repository_client.has_local_branches? diff --git a/lib/gitlab/git/repository_mirroring.rb b/lib/gitlab/git/repository_mirroring.rb index 752a91fbb60..7e63a6dc7cb 100644 --- a/lib/gitlab/git/repository_mirroring.rb +++ b/lib/gitlab/git/repository_mirroring.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Git module RepositoryMirroring diff --git a/lib/gitlab/git/tag.rb b/lib/gitlab/git/tag.rb index bbf2ecdb1fa..ade708d0541 100644 --- a/lib/gitlab/git/tag.rb +++ b/lib/gitlab/git/tag.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Git class Tag < Ref diff --git a/lib/gitlab/git/tree.rb b/lib/gitlab/git/tree.rb index b5b701699f0..51542bcaaa2 100644 --- a/lib/gitlab/git/tree.rb +++ b/lib/gitlab/git/tree.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Git class Tree diff --git a/lib/gitlab/git/user.rb b/lib/gitlab/git/user.rb index 338e1a30c45..2c798844798 100644 --- a/lib/gitlab/git/user.rb +++ b/lib/gitlab/git/user.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Git class User diff --git a/lib/gitlab/git/util.rb b/lib/gitlab/git/util.rb index 4708f22dcb3..03c2c1367b0 100644 --- a/lib/gitlab/git/util.rb +++ b/lib/gitlab/git/util.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + # Gitaly note: JV: no RPC's here. 
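The Gitlab::Git::Repository#diff_stats hunk above now short-circuits before calling Gitaly when either ref is blank or the all-zeros ref, instead of relying only on the rescue below it. A hypothetical call sketch (repository stands for a Gitlab::Git::Repository instance, and the ref names are invented for illustration):

    # Both calls return an empty DiffStatsCollection without issuing a Gitaly RPC.
    repository.diff_stats(nil, 'master')
    repository.diff_stats(Gitlab::Git::BLANK_SHA, 'master')

    # Real refs still go through gitaly_commit_client.diff_stats as before.
    repository.diff_stats('v11.3.0', 'master')
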
module Gitlab diff --git a/lib/gitlab/git/version.rb b/lib/gitlab/git/version.rb index 4bd91898457..64c89656167 100644 --- a/lib/gitlab/git/version.rb +++ b/lib/gitlab/git/version.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Git module Version diff --git a/lib/gitlab/git/wiki.rb b/lib/gitlab/git/wiki.rb index 02c643d0da0..c43331bed60 100644 --- a/lib/gitlab/git/wiki.rb +++ b/lib/gitlab/git/wiki.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Git class Wiki diff --git a/lib/gitlab/git/wiki_file.rb b/lib/gitlab/git/wiki_file.rb index 64313bb04e8..c05a5adc00c 100644 --- a/lib/gitlab/git/wiki_file.rb +++ b/lib/gitlab/git/wiki_file.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Git class WikiFile diff --git a/lib/gitlab/git/wiki_page.rb b/lib/gitlab/git/wiki_page.rb index c4087c9ebdc..f6cac398548 100644 --- a/lib/gitlab/git/wiki_page.rb +++ b/lib/gitlab/git/wiki_page.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Git class WikiPage diff --git a/lib/gitlab/git/wiki_page_version.rb b/lib/gitlab/git/wiki_page_version.rb index d5e7e70fd31..475a9d4d1b9 100644 --- a/lib/gitlab/git/wiki_page_version.rb +++ b/lib/gitlab/git/wiki_page_version.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Git class WikiPageVersion diff --git a/lib/gitlab/git/wraps_gitaly_errors.rb b/lib/gitlab/git/wraps_gitaly_errors.rb index 4b161f7e6ce..9963bcfbf1c 100644 --- a/lib/gitlab/git/wraps_gitaly_errors.rb +++ b/lib/gitlab/git/wraps_gitaly_errors.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Git module WrapsGitalyErrors diff --git a/lib/gitlab/gitaly_client.rb b/lib/gitlab/gitaly_client.rb index d99a9f15371..8b455dc7696 100644 --- a/lib/gitlab/gitaly_client.rb +++ b/lib/gitlab/gitaly_client.rb @@ -139,7 +139,7 @@ module Gitlab ensure duration = Gitlab::Metrics::System.monotonic_time - start - # Keep track, seperately, for the performance bar + # Keep track, separately, for the performance bar self.query_time += duration gitaly_controller_action_duration_seconds.observe( current_transaction_labels.merge(gitaly_service: service.to_s, rpc: rpc.to_s), diff --git a/lib/gitlab/gitaly_client/attributes_bag.rb b/lib/gitlab/gitaly_client/attributes_bag.rb index 198a1de91c7..3f1a0ef4888 100644 --- a/lib/gitlab/gitaly_client/attributes_bag.rb +++ b/lib/gitlab/gitaly_client/attributes_bag.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module GitalyClient # This module expects an `ATTRS` const to be defined on the subclass diff --git a/lib/gitlab/gitaly_client/blob_service.rb b/lib/gitlab/gitaly_client/blob_service.rb index 086ce31e678..39547328210 100644 --- a/lib/gitlab/gitaly_client/blob_service.rb +++ b/lib/gitlab/gitaly_client/blob_service.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module GitalyClient class BlobService @@ -15,7 +17,7 @@ module Gitlab ) response = GitalyClient.call(@gitaly_repo.storage_name, :blob_service, :get_blob, request, timeout: GitalyClient.fast_timeout) - data = '' + data = [] blob = nil response.each do |msg| if blob.nil? @@ -27,6 +29,8 @@ module Gitlab return nil if blob.oid.blank? 
+ data = data.join + Gitlab::Git::Blob.new( id: blob.oid, size: blob.size, diff --git a/lib/gitlab/gitaly_client/blobs_stitcher.rb b/lib/gitlab/gitaly_client/blobs_stitcher.rb index 5ca592ff812..01bab854082 100644 --- a/lib/gitlab/gitaly_client/blobs_stitcher.rb +++ b/lib/gitlab/gitaly_client/blobs_stitcher.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module GitalyClient class BlobsStitcher diff --git a/lib/gitlab/gitaly_client/commit_service.rb b/lib/gitlab/gitaly_client/commit_service.rb index 085b2a127a5..4e46cb9f05c 100644 --- a/lib/gitlab/gitaly_client/commit_service.rb +++ b/lib/gitlab/gitaly_client/commit_service.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module GitalyClient class CommitService @@ -93,7 +95,7 @@ module Gitlab response = GitalyClient.call(@repository.storage, :commit_service, :tree_entry, request, timeout: GitalyClient.medium_timeout) entry = nil - data = '' + data = [] response.each do |msg| if entry.nil? entry = msg @@ -103,7 +105,7 @@ module Gitlab data << msg.data end - entry.data = data + entry.data = data.join entry unless entry.oid.blank? end @@ -254,7 +256,7 @@ module Gitlab ) response = GitalyClient.call(@repository.storage, :commit_service, :raw_blame, request, timeout: GitalyClient.medium_timeout) - response.reduce("") { |memo, msg| memo << msg.data } + response.reduce([]) { |memo, msg| memo << msg.data }.join end def find_commit(revision) @@ -345,8 +347,8 @@ module Gitlab request = Gitaly::ExtractCommitSignatureRequest.new(repository: @gitaly_repo, commit_id: commit_id) response = GitalyClient.call(@repository.storage, :commit_service, :extract_commit_signature, request) - signature = ''.b - signed_text = ''.b + signature = +''.b + signed_text = +''.b response.each do |message| signature << message.signature @@ -364,7 +366,7 @@ module Gitlab request = Gitaly::GetCommitSignaturesRequest.new(repository: @gitaly_repo, commit_ids: commit_ids) response = GitalyClient.call(@repository.storage, :commit_service, :get_commit_signatures, request, timeout: GitalyClient.fast_timeout) - signatures = Hash.new { |h, k| h[k] = [''.b, ''.b] } + signatures = Hash.new { |h, k| h[k] = [+''.b, +''.b] } current_commit_id = nil response.each do |message| @@ -383,7 +385,7 @@ module Gitlab request = Gitaly::GetCommitMessagesRequest.new(repository: @gitaly_repo, commit_ids: commit_ids) response = GitalyClient.call(@repository.storage, :commit_service, :get_commit_messages, request, timeout: GitalyClient.fast_timeout) - messages = Hash.new { |h, k| h[k] = ''.b } + messages = Hash.new { |h, k| h[k] = +''.b } current_commit_id = nil response.each do |rpc_message| diff --git a/lib/gitlab/gitaly_client/conflict_files_stitcher.rb b/lib/gitlab/gitaly_client/conflict_files_stitcher.rb index c275a065bce..0e00f6e8c44 100644 --- a/lib/gitlab/gitaly_client/conflict_files_stitcher.rb +++ b/lib/gitlab/gitaly_client/conflict_files_stitcher.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module GitalyClient class ConflictFilesStitcher @@ -17,7 +19,7 @@ module Gitlab current_file = file_from_gitaly_header(gitaly_file.header) else - current_file.raw_content << gitaly_file.content + current_file.raw_content = "#{current_file.raw_content}#{gitaly_file.content}" end end end diff --git a/lib/gitlab/gitaly_client/conflicts_service.rb b/lib/gitlab/gitaly_client/conflicts_service.rb index aa7e03301f5..6304f998563 100644 --- a/lib/gitlab/gitaly_client/conflicts_service.rb +++ b/lib/gitlab/gitaly_client/conflicts_service.rb @@ -1,3 +1,5 @@ +# 
frozen_string_literal: true + module Gitlab module GitalyClient class ConflictsService diff --git a/lib/gitlab/gitaly_client/diff.rb b/lib/gitlab/gitaly_client/diff.rb index af9d674535b..dd192ccde1a 100644 --- a/lib/gitlab/gitaly_client/diff.rb +++ b/lib/gitlab/gitaly_client/diff.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module GitalyClient class Diff diff --git a/lib/gitlab/gitaly_client/diff_stitcher.rb b/lib/gitlab/gitaly_client/diff_stitcher.rb index da243ee2d1a..98d327a7329 100644 --- a/lib/gitlab/gitaly_client/diff_stitcher.rb +++ b/lib/gitlab/gitaly_client/diff_stitcher.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module GitalyClient class DiffStitcher @@ -20,7 +22,7 @@ module Gitlab current_diff = GitalyClient::Diff.new(diff_params) else - current_diff.patch += diff_msg.raw_patch_data + current_diff.patch = "#{current_diff.patch}#{diff_msg.raw_patch_data}" end if diff_msg.end_of_patch diff --git a/lib/gitlab/gitaly_client/health_check_service.rb b/lib/gitlab/gitaly_client/health_check_service.rb index 6c1213f5e20..0c495f60633 100644 --- a/lib/gitlab/gitaly_client/health_check_service.rb +++ b/lib/gitlab/gitaly_client/health_check_service.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module GitalyClient class HealthCheckService diff --git a/lib/gitlab/gitaly_client/namespace_service.rb b/lib/gitlab/gitaly_client/namespace_service.rb index d4e982b649a..f0be3cbebd2 100644 --- a/lib/gitlab/gitaly_client/namespace_service.rb +++ b/lib/gitlab/gitaly_client/namespace_service.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module GitalyClient class NamespaceService diff --git a/lib/gitlab/gitaly_client/notification_service.rb b/lib/gitlab/gitaly_client/notification_service.rb index 326e6f7dafc..873c3e4086d 100644 --- a/lib/gitlab/gitaly_client/notification_service.rb +++ b/lib/gitlab/gitaly_client/notification_service.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module GitalyClient class NotificationService diff --git a/lib/gitlab/gitaly_client/operation_service.rb b/lib/gitlab/gitaly_client/operation_service.rb index 4c78b790ce5..c32c2c0b2fb 100644 --- a/lib/gitlab/gitaly_client/operation_service.rb +++ b/lib/gitlab/gitaly_client/operation_service.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module GitalyClient class OperationService diff --git a/lib/gitlab/gitaly_client/queue_enumerator.rb b/lib/gitlab/gitaly_client/queue_enumerator.rb index b8018029552..3a412102abe 100644 --- a/lib/gitlab/gitaly_client/queue_enumerator.rb +++ b/lib/gitlab/gitaly_client/queue_enumerator.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module GitalyClient class QueueEnumerator diff --git a/lib/gitlab/gitaly_client/ref_service.rb b/lib/gitlab/gitaly_client/ref_service.rb index 8acc22e809e..d5633d167ac 100644 --- a/lib/gitlab/gitaly_client/ref_service.rb +++ b/lib/gitlab/gitaly_client/ref_service.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module GitalyClient class RefService @@ -218,7 +220,7 @@ module Gitlab request = Gitaly::GetTagMessagesRequest.new(repository: @gitaly_repo, tag_ids: tag_ids) response = GitalyClient.call(@repository.storage, :ref_service, :get_tag_messages, request, timeout: GitalyClient.fast_timeout) - messages = Hash.new { |h, k| h[k] = ''.b } + messages = Hash.new { |h, k| h[k] = +''.b } current_tag_id = nil response.each do |rpc_message| diff --git a/lib/gitlab/gitaly_client/remote_service.rb 
b/lib/gitlab/gitaly_client/remote_service.rb index 4661448621b..24e8a5e16d3 100644 --- a/lib/gitlab/gitaly_client/remote_service.rb +++ b/lib/gitlab/gitaly_client/remote_service.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module GitalyClient class RemoteService diff --git a/lib/gitlab/gitaly_client/repository_service.rb b/lib/gitlab/gitaly_client/repository_service.rb index d7b36946b65..f968ebc2cbf 100644 --- a/lib/gitlab/gitaly_client/repository_service.rb +++ b/lib/gitlab/gitaly_client/repository_service.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module GitalyClient class RepositoryService @@ -56,9 +58,9 @@ module Gitlab request = Gitaly::GetInfoAttributesRequest.new(repository: @gitaly_repo) response = GitalyClient.call(@storage, :repository_service, :get_info_attributes, request, timeout: GitalyClient.fast_timeout) - response.each_with_object("") do |message, attributes| + response.each_with_object([]) do |message, attributes| attributes << message.attributes - end + end.join end def fetch_remote(remote, ssh_auth:, forced:, no_tags:, timeout:, prune: true) diff --git a/lib/gitlab/gitaly_client/server_service.rb b/lib/gitlab/gitaly_client/server_service.rb index ad898278353..0ade6942db9 100644 --- a/lib/gitlab/gitaly_client/server_service.rb +++ b/lib/gitlab/gitaly_client/server_service.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module GitalyClient # Meant for extraction of server data, and later maybe to perform misc task diff --git a/lib/gitlab/gitaly_client/storage_service.rb b/lib/gitlab/gitaly_client/storage_service.rb index 3a26dd58ff4..4edcb0b8ba9 100644 --- a/lib/gitlab/gitaly_client/storage_service.rb +++ b/lib/gitlab/gitaly_client/storage_service.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module GitalyClient class StorageService diff --git a/lib/gitlab/gitaly_client/storage_settings.rb b/lib/gitlab/gitaly_client/storage_settings.rb index 26d1f53f26c..754cccb6b3f 100644 --- a/lib/gitlab/gitaly_client/storage_settings.rb +++ b/lib/gitlab/gitaly_client/storage_settings.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module GitalyClient # This is a chokepoint that is meant to help us stop remove all places diff --git a/lib/gitlab/gitaly_client/util.rb b/lib/gitlab/gitaly_client/util.rb index 9c19c51d412..dce5d6a8ad0 100644 --- a/lib/gitlab/gitaly_client/util.rb +++ b/lib/gitlab/gitaly_client/util.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module GitalyClient module Util diff --git a/lib/gitlab/gitaly_client/wiki_file.rb b/lib/gitlab/gitaly_client/wiki_file.rb index 47c60c92484..ef2b23732d1 100644 --- a/lib/gitlab/gitaly_client/wiki_file.rb +++ b/lib/gitlab/gitaly_client/wiki_file.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module GitalyClient class WikiFile diff --git a/lib/gitlab/gitaly_client/wiki_page.rb b/lib/gitlab/gitaly_client/wiki_page.rb index a02d15db5dd..757a429fb8a 100644 --- a/lib/gitlab/gitaly_client/wiki_page.rb +++ b/lib/gitlab/gitaly_client/wiki_page.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module GitalyClient class WikiPage diff --git a/lib/gitlab/gitaly_client/wiki_service.rb b/lib/gitlab/gitaly_client/wiki_service.rb index 7c2c228ad01..2b3d622af4d 100644 --- a/lib/gitlab/gitaly_client/wiki_service.rb +++ b/lib/gitlab/gitaly_client/wiki_service.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require 'stringio' module Gitlab @@ -139,7 +141,7 @@ module Gitlab next unless 
message.name.present? || wiki_file if wiki_file - wiki_file.raw_data << message.raw_data + wiki_file.raw_data = "#{wiki_file.raw_data}#{message.raw_data}" else wiki_file = GitalyClient::WikiFile.new(message.to_h) # All gRPC strings in a response are frozen, so we get @@ -160,7 +162,7 @@ module Gitlab ) response = GitalyClient.call(@repository.storage, :wiki_service, :wiki_get_formatted_data, request) - response.reduce("") { |memo, msg| memo << msg.data } + response.reduce([]) { |memo, msg| memo << msg.data }.join end private diff --git a/lib/gitlab/gitlab_import/client.rb b/lib/gitlab/gitlab_import/client.rb index 38ef12491df..86474159f8b 100644 --- a/lib/gitlab/gitlab_import/client.rb +++ b/lib/gitlab/gitlab_import/client.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module GitlabImport class Client diff --git a/lib/gitlab/gitlab_import/importer.rb b/lib/gitlab/gitlab_import/importer.rb index 047487f1d24..e84863deba8 100644 --- a/lib/gitlab/gitlab_import/importer.rb +++ b/lib/gitlab/gitlab_import/importer.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module GitlabImport class Importer @@ -22,22 +24,22 @@ module Gitlab issues = client.issues(project_identifier) issues.each do |issue| - body = @formatter.author_line(issue["author"]["name"]) - body += issue["description"] + body = [@formatter.author_line(issue["author"]["name"])] + body << issue["description"] comments = client.issue_comments(project_identifier, issue["iid"]) if comments.any? - body += @formatter.comments_header + body << @formatter.comments_header end comments.each do |comment| - body += @formatter.comment(comment["author"]["name"], comment["created_at"], comment["body"]) + body << @formatter.comment(comment["author"]["name"], comment["created_at"], comment["body"]) end project.issues.create!( iid: issue["iid"], - description: body, + description: body.join, title: issue["title"], state: issue["state"], updated_at: issue["updated_at"], diff --git a/lib/gitlab/gitlab_import/project_creator.rb b/lib/gitlab/gitlab_import/project_creator.rb index 430b8c10058..35feea17351 100644 --- a/lib/gitlab/gitlab_import/project_creator.rb +++ b/lib/gitlab/gitlab_import/project_creator.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module GitlabImport class ProjectCreator diff --git a/lib/gitlab/gon_helper.rb b/lib/gitlab/gon_helper.rb index 860c39feb64..15137140639 100644 --- a/lib/gitlab/gon_helper.rb +++ b/lib/gitlab/gon_helper.rb @@ -8,7 +8,10 @@ module Gitlab def add_gon_variables gon.api_version = 'v4' - gon.default_avatar_url = URI.join(Gitlab.config.gitlab.url, ActionController::Base.helpers.image_path('no_avatar.png')).to_s + gon.default_avatar_url = + Gitlab::Utils.append_path( + Gitlab.config.gitlab.url, + ActionController::Base.helpers.image_path('no_avatar.png')) gon.max_file_size = Gitlab::CurrentSettings.max_attachment_size gon.asset_host = ActionController::Base.asset_host gon.webpack_public_path = webpack_public_path diff --git a/lib/gitlab/google_code_import/client.rb b/lib/gitlab/google_code_import/client.rb index b1dbf554e41..52d714880b5 100644 --- a/lib/gitlab/google_code_import/client.rb +++ b/lib/gitlab/google_code_import/client.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module GoogleCodeImport class Client diff --git a/lib/gitlab/google_code_import/importer.rb b/lib/gitlab/google_code_import/importer.rb index 0c08c0fedaa..1e7203cb82a 100644 --- a/lib/gitlab/google_code_import/importer.rb +++ b/lib/gitlab/google_code_import/importer.rb 
@@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module GoogleCodeImport class Importer diff --git a/lib/gitlab/google_code_import/project_creator.rb b/lib/gitlab/google_code_import/project_creator.rb index 326cfcaa8af..eaef85acb98 100644 --- a/lib/gitlab/google_code_import/project_creator.rb +++ b/lib/gitlab/google_code_import/project_creator.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module GoogleCodeImport class ProjectCreator diff --git a/lib/gitlab/google_code_import/repository.rb b/lib/gitlab/google_code_import/repository.rb index ad33fc2cad2..19627c8cd35 100644 --- a/lib/gitlab/google_code_import/repository.rb +++ b/lib/gitlab/google_code_import/repository.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module GoogleCodeImport class Repository diff --git a/lib/gitlab/gpg/commit.rb b/lib/gitlab/gpg/commit.rb index 2bc081a6181..31bab20b044 100644 --- a/lib/gitlab/gpg/commit.rb +++ b/lib/gitlab/gpg/commit.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Gpg class Commit diff --git a/lib/gitlab/gpg/invalid_gpg_signature_updater.rb b/lib/gitlab/gpg/invalid_gpg_signature_updater.rb index 6972bd685f7..d892d27a917 100644 --- a/lib/gitlab/gpg/invalid_gpg_signature_updater.rb +++ b/lib/gitlab/gpg/invalid_gpg_signature_updater.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Gpg class InvalidGpgSignatureUpdater diff --git a/lib/gitlab/grape_logging/formatters/lograge_with_timestamp.rb b/lib/gitlab/grape_logging/formatters/lograge_with_timestamp.rb index 41004408dec..9bb1e8fc7a2 100644 --- a/lib/gitlab/grape_logging/formatters/lograge_with_timestamp.rb +++ b/lib/gitlab/grape_logging/formatters/lograge_with_timestamp.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module GrapeLogging module Formatters diff --git a/lib/gitlab/grape_logging/loggers/queue_duration_logger.rb b/lib/gitlab/grape_logging/loggers/queue_duration_logger.rb index 0adac79f25a..705e23adff2 100644 --- a/lib/gitlab/grape_logging/loggers/queue_duration_logger.rb +++ b/lib/gitlab/grape_logging/loggers/queue_duration_logger.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + # This grape_logging module (https://github.com/aserafin/grape_logging) makes it # possible to log how much time an API request was queued by Workhorse. module Gitlab diff --git a/lib/gitlab/grape_logging/loggers/user_logger.rb b/lib/gitlab/grape_logging/loggers/user_logger.rb index fa172861967..6caa6c715e7 100644 --- a/lib/gitlab/grape_logging/loggers/user_logger.rb +++ b/lib/gitlab/grape_logging/loggers/user_logger.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + # This grape_logging module (https://github.com/aserafin/grape_logging) makes it # possible to log the user who performed the Grape API action by retrieving # the user context from the request environment. diff --git a/lib/gitlab/graphql/authorize.rb b/lib/gitlab/graphql/authorize.rb index 93a903915b0..5e48bf9043d 100644 --- a/lib/gitlab/graphql/authorize.rb +++ b/lib/gitlab/graphql/authorize.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Graphql # Allow fields to declare permissions their objects must have. 
The field diff --git a/lib/gitlab/graphql/authorize/authorize_resource.rb b/lib/gitlab/graphql/authorize/authorize_resource.rb index 40895686a8a..a56c4f6368d 100644 --- a/lib/gitlab/graphql/authorize/authorize_resource.rb +++ b/lib/gitlab/graphql/authorize/authorize_resource.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Graphql module Authorize diff --git a/lib/gitlab/graphql/authorize/instrumentation.rb b/lib/gitlab/graphql/authorize/instrumentation.rb index 6cb8e617f62..d638d2b43ee 100644 --- a/lib/gitlab/graphql/authorize/instrumentation.rb +++ b/lib/gitlab/graphql/authorize/instrumentation.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Graphql module Authorize diff --git a/lib/gitlab/graphql/connections.rb b/lib/gitlab/graphql/connections.rb index 2582ffeb2a8..fbccdfa7b08 100644 --- a/lib/gitlab/graphql/connections.rb +++ b/lib/gitlab/graphql/connections.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Graphql module Connections diff --git a/lib/gitlab/graphql/connections/keyset_connection.rb b/lib/gitlab/graphql/connections/keyset_connection.rb index 3c0d7e9784a..851054c0393 100644 --- a/lib/gitlab/graphql/connections/keyset_connection.rb +++ b/lib/gitlab/graphql/connections/keyset_connection.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Graphql module Connections diff --git a/lib/gitlab/graphql/errors.rb b/lib/gitlab/graphql/errors.rb index f8c7ec24be1..fe74549e322 100644 --- a/lib/gitlab/graphql/errors.rb +++ b/lib/gitlab/graphql/errors.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Graphql module Errors diff --git a/lib/gitlab/graphql/expose_permissions.rb b/lib/gitlab/graphql/expose_permissions.rb index e3779995406..365b7cca24f 100644 --- a/lib/gitlab/graphql/expose_permissions.rb +++ b/lib/gitlab/graphql/expose_permissions.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Graphql module ExposePermissions diff --git a/lib/gitlab/graphql/present.rb b/lib/gitlab/graphql/present.rb index 2c7b64f1be9..7f69bf601d6 100644 --- a/lib/gitlab/graphql/present.rb +++ b/lib/gitlab/graphql/present.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Graphql module Present diff --git a/lib/gitlab/graphql/present/instrumentation.rb b/lib/gitlab/graphql/present/instrumentation.rb index f87fd147b15..ab03c40c22d 100644 --- a/lib/gitlab/graphql/present/instrumentation.rb +++ b/lib/gitlab/graphql/present/instrumentation.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Graphql module Present diff --git a/lib/gitlab/graphql/variables.rb b/lib/gitlab/graphql/variables.rb index ffbaf65b512..b13ea37c21f 100644 --- a/lib/gitlab/graphql/variables.rb +++ b/lib/gitlab/graphql/variables.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Graphql class Variables diff --git a/lib/gitlab/graphs/commits.rb b/lib/gitlab/graphs/commits.rb index c4ffc19df09..66e1b2e78b4 100644 --- a/lib/gitlab/graphs/commits.rb +++ b/lib/gitlab/graphs/commits.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module Graphs class Commits diff --git a/lib/gitlab/hashed_storage/migrator.rb b/lib/gitlab/hashed_storage/migrator.rb index 4edc251facb..1f29cf10cad 100644 --- a/lib/gitlab/hashed_storage/migrator.rb +++ b/lib/gitlab/hashed_storage/migrator.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module HashedStorage # Hashed Storage Migrator diff --git 
a/lib/gitlab/hashed_storage/rake_helper.rb b/lib/gitlab/hashed_storage/rake_helper.rb index 22edd5f999d..38f552fab03 100644 --- a/lib/gitlab/hashed_storage/rake_helper.rb +++ b/lib/gitlab/hashed_storage/rake_helper.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module HashedStorage module RakeHelper diff --git a/lib/gitlab/health_checks/base_abstract_check.rb b/lib/gitlab/health_checks/base_abstract_check.rb index 8b365dab185..1d31f59999c 100644 --- a/lib/gitlab/health_checks/base_abstract_check.rb +++ b/lib/gitlab/health_checks/base_abstract_check.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module HealthChecks module BaseAbstractCheck diff --git a/lib/gitlab/health_checks/db_check.rb b/lib/gitlab/health_checks/db_check.rb index 08495c0a59e..2bcd25cd3cc 100644 --- a/lib/gitlab/health_checks/db_check.rb +++ b/lib/gitlab/health_checks/db_check.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module HealthChecks class DbCheck diff --git a/lib/gitlab/health_checks/gitaly_check.rb b/lib/gitlab/health_checks/gitaly_check.rb index 1f623e0b6ec..898733fea5d 100644 --- a/lib/gitlab/health_checks/gitaly_check.rb +++ b/lib/gitlab/health_checks/gitaly_check.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module HealthChecks class GitalyCheck diff --git a/lib/gitlab/health_checks/metric.rb b/lib/gitlab/health_checks/metric.rb index d62d9136886..62a5216d159 100644 --- a/lib/gitlab/health_checks/metric.rb +++ b/lib/gitlab/health_checks/metric.rb @@ -1,3 +1,6 @@ -module Gitlab::HealthChecks # rubocop:disable Naming/FileName +# rubocop:disable Naming/FileName +# frozen_string_literal: true + +module Gitlab::HealthChecks Metric = Struct.new(:name, :value, :labels) end diff --git a/lib/gitlab/health_checks/prometheus_text_format.rb b/lib/gitlab/health_checks/prometheus_text_format.rb index b3c759b4730..2a8f9d31cd5 100644 --- a/lib/gitlab/health_checks/prometheus_text_format.rb +++ b/lib/gitlab/health_checks/prometheus_text_format.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module HealthChecks class PrometheusTextFormat diff --git a/lib/gitlab/health_checks/redis/cache_check.rb b/lib/gitlab/health_checks/redis/cache_check.rb index 2f6c4db12bb..0c8fe83893b 100644 --- a/lib/gitlab/health_checks/redis/cache_check.rb +++ b/lib/gitlab/health_checks/redis/cache_check.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module HealthChecks module Redis diff --git a/lib/gitlab/health_checks/redis/queues_check.rb b/lib/gitlab/health_checks/redis/queues_check.rb index 63d2882c5b2..b1e33b9f459 100644 --- a/lib/gitlab/health_checks/redis/queues_check.rb +++ b/lib/gitlab/health_checks/redis/queues_check.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module HealthChecks module Redis diff --git a/lib/gitlab/health_checks/redis/redis_check.rb b/lib/gitlab/health_checks/redis/redis_check.rb index 8ceb0a0aa46..f7e46fce134 100644 --- a/lib/gitlab/health_checks/redis/redis_check.rb +++ b/lib/gitlab/health_checks/redis/redis_check.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module HealthChecks module Redis diff --git a/lib/gitlab/health_checks/redis/shared_state_check.rb b/lib/gitlab/health_checks/redis/shared_state_check.rb index f1ea1ffe1be..285ac271929 100644 --- a/lib/gitlab/health_checks/redis/shared_state_check.rb +++ b/lib/gitlab/health_checks/redis/shared_state_check.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module HealthChecks module Redis diff 
--git a/lib/gitlab/health_checks/result.rb b/lib/gitlab/health_checks/result.rb index e323e2c9723..d32a6980eb8 100644 --- a/lib/gitlab/health_checks/result.rb +++ b/lib/gitlab/health_checks/result.rb @@ -1,3 +1,6 @@ -module Gitlab::HealthChecks # rubocop:disable Naming/FileName +# rubocop:disable Naming/FileName +# frozen_string_literal: true + +module Gitlab::HealthChecks Result = Struct.new(:success, :message, :labels) end diff --git a/lib/gitlab/health_checks/simple_abstract_check.rb b/lib/gitlab/health_checks/simple_abstract_check.rb index 96945ce5b20..3588260d6eb 100644 --- a/lib/gitlab/health_checks/simple_abstract_check.rb +++ b/lib/gitlab/health_checks/simple_abstract_check.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Gitlab module HealthChecks module SimpleAbstractCheck diff --git a/lib/gitlab/http_io.rb b/lib/gitlab/http_io.rb index 9d7763fc5ac..e768b8adb12 100644 --- a/lib/gitlab/http_io.rb +++ b/lib/gitlab/http_io.rb @@ -161,14 +161,14 @@ module Gitlab ## # Note: If provider does not return content_range, then we set it as we requested # Provider: minio - # - When the file size is larger than requested Content-range, the Content-range is included in responces with Net::HTTPPartialContent 206 - # - When the file size is smaller than requested Content-range, the Content-range is included in responces with Net::HTTPPartialContent 206 + # - When the file size is larger than requested Content-range, the Content-range is included in responses with Net::HTTPPartialContent 206 + # - When the file size is smaller than requested Content-range, the Content-range is included in responses with Net::HTTPPartialContent 206 # Provider: AWS - # - When the file size is larger than requested Content-range, the Content-range is included in responces with Net::HTTPPartialContent 206 - # - When the file size is smaller than requested Content-range, the Content-range is included in responces with Net::HTTPPartialContent 206 + # - When the file size is larger than requested Content-range, the Content-range is included in responses with Net::HTTPPartialContent 206 + # - When the file size is smaller than requested Content-range, the Content-range is included in responses with Net::HTTPPartialContent 206 # Provider: GCS - # - When the file size is larger than requested Content-range, the Content-range is included in responces with Net::HTTPPartialContent 206 - # - When the file size is smaller than requested Content-range, the Content-range is included in responces with Net::HTTPOK 200 + # - When the file size is larger than requested Content-range, the Content-range is included in responses with Net::HTTPPartialContent 206 + # - When the file size is smaller than requested Content-range, the Content-range is included in responses with Net::HTTPOK 200 @chunk_range ||= (chunk_start...(chunk_start + @chunk.bytesize)) end diff --git a/lib/gitlab/kubernetes/helm/api.rb b/lib/gitlab/kubernetes/helm/api.rb index e21bc531444..7c026ac9e68 100644 --- a/lib/gitlab/kubernetes/helm/api.rb +++ b/lib/gitlab/kubernetes/helm/api.rb @@ -54,7 +54,11 @@ module Gitlab def create_config_map(command) command.config_map_resource.tap do |config_map_resource| - kubeclient.create_config_map(config_map_resource) + if config_map_exists?(config_map_resource) + kubeclient.update_config_map(config_map_resource) + else + kubeclient.create_config_map(config_map_resource) + end end end @@ -88,23 +92,21 @@ module Gitlab end end - def service_account_exists?(resource) - resource_exists? 
do - kubeclient.get_service_account(resource.metadata.name, resource.metadata.namespace) - end + def config_map_exists?(resource) + kubeclient.get_config_map(resource.metadata.name, resource.metadata.namespace) + rescue ::Kubeclient::ResourceNotFoundError + false end - def cluster_role_binding_exists?(resource) - resource_exists? do - kubeclient.get_cluster_role_binding(resource.metadata.name) - end + def service_account_exists?(resource) + kubeclient.get_service_account(resource.metadata.name, resource.metadata.namespace) + rescue ::Kubeclient::ResourceNotFoundError + false end - def resource_exists? - yield - rescue ::Kubeclient::HttpError => e - raise e unless e.error_code == 404 - + def cluster_role_binding_exists?(resource) + kubeclient.get_cluster_role_binding(resource.metadata.name) + rescue ::Kubeclient::ResourceNotFoundError false end end diff --git a/lib/gitlab/kubernetes/kube_client.rb b/lib/gitlab/kubernetes/kube_client.rb index f266177bec1..b947f6b551e 100644 --- a/lib/gitlab/kubernetes/kube_client.rb +++ b/lib/gitlab/kubernetes/kube_client.rb @@ -16,7 +16,8 @@ module Gitlab SUPPORTED_API_GROUPS = { core: { group: 'api', version: 'v1' }, rbac: { group: 'apis/rbac.authorization.k8s.io', version: 'v1' }, - extensions: { group: 'apis/extensions', version: 'v1beta1' } + extensions: { group: 'apis/extensions', version: 'v1beta1' }, + knative: { group: 'apis/serving.knative.dev', version: 'v1alpha1' } }.freeze SUPPORTED_API_GROUPS.each do |name, params| diff --git a/lib/gitlab/kubernetes/namespace.rb b/lib/gitlab/kubernetes/namespace.rb index e6ff6160ab9..783c8a24741 100644 --- a/lib/gitlab/kubernetes/namespace.rb +++ b/lib/gitlab/kubernetes/namespace.rb @@ -10,9 +10,7 @@ module Gitlab def exists? @client.get_namespace(name) - rescue ::Kubeclient::HttpError => ke - raise ke unless ke.error_code == 404 - + rescue ::Kubeclient::ResourceNotFoundError false end diff --git a/lib/gitlab/manifest_import/manifest.rb b/lib/gitlab/manifest_import/manifest.rb index 4d6034fb956..b69b9ac4b64 100644 --- a/lib/gitlab/manifest_import/manifest.rb +++ b/lib/gitlab/manifest_import/manifest.rb @@ -63,7 +63,7 @@ module Gitlab end def repository_url(name) - URI.join(remote, name).to_s + Gitlab::Utils.append_path(remote, name) end def remote diff --git a/lib/gitlab/middleware/go.rb b/lib/gitlab/middleware/go.rb index 1fd8f147b44..6943567fb6d 100644 --- a/lib/gitlab/middleware/go.rb +++ b/lib/gitlab/middleware/go.rb @@ -38,7 +38,7 @@ module Gitlab def go_body(path) config = Gitlab.config - project_url = URI.join(config.gitlab.url, path) + project_url = Gitlab::Utils.append_path(config.gitlab.url, path) import_prefix = strip_url(project_url.to_s) repository_url = if Gitlab::CurrentSettings.enabled_git_access_protocol == 'ssh' diff --git a/lib/gitlab/quick_actions/command_definition.rb b/lib/gitlab/quick_actions/command_definition.rb index 96415271316..c682eb22890 100644 --- a/lib/gitlab/quick_actions/command_definition.rb +++ b/lib/gitlab/quick_actions/command_definition.rb @@ -2,13 +2,14 @@ module Gitlab module QuickActions class CommandDefinition attr_accessor :name, :aliases, :description, :explanation, :params, - :condition_block, :parse_params_block, :action_block + :condition_block, :parse_params_block, :action_block, :warning def initialize(name, attributes = {}) @name = name @aliases = attributes[:aliases] || [] @description = attributes[:description] || '' + @warning = attributes[:warning] || '' @explanation = attributes[:explanation] || '' @params = attributes[:params] || [] @condition_block = 
attributes[:condition_block] @@ -33,11 +34,13 @@ module Gitlab def explain(context, arg) return unless available?(context) - if explanation.respond_to?(:call) - execute_block(explanation, context, arg) - else - explanation - end + message = if explanation.respond_to?(:call) + execute_block(explanation, context, arg) + else + explanation + end + + warning.empty? ? message : "#{message} (#{warning})" end def execute(context, arg) @@ -61,6 +64,7 @@ module Gitlab name: name, aliases: aliases, description: desc, + warning: warning, params: prms } end diff --git a/lib/gitlab/quick_actions/dsl.rb b/lib/gitlab/quick_actions/dsl.rb index d82dccd0db5..192c7ec2ff5 100644 --- a/lib/gitlab/quick_actions/dsl.rb +++ b/lib/gitlab/quick_actions/dsl.rb @@ -31,6 +31,10 @@ module Gitlab @description = block_given? ? block : text end + def warning(message = '') + @warning = message + end + # Allows to define params for the next quick action. # These params are shown in the autocomplete menu. # @@ -133,6 +137,7 @@ module Gitlab name, aliases: aliases, description: @description, + warning: @warning, explanation: @explanation, params: @params, condition_block: @condition_block, @@ -150,6 +155,7 @@ module Gitlab @explanation = nil @params = nil @condition_block = nil + @warning = nil @parse_params_block = nil end end diff --git a/lib/gitlab/sentry.rb b/lib/gitlab/sentry.rb index 24e3866128b..8079c5882c4 100644 --- a/lib/gitlab/sentry.rb +++ b/lib/gitlab/sentry.rb @@ -7,7 +7,7 @@ module Gitlab end def self.context(current_user = nil) - return unless self.enabled? + return unless enabled? Raven.tags_context(locale: I18n.locale) @@ -29,14 +29,22 @@ module Gitlab # # Provide an issue URL for follow up. def self.track_exception(exception, issue_url: nil, extra: {}) + track_acceptable_exception(exception, issue_url: issue_url, extra: extra) + + raise exception if should_raise? + end + + # This should be used when you do not want to raise an exception in + # development and test. If you need development and test to behave + # just the same as production you can use this instead of + # track_exception. + def self.track_acceptable_exception(exception, issue_url: nil, extra: {}) if enabled? extra[:issue_url] = issue_url if issue_url context # Make sure we've set everything we know in the context Raven.capture_exception(exception, extra: extra) end - - raise exception if should_raise? end def self.program_context diff --git a/lib/gitlab/view/presenter/base.rb b/lib/gitlab/view/presenter/base.rb index 36162faa1eb..c3fd6d317aa 100644 --- a/lib/gitlab/view/presenter/base.rb +++ b/lib/gitlab/view/presenter/base.rb @@ -11,8 +11,8 @@ module Gitlab attr_reader :subject - def can?(user, action, overriden_subject = nil) - super(user, action, overriden_subject || subject) + def can?(user, action, overridden_subject = nil) + super(user, action, overridden_subject || subject) end # delegate all #can? 
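
Reviewer note: the new warning attribute on quick action definitions is appended to the explain text as "message (warning)". A hedged sketch of how a command module might use the new DSL call (desc, explanation and command are the existing quick actions DSL methods; the command body is purely illustrative):

    desc 'Mark this merge request as Work In Progress'
    explanation 'Marks this merge request as Work In Progress.'
    warning 'This stops the merge request from being merged until it is marked ready'
    command :wip do
      @updates[:wip_event] = 'wip'
    end
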
queries to the subject diff --git a/lib/json_web_token/hmac_token.rb b/lib/json_web_token/hmac_token.rb new file mode 100644 index 00000000000..ceb1b9c913f --- /dev/null +++ b/lib/json_web_token/hmac_token.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +require 'jwt' + +module JSONWebToken + class HMACToken < Token + IAT_LEEWAY = 60 + JWT_ALGORITHM = 'HS256' + + def initialize(secret) + super() + + @secret = secret + end + + def self.decode(token, secret, leeway: IAT_LEEWAY, verify_iat: true) + JWT.decode(token, secret, true, leeway: leeway, verify_iat: verify_iat, algorithm: JWT_ALGORITHM) + end + + def encoded + JWT.encode(payload, secret, JWT_ALGORITHM) + end + + private + + attr_reader :secret + end +end diff --git a/lib/json_web_token/token.rb b/lib/json_web_token/token.rb index ce5d6f248d0..c59beef02c9 100644 --- a/lib/json_web_token/token.rb +++ b/lib/json_web_token/token.rb @@ -1,17 +1,22 @@ # frozen_string_literal: true +require 'securerandom' + module JSONWebToken class Token attr_accessor :issuer, :subject, :audience, :id attr_accessor :issued_at, :not_before, :expire_time + DEFAULT_NOT_BEFORE_TIME = 5 + DEFAULT_EXPIRE_TIME = 60 + def initialize @id = SecureRandom.uuid @issued_at = Time.now # we give a few seconds for time shift - @not_before = issued_at - 5.seconds + @not_before = issued_at - DEFAULT_NOT_BEFORE_TIME # default 60 seconds should be more than enough for this authentication token - @expire_time = issued_at + 1.minute + @expire_time = issued_at + DEFAULT_EXPIRE_TIME @custom_payload = {} end diff --git a/lib/tasks/gitlab/check.rake b/lib/tasks/gitlab/check.rake index 663bebfe71a..a2c3e32948f 100644 --- a/lib/tasks/gitlab/check.rake +++ b/lib/tasks/gitlab/check.rake @@ -45,7 +45,6 @@ namespace :gitlab do start_checking "GitLab Shell" check_gitlab_shell - check_repos_hooks_directory_is_link check_gitlab_shell_self_test finished_checking "GitLab Shell" @@ -54,42 +53,6 @@ namespace :gitlab do # Checks ######################## - def check_repos_hooks_directory_is_link - print "hooks directories in repos are links: ... " - - gitlab_shell_hooks_path = Gitlab.config.gitlab_shell.hooks_path - - unless Project.count > 0 - puts "can't check, you have no projects".color(:magenta) - return - end - - puts "" - - Project.find_each(batch_size: 100) do |project| - print sanitized_message(project) - project_hook_directory = File.join(project.repository.path_to_repo, "hooks") - - if project.empty_repo? - puts "repository is empty".color(:magenta) - elsif File.directory?(project_hook_directory) && File.directory?(gitlab_shell_hooks_path) && - (File.realpath(project_hook_directory) == File.realpath(gitlab_shell_hooks_path)) - puts 'ok'.color(:green) - else - puts "wrong or missing hooks".color(:red) - try_fixing_it( - sudo_gitlab("#{File.join(gitlab_shell_path, 'bin/create-hooks')} #{repository_storage_paths_args.join(' ')}"), - 'Check the hooks_path in config/gitlab.yml', - 'Check your gitlab-shell installation' - ) - for_more_information( - see_installation_guide_section "GitLab Shell" - ) - fix_and_rerun - end - end - end - def check_gitlab_shell_self_test gitlab_shell_repo_base = gitlab_shell_path check_cmd = File.expand_path('bin/check', gitlab_shell_repo_base) diff --git a/locale/ar_SA/gitlab.po b/locale/ar_SA/gitlab.po index 903d72d9ea8..4a2b56f2806 100644 --- a/locale/ar_SA/gitlab.po +++ b/locale/ar_SA/gitlab.po @@ -681,7 +681,7 @@ msgstr "" msgid "An error occured whilst fetching the job trace." msgstr "" -msgid "An error occured whilst fetching the latest pipline." 
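
Reviewer note: lib/json_web_token/hmac_token.rb above adds an HS256 counterpart to the existing JSONWebToken::RSAToken, with a 60-second iat leeway on decode. A rough usage sketch, assuming the class exactly as introduced in this diff (secret and claims are made up):

    secret = 'shared-hmac-secret'

    token = JSONWebToken::HMACToken.new(secret)
    token.issuer   = 'gitlab'
    token.audience = 'example-consumer'
    encoded = token.encoded        # compact JWT signed with HS256

    payload, header = JSONWebToken::HMACToken.decode(encoded, secret)
    # JWT::DecodeError (or a subclass) is raised if the signature or iat check fails
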
+msgid "An error occured whilst fetching the latest pipeline." msgstr "" msgid "An error occured whilst loading all the files." @@ -2913,7 +2913,7 @@ msgstr "" msgid "Elasticsearch" msgstr "" -msgid "Elasticsearch intergration. Elasticsearch AWS IAM." +msgid "Elasticsearch integration. Elasticsearch AWS IAM." msgstr "" msgid "Email" diff --git a/locale/bg/gitlab.po b/locale/bg/gitlab.po index e3a5e191023..6c6eeeb6580 100644 --- a/locale/bg/gitlab.po +++ b/locale/bg/gitlab.po @@ -561,7 +561,7 @@ msgstr "" msgid "An error occured whilst fetching the job trace." msgstr "" -msgid "An error occured whilst fetching the latest pipline." +msgid "An error occured whilst fetching the latest pipeline." msgstr "" msgid "An error occured whilst loading all the files." @@ -2777,7 +2777,7 @@ msgstr "" msgid "Elasticsearch" msgstr "" -msgid "Elasticsearch intergration. Elasticsearch AWS IAM." +msgid "Elasticsearch integration. Elasticsearch AWS IAM." msgstr "" msgid "Email" diff --git a/locale/ca_ES/gitlab.po b/locale/ca_ES/gitlab.po index 91d5fb17000..a957023bb25 100644 --- a/locale/ca_ES/gitlab.po +++ b/locale/ca_ES/gitlab.po @@ -561,7 +561,7 @@ msgstr "" msgid "An error occured whilst fetching the job trace." msgstr "" -msgid "An error occured whilst fetching the latest pipline." +msgid "An error occured whilst fetching the latest pipeline." msgstr "" msgid "An error occured whilst loading all the files." @@ -2777,7 +2777,7 @@ msgstr "" msgid "Elasticsearch" msgstr "" -msgid "Elasticsearch intergration. Elasticsearch AWS IAM." +msgid "Elasticsearch integration. Elasticsearch AWS IAM." msgstr "" msgid "Email" diff --git a/locale/cs_CZ/gitlab.po b/locale/cs_CZ/gitlab.po index 867b9b27ad2..9801999299f 100644 --- a/locale/cs_CZ/gitlab.po +++ b/locale/cs_CZ/gitlab.po @@ -621,7 +621,7 @@ msgstr "" msgid "An error occured whilst fetching the job trace." msgstr "" -msgid "An error occured whilst fetching the latest pipline." +msgid "An error occured whilst fetching the latest pipeline." msgstr "" msgid "An error occured whilst loading all the files." @@ -2845,7 +2845,7 @@ msgstr "" msgid "Elasticsearch" msgstr "" -msgid "Elasticsearch intergration. Elasticsearch AWS IAM." +msgid "Elasticsearch integration. Elasticsearch AWS IAM." msgstr "" msgid "Email" diff --git a/locale/da_DK/gitlab.po b/locale/da_DK/gitlab.po index f8412f28cbe..9c7b37f7f18 100644 --- a/locale/da_DK/gitlab.po +++ b/locale/da_DK/gitlab.po @@ -561,7 +561,7 @@ msgstr "" msgid "An error occured whilst fetching the job trace." msgstr "" -msgid "An error occured whilst fetching the latest pipline." +msgid "An error occured whilst fetching the latest pipeline." msgstr "" msgid "An error occured whilst loading all the files." @@ -2777,7 +2777,7 @@ msgstr "" msgid "Elasticsearch" msgstr "" -msgid "Elasticsearch intergration. Elasticsearch AWS IAM." +msgid "Elasticsearch integration. Elasticsearch AWS IAM." msgstr "" msgid "Email" diff --git a/locale/de/gitlab.po b/locale/de/gitlab.po index 5f9fbc1cc39..07568765abb 100644 --- a/locale/de/gitlab.po +++ b/locale/de/gitlab.po @@ -561,7 +561,7 @@ msgstr "" msgid "An error occured whilst fetching the job trace." msgstr "" -msgid "An error occured whilst fetching the latest pipline." +msgid "An error occured whilst fetching the latest pipeline." msgstr "" msgid "An error occured whilst loading all the files." @@ -2777,7 +2777,7 @@ msgstr "" msgid "Elasticsearch" msgstr "" -msgid "Elasticsearch intergration. Elasticsearch AWS IAM." +msgid "Elasticsearch integration. Elasticsearch AWS IAM." 
msgstr "" msgid "Email" diff --git a/locale/eo/gitlab.po b/locale/eo/gitlab.po index 15d537ff3b4..84ef902d5e1 100644 --- a/locale/eo/gitlab.po +++ b/locale/eo/gitlab.po @@ -561,7 +561,7 @@ msgstr "" msgid "An error occured whilst fetching the job trace." msgstr "" -msgid "An error occured whilst fetching the latest pipline." +msgid "An error occured whilst fetching the latest pipeline." msgstr "" msgid "An error occured whilst loading all the files." @@ -2777,7 +2777,7 @@ msgstr "" msgid "Elasticsearch" msgstr "" -msgid "Elasticsearch intergration. Elasticsearch AWS IAM." +msgid "Elasticsearch integration. Elasticsearch AWS IAM." msgstr "" msgid "Email" diff --git a/locale/es/gitlab.po b/locale/es/gitlab.po index 0d8ee05b364..32e495695c6 100644 --- a/locale/es/gitlab.po +++ b/locale/es/gitlab.po @@ -561,7 +561,7 @@ msgstr "" msgid "An error occured whilst fetching the job trace." msgstr "" -msgid "An error occured whilst fetching the latest pipline." +msgid "An error occured whilst fetching the latest pipeline." msgstr "" msgid "An error occured whilst loading all the files." @@ -2777,7 +2777,7 @@ msgstr "" msgid "Elasticsearch" msgstr "" -msgid "Elasticsearch intergration. Elasticsearch AWS IAM." +msgid "Elasticsearch integration. Elasticsearch AWS IAM." msgstr "" msgid "Email" diff --git a/locale/et_EE/gitlab.po b/locale/et_EE/gitlab.po index 1f699a8e06d..bc6b3b67d42 100644 --- a/locale/et_EE/gitlab.po +++ b/locale/et_EE/gitlab.po @@ -561,7 +561,7 @@ msgstr "" msgid "An error occured whilst fetching the job trace." msgstr "" -msgid "An error occured whilst fetching the latest pipline." +msgid "An error occured whilst fetching the latest pipeline." msgstr "" msgid "An error occured whilst loading all the files." @@ -2777,7 +2777,7 @@ msgstr "" msgid "Elasticsearch" msgstr "" -msgid "Elasticsearch intergration. Elasticsearch AWS IAM." +msgid "Elasticsearch integration. Elasticsearch AWS IAM." msgstr "" msgid "Email" diff --git a/locale/fil_PH/gitlab.po b/locale/fil_PH/gitlab.po index c9b371f8df3..ecbf47aa928 100644 --- a/locale/fil_PH/gitlab.po +++ b/locale/fil_PH/gitlab.po @@ -561,7 +561,7 @@ msgstr "" msgid "An error occured whilst fetching the job trace." msgstr "" -msgid "An error occured whilst fetching the latest pipline." +msgid "An error occured whilst fetching the latest pipeline." msgstr "" msgid "An error occured whilst loading all the files." @@ -2777,7 +2777,7 @@ msgstr "" msgid "Elasticsearch" msgstr "" -msgid "Elasticsearch intergration. Elasticsearch AWS IAM." +msgid "Elasticsearch integration. Elasticsearch AWS IAM." msgstr "" msgid "Email" diff --git a/locale/fr/gitlab.po b/locale/fr/gitlab.po index 1925235d48a..6e89ec73142 100644 --- a/locale/fr/gitlab.po +++ b/locale/fr/gitlab.po @@ -561,7 +561,7 @@ msgstr "Une erreur est survenue lors de la création de la nouvelle branche." msgid "An error occured whilst fetching the job trace." msgstr "Une erreur est survenue pendant le rapatriement de la trace de la tâche." -msgid "An error occured whilst fetching the latest pipline." +msgid "An error occured whilst fetching the latest pipeline." msgstr "Une erreur est survenue lors du rapatriement de dernier pipeline." msgid "An error occured whilst loading all the files." @@ -2777,7 +2777,7 @@ msgstr "Modifier l’identité de %{user_name}" msgid "Elasticsearch" msgstr "Elasticsearch" -msgid "Elasticsearch intergration. Elasticsearch AWS IAM." +msgid "Elasticsearch integration. Elasticsearch AWS IAM." msgstr "Intégration d’Elasticsearch. AWS Elasticsearch IAM." 
msgid "Email" diff --git a/locale/gitlab.pot b/locale/gitlab.pot index d3e1a51370e..f18821adb5f 100644 --- a/locale/gitlab.pot +++ b/locale/gitlab.pot @@ -501,7 +501,7 @@ msgstr "" msgid "An error occured whilst fetching the job trace." msgstr "" -msgid "An error occured whilst fetching the latest pipline." +msgid "An error occured whilst fetching the latest pipeline." msgstr "" msgid "An error occured whilst loading all the files." diff --git a/locale/gl_ES/gitlab.po b/locale/gl_ES/gitlab.po index 44c47901b56..5e2dbce1104 100644 --- a/locale/gl_ES/gitlab.po +++ b/locale/gl_ES/gitlab.po @@ -561,7 +561,7 @@ msgstr "" msgid "An error occured whilst fetching the job trace." msgstr "" -msgid "An error occured whilst fetching the latest pipline." +msgid "An error occured whilst fetching the latest pipeline." msgstr "" msgid "An error occured whilst loading all the files." @@ -2777,7 +2777,7 @@ msgstr "" msgid "Elasticsearch" msgstr "" -msgid "Elasticsearch intergration. Elasticsearch AWS IAM." +msgid "Elasticsearch integration. Elasticsearch AWS IAM." msgstr "" msgid "Email" diff --git a/locale/he_IL/gitlab.po b/locale/he_IL/gitlab.po index 7cff1c5d712..9aadf885770 100644 --- a/locale/he_IL/gitlab.po +++ b/locale/he_IL/gitlab.po @@ -621,7 +621,7 @@ msgstr "" msgid "An error occured whilst fetching the job trace." msgstr "" -msgid "An error occured whilst fetching the latest pipline." +msgid "An error occured whilst fetching the latest pipeline." msgstr "" msgid "An error occured whilst loading all the files." @@ -2845,7 +2845,7 @@ msgstr "" msgid "Elasticsearch" msgstr "" -msgid "Elasticsearch intergration. Elasticsearch AWS IAM." +msgid "Elasticsearch integration. Elasticsearch AWS IAM." msgstr "" msgid "Email" diff --git a/locale/id_ID/gitlab.po b/locale/id_ID/gitlab.po index 2f3c35e4253..641886e65b0 100644 --- a/locale/id_ID/gitlab.po +++ b/locale/id_ID/gitlab.po @@ -531,7 +531,7 @@ msgstr "" msgid "An error occured whilst fetching the job trace." msgstr "" -msgid "An error occured whilst fetching the latest pipline." +msgid "An error occured whilst fetching the latest pipeline." msgstr "" msgid "An error occured whilst loading all the files." @@ -2743,7 +2743,7 @@ msgstr "" msgid "Elasticsearch" msgstr "" -msgid "Elasticsearch intergration. Elasticsearch AWS IAM." +msgid "Elasticsearch integration. Elasticsearch AWS IAM." msgstr "" msgid "Email" diff --git a/locale/it/gitlab.po b/locale/it/gitlab.po index 984a200c958..3b43d563dc5 100644 --- a/locale/it/gitlab.po +++ b/locale/it/gitlab.po @@ -561,7 +561,7 @@ msgstr "" msgid "An error occured whilst fetching the job trace." msgstr "" -msgid "An error occured whilst fetching the latest pipline." +msgid "An error occured whilst fetching the latest pipeline." msgstr "" msgid "An error occured whilst loading all the files." @@ -2777,7 +2777,7 @@ msgstr "" msgid "Elasticsearch" msgstr "" -msgid "Elasticsearch intergration. Elasticsearch AWS IAM." +msgid "Elasticsearch integration. Elasticsearch AWS IAM." msgstr "" msgid "Email" diff --git a/locale/ja/gitlab.po b/locale/ja/gitlab.po index ac940230ac2..a254bfba027 100644 --- a/locale/ja/gitlab.po +++ b/locale/ja/gitlab.po @@ -531,7 +531,7 @@ msgstr "新しいブランチの作成中にエラーが発生しました。" msgid "An error occured whilst fetching the job trace." msgstr "ジョブトレースの取得中にエラーが発生しました。" -msgid "An error occured whilst fetching the latest pipline." +msgid "An error occured whilst fetching the latest pipeline." msgstr "最新のパイプラインの取得中にエラーが発生しました。" msgid "An error occured whilst loading all the files." 
@@ -2743,7 +2743,7 @@ msgstr "%{user_name} の ID を編集する" msgid "Elasticsearch" msgstr "Elasticsearch" -msgid "Elasticsearch intergration. Elasticsearch AWS IAM." +msgid "Elasticsearch integration. Elasticsearch AWS IAM." msgstr "Elasticsearch の統合。Elasticsearch AWS IAM。" msgid "Email" diff --git a/locale/ko/gitlab.po b/locale/ko/gitlab.po index a50f5bfacef..daced0494cb 100644 --- a/locale/ko/gitlab.po +++ b/locale/ko/gitlab.po @@ -531,7 +531,7 @@ msgstr "새 브랜치를 만드는 동안 오류가 발생했습니다." msgid "An error occured whilst fetching the job trace." msgstr "" -msgid "An error occured whilst fetching the latest pipline." +msgid "An error occured whilst fetching the latest pipeline." msgstr "" msgid "An error occured whilst loading all the files." @@ -2743,7 +2743,7 @@ msgstr "" msgid "Elasticsearch" msgstr "Elasticsearch" -msgid "Elasticsearch intergration. Elasticsearch AWS IAM." +msgid "Elasticsearch integration. Elasticsearch AWS IAM." msgstr "" msgid "Email" diff --git a/locale/mn_MN/gitlab.po b/locale/mn_MN/gitlab.po index c256a2d2b1f..cf2c7224171 100644 --- a/locale/mn_MN/gitlab.po +++ b/locale/mn_MN/gitlab.po @@ -561,7 +561,7 @@ msgstr "" msgid "An error occured whilst fetching the job trace." msgstr "" -msgid "An error occured whilst fetching the latest pipline." +msgid "An error occured whilst fetching the latest pipeline." msgstr "" msgid "An error occured whilst loading all the files." @@ -2777,7 +2777,7 @@ msgstr "" msgid "Elasticsearch" msgstr "" -msgid "Elasticsearch intergration. Elasticsearch AWS IAM." +msgid "Elasticsearch integration. Elasticsearch AWS IAM." msgstr "" msgid "Email" diff --git a/locale/nb_NO/gitlab.po b/locale/nb_NO/gitlab.po index 9be8a4e8d0b..df490378e9c 100644 --- a/locale/nb_NO/gitlab.po +++ b/locale/nb_NO/gitlab.po @@ -561,7 +561,7 @@ msgstr "" msgid "An error occured whilst fetching the job trace." msgstr "" -msgid "An error occured whilst fetching the latest pipline." +msgid "An error occured whilst fetching the latest pipeline." msgstr "" msgid "An error occured whilst loading all the files." @@ -2777,7 +2777,7 @@ msgstr "" msgid "Elasticsearch" msgstr "" -msgid "Elasticsearch intergration. Elasticsearch AWS IAM." +msgid "Elasticsearch integration. Elasticsearch AWS IAM." msgstr "" msgid "Email" diff --git a/locale/nl_NL/gitlab.po b/locale/nl_NL/gitlab.po index 2ad2eaafc33..8f9c3161a26 100644 --- a/locale/nl_NL/gitlab.po +++ b/locale/nl_NL/gitlab.po @@ -561,7 +561,7 @@ msgstr "" msgid "An error occured whilst fetching the job trace." msgstr "" -msgid "An error occured whilst fetching the latest pipline." +msgid "An error occured whilst fetching the latest pipeline." msgstr "" msgid "An error occured whilst loading all the files." @@ -2777,7 +2777,7 @@ msgstr "" msgid "Elasticsearch" msgstr "" -msgid "Elasticsearch intergration. Elasticsearch AWS IAM." +msgid "Elasticsearch integration. Elasticsearch AWS IAM." msgstr "" msgid "Email" diff --git a/locale/pl_PL/gitlab.po b/locale/pl_PL/gitlab.po index a4bd1fe747b..ebe5ee77d71 100644 --- a/locale/pl_PL/gitlab.po +++ b/locale/pl_PL/gitlab.po @@ -621,7 +621,7 @@ msgstr "" msgid "An error occured whilst fetching the job trace." msgstr "" -msgid "An error occured whilst fetching the latest pipline." +msgid "An error occured whilst fetching the latest pipeline." msgstr "" msgid "An error occured whilst loading all the files." @@ -2845,7 +2845,7 @@ msgstr "" msgid "Elasticsearch" msgstr "" -msgid "Elasticsearch intergration. Elasticsearch AWS IAM." +msgid "Elasticsearch integration. Elasticsearch AWS IAM." 
msgstr "" msgid "Email" diff --git a/locale/pt_BR/gitlab.po b/locale/pt_BR/gitlab.po index 79e365319c9..39aa1256e4b 100644 --- a/locale/pt_BR/gitlab.po +++ b/locale/pt_BR/gitlab.po @@ -561,7 +561,7 @@ msgstr "Um erro ocorreu ao criar o novo branch." msgid "An error occured whilst fetching the job trace." msgstr "Ocorreu um erro ao carregar o rastro da tarefa." -msgid "An error occured whilst fetching the latest pipline." +msgid "An error occured whilst fetching the latest pipeline." msgstr "Ocorreu um erro ao carregar o último pipeline." msgid "An error occured whilst loading all the files." @@ -2777,7 +2777,7 @@ msgstr "Editar identidade para %{user_name}" msgid "Elasticsearch" msgstr "Elasticsearch" -msgid "Elasticsearch intergration. Elasticsearch AWS IAM." +msgid "Elasticsearch integration. Elasticsearch AWS IAM." msgstr "Integração com Elasticsearch. Elasticsearch AWS IAM." msgid "Email" diff --git a/locale/ro_RO/gitlab.po b/locale/ro_RO/gitlab.po index 49613a3f462..a6b70d486fb 100644 --- a/locale/ro_RO/gitlab.po +++ b/locale/ro_RO/gitlab.po @@ -591,7 +591,7 @@ msgstr "" msgid "An error occured whilst fetching the job trace." msgstr "" -msgid "An error occured whilst fetching the latest pipline." +msgid "An error occured whilst fetching the latest pipeline." msgstr "" msgid "An error occured whilst loading all the files." @@ -2811,7 +2811,7 @@ msgstr "" msgid "Elasticsearch" msgstr "" -msgid "Elasticsearch intergration. Elasticsearch AWS IAM." +msgid "Elasticsearch integration. Elasticsearch AWS IAM." msgstr "" msgid "Email" diff --git a/locale/ru/gitlab.po b/locale/ru/gitlab.po index b8a4805498b..739f96cba9a 100644 --- a/locale/ru/gitlab.po +++ b/locale/ru/gitlab.po @@ -621,7 +621,7 @@ msgstr "" msgid "An error occured whilst fetching the job trace." msgstr "" -msgid "An error occured whilst fetching the latest pipline." +msgid "An error occured whilst fetching the latest pipeline." msgstr "" msgid "An error occured whilst loading all the files." @@ -2845,7 +2845,7 @@ msgstr "" msgid "Elasticsearch" msgstr "" -msgid "Elasticsearch intergration. Elasticsearch AWS IAM." +msgid "Elasticsearch integration. Elasticsearch AWS IAM." msgstr "" msgid "Email" diff --git a/locale/sq_AL/gitlab.po b/locale/sq_AL/gitlab.po index 633b2a6bda7..75df8a29e6b 100644 --- a/locale/sq_AL/gitlab.po +++ b/locale/sq_AL/gitlab.po @@ -561,7 +561,7 @@ msgstr "" msgid "An error occured whilst fetching the job trace." msgstr "" -msgid "An error occured whilst fetching the latest pipline." +msgid "An error occured whilst fetching the latest pipeline." msgstr "" msgid "An error occured whilst loading all the files." @@ -2777,7 +2777,7 @@ msgstr "" msgid "Elasticsearch" msgstr "" -msgid "Elasticsearch intergration. Elasticsearch AWS IAM." +msgid "Elasticsearch integration. Elasticsearch AWS IAM." msgstr "" msgid "Email" diff --git a/locale/tr_TR/gitlab.po b/locale/tr_TR/gitlab.po index 723f4876053..1a8d2faf307 100644 --- a/locale/tr_TR/gitlab.po +++ b/locale/tr_TR/gitlab.po @@ -561,7 +561,7 @@ msgstr "" msgid "An error occured whilst fetching the job trace." msgstr "" -msgid "An error occured whilst fetching the latest pipline." +msgid "An error occured whilst fetching the latest pipeline." msgstr "" msgid "An error occured whilst loading all the files." @@ -2777,7 +2777,7 @@ msgstr "" msgid "Elasticsearch" msgstr "" -msgid "Elasticsearch intergration. Elasticsearch AWS IAM." +msgid "Elasticsearch integration. Elasticsearch AWS IAM." 
msgstr "" msgid "Email" diff --git a/locale/uk/gitlab.po b/locale/uk/gitlab.po index 99ba26152ee..30f1b5769d6 100644 --- a/locale/uk/gitlab.po +++ b/locale/uk/gitlab.po @@ -621,7 +621,7 @@ msgstr "Помилка при створенні нової гілки." msgid "An error occured whilst fetching the job trace." msgstr "Трапилася помилка при отриманні логу завдання." -msgid "An error occured whilst fetching the latest pipline." +msgid "An error occured whilst fetching the latest pipeline." msgstr "Трапилася помилка при отриманні даних останнього конвеєра." msgid "An error occured whilst loading all the files." @@ -2845,7 +2845,7 @@ msgstr "Редагувати ідентифікацію для %{user_name}" msgid "Elasticsearch" msgstr "Elasticsearch" -msgid "Elasticsearch intergration. Elasticsearch AWS IAM." +msgid "Elasticsearch integration. Elasticsearch AWS IAM." msgstr "Інтеграція з Elasticsearch. Elasticsearch AWS IAM." msgid "Email" diff --git a/locale/zh_CN/gitlab.po b/locale/zh_CN/gitlab.po index 2d3aeca7cc7..19fa17eaff1 100644 --- a/locale/zh_CN/gitlab.po +++ b/locale/zh_CN/gitlab.po @@ -531,7 +531,7 @@ msgstr "创建分支时发生错误。" msgid "An error occured whilst fetching the job trace." msgstr "获取作业日志时发生错误。" -msgid "An error occured whilst fetching the latest pipline." +msgid "An error occured whilst fetching the latest pipeline." msgstr "获取流水线时发生错误。" msgid "An error occured whilst loading all the files." @@ -2743,7 +2743,7 @@ msgstr "编辑 %{user_name} 的身份信息" msgid "Elasticsearch" msgstr "Elasticsearch" -msgid "Elasticsearch intergration. Elasticsearch AWS IAM." +msgid "Elasticsearch integration. Elasticsearch AWS IAM." msgstr "Elasticsearch集成及Elasticsearch AWS IAM。" msgid "Email" diff --git a/locale/zh_HK/gitlab.po b/locale/zh_HK/gitlab.po index d8fe2d5b13e..6987f307dbb 100644 --- a/locale/zh_HK/gitlab.po +++ b/locale/zh_HK/gitlab.po @@ -531,7 +531,7 @@ msgstr "" msgid "An error occured whilst fetching the job trace." msgstr "" -msgid "An error occured whilst fetching the latest pipline." +msgid "An error occured whilst fetching the latest pipeline." msgstr "" msgid "An error occured whilst loading all the files." @@ -2743,7 +2743,7 @@ msgstr "" msgid "Elasticsearch" msgstr "" -msgid "Elasticsearch intergration. Elasticsearch AWS IAM." +msgid "Elasticsearch integration. Elasticsearch AWS IAM." msgstr "" msgid "Email" diff --git a/locale/zh_TW/gitlab.po b/locale/zh_TW/gitlab.po index 1bccf6de738..4e0b07e717f 100644 --- a/locale/zh_TW/gitlab.po +++ b/locale/zh_TW/gitlab.po @@ -531,7 +531,7 @@ msgstr "創建新分支時發生錯誤。" msgid "An error occured whilst fetching the job trace." msgstr "取得工作追蹤資訊時發生錯誤" -msgid "An error occured whilst fetching the latest pipline." +msgid "An error occured whilst fetching the latest pipeline." msgstr "取得最新流水線時發生錯誤" msgid "An error occured whilst loading all the files." @@ -2743,7 +2743,7 @@ msgstr "編輯 %{user_name} 的身份" msgid "Elasticsearch" msgstr "Elasticsearch" -msgid "Elasticsearch intergration. Elasticsearch AWS IAM." +msgid "Elasticsearch integration. Elasticsearch AWS IAM." msgstr "Elasticsearch 整合。 Elasticsearch AWS IAM。" msgid "Email" diff --git a/qa/qa/page/merge_request/show.rb b/qa/qa/page/merge_request/show.rb index 2e69a89e386..2fd30e15ffb 100644 --- a/qa/qa/page/merge_request/show.rb +++ b/qa/qa/page/merge_request/show.rb @@ -55,6 +55,10 @@ module QA element :labels_block end + view 'app/views/projects/merge_requests/_mr_title.html.haml' do + element :edit_button + end + def fast_forward_possible? 
!has_text?('Fast-forward merge is not possible') end @@ -163,6 +167,10 @@ module QA all_elements(:discussion_reply).last.click fill_element :reply_input, reply_text end + + def edit! + click_element :edit_button + end end end end diff --git a/qa/qa/page/profile/personal_access_tokens.rb b/qa/qa/page/profile/personal_access_tokens.rb index 2f0202951bb..9191dbe9cf3 100644 --- a/qa/qa/page/profile/personal_access_tokens.rb +++ b/qa/qa/page/profile/personal_access_tokens.rb @@ -8,7 +8,7 @@ module QA element :scopes_api_radios, "label :scopes" # rubocop:disable QA/ElementWithPattern end - view 'app/views/profiles/personal_access_tokens/index.html.haml' do + view 'app/views/shared/_personal_access_tokens_created_container.html.haml' do element :create_token_field, "text_field_tag 'created-personal-access-token'" # rubocop:disable QA/ElementWithPattern end diff --git a/qa/qa/resource/merge_request.rb b/qa/qa/resource/merge_request.rb index 466a7942dc6..77afb3cfcba 100644 --- a/qa/qa/resource/merge_request.rb +++ b/qa/qa/resource/merge_request.rb @@ -11,7 +11,9 @@ module QA :target_branch, :assignee, :milestone, - :labels + :labels, + :file_name, + :file_content attribute :project do Project.fabricate! do |resource| @@ -35,8 +37,8 @@ module QA resource.branch_name = target_branch resource.remote_branch = source_branch resource.new_branch = false - resource.file_name = "added_file.txt" - resource.file_content = "File Added" + resource.file_name = file_name + resource.file_content = file_content end end @@ -48,6 +50,8 @@ module QA @assignee = nil @milestone = nil @labels = [] + @file_name = "added_file.txt" + @file_content = "File Added" end def fabricate! diff --git a/qa/qa/specs/features/browser_ui/3_create/repository/protocol_v2_push_ssh_spec.rb b/qa/qa/specs/features/browser_ui/3_create/repository/protocol_v2_push_ssh_spec.rb index bc88e6450f5..135925c007f 100644 --- a/qa/qa/specs/features/browser_ui/3_create/repository/protocol_v2_push_ssh_spec.rb +++ b/qa/qa/specs/features/browser_ui/3_create/repository/protocol_v2_push_ssh_spec.rb @@ -10,7 +10,7 @@ module QA let(:key_title) { "key for ssh tests #{Time.now.to_f}" } let(:ssh_key) do - Factory::Resource::SSHKey.fabricate! do |resource| + Resource::SSHKey.fabricate! do |resource| resource.title = key_title end end @@ -38,7 +38,7 @@ module QA it 'user pushes to the repository' do # Create a project to push to - project = Factory::Resource::Project.fabricate! do |project| + project = Resource::Project.fabricate! do |project| project.name = 'git-protocol-project' end diff --git a/scripts/build_assets_image b/scripts/build_assets_image index 1d77524d503..4e5ef977161 100755 --- a/scripts/build_assets_image +++ b/scripts/build_assets_image @@ -1,5 +1,11 @@ #!/bin/bash +# Exit early if we don't want to build the image +if [[ "${BUILD_ASSETS_IMAGE}" != "true" ]] +then + exit 0 +fi + # Generate the image name based on the project this is being run in ASSETS_IMAGE_NAME=$(echo ${CI_PROJECT_NAME} | awk '{ diff --git a/scripts/rails4-gemfile-lock-check b/scripts/rails4-gemfile-lock-check new file mode 100755 index 00000000000..a74a49874e1 --- /dev/null +++ b/scripts/rails4-gemfile-lock-check @@ -0,0 +1,19 @@ +#!/usr/bin/env bash + +echo -e "=> Checking if Gemfile.rails4.lock is up-to-date...\\n" + +cp Gemfile.rails4.lock Gemfile.rails4.lock.orig +BUNDLE_GEMFILE=Gemfile.rails4 bundle install "$BUNDLE_INSTALL_FLAGS" +diff -u Gemfile.rails4.lock.orig Gemfile.rails4.lock >/dev/null 2>&1 + +if [ $? 
== 1 ] +then + diff -u Gemfile.rails4.lock.orig Gemfile.rails4.lock + + echo -e "\\n✖ ERROR: Gemfile.rails4.lock is not up-to-date! + Please run 'BUNDLE_GEMFILE=Gemfile.rails4 bundle install'\\n" >&2 + exit 1 +fi + +echo "✔ Gemfile.rails4.lock is up-to-date" +exit 0 diff --git a/scripts/rails5-gemfile-lock-check b/scripts/rails5-gemfile-lock-check deleted file mode 100755 index da6f1b7145e..00000000000 --- a/scripts/rails5-gemfile-lock-check +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env bash - -echo -e "=> Checking if Gemfile.rails5.lock is up-to-date...\\n" - -cp Gemfile.rails5.lock Gemfile.rails5.lock.orig -BUNDLE_GEMFILE=Gemfile.rails5 bundle install "$BUNDLE_INSTALL_FLAGS" -diff -u Gemfile.rails5.lock.orig Gemfile.rails5.lock >/dev/null 2>&1 - -if [ $? == 1 ] -then - diff -u Gemfile.rails5.lock.orig Gemfile.rails5.lock - - echo -e "\\n✖ ERROR: Gemfile.rails5.lock is not up-to-date! - Please run 'BUNDLE_GEMFILE=Gemfile.rails5 bundle install'\\n" >&2 - exit 1 -fi - -echo "✔ Gemfile.rails5.lock is up-to-date" -exit 0 diff --git a/scripts/review_apps/review-apps.sh b/scripts/review_apps/review-apps.sh index b180860899a..00e23f12bc0 100755 --- a/scripts/review_apps/review-apps.sh +++ b/scripts/review_apps/review-apps.sh @@ -179,21 +179,35 @@ function delete() { track="${1-stable}" name="$CI_ENVIRONMENT_SLUG" + if [ -z "$CI_ENVIRONMENT_SLUG" ]; then + echo "No release given, aborting the delete!" + return + fi + if [[ "$track" != "stable" ]]; then name="$name-$track" fi + if ! deployExists "${KUBE_NAMESPACE}" "${name}"; then + echo "The release $name doesn't exist, aborting the cleanup!" + return + fi + echo "Deleting release '$name'..." helm delete --purge "$name" || true } function cleanup() { - echo "Cleaning up $CI_ENVIRONMENT_SLUG..." - kubectl -n "$KUBE_NAMESPACE" get ingress,svc,pdb,hpa,deploy,statefulset,job,pod,secret,configmap,pvc,secret,clusterrole,clusterrolebinding,role,rolebinding,sa 2>&1 \ - | grep "$CI_ENVIRONMENT_SLUG" \ - | awk '{print $1}' \ - | xargs kubectl -n "$KUBE_NAMESPACE" delete \ - || true + if [ -z "$CI_ENVIRONMENT_SLUG" ]; then + echo "No release given, aborting the delete!" + return + fi + + echo "Cleaning up '$CI_ENVIRONMENT_SLUG'..." 
+ kubectl -n "$KUBE_NAMESPACE" delete \ + ingress,svc,pdb,hpa,deploy,statefulset,job,pod,secret,configmap,pvc,secret,clusterrole,clusterrolebinding,role,rolebinding,sa \ + -l release="$CI_ENVIRONMENT_SLUG" \ + || true } function install_external_dns() { diff --git a/spec/controllers/concerns/send_file_upload_spec.rb b/spec/controllers/concerns/send_file_upload_spec.rb index 4f1f6bb31f3..379b2d6b935 100644 --- a/spec/controllers/concerns/send_file_upload_spec.rb +++ b/spec/controllers/concerns/send_file_upload_spec.rb @@ -76,7 +76,7 @@ describe SendFileUpload do it 'sends a file with a custom type' do headers = double - expected_headers = %r(response-content-disposition=attachment%3Bfilename%3D%22test.js%22&response-content-type=application/javascript) + expected_headers = %r(response-content-disposition=attachment%3Bfilename%3D%22test.js%22&response-content-type=application/ecmascript) expect(Gitlab::Workhorse).to receive(:send_url).with(expected_headers).and_call_original expect(headers).to receive(:store).with(Gitlab::Workhorse::SEND_DATA_HEADER, /^send-url:/) diff --git a/spec/controllers/profiles/personal_access_tokens_controller_spec.rb b/spec/controllers/profiles/personal_access_tokens_controller_spec.rb index ed08a4c1bf2..f5860d4296b 100644 --- a/spec/controllers/profiles/personal_access_tokens_controller_spec.rb +++ b/spec/controllers/profiles/personal_access_tokens_controller_spec.rb @@ -39,8 +39,10 @@ describe Profiles::PersonalAccessTokensController do let!(:active_personal_access_token) { create(:personal_access_token, user: user) } let!(:inactive_personal_access_token) { create(:personal_access_token, :revoked, user: user) } let!(:impersonation_personal_access_token) { create(:personal_access_token, :impersonation, user: user) } + let(:token_value) { 's3cr3t' } before do + PersonalAccessToken.redis_store!(user.id, token_value) get :index end @@ -56,5 +58,9 @@ describe Profiles::PersonalAccessTokensController do expect(assigns(:active_personal_access_tokens)).not_to include(impersonation_personal_access_token) expect(assigns(:inactive_personal_access_tokens)).not_to include(impersonation_personal_access_token) end + + it "retrieves newly created personal access token value" do + expect(assigns(:new_personal_access_token)).to eql(token_value) + end end end diff --git a/spec/controllers/projects/blob_controller_spec.rb b/spec/controllers/projects/blob_controller_spec.rb index 28f7e4634a5..5fdf7f1229d 100644 --- a/spec/controllers/projects/blob_controller_spec.rb +++ b/spec/controllers/projects/blob_controller_spec.rb @@ -157,7 +157,7 @@ describe Projects::BlobController do match_line = JSON.parse(response.body).first - expect(match_line['type']).to eq('context') + expect(match_line['type']).to be_nil end it 'adds bottom match line when "t"o is less than blob size' do @@ -177,7 +177,7 @@ describe Projects::BlobController do match_line = JSON.parse(response.body).last - expect(match_line['type']).to eq('context') + expect(match_line['type']).to be_nil end end end diff --git a/spec/controllers/registrations_controller_spec.rb b/spec/controllers/registrations_controller_spec.rb index 898f3863008..d334a2ff566 100644 --- a/spec/controllers/registrations_controller_spec.rb +++ b/spec/controllers/registrations_controller_spec.rb @@ -49,7 +49,7 @@ describe RegistrationsController do end it 'displays an error when the reCAPTCHA is not solved' do - # Without this, `verify_recaptcha` arbitraily returns true in test env + # Without this, `verify_recaptcha` arbitrarily returns true in test env 
Recaptcha.configuration.skip_verify_env.delete('test') post(:create, user_params) diff --git a/spec/controllers/sessions_controller_spec.rb b/spec/controllers/sessions_controller_spec.rb index 8e25b61e2f1..c691b3f478b 100644 --- a/spec/controllers/sessions_controller_spec.rb +++ b/spec/controllers/sessions_controller_spec.rb @@ -89,7 +89,7 @@ describe SessionsController do end it 'displays an error when the reCAPTCHA is not solved' do - # Without this, `verify_recaptcha` arbitraily returns true in test env + # Without this, `verify_recaptcha` arbitrarily returns true in test env Recaptcha.configuration.skip_verify_env.delete('test') counter = double(:counter) diff --git a/spec/factories/clusters/kubernetes_namespaces.rb b/spec/factories/clusters/kubernetes_namespaces.rb index 3f10f0ecc74..3a4f5193550 100644 --- a/spec/factories/clusters/kubernetes_namespaces.rb +++ b/spec/factories/clusters/kubernetes_namespaces.rb @@ -13,7 +13,7 @@ FactoryBot.define do end trait :with_token do - service_account_token { Faker::Lorem.characters(10) } + service_account_token { FFaker::Lorem.characters(10) } end end end diff --git a/spec/fast_spec_helper.rb b/spec/fast_spec_helper.rb index fe475e1f7a0..0b5ab16ad71 100644 --- a/spec/fast_spec_helper.rb +++ b/spec/fast_spec_helper.rb @@ -9,3 +9,4 @@ require 'active_support/all' ActiveSupport::Dependencies.autoload_paths << 'lib' ActiveSupport::Dependencies.autoload_paths << 'ee/lib' +ActiveSupport::XmlMini.backend = 'Nokogiri' diff --git a/spec/features/admin/admin_users_impersonation_tokens_spec.rb b/spec/features/admin/admin_users_impersonation_tokens_spec.rb index e16eae219a4..c7860bebb06 100644 --- a/spec/features/admin/admin_users_impersonation_tokens_spec.rb +++ b/spec/features/admin/admin_users_impersonation_tokens_spec.rb @@ -12,6 +12,10 @@ describe 'Admin > Users > Impersonation Tokens', :js do find(".settings-message") end + def created_impersonation_token + find("#created-personal-access-token").value + end + before do sign_in(admin) end @@ -39,6 +43,7 @@ describe 'Admin > Users > Impersonation Tokens', :js do expect(active_impersonation_tokens).to have_text('api') expect(active_impersonation_tokens).to have_text('read_user') expect(PersonalAccessTokensFinder.new(impersonation: true).execute.count).to equal(1) + expect(created_impersonation_token).not_to be_empty end end diff --git a/spec/features/issues/form_spec.rb b/spec/features/issues/form_spec.rb index 1456a2f0375..f2e4c5779df 100644 --- a/spec/features/issues/form_spec.rb +++ b/spec/features/issues/form_spec.rb @@ -27,7 +27,7 @@ describe 'New/edit issue', :js do before do # Using `allow_any_instance_of`/`and_wrap_original`, `original` would # somehow refer to the very block we defined to _wrap_ that method, instead of - # the original method, resulting in infinite recurison when called. + # the original method, resulting in infinite recursion when called. # This is likely a bug with helper modules included into dynamically generated view classes. # To work around this, we have to hold on to and call to the original implementation manually. 
original_issue_dropdown_options = FormHelper.instance_method(:issue_assignees_dropdown_options) diff --git a/spec/features/issues/user_creates_branch_and_merge_request_spec.rb b/spec/features/issues/user_creates_branch_and_merge_request_spec.rb index 3dfcbc2fcb8..297cd808460 100644 --- a/spec/features/issues/user_creates_branch_and_merge_request_spec.rb +++ b/spec/features/issues/user_creates_branch_and_merge_request_spec.rb @@ -55,11 +55,11 @@ describe 'User creates branch and merge request on issue page', :js do test_branch_name_checking(input_branch_name) test_source_checking(input_source) - # The button inside dropdown should be disabled if any errors occured. + # The button inside dropdown should be disabled if any errors occurred. expect(page).to have_button('Create branch', disabled: true) end - # The top level button should be disabled if any errors occured. + # The top level button should be disabled if any errors occurred. expect(page).to have_button('Create branch', disabled: true) end diff --git a/spec/features/merge_request/user_posts_diff_notes_spec.rb b/spec/features/merge_request/user_posts_diff_notes_spec.rb index fa148715855..51b78d3e7d1 100644 --- a/spec/features/merge_request/user_posts_diff_notes_spec.rb +++ b/spec/features/merge_request/user_posts_diff_notes_spec.rb @@ -85,12 +85,13 @@ describe 'Merge request > User posts diff notes', :js do # `.line_holder` will be an unfolded line. let(:line_holder) { first('#a5cc2925ca8258af241be7e5b0381edf30266302 .line_holder') } - it 'does not allow commenting on the left side' do - should_not_allow_commenting(line_holder, 'left') + it 'allows commenting on the left side' do + should_allow_commenting(line_holder, 'left') end - it 'does not allow commenting on the right side' do - should_not_allow_commenting(line_holder, 'right') + it 'allows commenting on the right side' do + # Automatically shifts comment box to left side. + should_allow_commenting(line_holder, 'right') end end end @@ -147,8 +148,8 @@ describe 'Merge request > User posts diff notes', :js do # `.line_holder` will be an unfolded line. 
let(:line_holder) { first('.line_holder[id="a5cc2925ca8258af241be7e5b0381edf30266302_1_1"]') } - it 'does not allow commenting' do - should_not_allow_commenting line_holder + it 'allows commenting' do + should_allow_commenting line_holder end end diff --git a/spec/features/profiles/personal_access_tokens_spec.rb b/spec/features/profiles/personal_access_tokens_spec.rb index 8461cd0027c..dee213a11d4 100644 --- a/spec/features/profiles/personal_access_tokens_spec.rb +++ b/spec/features/profiles/personal_access_tokens_spec.rb @@ -43,10 +43,12 @@ describe 'Profile > Personal Access Tokens', :js do check "read_user" click_on "Create personal access token" + expect(active_personal_access_tokens).to have_text(name) expect(active_personal_access_tokens).to have_text('In') expect(active_personal_access_tokens).to have_text('api') expect(active_personal_access_tokens).to have_text('read_user') + expect(created_personal_access_token).not_to be_empty end context "when creation fails" do @@ -57,6 +59,7 @@ describe 'Profile > Personal Access Tokens', :js do expect { click_on "Create personal access token" }.not_to change { PersonalAccessToken.count } expect(page).to have_content("Name cannot be nil") + expect(page).not_to have_selector("#created-personal-access-token") end end end diff --git a/spec/features/projects/jobs_spec.rb b/spec/features/projects/jobs_spec.rb index a1323699969..99a7fbb63bd 100644 --- a/spec/features/projects/jobs_spec.rb +++ b/spec/features/projects/jobs_spec.rb @@ -719,7 +719,7 @@ describe 'Jobs', :clean_gitlab_redis_shared_state do context 'on mobile', :js do let(:job) { create(:ci_build, pipeline: pipeline) } - it 'renders collpased sidebar' do + it 'renders collapsed sidebar' do page.current_window.resize_to(600, 800) visit project_job_path(project, job) @@ -738,7 +738,7 @@ describe 'Jobs', :clean_gitlab_redis_shared_state do wait_for_requests expect(page).to have_css('.js-job-sidebar.right-sidebar-expanded') - expect(page).not_to have_css('.js-job-sidebar.right-sidebar-collpased') + expect(page).not_to have_css('.js-job-sidebar.right-sidebar-collapsed') end end diff --git a/spec/finders/pipelines_finder_spec.rb b/spec/finders/pipelines_finder_spec.rb index c6e832ad69b..c2c304589c9 100644 --- a/spec/finders/pipelines_finder_spec.rb +++ b/spec/finders/pipelines_finder_spec.rb @@ -225,7 +225,7 @@ describe PipelinesFinder do end end - context 'when the project has limited access to piplines' do + context 'when the project has limited access to pipelines' do let(:project) { create(:project, :private, :repository) } let(:current_user) { create(:user) } let!(:pipelines) { create_list(:ci_pipeline, 2, project: project) } diff --git a/spec/fixtures/trace/sample_trace b/spec/fixtures/trace/sample_trace index 7bfe3f83b7b..3d8beb0dec2 100644 --- a/spec/fixtures/trace/sample_trace +++ b/spec/fixtures/trace/sample_trace @@ -2334,12 +2334,12 @@ Boards::Lists::MoveService keeps position of lists when list type is closed when list type is set to label keeps position of lists when new position is nil - keeps position of lists when new positon is equal to old position - keeps position of lists when new positon is negative - keeps position of lists when new positon is equal to number of labels lists - keeps position of lists when new positon is greater than number of labels lists - increments position of intermediate lists when new positon is equal to first position - decrements position of intermediate lists when new positon is equal to last position + keeps position of lists when new position 
is equal to old position + keeps position of lists when new position is negative + keeps position of lists when new position is equal to number of labels lists + keeps position of lists when new position is greater than number of labels lists + increments position of intermediate lists when new position is equal to first position + decrements position of intermediate lists when new position is equal to last position decrements position of intermediate lists when new position is greater than old position increments position of intermediate lists when new position is lower than old position when board parent is a group @@ -2347,12 +2347,12 @@ Boards::Lists::MoveService keeps position of lists when list type is closed when list type is set to label keeps position of lists when new position is nil - keeps position of lists when new positon is equal to old position - keeps position of lists when new positon is negative - keeps position of lists when new positon is equal to number of labels lists - keeps position of lists when new positon is greater than number of labels lists - increments position of intermediate lists when new positon is equal to first position - decrements position of intermediate lists when new positon is equal to last position + keeps position of lists when new position is equal to old position + keeps position of lists when new position is negative + keeps position of lists when new position is equal to number of labels lists + keeps position of lists when new position is greater than number of labels lists + increments position of intermediate lists when new position is equal to first position + decrements position of intermediate lists when new position is equal to last position decrements position of intermediate lists when new position is greater than old position increments position of intermediate lists when new position is lower than old position diff --git a/spec/helpers/profiles_helper_spec.rb b/spec/helpers/profiles_helper_spec.rb index 9a2372de69f..8e336469c27 100644 --- a/spec/helpers/profiles_helper_spec.rb +++ b/spec/helpers/profiles_helper_spec.rb @@ -4,12 +4,17 @@ describe ProfilesHelper do describe '#commit_email_select_options' do it 'returns an array with private commit email along with all the verified emails' do user = create(:user) + create(:email, user: user) + confirmed_email1 = create(:email, :confirmed, user: user) + confirmed_email2 = create(:email, :confirmed, user: user) + private_email = user.private_commit_email - verified_emails = user.verified_emails - [private_email] emails = [ ["Use a private email - #{private_email}", Gitlab::PrivateCommitEmail::TOKEN], - verified_emails + user.email, + confirmed_email1.email, + confirmed_email2.email ] expect(helper.commit_email_select_options(user)).to match_array(emails) diff --git a/spec/javascripts/diffs/components/commit_item_spec.js b/spec/javascripts/diffs/components/commit_item_spec.js index 7606847ada9..8b2ca6506c4 100644 --- a/spec/javascripts/diffs/components/commit_item_spec.js +++ b/spec/javascripts/diffs/components/commit_item_spec.js @@ -21,7 +21,7 @@ const getAvatarElement = vm => vm.$el.querySelector('.user-avatar-link'); const getCommitterElement = vm => vm.$el.querySelector('.commiter'); const getCommitActionsElement = vm => vm.$el.querySelector('.commit-actions'); -describe('diffs/components/commit_widget', () => { +describe('diffs/components/commit_item', () => { const Component = Vue.extend(CommitItem); const timeago = getTimeago(); const { commit } = getDiffWithCommit(); @@ -37,15 
+37,15 @@ describe('diffs/components/commit_widget', () => { it('renders commit title', () => { const titleElement = getTitleElement(vm); - expect(titleElement).toHaveAttr('href', commit.commitUrl); - expect(titleElement).toHaveText(commit.titleHtml); + expect(titleElement).toHaveAttr('href', commit.commit_url); + expect(titleElement).toHaveText(commit.title_html); }); it('renders commit description', () => { const descElement = getDescElement(vm); const descExpandElement = getDescExpandElement(vm); - const expected = commit.descriptionHtml.replace(/
\n/g, ''); + const expected = commit.description_html.replace(/\n
/g, ''); expect(trimText(descElement.innerHTML)).toEqual(trimText(expected)); expect(descExpandElement).not.toBeNull(); @@ -56,7 +56,7 @@ describe('diffs/components/commit_widget', () => { const labelElement = shaElement.querySelector('.label'); const buttonElement = shaElement.querySelector('button'); - expect(labelElement.textContent).toEqual(commit.shortId); + expect(labelElement.textContent).toEqual(commit.short_id); expect(buttonElement).toHaveData('clipboard-text', commit.id); }); @@ -64,27 +64,27 @@ describe('diffs/components/commit_widget', () => { const avatarElement = getAvatarElement(vm); const imgElement = avatarElement.querySelector('img'); - expect(avatarElement).toHaveAttr('href', commit.author.webUrl); + expect(avatarElement).toHaveAttr('href', commit.author.web_url); expect(imgElement).toHaveClass('s36'); expect(imgElement).toHaveAttr('alt', commit.author.name); - expect(imgElement).toHaveAttr('src', commit.author.avatarUrl); + expect(imgElement).toHaveAttr('src', commit.author.avatar_url); }); it('renders committer text', () => { const committerElement = getCommitterElement(vm); const nameElement = committerElement.querySelector('a'); - const expectTimeText = timeago.format(commit.authoredDate); + const expectTimeText = timeago.format(commit.authored_date); const expectedText = `${commit.author.name} authored ${expectTimeText}`; expect(trimText(committerElement.textContent)).toEqual(expectedText); - expect(nameElement).toHaveAttr('href', commit.author.webUrl); + expect(nameElement).toHaveAttr('href', commit.author.web_url); expect(nameElement).toHaveText(commit.author.name); }); describe('without commit description', () => { beforeEach(done => { - vm.commit.descriptionHtml = ''; + vm.commit.description_html = ''; vm.$nextTick() .then(done) @@ -103,9 +103,9 @@ describe('diffs/components/commit_widget', () => { describe('with no matching user', () => { beforeEach(done => { vm.commit.author = null; - vm.commit.authorEmail = TEST_AUTHOR_EMAIL; - vm.commit.authorName = TEST_AUTHOR_NAME; - vm.commit.authorGravatarUrl = TEST_AUTHOR_GRAVATAR; + vm.commit.author_email = TEST_AUTHOR_EMAIL; + vm.commit.author_name = TEST_AUTHOR_NAME; + vm.commit.author_gravatar_url = TEST_AUTHOR_GRAVATAR; vm.$nextTick() .then(done) @@ -132,7 +132,7 @@ describe('diffs/components/commit_widget', () => { describe('with signature', () => { beforeEach(done => { - vm.commit.signatureHtml = TEST_SIGNATURE_HTML; + vm.commit.signature_html = TEST_SIGNATURE_HTML; vm.$nextTick() .then(done) @@ -148,7 +148,7 @@ describe('diffs/components/commit_widget', () => { describe('with pipeline status', () => { beforeEach(done => { - vm.commit.pipelineStatusPath = TEST_PIPELINE_STATUS_PATH; + vm.commit.pipeline_status_path = TEST_PIPELINE_STATUS_PATH; vm.$nextTick() .then(done) diff --git a/spec/javascripts/diffs/components/commit_widget_spec.js b/spec/javascripts/diffs/components/commit_widget_spec.js index 951eb57255d..2b60bd232ed 100644 --- a/spec/javascripts/diffs/components/commit_widget_spec.js +++ b/spec/javascripts/diffs/components/commit_widget_spec.js @@ -19,6 +19,6 @@ describe('diffs/components/commit_widget', () => { const commitElement = vm.$el.querySelector('li.commit'); expect(commitElement).not.toBeNull(); - expect(commitElement).toContainText(commit.shortId); + expect(commitElement).toContainText(commit.short_id); }); }); diff --git a/spec/javascripts/diffs/components/diff_content_spec.js b/spec/javascripts/diffs/components/diff_content_spec.js index 36bd042f3c4..c25f6167163 100644 --- 
a/spec/javascripts/diffs/components/diff_content_spec.js +++ b/spec/javascripts/diffs/components/diff_content_spec.js @@ -56,14 +56,14 @@ describe('DiffContent', () => { describe('image diff', () => { beforeEach(done => { - vm.diffFile.newPath = GREEN_BOX_IMAGE_URL; - vm.diffFile.newSha = 'DEF'; - vm.diffFile.oldPath = RED_BOX_IMAGE_URL; - vm.diffFile.oldSha = 'ABC'; - vm.diffFile.viewPath = ''; + vm.diffFile.new_path = GREEN_BOX_IMAGE_URL; + vm.diffFile.new_sha = 'DEF'; + vm.diffFile.old_path = RED_BOX_IMAGE_URL; + vm.diffFile.old_sha = 'ABC'; + vm.diffFile.view_path = ''; vm.diffFile.discussions = [{ ...discussionsMockData }]; vm.$store.state.diffs.commentForms.push({ - fileHash: vm.diffFile.fileHash, + fileHash: vm.diffFile.file_hash, x: 10, y: 20, width: 100, @@ -113,10 +113,10 @@ describe('DiffContent', () => { describe('file diff', () => { it('should have download buttons in place', done => { const el = vm.$el; - vm.diffFile.newPath = 'test.abc'; - vm.diffFile.newSha = 'DEF'; - vm.diffFile.oldPath = 'test.abc'; - vm.diffFile.oldSha = 'ABC'; + vm.diffFile.new_path = 'test.abc'; + vm.diffFile.new_sha = 'DEF'; + vm.diffFile.old_path = 'test.abc'; + vm.diffFile.old_sha = 'ABC'; vm.$nextTick(() => { expect(el.querySelectorAll('.js-diff-inline-view').length).toEqual(0); diff --git a/spec/javascripts/diffs/components/diff_file_header_spec.js b/spec/javascripts/diffs/components/diff_file_header_spec.js index 0192d583c6c..9530b50c729 100644 --- a/spec/javascripts/diffs/components/diff_file_header_spec.js +++ b/spec/javascripts/diffs/components/diff_file_header_spec.js @@ -3,7 +3,6 @@ import Vuex from 'vuex'; import diffsModule from '~/diffs/store/modules'; import notesModule from '~/notes/stores/modules'; import DiffFileHeader from '~/diffs/components/diff_file_header.vue'; -import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils'; import { mountComponentWithStore } from 'spec/helpers/vue_mount_component_helper'; Vue.use(Vuex); @@ -24,9 +23,9 @@ describe('diff_file_header', () => { }); beforeEach(() => { - const diffFile = convertObjectPropsToCamelCase(diffDiscussionMock.diff_file, { deep: true }); + const diffFile = diffDiscussionMock.diff_file; props = { - diffFile, + diffFile: { ...diffFile }, canCurrentUserFork: false, }; }); @@ -62,8 +61,8 @@ describe('diff_file_header', () => { beforeEach(() => { props.discussionPath = 'link://to/discussion'; Object.assign(props.diffFile, { - submoduleLink: 'link://to/submodule', - submoduleTreeUrl: 'some://tree/url', + submodule_link: 'link://to/submodule', + submodule_tree_url: 'some://tree/url', }); }); @@ -80,18 +79,18 @@ describe('diff_file_header', () => { vm = mountComponentWithStore(Component, { props, store }); - expect(vm.titleLink).toBe(props.diffFile.submoduleTreeUrl); + expect(vm.titleLink).toBe(props.diffFile.submodule_tree_url); }); it('returns the submoduleLink for submodules without submoduleTreeUrl', () => { Object.assign(props.diffFile, { submodule: true, - submoduleTreeUrl: null, + submodule_tree_url: null, }); vm = mountComponentWithStore(Component, { props, store }); - expect(vm.titleLink).toBe(props.diffFile.submoduleLink); + expect(vm.titleLink).toBe(props.diffFile.submodule_link); }); it('sets the correct path to the discussion', () => { @@ -107,7 +106,7 @@ describe('diff_file_header', () => { beforeEach(() => { Object.assign(props.diffFile, { blob: { id: 'b10b1db10b1d' }, - filePath: 'path/to/file', + file_path: 'path/to/file', }); }); @@ -116,7 +115,7 @@ describe('diff_file_header', () => { vm = 
mountComponentWithStore(Component, { props, store }); - expect(vm.filePath).toBe(props.diffFile.filePath); + expect(vm.filePath).toBe(props.diffFile.file_path); }); it('appends the truncated blob id for submodules', () => { @@ -125,14 +124,14 @@ describe('diff_file_header', () => { vm = mountComponentWithStore(Component, { props, store }); expect(vm.filePath).toBe( - `${props.diffFile.filePath} @ ${props.diffFile.blob.id.substr(0, 8)}`, + `${props.diffFile.file_path} @ ${props.diffFile.blob.id.substr(0, 8)}`, ); }); }); describe('titleTag', () => { it('returns a link tag if fileHash is set', () => { - props.diffFile.fileHash = 'some hash'; + props.diffFile.file_hash = 'some hash'; vm = mountComponentWithStore(Component, { props, store }); @@ -140,7 +139,7 @@ describe('diff_file_header', () => { }); it('returns a span tag if fileHash is not set', () => { - props.diffFile.fileHash = null; + props.diffFile.file_hash = null; vm = mountComponentWithStore(Component, { props, store }); @@ -151,8 +150,8 @@ describe('diff_file_header', () => { describe('isUsingLfs', () => { beforeEach(() => { Object.assign(props.diffFile, { - storedExternally: true, - externalStorage: 'lfs', + stored_externally: true, + external_storage: 'lfs', }); }); @@ -163,7 +162,7 @@ describe('diff_file_header', () => { }); it('returns false if file is not stored externally', () => { - props.diffFile.storedExternally = false; + props.diffFile.stored_externally = false; vm = mountComponentWithStore(Component, { props, store }); @@ -171,7 +170,7 @@ describe('diff_file_header', () => { }); it('returns false if file is not stored in LFS', () => { - props.diffFile.externalStorage = 'not lfs'; + props.diffFile.external_storage = 'not lfs'; vm = mountComponentWithStore(Component, { props, store }); @@ -200,7 +199,7 @@ describe('diff_file_header', () => { describe('viewFileButtonText', () => { it('contains the truncated content SHA', () => { const dummySha = 'deebd00f is no SHA'; - props.diffFile.contentSha = dummySha; + props.diffFile.content_sha = dummySha; vm = mountComponentWithStore(Component, { props, store }); @@ -212,7 +211,7 @@ describe('diff_file_header', () => { describe('viewReplacedFileButtonText', () => { it('contains the truncated base SHA', () => { const dummySha = 'deadabba sings no more'; - props.diffFile.diffRefs.baseSha = dummySha; + props.diffFile.diff_refs.base_sha = dummySha; vm = mountComponentWithStore(Component, { props, store }); @@ -281,32 +280,32 @@ describe('diff_file_header', () => { const filePaths = () => vm.$el.querySelectorAll('.file-title-name'); it('displays the path of a added file', () => { - props.diffFile.renamedFile = false; + props.diffFile.renamed_file = false; vm = mountComponentWithStore(Component, { props, store }); expect(filePaths()).toHaveLength(1); - expect(filePaths()[0]).toHaveText(props.diffFile.filePath); + expect(filePaths()[0]).toHaveText(props.diffFile.file_path); }); it('displays path for deleted file', () => { - props.diffFile.renamedFile = false; - props.diffFile.deletedFile = true; + props.diffFile.renamed_file = false; + props.diffFile.deleted_file = true; vm = mountComponentWithStore(Component, { props, store }); expect(filePaths()).toHaveLength(1); - expect(filePaths()[0]).toHaveText(`${props.diffFile.filePath} deleted`); + expect(filePaths()[0]).toHaveText(`${props.diffFile.file_path} deleted`); }); it('displays old and new path if the file was renamed', () => { - props.diffFile.renamedFile = true; + props.diffFile.renamed_file = true; vm = 
mountComponentWithStore(Component, { props, store }); expect(filePaths()).toHaveLength(2); - expect(filePaths()[0]).toHaveText(props.diffFile.oldPath); - expect(filePaths()[1]).toHaveText(props.diffFile.newPath); + expect(filePaths()[0]).toHaveText(props.diffFile.old_path); + expect(filePaths()[1]).toHaveText(props.diffFile.new_path); }); }); @@ -323,19 +322,19 @@ describe('diff_file_header', () => { describe('file mode', () => { it('it displays old and new file mode if it changed', () => { - props.diffFile.modeChanged = true; + props.diffFile.mode_changed = true; vm = mountComponentWithStore(Component, { props, store }); const { fileMode } = vm.$refs; expect(fileMode).not.toBe(undefined); - expect(fileMode).toContainText(props.diffFile.aMode); - expect(fileMode).toContainText(props.diffFile.bMode); + expect(fileMode).toContainText(props.diffFile.a_mode); + expect(fileMode).toContainText(props.diffFile.b_mode); }); it('does not display the file mode if it has not changed', () => { - props.diffFile.modeChanged = false; + props.diffFile.mode_changed = false; vm = mountComponentWithStore(Component, { props, store }); @@ -350,8 +349,8 @@ describe('diff_file_header', () => { it('displays the LFS label for files stored in LFS', () => { Object.assign(props.diffFile, { - storedExternally: true, - externalStorage: 'lfs', + stored_externally: true, + external_storage: 'lfs', }); vm = mountComponentWithStore(Component, { props, store }); @@ -361,7 +360,7 @@ describe('diff_file_header', () => { }); it('does not display the LFS label for files stored in repository', () => { - props.diffFile.storedExternally = false; + props.diffFile.stored_externally = false; vm = mountComponentWithStore(Component, { props, store }); @@ -378,7 +377,7 @@ describe('diff_file_header', () => { it('should show edit button when file is editable', () => { props.addMergeRequestButtons = true; - props.diffFile.editPath = '/'; + props.diffFile.edit_path = '/'; vm = mountComponentWithStore(Component, { props, store }); expect(vm.$el.querySelector('.js-edit-blob')).toContainText('Edit'); @@ -386,8 +385,8 @@ describe('diff_file_header', () => { it('should not show edit button when file is deleted', () => { props.addMergeRequestButtons = true; - props.diffFile.deletedFile = true; - props.diffFile.editPath = '/'; + props.diffFile.deleted_file = true; + props.diffFile.edit_path = '/'; vm = mountComponentWithStore(Component, { props, store }); expect(vm.$el.querySelector('.js-edit-blob')).toEqual(null); @@ -397,7 +396,7 @@ describe('diff_file_header', () => { describe('addMergeRequestButtons', () => { beforeEach(() => { props.addMergeRequestButtons = true; - props.diffFile.editPath = ''; + props.diffFile.edit_path = ''; }); describe('view on environment button', () => { @@ -405,8 +404,8 @@ describe('diff_file_header', () => { const title = 'url.title'; it('displays link to external url', () => { - props.diffFile.externalUrl = url; - props.diffFile.formattedExternalUrl = title; + props.diffFile.external_url = url; + props.diffFile.formatted_external_url = title; vm = mountComponentWithStore(Component, { props, store }); @@ -415,8 +414,8 @@ describe('diff_file_header', () => { }); it('hides link if no external url', () => { - props.diffFile.externalUrl = ''; - props.diffFile.formattedExternalUrl = title; + props.diffFile.external_url = ''; + props.diffFile.formattedExternal_url = title; vm = mountComponentWithStore(Component, { props, store }); @@ -434,11 +433,11 @@ describe('diff_file_header', () => { path: 'lib/base.js', name: 
'base.js', mode: '100644', - readableText: true, + readable_text: true, icon: 'file-text-o', }; propsCopy.addMergeRequestButtons = true; - propsCopy.diffFile.deletedFile = true; + propsCopy.diffFile.deleted_file = true; vm = mountComponentWithStore(Component, { props: propsCopy, @@ -459,11 +458,11 @@ describe('diff_file_header', () => { path: 'lib/base.js', name: 'base.js', mode: '100644', - readableText: true, + readable_text: true, icon: 'file-text-o', }; propsCopy.addMergeRequestButtons = true; - propsCopy.diffFile.deletedFile = true; + propsCopy.diffFile.deleted_file = true; const discussionGetter = () => [diffDiscussionMock]; const notesModuleMock = notesModule(); diff --git a/spec/javascripts/diffs/components/diff_file_spec.js b/spec/javascripts/diffs/components/diff_file_spec.js index 882ad3685a2..51bb4807960 100644 --- a/spec/javascripts/diffs/components/diff_file_spec.js +++ b/spec/javascripts/diffs/components/diff_file_spec.js @@ -17,14 +17,14 @@ describe('DiffFile', () => { describe('template', () => { it('should render component with file header, file content components', () => { const el = vm.$el; - const { fileHash, filePath } = vm.file; + const { file_hash, file_path } = vm.file; - expect(el.id).toEqual(fileHash); + expect(el.id).toEqual(file_hash); expect(el.classList.contains('diff-file')).toEqual(true); expect(el.querySelectorAll('.diff-content.hidden').length).toEqual(0); expect(el.querySelector('.js-file-title')).toBeDefined(); - expect(el.querySelector('.file-title-name').innerText.indexOf(filePath)).toBeGreaterThan(-1); + expect(el.querySelector('.file-title-name').innerText.indexOf(file_path)).toBeGreaterThan(-1); expect(el.querySelector('.js-syntax-highlight')).toBeDefined(); expect(vm.file.renderIt).toEqual(false); @@ -52,7 +52,7 @@ describe('DiffFile', () => { it('should have collapsed text and link', done => { vm.file.renderIt = true; vm.file.collapsed = false; - vm.file.highlightedDiffLines = null; + vm.file.highlighted_diff_lines = null; vm.$nextTick(() => { expect(vm.$el.innerText).toContain('This diff is collapsed'); @@ -90,8 +90,8 @@ describe('DiffFile', () => { describe('too large diff', () => { it('should have too large warning and blob link', done => { const BLOB_LINK = '/file/view/path'; - vm.file.tooLarge = true; - vm.file.viewPath = BLOB_LINK; + vm.file.too_large = true; + vm.file.view_path = BLOB_LINK; vm.$nextTick(() => { expect(vm.$el.innerText).toContain( @@ -107,4 +107,26 @@ describe('DiffFile', () => { }); }); }); + + describe('watch collapsed', () => { + it('calls handleLoadCollapsedDiff if collapsed changed & file has no lines', done => { + spyOn(vm, 'handleLoadCollapsedDiff'); + + vm.file.highlighted_diff_lines = undefined; + vm.file.parallel_diff_lines = []; + vm.file.collapsed = true; + + vm.$nextTick() + .then(() => { + vm.file.collapsed = false; + + return vm.$nextTick(); + }) + .then(() => { + expect(vm.handleLoadCollapsedDiff).toHaveBeenCalled(); + }) + .then(done) + .catch(done.fail); + }); + }); }); diff --git a/spec/javascripts/diffs/components/diff_line_gutter_content_spec.js b/spec/javascripts/diffs/components/diff_line_gutter_content_spec.js index 6972e0ee913..038db8eaa7c 100644 --- a/spec/javascripts/diffs/components/diff_line_gutter_content_spec.js +++ b/spec/javascripts/diffs/components/diff_line_gutter_content_spec.js @@ -11,16 +11,16 @@ describe('DiffLineGutterContent', () => { const cmp = Vue.extend(DiffLineGutterContent); const props = Object.assign({}, options); props.line = { - lineCode: 'LC_42', + line_code: 'LC_42', 
type: 'new', - oldLine: null, - newLine: 1, - discussions: [], + old_line: null, + new_line: 1, + discussions: [{ ...discussionsMockData }], text: '+<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n', - richText: '+<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n', - metaData: null, + rich_text: '+<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n', + meta_data: null, }; - props.fileHash = getDiffFileMock().fileHash; + props.fileHash = getDiffFileMock().file_hash; props.contextLinesPath = '/context/lines/path'; return createComponentWithStore(cmp, store, props).$mount(); @@ -37,7 +37,7 @@ describe('DiffLineGutterContent', () => { it('should return # if there is no lineCode', () => { const component = createComponent(); - component.line.lineCode = ''; + component.line.line_code = ''; expect(component.lineHref).toEqual('#'); }); @@ -46,6 +46,7 @@ describe('DiffLineGutterContent', () => { describe('discussions, hasDiscussions, shouldShowAvatarsOnGutter', () => { it('should return empty array when there is no discussion', () => { const component = createComponent(); + component.line.discussions = []; expect(component.hasDiscussions).toEqual(false); expect(component.shouldShowAvatarsOnGutter).toEqual(false); @@ -54,8 +55,8 @@ describe('DiffLineGutterContent', () => { it('should return discussions for the given lineCode', () => { const cmp = Vue.extend(DiffLineGutterContent); const props = { - line: getDiffFileMock().highlightedDiffLines[1], - fileHash: getDiffFileMock().fileHash, + line: getDiffFileMock().highlighted_diff_lines[1], + fileHash: getDiffFileMock().file_hash, showCommentButton: true, contextLinesPath: '/context/lines/path', }; @@ -104,10 +105,10 @@ describe('DiffLineGutterContent', () => { it('should render user avatars', () => { const component = createComponent({ showCommentButton: true, - lineCode: getDiffFileMock().highlightedDiffLines[1].lineCode, + lineCode: getDiffFileMock().highlighted_diff_lines[1].line_code, }); - expect(component.$el.querySelector('.diff-comment-avatar-holders')).toBeDefined(); + expect(component.$el.querySelector('.diff-comment-avatar-holders')).not.toBe(null); }); }); }); diff --git a/spec/javascripts/diffs/components/diff_line_note_form_spec.js b/spec/javascripts/diffs/components/diff_line_note_form_spec.js index c39b54d9cc9..81b66cf7c9b 100644 --- a/spec/javascripts/diffs/components/diff_line_note_form_spec.js +++ b/spec/javascripts/diffs/components/diff_line_note_form_spec.js @@ -13,10 +13,10 @@ describe('DiffLineNoteForm', () => { beforeEach(() => { diffFile = getDiffFileMock(); - diffLines = diffFile.highlightedDiffLines; + diffLines = diffFile.highlighted_diff_lines; component = createComponentWithStore(Vue.extend(DiffLineNoteForm), store, { - diffFileHash: diffFile.fileHash, + diffFileHash: diffFile.file_hash, diffLines, line: diffLines[0], noteTargetLine: diffLines[0], @@ -61,7 +61,7 @@ describe('DiffLineNoteForm', () => { expect(window.confirm).not.toHaveBeenCalled(); component.$nextTick(() => { expect(component.cancelCommentForm).toHaveBeenCalledWith({ - lineCode: diffLines[0].lineCode, + lineCode: diffLines[0].line_code, }); expect(component.resetAutoSave).toHaveBeenCalled(); diff --git a/spec/javascripts/diffs/components/inline_diff_view_spec.js b/spec/javascripts/diffs/components/inline_diff_view_spec.js index 705558e860b..2316ee29106 100644 --- a/spec/javascripts/diffs/components/inline_diff_view_spec.js +++ b/spec/javascripts/diffs/components/inline_diff_view_spec.js @@ -1,4 +1,5 @@ 
import Vue from 'vue'; +import '~/behaviors/markdown/render_gfm'; import InlineDiffView from '~/diffs/components/inline_diff_view.vue'; import store from '~/mr_notes/stores'; import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper'; @@ -10,14 +11,16 @@ describe('InlineDiffView', () => { const getDiffFileMock = () => Object.assign({}, diffFileMockData); const getDiscussionsMockData = () => [Object.assign({}, discussionsMockData)]; - beforeEach(() => { + beforeEach(done => { const diffFile = getDiffFileMock(); store.dispatch('diffs/setInlineDiffViewType'); component = createComponentWithStore(Vue.extend(InlineDiffView), store, { diffFile, - diffLines: diffFile.highlightedDiffLines, + diffLines: diffFile.highlighted_diff_lines, }).$mount(); + + Vue.nextTick(done); }); describe('template', () => { @@ -32,7 +35,7 @@ describe('InlineDiffView', () => { it('should render discussions', done => { const el = component.$el; - component.$store.dispatch('setInitialNotes', getDiscussionsMockData()); + component.diffLines[1].discussions = getDiscussionsMockData(); Vue.nextTick(() => { expect(el.querySelectorAll('.notes_holder').length).toEqual(1); diff --git a/spec/javascripts/diffs/components/parallel_diff_view_spec.js b/spec/javascripts/diffs/components/parallel_diff_view_spec.js index 091e01868d3..6f6b1c41915 100644 --- a/spec/javascripts/diffs/components/parallel_diff_view_spec.js +++ b/spec/javascripts/diffs/components/parallel_diff_view_spec.js @@ -14,7 +14,7 @@ describe('ParallelDiffView', () => { component = createComponentWithStore(Vue.extend(ParallelDiffView), store, { diffFile, - diffLines: diffFile.parallelDiffLines, + diffLines: diffFile.parallel_diff_lines, }).$mount(); }); diff --git a/spec/javascripts/diffs/components/tree_list_spec.js b/spec/javascripts/diffs/components/tree_list_spec.js index fc94d0bab5b..a0b380adfd6 100644 --- a/spec/javascripts/diffs/components/tree_list_spec.js +++ b/spec/javascripts/diffs/components/tree_list_spec.js @@ -81,7 +81,7 @@ describe('Diffs tree list component', () => { }); it('filters tree list to blobs matching search', done => { - vm.search = 'index'; + vm.search = 'app/index'; vm.$nextTick(() => { expect(vm.$el.querySelectorAll('.file-row').length).toBe(1); diff --git a/spec/javascripts/diffs/mock_data/diff_file.js b/spec/javascripts/diffs/mock_data/diff_file.js index be194ab414f..031c9842f2f 100644 --- a/spec/javascripts/diffs/mock_data/diff_file.js +++ b/spec/javascripts/diffs/mock_data/diff_file.js @@ -1,130 +1,130 @@ export default { submodule: false, - submoduleLink: null, + submodule_link: null, blob: { id: '9e10516ca50788acf18c518a231914a21e5f16f7', path: 'CHANGELOG', name: 'CHANGELOG', mode: '100644', - readableText: true, + readable_text: true, icon: 'file-text-o', }, - blobPath: 'CHANGELOG', - blobName: 'CHANGELOG', - blobIcon: '<i aria-hidden="true" data-hidden="true" class="fa fa-file-text-o fa-fw"></i>', - fileHash: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a', - filePath: 'CHANGELOG', - newFile: false, - deletedFile: false, - renamedFile: false, - oldPath: 'CHANGELOG', - newPath: 'CHANGELOG', - modeChanged: false, - aMode: '100644', - bMode: '100644', + blob_path: 'CHANGELOG', + blob_name: 'CHANGELOG', + blob_icon: '<i aria-hidden="true" data-hidden="true" class="fa fa-file-text-o fa-fw"></i>', + file_hash: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a', + file_path: 'CHANGELOG', + new_file: false, + deleted_file: false, + renamed_file: false, + old_path: 'CHANGELOG', + new_path: 'CHANGELOG', + mode_changed: false, + 
a_mode: '100644', + b_mode: '100644', text: true, viewer: { name: 'text', }, - addedLines: 2, - removedLines: 0, - diffRefs: { - baseSha: 'e63f41fe459e62e1228fcef60d7189127aeba95a', - startSha: 'd9eaefe5a676b820c57ff18cf5b68316025f7962', - headSha: 'c48ee0d1bf3b30453f5b32250ce03134beaa6d13', + added_lines: 2, + removed_lines: 0, + diff_refs: { + base_sha: 'e63f41fe459e62e1228fcef60d7189127aeba95a', + start_sha: 'd9eaefe5a676b820c57ff18cf5b68316025f7962', + head_sha: 'c48ee0d1bf3b30453f5b32250ce03134beaa6d13', }, - contentSha: 'c48ee0d1bf3b30453f5b32250ce03134beaa6d13', - storedExternally: null, - externalStorage: null, - oldPathHtml: 'CHANGELOG', - newPathHtml: 'CHANGELOG', - editPath: '/gitlab-org/gitlab-test/edit/spooky-stuff/CHANGELOG', - viewPath: '/gitlab-org/gitlab-test/blob/spooky-stuff/CHANGELOG', - replacedViewPath: null, + content_sha: 'c48ee0d1bf3b30453f5b32250ce03134beaa6d13', + stored_externally: null, + external_storage: null, + old_path_html: 'CHANGELOG', + new_path_html: 'CHANGELOG', + edit_path: '/gitlab-org/gitlab-test/edit/spooky-stuff/CHANGELOG', + view_path: '/gitlab-org/gitlab-test/blob/spooky-stuff/CHANGELOG', + replaced_view_path: null, collapsed: false, renderIt: false, - tooLarge: false, - contextLinesPath: + too_large: false, + context_lines_path: '/gitlab-org/gitlab-test/blob/c48ee0d1bf3b30453f5b32250ce03134beaa6d13/CHANGELOG/diff', - highlightedDiffLines: [ + highlighted_diff_lines: [ { - lineCode: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_1', + line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_1', type: 'new', - oldLine: null, - newLine: 1, + old_line: null, + new_line: 1, discussions: [], text: '+<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n', - richText: '+<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n', - metaData: null, + rich_text: '+<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n', + meta_data: null, }, { - lineCode: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_2', + line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_2', type: 'new', - oldLine: null, - newLine: 2, + old_line: null, + new_line: 2, discussions: [], text: '+<span id="LC2" class="line" lang="plaintext"></span>\n', - richText: '+<span id="LC2" class="line" lang="plaintext"></span>\n', - metaData: null, + rich_text: '+<span id="LC2" class="line" lang="plaintext"></span>\n', + meta_data: null, }, { - lineCode: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_3', + line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_3', type: null, - oldLine: 1, - newLine: 3, + old_line: 1, + new_line: 3, discussions: [], text: ' <span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n', - richText: ' <span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n', - metaData: null, + rich_text: ' <span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n', + meta_data: null, }, { - lineCode: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_2_4', + line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_2_4', type: null, - oldLine: 2, - newLine: 4, + old_line: 2, + new_line: 4, discussions: [], text: ' <span id="LC4" class="line" lang="plaintext"></span>\n', - richText: ' <span id="LC4" class="line" lang="plaintext"></span>\n', - metaData: null, + rich_text: ' <span id="LC4" class="line" lang="plaintext"></span>\n', + meta_data: null, }, { - lineCode: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_3_5', + line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_3_5', type: null, - oldLine: 3, - newLine: 5, + old_line: 3, + new_line: 5, discussions: [], text: ' 
<span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n', - richText: ' <span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n', - metaData: null, + rich_text: ' <span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n', + meta_data: null, }, { - lineCode: null, + line_code: null, type: 'match', - oldLine: null, - newLine: null, + old_line: null, + new_line: null, discussions: [], text: '', - richText: '', - metaData: { - oldPos: 3, - newPos: 5, + rich_text: '', + meta_data: { + old_pos: 3, + new_pos: 5, }, }, ], - parallelDiffLines: [ + parallel_diff_lines: [ { left: { type: 'empty-cell', }, right: { - lineCode: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_1', + line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_1', type: 'new', - oldLine: null, - newLine: 1, + old_line: null, + new_line: 1, discussions: [], text: '+<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n', - richText: '<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n', - metaData: null, + rich_text: '<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n', + meta_data: null, }, }, { @@ -132,107 +132,107 @@ export default { type: 'empty-cell', }, right: { - lineCode: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_2', + line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_2', type: 'new', - oldLine: null, - newLine: 2, + old_line: null, + new_line: 2, discussions: [], text: '+<span id="LC2" class="line" lang="plaintext"></span>\n', - richText: '<span id="LC2" class="line" lang="plaintext"></span>\n', - metaData: null, + rich_text: '<span id="LC2" class="line" lang="plaintext"></span>\n', + meta_data: null, }, }, { left: { - lineCode: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_3', + line_Code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_3', type: null, - oldLine: 1, - newLine: 3, + old_line: 1, + new_line: 3, discussions: [], text: ' <span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n', - richText: '<span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n', - metaData: null, + rich_text: '<span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n', + meta_data: null, }, right: { - lineCode: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_3', + line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_3', type: null, - oldLine: 1, - newLine: 3, + old_line: 1, + new_line: 3, discussions: [], text: ' <span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n', - richText: '<span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n', - metaData: null, + rich_text: '<span id="LC3" class="line" lang="plaintext">v6.8.0</span>\n', + meta_data: null, }, }, { left: { - lineCode: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_2_4', + line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_2_4', type: null, - oldLine: 2, - newLine: 4, + old_line: 2, + new_line: 4, discussions: [], text: ' <span id="LC4" class="line" lang="plaintext"></span>\n', - richText: '<span id="LC4" class="line" lang="plaintext"></span>\n', - metaData: null, + rich_text: '<span id="LC4" class="line" lang="plaintext"></span>\n', + meta_data: null, }, right: { - lineCode: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_2_4', + line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_2_4', type: null, - oldLine: 2, - newLine: 4, + old_line: 2, + new_line: 4, discussions: [], text: ' <span id="LC4" class="line" lang="plaintext"></span>\n', - richText: '<span id="LC4" class="line" lang="plaintext"></span>\n', - metaData: null, + rich_text: '<span id="LC4" class="line" lang="plaintext"></span>\n', + meta_data: null, 
}, }, { left: { - lineCode: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_3_5', + line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_3_5', type: null, - oldLine: 3, - newLine: 5, + old_line: 3, + new_line: 5, discussions: [], text: ' <span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n', - richText: '<span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n', - metaData: null, + rich_text: '<span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n', + meta_data: null, }, right: { - lineCode: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_3_5', + line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_3_5', type: null, - oldLine: 3, - newLine: 5, + old_line: 3, + new_line: 5, discussions: [], text: ' <span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n', - richText: '<span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n', - metaData: null, + rich_text: '<span id="LC5" class="line" lang="plaintext">v6.7.0</span>\n', + meta_data: null, }, }, { left: { - lineCode: null, + line_code: null, type: 'match', - oldLine: null, - newLine: null, + old_line: null, + new_line: null, discussions: [], text: '', - richText: '', - metaData: { - oldPos: 3, - newPos: 5, + rich_text: '', + meta_data: { + old_pos: 3, + new_pos: 5, }, }, right: { - lineCode: null, + line_code: null, type: 'match', - oldLine: null, - newLine: null, + old_line: null, + new_line: null, discussions: [], text: '', - richText: '', - metaData: { - oldPos: 3, - newPos: 5, + rich_text: '', + meta_data: { + old_pos: 3, + new_pos: 5, }, }, }, diff --git a/spec/javascripts/diffs/mock_data/diff_with_commit.js b/spec/javascripts/diffs/mock_data/diff_with_commit.js index bee04fa4932..d646294ee84 100644 --- a/spec/javascripts/diffs/mock_data/diff_with_commit.js +++ b/spec/javascripts/diffs/mock_data/diff_with_commit.js @@ -1,9 +1,7 @@ -import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils'; - const FIXTURE = 'merge_request_diffs/with_commit.json'; preloadFixtures(FIXTURE); export default function getDiffWithCommit() { - return convertObjectPropsToCamelCase(getJSONFixture(FIXTURE), { deep: true }); + return getJSONFixture(FIXTURE); } diff --git a/spec/javascripts/diffs/store/actions_spec.js b/spec/javascripts/diffs/store/actions_spec.js index 17d0f31bdd3..d94a9cd1710 100644 --- a/spec/javascripts/diffs/store/actions_spec.js +++ b/spec/javascripts/diffs/store/actions_spec.js @@ -97,46 +97,46 @@ describe('DiffsStoreActions', () => { const state = { diffFiles: [ { - fileHash: 'ABC', - parallelDiffLines: [ + file_hash: 'ABC', + parallel_diff_lines: [ { left: { - lineCode: 'ABC_1_1', + line_code: 'ABC_1_1', discussions: [], }, right: { - lineCode: 'ABC_1_1', + line_code: 'ABC_1_1', discussions: [], }, }, ], - highlightedDiffLines: [ + highlighted_diff_lines: [ { - lineCode: 'ABC_1_1', + line_code: 'ABC_1_1', discussions: [], - oldLine: 5, - newLine: null, + old_line: 5, + new_line: null, }, ], - diffRefs: { - baseSha: 'abc', - headSha: 'def', - startSha: 'ghi', + diff_refs: { + base_sha: 'abc', + head_sha: 'def', + start_sha: 'ghi', }, - newPath: 'file1', - oldPath: 'file2', + new_path: 'file1', + old_path: 'file2', }, ], }; const diffPosition = { - baseSha: 'abc', - headSha: 'def', - startSha: 'ghi', - newLine: null, - newPath: 'file1', - oldLine: 5, - oldPath: 'file2', + base_sha: 'abc', + head_sha: 'def', + start_sha: 'ghi', + new_line: null, + new_path: 'file1', + old_line: 5, + old_path: 'file2', }; const singleDiscussion = { @@ -145,7 +145,7 @@ describe('DiffsStoreActions', () => { diff_file: { file_hash: 
'ABC', }, - fileHash: 'ABC', + file_hash: 'ABC', resolvable: true, position: diffPosition, original_position: diffPosition, @@ -164,24 +164,22 @@ describe('DiffsStoreActions', () => { discussion: singleDiscussion, diffPositionByLineCode: { ABC_1_1: { - baseSha: 'abc', - headSha: 'def', - startSha: 'ghi', - newLine: null, - newPath: 'file1', - oldLine: 5, - oldPath: 'file2', - lineCode: 'ABC_1_1', - positionType: 'text', + base_sha: 'abc', + head_sha: 'def', + start_sha: 'ghi', + new_line: null, + new_path: 'file1', + old_line: 5, + old_path: 'file2', + line_code: 'ABC_1_1', + position_type: 'text', }, }, }, }, ], [], - () => { - done(); - }, + done, ); }); }); @@ -191,11 +189,11 @@ describe('DiffsStoreActions', () => { const state = { diffFiles: [ { - fileHash: 'ABC', - parallelDiffLines: [ + file_hash: 'ABC', + parallel_diff_lines: [ { left: { - lineCode: 'ABC_1_1', + line_code: 'ABC_1_1', discussions: [ { id: 1, @@ -203,14 +201,14 @@ describe('DiffsStoreActions', () => { ], }, right: { - lineCode: 'ABC_1_1', + line_code: 'ABC_1_1', discussions: [], }, }, ], - highlightedDiffLines: [ + highlighted_diff_lines: [ { - lineCode: 'ABC_1_1', + line_code: 'ABC_1_1', discussions: [], }, ], @@ -219,7 +217,7 @@ describe('DiffsStoreActions', () => { }; const singleDiscussion = { id: '1', - fileHash: 'ABC', + file_hash: 'ABC', line_code: 'ABC_1_1', }; @@ -238,9 +236,7 @@ describe('DiffsStoreActions', () => { }, ], [], - () => { - done(); - }, + done, ); }); }); diff --git a/spec/javascripts/diffs/store/getters_spec.js b/spec/javascripts/diffs/store/getters_spec.js index 9c3a38fd526..2449bb65d07 100644 --- a/spec/javascripts/diffs/store/getters_spec.js +++ b/spec/javascripts/diffs/store/getters_spec.js @@ -195,12 +195,12 @@ describe('Diffs Module Getters', () => { discussionMock.expanded = true; line.left = { - lineCode: 'ABC', + line_code: 'ABC', discussions: [discussionMock], }; line.right = { - lineCode: 'DEF', + line_code: 'DEF', discussions: [discussionMock1], }; }); @@ -259,7 +259,7 @@ describe('Diffs Module Getters', () => { describe('getDiffFileDiscussions', () => { it('returns an array with discussions when fileHash matches and the discussion belongs to a diff', () => { - discussionMock.diff_file.file_hash = diffFileMock.fileHash; + discussionMock.diff_file.file_hash = diffFileMock.file_hash; expect( getters.getDiffFileDiscussions(localState, {}, {}, { discussions: [discussionMock] })( @@ -279,10 +279,10 @@ describe('Diffs Module Getters', () => { describe('getDiffFileByHash', () => { it('returns file by hash', () => { const fileA = { - fileHash: '123', + file_hash: '123', }; const fileB = { - fileHash: '456', + file_hash: '456', }; localState.diffFiles = [fileA, fileB]; diff --git a/spec/javascripts/diffs/store/mutations_spec.js b/spec/javascripts/diffs/store/mutations_spec.js index fed04cbaed8..598d723c940 100644 --- a/spec/javascripts/diffs/store/mutations_spec.js +++ b/spec/javascripts/diffs/store/mutations_spec.js @@ -37,7 +37,7 @@ describe('DiffsStoreMutations', () => { mutations[types.SET_DIFF_DATA](state, diffMock); - const firstLine = state.diffFiles[0].parallelDiffLines[0]; + const firstLine = state.diffFiles[0].parallel_diff_lines[0]; expect(firstLine.right.text).toBeUndefined(); expect(state.diffFiles[0].renderIt).toEqual(true); @@ -98,19 +98,19 @@ describe('DiffsStoreMutations', () => { it('should call utils.addContextLines with proper params', () => { const options = { lineNumbers: { oldLineNumber: 1, newLineNumber: 2 }, - contextLines: [{ oldLine: 1 }], + contextLines: [{ 
old_line: 1, new_line: 1, line_code: 'ff9200_1_1', discussions: [] }], fileHash: 'ff9200', params: { bottom: true, }, }; const diffFile = { - fileHash: options.fileHash, - highlightedDiffLines: [], - parallelDiffLines: [], + file_hash: options.fileHash, + highlighted_diff_lines: [], + parallel_diff_lines: [], }; const state = { diffFiles: [diffFile] }; - const lines = [{ oldLine: 1 }]; + const lines = [{ old_line: 1, new_line: 1 }]; const findDiffFileSpy = spyOnDependency(mutations, 'findDiffFile').and.returnValue(diffFile); const removeMatchLineSpy = spyOnDependency(mutations, 'removeMatchLine'); @@ -133,8 +133,8 @@ describe('DiffsStoreMutations', () => { ); expect(addContextLinesSpy).toHaveBeenCalledWith({ - inlineLines: diffFile.highlightedDiffLines, - parallelLines: diffFile.parallelDiffLines, + inlineLines: diffFile.highlighted_diff_lines, + parallelLines: diffFile.parallel_diff_lines, contextLines: options.contextLines, bottom: options.params.bottom, lineNumbers: options.lineNumbers, @@ -144,54 +144,50 @@ describe('DiffsStoreMutations', () => { describe('ADD_COLLAPSED_DIFFS', () => { it('should update the state with the given data for the given file hash', () => { - const spy = spyOnDependency(mutations, 'convertObjectPropsToCamelCase').and.callThrough(); - const fileHash = 123; - const state = { diffFiles: [{}, { fileHash, existingField: 0 }] }; - const data = { diff_files: [{ file_hash: fileHash, extra_field: 1, existingField: 1 }] }; + const state = { diffFiles: [{}, { file_hash: fileHash, existing_field: 0 }] }; + const data = { diff_files: [{ file_hash: fileHash, extra_field: 1, existing_field: 1 }] }; mutations[types.ADD_COLLAPSED_DIFFS](state, { file: state.diffFiles[1], data }); - expect(spy).toHaveBeenCalledWith(data, { deep: true }); - - expect(state.diffFiles[1].fileHash).toEqual(fileHash); - expect(state.diffFiles[1].existingField).toEqual(1); - expect(state.diffFiles[1].extraField).toEqual(1); + expect(state.diffFiles[1].file_hash).toEqual(fileHash); + expect(state.diffFiles[1].existing_field).toEqual(1); + expect(state.diffFiles[1].extra_field).toEqual(1); }); }); describe('SET_LINE_DISCUSSIONS_FOR_FILE', () => { it('should add discussions to the given line', () => { const diffPosition = { - baseSha: 'ed13df29948c41ba367caa757ab3ec4892509910', - headSha: 'b921914f9a834ac47e6fd9420f78db0f83559130', - newLine: null, - newPath: '500-lines-4.txt', - oldLine: 5, - oldPath: '500-lines-4.txt', - startSha: 'ed13df29948c41ba367caa757ab3ec4892509910', + base_sha: 'ed13df29948c41ba367caa757ab3ec4892509910', + head_sha: 'b921914f9a834ac47e6fd9420f78db0f83559130', + new_line: null, + new_path: '500-lines-4.txt', + old_line: 5, + old_path: '500-lines-4.txt', + start_sha: 'ed13df29948c41ba367caa757ab3ec4892509910', }; const state = { latestDiff: true, diffFiles: [ { - fileHash: 'ABC', - parallelDiffLines: [ + file_hash: 'ABC', + parallel_diff_lines: [ { left: { - lineCode: 'ABC_1', + line_code: 'ABC_1', discussions: [], }, right: { - lineCode: 'ABC_1', + line_code: 'ABC_1', discussions: [], }, }, ], - highlightedDiffLines: [ + highlighted_diff_lines: [ { - lineCode: 'ABC_1', + line_code: 'ABC_1', discussions: [], }, ], @@ -206,7 +202,7 @@ describe('DiffsStoreMutations', () => { original_position: diffPosition, position: diffPosition, diff_file: { - file_hash: state.diffFiles[0].fileHash, + file_hash: state.diffFiles[0].file_hash, }, }; @@ -219,46 +215,46 @@ describe('DiffsStoreMutations', () => { diffPositionByLineCode, }); - 
expect(state.diffFiles[0].parallelDiffLines[0].left.discussions.length).toEqual(1); - expect(state.diffFiles[0].parallelDiffLines[0].left.discussions[0].id).toEqual(1); - expect(state.diffFiles[0].parallelDiffLines[0].right.discussions).toEqual([]); + expect(state.diffFiles[0].parallel_diff_lines[0].left.discussions.length).toEqual(1); + expect(state.diffFiles[0].parallel_diff_lines[0].left.discussions[0].id).toEqual(1); + expect(state.diffFiles[0].parallel_diff_lines[0].right.discussions).toEqual([]); - expect(state.diffFiles[0].highlightedDiffLines[0].discussions.length).toEqual(1); - expect(state.diffFiles[0].highlightedDiffLines[0].discussions[0].id).toEqual(1); + expect(state.diffFiles[0].highlighted_diff_lines[0].discussions.length).toEqual(1); + expect(state.diffFiles[0].highlighted_diff_lines[0].discussions[0].id).toEqual(1); }); it('should add legacy discussions to the given line', () => { const diffPosition = { - baseSha: 'ed13df29948c41ba367caa757ab3ec4892509910', - headSha: 'b921914f9a834ac47e6fd9420f78db0f83559130', - newLine: null, - newPath: '500-lines-4.txt', - oldLine: 5, - oldPath: '500-lines-4.txt', - startSha: 'ed13df29948c41ba367caa757ab3ec4892509910', - lineCode: 'ABC_1', + base_sha: 'ed13df29948c41ba367caa757ab3ec4892509910', + head_sha: 'b921914f9a834ac47e6fd9420f78db0f83559130', + new_line: null, + new_path: '500-lines-4.txt', + old_line: 5, + old_path: '500-lines-4.txt', + start_sha: 'ed13df29948c41ba367caa757ab3ec4892509910', + line_code: 'ABC_1', }; const state = { latestDiff: true, diffFiles: [ { - fileHash: 'ABC', - parallelDiffLines: [ + file_hash: 'ABC', + parallel_diff_lines: [ { left: { - lineCode: 'ABC_1', + line_code: 'ABC_1', discussions: [], }, right: { - lineCode: 'ABC_1', + line_code: 'ABC_1', discussions: [], }, }, ], - highlightedDiffLines: [ + highlighted_diff_lines: [ { - lineCode: 'ABC_1', + line_code: 'ABC_1', discussions: [], }, ], @@ -271,7 +267,7 @@ describe('DiffsStoreMutations', () => { diff_discussion: true, active: true, diff_file: { - file_hash: state.diffFiles[0].fileHash, + file_hash: state.diffFiles[0].file_hash, }, }; @@ -284,11 +280,11 @@ describe('DiffsStoreMutations', () => { diffPositionByLineCode, }); - expect(state.diffFiles[0].parallelDiffLines[0].left.discussions.length).toEqual(1); - expect(state.diffFiles[0].parallelDiffLines[0].left.discussions[0].id).toEqual(1); + expect(state.diffFiles[0].parallel_diff_lines[0].left.discussions.length).toEqual(1); + expect(state.diffFiles[0].parallel_diff_lines[0].left.discussions[0].id).toEqual(1); - expect(state.diffFiles[0].highlightedDiffLines[0].discussions.length).toEqual(1); - expect(state.diffFiles[0].highlightedDiffLines[0].discussions[0].id).toEqual(1); + expect(state.diffFiles[0].highlighted_diff_lines[0].discussions.length).toEqual(1); + expect(state.diffFiles[0].highlighted_diff_lines[0].discussions[0].id).toEqual(1); }); }); @@ -297,11 +293,11 @@ describe('DiffsStoreMutations', () => { const state = { diffFiles: [ { - fileHash: 'ABC', - parallelDiffLines: [ + file_hash: 'ABC', + parallel_diff_lines: [ { left: { - lineCode: 'ABC_1', + line_code: 'ABC_1', discussions: [ { id: 1, @@ -314,14 +310,14 @@ describe('DiffsStoreMutations', () => { ], }, right: { - lineCode: 'ABC_1', + line_code: 'ABC_1', discussions: [], }, }, ], - highlightedDiffLines: [ + highlighted_diff_lines: [ { - lineCode: 'ABC_1', + line_code: 'ABC_1', discussions: [ { id: 1, @@ -343,8 +339,8 @@ describe('DiffsStoreMutations', () => { lineCode: 'ABC_1', }); - 
expect(state.diffFiles[0].parallelDiffLines[0].left.discussions.length).toEqual(0); - expect(state.diffFiles[0].highlightedDiffLines[0].discussions.length).toEqual(0); + expect(state.diffFiles[0].parallel_diff_lines[0].left.discussions.length).toEqual(0); + expect(state.diffFiles[0].highlighted_diff_lines[0].discussions.length).toEqual(0); }); }); diff --git a/spec/javascripts/diffs/store/utils_spec.js b/spec/javascripts/diffs/store/utils_spec.js index f49dee3696d..d4ef17c5ef8 100644 --- a/spec/javascripts/diffs/store/utils_spec.js +++ b/spec/javascripts/diffs/store/utils_spec.js @@ -18,7 +18,7 @@ const getDiffFileMock = () => Object.assign({}, diffFileMockData); describe('DiffsStoreUtils', () => { describe('findDiffFile', () => { - const files = [{ fileHash: 1, name: 'one' }]; + const files = [{ file_hash: 1, name: 'one' }]; it('should return correct file', () => { expect(utils.findDiffFile(files, 1).name).toEqual('one'); @@ -41,13 +41,13 @@ describe('DiffsStoreUtils', () => { describe('findIndexInInlineLines', () => { it('should return correct index for given line numbers', () => { - expectSet(utils.findIndexInInlineLines, getDiffFileMock().highlightedDiffLines); + expectSet(utils.findIndexInInlineLines, getDiffFileMock().highlighted_diff_lines); }); }); describe('findIndexInParallelLines', () => { it('should return correct index for given line numbers', () => { - expectSet(utils.findIndexInParallelLines, getDiffFileMock().parallelDiffLines, {}); + expectSet(utils.findIndexInParallelLines, getDiffFileMock().parallel_diff_lines, {}); }); }); }); @@ -56,33 +56,39 @@ describe('DiffsStoreUtils', () => { it('should remove match line properly by regarding the bottom parameter', () => { const diffFile = getDiffFileMock(); const lineNumbers = { oldLineNumber: 3, newLineNumber: 5 }; - const inlineIndex = utils.findIndexInInlineLines(diffFile.highlightedDiffLines, lineNumbers); - const parallelIndex = utils.findIndexInParallelLines(diffFile.parallelDiffLines, lineNumbers); - const atInlineIndex = diffFile.highlightedDiffLines[inlineIndex]; - const atParallelIndex = diffFile.parallelDiffLines[parallelIndex]; + const inlineIndex = utils.findIndexInInlineLines( + diffFile.highlighted_diff_lines, + lineNumbers, + ); + const parallelIndex = utils.findIndexInParallelLines( + diffFile.parallel_diff_lines, + lineNumbers, + ); + const atInlineIndex = diffFile.highlighted_diff_lines[inlineIndex]; + const atParallelIndex = diffFile.parallel_diff_lines[parallelIndex]; utils.removeMatchLine(diffFile, lineNumbers, false); - expect(diffFile.highlightedDiffLines[inlineIndex]).not.toEqual(atInlineIndex); - expect(diffFile.parallelDiffLines[parallelIndex]).not.toEqual(atParallelIndex); + expect(diffFile.highlighted_diff_lines[inlineIndex]).not.toEqual(atInlineIndex); + expect(diffFile.parallel_diff_lines[parallelIndex]).not.toEqual(atParallelIndex); utils.removeMatchLine(diffFile, lineNumbers, true); - expect(diffFile.highlightedDiffLines[inlineIndex + 1]).not.toEqual(atInlineIndex); - expect(diffFile.parallelDiffLines[parallelIndex + 1]).not.toEqual(atParallelIndex); + expect(diffFile.highlighted_diff_lines[inlineIndex + 1]).not.toEqual(atInlineIndex); + expect(diffFile.parallel_diff_lines[parallelIndex + 1]).not.toEqual(atParallelIndex); }); }); describe('addContextLines', () => { it('should add context lines properly with bottom parameter', () => { const diffFile = getDiffFileMock(); - const inlineLines = diffFile.highlightedDiffLines; - const parallelLines = diffFile.parallelDiffLines; + const inlineLines = 
diffFile.highlighted_diff_lines; + const parallelLines = diffFile.parallel_diff_lines; const lineNumbers = { oldLineNumber: 3, newLineNumber: 5 }; const contextLines = [{ lineNumber: 42 }]; const options = { inlineLines, parallelLines, contextLines, lineNumbers, bottom: true }; - const inlineIndex = utils.findIndexInInlineLines(diffFile.highlightedDiffLines, lineNumbers); - const parallelIndex = utils.findIndexInParallelLines(diffFile.parallelDiffLines, lineNumbers); + const inlineIndex = utils.findIndexInInlineLines(inlineLines, lineNumbers); + const parallelIndex = utils.findIndexInParallelLines(parallelLines, lineNumbers); const normalizedParallelLine = { left: options.contextLines[0], right: options.contextLines[0], @@ -112,30 +118,30 @@ describe('DiffsStoreUtils', () => { noteableType: MERGE_REQUEST_NOTEABLE_TYPE, diffFile, noteTargetLine: { - lineCode: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_3', - metaData: null, - newLine: 3, - oldLine: 1, + line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_3', + meta_data: null, + new_line: 3, + old_line: 1, }, diffViewType: PARALLEL_DIFF_VIEW_TYPE, linePosition: LINE_POSITION_LEFT, }; const position = JSON.stringify({ - base_sha: diffFile.diffRefs.baseSha, - start_sha: diffFile.diffRefs.startSha, - head_sha: diffFile.diffRefs.headSha, - old_path: diffFile.oldPath, - new_path: diffFile.newPath, + base_sha: diffFile.diff_refs.base_sha, + start_sha: diffFile.diff_refs.start_sha, + head_sha: diffFile.diff_refs.head_sha, + old_path: diffFile.old_path, + new_path: diffFile.new_path, position_type: TEXT_DIFF_POSITION_TYPE, - old_line: options.noteTargetLine.oldLine, - new_line: options.noteTargetLine.newLine, + old_line: options.noteTargetLine.old_line, + new_line: options.noteTargetLine.new_line, }); const postData = { view: options.diffViewType, line_type: options.linePosition === LINE_POSITION_RIGHT ? 
NEW_LINE_TYPE : OLD_LINE_TYPE, - merge_request_diff_head_sha: diffFile.diffRefs.headSha, + merge_request_diff_head_sha: diffFile.diff_refs.head_sha, in_reply_to_discussion_id: '', note_project_id: '', target_type: options.noteableType, @@ -146,7 +152,7 @@ describe('DiffsStoreUtils', () => { noteable_id: options.noteableData.id, commit_id: '', type: DIFF_NOTE_TYPE, - line_code: options.noteTargetLine.lineCode, + line_code: options.noteTargetLine.line_code, note: options.note, position, }, @@ -160,8 +166,8 @@ describe('DiffsStoreUtils', () => { it('should create legacy note form data', () => { const diffFile = getDiffFileMock(); - delete diffFile.diffRefs.startSha; - delete diffFile.diffRefs.headSha; + delete diffFile.diff_refs.start_sha; + delete diffFile.diff_refs.head_sha; noteableDataMock.targetType = MERGE_REQUEST_NOTEABLE_TYPE; @@ -171,24 +177,24 @@ describe('DiffsStoreUtils', () => { noteableType: MERGE_REQUEST_NOTEABLE_TYPE, diffFile, noteTargetLine: { - lineCode: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_3', - metaData: null, - newLine: 3, - oldLine: 1, + line_code: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_3', + meta_data: null, + new_line: 3, + old_line: 1, }, diffViewType: PARALLEL_DIFF_VIEW_TYPE, linePosition: LINE_POSITION_LEFT, }; const position = JSON.stringify({ - base_sha: diffFile.diffRefs.baseSha, + base_sha: diffFile.diff_refs.base_sha, start_sha: undefined, head_sha: undefined, - old_path: diffFile.oldPath, - new_path: diffFile.newPath, + old_path: diffFile.old_path, + new_path: diffFile.new_path, position_type: TEXT_DIFF_POSITION_TYPE, - old_line: options.noteTargetLine.oldLine, - new_line: options.noteTargetLine.newLine, + old_line: options.noteTargetLine.old_line, + new_line: options.noteTargetLine.new_line, }); const postData = { @@ -205,7 +211,7 @@ describe('DiffsStoreUtils', () => { noteable_id: options.noteableData.id, commit_id: '', type: LEGACY_DIFF_NOTE_TYPE, - line_code: options.noteTargetLine.lineCode, + line_code: options.noteTargetLine.line_code, note: options.note, position, }, @@ -225,61 +231,61 @@ describe('DiffsStoreUtils', () => { const lines = [{ type: null }, { type: MATCH_LINE_TYPE }]; const linesWithReferences = utils.addLineReferences(lines, lineNumbers, true); - expect(linesWithReferences[0].oldLine).toEqual(lineNumbers.oldLineNumber + 1); - expect(linesWithReferences[0].newLine).toEqual(lineNumbers.newLineNumber + 1); - expect(linesWithReferences[1].metaData.oldPos).toEqual(4); - expect(linesWithReferences[1].metaData.newPos).toEqual(5); + expect(linesWithReferences[0].old_line).toEqual(lineNumbers.oldLineNumber + 1); + expect(linesWithReferences[0].new_line).toEqual(lineNumbers.newLineNumber + 1); + expect(linesWithReferences[1].meta_data.old_pos).toEqual(4); + expect(linesWithReferences[1].meta_data.new_pos).toEqual(5); }); it('should add correct line references when bottom falsy', () => { const lines = [{ type: null }, { type: MATCH_LINE_TYPE }, { type: null }]; const linesWithReferences = utils.addLineReferences(lines, lineNumbers); - expect(linesWithReferences[0].oldLine).toEqual(0); - expect(linesWithReferences[0].newLine).toEqual(1); - expect(linesWithReferences[1].metaData.oldPos).toEqual(2); - expect(linesWithReferences[1].metaData.newPos).toEqual(3); + expect(linesWithReferences[0].old_line).toEqual(0); + expect(linesWithReferences[0].new_line).toEqual(1); + expect(linesWithReferences[1].meta_data.old_pos).toEqual(2); + expect(linesWithReferences[1].meta_data.new_pos).toEqual(3); }); }); describe('trimFirstCharOfLineContent', () 
=> { it('trims the line when it starts with a space', () => { - expect(utils.trimFirstCharOfLineContent({ richText: ' diff' })).toEqual({ + expect(utils.trimFirstCharOfLineContent({ rich_text: ' diff' })).toEqual({ discussions: [], - richText: 'diff', + rich_text: 'diff', }); }); it('trims the line when it starts with a +', () => { - expect(utils.trimFirstCharOfLineContent({ richText: '+diff' })).toEqual({ + expect(utils.trimFirstCharOfLineContent({ rich_text: '+diff' })).toEqual({ discussions: [], - richText: 'diff', + rich_text: 'diff', }); }); it('trims the line when it starts with a -', () => { - expect(utils.trimFirstCharOfLineContent({ richText: '-diff' })).toEqual({ + expect(utils.trimFirstCharOfLineContent({ rich_text: '-diff' })).toEqual({ discussions: [], - richText: 'diff', + rich_text: 'diff', }); }); it('does not trims the line when it starts with a letter', () => { - expect(utils.trimFirstCharOfLineContent({ richText: 'diff' })).toEqual({ + expect(utils.trimFirstCharOfLineContent({ rich_text: 'diff' })).toEqual({ discussions: [], - richText: 'diff', + rich_text: 'diff', }); }); it('does not modify the provided object', () => { const lineObj = { discussions: [], - richText: ' diff', + rich_text: ' diff', }; utils.trimFirstCharOfLineContent(lineObj); - expect(lineObj).toEqual({ discussions: [], richText: ' diff' }); + expect(lineObj).toEqual({ discussions: [], rich_text: ' diff' }); }); it('handles a undefined or null parameter', () => { @@ -289,33 +295,33 @@ describe('DiffsStoreUtils', () => { describe('prepareDiffData', () => { it('sets the renderIt and collapsed attribute on files', () => { - const preparedDiff = { diffFiles: [getDiffFileMock()] }; + const preparedDiff = { diff_files: [getDiffFileMock()] }; utils.prepareDiffData(preparedDiff); - const firstParallelDiffLine = preparedDiff.diffFiles[0].parallelDiffLines[2]; + const firstParallelDiffLine = preparedDiff.diff_files[0].parallel_diff_lines[2]; expect(firstParallelDiffLine.left.discussions.length).toBe(0); expect(firstParallelDiffLine.left).not.toHaveAttr('text'); expect(firstParallelDiffLine.right.discussions.length).toBe(0); expect(firstParallelDiffLine.right).not.toHaveAttr('text'); - const firstParallelChar = firstParallelDiffLine.right.richText.charAt(0); + const firstParallelChar = firstParallelDiffLine.right.rich_text.charAt(0); expect(firstParallelChar).not.toBe(' '); expect(firstParallelChar).not.toBe('+'); expect(firstParallelChar).not.toBe('-'); - const checkLine = preparedDiff.diffFiles[0].highlightedDiffLines[0]; + const checkLine = preparedDiff.diff_files[0].highlighted_diff_lines[0]; expect(checkLine.discussions.length).toBe(0); expect(checkLine).not.toHaveAttr('text'); - const firstChar = checkLine.richText.charAt(0); + const firstChar = checkLine.rich_text.charAt(0); expect(firstChar).not.toBe(' '); expect(firstChar).not.toBe('+'); expect(firstChar).not.toBe('-'); - expect(preparedDiff.diffFiles[0].renderIt).toBeTruthy(); - expect(preparedDiff.diffFiles[0].collapsed).toBeFalsy(); + expect(preparedDiff.diff_files[0].renderIt).toBeTruthy(); + expect(preparedDiff.diff_files[0].collapsed).toBeFalsy(); }); }); @@ -398,7 +404,7 @@ describe('DiffsStoreUtils', () => { discussion, diffPosition: { ...diffPosition, - lineCode: 'ABC_1', + line_code: 'ABC_1', }, latestDiff: true, }), @@ -429,36 +435,36 @@ describe('DiffsStoreUtils', () => { beforeAll(() => { files = [ { - newPath: 'app/index.js', - deletedFile: false, - newFile: false, - removedLines: 10, - addedLines: 0, - fileHash: 'test', + new_path: 
'app/index.js', + deleted_file: false, + new_file: false, + removed_lines: 10, + added_lines: 0, + file_hash: 'test', }, { - newPath: 'app/test/index.js', - deletedFile: false, - newFile: true, - removedLines: 0, - addedLines: 0, - fileHash: 'test', + new_path: 'app/test/index.js', + deleted_file: false, + new_file: true, + removed_lines: 0, + added_lines: 0, + file_hash: 'test', }, { - newPath: 'app/test/filepathneedstruncating.js', - deletedFile: false, - newFile: true, - removedLines: 0, - addedLines: 0, - fileHash: 'test', + new_path: 'app/test/filepathneedstruncating.js', + deleted_file: false, + new_file: true, + removed_lines: 0, + added_lines: 0, + file_hash: 'test', }, { - newPath: 'package.json', - deletedFile: true, - newFile: false, - removedLines: 0, - addedLines: 0, - fileHash: 'test', + new_path: 'package.json', + deleted_file: true, + new_file: false, + removed_lines: 0, + added_lines: 0, + file_hash: 'test', }, ]; }); diff --git a/spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js b/spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js index 0c1d5f5b0b4..4f561df7943 100644 --- a/spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js +++ b/spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js @@ -754,6 +754,50 @@ describe('Filtered Search Visual Tokens', () => { expect(updateLabelTokenColorSpy.calls.count()).toBe(0); expect(updateUserTokenAppearanceSpy.calls.count()).toBe(0); }); + + it('does not update user token appearance for `none` filter', () => { + const { tokenNameElement } = findElements(authorToken); + + const tokenName = tokenNameElement.innerText; + const tokenValue = 'none'; + + subject.renderVisualTokenValue(authorToken, tokenName, tokenValue); + + expect(updateUserTokenAppearanceSpy.calls.count()).toBe(0); + }); + + it('does not update user token appearance for `any` filter', () => { + const { tokenNameElement } = findElements(authorToken); + + const tokenName = tokenNameElement.innerText; + const tokenValue = 'any'; + + subject.renderVisualTokenValue(authorToken, tokenName, tokenValue); + + expect(updateUserTokenAppearanceSpy.calls.count()).toBe(0); + }); + + it('does not update label token color for `none` filter', () => { + const { tokenNameElement } = findElements(bugLabelToken); + + const tokenName = tokenNameElement.innerText; + const tokenValue = 'none'; + + subject.renderVisualTokenValue(bugLabelToken, tokenName, tokenValue); + + expect(updateLabelTokenColorSpy.calls.count()).toBe(0); + }); + + it('does not update label token color for `any` filter', () => { + const { tokenNameElement } = findElements(bugLabelToken); + + const tokenName = tokenNameElement.innerText; + const tokenValue = 'any'; + + subject.renderVisualTokenValue(bugLabelToken, tokenName, tokenValue); + + expect(updateLabelTokenColorSpy.calls.count()).toBe(0); + }); }); describe('updateUserTokenAppearance', () => { @@ -763,19 +807,6 @@ describe('Filtered Search Visual Tokens', () => { spyOn(UsersCache, 'retrieve').and.callFake(username => usersCacheSpy(username)); }); - it('ignores special value "none"', done => { - usersCacheSpy = username => { - expect(username).toBe('none'); - done.fail('Should not resolve "none"!'); - }; - const { tokenValueContainer, tokenValueElement } = findElements(authorToken); - - subject - .updateUserTokenAppearance(tokenValueContainer, tokenValueElement, 'none') - .then(done) - .catch(done.fail); - }); - it('ignores error if UsersCache throws', done => { spyOn(window, 'Flash'); const 
dummyError = new Error('Earth rotated backwards'); diff --git a/spec/javascripts/ide/stores/modules/pipelines/actions_spec.js b/spec/javascripts/ide/stores/modules/pipelines/actions_spec.js index d85354c3681..c9c09ee9afe 100644 --- a/spec/javascripts/ide/stores/modules/pipelines/actions_spec.js +++ b/spec/javascripts/ide/stores/modules/pipelines/actions_spec.js @@ -77,7 +77,7 @@ describe('IDE pipelines actions', () => { { type: 'setErrorMessage', payload: { - text: 'An error occured whilst fetching the latest pipline.', + text: 'An error occured whilst fetching the latest pipeline.', action: jasmine.any(Function), actionText: 'Please try again', actionPayload: null, diff --git a/spec/javascripts/jobs/components/empty_state_spec.js b/spec/javascripts/jobs/components/empty_state_spec.js index 0a39709221c..a2df79bdda0 100644 --- a/spec/javascripts/jobs/components/empty_state_spec.js +++ b/spec/javascripts/jobs/components/empty_state_spec.js @@ -84,6 +84,7 @@ describe('Empty State', () => { vm = mountComponent(Component, { ...props, content, + action: null, }); expect(vm.$el.querySelector('.js-job-empty-state-action')).toBeNull(); diff --git a/spec/javascripts/notes/components/diff_with_note_spec.js b/spec/javascripts/notes/components/diff_with_note_spec.js index 0c16103714a..95461396f10 100644 --- a/spec/javascripts/notes/components/diff_with_note_spec.js +++ b/spec/javascripts/notes/components/diff_with_note_spec.js @@ -1,6 +1,5 @@ import Vue from 'vue'; import DiffWithNote from '~/notes/components/diff_with_note.vue'; -import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils'; import { createStore } from '~/mr_notes/stores'; import { mountComponentWithStore } from 'spec/helpers'; @@ -11,7 +10,7 @@ describe('diff_with_note', () => { let store; let vm; const diffDiscussionMock = getJSONFixture(discussionFixture)[0]; - const diffDiscussion = convertObjectPropsToCamelCase(diffDiscussionMock); + const diffDiscussion = diffDiscussionMock; const Component = Vue.extend(DiffWithNote); const props = { discussion: diffDiscussion, @@ -65,7 +64,7 @@ describe('diff_with_note', () => { describe('image diff', () => { beforeEach(() => { const imageDiffDiscussionMock = getJSONFixture(imageDiscussionFixture)[0]; - props.discussion = convertObjectPropsToCamelCase(imageDiffDiscussionMock); + props.discussion = imageDiffDiscussionMock; }); it('shows image diff', () => { diff --git a/spec/javascripts/notes/stores/actions_spec.js b/spec/javascripts/notes/stores/actions_spec.js index 0c0bc45b201..fcdd834e4a0 100644 --- a/spec/javascripts/notes/stores/actions_spec.js +++ b/spec/javascripts/notes/stores/actions_spec.js @@ -396,6 +396,9 @@ describe('Actions Notes Store', () => { { type: 'updateMergeRequestWidget', }, + { + type: 'startTaskList', + }, ], done, ); diff --git a/spec/javascripts/pipelines/graph/job_item_spec.js b/spec/javascripts/pipelines/graph/job_item_spec.js index 41b614cc95e..88e1789184d 100644 --- a/spec/javascripts/pipelines/graph/job_item_spec.js +++ b/spec/javascripts/pipelines/graph/job_item_spec.js @@ -140,14 +140,12 @@ describe('pipeline graph job item', () => { }); describe('tooltip placement', () => { - const tooltipBoundary = 'a[data-boundary="viewport"]'; - it('does not set tooltip boundary by default', () => { component = mountComponent(JobComponent, { job: mockJob, }); - expect(component.$el.querySelector(tooltipBoundary)).toBeNull(); + expect(component.tooltipBoundary).toBeNull(); }); it('sets tooltip boundary to viewport for small dropdowns', () => { @@ -156,7 +154,7 @@ 
describe('pipeline graph job item', () => { dropdownLength: 1, }); - expect(component.$el.querySelector(tooltipBoundary)).not.toBeNull(); + expect(component.tooltipBoundary).toEqual('viewport'); }); it('does not set tooltip boundary for large lists', () => { @@ -165,7 +163,7 @@ describe('pipeline graph job item', () => { dropdownLength: 7, }); - expect(component.$el.querySelector(tooltipBoundary)).toBeNull(); + expect(component.tooltipBoundary).toBeNull(); }); }); diff --git a/spec/javascripts/vue_mr_widget/components/deployment_spec.js b/spec/javascripts/vue_mr_widget/components/deployment_spec.js index 2f1bd00fa10..ebbcaeb6f30 100644 --- a/spec/javascripts/vue_mr_widget/components/deployment_spec.js +++ b/spec/javascripts/vue_mr_widget/components/deployment_spec.js @@ -174,57 +174,13 @@ describe('Deployment component', () => { }); }); - describe('with `features.ciEnvironmentsStatusChanges` enabled', () => { - beforeEach(() => { - window.gon = window.gon || {}; - window.gon.features = window.gon.features || {}; - window.gon.features.ciEnvironmentsStatusChanges = true; - vm = mountComponent(Component, { deployment: { ...deploymentMockData } }); - }); - - afterEach(() => { - window.gon.features = {}; - }); - - it('renders dropdown with changes', () => { - expect(vm.$el.querySelector('.js-mr-wigdet-deployment-dropdown')).not.toBeNull(); - expect(vm.$el.querySelector('.js-deploy-url-feature-flag')).toBeNull(); - }); - }); - - describe('with `features.ciEnvironmentsStatusChanges` disabled', () => { - beforeEach(() => { - window.gon = window.gon || {}; - window.gon.features = window.gon.features || {}; - window.gon.features.ciEnvironmentsStatusChanges = false; - - vm = mountComponent(Component, { deployment: { ...deploymentMockData } }); - }); - - afterEach(() => { - delete window.gon.features.ciEnvironmentsStatusChanges; - }); - - it('renders the old link to the review app', () => { - expect(vm.$el.querySelector('.js-mr-wigdet-deployment-dropdown')).toBeNull(); - expect(vm.$el.querySelector('.js-deploy-url-feature-flag')).not.toBeNull(); - }); - }); - describe('without changes', () => { beforeEach(() => { - window.gon = window.gon || {}; - window.gon.features = window.gon.features || {}; - window.gon.features.ciEnvironmentsStatusChanges = true; delete deploymentMockData.changes; vm = mountComponent(Component, { deployment: { ...deploymentMockData } }); }); - afterEach(() => { - delete window.gon.features.ciEnvironmentsStatusChanges; - }); - it('renders the link to the review app without dropdown', () => { expect(vm.$el.querySelector('.js-mr-wigdet-deployment-dropdown')).toBeNull(); expect(vm.$el.querySelector('.js-deploy-url-feature-flag')).not.toBeNull(); diff --git a/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js b/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js index 09fbe87b27e..f72bf627c10 100644 --- a/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js +++ b/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js @@ -458,10 +458,6 @@ describe('mrWidgetOptions', () => { }; beforeEach(done => { - window.gon = window.gon || {}; - window.gon.features = window.gon.features || {}; - window.gon.features.ciEnvironmentsStatusChanges = true; - vm.mr.deployments.push( { ...deploymentMockData, diff --git a/spec/javascripts/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js b/spec/javascripts/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js index 2d3e178d249..7f2e246d656 100644 --- 
a/spec/javascripts/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js +++ b/spec/javascripts/vue_shared/components/diff_viewer/viewers/image_diff_viewer_spec.js @@ -97,6 +97,26 @@ describe('ImageDiffViewer', () => { }); }); + it('renders image diff for renamed', done => { + vm = new Vue({ + components: { + imageDiffViewer, + }, + template: ` + <image-diff-viewer diff-mode="renamed" new-path="${GREEN_BOX_IMAGE_URL}" old-path=""> + <span slot="image-overlay" class="overlay">test</span> + </image-diff-viewer> + `, + }).$mount(); + + setTimeout(() => { + expect(vm.$el.querySelector('img').getAttribute('src')).toBe(GREEN_BOX_IMAGE_URL); + expect(vm.$el.querySelector('.overlay')).not.toBe(null); + + done(); + }); + }); + describe('swipeMode', () => { beforeEach(done => { createComponent({ diff --git a/spec/lib/gitlab/ci/build/policy/changes_spec.rb b/spec/lib/gitlab/ci/build/policy/changes_spec.rb index ab401108c84..523d00c1272 100644 --- a/spec/lib/gitlab/ci/build/policy/changes_spec.rb +++ b/spec/lib/gitlab/ci/build/policy/changes_spec.rb @@ -49,6 +49,12 @@ describe Gitlab::Ci::Build::Policy::Changes do expect(policy).to be_satisfied_by(pipeline, seed) end + it 'is satisfied by matching a pattern with a glob' do + policy = described_class.new(%w[some/**/*.{rb,txt}]) + + expect(policy).to be_satisfied_by(pipeline, seed) + end + it 'is not satisfied when pattern does not match path' do policy = described_class.new(%w[some/*.rb]) @@ -61,6 +67,12 @@ describe Gitlab::Ci::Build::Policy::Changes do expect(policy).not_to be_satisfied_by(pipeline, seed) end + it 'is not satisfied when pattern with glob does not match' do + policy = described_class.new(%w[invalid/*.{md,rake}]) + + expect(policy).not_to be_satisfied_by(pipeline, seed) + end + context 'when pipelines does not run for a branch update' do before do pipeline.before_sha = Gitlab::Git::BLANK_SHA diff --git a/spec/lib/gitlab/ci/build/policy/refs_spec.rb b/spec/lib/gitlab/ci/build/policy/refs_spec.rb index 7211187e511..553fc0fb9bf 100644 --- a/spec/lib/gitlab/ci/build/policy/refs_spec.rb +++ b/spec/lib/gitlab/ci/build/policy/refs_spec.rb @@ -16,7 +16,7 @@ describe Gitlab::Ci::Build::Policy::Refs do end end - context 'when maching tags' do + context 'when matching tags' do context 'when pipeline runs for a tag' do let(:pipeline) do build_stubbed(:ci_pipeline, ref: 'feature', tag: true) end @@ -56,10 +56,10 @@ describe Gitlab::Ci::Build::Policy::Refs do end end - context 'when maching a source' do + context 'when matching a source' do let(:pipeline) { build_stubbed(:ci_pipeline, source: :push) } - it 'is satisifed when provided source keyword matches' do + it 'is satisfied when provided source keyword matches' do expect(described_class.new(%w[pushes])) .to be_satisfied_by(pipeline) end diff --git a/spec/lib/gitlab/ci/config/entry/artifacts_spec.rb b/spec/lib/gitlab/ci/config/entry/artifacts_spec.rb index d48aac15f28..bd1f2c92844 100644 --- a/spec/lib/gitlab/ci/config/entry/artifacts_spec.rb +++ b/spec/lib/gitlab/ci/config/entry/artifacts_spec.rb @@ -8,7 +8,7 @@ describe Gitlab::Ci::Config::Entry::Artifacts do let(:config) { { paths: %w[public/] } } describe '#value' do - it 'returns artifacs configuration' do + it 'returns artifacts configuration' do expect(entry.value).to eq config end end diff --git a/spec/lib/gitlab/ci/config/entry/policy_spec.rb b/spec/lib/gitlab/ci/config/entry/policy_spec.rb index bef93fe7af7..83001b7fdd8 100644 --- a/spec/lib/gitlab/ci/config/entry/policy_spec.rb +++ 
b/spec/lib/gitlab/ci/config/entry/policy_spec.rb @@ -58,7 +58,7 @@ describe Gitlab::Ci::Config::Entry::Policy do end context 'when using complex policy' do - context 'when specifiying refs policy' do + context 'when specifying refs policy' do let(:config) { { refs: ['master'] } } it 'is a correct configuraton' do diff --git a/spec/lib/gitlab/ci/config/entry/reports_spec.rb b/spec/lib/gitlab/ci/config/entry/reports_spec.rb index 1140bfdf6c3..38943138cbf 100644 --- a/spec/lib/gitlab/ci/config/entry/reports_spec.rb +++ b/spec/lib/gitlab/ci/config/entry/reports_spec.rb @@ -19,7 +19,7 @@ describe Gitlab::Ci::Config::Entry::Reports do shared_examples 'a valid entry' do |keyword, file| describe '#value' do - it 'returns artifacs configuration' do + it 'returns artifacts configuration' do expect(entry.value).to eq({ "#{keyword}": [file] } ) end end diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb index 4e83b27e4a5..23f27939dd2 100644 --- a/spec/lib/gitlab/database/migration_helpers_spec.rb +++ b/spec/lib/gitlab/database/migration_helpers_spec.rb @@ -1338,7 +1338,12 @@ describe Gitlab::Database::MigrationHelpers do end describe '#index_exists_by_name?' do - it 'returns true if an index exists' do + # TODO: remove rails5-only after removing rails4 tests + # rails 4 can not handle multiple indexes on the same column set if + # index was added by 't.index' - t.index is used by default in schema.rb in + # rails 5. Let's run this test only in rails 5 env: + # see https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/21492#note_113602758 + it 'returns true if an index exists', :rails5 do expect(model.index_exists_by_name?(:projects, 'index_projects_on_path')) .to be_truthy end diff --git a/spec/lib/gitlab/diff/file_spec.rb b/spec/lib/gitlab/diff/file_spec.rb index 2f51642b58e..3417896e259 100644 --- a/spec/lib/gitlab/diff/file_spec.rb +++ b/spec/lib/gitlab/diff/file_spec.rb @@ -41,6 +41,52 @@ describe Gitlab::Diff::File do end end + describe '#unfold_diff_lines' do + let(:unfolded_lines) { double('expanded-lines') } + let(:unfolder) { instance_double(Gitlab::Diff::LinesUnfolder) } + let(:position) { instance_double(Gitlab::Diff::Position, old_line: 10) } + + before do + allow(Gitlab::Diff::LinesUnfolder).to receive(:new) { unfolder } + end + + context 'when unfold required' do + before do + allow(unfolder).to receive(:unfold_required?) { true } + allow(unfolder).to receive(:unfolded_diff_lines) { unfolded_lines } + end + + it 'changes @unfolded to true' do + diff_file.unfold_diff_lines(position) + + expect(diff_file).to be_unfolded + end + + it 'updates @diff_lines' do + diff_file.unfold_diff_lines(position) + + expect(diff_file.diff_lines).to eq(unfolded_lines) + end + end + + context 'when unfold not required' do + before do + allow(unfolder).to receive(:unfold_required?) { false } + end + + it 'keeps @unfolded false' do + diff_file.unfold_diff_lines(position) + + expect(diff_file).not_to be_unfolded + end + + it 'does not update @diff_lines' do + expect { diff_file.unfold_diff_lines(position) } + .not_to change(diff_file, :diff_lines) + end + end + end + describe '#mode_changed?' 
do it { expect(diff_file.mode_changed?).to be_falsey } end diff --git a/spec/lib/gitlab/diff/lines_unfolder_spec.rb b/spec/lib/gitlab/diff/lines_unfolder_spec.rb new file mode 100644 index 00000000000..8e00c8e0e30 --- /dev/null +++ b/spec/lib/gitlab/diff/lines_unfolder_spec.rb @@ -0,0 +1,750 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Gitlab::Diff::LinesUnfolder do + let(:raw_diff) do + <<-DIFF.strip_heredoc + @@ -7,9 +7,6 @@ + "tags": ["devel", "development", "nightly"], + "desktop-file-name-prefix": "(Development) ", + "finish-args": [ + - "--share=ipc", "--socket=x11", + - "--socket=wayland", + - "--talk-name=org.gnome.OnlineAccounts", + "--talk-name=org.freedesktop.Tracker1", + "--filesystem=home", + "--talk-name=org.gtk.vfs", "--talk-name=org.gtk.vfs.*", + @@ -62,7 +59,7 @@ + }, + { + "name": "gnome-desktop", + - "config-opts": ["--disable-debug-tools", "--disable-udev"], + + "config-opts": ["--disable-debug-tools", "--disable-"], + "sources": [ + { + "type": "git", + @@ -83,11 +80,6 @@ + "buildsystem": "meson", + "builddir": true, + "name": "nautilus", + - "config-opts": [ + - "-Denable-desktop=false", + - "-Denable-selinux=false", + - "--libdir=/app/lib" + - ], + "sources": [ + { + "type": "git", + DIFF + end + + let(:raw_old_blob) do + <<-BLOB.strip_heredoc + { + "app-id": "org.gnome.Nautilus", + "runtime": "org.gnome.Platform", + "runtime-version": "master", + "sdk": "org.gnome.Sdk", + "command": "nautilus", + "tags": ["devel", "development", "nightly"], + "desktop-file-name-prefix": "(Development) ", + "finish-args": [ + "--share=ipc", "--socket=x11", + "--socket=wayland", + "--talk-name=org.gnome.OnlineAccounts", + "--talk-name=org.freedesktop.Tracker1", + "--filesystem=home", + "--talk-name=org.gtk.vfs", "--talk-name=org.gtk.vfs.*", + "--filesystem=xdg-run/dconf", "--filesystem=~/.config/dconf:ro", + "--talk-name=ca.desrt.dconf", "--env=DCONF_USER_CONFIG_DIR=.config/dconf" + ], + "cleanup": [ "/include", "/share/bash-completion" ], + "modules": [ + { + "name": "exiv2", + "sources": [ + { + "type": "archive", + "url": "http://exiv2.org/builds/exiv2-0.26-trunk.tar.gz", + "sha256": "c75e3c4a0811bf700d92c82319373b7a825a2331c12b8b37d41eb58e4f18eafb" + }, + { + "type": "shell", + "commands": [ + "cp -f /usr/share/gnu-config/config.sub ./config/", + "cp -f /usr/share/gnu-config/config.guess ./config/" + ] + } + ] + }, + { + "name": "gexiv2", + "config-opts": [ "--disable-introspection" ], + "sources": [ + { + "type": "git", + "url": "https://git.gnome.org/browse/gexiv2" + } + ] + }, + { + "name": "tracker", + "cleanup": [ "/bin", "/etc", "/libexec" ], + "config-opts": [ "--disable-miner-apps", "--disable-static", + "--disable-tracker-extract", "--disable-tracker-needle", + "--disable-tracker-preferences", "--disable-artwork", + "--disable-tracker-writeback", "--disable-miner-user-guides", + "--with-bash-completion-dir=no" ], + "sources": [ + { + "type": "git", + "url": "https://git.gnome.org/browse/tracker" + } + ] + }, + { + "name": "gnome-desktop", + "config-opts": ["--disable-debug-tools", "--disable-udev"], + "sources": [ + { + "type": "git", + "url": "https://git.gnome.org/browse/gnome-desktop" + } + ] + }, + { + "name": "gnome-autoar", + "sources": [ + { + "type": "git", + "url": "https://git.gnome.org/browse/gnome-autoar" + } + ] + }, + { + "buildsystem": "meson", + "builddir": true, + "name": "nautilus", + "config-opts": [ + "-Denable-desktop=false", + "-Denable-selinux=false", + "--libdir=/app/lib" + ], + "sources": [ + { + "type": "git", + "url": 
"https://gitlab.gnome.org/GNOME/nautilus.git" + } + ] + } + ] + }, + { + "app-id": "foo", + "runtime": "foo", + "runtime-version": "foo", + "sdk": "foo", + "command": "foo", + "tags": ["foo", "bar", "kux"], + "desktop-file-name-prefix": "(Foo) ", + { + "buildsystem": "meson", + "builddir": true, + "name": "nautilus", + "sources": [ + { + "type": "git", + "url": "https://gitlab.gnome.org/GNOME/nautilus.git" + } + ] + } + }, + { + "app-id": "foo", + "runtime": "foo", + "runtime-version": "foo", + "sdk": "foo", + "command": "foo", + "tags": ["foo", "bar", "kux"], + "desktop-file-name-prefix": "(Foo) ", + { + "buildsystem": "meson", + "builddir": true, + "name": "nautilus", + "sources": [ + { + "type": "git", + "url": "https://gitlab.gnome.org/GNOME/nautilus.git" + } + ] + } + } + BLOB + end + + let(:project) { create(:project) } + + let(:old_blob) { Gitlab::Git::Blob.new(data: raw_old_blob) } + + let(:diff) do + Gitlab::Git::Diff.new(diff: raw_diff, + new_path: "build-aux/flatpak/org.gnome.Nautilus.json", + old_path: "build-aux/flatpak/org.gnome.Nautilus.json", + a_mode: "100644", + b_mode: "100644", + new_file: false, + renamed_file: false, + deleted_file: false, + too_large: false) + end + + let(:diff_file) do + Gitlab::Diff::File.new(diff, repository: project.repository) + end + + before do + allow(old_blob).to receive(:load_all_data!) + allow(diff_file).to receive(:old_blob) { old_blob } + end + + subject { described_class.new(diff_file, position) } + + context 'position requires a middle expansion and new match lines' do + let(:position) do + Gitlab::Diff::Position.new(base_sha: "1c59dfa64afbea8c721bb09a06a9d326c952ea19", + start_sha: "1c59dfa64afbea8c721bb09a06a9d326c952ea19", + head_sha: "1487062132228de836236c522fe52fed4980a46c", + old_path: "build-aux/flatpak/org.gnome.Nautilus.json", + new_path: "build-aux/flatpak/org.gnome.Nautilus.json", + position_type: "text", + old_line: 43, + new_line: 40) + end + + context 'blob lines' do + let(:expected_blob_lines) do + [[40, 40, " \"config-opts\": [ \"--disable-introspection\" ],"], + [41, 41, " \"sources\": ["], + [42, 42, " {"], + [43, 43, " \"type\": \"git\","], + [44, 44, " \"url\": \"https://git.gnome.org/browse/gexiv2\""], + [45, 45, " }"], + [46, 46, " ]"]] + end + + it 'returns the extracted blob lines correctly' do + extracted_lines = subject.blob_lines + + expect(extracted_lines.size).to eq(7) + + extracted_lines.each_with_index do |line, i| + expect([line.old_line, line.new_line, line.text]).to eq(expected_blob_lines[i]) + end + end + end + + context 'diff lines' do + let(:expected_diff_lines) do + [[7, 7, "@@ -7,9 +7,6 @@"], + [7, 7, " \"tags\": [\"devel\", \"development\", \"nightly\"],"], + [8, 8, " \"desktop-file-name-prefix\": \"(Development) \","], + [9, 9, " \"finish-args\": ["], + [10, 10, "- \"--share=ipc\", \"--socket=x11\","], + [11, 10, "- \"--socket=wayland\","], + [12, 10, "- \"--talk-name=org.gnome.OnlineAccounts\","], + [13, 10, " \"--talk-name=org.freedesktop.Tracker1\","], + [14, 11, " \"--filesystem=home\","], + [15, 12, " \"--talk-name=org.gtk.vfs\", \"--talk-name=org.gtk.vfs.*\","], + + # New match line + [40, 37, "@@ -40,7+37,7 @@"], + + # Injected blob lines + [40, 37, " \"config-opts\": [ \"--disable-introspection\" ],"], + [41, 38, " \"sources\": ["], + [42, 39, " {"], + [43, 40, " \"type\": \"git\","], # comment + [44, 41, " \"url\": \"https://git.gnome.org/browse/gexiv2\""], + [45, 42, " }"], + [46, 43, " ]"], + # end + + # Second match line + [62, 59, "@@ -62,7+59,7 @@"], + + [62, 59, " },"], + [63, 
60, " {"], + [64, 61, " \"name\": \"gnome-desktop\","], + [65, 62, "- \"config-opts\": [\"--disable-debug-tools\", \"--disable-udev\"],"], + [66, 62, "+ \"config-opts\": [\"--disable-debug-tools\", \"--disable-\"],"], + [66, 63, " \"sources\": ["], + [67, 64, " {"], + [68, 65, " \"type\": \"git\","], + [83, 80, "@@ -83,11 +80,6 @@"], + [83, 80, " \"buildsystem\": \"meson\","], + [84, 81, " \"builddir\": true,"], + [85, 82, " \"name\": \"nautilus\","], + [86, 83, "- \"config-opts\": ["], + [87, 83, "- \"-Denable-desktop=false\","], + [88, 83, "- \"-Denable-selinux=false\","], + [89, 83, "- \"--libdir=/app/lib\""], + [90, 83, "- ],"], + [91, 83, " \"sources\": ["], + [92, 84, " {"], + [93, 85, " \"type\": \"git\","]] + end + + it 'return merge of blob lines with diff lines correctly' do + new_diff_lines = subject.unfolded_diff_lines + + expected_diff_lines.each_with_index do |expected_line, i| + line = new_diff_lines[i] + + expect([line.old_pos, line.new_pos, line.text]) + .to eq([expected_line[0], expected_line[1], expected_line[2]]) + end + end + + it 'merged lines have correct line codes' do + new_diff_lines = subject.unfolded_diff_lines + + new_diff_lines.each_with_index do |line, i| + old_pos, new_pos = expected_diff_lines[i][0], expected_diff_lines[i][1] + + unless line.type == 'match' + expect(line.line_code).to eq(Gitlab::Git.diff_line_code(diff_file.file_path, new_pos, old_pos)) + end + end + end + end + end + + context 'position requires a middle expansion and no top match line' do + let(:position) do + Gitlab::Diff::Position.new(base_sha: "1c59dfa64afbea8c721bb09a06a9d326c952ea19", + start_sha: "1c59dfa64afbea8c721bb09a06a9d326c952ea19", + head_sha: "1487062132228de836236c522fe52fed4980a46c", + old_path: "build-aux/flatpak/org.gnome.Nautilus.json", + new_path: "build-aux/flatpak/org.gnome.Nautilus.json", + position_type: "text", + old_line: 16, + new_line: 17) + end + + context 'blob lines' do + let(:expected_blob_lines) do + [[16, 16, " \"--filesystem=xdg-run/dconf\", \"--filesystem=~/.config/dconf:ro\","], + [17, 17, " \"--talk-name=ca.desrt.dconf\", \"--env=DCONF_USER_CONFIG_DIR=.config/dconf\""], + [18, 18, " ],"], + [19, 19, " \"cleanup\": [ \"/include\", \"/share/bash-completion\" ],"]] + end + + it 'returns the extracted blob lines correctly' do + extracted_lines = subject.blob_lines + + expect(extracted_lines.size).to eq(4) + + extracted_lines.each_with_index do |line, i| + expect([line.old_line, line.new_line, line.text]).to eq(expected_blob_lines[i]) + end + end + end + + context 'diff lines' do + let(:expected_diff_lines) do + [[7, 7, "@@ -7,9 +7,6 @@"], + [7, 7, " \"tags\": [\"devel\", \"development\", \"nightly\"],"], + [8, 8, " \"desktop-file-name-prefix\": \"(Development) \","], + [9, 9, " \"finish-args\": ["], + [10, 10, "- \"--share=ipc\", \"--socket=x11\","], + [11, 10, "- \"--socket=wayland\","], + [12, 10, "- \"--talk-name=org.gnome.OnlineAccounts\","], + [13, 10, " \"--talk-name=org.freedesktop.Tracker1\","], + [14, 11, " \"--filesystem=home\","], + [15, 12, " \"--talk-name=org.gtk.vfs\", \"--talk-name=org.gtk.vfs.*\","], + # No new match needed + + # Injected blob lines + [16, 13, " \"--filesystem=xdg-run/dconf\", \"--filesystem=~/.config/dconf:ro\","], + [17, 14, " \"--talk-name=ca.desrt.dconf\", \"--env=DCONF_USER_CONFIG_DIR=.config/dconf\""], + [18, 15, " ],"], + [19, 16, " \"cleanup\": [ \"/include\", \"/share/bash-completion\" ],"], + # end + + # Second match line + [62, 59, "@@ -62,4+59,4 @@"], + + [62, 59, " },"], + [63, 60, " {"], + [64, 61, " 
\"name\": \"gnome-desktop\","], + [65, 62, "- \"config-opts\": [\"--disable-debug-tools\", \"--disable-udev\"],"], + [66, 62, "+ \"config-opts\": [\"--disable-debug-tools\", \"--disable-\"],"], + [66, 63, " \"sources\": ["], + [67, 64, " {"], + [68, 65, " \"type\": \"git\","], + [83, 80, "@@ -83,11 +80,6 @@"], + [83, 80, " \"buildsystem\": \"meson\","], + [84, 81, " \"builddir\": true,"], + [85, 82, " \"name\": \"nautilus\","], + [86, 83, "- \"config-opts\": ["], + [87, 83, "- \"-Denable-desktop=false\","], + [88, 83, "- \"-Denable-selinux=false\","], + [89, 83, "- \"--libdir=/app/lib\""], + [90, 83, "- ],"], + [91, 83, " \"sources\": ["], + [92, 84, " {"], + [93, 85, " \"type\": \"git\","]] + end + + it 'return merge of blob lines with diff lines correctly' do + new_diff_lines = subject.unfolded_diff_lines + + expected_diff_lines.each_with_index do |expected_line, i| + line = new_diff_lines[i] + + expect([line.old_pos, line.new_pos, line.text]) + .to eq([expected_line[0], expected_line[1], expected_line[2]]) + end + end + + it 'merged lines have correct line codes' do + new_diff_lines = subject.unfolded_diff_lines + + new_diff_lines.each_with_index do |line, i| + old_pos, new_pos = expected_diff_lines[i][0], expected_diff_lines[i][1] + + unless line.type == 'match' + expect(line.line_code).to eq(Gitlab::Git.diff_line_code(diff_file.file_path, new_pos, old_pos)) + end + end + end + end + end + + context 'position requires a middle expansion and no bottom match line' do + let(:position) do + Gitlab::Diff::Position.new(base_sha: "1c59dfa64afbea8c721bb09a06a9d326c952ea19", + start_sha: "1c59dfa64afbea8c721bb09a06a9d326c952ea19", + head_sha: "1487062132228de836236c522fe52fed4980a46c", + old_path: "build-aux/flatpak/org.gnome.Nautilus.json", + new_path: "build-aux/flatpak/org.gnome.Nautilus.json", + position_type: "text", + old_line: 82, + new_line: 79) + end + + context 'blob lines' do + let(:expected_blob_lines) do + [[79, 79, " }"], + [80, 80, " ]"], + [81, 81, " },"], + [82, 82, " {"]] + end + + it 'returns the extracted blob lines correctly' do + extracted_lines = subject.blob_lines + + expect(extracted_lines.size).to eq(4) + + extracted_lines.each_with_index do |line, i| + expect([line.old_line, line.new_line, line.text]).to eq(expected_blob_lines[i]) + end + end + end + + context 'diff lines' do + let(:expected_diff_lines) do + [[7, 7, "@@ -7,9 +7,6 @@"], + [7, 7, " \"tags\": [\"devel\", \"development\", \"nightly\"],"], + [8, 8, " \"desktop-file-name-prefix\": \"(Development) \","], + [9, 9, " \"finish-args\": ["], + [10, 10, "- \"--share=ipc\", \"--socket=x11\","], + [11, 10, "- \"--socket=wayland\","], + [12, 10, "- \"--talk-name=org.gnome.OnlineAccounts\","], + [13, 10, " \"--talk-name=org.freedesktop.Tracker1\","], + [14, 11, " \"--filesystem=home\","], + [15, 12, " \"--talk-name=org.gtk.vfs\", \"--talk-name=org.gtk.vfs.*\","], + [62, 59, "@@ -62,7 +59,7 @@"], + [62, 59, " },"], + [63, 60, " {"], + [64, 61, " \"name\": \"gnome-desktop\","], + [65, 62, "- \"config-opts\": [\"--disable-debug-tools\", \"--disable-udev\"],"], + [66, 62, "+ \"config-opts\": [\"--disable-debug-tools\", \"--disable-\"],"], + [66, 63, " \"sources\": ["], + [67, 64, " {"], + [68, 65, " \"type\": \"git\","], + + # New top match line + [79, 76, "@@ -79,4+76,4 @@"], + + # Injected blob lines + [79, 76, " }"], + [80, 77, " ]"], + [81, 78, " },"], + [82, 79, " {"], + # end + + # No new second match line + [83, 80, " \"buildsystem\": \"meson\","], + [84, 81, " \"builddir\": true,"], + [85, 82, " \"name\": 
\"nautilus\","], + [86, 83, "- \"config-opts\": ["], + [87, 83, "- \"-Denable-desktop=false\","], + [88, 83, "- \"-Denable-selinux=false\","], + [89, 83, "- \"--libdir=/app/lib\""], + [90, 83, "- ],"], + [91, 83, " \"sources\": ["], + [92, 84, " {"], + [93, 85, " \"type\": \"git\","]] + end + + it 'return merge of blob lines with diff lines correctly' do + new_diff_lines = subject.unfolded_diff_lines + + expected_diff_lines.each_with_index do |expected_line, i| + line = new_diff_lines[i] + + expect([line.old_pos, line.new_pos, line.text]) + .to eq([expected_line[0], expected_line[1], expected_line[2]]) + end + end + + it 'merged lines have correct line codes' do + new_diff_lines = subject.unfolded_diff_lines + + new_diff_lines.each_with_index do |line, i| + old_pos, new_pos = expected_diff_lines[i][0], expected_diff_lines[i][1] + + unless line.type == 'match' + expect(line.line_code).to eq(Gitlab::Git.diff_line_code(diff_file.file_path, new_pos, old_pos)) + end + end + end + end + end + + context 'position requires a short top expansion' do + let(:position) do + Gitlab::Diff::Position.new(base_sha: "1c59dfa64afbea8c721bb09a06a9d326c952ea19", + start_sha: "1c59dfa64afbea8c721bb09a06a9d326c952ea19", + head_sha: "1487062132228de836236c522fe52fed4980a46c", + old_path: "build-aux/flatpak/org.gnome.Nautilus.json", + new_path: "build-aux/flatpak/org.gnome.Nautilus.json", + position_type: "text", + old_line: 6, + new_line: 6) + end + + context 'blob lines' do + let(:expected_blob_lines) do + [[3, 3, " \"runtime\": \"org.gnome.Platform\","], + [4, 4, " \"runtime-version\": \"master\","], + [5, 5, " \"sdk\": \"org.gnome.Sdk\","], + [6, 6, " \"command\": \"nautilus\","]] + end + + it 'returns the extracted blob lines correctly' do + extracted_lines = subject.blob_lines + + expect(extracted_lines.size).to eq(4) + + extracted_lines.each_with_index do |line, i| + expect([line.old_line, line.new_line, line.text]).to eq(expected_blob_lines[i]) + end + end + end + + context 'diff lines' do + let(:expected_diff_lines) do + # New match line + [[3, 3, "@@ -3,4+3,4 @@"], + + # Injected blob lines + [3, 3, " \"runtime\": \"org.gnome.Platform\","], + [4, 4, " \"runtime-version\": \"master\","], + [5, 5, " \"sdk\": \"org.gnome.Sdk\","], + [6, 6, " \"command\": \"nautilus\","], + # end + [7, 7, " \"tags\": [\"devel\", \"development\", \"nightly\"],"], + [8, 8, " \"desktop-file-name-prefix\": \"(Development) \","], + [9, 9, " \"finish-args\": ["], + [10, 10, "- \"--share=ipc\", \"--socket=x11\","], + [11, 10, "- \"--socket=wayland\","], + [12, 10, "- \"--talk-name=org.gnome.OnlineAccounts\","], + [13, 10, " \"--talk-name=org.freedesktop.Tracker1\","], + [14, 11, " \"--filesystem=home\","], + [15, 12, " \"--talk-name=org.gtk.vfs\", \"--talk-name=org.gtk.vfs.*\","], + [62, 59, "@@ -62,7 +59,7 @@"], + [62, 59, " },"], + [63, 60, " {"], + [64, 61, " \"name\": \"gnome-desktop\","], + [65, 62, "- \"config-opts\": [\"--disable-debug-tools\", \"--disable-udev\"],"], + [66, 62, "+ \"config-opts\": [\"--disable-debug-tools\", \"--disable-\"],"], + [66, 63, " \"sources\": ["], + [67, 64, " {"], + [68, 65, " \"type\": \"git\","], + [83, 80, "@@ -83,11 +80,6 @@"], + [83, 80, " \"buildsystem\": \"meson\","], + [84, 81, " \"builddir\": true,"], + [85, 82, " \"name\": \"nautilus\","], + [86, 83, "- \"config-opts\": ["], + [87, 83, "- \"-Denable-desktop=false\","], + [88, 83, "- \"-Denable-selinux=false\","], + [89, 83, "- \"--libdir=/app/lib\""], + [90, 83, "- ],"], + [91, 83, " \"sources\": ["], + [92, 84, " {"], + [93, 85, " 
\"type\": \"git\","]] + end + + it 'return merge of blob lines with diff lines correctly' do + new_diff_lines = subject.unfolded_diff_lines + + expected_diff_lines.each_with_index do |expected_line, i| + line = new_diff_lines[i] + + expect([line.old_pos, line.new_pos, line.text]) + .to eq([expected_line[0], expected_line[1], expected_line[2]]) + end + end + + it 'merged lines have correct line codes' do + new_diff_lines = subject.unfolded_diff_lines + + new_diff_lines.each_with_index do |line, i| + old_pos, new_pos = expected_diff_lines[i][0], expected_diff_lines[i][1] + + unless line.type == 'match' + expect(line.line_code).to eq(Gitlab::Git.diff_line_code(diff_file.file_path, new_pos, old_pos)) + end + end + end + end + end + + context 'position sits between two match lines (no expasion needed)' do + let(:position) do + Gitlab::Diff::Position.new(base_sha: "1c59dfa64afbea8c721bb09a06a9d326c952ea19", + start_sha: "1c59dfa64afbea8c721bb09a06a9d326c952ea19", + head_sha: "1487062132228de836236c522fe52fed4980a46c", + old_path: "build-aux/flatpak/org.gnome.Nautilus.json", + new_path: "build-aux/flatpak/org.gnome.Nautilus.json", + position_type: "text", + old_line: 64, + new_line: 61) + end + + context 'diff lines' do + it 'returns nil' do + expect(subject.unfolded_diff_lines).to be_nil + end + end + end + + context 'position requires bottom expansion and new match lines' do + let(:position) do + Gitlab::Diff::Position.new(base_sha: "1c59dfa64afbea8c721bb09a06a9d326c952ea19", + start_sha: "1c59dfa64afbea8c721bb09a06a9d326c952ea19", + head_sha: "1487062132228de836236c522fe52fed4980a46c", + old_path: "build-aux/flatpak/org.gnome.Nautilus.json", + new_path: "build-aux/flatpak/org.gnome.Nautilus.json", + position_type: "text", + old_line: 107, + new_line: 99) + end + + context 'blob lines' do + let(:expected_blob_lines) do + [[104, 104, " \"sdk\": \"foo\","], + [105, 105, " \"command\": \"foo\","], + [106, 106, " \"tags\": [\"foo\", \"bar\", \"kux\"],"], + [107, 107, " \"desktop-file-name-prefix\": \"(Foo) \","], + [108, 108, " {"], + [109, 109, " \"buildsystem\": \"meson\","], + [110, 110, " \"builddir\": true,"]] + end + + it 'returns the extracted blob lines correctly' do + extracted_lines = subject.blob_lines + + expect(extracted_lines.size).to eq(7) + + extracted_lines.each_with_index do |line, i| + expect([line.old_line, line.new_line, line.text]).to eq(expected_blob_lines[i]) + end + end + end + + context 'diff lines' do + let(:expected_diff_lines) do + [[7, 7, "@@ -7,9 +7,6 @@"], + [7, 7, " \"tags\": [\"devel\", \"development\", \"nightly\"],"], + [8, 8, " \"desktop-file-name-prefix\": \"(Development) \","], + [9, 9, " \"finish-args\": ["], + [10, 10, "- \"--share=ipc\", \"--socket=x11\","], + [11, 10, "- \"--socket=wayland\","], + [12, 10, "- \"--talk-name=org.gnome.OnlineAccounts\","], + [13, 10, " \"--talk-name=org.freedesktop.Tracker1\","], + [14, 11, " \"--filesystem=home\","], + [15, 12, " \"--talk-name=org.gtk.vfs\", \"--talk-name=org.gtk.vfs.*\","], + [62, 59, "@@ -62,7 +59,7 @@"], + [62, 59, " },"], + [63, 60, " {"], + [64, 61, " \"name\": \"gnome-desktop\","], + [65, 62, "- \"config-opts\": [\"--disable-debug-tools\", \"--disable-udev\"],"], + [66, 62, "+ \"config-opts\": [\"--disable-debug-tools\", \"--disable-\"],"], + [66, 63, " \"sources\": ["], + [67, 64, " {"], + [68, 65, " \"type\": \"git\","], + [83, 80, "@@ -83,11 +80,6 @@"], + [83, 80, " \"buildsystem\": \"meson\","], + [84, 81, " \"builddir\": true,"], + [85, 82, " \"name\": \"nautilus\","], + [86, 83, "- 
\"config-opts\": ["], + [87, 83, "- \"-Denable-desktop=false\","], + [88, 83, "- \"-Denable-selinux=false\","], + [89, 83, "- \"--libdir=/app/lib\""], + [90, 83, "- ],"], + [91, 83, " \"sources\": ["], + [92, 84, " {"], + [93, 85, " \"type\": \"git\","], + # New match line + [104, 96, "@@ -104,7+96,7 @@"], + + # Injected blob lines + [104, 96, " \"sdk\": \"foo\","], + [105, 97, " \"command\": \"foo\","], + [106, 98, " \"tags\": [\"foo\", \"bar\", \"kux\"],"], + [107, 99, " \"desktop-file-name-prefix\": \"(Foo) \","], + [108, 100, " {"], + [109, 101, " \"buildsystem\": \"meson\","], + [110, 102, " \"builddir\": true,"]] + # end + end + + it 'return merge of blob lines with diff lines correctly' do + new_diff_lines = subject.unfolded_diff_lines + + expected_diff_lines.each_with_index do |expected_line, i| + line = new_diff_lines[i] + + expect([line.old_pos, line.new_pos, line.text]) + .to eq([expected_line[0], expected_line[1], expected_line[2]]) + end + end + + it 'merged lines have correct line codes' do + new_diff_lines = subject.unfolded_diff_lines + + new_diff_lines.each_with_index do |line, i| + old_pos, new_pos = expected_diff_lines[i][0], expected_diff_lines[i][1] + + unless line.type == 'match' + expect(line.line_code).to eq(Gitlab::Git.diff_line_code(diff_file.file_path, new_pos, old_pos)) + end + end + end + end + end +end diff --git a/spec/lib/gitlab/file_detector_spec.rb b/spec/lib/gitlab/file_detector_spec.rb index edab53247e9..4ba9094b24e 100644 --- a/spec/lib/gitlab/file_detector_spec.rb +++ b/spec/lib/gitlab/file_detector_spec.rb @@ -15,14 +15,22 @@ describe Gitlab::FileDetector do describe '.type_of' do it 'returns the type of a README file' do - %w[README readme INDEX index].each do |filename| + filenames = Gitlab::MarkupHelper::PLAIN_FILENAMES + Gitlab::MarkupHelper::PLAIN_FILENAMES.map(&:upcase) + extensions = Gitlab::MarkupHelper::EXTENSIONS + Gitlab::MarkupHelper::EXTENSIONS.map(&:upcase) + + filenames.each do |filename| expect(described_class.type_of(filename)).to eq(:readme) - %w[.md .adoc .rst].each do |extname| - expect(described_class.type_of(filename + extname)).to eq(:readme) + + extensions.each do |extname| + expect(described_class.type_of("#{filename}.#{extname}")).to eq(:readme) end end end + it 'returns nil for a README.rb file' do + expect(described_class.type_of('README.rb')).to be_nil + end + it 'returns nil for a README file in a directory' do expect(described_class.type_of('foo/README.md')).to be_nil end diff --git a/spec/lib/gitlab/git/commit_spec.rb b/spec/lib/gitlab/git/commit_spec.rb index 9ef27081f98..6be35eee0fd 100644 --- a/spec/lib/gitlab/git/commit_spec.rb +++ b/spec/lib/gitlab/git/commit_spec.rb @@ -94,7 +94,7 @@ describe Gitlab::Git::Commit, :seed_helper do context 'body_size less than threshold' do let(:body_size) { 123 } - it 'fetches commit message seperately' do + it 'fetches commit message separately' do expect(described_class).to receive(:get_message).with(repository, id) commit.safe_message diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb index 54291e847d8..1fe73c12fc0 100644 --- a/spec/lib/gitlab/git/repository_spec.rb +++ b/spec/lib/gitlab/git/repository_spec.rb @@ -1095,12 +1095,26 @@ describe Gitlab::Git::Repository, :seed_helper do end it 'returns no Gitaly::DiffStats when there is a nil SHA' do + expect_any_instance_of(Gitlab::GitalyClient::CommitService) + .not_to receive(:diff_stats) + collection = repository.diff_stats(nil, 'master') expect(collection).to 
be_a(Gitlab::Git::DiffStatsCollection) expect(collection).to be_a(Enumerable) expect(collection.to_a).to be_empty end + + it 'returns no Gitaly::DiffStats when there is a BLANK_SHA' do + expect_any_instance_of(Gitlab::GitalyClient::CommitService) + .not_to receive(:diff_stats) + + collection = repository.diff_stats(Gitlab::Git::BLANK_SHA, 'master') + + expect(collection).to be_a(Gitlab::Git::DiffStatsCollection) + expect(collection).to be_a(Enumerable) + expect(collection.to_a).to be_empty + end end describe "#ls_files" do diff --git a/spec/lib/gitlab/git/tag_spec.rb b/spec/lib/gitlab/git/tag_spec.rb index 2d9db576a6c..c5bad062c2a 100644 --- a/spec/lib/gitlab/git/tag_spec.rb +++ b/spec/lib/gitlab/git/tag_spec.rb @@ -68,7 +68,7 @@ describe Gitlab::Git::Tag, :seed_helper do context 'message_size less than threshold' do let(:message_size) { 123 } - it 'fetches tag message seperately' do + it 'fetches tag message separately' do expect(described_class).to receive(:get_message).with(repository, gitaly_tag.id) tag.message diff --git a/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb index 25684ea9e2c..efca8564894 100644 --- a/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb @@ -240,7 +240,12 @@ describe Gitlab::GithubImport::Importer::PullRequestImporter, :clean_gitlab_redi .and_return(user.id) end - it 'returns the existing merge request' do + # TODO: remove rails5-only after removing rails4 tests + # rails 4 can not handle multiple indexes on the same column set if + # index was added by 't.index' - t.index is used by default in schema.rb in + # rails 5. Let's run this test only in rails 5 env: + # see https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/21492#note_113602758 + it 'returns the existing merge request', :rails5 do mr1, exists1 = importer.create_merge_request mr2, exists2 = importer.create_merge_request diff --git a/spec/lib/gitlab/kubernetes/helm/api_spec.rb b/spec/lib/gitlab/kubernetes/helm/api_spec.rb index 9200724ed23..8bce7a4cdf5 100644 --- a/spec/lib/gitlab/kubernetes/helm/api_spec.rb +++ b/spec/lib/gitlab/kubernetes/helm/api_spec.rb @@ -36,6 +36,7 @@ describe Gitlab::Kubernetes::Helm::Api do describe '#install' do before do allow(client).to receive(:create_pod).and_return(nil) + allow(client).to receive(:get_config_map).and_return(nil) allow(client).to receive(:create_config_map).and_return(nil) allow(client).to receive(:create_service_account).and_return(nil) allow(client).to receive(:create_cluster_role_binding).and_return(nil) @@ -57,6 +58,18 @@ describe Gitlab::Kubernetes::Helm::Api do subject.install(command) end + + context 'config map already exists' do + before do + expect(client).to receive(:get_config_map).with("values-content-configuration-#{application_name}", gitlab_namespace).and_return(resource) + end + + it 'updates the config map' do + expect(client).to receive(:update_config_map).with(resource).once + + subject.install(command) + end + end end context 'without a service account' do @@ -88,8 +101,8 @@ describe Gitlab::Kubernetes::Helm::Api do context 'service account and cluster role binding does not exist' do before do - expect(client).to receive('get_service_account').with('tiller', 'gitlab-managed-apps').and_raise(Kubeclient::HttpError.new(404, 'Not found', nil)) - expect(client).to 
receive('get_cluster_role_binding').with('tiller-admin').and_raise(Kubeclient::HttpError.new(404, 'Not found', nil)) + expect(client).to receive(:get_service_account).with('tiller', 'gitlab-managed-apps').and_raise(Kubeclient::ResourceNotFoundError.new(404, 'Not found', nil)) + expect(client).to receive(:get_cluster_role_binding).with('tiller-admin').and_raise(Kubeclient::ResourceNotFoundError.new(404, 'Not found', nil)) end it 'creates a service account, followed the cluster role binding on kubeclient' do @@ -102,8 +115,8 @@ describe Gitlab::Kubernetes::Helm::Api do context 'service account already exists' do before do - expect(client).to receive('get_service_account').with('tiller', 'gitlab-managed-apps').and_return(service_account_resource) - expect(client).to receive('get_cluster_role_binding').with('tiller-admin').and_raise(Kubeclient::HttpError.new(404, 'Not found', nil)) + expect(client).to receive(:get_service_account).with('tiller', 'gitlab-managed-apps').and_return(service_account_resource) + expect(client).to receive(:get_cluster_role_binding).with('tiller-admin').and_raise(Kubeclient::ResourceNotFoundError.new(404, 'Not found', nil)) end it 'updates the service account, followed by creating the cluster role binding' do @@ -116,8 +129,8 @@ describe Gitlab::Kubernetes::Helm::Api do context 'service account and cluster role binding already exists' do before do - expect(client).to receive('get_service_account').with('tiller', 'gitlab-managed-apps').and_return(service_account_resource) - expect(client).to receive('get_cluster_role_binding').with('tiller-admin').and_return(cluster_role_binding_resource) + expect(client).to receive(:get_service_account).with('tiller', 'gitlab-managed-apps').and_return(service_account_resource) + expect(client).to receive(:get_cluster_role_binding).with('tiller-admin').and_return(cluster_role_binding_resource) end it 'updates the service account, followed by creating the cluster role binding' do @@ -130,7 +143,7 @@ describe Gitlab::Kubernetes::Helm::Api do context 'a non-404 error is thrown' do before do - expect(client).to receive('get_service_account').with('tiller', 'gitlab-managed-apps').and_raise(Kubeclient::HttpError.new(401, 'Unauthorized', nil)) + expect(client).to receive(:get_service_account).with('tiller', 'gitlab-managed-apps').and_raise(Kubeclient::HttpError.new(401, 'Unauthorized', nil)) end it 'raises an error' do diff --git a/spec/lib/gitlab/kubernetes/kube_client_spec.rb b/spec/lib/gitlab/kubernetes/kube_client_spec.rb index eed4135d8a2..3979a43216c 100644 --- a/spec/lib/gitlab/kubernetes/kube_client_spec.rb +++ b/spec/lib/gitlab/kubernetes/kube_client_spec.rb @@ -66,6 +66,20 @@ describe Gitlab::Kubernetes::KubeClient do end end + describe '#knative_client' do + subject { client.knative_client } + + it_behaves_like 'a Kubeclient' + + it 'has the extensions API group endpoint' do + expect(subject.api_endpoint.to_s).to match(%r{\/apis\/serving.knative.dev\Z}) + end + + it 'has the api_version' do + expect(subject.instance_variable_get(:@api_version)).to eq('v1alpha1') + end + end + describe 'core API' do let(:core_client) { client.core_client } diff --git a/spec/lib/gitlab/kubernetes/namespace_spec.rb b/spec/lib/gitlab/kubernetes/namespace_spec.rb index e098612f6fb..e1c35c355f4 100644 --- a/spec/lib/gitlab/kubernetes/namespace_spec.rb +++ b/spec/lib/gitlab/kubernetes/namespace_spec.rb @@ -9,7 +9,7 @@ describe Gitlab::Kubernetes::Namespace do describe '#exists?' 
do context 'when namespace do not exits' do - let(:exception) { ::Kubeclient::HttpError.new(404, "namespace #{name} not found", nil) } + let(:exception) { ::Kubeclient::ResourceNotFoundError.new(404, "namespace #{name} not found", nil) } it 'returns false' do expect(client).to receive(:get_namespace).with(name).once.and_raise(exception) diff --git a/spec/lib/gitlab/quick_actions/command_definition_spec.rb b/spec/lib/gitlab/quick_actions/command_definition_spec.rb index b03c1e23ca3..5dae82a63b4 100644 --- a/spec/lib/gitlab/quick_actions/command_definition_spec.rb +++ b/spec/lib/gitlab/quick_actions/command_definition_spec.rb @@ -210,6 +210,19 @@ describe Gitlab::QuickActions::CommandDefinition do end end + context 'when warning is set' do + before do + subject.explanation = 'Explanation' + subject.warning = 'dangerous!' + end + + it 'returns this static string' do + result = subject.explain({}, nil) + + expect(result).to eq 'Explanation (dangerous!)' + end + end + context 'when the explanation is dynamic' do before do subject.explanation = proc { |arg| "Dynamic #{arg}" } diff --git a/spec/lib/gitlab/quick_actions/dsl_spec.rb b/spec/lib/gitlab/quick_actions/dsl_spec.rb index 067a30fd7e2..fd4df8694ba 100644 --- a/spec/lib/gitlab/quick_actions/dsl_spec.rb +++ b/spec/lib/gitlab/quick_actions/dsl_spec.rb @@ -12,6 +12,7 @@ describe Gitlab::QuickActions::Dsl do params 'The first argument' explanation 'Static explanation' + warning 'Possible problem!' command :explanation_with_aliases, :once, :first do |arg| arg end @@ -64,6 +65,7 @@ describe Gitlab::QuickActions::Dsl do expect(no_args_def.condition_block).to be_nil expect(no_args_def.action_block).to be_a_kind_of(Proc) expect(no_args_def.parse_params_block).to be_nil + expect(no_args_def.warning).to eq('') expect(explanation_with_aliases_def.name).to eq(:explanation_with_aliases) expect(explanation_with_aliases_def.aliases).to eq([:once, :first]) @@ -73,6 +75,7 @@ describe Gitlab::QuickActions::Dsl do expect(explanation_with_aliases_def.condition_block).to be_nil expect(explanation_with_aliases_def.action_block).to be_a_kind_of(Proc) expect(explanation_with_aliases_def.parse_params_block).to be_nil + expect(explanation_with_aliases_def.warning).to eq('Possible problem!') expect(dynamic_description_def.name).to eq(:dynamic_description) expect(dynamic_description_def.aliases).to eq([]) @@ -82,6 +85,7 @@ describe Gitlab::QuickActions::Dsl do expect(dynamic_description_def.condition_block).to be_nil expect(dynamic_description_def.action_block).to be_a_kind_of(Proc) expect(dynamic_description_def.parse_params_block).to be_nil + expect(dynamic_description_def.warning).to eq('') expect(cc_def.name).to eq(:cc) expect(cc_def.aliases).to eq([]) @@ -91,6 +95,7 @@ describe Gitlab::QuickActions::Dsl do expect(cc_def.condition_block).to be_nil expect(cc_def.action_block).to be_nil expect(cc_def.parse_params_block).to be_nil + expect(cc_def.warning).to eq('') expect(cond_action_def.name).to eq(:cond_action) expect(cond_action_def.aliases).to eq([]) @@ -100,6 +105,7 @@ describe Gitlab::QuickActions::Dsl do expect(cond_action_def.condition_block).to be_a_kind_of(Proc) expect(cond_action_def.action_block).to be_a_kind_of(Proc) expect(cond_action_def.parse_params_block).to be_nil + expect(cond_action_def.warning).to eq('') expect(with_params_parsing_def.name).to eq(:with_params_parsing) expect(with_params_parsing_def.aliases).to eq([]) @@ -109,6 +115,7 @@ describe Gitlab::QuickActions::Dsl do expect(with_params_parsing_def.condition_block).to be_nil 
expect(with_params_parsing_def.action_block).to be_a_kind_of(Proc) expect(with_params_parsing_def.parse_params_block).to be_a_kind_of(Proc) + expect(with_params_parsing_def.warning).to eq('') expect(substitution_def.name).to eq(:something) expect(substitution_def.aliases).to eq([]) @@ -118,6 +125,7 @@ describe Gitlab::QuickActions::Dsl do expect(substitution_def.condition_block).to be_nil expect(substitution_def.action_block.call('text')).to eq('text Some complicated thing you want in here') expect(substitution_def.parse_params_block).to be_nil + expect(substitution_def.warning).to eq('') end end end diff --git a/spec/lib/gitlab/sentry_spec.rb b/spec/lib/gitlab/sentry_spec.rb index 499757da061..d3b41b27b80 100644 --- a/spec/lib/gitlab/sentry_spec.rb +++ b/spec/lib/gitlab/sentry_spec.rb @@ -52,4 +52,28 @@ describe Gitlab::Sentry do end end end + + context '.track_acceptable_exception' do + let(:exception) { RuntimeError.new('boom') } + + before do + allow(described_class).to receive(:enabled?).and_return(true) + end + + it 'calls Raven.capture_exception' do + expected_extras = { + some_other_info: 'info', + issue_url: 'http://gitlab.com/gitlab-org/gitlab-ce/issues/1' + } + + expect(Raven).to receive(:capture_exception) + .with(exception, extra: a_hash_including(expected_extras)) + + described_class.track_acceptable_exception( + exception, + issue_url: 'http://gitlab.com/gitlab-org/gitlab-ce/issues/1', + extra: { some_other_info: 'info' } + ) + end + end end diff --git a/spec/lib/json_web_token/hmac_token_spec.rb b/spec/lib/json_web_token/hmac_token_spec.rb new file mode 100644 index 00000000000..f2cbc381967 --- /dev/null +++ b/spec/lib/json_web_token/hmac_token_spec.rb @@ -0,0 +1,133 @@ +# frozen_string_literal: true + +require 'json' +require 'timecop' + +describe JSONWebToken::HMACToken do + let(:secret) { 'shh secret squirrel' } + + shared_examples 'a valid, non-expired token' do + it 'is an Array with two elements' do + expect(decoded_token).to be_a(Array) + expect(decoded_token.count).to eq(2) + end + + it 'contains the following keys in the first Array element Hash - jti, iat, nbf, exp' do + expect(decoded_token[0].keys).to include('jti', 'iat', 'nbf', 'exp') + end + + it 'contains the following keys in the second Array element Hash - typ and alg' do + expect(decoded_token[1]['typ']).to eql('JWT') + expect(decoded_token[1]['alg']).to eql('HS256') + end + end + + describe '.decode' do + let(:leeway) { described_class::IAT_LEEWAY } + let(:decoded_token) { described_class.decode(encoded_token, secret, leeway: leeway) } + + context 'with an invalid token' do + context 'that is junk' do + let(:encoded_token) { 'junk' } + + it "raises exception saying 'Not enough or too many segments'" do + expect { decoded_token }.to raise_error(JWT::DecodeError, 'Not enough or too many segments') + end + end + + context 'that has been fiddled with' do + let(:encoded_token) do + described_class.new(secret).encoded.tap { |token| token[0] = 'E' } + end + + it "raises exception saying 'Invalid segment encoding'" do + expect { decoded_token }.to raise_error(JWT::DecodeError, 'Invalid segment encoding') + end + end + + context 'that was generated using a different secret' do + let(:encoded_token) { described_class.new('some other secret').encoded } + + it "raises exception saying 'Signature verification raised" do + expect { decoded_token }.to raise_error(JWT::VerificationError, 'Signature verification raised') + end + end + + context 'that is expired' do + # Needs the ! 
so Timecop.freeze() is effective + let!(:encoded_token) { described_class.new(secret).encoded } + + it "raises exception saying 'Signature has expired'" do + # Needs to be 120 seconds, because the default expiry is 60 seconds + # with an additional 60 second leeway. + Timecop.freeze(Time.now + 120) do + expect { decoded_token }.to raise_error(JWT::ExpiredSignature, 'Signature has expired') + end + end + end + end + + context 'with a valid token' do + let(:encoded_token) do + hmac_token = described_class.new(secret) + hmac_token.expire_time = Time.now + expire_time + hmac_token.encoded + end + + context 'that has expired' do + let(:expire_time) { 0 } + + context 'with the default leeway' do + Timecop.freeze(Time.now + 1) do + it_behaves_like 'a valid, non-expired token' + end + end + + context 'with a leeway of 0 seconds' do + let(:leeway) { 0 } + + it "raises exception saying 'Signature has expired'" do + Timecop.freeze(Time.now + 1) do + expect { decoded_token }.to raise_error(JWT::ExpiredSignature, 'Signature has expired') + end + end + end + end + + context 'that has not expired' do + let(:expire_time) { described_class::DEFAULT_EXPIRE_TIME } + + it_behaves_like 'a valid, non-expired token' + end + end + end + + describe '#encoded' do + let(:decoded_token) { described_class.decode(encoded_token, secret) } + + context 'without data' do + let(:encoded_token) { described_class.new(secret).encoded } + + it_behaves_like 'a valid, non-expired token' + end + + context 'with data' do + let(:data) { { secret_key: 'secret value' }.to_json } + let(:encoded_token) do + ec = described_class.new(secret) + ec[:data] = data + ec.encoded + end + + it_behaves_like 'a valid, non-expired token' + + it "contains the 'data' key in the first Array element Hash" do + expect(decoded_token[0]).to have_key('data') + end + + it 'can re-read back the data' do + expect(decoded_token[0]['data']).to eql(data) + end + end + end +end diff --git a/spec/models/clusters/kubernetes_namespace_spec.rb b/spec/models/clusters/kubernetes_namespace_spec.rb index 0dfeea5cd2f..c068c4d7739 100644 --- a/spec/models/clusters/kubernetes_namespace_spec.rb +++ b/spec/models/clusters/kubernetes_namespace_spec.rb @@ -8,6 +8,22 @@ RSpec.describe Clusters::KubernetesNamespace, type: :model do it { is_expected.to belong_to(:cluster) } it { is_expected.to have_one(:platform_kubernetes) } + describe 'has_service_account_token' do + subject { described_class.has_service_account_token } + + context 'namespace has service_account_token' do + let!(:namespace) { create(:cluster_kubernetes_namespace, :with_token) } + + it { is_expected.to include(namespace) } + end + + context 'namespace has no service_account_token' do + let!(:namespace) { create(:cluster_kubernetes_namespace) } + + it { is_expected.not_to include(namespace) } + end + end + describe 'namespace uniqueness validation' do let(:cluster_project) { create(:cluster_project) } let(:kubernetes_namespace) { build(:cluster_kubernetes_namespace, namespace: 'my-namespace') } diff --git a/spec/models/clusters/platforms/kubernetes_spec.rb b/spec/models/clusters/platforms/kubernetes_spec.rb index f5d261c4e9d..99fd6ccc4d8 100644 --- a/spec/models/clusters/platforms/kubernetes_spec.rb +++ b/spec/models/clusters/platforms/kubernetes_spec.rb @@ -210,9 +210,11 @@ describe Clusters::Platforms::Kubernetes, :use_clean_rails_memory_store_caching let(:api_url) { 'https://kube.domain.com' } let(:ca_pem) { 'CA PEM DATA' } + subject { kubernetes.predefined_variables(project: cluster.project) } + shared_examples 
'setting variables' do it 'sets the variables' do - expect(kubernetes.predefined_variables(project: cluster.project)).to include( + expect(subject).to include( { key: 'KUBE_URL', value: api_url, public: true }, { key: 'KUBE_CA_PEM', value: ca_pem, public: true }, { key: 'KUBE_CA_PEM_FILE', value: ca_pem, public: true, file: true } @@ -220,6 +222,30 @@ describe Clusters::Platforms::Kubernetes, :use_clean_rails_memory_store_caching end end + context 'kubernetes namespace is created with no service account token' do + let!(:kubernetes_namespace) { create(:cluster_kubernetes_namespace, cluster: cluster) } + + it_behaves_like 'setting variables' + + it 'sets KUBE_TOKEN' do + expect(subject).to include( + { key: 'KUBE_TOKEN', value: kubernetes.token, public: false } + ) + end + end + + context 'kubernetes namespace is created with service account token' do + let!(:kubernetes_namespace) { create(:cluster_kubernetes_namespace, :with_token, cluster: cluster) } + + it_behaves_like 'setting variables' + + it 'sets KUBE_TOKEN' do + expect(subject).to include( + { key: 'KUBE_TOKEN', value: kubernetes_namespace.service_account_token, public: false } + ) + end + end + context 'namespace is provided' do let(:namespace) { 'my-project' } @@ -228,12 +254,24 @@ describe Clusters::Platforms::Kubernetes, :use_clean_rails_memory_store_caching end it_behaves_like 'setting variables' + + it 'sets KUBE_TOKEN' do + expect(subject).to include( + { key: 'KUBE_TOKEN', value: kubernetes.token, public: false } + ) + end end context 'no namespace provided' do let(:namespace) { kubernetes.actual_namespace } it_behaves_like 'setting variables' + + it 'sets KUBE_TOKEN' do + expect(subject).to include( + { key: 'KUBE_TOKEN', value: kubernetes.token, public: false } + ) + end end end diff --git a/spec/models/concerns/awardable_spec.rb b/spec/models/concerns/awardable_spec.rb index debc02fa51f..5713106418d 100644 --- a/spec/models/concerns/awardable_spec.rb +++ b/spec/models/concerns/awardable_spec.rb @@ -37,8 +37,8 @@ describe Awardable do create(:award_emoji, awardable: issue3, name: "star", user: award_emoji.user) create(:award_emoji, awardable: issue3, name: "star", user: award_emoji2.user) - expect(Issue.awarded(award_emoji.user)).to eq [issue, issue3] - expect(Issue.awarded(award_emoji2.user)).to eq [issue2, issue3] + expect(Issue.awarded(award_emoji.user)).to contain_exactly(issue, issue3) + expect(Issue.awarded(award_emoji2.user)).to contain_exactly(issue2, issue3) end end diff --git a/spec/models/concerns/deployable_spec.rb b/spec/models/concerns/deployable_spec.rb index ac79c75a55e..6951be903fe 100644 --- a/spec/models/concerns/deployable_spec.rb +++ b/spec/models/concerns/deployable_spec.rb @@ -49,5 +49,26 @@ describe Deployable do expect(environment).to be_nil end end + + context 'when environment scope contains invalid character' do + let(:job) do + create( + :ci_build, + name: 'job:deploy-to-test-site', + environment: '$CI_JOB_NAME', + options: { + environment: { + name: '$CI_JOB_NAME', + url: 'http://staging.example.com/$CI_JOB_NAME', + on_stop: 'stop_review_app' + } + }) + end + + it 'does not create a deployment and environment record' do + expect(deployment).to be_nil + expect(environment).to be_nil + end + end end end diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb index c7202b481d3..a58dc8e25e8 100644 --- a/spec/models/merge_request_spec.rb +++ b/spec/models/merge_request_spec.rb @@ -1782,7 +1782,7 @@ describe MergeRequest do allow(subject).to receive(:head_pipeline) { 
nil } end - it { expect(subject.mergeable_ci_state?).to be_truthy } + it { expect(subject.mergeable_ci_state?).to be_falsey } end end @@ -2651,6 +2651,10 @@ describe MergeRequest do describe '#includes_any_commits?' do it 'returns false' do + expect(subject.includes_any_commits?([])).to be_falsey + end + + it 'returns false' do expect(subject.includes_any_commits?([Gitlab::Git::BLANK_SHA])).to be_falsey end diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb index b2ca6e98068..bdff68cee8b 100644 --- a/spec/models/project_spec.rb +++ b/spec/models/project_spec.rb @@ -2415,7 +2415,7 @@ describe Project do end context 'when user configured kubernetes from CI/CD > Clusters and KubernetesNamespace migration has been executed' do - let!(:kubernetes_namespace) { create(:cluster_kubernetes_namespace) } + let!(:kubernetes_namespace) { create(:cluster_kubernetes_namespace, :with_token) } let!(:cluster) { kubernetes_namespace.cluster } let(:project) { kubernetes_namespace.project } diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb index e6d01c9689f..bb913ae0e79 100644 --- a/spec/requests/api/users_spec.rb +++ b/spec/requests/api/users_spec.rb @@ -2018,11 +2018,11 @@ describe API::Users do expect(json_response['message']).to eq('403 Forbidden') end - it 'returns a personal access token' do + it 'returns an impersonation token' do get api("/users/#{user.id}/impersonation_tokens/#{impersonation_token.id}", admin) expect(response).to have_gitlab_http_status(200) - expect(json_response['token']).to be_present + expect(json_response['token']).not_to be_present expect(json_response['impersonation']).to be_truthy end end diff --git a/spec/serializers/environment_status_entity_spec.rb b/spec/serializers/environment_status_entity_spec.rb index 52bd40ecb5e..8a6a38fe5f8 100644 --- a/spec/serializers/environment_status_entity_spec.rb +++ b/spec/serializers/environment_status_entity_spec.rb @@ -33,19 +33,47 @@ describe EnvironmentStatusEntity do it { is_expected.not_to include(:metrics_url) } it { is_expected.not_to include(:metrics_monitoring_url) } - context 'when :ci_environments_status_changes feature flag is disabled' do + context 'when the user is project maintainer' do before do - stub_feature_flags(ci_environments_status_changes: false) + project.add_maintainer(user) end - it { is_expected.not_to include(:changes) } + it { is_expected.to include(:stop_url) } end - context 'when the user is project maintainer' do + context 'when deployment has metrics' do + let(:prometheus_adapter) { double('prometheus_adapter', can_query?: true) } + + let(:simple_metrics) do + { + success: true, + metrics: {}, + last_update: 42 + } + end + before do project.add_maintainer(user) + allow(deployment).to receive(:prometheus_adapter).and_return(prometheus_adapter) + allow(prometheus_adapter).to receive(:query).with(:deployment, deployment).and_return(simple_metrics) + allow(entity).to receive(:deployment).and_return(deployment) end - it { is_expected.to include(:stop_url) } + context 'when deployment succeeded' do + let(:deployment) { create(:deployment, :succeed, :review_app) } + + it 'returns metrics url' do + expect(subject[:metrics_url]) + .to eq("/#{project.namespace.name}/#{project.name}/environments/#{environment.id}/deployments/#{deployment.iid}/metrics") + end + end + + context 'when deployment is running' do + let(:deployment) { create(:deployment, :running, :review_app) } + + it 'does not return metrics url' do + expect(subject[:metrics_url]).to be_nil + end + end end 
end diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb index 5c87ed5c3c6..4d9c5aabbda 100644 --- a/spec/services/ci/create_pipeline_service_spec.rb +++ b/spec/services/ci/create_pipeline_service_spec.rb @@ -387,15 +387,24 @@ describe Ci::CreatePipelineService do context 'with environment' do before do - config = YAML.dump(deploy: { environment: { name: "review/$CI_COMMIT_REF_NAME" }, script: 'ls' }) + config = YAML.dump( + deploy: { + environment: { name: "review/$CI_COMMIT_REF_NAME" }, + script: 'ls', + tags: ['hello'] + }) + stub_ci_pipeline_yaml_file(config) end - it 'creates the environment' do + it 'creates the environment with tags' do result = execute_service expect(result).to be_persisted expect(Environment.find_by(name: "review/master")).to be_present + expect(result.builds.first.tag_list).to contain_exactly('hello') + expect(result.builds.first.deployment).to be_persisted + expect(result.builds.first.deployment.deployable).to be_a(Ci::Build) end end @@ -599,5 +608,53 @@ describe Ci::CreatePipelineService do .to eq variables_attributes.map(&:with_indifferent_access) end end + + context 'when pipeline has a job with environment' do + let(:pipeline) { execute_service } + + before do + stub_ci_pipeline_yaml_file(YAML.dump(config)) + end + + context 'when environment name is valid' do + let(:config) do + { + review_app: { + script: 'deploy', + environment: { + name: 'review/${CI_COMMIT_REF_NAME}', + url: 'http://${CI_COMMIT_REF_SLUG}-staging.example.com' + } + } + } + end + + it 'has a job with environment' do + expect(pipeline.builds.count).to eq(1) + expect(pipeline.builds.first.persisted_environment.name).to eq('review/master') + expect(pipeline.builds.first.deployment).to be_created + end + end + + context 'when environment name is invalid' do + let(:config) do + { + 'job:deploy-to-test-site': { + script: 'deploy', + environment: { + name: '${CI_JOB_NAME}', + url: 'https://$APP_URL' + } + } + } + end + + it 'has a job without environment' do + expect(pipeline.builds.count).to eq(1) + expect(pipeline.builds.first.persisted_environment).to be_nil + expect(pipeline.builds.first.deployment).to be_nil + end + end + end end end diff --git a/spec/services/clusters/applications/check_installation_progress_service_spec.rb b/spec/services/clusters/applications/check_installation_progress_service_spec.rb index 1a565bb734d..ea17f2bb423 100644 --- a/spec/services/clusters/applications/check_installation_progress_service_spec.rb +++ b/spec/services/clusters/applications/check_installation_progress_service_spec.rb @@ -19,6 +19,10 @@ describe Clusters::Applications::CheckInstallationProgressService do shared_examples 'a not yet terminated installation' do |a_phase| let(:phase) { a_phase } + before do + expect(service).to receive(:installation_phase).once.and_return(phase) + end + context "when phase is #{a_phase}" do context 'when not timeouted' do it 'reschedule a new check' do @@ -50,8 +54,6 @@ describe Clusters::Applications::CheckInstallationProgressService do end before do - expect(service).to receive(:installation_phase).once.and_return(phase) - allow(service).to receive(:installation_errors).and_return(errors) allow(service).to receive(:remove_installation_pod).and_return(nil) end @@ -60,6 +62,10 @@ describe Clusters::Applications::CheckInstallationProgressService do context 'when installation POD succeeded' do let(:phase) { Gitlab::Kubernetes::Pod::SUCCEEDED } + before do + expect(service).to 
receive(:installation_phase).once.and_return(phase) + end + it_behaves_like 'a terminated installation' it 'make the application installed' do @@ -76,6 +82,10 @@ describe Clusters::Applications::CheckInstallationProgressService do let(:phase) { Gitlab::Kubernetes::Pod::FAILED } let(:errors) { 'test installation failed' } + before do + expect(service).to receive(:installation_phase).once.and_return(phase) + end + it_behaves_like 'a terminated installation' it 'make the application errored' do @@ -87,5 +97,22 @@ describe Clusters::Applications::CheckInstallationProgressService do end RESCHEDULE_PHASES.each { |phase| it_behaves_like 'a not yet terminated installation', phase } + + context 'when installation raises a Kubeclient::HttpError' do + let(:cluster) { create(:cluster, :provided_by_user, :project) } + + before do + application.update!(cluster: cluster) + + expect(service).to receive(:installation_phase).and_raise(Kubeclient::HttpError.new(401, 'Unauthorized', nil)) + end + + it 'shows the response code from the error' do + service.execute + + expect(application).to be_errored + expect(application.status_reason).to eq('Kubernetes error: 401') + end + end end end diff --git a/spec/services/clusters/applications/install_service_spec.rb b/spec/services/clusters/applications/install_service_spec.rb index 4bd19f5bd79..2f801d019fe 100644 --- a/spec/services/clusters/applications/install_service_spec.rb +++ b/spec/services/clusters/applications/install_service_spec.rb @@ -42,7 +42,7 @@ describe Clusters::Applications::InstallService do service.execute expect(application).to be_errored - expect(application.status_reason).to match('Kubernetes error.') + expect(application.status_reason).to match('Kubernetes error: 500') end end diff --git a/spec/services/clusters/gcp/kubernetes/fetch_kubernetes_token_service_spec.rb b/spec/services/clusters/gcp/kubernetes/fetch_kubernetes_token_service_spec.rb index 4d1a6bb7b3a..a5806559b14 100644 --- a/spec/services/clusters/gcp/kubernetes/fetch_kubernetes_token_service_spec.rb +++ b/spec/services/clusters/gcp/kubernetes/fetch_kubernetes_token_service_spec.rb @@ -19,13 +19,16 @@ describe Clusters::Gcp::Kubernetes::FetchKubernetesTokenService do subject { described_class.new(kubeclient, service_account_token_name, namespace).execute } + before do + stub_kubeclient_discover(api_url) + end + context 'when params correct' do let(:decoded_token) { 'xxx.token.xxx' } let(:token) { Base64.encode64(decoded_token) } context 'when gitlab-token exists' do before do - stub_kubeclient_discover(api_url) stub_kubeclient_get_secret( api_url, { @@ -39,9 +42,17 @@ describe Clusters::Gcp::Kubernetes::FetchKubernetesTokenService do it { is_expected.to eq(decoded_token) } end + context 'when there is a 500 error' do + before do + stub_kubeclient_get_secret_error(api_url, service_account_token_name, namespace: namespace, status: 500) + end + + it { expect { subject }.to raise_error(Kubeclient::HttpError) } + end + context 'when gitlab-token does not exist' do before do - allow(kubeclient).to receive(:get_secret).and_raise(Kubeclient::HttpError.new(404, 'Not found', nil)) + stub_kubeclient_get_secret_error(api_url, service_account_token_name, namespace: namespace, status: 404) end it { is_expected.to be_nil } diff --git a/spec/services/issuable/clone/attributes_rewriter_spec.rb b/spec/services/issuable/clone/attributes_rewriter_spec.rb new file mode 100644 index 00000000000..20bda6984bd --- /dev/null +++ b/spec/services/issuable/clone/attributes_rewriter_spec.rb @@ -0,0 +1,79 @@ +# 
frozen_string_literal: true + +require 'spec_helper' + +describe Issuable::Clone::AttributesRewriter do + let(:user) { create(:user) } + let(:group) { create(:group) } + let(:project1) { create(:project, :public, group: group) } + let(:project2) { create(:project, :public, group: group) } + let(:original_issue) { create(:issue, project: project1) } + let(:new_issue) { create(:issue, project: project2) } + + subject { described_class.new(user, original_issue, new_issue) } + + context 'setting labels' do + it 'sets labels present in the new project and group labels' do + project1_label_1 = create(:label, title: 'label1', project: project1) + project1_label_2 = create(:label, title: 'label2', project: project1) + project2_label_1 = create(:label, title: 'label1', project: project2) + group_label = create(:group_label, title: 'group_label', group: group) + create(:label, title: 'label3', project: project2) + + original_issue.update(labels: [project1_label_1, project1_label_2, group_label]) + + subject.execute + + expect(new_issue.reload.labels).to match_array([project2_label_1, group_label]) + end + + it 'does not set any labels when not used on the original issue' do + subject.execute + + expect(new_issue.reload.labels).to be_empty + end + + it 'copies the resource label events' do + resource_label_events = create_list(:resource_label_event, 2, issue: original_issue) + + subject.execute + + expected = resource_label_events.map(&:label_id) + + expect(new_issue.resource_label_events.map(&:label_id)).to match_array(expected) + end + end + + context 'setting milestones' do + it 'sets milestone to nil when old issue milestone is not in the new project' do + milestone = create(:milestone, title: 'milestone', project: project1) + + original_issue.update(milestone: milestone) + + subject.execute + + expect(new_issue.reload.milestone).to be_nil + end + + it 'copies the milestone when old issue milestone title is in the new project' do + milestone_project1 = create(:milestone, title: 'milestone', project: project1) + milestone_project2 = create(:milestone, title: 'milestone', project: project2) + + original_issue.update(milestone: milestone_project1) + + subject.execute + + expect(new_issue.reload.milestone).to eq(milestone_project2) + end + + it 'copies the milestone when old issue milestone is a group milestone' do + milestone = create(:milestone, title: 'milestone', group: group) + + original_issue.update(milestone: milestone) + + subject.execute + + expect(new_issue.reload.milestone).to eq(milestone) + end + end +end diff --git a/spec/services/issuable/clone/content_rewriter_spec.rb b/spec/services/issuable/clone/content_rewriter_spec.rb new file mode 100644 index 00000000000..4d3cb0bd254 --- /dev/null +++ b/spec/services/issuable/clone/content_rewriter_spec.rb @@ -0,0 +1,153 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Issuable::Clone::ContentRewriter do + let(:user) { create(:user) } + let(:group) { create(:group) } + let(:project1) { create(:project, :public, group: group) } + let(:project2) { create(:project, :public, group: group) } + + let(:other_issue) { create(:issue, project: project1) } + let(:merge_request) { create(:merge_request) } + + subject { described_class.new(user, original_issue, new_issue)} + + let(:description) { 'Simple text' } + let(:original_issue) { create(:issue, description: description, project: project1) } + let(:new_issue) { create(:issue, project: project2) } + + context 'rewriting award emojis' do + it 'copies the award emojis' do + 
create(:award_emoji, awardable: original_issue, name: 'thumbsup') + create(:award_emoji, awardable: original_issue, name: 'thumbsdown') + + expect { subject.execute }.to change { AwardEmoji.count }.by(2) + + expect(new_issue.award_emoji.map(&:name)).to match_array(%w(thumbsup thumbsdown)) + end + end + + context 'rewriting description' do + before do + subject.execute + end + + context 'when description is a simple text' do + it 'does not rewrite the description' do + expect(new_issue.reload.description).to eq(original_issue.description) + end + end + + context 'when description contains a local reference' do + let(:description) { "See ##{other_issue.iid}" } + + it 'rewrites the local reference correctly' do + expected_description = "See #{project1.path}##{other_issue.iid}" + + expect(new_issue.reload.description).to eq(expected_description) + end + end + + context 'when description contains a cross reference' do + let(:description) { "See #{merge_request.project.full_path}!#{merge_request.iid}" } + + it 'rewrites the cross reference correctly' do + expected_description = "See #{merge_request.project.full_path}!#{merge_request.iid}" + + expect(new_issue.reload.description).to eq(expected_description) + end + end + + context 'when description contains a user reference' do + let(:description) { "FYI #{user.to_reference}" } + + it 'works with a user reference' do + expect(new_issue.reload.description).to eq("FYI #{user.to_reference}") + end + end + + context 'when description contains uploads' do + let(:uploader) { build(:file_uploader, project: project1) } + let(:description) { "Text and #{uploader.markdown_link}" } + + it 'rewrites uploads in the description' do + upload = Upload.last + + expect(new_issue.description).not_to eq(description) + expect(new_issue.description).to match(/Text and #{FileUploader::MARKDOWN_PATTERN}/) + expect(upload.secret).not_to eq(uploader.secret) + expect(new_issue.description).to include(upload.secret) + expect(new_issue.description).to include(upload.path) + end + end + end + + context 'rewriting notes' do + context 'simple notes' do + let!(:notes) do + [ + create(:note, noteable: original_issue, project: project1, + created_at: 2.weeks.ago, updated_at: 1.week.ago), + create(:note, noteable: original_issue, project: project1), + create(:note, system: true, noteable: original_issue, project: project1) + ] + end + let!(:system_note_metadata) { create(:system_note_metadata, note: notes.last) } + let!(:award_emoji) { create(:award_emoji, awardable: notes.first, name: 'thumbsup')} + + before do + subject.execute + end + + it 'rewrites existing notes in valid order' do + expect(new_issue.notes.order('id ASC').pluck(:note).first(3)).to eq(notes.map(&:note)) + end + + it 'copies all the issue notes' do + expect(new_issue.notes.count).to eq(3) + end + + it 'does not change the note attributes' do + subject.execute + + new_note = new_issue.notes.first + + expect(new_note.note).to eq(notes.first.note) + expect(new_note.author).to eq(notes.first.author) + end + + it 'copies the award emojis' do + subject.execute + + new_note = new_issue.notes.first + expect(new_note.award_emoji.first.name).to eq('thumbsup') + end + + it 'copies system_note_metadata for system note' do + new_note = new_issue.notes.last + + expect(new_note.system_note_metadata.action).to eq(system_note_metadata.action) + expect(new_note.system_note_metadata.id).not_to eq(system_note_metadata.id) + end + end + + context 'notes with reference' do + let(:text) do + "See ##{other_issue.iid} and
#{merge_request.project.full_path}!#{merge_request.iid}" + end + let!(:note) { create(:note, noteable: original_issue, note: text, project: project1) } + + it 'rewrites the references correctly' do + subject.execute + + new_note = new_issue.notes.first + + expected_text = "See #{other_issue.project.path}##{other_issue.iid} and #{merge_request.project.full_path}!#{merge_request.iid}" + + expect(new_note.note).to eq(expected_text) + expect(new_note.author).to eq(note.author) + end + end + end +end diff --git a/spec/services/issues/move_service_spec.rb b/spec/services/issues/move_service_spec.rb index b5767583952..1e088bc7d9b 100644 --- a/spec/services/issues/move_service_spec.rb +++ b/spec/services/issues/move_service_spec.rb @@ -10,11 +10,9 @@ describe Issues::MoveService do let(:sub_group_2) { create(:group, :private, parent: group) } let(:old_project) { create(:project, namespace: sub_group_1) } let(:new_project) { create(:project, namespace: sub_group_2) } - let(:milestone1) { create(:milestone, project_id: old_project.id, title: 'v9.0') } let(:old_issue) do - create(:issue, title: title, description: description, - project: old_project, author: author, milestone: milestone1) + create(:issue, title: title, description: description, project: old_project, author: author) end subject(:move_service) do @@ -25,16 +23,6 @@ describe Issues::MoveService do before do old_project.add_reporter(user) new_project.add_reporter(user) - - labels = Array.new(2) { |x| "label%d" % (x + 1) } - - labels.each do |label| - old_issue.labels << create(:label, - project_id: old_project.id, - title: label) - - new_project.labels << create(:label, title: label) - end end end @@ -48,91 +36,6 @@ describe Issues::MoveService do context 'issue movable' do include_context 'user can move issue' - context 'move to new milestone' do - let(:new_issue) { move_service.execute(old_issue, new_project) } - - context 'project milestone' do - let!(:milestone2) do - create(:milestone, project_id: new_project.id, title: 'v9.0') - end - - it 'assigns milestone to new issue' do - expect(new_issue.reload.milestone.title).to eq 'v9.0' - expect(new_issue.reload.milestone).to eq(milestone2) - end - end - - context 'group milestones' do - let!(:group) { create(:group, :private) } - let!(:group_milestone_1) do - create(:milestone, group_id: group.id, title: 'v9.0_group') - end - - before do - old_issue.update(milestone: group_milestone_1) - old_project.update(namespace: group) - new_project.update(namespace: group) - - group.add_users([user], GroupMember::DEVELOPER) - end - - context 'when moving to a project of the same group' do - it 'keeps the same group milestone' do - expect(new_issue.reload.project).to eq(new_project) - expect(new_issue.reload.milestone).to eq(group_milestone_1) - end - end - - context 'when moving to a project of a different group' do - let!(:group_2) { create(:group, :private) } - - let!(:group_milestone_2) do - create(:milestone, group_id: group_2.id, title: 'v9.0_group') - end - - before do - old_issue.update(milestone: group_milestone_1) - new_project.update(namespace: group_2) - - group_2.add_users([user], GroupMember::DEVELOPER) - end - - it 'assigns to new group milestone of same title' do - expect(new_issue.reload.project).to eq(new_project) - expect(new_issue.reload.milestone).to eq(group_milestone_2) - end - end - end - end - - context 'issue with group labels', :nested_groups do - it 'assigns group labels to new issue' do - label = create(:group_label, group: group) - label_issue = create(:labeled_issue, 
description: description, project: old_project, - milestone: milestone1, labels: [label]) - old_project.add_reporter(user) - new_project.add_reporter(user) - - new_issue = move_service.execute(label_issue, new_project) - - expect(new_issue).to have_attributes( - project: new_project, - labels: include(label) - ) - end - end - - context 'issue with resource label events' do - it 'assigns resource label events to new issue' do - old_issue.resource_label_events = create_list(:resource_label_event, 2, issue: old_issue) - - new_issue = move_service.execute(old_issue, new_project) - - expected = old_issue.resource_label_events.map(&:label_id) - expect(new_issue.resource_label_events.map(&:label_id)).to match_array(expected) - end - end - context 'generic issue' do include_context 'issue move executed' @@ -140,18 +43,6 @@ describe Issues::MoveService do expect(new_issue.project).to eq new_project end - it 'assign labels to new issue' do - expected_label_titles = new_issue.reload.labels.map(&:title) - expect(expected_label_titles).to include 'label1' - expect(expected_label_titles).to include 'label2' - expect(expected_label_titles.size).to eq 2 - - new_issue.labels.each do |label| - expect(new_project.labels).to include(label) - expect(old_project.labels).not_to include(label) - end - end - it 'rewrites issue title' do expect(new_issue.title).to eq title end @@ -203,140 +94,25 @@ describe Issues::MoveService do end end - context 'issue with notes' do - context 'notes without references' do - let(:notes_params) do - [{ system: false, note: 'Some comment 1' }, - { system: true, note: 'Some system note' }, - { system: false, note: 'Some comment 2' }] - end - let(:award_names) { %w(thumbsup thumbsdown facepalm) } - let(:notes_contents) { notes_params.map { |n| n[:note] } } - - before do - note_params = { noteable: old_issue, project: old_project, author: author } - notes_params.each_with_index do |note, index| - new_note = create(:note, note_params.merge(note)) - award_emoji_params = { awardable: new_note, name: award_names[index] } - create(:award_emoji, award_emoji_params) - end - end - - include_context 'issue move executed' - - let(:all_notes) { new_issue.notes.order('id ASC') } - let(:system_notes) { all_notes.system } - let(:user_notes) { all_notes.user } - - it 'rewrites existing notes in valid order' do - expect(all_notes.pluck(:note).first(3)).to eq notes_contents - end - - it 'creates new emojis for the new notes' do - expect(all_notes.map(&:award_emoji).to_a.flatten.map(&:name)).to eq award_names - end - - it 'adds a system note about move after rewritten notes' do - expect(system_notes.last.note).to match /^moved from/ - end - - it 'preserves orignal author of comment' do - expect(user_notes.pluck(:author_id)).to all(eq(author.id)) - end - end - - context 'note that has been updated' do - let!(:note) do - create(:note, noteable: old_issue, project: old_project, - author: author, updated_at: Date.yesterday, - created_at: Date.yesterday) - end - - include_context 'issue move executed' - - it 'preserves time when note has been created at' do - expect(new_issue.notes.first.created_at).to eq note.created_at - end + context 'issue with assignee' do + let(:assignee) { create(:user) } - it 'preserves time when note has been updated at' do - expect(new_issue.notes.first.updated_at).to eq note.updated_at - end - end - - context 'issue with assignee' do - let(:assignee) { create(:user) } - - before do - old_issue.assignees = [assignee] - end - - it 'preserves assignee with access to the new issue' 
do - new_project.add_reporter(assignee) - - new_issue = move_service.execute(old_issue, new_project) - - expect(new_issue.assignees).to eq([assignee]) - end - - it 'ignores assignee without access to the new issue' do - new_issue = move_service.execute(old_issue, new_project) - - expect(new_issue.assignees).to be_empty - end - end - - context 'notes with references' do - before do - create(:merge_request, source_project: old_project) - create(:note, noteable: old_issue, project: old_project, author: author, - note: 'Note with reference to merge request !1') - end - - include_context 'issue move executed' - let(:new_note) { new_issue.notes.first } - - it 'rewrites references using a cross reference to old project' do - expect(new_note.note) - .to eq "Note with reference to merge request #{old_project.to_reference(new_project)}!1" - end - end - - context 'issue description with uploads' do - let(:uploader) { build(:file_uploader, project: old_project) } - let(:description) { "Text and #{uploader.markdown_link}" } - - include_context 'issue move executed' - - it 'rewrites uploads in description' do - expect(new_issue.description).not_to eq description - expect(new_issue.description) - .to match(/Text and #{FileUploader::MARKDOWN_PATTERN}/) - expect(new_issue.description).not_to include uploader.secret - end + before do + old_issue.assignees = [assignee] end - end - describe 'rewriting references' do - include_context 'issue move executed' + it 'preserves assignee with access to the new issue' do + new_project.add_reporter(assignee) - context 'issue references' do - let(:another_issue) { create(:issue, project: old_project) } - let(:description) { "Some description #{another_issue.to_reference}" } + new_issue = move_service.execute(old_issue, new_project) - it 'rewrites referenced issues creating cross project reference' do - expect(new_issue.description) - .to eq "Some description #{another_issue.to_reference(new_project)}" - end + expect(new_issue.assignees).to eq([assignee]) end - context "user references" do - let(:another_issue) { create(:issue, project: old_project) } - let(:description) { "Some description #{user.to_reference}" } + it 'ignores assignee without access to the new issue' do + new_issue = move_service.execute(old_issue, new_project) - it "doesn't throw any errors for issues containing user references" do - expect(new_issue.description) - .to eq "Some description #{user.to_reference}" - end + expect(new_issue.assignees).to be_empty end end @@ -416,25 +192,5 @@ describe Issues::MoveService do it { expect { move }.to raise_error(StandardError, /permissions/) } end end - - context 'movable issue with no assigned labels' do - before do - old_project.add_reporter(user) - new_project.add_reporter(user) - - labels = Array.new(2) { |x| "label%d" % (x + 1) } - - labels.each do |label| - new_project.labels << create(:label, title: label) - end - end - - include_context 'issue move executed' - - it 'does not assign labels to new issue' do - expected_label_titles = new_issue.reload.labels.map(&:title) - expect(expected_label_titles.size).to eq 0 - end - end end end diff --git a/spec/services/merge_requests/reload_diffs_service_spec.rb b/spec/services/merge_requests/reload_diffs_service_spec.rb index 546c9f277c5..5acd01828cb 100644 --- a/spec/services/merge_requests/reload_diffs_service_spec.rb +++ b/spec/services/merge_requests/reload_diffs_service_spec.rb @@ -31,32 +31,11 @@ describe MergeRequests::ReloadDiffsService, :use_clean_rails_memory_store_cachin end context 'cache clearing' do 
- before do - allow_any_instance_of(Gitlab::Diff::File).to receive(:text?).and_return(true) - allow_any_instance_of(Gitlab::Diff::File).to receive(:diffable?).and_return(true) - end - - it 'retrieves the diff files to cache the highlighted result' do - new_diff = merge_request.create_merge_request_diff - cache_key = new_diff.diffs_collection.cache_key - - expect(merge_request).to receive(:create_merge_request_diff).and_return(new_diff) - expect(Rails.cache).to receive(:read).with(cache_key).and_call_original - expect(Rails.cache).to receive(:write).with(cache_key, anything, anything).and_call_original - - subject.execute - end - it 'clears the cache for older diffs on the merge request' do old_diff = merge_request.merge_request_diff old_cache_key = old_diff.diffs_collection.cache_key - new_diff = merge_request.create_merge_request_diff - new_cache_key = new_diff.diffs_collection.cache_key - expect(merge_request).to receive(:create_merge_request_diff).and_return(new_diff) expect(Rails.cache).to receive(:delete).with(old_cache_key).and_call_original - expect(Rails.cache).to receive(:read).with(new_cache_key).and_call_original - expect(Rails.cache).to receive(:write).with(new_cache_key, anything, anything).and_call_original subject.execute end diff --git a/spec/services/notes/create_service_spec.rb b/spec/services/notes/create_service_spec.rb index b1290fd0d47..80b015d4cd0 100644 --- a/spec/services/notes/create_service_spec.rb +++ b/spec/services/notes/create_service_spec.rb @@ -57,6 +57,57 @@ describe Notes::CreateService do end end + context 'noteable highlight cache clearing' do + let(:project_with_repo) { create(:project, :repository) } + let(:merge_request) do + create(:merge_request, source_project: project_with_repo, + target_project: project_with_repo) + end + + let(:position) do + Gitlab::Diff::Position.new(old_path: "files/ruby/popen.rb", + new_path: "files/ruby/popen.rb", + old_line: nil, + new_line: 14, + diff_refs: merge_request.diff_refs) + end + + let(:new_opts) do + opts.merge(in_reply_to_discussion_id: nil, + type: 'DiffNote', + noteable_type: 'MergeRequest', + noteable_id: merge_request.id, + position: position.to_h) + end + + before do + allow_any_instance_of(Gitlab::Diff::Position) + .to receive(:unfolded_diff?) 
{ true } + end + + it 'clears noteable diff cache when it was unfolded for the note position' do + expect_any_instance_of(Gitlab::Diff::HighlightCache).to receive(:clear) + + described_class.new(project_with_repo, user, new_opts).execute + end + + it 'does not clear cache when note is not the first of the discussion' do + prev_note = + create(:diff_note_on_merge_request, noteable: merge_request, + project: project_with_repo) + reply_opts = + opts.merge(in_reply_to_discussion_id: prev_note.discussion_id, + type: 'DiffNote', + noteable_type: 'MergeRequest', + noteable_id: merge_request.id, + position: position.to_h) + + expect(merge_request).not_to receive(:diffs) + + described_class.new(project_with_repo, user, reply_opts).execute + end + end + context 'note diff file' do let(:project_with_repo) { create(:project, :repository) } let(:merge_request) do diff --git a/spec/services/notes/destroy_service_spec.rb b/spec/services/notes/destroy_service_spec.rb index 64445be560e..b1f4e87e8ea 100644 --- a/spec/services/notes/destroy_service_spec.rb +++ b/spec/services/notes/destroy_service_spec.rb @@ -21,5 +21,38 @@ describe Notes::DestroyService do expect { described_class.new(project, user).execute(note) } .to change { user.todos_pending_count }.from(1).to(0) end + + context 'noteable highlight cache clearing' do + let(:repo_project) { create(:project, :repository) } + let(:merge_request) do + create(:merge_request, source_project: repo_project, + target_project: repo_project) + end + + let(:note) do + create(:diff_note_on_merge_request, project: repo_project, + noteable: merge_request) + end + + before do + allow(note.position).to receive(:unfolded_diff?) { true } + end + + it 'clears noteable diff cache when it was unfolded for the note position' do + expect(merge_request).to receive_message_chain(:diffs, :clear_cache) + + described_class.new(repo_project, user).execute(note) + end + + it 'does not clear cache when note is not the first of the discussion' do + reply_note = create(:diff_note_on_merge_request, in_reply_to: note, + project: repo_project, + noteable: merge_request) + + expect(merge_request).not_to receive(:diffs) + + described_class.new(repo_project, user).execute(reply_note) + end + end end end diff --git a/spec/services/system_hooks_service_spec.rb b/spec/services/system_hooks_service_spec.rb index e0335880e8e..81b2c17fdb5 100644 --- a/spec/services/system_hooks_service_spec.rb +++ b/spec/services/system_hooks_service_spec.rb @@ -32,7 +32,7 @@ describe SystemHooksService do end it do - project.old_path_with_namespace = 'transfered_from_path' + project.old_path_with_namespace = 'transferred_from_path' expect(event_data(project, :transfer)).to include( :event_name, :name, :created_at, :updated_at, :path, :project_id, :owner_name, :owner_email, :project_visibility, diff --git a/spec/services/users/build_service_spec.rb b/spec/services/users/build_service_spec.rb index 051e8c87f39..17bc880dec5 100644 --- a/spec/services/users/build_service_spec.rb +++ b/spec/services/users/build_service_spec.rb @@ -159,9 +159,9 @@ describe Users::BuildService do true | true | 'fl@example.com' | '' | true true | false | 'fl@example.com' | '' | true - true | nil | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | true - true | true | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | true - true | false | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | true + true | nil | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false + true | true | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' 
| false + true | false | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false true | nil | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | true true | true | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | true diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index cd69160be10..3fedb9ed48c 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -235,6 +235,10 @@ RSpec.configure do |config| example.run if Gitlab::Database.mysql? end + config.around(:each, :rails5) do |example| + example.run if Gitlab.rails5? + end + # This makes sure the `ApplicationController#can?` method is stubbed with the # original implementation for all view specs. config.before(:each, type: :view) do diff --git a/spec/support/helpers/kubernetes_helpers.rb b/spec/support/helpers/kubernetes_helpers.rb index a03d9c4045f..ccaf86aa3a6 100644 --- a/spec/support/helpers/kubernetes_helpers.rb +++ b/spec/support/helpers/kubernetes_helpers.rb @@ -17,6 +17,7 @@ module KubernetesHelpers WebMock.stub_request(:get, api_url + '/api/v1').to_return(kube_response(kube_v1_discovery_body)) WebMock.stub_request(:get, api_url + '/apis/extensions/v1beta1').to_return(kube_response(kube_v1beta1_discovery_body)) WebMock.stub_request(:get, api_url + '/apis/rbac.authorization.k8s.io/v1').to_return(kube_response(kube_v1_rbac_authorization_discovery_body)) + WebMock.stub_request(:get, api_url + '/apis/serving.knative.dev/v1alpha1').to_return(kube_response(kube_v1alpha1_serving_knative_discovery_body)) end def stub_kubeclient_pods(response = nil) @@ -41,9 +42,9 @@ module KubernetesHelpers .to_return(kube_response(kube_v1_secret_body(options))) end - def stub_kubeclient_get_secret_error(api_url, name, namespace: 'default') + def stub_kubeclient_get_secret_error(api_url, name, namespace: 'default', status: 404) WebMock.stub_request(:get, api_url + "/api/v1/namespaces/#{namespace}/secrets/#{name}") - .to_return(status: [404, "Internal Server Error"]) + .to_return(status: [status, "Internal Server Error"]) end def stub_kubeclient_create_service_account(api_url, namespace: 'default') @@ -134,6 +135,18 @@ module KubernetesHelpers } end + def kube_v1alpha1_serving_knative_discovery_body + { + "kind" => "APIResourceList", + "resources" => [ + { "name" => "revisions", "namespaced" => true, "kind" => "Revision" }, + { "name" => "services", "namespaced" => true, "kind" => "Service" }, + { "name" => "configurations", "namespaced" => true, "kind" => "Configuration" }, + { "name" => "routes", "namespaced" => true, "kind" => "Route" } + ] + } + end + def kube_pods_body { "kind" => "PodList", diff --git a/spec/uploaders/file_uploader_spec.rb b/spec/uploaders/file_uploader_spec.rb index 7e24efda5dd..c74e0bf1955 100644 --- a/spec/uploaders/file_uploader_spec.rb +++ b/spec/uploaders/file_uploader_spec.rb @@ -81,19 +81,24 @@ describe FileUploader do end describe 'copy_to' do + let(:new_project) { create(:project) } + let(:moved) { described_class.copy_to(subject, new_project) } + shared_examples 'returns a valid uploader' do describe 'returned uploader' do - let(:new_project) { create(:project) } - let(:moved) { described_class.copy_to(subject, new_project) } - it 'generates a new secret' do expect(subject).to be expect(described_class).to receive(:generate_secret).once.and_call_original expect(moved).to be end - it 'create new upload' do - expect(moved.upload).not_to eq(subject.upload) + it 'creates new upload correctly' do + upload = moved.upload + + expect(upload).not_to eq(subject.upload) + expect(upload.model).to eq(new_project) + 
expect(upload.uploader).to eq('FileUploader') + expect(upload.secret).not_to eq(subject.upload.secret) end it 'copies the file' do @@ -111,6 +116,12 @@ describe FileUploader do end include_examples 'returns a valid uploader' + + it 'copies the file to the correct location' do + expect(moved.upload.path).to eq("#{moved.upload.secret}/dk.png") + expect(moved.file.path).to end_with("public/uploads/#{new_project.disk_path}/#{moved.upload.secret}/dk.png") + expect(moved.filename).to eq('dk.png') + end end context 'files are stored remotely' do @@ -121,6 +132,12 @@ describe FileUploader do end include_examples 'returns a valid uploader' + + it 'copies the file to the correct location' do + expect(moved.upload.path).to eq("#{new_project.disk_path}/#{moved.upload.secret}/dk.png") + expect(moved.file.path).to eq("#{new_project.disk_path}/#{moved.upload.secret}/dk.png") + expect(moved.filename).to eq('dk.png') + end end end diff --git a/spec/uploaders/namespace_file_uploader_spec.rb b/spec/uploaders/namespace_file_uploader_spec.rb index 799c6db57fa..d09725ee4be 100644 --- a/spec/uploaders/namespace_file_uploader_spec.rb +++ b/spec/uploaders/namespace_file_uploader_spec.rb @@ -55,4 +55,62 @@ describe NamespaceFileUploader do it_behaves_like "migrates", to_store: described_class::Store::REMOTE it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL end + + describe 'copy_to' do + let(:group) { create(:group) } + let(:moved) { described_class.copy_to(subject, group) } + + shared_examples 'returns a valid uploader' do + it 'generates a new secret' do + expect(subject).to be + expect(described_class).to receive(:generate_secret).once.and_call_original + expect(moved).to be + end + + it 'creates new upload correctly' do + upload = moved.upload + + expect(upload).not_to eq(subject.upload) + expect(upload.model).to eq(group) + expect(upload.uploader).to eq('NamespaceFileUploader') + expect(upload.secret).not_to eq(subject.upload.secret) + end + + it 'copies the file' do + expect(subject.file).to exist + expect(moved.file).to exist + expect(subject.file).not_to eq(moved.file) + expect(subject.object_store).to eq(moved.object_store) + end + end + + context 'files are stored locally' do + before do + subject.store!(fixture_file_upload('spec/fixtures/dk.png')) + end + + include_examples 'returns a valid uploader' + + it 'copies the file to the correct location' do + expect(moved.upload.path).to eq("#{moved.upload.secret}/dk.png") + expect(moved.file.path).to end_with("system/namespace/#{group.id}/#{moved.upload.secret}/dk.png") + expect(moved.filename).to eq('dk.png') + end + end + + context 'files are stored remotely' do + before do + stub_uploads_object_storage + subject.store!(fixture_file_upload('spec/fixtures/dk.png')) + subject.migrate!(ObjectStorage::Store::REMOTE) + end + + include_examples 'returns a valid uploader' + + it 'copies the file to the correct location' do + expect(moved.file.path).to eq("namespace/#{group.id}/#{moved.upload.secret}/dk.png") + expect(moved.filename).to eq('dk.png') + end + end + end end diff --git a/spec/workers/emails_on_push_worker_spec.rb b/spec/workers/emails_on_push_worker_spec.rb index f17c5ac6aac..05b4fb49ea3 100644 --- a/spec/workers/emails_on_push_worker_spec.rb +++ b/spec/workers/emails_on_push_worker_spec.rb @@ -101,7 +101,7 @@ describe EmailsOnPushWorker, :mailer do context "when there are multiple recipients" do before do - # This is a hack because we modify the mail object before sending, for efficency, + 
# This is a hack because we modify the mail object before sending, for efficiency, # but the TestMailer adapter just appends the objects to an array. To clone a mail object, create a new one! # https://github.com/mikel/mail/issues/314#issuecomment-12750108
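A minimal sketch of the clone-by-rebuilding trick the comment above refers to (hypothetical example, not part of this changeset): with the mail gem, re-parsing a message's encoded form yields an independent copy that can be re-addressed without touching the original.

    require 'mail'

    # Build a message once...
    original = Mail.new do
      from    'sender@example.com'
      to      'first@example.com'
      subject 'New push to repository'
      body    'Commit summary goes here'
    end

    # ...then "clone" it by parsing its own RFC 2822 serialization.
    # Mail.new accepts a raw message string, so this returns a fresh object.
    cloned = Mail.new(original.encoded)
    cloned.to = 'second@example.com'

    # The original stays untouched; only the clone was re-addressed.
    puts original.to.inspect # => ["first@example.com"]
    puts cloned.to.inspect   # => ["second@example.com"]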