author     Lin Jen-Shin <godfat@godfat.org>  2017-08-14 19:33:35 +0800
committer  Lin Jen-Shin <godfat@godfat.org>  2017-08-14 19:33:35 +0800
commit     349db4802bd6148e5a66872376df1b187ac70b6e (patch)
tree       c8a195dec3e68e3f59dff99396a1c9c64ced2dd8
parent     ba8321a52af4f5258526ed4f864bbf3e7a752571 (diff)
parent     dcca25e98a49c2925dafeac5a79bff4cd99da472 (diff)
download   gitlab-ce-349db4802bd6148e5a66872376df1b187ac70b6e.tar.gz
Merge remote-tracking branch 'upstream/master' into 36089-handle-ref-failure-better
* upstream/master: (47 commits)
  Update CHANGELOG.md for 9.4.5
  Update charlock_holmes
  add a changelog entry
  switch to multi-line before block
  restructure the #new_key notification spec
  don't send devise notifications to the ghost user
  reset_delivered_emails before testing #new_key
  skip the :read_project check for new_project_member
  move the member spec to be with the other ones
  add a spec for new_group_member
  add a spec for never emailing the ghost user
  rubocop fix
  a membership with no user is always notifiable
  check notifiability for more emails
  add Member#notifiable?(type, opts)
  make NotificationRecipient a little more customizable
  Add notes about database performance for MySQL
  fix confidential border issue as well as confidential styles leaking on new MR
  Migrate force push check to Gitaly
  Add option to disable project export on instance
  ...
-rw-r--r--  .gitlab-ci.yml | 214
-rw-r--r--  .rubocop.yml | 5
-rw-r--r--  CHANGELOG.md | 21
-rw-r--r--  GITALY_SERVER_VERSION | 2
-rw-r--r--  Gemfile | 2
-rw-r--r--  Gemfile.lock | 6
-rw-r--r--  app/assets/javascripts/dispatcher.js | 3
-rw-r--r--  app/assets/javascripts/projects/project_new.js | 2
-rw-r--r--  app/assets/stylesheets/framework/dropdowns.scss | 8
-rw-r--r--  app/assets/stylesheets/framework/typography.scss | 2
-rw-r--r--  app/assets/stylesheets/pages/note_form.scss | 20
-rw-r--r--  app/assets/stylesheets/pages/pipelines.scss | 1
-rw-r--r--  app/controllers/admin/appearances_controller.rb | 2
-rw-r--r--  app/controllers/concerns/authenticates_with_two_factor.rb | 2
-rw-r--r--  app/controllers/dashboard/projects_controller.rb | 8
-rw-r--r--  app/controllers/dashboard_controller.rb | 6
-rw-r--r--  app/controllers/groups_controller.rb | 6
-rw-r--r--  app/controllers/projects_controller.rb | 14
-rw-r--r--  app/controllers/unicorn_test_controller.rb | 4
-rw-r--r--  app/finders/admin/projects_finder.rb | 2
-rw-r--r--  app/helpers/appearances_helper.rb | 2
-rw-r--r--  app/helpers/application_settings_helper.rb | 1
-rw-r--r--  app/models/appearance.rb | 20
-rw-r--r--  app/models/application_setting.rb | 1
-rw-r--r--  app/models/broadcast_message.rb | 14
-rw-r--r--  app/models/event.rb | 59
-rw-r--r--  app/models/event_collection.rb | 98
-rw-r--r--  app/models/event_for_migration.rb | 5
-rw-r--r--  app/models/member.rb | 11
-rw-r--r--  app/models/members/group_member.rb | 4
-rw-r--r--  app/models/members/project_member.rb | 4
-rw-r--r--  app/models/notification_recipient.rb | 23
-rw-r--r--  app/models/push_event.rb | 126
-rw-r--r--  app/models/push_event_payload.rb | 22
-rw-r--r--  app/models/user.rb | 1
-rw-r--r--  app/services/event_create_service.rb | 9
-rw-r--r--  app/services/notification_service.rb | 51
-rw-r--r--  app/services/push_event_payload_service.rb | 120
-rw-r--r--  app/uploaders/personal_file_uploader.rb | 2
-rw-r--r--  app/views/admin/application_settings/_form.html.haml | 6
-rw-r--r--  app/views/events/_commit.html.haml | 4
-rw-r--r--  app/views/events/_event_push.atom.haml | 19
-rw-r--r--  app/views/events/event/_push.html.haml | 13
-rw-r--r--  app/views/projects/_export.html.haml | 41
-rw-r--r--  app/views/projects/_md_preview.html.haml | 4
-rw-r--r--  app/views/projects/edit.html.haml | 37
-rw-r--r--  changelogs/unreleased/13325-bugfix-silence-on-disabled-notifications.yml | 6
-rw-r--r--  changelogs/unreleased/34492-firefox-job.yml | 4
-rw-r--r--  changelogs/unreleased/35052-please-select-a-file-when-attempting-to-upload-or-replace-from-the-ui.yml | 4
-rw-r--r--  changelogs/unreleased/35232-next-unresolved.yml | 4
-rw-r--r--  changelogs/unreleased/35697-allow-logged-in-user-to-read-user-list.yml | 4
-rw-r--r--  changelogs/unreleased/36158-new-issue-button.yml | 4
-rw-r--r--  changelogs/unreleased/36213-return-is_admin-in-users-api-when-current_user-is-admin.yml | 6
-rw-r--r--  changelogs/unreleased/appearances-caching-and-schema.yml | 4
-rw-r--r--  changelogs/unreleased/broadcast-messages-cache.yml | 4
-rw-r--r--  changelogs/unreleased/bvl-rollback-renamed-system-namespace.yml | 4
-rw-r--r--  changelogs/unreleased/disable-project-export.yml | 4
-rw-r--r--  changelogs/unreleased/fix-group-milestone-link-in-issuable-sidebar.yml | 4
-rw-r--r--  changelogs/unreleased/fix-oauth-checkboxes.yml | 4
-rw-r--r--  changelogs/unreleased/fix-sm-34547-cannot-connect-to-ci-server-error-messages.yml | 5
-rw-r--r--  changelogs/unreleased/fix-sm-35931-active-ci-pipelineschedule-have-nullified-next_run_at.yml | 4
-rw-r--r--  changelogs/unreleased/mattermost_fixes.yml | 4
-rw-r--r--  changelogs/unreleased/migrate-events-into-a-new-format.yml | 4
-rw-r--r--  changelogs/unreleased/mk-fix-case-insensitive-redirect-matching.yml | 4
-rw-r--r--  changelogs/unreleased/mk-fix-deploy-key-deletion.yml | 4
-rw-r--r--  changelogs/unreleased/mk-validate-username-change-with-container-registry-tags.yml | 4
-rw-r--r--  changelogs/unreleased/project-foreign-keys-without-errors.yml | 4
-rw-r--r--  changelogs/unreleased/search-flickering.yml | 4
-rw-r--r--  changelogs/unreleased/tc-fix-wildcard-protected-delete-merged.yml | 4
-rw-r--r--  changelogs/unreleased/use-a-specialized-class-for-querying-events.yml | 4
-rw-r--r--  changelogs/unreleased/zj-ref-path-monospace.yml | 4
-rw-r--r--  config/routes/uploads.rb | 4
-rw-r--r--  db/migrate/20170316163800_rename_system_namespaces.rb | 231
-rw-r--r--  db/migrate/20170316163845_move_uploads_to_system_dir.rb | 2
-rw-r--r--  db/migrate/20170608152747_prepare_events_table_for_push_events_migration.rb | 51
-rw-r--r--  db/migrate/20170608152748_create_push_event_payloads_tables.rb | 46
-rw-r--r--  db/migrate/20170717074009_move_system_upload_folder.rb | 10
-rw-r--r--  db/migrate/20170727123534_add_index_on_events_project_id_id.rb | 37
-rw-r--r--  db/migrate/20170809133343_add_broadcast_messages_index.rb | 21
-rw-r--r--  db/migrate/20170809134534_add_broadcast_message_not_null_constraints.rb | 17
-rw-r--r--  db/migrate/20170809142252_cleanup_appearances_schema.rb | 33
-rw-r--r--  db/migrate/20170809161910_add_project_export_enabled_to_application_settings.rb | 14
-rw-r--r--  db/post_migrate/20170317162059_update_upload_paths_to_system.rb | 2
-rw-r--r--  db/post_migrate/20170406111121_clean_upload_symlinks.rb | 2
-rw-r--r--  db/post_migrate/20170606202615_move_appearance_to_system_dir.rb | 2
-rw-r--r--  db/post_migrate/20170612071012_move_personal_snippets_files.rb | 4
-rw-r--r--  db/post_migrate/20170627101016_schedule_event_migrations.rb | 40
-rw-r--r--  db/post_migrate/20170807190736_move_personal_snippet_files_into_correct_folder.rb | 29
-rw-r--r--  db/schema.rb | 54
-rw-r--r--  doc/api/events.md | 42
-rw-r--r--  doc/development/testing.md | 32
-rw-r--r--  doc/install/requirements.md | 4
-rw-r--r--  doc/update/patch_versions.md | 18
-rw-r--r--  doc/user/project/settings/import_export.md | 3
-rw-r--r--  features/steps/shared/project.rb | 30
-rw-r--r--  lib/api/entities.rb | 12
-rw-r--r--  lib/api/settings.rb | 1
-rw-r--r--  lib/api/users.rb | 13
-rw-r--r--  lib/api/v3/entities.rb | 12
-rw-r--r--  lib/file_streamer.rb | 16
-rw-r--r--  lib/gitlab/background_migration/migrate_events_to_push_event_payloads.rb | 176
-rw-r--r--  lib/gitlab/background_migration/move_personal_snippet_files.rb | 79
-rw-r--r--  lib/gitlab/checks/force_push.rb | 19
-rw-r--r--  lib/gitlab/database.rb | 4
-rw-r--r--  lib/gitlab/gitaly_client/util.rb | 4
-rw-r--r--  lib/gitlab/import_export/import_export.yml | 26
-rw-r--r--  lib/gitlab/middleware/webpack_proxy.rb | 2
-rw-r--r--  lib/gitlab/o_auth/session.rb | 2
-rw-r--r--  lib/gitlab/seeder.rb | 2
-rw-r--r--  lib/rspec_flaky/example.rb | 46
-rw-r--r--  lib/rspec_flaky/flaky_example.rb | 39
-rw-r--r--  lib/rspec_flaky/listener.rb | 75
-rwxr-xr-x  scripts/detect-new-flaky-examples | 21
-rwxr-xr-x  scripts/merge-reports | 2
-rw-r--r--  spec/controllers/admin/projects_controller_spec.rb | 12
-rw-r--r--  spec/controllers/projects_controller_spec.rb | 112
-rw-r--r--  spec/controllers/snippets_controller_spec.rb | 8
-rw-r--r--  spec/controllers/uploads_controller_spec.rb | 4
-rw-r--r--  spec/controllers/users_controller_spec.rb | 10
-rw-r--r--  spec/factories/events.rb | 16
-rw-r--r--  spec/features/admin/admin_settings_spec.rb | 2
-rw-r--r--  spec/features/boards/sidebar_spec.rb | 12
-rw-r--r--  spec/features/calendar_spec.rb | 16
-rw-r--r--  spec/features/dashboard/activity_spec.rb | 28
-rw-r--r--  spec/features/groups/milestone_spec.rb | 10
-rw-r--r--  spec/features/issues_spec.rb | 1
-rw-r--r--  spec/features/projects/user_edits_files_spec.rb | 17
-rw-r--r--  spec/features/snippets/user_creates_snippet_spec.rb | 6
-rw-r--r--  spec/features/snippets/user_edits_snippet_spec.rb | 2
-rw-r--r--  spec/finders/admin/projects_finder_spec.rb | 6
-rw-r--r--  spec/finders/contributed_projects_finder_spec.rb | 4
-rw-r--r--  spec/lib/event_filter_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/background_migration/migrate_events_to_push_event_payloads_spec.rb | 423
-rw-r--r--  spec/lib/gitlab/background_migration/move_personal_snippet_files_spec.rb | 72
-rw-r--r--  spec/lib/gitlab/checks/force_push_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/database_spec.rb | 22
-rw-r--r--  spec/lib/gitlab/git/storage/circuit_breaker_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/health_checks/fs_shards_check_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/import_export/all_models.yml | 3
-rw-r--r--  spec/lib/gitlab/import_export/safe_model_attributes.yml | 8
-rw-r--r--  spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb | 18
-rw-r--r--  spec/lib/gitlab/workhorse_spec.rb | 4
-rw-r--r--  spec/lib/rspec_flaky/example_spec.rb | 89
-rw-r--r--  spec/lib/rspec_flaky/flaky_example_spec.rb | 104
-rw-r--r--  spec/lib/rspec_flaky/listener_spec.rb | 178
-rw-r--r--  spec/migrations/clean_upload_symlinks_spec.rb | 2
-rw-r--r--  spec/migrations/move_personal_snippets_files_spec.rb | 10
-rw-r--r--  spec/migrations/move_system_upload_folder_spec.rb | 18
-rw-r--r--  spec/migrations/move_uploads_to_system_dir_spec.rb | 2
-rw-r--r--  spec/migrations/rename_system_namespaces_spec.rb | 254
-rw-r--r--  spec/migrations/update_upload_paths_to_system_spec.rb | 8
-rw-r--r--  spec/models/appearance_spec.rb | 35
-rw-r--r--  spec/models/broadcast_message_spec.rb | 20
-rw-r--r--  spec/models/event_collection_spec.rb | 51
-rw-r--r--  spec/models/event_spec.rb | 32
-rw-r--r--  spec/models/issue_spec.rb | 10
-rw-r--r--  spec/models/members/project_member_spec.rb | 2
-rw-r--r--  spec/models/project_spec.rb | 4
-rw-r--r--  spec/models/push_event_payload_spec.rb | 16
-rw-r--r--  spec/models/push_event_spec.rb | 202
-rw-r--r--  spec/models/user_spec.rb | 25
-rw-r--r--  spec/requests/api/events_spec.rb | 28
-rw-r--r--  spec/requests/api/internal_spec.rb | 9
-rw-r--r--  spec/requests/api/settings_spec.rb | 5
-rw-r--r--  spec/requests/api/users_spec.rb | 10
-rw-r--r--  spec/requests/api/v3/users_spec.rb | 25
-rw-r--r--  spec/serializers/analytics_build_entity_spec.rb | 8
-rw-r--r--  spec/services/event_create_service_spec.rb | 44
-rw-r--r--  spec/services/git_push_service_spec.rb | 7
-rw-r--r--  spec/services/notification_service_spec.rb | 100
-rw-r--r--  spec/services/push_event_payload_service_spec.rb | 218
-rw-r--r--  spec/simplecov_env.rb | 25
-rw-r--r--  spec/spec_helper.rb | 13
-rw-r--r--  spec/uploaders/file_mover_spec.rb | 14
-rw-r--r--  spec/uploaders/personal_file_uploader_spec.rb | 4
-rw-r--r--  spec/views/projects/edit.html.haml_spec.rb | 14
-rw-r--r--  spec/workers/prune_old_events_worker_spec.rb | 8
177 files changed, 3756 insertions, 1081 deletions
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index e10958b3bee..024f2929252 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -27,6 +27,7 @@ variables:
GET_SOURCES_ATTEMPTS: "3"
KNAPSACK_RSPEC_SUITE_REPORT_PATH: knapsack/${CI_PROJECT_NAME}/rspec_report-master.json
KNAPSACK_SPINACH_SUITE_REPORT_PATH: knapsack/${CI_PROJECT_NAME}/spinach_report-master.json
+ FLAKY_RSPEC_SUITE_REPORT_PATH: rspec_flaky/${CI_PROJECT_NAME}/report-master.json
before_script:
- bundle --version
@@ -45,16 +46,17 @@ stages:
tags:
- gitlab-org
-.knapsack-state: &knapsack-state
+.tests-metadata-state: &tests-metadata-state
services: []
variables:
SETUP_DB: "false"
USE_BUNDLE_INSTALL: "false"
- KNAPSACK_S3_BUCKET: "gitlab-ce-cache"
+ TESTS_METADATA_S3_BUCKET: "gitlab-ce-cache"
artifacts:
expire_in: 31d
paths:
- knapsack/
+ - rspec_flaky/
.use-pg: &use-pg
services:
@@ -86,7 +88,7 @@ stages:
except:
- /(^docs[\/-].*|.*-docs$)/
-.rspec-knapsack: &rspec-knapsack
+.rspec-metadata: &rspec-metadata
<<: *dedicated-runner
<<: *pull-cache
stage: test
@@ -96,8 +98,13 @@ stages:
- export CI_NODE_TOTAL=${JOB_NAME[-1]}
- export KNAPSACK_REPORT_PATH=knapsack/${CI_PROJECT_NAME}/${JOB_NAME[0]}_node_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json
- export KNAPSACK_GENERATE_REPORT=true
+ - export ALL_FLAKY_RSPEC_REPORT_PATH=rspec_flaky/${CI_PROJECT_NAME}/all_node_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json
+ - export NEW_FLAKY_RSPEC_REPORT_PATH=rspec_flaky/${CI_PROJECT_NAME}/new_node_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json
+ - export FLAKY_RSPEC_GENERATE_REPORT=true
- export CACHE_CLASSES=true
- cp ${KNAPSACK_RSPEC_SUITE_REPORT_PATH} ${KNAPSACK_REPORT_PATH}
+ - cp ${FLAKY_RSPEC_SUITE_REPORT_PATH} ${ALL_FLAKY_RSPEC_REPORT_PATH}
+ - '[[ -f $NEW_FLAKY_RSPEC_REPORT_PATH ]] || echo "{}" > ${NEW_FLAKY_RSPEC_REPORT_PATH}'
- scripts/gitaly-test-spawn
- knapsack rspec "--color --format documentation"
artifacts:
@@ -106,20 +113,21 @@ stages:
paths:
- coverage/
- knapsack/
+ - rspec_flaky/
- tmp/capybara/
-.rspec-knapsack-pg: &rspec-knapsack-pg
- <<: *rspec-knapsack
+.rspec-metadata-pg: &rspec-metadata-pg
+ <<: *rspec-metadata
<<: *use-pg
<<: *except-docs
-.rspec-knapsack-mysql: &rspec-knapsack-mysql
- <<: *rspec-knapsack
+.rspec-metadata-mysql: &rspec-metadata-mysql
+ <<: *rspec-metadata
<<: *use-mysql
<<: *only-if-want-mysql
<<: *except-docs
-.spinach-knapsack: &spinach-knapsack
+.spinach-metadata: &spinach-metadata
<<: *dedicated-runner
<<: *pull-cache
stage: test
@@ -140,13 +148,13 @@ stages:
- knapsack/
- tmp/capybara/
-.spinach-knapsack-pg: &spinach-knapsack-pg
- <<: *spinach-knapsack
+.spinach-metadata-pg: &spinach-metadata-pg
+ <<: *spinach-metadata
<<: *use-pg
<<: *except-docs
-.spinach-knapsack-mysql: &spinach-knapsack-mysql
- <<: *spinach-knapsack
+.spinach-metadata-mysql: &spinach-metadata-mysql
+ <<: *spinach-metadata
<<: *use-mysql
<<: *only-if-want-mysql
<<: *except-docs
@@ -176,40 +184,70 @@ build-package:
- //@gitlab-org/gitlab-ce
- //@gitlab-org/gitlab-ee
-# Prepare and merge knapsack tests
-knapsack:
- <<: *knapsack-state
+# Retrieve knapsack and rspec_flaky reports
+retrieve-tests-metadata:
+ <<: *tests-metadata-state
<<: *dedicated-runner
<<: *except-docs
stage: prepare
cache:
- key: knapsack
- paths:
- - knapsack/
+ key: tests_metadata
policy: pull
script:
- mkdir -p knapsack/${CI_PROJECT_NAME}/
- - wget -O $KNAPSACK_RSPEC_SUITE_REPORT_PATH http://${KNAPSACK_S3_BUCKET}.s3.amazonaws.com/$KNAPSACK_RSPEC_SUITE_REPORT_PATH || rm $KNAPSACK_RSPEC_SUITE_REPORT_PATH
- - wget -O $KNAPSACK_SPINACH_SUITE_REPORT_PATH http://${KNAPSACK_S3_BUCKET}.s3.amazonaws.com/$KNAPSACK_SPINACH_SUITE_REPORT_PATH || rm $KNAPSACK_SPINACH_SUITE_REPORT_PATH
+ - wget -O $KNAPSACK_RSPEC_SUITE_REPORT_PATH http://${TESTS_METADATA_S3_BUCKET}.s3.amazonaws.com/$KNAPSACK_RSPEC_SUITE_REPORT_PATH || rm $KNAPSACK_RSPEC_SUITE_REPORT_PATH
+ - wget -O $KNAPSACK_SPINACH_SUITE_REPORT_PATH http://${TESTS_METADATA_S3_BUCKET}.s3.amazonaws.com/$KNAPSACK_SPINACH_SUITE_REPORT_PATH || rm $KNAPSACK_SPINACH_SUITE_REPORT_PATH
- '[[ -f $KNAPSACK_RSPEC_SUITE_REPORT_PATH ]] || echo "{}" > ${KNAPSACK_RSPEC_SUITE_REPORT_PATH}'
- '[[ -f $KNAPSACK_SPINACH_SUITE_REPORT_PATH ]] || echo "{}" > ${KNAPSACK_SPINACH_SUITE_REPORT_PATH}'
+ - mkdir -p rspec_flaky/${CI_PROJECT_NAME}/
+ - wget -O $FLAKY_RSPEC_SUITE_REPORT_PATH http://${TESTS_METADATA_S3_BUCKET}.s3.amazonaws.com/$FLAKY_RSPEC_SUITE_REPORT_PATH || rm $FLAKY_RSPEC_SUITE_REPORT_PATH
+ - '[[ -f $FLAKY_RSPEC_SUITE_REPORT_PATH ]] || echo "{}" > ${FLAKY_RSPEC_SUITE_REPORT_PATH}'
-update-knapsack:
- <<: *knapsack-state
+update-tests-metadata:
+ <<: *tests-metadata-state
<<: *dedicated-runner
<<: *only-canonical-masters
stage: post-test
cache:
- key: knapsack
+ key: tests_metadata
paths:
- knapsack/
+ - rspec_flaky/
policy: push
script:
- retry gem install fog-aws mime-types
- scripts/merge-reports ${KNAPSACK_RSPEC_SUITE_REPORT_PATH} knapsack/${CI_PROJECT_NAME}/rspec-pg_node_*.json
- scripts/merge-reports ${KNAPSACK_SPINACH_SUITE_REPORT_PATH} knapsack/${CI_PROJECT_NAME}/spinach-pg_node_*.json
- - '[[ -z ${KNAPSACK_S3_BUCKET} ]] || scripts/sync-reports put $KNAPSACK_S3_BUCKET $KNAPSACK_RSPEC_SUITE_REPORT_PATH $KNAPSACK_SPINACH_SUITE_REPORT_PATH'
+ - scripts/merge-reports ${FLAKY_RSPEC_SUITE_REPORT_PATH} rspec_flaky/${CI_PROJECT_NAME}/all_node_*.json
+ - '[[ -z ${TESTS_METADATA_S3_BUCKET} ]] || scripts/sync-reports put $TESTS_METADATA_S3_BUCKET $KNAPSACK_RSPEC_SUITE_REPORT_PATH $KNAPSACK_SPINACH_SUITE_REPORT_PATH'
+ - '[[ -z ${TESTS_METADATA_S3_BUCKET} ]] || scripts/sync-reports put $TESTS_METADATA_S3_BUCKET $FLAKY_RSPEC_SUITE_REPORT_PATH'
- rm -f knapsack/${CI_PROJECT_NAME}/*_node_*.json
+ - rm -f rspec_flaky/${CI_PROJECT_NAME}/all_node_*.json
+
+flaky-examples-check:
+ <<: *dedicated-runner
+ image: ruby:2.3-alpine
+ services: []
+ before_script: []
+ cache: {}
+ variables:
+ SETUP_DB: "false"
+ USE_BUNDLE_INSTALL: "false"
+ NEW_FLAKY_SPECS_REPORT: rspec_flaky/${CI_PROJECT_NAME}/new_rspec_flaky_examples.json
+ stage: post-test
+ allow_failure: yes
+ only:
+ - branches
+ except:
+ - master
+ artifacts:
+ expire_in: 30d
+ paths:
+ - rspec_flaky/
+ script:
+ - '[[ -f $NEW_FLAKY_SPECS_REPORT ]] || echo "{}" > ${NEW_FLAKY_SPECS_REPORT}'
+ - scripts/merge-reports $NEW_FLAKY_SPECS_REPORT rspec_flaky/${CI_PROJECT_NAME}/new_node_*.json
+ - scripts/detect-new-flaky-examples $NEW_FLAKY_SPECS_REPORT
setup-test-env:
<<: *use-pg
@@ -232,69 +270,69 @@ setup-test-env:
- public/assets
- tmp/tests
-rspec-pg 0 25: *rspec-knapsack-pg
-rspec-pg 1 25: *rspec-knapsack-pg
-rspec-pg 2 25: *rspec-knapsack-pg
-rspec-pg 3 25: *rspec-knapsack-pg
-rspec-pg 4 25: *rspec-knapsack-pg
-rspec-pg 5 25: *rspec-knapsack-pg
-rspec-pg 6 25: *rspec-knapsack-pg
-rspec-pg 7 25: *rspec-knapsack-pg
-rspec-pg 8 25: *rspec-knapsack-pg
-rspec-pg 9 25: *rspec-knapsack-pg
-rspec-pg 10 25: *rspec-knapsack-pg
-rspec-pg 11 25: *rspec-knapsack-pg
-rspec-pg 12 25: *rspec-knapsack-pg
-rspec-pg 13 25: *rspec-knapsack-pg
-rspec-pg 14 25: *rspec-knapsack-pg
-rspec-pg 15 25: *rspec-knapsack-pg
-rspec-pg 16 25: *rspec-knapsack-pg
-rspec-pg 17 25: *rspec-knapsack-pg
-rspec-pg 18 25: *rspec-knapsack-pg
-rspec-pg 19 25: *rspec-knapsack-pg
-rspec-pg 20 25: *rspec-knapsack-pg
-rspec-pg 21 25: *rspec-knapsack-pg
-rspec-pg 22 25: *rspec-knapsack-pg
-rspec-pg 23 25: *rspec-knapsack-pg
-rspec-pg 24 25: *rspec-knapsack-pg
-
-rspec-mysql 0 25: *rspec-knapsack-mysql
-rspec-mysql 1 25: *rspec-knapsack-mysql
-rspec-mysql 2 25: *rspec-knapsack-mysql
-rspec-mysql 3 25: *rspec-knapsack-mysql
-rspec-mysql 4 25: *rspec-knapsack-mysql
-rspec-mysql 5 25: *rspec-knapsack-mysql
-rspec-mysql 6 25: *rspec-knapsack-mysql
-rspec-mysql 7 25: *rspec-knapsack-mysql
-rspec-mysql 8 25: *rspec-knapsack-mysql
-rspec-mysql 9 25: *rspec-knapsack-mysql
-rspec-mysql 10 25: *rspec-knapsack-mysql
-rspec-mysql 11 25: *rspec-knapsack-mysql
-rspec-mysql 12 25: *rspec-knapsack-mysql
-rspec-mysql 13 25: *rspec-knapsack-mysql
-rspec-mysql 14 25: *rspec-knapsack-mysql
-rspec-mysql 15 25: *rspec-knapsack-mysql
-rspec-mysql 16 25: *rspec-knapsack-mysql
-rspec-mysql 17 25: *rspec-knapsack-mysql
-rspec-mysql 18 25: *rspec-knapsack-mysql
-rspec-mysql 19 25: *rspec-knapsack-mysql
-rspec-mysql 20 25: *rspec-knapsack-mysql
-rspec-mysql 21 25: *rspec-knapsack-mysql
-rspec-mysql 22 25: *rspec-knapsack-mysql
-rspec-mysql 23 25: *rspec-knapsack-mysql
-rspec-mysql 24 25: *rspec-knapsack-mysql
-
-spinach-pg 0 5: *spinach-knapsack-pg
-spinach-pg 1 5: *spinach-knapsack-pg
-spinach-pg 2 5: *spinach-knapsack-pg
-spinach-pg 3 5: *spinach-knapsack-pg
-spinach-pg 4 5: *spinach-knapsack-pg
-
-spinach-mysql 0 5: *spinach-knapsack-mysql
-spinach-mysql 1 5: *spinach-knapsack-mysql
-spinach-mysql 2 5: *spinach-knapsack-mysql
-spinach-mysql 3 5: *spinach-knapsack-mysql
-spinach-mysql 4 5: *spinach-knapsack-mysql
+rspec-pg 0 25: *rspec-metadata-pg
+rspec-pg 1 25: *rspec-metadata-pg
+rspec-pg 2 25: *rspec-metadata-pg
+rspec-pg 3 25: *rspec-metadata-pg
+rspec-pg 4 25: *rspec-metadata-pg
+rspec-pg 5 25: *rspec-metadata-pg
+rspec-pg 6 25: *rspec-metadata-pg
+rspec-pg 7 25: *rspec-metadata-pg
+rspec-pg 8 25: *rspec-metadata-pg
+rspec-pg 9 25: *rspec-metadata-pg
+rspec-pg 10 25: *rspec-metadata-pg
+rspec-pg 11 25: *rspec-metadata-pg
+rspec-pg 12 25: *rspec-metadata-pg
+rspec-pg 13 25: *rspec-metadata-pg
+rspec-pg 14 25: *rspec-metadata-pg
+rspec-pg 15 25: *rspec-metadata-pg
+rspec-pg 16 25: *rspec-metadata-pg
+rspec-pg 17 25: *rspec-metadata-pg
+rspec-pg 18 25: *rspec-metadata-pg
+rspec-pg 19 25: *rspec-metadata-pg
+rspec-pg 20 25: *rspec-metadata-pg
+rspec-pg 21 25: *rspec-metadata-pg
+rspec-pg 22 25: *rspec-metadata-pg
+rspec-pg 23 25: *rspec-metadata-pg
+rspec-pg 24 25: *rspec-metadata-pg
+
+rspec-mysql 0 25: *rspec-metadata-mysql
+rspec-mysql 1 25: *rspec-metadata-mysql
+rspec-mysql 2 25: *rspec-metadata-mysql
+rspec-mysql 3 25: *rspec-metadata-mysql
+rspec-mysql 4 25: *rspec-metadata-mysql
+rspec-mysql 5 25: *rspec-metadata-mysql
+rspec-mysql 6 25: *rspec-metadata-mysql
+rspec-mysql 7 25: *rspec-metadata-mysql
+rspec-mysql 8 25: *rspec-metadata-mysql
+rspec-mysql 9 25: *rspec-metadata-mysql
+rspec-mysql 10 25: *rspec-metadata-mysql
+rspec-mysql 11 25: *rspec-metadata-mysql
+rspec-mysql 12 25: *rspec-metadata-mysql
+rspec-mysql 13 25: *rspec-metadata-mysql
+rspec-mysql 14 25: *rspec-metadata-mysql
+rspec-mysql 15 25: *rspec-metadata-mysql
+rspec-mysql 16 25: *rspec-metadata-mysql
+rspec-mysql 17 25: *rspec-metadata-mysql
+rspec-mysql 18 25: *rspec-metadata-mysql
+rspec-mysql 19 25: *rspec-metadata-mysql
+rspec-mysql 20 25: *rspec-metadata-mysql
+rspec-mysql 21 25: *rspec-metadata-mysql
+rspec-mysql 22 25: *rspec-metadata-mysql
+rspec-mysql 23 25: *rspec-metadata-mysql
+rspec-mysql 24 25: *rspec-metadata-mysql
+
+spinach-pg 0 5: *spinach-metadata-pg
+spinach-pg 1 5: *spinach-metadata-pg
+spinach-pg 2 5: *spinach-metadata-pg
+spinach-pg 3 5: *spinach-metadata-pg
+spinach-pg 4 5: *spinach-metadata-pg
+
+spinach-mysql 0 5: *spinach-metadata-mysql
+spinach-mysql 1 5: *spinach-metadata-mysql
+spinach-mysql 2 5: *spinach-metadata-mysql
+spinach-mysql 3 5: *spinach-metadata-mysql
+spinach-mysql 4 5: *spinach-metadata-mysql
# Static analysis jobs
.ruby-static-analysis: &ruby-static-analysis
@@ -354,7 +392,7 @@ ee_compat_check:
except:
- master
- tags
- - /^[\d-]+-stable(-ee)?$/
+ - /^[\d-]+-stable(-ee)?/
allow_failure: yes
cache:
key: "ee_compat_check_repo"
diff --git a/.rubocop.yml b/.rubocop.yml
index 18d8e009da6..d25b4ac39c9 100644
--- a/.rubocop.yml
+++ b/.rubocop.yml
@@ -1045,7 +1045,7 @@ RSpec/BeforeAfterAll:
RSpec/DescribeClass:
Enabled: false
-# Use `described_class` for tested class / module.
+# Checks that the second argument to `describe` specifies a method.
RSpec/DescribeMethod:
Enabled: false
@@ -1053,8 +1053,7 @@ RSpec/DescribeMethod:
RSpec/DescribeSymbol:
Enabled: true
-# Checks that the second argument to top level describe is the tested method
-# name.
+# Checks that tests use `described_class`.
RSpec/DescribedClass:
Enabled: true
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 6a9c751937e..3ecedd44c89 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,27 @@
documentation](doc/development/changelog.md) for instructions on adding your own
entry.
+## 9.4.5 (2017-08-14)
+
+- Fix deletion of deploy keys linked to other projects. !13162
+- Allow any logged in users to read_users_list even if it's restricted. !13201
+- Make Delete Merged Branches handle wildcard protected branches correctly. !13251
+- Fix an order of operations for CI connection error message in merge request widget. !13252
+- Fix pipeline_schedules pages when active schedule has an abnormal state. !13286
+- Add missing validation error for username change with container registry tags. !13356
+- Fix destroy of case-insensitive conflicting redirects. !13357
+- Project pending delete no longer return 500 error in admins projects view. !13389
+- Fix search box losing focus when typing.
+- Use jQuery to control scroll behavior in job log for cross browser consistency.
+- Use project_ref_path to create the link to a branch to fix links that 404.
+- improve file upload/replace experience.
+- fix jump to next discussion button.
+- Fixes new issue button for failed job returning 404.
+- Fix links to group milestones from issue and merge request sidebar.
+- Fixed sign-in restrictions buttons not toggling active state.
+- Fix Mattermost integration.
+- Change project FK migration to skip existing FKs.
+
## 9.4.4 (2017-08-09)
- Remove hidden symlinks from project import files.
diff --git a/GITALY_SERVER_VERSION b/GITALY_SERVER_VERSION
index ae6dd4e2032..c25c8e5b741 100644
--- a/GITALY_SERVER_VERSION
+++ b/GITALY_SERVER_VERSION
@@ -1 +1 @@
-0.29.0
+0.30.0
diff --git a/Gemfile b/Gemfile
index 83f7199cbc4..a768fa428bf 100644
--- a/Gemfile
+++ b/Gemfile
@@ -402,7 +402,7 @@ group :ed25519 do
end
# Gitaly GRPC client
-gem 'gitaly', '~> 0.26.0'
+gem 'gitaly', '~> 0.27.0'
gem 'toml-rb', '~> 0.3.15', require: false
diff --git a/Gemfile.lock b/Gemfile.lock
index 3d435b6f901..ab01a556561 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -113,7 +113,7 @@ GEM
activesupport (>= 4.0.0)
mime-types (>= 1.16)
cause (0.1)
- charlock_holmes (0.7.3)
+ charlock_holmes (0.7.4)
chronic (0.10.2)
chronic_duration (0.10.6)
numerizer (~> 0.1.1)
@@ -270,7 +270,7 @@ GEM
po_to_json (>= 1.0.0)
rails (>= 3.2.0)
gherkin-ruby (0.3.2)
- gitaly (0.26.0)
+ gitaly (0.27.0)
google-protobuf (~> 3.1)
grpc (~> 1.0)
github-linguist (4.7.6)
@@ -984,7 +984,7 @@ DEPENDENCIES
gettext (~> 3.2.2)
gettext_i18n_rails (~> 1.8.0)
gettext_i18n_rails_js (~> 1.2.0)
- gitaly (~> 0.26.0)
+ gitaly (~> 0.27.0)
github-linguist (~> 4.7.0)
gitlab-flowdock-git-hook (~> 1.0.1)
gitlab-markup (~> 1.5.1)
diff --git a/app/assets/javascripts/dispatcher.js b/app/assets/javascripts/dispatcher.js
index 8c5a4367440..de47485c9f2 100644
--- a/app/assets/javascripts/dispatcher.js
+++ b/app/assets/javascripts/dispatcher.js
@@ -347,6 +347,9 @@ import initChangesDropdown from './init_changes_dropdown';
if ($('#tree-slider').length) new TreeView();
if ($('.blob-viewer').length) new BlobViewer();
if ($('.project-show-activity').length) new gl.Activities();
+ $('#tree-slider').waitForImages(function() {
+ gl.utils.ajaxGet(document.querySelector('.js-tree-content').dataset.logsPath);
+ });
break;
case 'projects:edit':
setupProjectEdit();
diff --git a/app/assets/javascripts/projects/project_new.js b/app/assets/javascripts/projects/project_new.js
index 985521aef34..7f972b6f6ee 100644
--- a/app/assets/javascripts/projects/project_new.js
+++ b/app/assets/javascripts/projects/project_new.js
@@ -36,7 +36,7 @@ const bindEvents = () => {
$('.how_to_import_link').on('click', (e) => {
e.preventDefault();
- $('.how_to_import_link').next('.modal').show();
+ $(e.currentTarget).next('.modal').show();
});
$('.modal-header .close').on('click', () => {
diff --git a/app/assets/stylesheets/framework/dropdowns.scss b/app/assets/stylesheets/framework/dropdowns.scss
index 02e0ba74158..1bb04b59a2a 100644
--- a/app/assets/stylesheets/framework/dropdowns.scss
+++ b/app/assets/stylesheets/framework/dropdowns.scss
@@ -725,9 +725,9 @@
}
// TODO: change global style and remove mixin
-@mixin new-style-dropdown {
- .dropdown-menu,
- .dropdown-menu-nav {
+@mixin new-style-dropdown($selector: '') {
+ #{$selector}.dropdown-menu,
+ #{$selector}.dropdown-menu-nav {
.divider {
margin: 6px 0;
}
@@ -773,7 +773,7 @@
}
}
- .dropdown-menu-align-right {
+ #{$selector}.dropdown-menu-align-right {
margin-top: 2px;
}
}
diff --git a/app/assets/stylesheets/framework/typography.scss b/app/assets/stylesheets/framework/typography.scss
index bf5f124d142..96409b10b99 100644
--- a/app/assets/stylesheets/framework/typography.scss
+++ b/app/assets/stylesheets/framework/typography.scss
@@ -339,6 +339,8 @@ a > code {
@extend .ref-name;
}
+@include new-style-dropdown('.git-revision-dropdown');
+
/**
* Apply Markdown typography
*
diff --git a/app/assets/stylesheets/pages/note_form.scss b/app/assets/stylesheets/pages/note_form.scss
index c90642178fc..b4468d6d0a2 100644
--- a/app/assets/stylesheets/pages/note_form.scss
+++ b/app/assets/stylesheets/pages/note_form.scss
@@ -108,6 +108,7 @@
background-color: $orange-50;
border-radius: $border-radius-default $border-radius-default 0 0;
border: 1px solid $border-gray-normal;
+ border-bottom: none;
padding: 3px 12px;
margin: auto;
align-items: center;
@@ -132,22 +133,9 @@
}
}
-.not-confidential {
- padding: 0;
- border-top: none;
-}
-
-.right-sidebar-expanded {
- .md-area {
- border-radius: 0;
- border-top: none;
- }
-}
-
-.right-sidebar-collapsed {
- .confidential-issue-warning {
- border-bottom: none;
- }
+.confidential-issue-warning + .md-area {
+ border-top-left-radius: 0;
+ border-top-right-radius: 0;
}
.discussion-form {
diff --git a/app/assets/stylesheets/pages/pipelines.scss b/app/assets/stylesheets/pages/pipelines.scss
index 6185342b495..85d1905ad40 100644
--- a/app/assets/stylesheets/pages/pipelines.scss
+++ b/app/assets/stylesheets/pages/pipelines.scss
@@ -824,6 +824,7 @@ button.mini-pipeline-graph-dropdown-toggle {
* Top arrow in the dropdown in the mini pipeline graph
*/
.mini-pipeline-graph-dropdown-menu {
+ z-index: 200;
&::before,
&::after {
diff --git a/app/controllers/admin/appearances_controller.rb b/app/controllers/admin/appearances_controller.rb
index 4b0ec54b3f4..92df1c8dff0 100644
--- a/app/controllers/admin/appearances_controller.rb
+++ b/app/controllers/admin/appearances_controller.rb
@@ -45,7 +45,7 @@ class Admin::AppearancesController < Admin::ApplicationController
# Use callbacks to share common setup or constraints between actions.
def set_appearance
- @appearance = Appearance.last || Appearance.new
+ @appearance = Appearance.current || Appearance.new
end
# Only allow a trusted parameter "white list" through.
diff --git a/app/controllers/concerns/authenticates_with_two_factor.rb b/app/controllers/concerns/authenticates_with_two_factor.rb
index ea441b1736b..b75e401a8df 100644
--- a/app/controllers/concerns/authenticates_with_two_factor.rb
+++ b/app/controllers/concerns/authenticates_with_two_factor.rb
@@ -69,7 +69,7 @@ module AuthenticatesWithTwoFactor
if U2fRegistration.authenticate(user, u2f_app_id, user_params[:device_response], session[:challenge])
# Remove any lingering user data from login
session.delete(:otp_user_id)
- session.delete(:challenges)
+ session.delete(:challenge)
remember_me(user) if user_params[:remember_me] == '1'
sign_in(user)
diff --git a/app/controllers/dashboard/projects_controller.rb b/app/controllers/dashboard/projects_controller.rb
index 74fe45e1ff6..f71ab702e71 100644
--- a/app/controllers/dashboard/projects_controller.rb
+++ b/app/controllers/dashboard/projects_controller.rb
@@ -52,8 +52,10 @@ class Dashboard::ProjectsController < Dashboard::ApplicationController
end
def load_events
- @events = Event.in_projects(load_projects(params.merge(non_public: true)))
- @events = event_filter.apply_filter(@events).with_associations
- @events = @events.limit(20).offset(params[:offset] || 0)
+ projects = load_projects(params.merge(non_public: true))
+
+ @events = EventCollection
+ .new(projects, offset: params[:offset].to_i, filter: event_filter)
+ .to_a
end
end
diff --git a/app/controllers/dashboard_controller.rb b/app/controllers/dashboard_controller.rb
index f9c31920302..19a5db6fd17 100644
--- a/app/controllers/dashboard_controller.rb
+++ b/app/controllers/dashboard_controller.rb
@@ -29,9 +29,9 @@ class DashboardController < Dashboard::ApplicationController
current_user.authorized_projects
end
- @events = Event.in_projects(projects)
- @events = @event_filter.apply_filter(@events).with_associations
- @events = @events.limit(20).offset(params[:offset] || 0)
+ @events = EventCollection
+ .new(projects, offset: params[:offset].to_i, filter: @event_filter)
+ .to_a
end
def set_show_full_reference
diff --git a/app/controllers/groups_controller.rb b/app/controllers/groups_controller.rb
index 27137ffde54..f76b3f69e9e 100644
--- a/app/controllers/groups_controller.rb
+++ b/app/controllers/groups_controller.rb
@@ -160,9 +160,9 @@ class GroupsController < Groups::ApplicationController
end
def load_events
- @events = Event.in_projects(@projects)
- @events = event_filter.apply_filter(@events).with_associations
- @events = @events.limit(20).offset(params[:offset] || 0)
+ @events = EventCollection
+ .new(@projects, offset: params[:offset].to_i, filter: event_filter)
+ .to_a
end
def user_actions
diff --git a/app/controllers/projects_controller.rb b/app/controllers/projects_controller.rb
index 8dfe0f51709..1d24563a6a6 100644
--- a/app/controllers/projects_controller.rb
+++ b/app/controllers/projects_controller.rb
@@ -7,6 +7,7 @@ class ProjectsController < Projects::ApplicationController
before_action :repository, except: [:index, :new, :create]
before_action :assign_ref_vars, only: [:show], if: :repo_exists?
before_action :tree, only: [:show], if: [:repo_exists?, :project_view_files?]
+ before_action :project_export_enabled, only: [:export, :download_export, :remove_export, :generate_new_export]
# Authorize
before_action :authorize_admin_project!, only: [:edit, :update, :housekeeping, :download_export, :export, :remove_export, :generate_new_export]
@@ -301,10 +302,11 @@ class ProjectsController < Projects::ApplicationController
end
def load_events
- @events = @project.events.recent
- @events = event_filter.apply_filter(@events).with_associations
- limit = (params[:limit] || 20).to_i
- @events = @events.limit(limit).offset(params[:offset] || 0)
+ projects = Project.where(id: @project.id)
+
+ @events = EventCollection
+ .new(projects, offset: params[:offset].to_i, filter: event_filter)
+ .to_a
end
def project_params
@@ -389,4 +391,8 @@ class ProjectsController < Projects::ApplicationController
url_for(params)
end
+
+ def project_export_enabled
+ render_404 unless current_application_settings.project_export_enabled?
+ end
end
diff --git a/app/controllers/unicorn_test_controller.rb b/app/controllers/unicorn_test_controller.rb
index b7a1a046be0..ed04bd1f77d 100644
--- a/app/controllers/unicorn_test_controller.rb
+++ b/app/controllers/unicorn_test_controller.rb
@@ -1,12 +1,14 @@
+# :nocov:
if Rails.env.test?
class UnicornTestController < ActionController::Base
def pid
render plain: Process.pid.to_s
end
-
+
def kill
Process.kill(params[:signal], Process.pid)
render plain: 'Bye!'
end
end
end
+# :nocov:
diff --git a/app/finders/admin/projects_finder.rb b/app/finders/admin/projects_finder.rb
index a5ba791a513..7176bfe22d6 100644
--- a/app/finders/admin/projects_finder.rb
+++ b/app/finders/admin/projects_finder.rb
@@ -18,7 +18,7 @@ class Admin::ProjectsFinder
end
def execute
- items = Project.with_statistics
+ items = Project.without_deleted.with_statistics
items = items.in_namespace(namespace_id) if namespace_id.present?
items = items.where(visibility_level: visibility_level) if visibility_level.present?
items = items.with_push if with_push.present?
diff --git a/app/helpers/appearances_helper.rb b/app/helpers/appearances_helper.rb
index 16136d02530..cdf5fa5d4b7 100644
--- a/app/helpers/appearances_helper.rb
+++ b/app/helpers/appearances_helper.rb
@@ -20,7 +20,7 @@ module AppearancesHelper
end
def brand_item
- @appearance ||= Appearance.first
+ @appearance ||= Appearance.current
end
def brand_header_logo
diff --git a/app/helpers/application_settings_helper.rb b/app/helpers/application_settings_helper.rb
index 6825adcb39f..150188f0b65 100644
--- a/app/helpers/application_settings_helper.rb
+++ b/app/helpers/application_settings_helper.rb
@@ -146,6 +146,7 @@ module ApplicationSettingsHelper
:plantuml_enabled,
:plantuml_url,
:polling_interval_multiplier,
+ :project_export_enabled,
:prometheus_metrics_enabled,
:recaptcha_enabled,
:recaptcha_private_key,
diff --git a/app/models/appearance.rb b/app/models/appearance.rb
index f9c48482be7..ff15689ecac 100644
--- a/app/models/appearance.rb
+++ b/app/models/appearance.rb
@@ -8,7 +8,27 @@ class Appearance < ActiveRecord::Base
validates :logo, file_size: { maximum: 1.megabyte }
validates :header_logo, file_size: { maximum: 1.megabyte }
+ validate :single_appearance_row, on: :create
+
mount_uploader :logo, AttachmentUploader
mount_uploader :header_logo, AttachmentUploader
has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
+
+ CACHE_KEY = 'current_appearance'.freeze
+
+ after_commit :flush_redis_cache
+
+ def self.current
+ Rails.cache.fetch(CACHE_KEY) { first }
+ end
+
+ def flush_redis_cache
+ Rails.cache.delete(CACHE_KEY)
+ end
+
+ def single_appearance_row
+ if self.class.any?
+ errors.add(:single_appearance_row, 'Only 1 appearances row can exist')
+ end
+ end
end
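A minimal console-style sketch of the read-through cache introduced above (the `title` attribute is assumed to be an existing Appearance column):

Appearance.current                    # cache miss: runs `first` and stores the row under 'current_appearance'
Appearance.current                    # served from Rails.cache, no query

appearance = Appearance.current
appearance.update!(title: 'Welcome')  # after_commit fires flush_redis_cache
Appearance.current                    # cache key was deleted, so the row is re-read from the database

Appearance.new.save                   # fails the new single_appearance_row validation once a row exists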
diff --git a/app/models/application_setting.rb b/app/models/application_setting.rb
index bd7c4cd45ea..8e446ff6dd8 100644
--- a/app/models/application_setting.rb
+++ b/app/models/application_setting.rb
@@ -241,6 +241,7 @@ class ApplicationSetting < ActiveRecord::Base
performance_bar_allowed_group_id: nil,
plantuml_enabled: false,
plantuml_url: nil,
+ project_export_enabled: true,
recaptcha_enabled: false,
repository_checks_enabled: true,
repository_storages: ['default'],
diff --git a/app/models/broadcast_message.rb b/app/models/broadcast_message.rb
index 944725d91c3..3692bcc680d 100644
--- a/app/models/broadcast_message.rb
+++ b/app/models/broadcast_message.rb
@@ -14,9 +14,15 @@ class BroadcastMessage < ActiveRecord::Base
default_value_for :color, '#E75E40'
default_value_for :font, '#FFFFFF'
+ CACHE_KEY = 'broadcast_message_current'.freeze
+
+ after_commit :flush_redis_cache
+
def self.current
- Rails.cache.fetch("broadcast_message_current", expires_in: 1.minute) do
- where('ends_at > :now AND starts_at <= :now', now: Time.zone.now).order([:created_at, :id]).to_a
+ Rails.cache.fetch(CACHE_KEY) do
+ where('ends_at > :now AND starts_at <= :now', now: Time.zone.now)
+ .reorder(id: :asc)
+ .to_a
end
end
@@ -31,4 +37,8 @@ class BroadcastMessage < ActiveRecord::Base
def ended?
ends_at < Time.zone.now
end
+
+ def flush_redis_cache
+ Rails.cache.delete(CACHE_KEY)
+ end
end
diff --git a/app/models/event.rb b/app/models/event.rb
index 8d93a228494..f2a560a6b56 100644
--- a/app/models/event.rb
+++ b/app/models/event.rb
@@ -48,6 +48,7 @@ class Event < ActiveRecord::Base
belongs_to :author, class_name: "User"
belongs_to :project
belongs_to :target, polymorphic: true # rubocop:disable Cop/PolymorphicAssociations
+ has_one :push_event_payload, foreign_key: :event_id
# For Hash only
serialize :data # rubocop:disable Cop/ActiveRecordSerialize
@@ -55,19 +56,51 @@ class Event < ActiveRecord::Base
# Callbacks
after_create :reset_project_activity
after_create :set_last_repository_updated_at, if: :push?
+ after_create :replicate_event_for_push_events_migration
# Scopes
scope :recent, -> { reorder(id: :desc) }
scope :code_push, -> { where(action: PUSHED) }
- scope :in_projects, ->(projects) do
- where(project_id: projects.pluck(:id)).recent
+ scope :in_projects, -> (projects) do
+ sub_query = projects
+ .except(:order)
+ .select(1)
+ .where('projects.id = events.project_id')
+
+ where('EXISTS (?)', sub_query).recent
+ end
+
+ scope :with_associations, -> do
+ # We're using preload for "push_event_payload" as otherwise the association
+ # is not always available (depending on the query being built).
+ includes(:author, :project, project: :namespace)
+ .preload(:target, :push_event_payload)
end
- scope :with_associations, -> { includes(:author, :project, project: :namespace).preload(:target) }
scope :for_milestone_id, ->(milestone_id) { where(target_type: "Milestone", target_id: milestone_id) }
+ self.inheritance_column = 'action'
+
class << self
+ def find_sti_class(action)
+ if action.to_i == PUSHED
+ PushEvent
+ else
+ Event
+ end
+ end
+
+ def subclass_from_attributes(attrs)
+ # Without this Rails will keep calling this method on the returned class,
+ # resulting in an infinite loop.
+ return unless self == Event
+
+ action = attrs.with_indifferent_access[inheritance_column].to_i
+
+ PushEvent if action == PUSHED
+ end
+
# Update Gitlab::ContributionsCalendar#activity_dates if this changes
def contributions
where("action = ? OR (target_type IN (?) AND action IN (?)) OR (target_type = ? AND action = ?)",
@@ -290,6 +323,16 @@ class Event < ActiveRecord::Base
@commits ||= (data[:commits] || []).reverse
end
+ def commit_title
+ commit = commits.last
+
+ commit[:message] if commit
+ end
+
+ def commit_id
+ commit_to || commit_from
+ end
+
def commits_count
data[:total_commits_count] || commits.count || 0
end
@@ -385,6 +428,16 @@ class Event < ActiveRecord::Base
user ? author_id == user.id : false
end
+ # We're manually replicating data into the new table since database triggers
+ # are not dumped to db/schema.rb. This could mean that a new installation
+ # would not have the triggers in place, thus losing events data in GitLab
+ # 10.0.
+ def replicate_event_for_push_events_migration
+ new_attributes = attributes.with_indifferent_access.except(:title, :data)
+
+ EventForMigration.create!(new_attributes)
+ end
+
private
def recent_update?
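A minimal sketch of how the STI hooks above behave (Event::PUSHED and Event::COMMENTED are the existing integer action constants; any non-push action is assumed to stay on the base class):

Event.find_sti_class(Event::PUSHED)       # => PushEvent
Event.find_sti_class(Event::COMMENTED)    # => Event

# subclass_from_attributes is consulted by Event.new / Event.create, so building an
# event with a push action transparently yields a PushEvent instance:
Event.new(action: Event::PUSHED).class    # => PushEvent
Event.new(action: Event::COMMENTED).class # => Event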
diff --git a/app/models/event_collection.rb b/app/models/event_collection.rb
new file mode 100644
index 00000000000..8b8244314af
--- /dev/null
+++ b/app/models/event_collection.rb
@@ -0,0 +1,98 @@
+# A collection of events to display in an event list.
+#
+# An EventCollection is meant to be used for displaying events to a user (e.g.
+# in a controller), it's not suitable for building queries that are used for
+# building other queries.
+class EventCollection
+ # To prevent users from putting too much pressure on the database by cycling
+ # through thousands of events we put a limit on the number of pages.
+ MAX_PAGE = 10
+
+ # projects - An ActiveRecord::Relation object that returns the projects for
+ # which to retrieve events.
+ # filter - An EventFilter instance to use for filtering events.
+ def initialize(projects, limit: 20, offset: 0, filter: nil)
+ @projects = projects
+ @limit = limit
+ @offset = offset
+ @filter = filter
+ end
+
+ # Returns an Array containing the events.
+ def to_a
+ return [] if current_page > MAX_PAGE
+
+ relation = if Gitlab::Database.join_lateral_supported?
+ relation_with_join_lateral
+ else
+ relation_without_join_lateral
+ end
+
+ relation.with_associations.to_a
+ end
+
+ private
+
+ # Returns the events relation to use when JOIN LATERAL is not supported.
+ #
+ # This relation simply gets all the events for all authorized projects, then
+ # limits that set.
+ def relation_without_join_lateral
+ events = filtered_events.in_projects(projects)
+
+ paginate_events(events)
+ end
+
+ # Returns the events relation to use when JOIN LATERAL is supported.
+ #
+ # This relation is built using JOIN LATERAL, producing faster queries than a
+ # regular LIMIT + OFFSET approach.
+ def relation_with_join_lateral
+ projects_for_lateral = projects.select(:id).to_sql
+
+ lateral = filtered_events
+ .limit(limit_for_join_lateral)
+ .where('events.project_id = projects_for_lateral.id')
+ .to_sql
+
+ # The outer query does not need to re-apply the filters since the JOIN
+ # LATERAL body already takes care of this.
+ outer = base_relation
+ .from("(#{projects_for_lateral}) projects_for_lateral")
+ .joins("JOIN LATERAL (#{lateral}) AS #{Event.table_name} ON true")
+
+ paginate_events(outer)
+ end
+
+ def filtered_events
+ @filter ? @filter.apply_filter(base_relation) : base_relation
+ end
+
+ def paginate_events(events)
+ events.limit(@limit).offset(@offset)
+ end
+
+ def base_relation
+ # We want to have absolute control over the event queries being built, thus
+ # we're explicitly opting out of any default scopes that may be set.
+ Event.unscoped.recent
+ end
+
+ def limit_for_join_lateral
+ # Applying the OFFSET on the inside of a JOIN LATERAL leads to incorrect
+ # results. To work around this we need to increase the inner limit for every
+ # page.
+ #
+ # This means that on page 1 we use LIMIT 20, and an outer OFFSET of 0. On
+ # page 2 we use LIMIT 40 and an outer OFFSET of 20.
+ @limit + @offset
+ end
+
+ def current_page
+ (@offset / @limit) + 1
+ end
+
+ def projects
+ @projects.except(:order)
+ end
+end
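A minimal usage sketch, mirroring the controllers above (the project ids and the EventFilter construction are illustrative):

projects = Project.where(id: [1, 2, 3])
filter   = EventFilter.new('push')   # optional; pass nil to skip filtering

events = EventCollection
  .new(projects, limit: 20, offset: 20, filter: filter)
  .to_a

# offset 20 with limit 20 is page 2, so current_page is 2 and the collection still
# queries. When Gitlab::Database.join_lateral_supported? is true, the inner JOIN
# LATERAL query runs with LIMIT 40 (limit + offset) per project while the outer
# query applies LIMIT 20 OFFSET 20; anything past MAX_PAGE (10) short-circuits to [].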
diff --git a/app/models/event_for_migration.rb b/app/models/event_for_migration.rb
new file mode 100644
index 00000000000..a1672da5eec
--- /dev/null
+++ b/app/models/event_for_migration.rb
@@ -0,0 +1,5 @@
+# This model is used to replicate events between the old "events" table and the
+# new "events_for_migration" table that will replace "events" in GitLab 10.0.
+class EventForMigration < ActiveRecord::Base
+ self.table_name = 'events_for_migration'
+end
diff --git a/app/models/member.rb b/app/models/member.rb
index dc9247bc9a0..b26b5017183 100644
--- a/app/models/member.rb
+++ b/app/models/member.rb
@@ -276,6 +276,13 @@ class Member < ActiveRecord::Base
@notification_setting ||= user.notification_settings_for(source)
end
+ def notifiable?(type, opts = {})
+ # always notify when there isn't a user yet
+ return true if user.blank?
+
+ NotificationRecipientService.notifiable?(user, type, notifiable_options.merge(opts))
+ end
+
private
def send_invite
@@ -332,4 +339,8 @@ class Member < ActiveRecord::Base
def notification_service
NotificationService.new
end
+
+ def notifiable_options
+ {}
+ end
end
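A minimal sketch of the new Member#notifiable? (the member lookup is illustrative; GroupMember and ProjectMember below merge in group:/project: via notifiable_options):

member = project.project_members.find_by(user_id: user.id)

member.notifiable?(:mention)                           # consults NotificationRecipientService.notifiable?
member.notifiable?(:mention, skip_read_ability: true)  # also skips the :read_project check

# An invited member that has no user record yet (user.blank?) is always notifiable,
# so invitation emails keep going out.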
diff --git a/app/models/members/group_member.rb b/app/models/members/group_member.rb
index 47040f95533..661e668dbf9 100644
--- a/app/models/members/group_member.rb
+++ b/app/models/members/group_member.rb
@@ -30,6 +30,10 @@ class GroupMember < Member
'Group'
end
+ def notifiable_options
+ { group: group }
+ end
+
private
def send_invite
diff --git a/app/models/members/project_member.rb b/app/models/members/project_member.rb
index c0e17f4bfc8..b6f1dd272cd 100644
--- a/app/models/members/project_member.rb
+++ b/app/models/members/project_member.rb
@@ -87,6 +87,10 @@ class ProjectMember < Member
project.owner == user
end
+ def notifiable_options
+ { project: project }
+ end
+
private
def delete_member_todos
diff --git a/app/models/notification_recipient.rb b/app/models/notification_recipient.rb
index 418b42d8f1d..dc862565a71 100644
--- a/app/models/notification_recipient.rb
+++ b/app/models/notification_recipient.rb
@@ -5,14 +5,22 @@ class NotificationRecipient
custom_action: nil,
target: nil,
acting_user: nil,
- project: nil
+ project: nil,
+ group: nil,
+ skip_read_ability: false
)
+ unless NotificationSetting.levels.key?(type) || type == :subscription
+ raise ArgumentError, "invalid type: #{type.inspect}"
+ end
+
@custom_action = custom_action
@acting_user = acting_user
@target = target
- @project = project || @target&.project
+ @project = project || default_project
+ @group = group || @project&.group
@user = user
@type = type
+ @skip_read_ability = skip_read_ability
end
def notification_setting
@@ -77,6 +85,8 @@ class NotificationRecipient
def has_access?
DeclarativePolicy.subject_scope do
return false unless user.can?(:receive_notifications)
+ return true if @skip_read_ability
+
return false if @project && !user.can?(:read_project, @project)
return true unless read_ability
@@ -96,6 +106,7 @@ class NotificationRecipient
private
def read_ability
+ return nil if @skip_read_ability
return @read_ability if instance_variable_defined?(:@read_ability)
@read_ability =
@@ -111,12 +122,18 @@ class NotificationRecipient
end
end
+ def default_project
+ return nil if @target.nil?
+ return @target if @target.is_a?(Project)
+ return @target.project if @target.respond_to?(:project)
+ end
+
def find_notification_setting
project_setting = @project && user.notification_settings_for(@project)
return project_setting unless project_setting.nil? || project_setting.global?
- group_setting = @project&.group && user.notification_settings_for(@project.group)
+ group_setting = @group && user.notification_settings_for(@group)
return group_setting unless group_setting.nil? || group_setting.global?
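A minimal construction sketch for the expanded initializer (arguments illustrative):

recipient = NotificationRecipient.new(
  user,
  :mention,                # anything other than a NotificationSetting level or :subscription raises ArgumentError
  target: issue,
  project: project,        # defaults to target.project when the target responds to #project
  group: group,            # defaults to project&.group
  skip_read_ability: true  # bypasses the read_ability / :read_project checks in has_access?
)

recipient.notification_setting  # now falls back to the group-level setting via @group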
diff --git a/app/models/push_event.rb b/app/models/push_event.rb
new file mode 100644
index 00000000000..3f1ff979de6
--- /dev/null
+++ b/app/models/push_event.rb
@@ -0,0 +1,126 @@
+class PushEvent < Event
+ # This validation exists so we can't accidentally use PushEvent with a
+ # different "action" value.
+ validate :validate_push_action
+
+ # Authors are required as they're used to display who pushed data.
+ #
+ # We're just validating the presence of the ID here as foreign key constraints
+ # should ensure the ID points to a valid user.
+ validates :author_id, presence: true
+
+ # The project is required to build links to commits, commit ranges, etc.
+ #
+ # We're just validating the presence of the ID here as foreign key constraints
+ # should ensure the ID points to a valid project.
+ validates :project_id, presence: true
+
+ # The "data" field must not be set for push events since it's not used and a
+ # waste of space.
+ validates :data, absence: true
+
+ # These fields are also not used for push events, thus storing them would be a
+ # waste.
+ validates :target_id, absence: true
+ validates :target_type, absence: true
+
+ def self.sti_name
+ PUSHED
+ end
+
+ def push?
+ true
+ end
+
+ def push_with_commits?
+ !!(commit_from && commit_to)
+ end
+
+ def tag?
+ return super unless push_event_payload
+
+ push_event_payload.tag?
+ end
+
+ def branch?
+ return super unless push_event_payload
+
+ push_event_payload.branch?
+ end
+
+ def valid_push?
+ return super unless push_event_payload
+
+ push_event_payload.ref.present?
+ end
+
+ def new_ref?
+ return super unless push_event_payload
+
+ push_event_payload.created?
+ end
+
+ def rm_ref?
+ return super unless push_event_payload
+
+ push_event_payload.removed?
+ end
+
+ def commit_from
+ return super unless push_event_payload
+
+ push_event_payload.commit_from
+ end
+
+ def commit_to
+ return super unless push_event_payload
+
+ push_event_payload.commit_to
+ end
+
+ def ref_name
+ return super unless push_event_payload
+
+ push_event_payload.ref
+ end
+
+ def ref_type
+ return super unless push_event_payload
+
+ push_event_payload.ref_type
+ end
+
+ def branch_name
+ return super unless push_event_payload
+
+ ref_name
+ end
+
+ def tag_name
+ return super unless push_event_payload
+
+ ref_name
+ end
+
+ def commit_title
+ return super unless push_event_payload
+
+ push_event_payload.commit_title
+ end
+
+ def commit_id
+ commit_to || commit_from
+ end
+
+ def commits_count
+ return super unless push_event_payload
+
+ push_event_payload.commit_count
+ end
+
+ def validate_push_action
+ return if action == PUSHED
+
+ errors.add(:action, "the action #{action.inspect} is not valid")
+ end
+end
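A minimal sketch of the fallback behavior above: events that already have a push_event_payload row answer from it, while legacy rows whose payload has not been migrated yet fall back to the serialized data hash via super. The id is illustrative:

event = PushEvent.find(42)

event.push_with_commits?  # => true when both commit_from and commit_to are present
event.commit_id           # => commit_to || commit_from
event.ref_name            # => push_event_payload.ref, or the legacy Event#ref_name
event.commits_count       # => push_event_payload.commit_count, or the legacy data-based count

PushEvent.new(action: Event::CREATED).valid?  # => false, validate_push_action rejects it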
diff --git a/app/models/push_event_payload.rb b/app/models/push_event_payload.rb
new file mode 100644
index 00000000000..6cdb1cd4fe9
--- /dev/null
+++ b/app/models/push_event_payload.rb
@@ -0,0 +1,22 @@
+class PushEventPayload < ActiveRecord::Base
+ include ShaAttribute
+
+ belongs_to :event, inverse_of: :push_event_payload
+
+ validates :event_id, :commit_count, :action, :ref_type, presence: true
+ validates :commit_title, length: { maximum: 70 }
+
+ sha_attribute :commit_from
+ sha_attribute :commit_to
+
+ enum action: {
+ created: 0,
+ removed: 1,
+ pushed: 2
+ }
+
+ enum ref_type: {
+ branch: 0,
+ tag: 1
+ }
+end
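A minimal sketch of the helpers Rails derives from those enum definitions:

payload = PushEventPayload.new(action: :created, ref_type: :branch)

payload.created?   # => true
payload.branch?    # => true
payload.action     # => "created" (stored as integer 0)

PushEventPayload.actions    # => { "created" => 0, "removed" => 1, "pushed" => 2 }
PushEventPayload.ref_types  # => { "branch" => 0, "tag" => 1 }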
diff --git a/app/models/user.rb b/app/models/user.rb
index 7935b89662b..a4615436245 100644
--- a/app/models/user.rb
+++ b/app/models/user.rb
@@ -1069,6 +1069,7 @@ class User < ActiveRecord::Base
# Added according to https://github.com/plataformatec/devise/blob/7df57d5081f9884849ca15e4fde179ef164a575f/README.md#activejob-integration
def send_devise_notification(notification, *args)
+ return true unless can?(:receive_notifications)
devise_mailer.send(notification, self, *args).deliver_later
end
diff --git a/app/services/event_create_service.rb b/app/services/event_create_service.rb
index 0f3a485a3fd..0b7e4f187f7 100644
--- a/app/services/event_create_service.rb
+++ b/app/services/event_create_service.rb
@@ -71,7 +71,14 @@ class EventCreateService
end
def push(project, current_user, push_data)
- create_event(project, current_user, Event::PUSHED, data: push_data)
+ # We're using an explicit transaction here so that any errors that may occur
+ # when creating push payload data will result in the event creation being
+ # rolled back as well.
+ Event.transaction do
+ event = create_event(project, current_user, Event::PUSHED)
+
+ PushEventPayloadService.new(event, push_data).execute
+ end
Users::ActivityService.new(current_user, 'push').execute
end
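A minimal sketch of the resulting call path (push_data being the hash produced by Gitlab::DataBuilder::Push.build):

EventCreateService.new.push(project, current_user, push_data)
# Creates the Event (a PushEvent via the STI hooks above) and its PushEventPayload in
# one transaction, so a payload validation error rolls the event back as well, and
# then records the user's 'push' activity.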
diff --git a/app/services/notification_service.rb b/app/services/notification_service.rb
index df04b1a4fe3..4267879b03d 100644
--- a/app/services/notification_service.rb
+++ b/app/services/notification_service.rb
@@ -10,9 +10,11 @@ class NotificationService
# only if ssh key is not deploy key
#
# This is security email so it will be sent
- # even if user disabled notifications
+ # even if user disabled notifications. However,
+ # it won't be sent to internal users like the
+ # ghost user or the EE support bot.
def new_key(key)
- if key.user
+ if key.user&.can?(:receive_notifications)
mailer.new_ssh_key_email(key.id).deliver_later
end
end
@@ -22,14 +24,14 @@ class NotificationService
# This is a security email so it will be sent even if the user user disabled
# notifications
def new_gpg_key(gpg_key)
- if gpg_key.user
+ if gpg_key.user&.can?(:receive_notifications)
mailer.new_gpg_key_email(gpg_key.id).deliver_later
end
end
# Always notify user about email added to profile
def new_email(email)
- if email.user
+ if email.user&.can?(:receive_notifications)
mailer.new_email_email(email.id).deliver_later
end
end
@@ -185,6 +187,8 @@ class NotificationService
# Notify new user with email after creation
def new_user(user, token = nil)
+ return true unless notifiable?(user, :mention)
+
# Don't email omniauth created users
mailer.new_user_email(user.id, token).deliver_later unless user.identities.any?
end
@@ -206,19 +210,27 @@ class NotificationService
# Members
def new_access_request(member)
+ return true unless member.notifiable?(:subscription)
+
mailer.member_access_requested_email(member.real_source_type, member.id).deliver_later
end
def decline_access_request(member)
+ return true unless member.notifiable?(:subscription)
+
mailer.member_access_denied_email(member.real_source_type, member.source_id, member.user_id).deliver_later
end
# Project invite
def invite_project_member(project_member, token)
+ return true unless project_member.notifiable?(:subscription)
+
mailer.member_invited_email(project_member.real_source_type, project_member.id, token).deliver_later
end
def accept_project_invite(project_member)
+ return true unless project_member.notifiable?(:subscription)
+
mailer.member_invite_accepted_email(project_member.real_source_type, project_member.id).deliver_later
end
@@ -232,10 +244,14 @@ class NotificationService
end
def new_project_member(project_member)
+ return true unless project_member.notifiable?(:mention, skip_read_ability: true)
+
mailer.member_access_granted_email(project_member.real_source_type, project_member.id).deliver_later
end
def update_project_member(project_member)
+ return true unless project_member.notifiable?(:mention)
+
mailer.member_access_granted_email(project_member.real_source_type, project_member.id).deliver_later
end
@@ -249,6 +265,9 @@ class NotificationService
end
def decline_group_invite(group_member)
+ # always send this one, since it's a response to the user's own
+ # action
+
mailer.member_invite_declined_email(
group_member.real_source_type,
group_member.group.id,
@@ -258,15 +277,19 @@ class NotificationService
end
def new_group_member(group_member)
+ return true unless group_member.notifiable?(:mention)
+
mailer.member_access_granted_email(group_member.real_source_type, group_member.id).deliver_later
end
def update_group_member(group_member)
+ return true unless group_member.notifiable?(:mention)
+
mailer.member_access_granted_email(group_member.real_source_type, group_member.id).deliver_later
end
def project_was_moved(project, old_path_with_namespace)
- recipients = NotificationRecipientService.notifiable_users(project.team.members, :mention, project: project)
+ recipients = notifiable_users(project.team.members, :mention, project: project)
recipients.each do |recipient|
mailer.project_was_moved_email(
@@ -288,10 +311,14 @@ class NotificationService
end
def project_exported(project, current_user)
+ return true unless notifiable?(current_user, :mention, project: project)
+
mailer.project_was_exported_email(current_user, project).deliver_later
end
def project_not_exported(project, current_user, errors)
+ return true unless notifiable?(current_user, :mention, project: project)
+
mailer.project_was_not_exported_email(current_user, project, errors).deliver_later
end
@@ -300,7 +327,7 @@ class NotificationService
return unless mailer.respond_to?(email_template)
- recipients ||= NotificationRecipientService.notifiable_users(
+ recipients ||= notifiable_users(
[pipeline.user], :watch,
custom_action: :"#{pipeline.status}_pipeline",
target: pipeline
@@ -369,7 +396,7 @@ class NotificationService
def relabeled_resource_email(target, labels, current_user, method)
recipients = labels.flat_map { |l| l.subscribers(target.project) }
- recipients = NotificationRecipientService.notifiable_users(
+ recipients = notifiable_users(
recipients, :subscription,
target: target,
acting_user: current_user
@@ -401,4 +428,14 @@ class NotificationService
object.previous_changes[attribute].first
end
end
+
+ private
+
+ def notifiable?(*args)
+ NotificationRecipientService.notifiable?(*args)
+ end
+
+ def notifiable_users(*args)
+ NotificationRecipientService.notifiable_users(*args)
+ end
end
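
A minimal sketch of how these guards read from a caller's side (assuming the usual `NotificationService.new` entry point and some existing `group_member` record; illustrative only, not part of the diff's code):

    service = NotificationService.new

    # If the member has notifications disabled globally, the
    # Member#notifiable?(:mention) check above returns false and the method
    # returns true without enqueuing any mailer job.
    service.new_group_member(group_member)   # => true, no email enqueued

    # Otherwise the guard passes and the email is scheduled as before via
    # mailer.member_access_granted_email(...).deliver_later
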
diff --git a/app/services/push_event_payload_service.rb b/app/services/push_event_payload_service.rb
new file mode 100644
index 00000000000..b0a389c85f9
--- /dev/null
+++ b/app/services/push_event_payload_service.rb
@@ -0,0 +1,120 @@
+# Service class for creating push event payloads as stored in the
+# "push_event_payloads" table.
+#
+# Example:
+#
+# data = Gitlab::DataBuilder::Push.build(...)
+# event = Event.create(...)
+#
+# PushEventPayloadService.new(event, data).execute
+class PushEventPayloadService
+ # event - The event this push payload belongs to.
+ # push_data - A Hash produced by `Gitlab::DataBuilder::Push.build` to use for
+ # building the push payload.
+ def initialize(event, push_data)
+ @event = event
+ @push_data = push_data
+ end
+
+ # Creates and returns a new PushEventPayload row.
+ #
+ # This method will raise upon encountering validation errors.
+ #
+ # Returns an instance of PushEventPayload.
+ def execute
+ @event.build_push_event_payload(
+ commit_count: commit_count,
+ action: action,
+ ref_type: ref_type,
+ commit_from: commit_from_id,
+ commit_to: commit_to_id,
+ ref: trimmed_ref,
+ commit_title: commit_title,
+ event_id: @event.id
+ )
+
+ @event.push_event_payload.save!
+ @event.push_event_payload
+ end
+
+ # Returns the commit title to use.
+ #
+ # The commit title is limited to the first line and a maximum of 70
+ # characters.
+ def commit_title
+ commit = @push_data.fetch(:commits).last
+
+ return nil unless commit && commit[:message]
+
+ raw_msg = commit[:message]
+
+    # Find where the first line ends, without turning the entire message into
+    # an Array of lines (which would waste memory for large commit messages).
+ index = raw_msg.index("\n")
+ message = index ? raw_msg[0..index] : raw_msg
+
+ message.strip.truncate(70)
+ end
+
+ def commit_from_id
+ if create?
+ nil
+ else
+ revision_before
+ end
+ end
+
+ def commit_to_id
+ if remove?
+ nil
+ else
+ revision_after
+ end
+ end
+
+ def commit_count
+ @push_data.fetch(:total_commits_count)
+ end
+
+ def ref
+ @push_data.fetch(:ref)
+ end
+
+ def revision_before
+ @push_data.fetch(:before)
+ end
+
+ def revision_after
+ @push_data.fetch(:after)
+ end
+
+ def trimmed_ref
+ Gitlab::Git.ref_name(ref)
+ end
+
+ def create?
+ Gitlab::Git.blank_ref?(revision_before)
+ end
+
+ def remove?
+ Gitlab::Git.blank_ref?(revision_after)
+ end
+
+ def action
+ if create?
+ :created
+ elsif remove?
+ :removed
+ else
+ :pushed
+ end
+ end
+
+ def ref_type
+ if Gitlab::Git.tag_ref?(ref)
+ :tag
+ else
+ :branch
+ end
+ end
+end
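
A rough usage sketch for the class above, with a hand-built hash standing in for the `Gitlab::DataBuilder::Push.build` output (keys match the accessors defined above; the SHA, commit message, and `event` record are placeholders):

    push_data = {
      before: Gitlab::Git::BLANK_SHA,    # pushing a brand new branch
      after:  '6d394385cf567f80a8fd85055db1ab4c5295806f',
      ref:    'refs/heads/fix',
      total_commits_count: 1,
      commits: [{ message: "Add simple search\n\nLonger body text" }]
    }

    service = PushEventPayloadService.new(event, push_data)
    service.action        # => :created (the :before SHA is the blank ref)
    service.ref_type      # => :branch
    service.trimmed_ref   # => "fix" (via Gitlab::Git.ref_name)
    service.commit_title  # => "Add simple search" (first line, max 70 chars)
    service.execute       # builds, validates and saves the PushEventPayload row
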
diff --git a/app/uploaders/personal_file_uploader.rb b/app/uploaders/personal_file_uploader.rb
index ef70871624b..3298ad104ec 100644
--- a/app/uploaders/personal_file_uploader.rb
+++ b/app/uploaders/personal_file_uploader.rb
@@ -4,7 +4,7 @@ class PersonalFileUploader < FileUploader
end
def self.base_dir
- File.join(root_dir, 'system')
+ File.join(root_dir, '-', 'system')
end
private
diff --git a/app/views/admin/application_settings/_form.html.haml b/app/views/admin/application_settings/_form.html.haml
index a4f49d3f6d7..8bf6556079b 100644
--- a/app/views/admin/application_settings/_form.html.haml
+++ b/app/views/admin/application_settings/_form.html.haml
@@ -48,6 +48,12 @@
= select(:application_setting, :enabled_git_access_protocol, [['Both SSH and HTTP(S)', nil], ['Only SSH', 'ssh'], ['Only HTTP(S)', 'http']], {}, class: 'form-control')
%span.help-block#clone-protocol-help
Allow only the selected protocols to be used for Git access.
+ .form-group
+ .col-sm-offset-2.col-sm-10
+ .checkbox
+ = f.label :project_export_enabled do
+ = f.check_box :project_export_enabled
+ Project export enabled
%fieldset
%legend Account and Limit Settings
diff --git a/app/views/events/_commit.html.haml b/app/views/events/_commit.html.haml
index ad434a64556..98cdcca3ecc 100644
--- a/app/views/events/_commit.html.haml
+++ b/app/views/events/_commit.html.haml
@@ -1,5 +1,5 @@
%li.commit
.commit-row-title
- = link_to truncate_sha(commit[:id]), project_commit_path(project, commit[:id]), class: "commit-sha", alt: '', title: truncate_sha(commit[:id])
+ = link_to truncate_sha(event.commit_id), project_commit_path(project, event.commit_id), class: "commit-sha", alt: '', title: truncate_sha(event.commit_id)
&middot;
- = markdown event_commit_title(commit[:message]), project: project, pipeline: :single_line, author: event.author
+ = markdown event_commit_title(event.commit_title), project: project, pipeline: :single_line, author: event.author
diff --git a/app/views/events/_event_push.atom.haml b/app/views/events/_event_push.atom.haml
index 9fcacfbbf36..bf655f9d21a 100644
--- a/app/views/events/_event_push.atom.haml
+++ b/app/views/events/_event_push.atom.haml
@@ -1,14 +1,13 @@
%div{ xmlns: "http://www.w3.org/1999/xhtml" }
- - event.commits.first(15).each do |commit|
- %p
- %strong= commit[:author][:name]
- = link_to "(##{truncate_sha(commit[:id])})", project_commit_path(event.project, id: commit[:id])
- %i
- at
- = commit[:timestamp].to_time.to_s(:short)
- %blockquote= markdown(escape_once(commit[:message]), pipeline: :atom, project: event.project, author: event.author)
- - if event.commits_count > 15
+ %p
+ %strong= event.author_name
+ = link_to "(#{truncate_sha(event.commit_id)})", project_commit_path(event.project, event.commit_id)
+ %i
+ at
+ = event.created_at.to_s(:short)
+ %blockquote= markdown(escape_once(event.commit_title), pipeline: :atom, project: event.project, author: event.author)
+ - if event.commits_count > 1
%p
%i
\... and
- = pluralize(event.commits_count - 15, "more commit")
+ = pluralize(event.commits_count - 1, "more commit")
diff --git a/app/views/events/event/_push.html.haml b/app/views/events/event/_push.html.haml
index 54b414cc62a..973c652ad88 100644
--- a/app/views/events/event/_push.html.haml
+++ b/app/views/events/event/_push.html.haml
@@ -14,9 +14,7 @@
- if event.push_with_commits?
.event-body
%ul.well-list.event_commits
- - few_commits = event.commits[0...2]
- - few_commits.each do |commit|
- = render "events/commit", commit: commit, project: project, event: event
+ = render "events/commit", project: project, event: event
- create_mr = event.new_ref? && create_mr_button?(project.default_branch, event.ref_name, project) && event.authored_by?(current_user)
- if event.commits_count > 1
@@ -44,9 +42,6 @@
= link_to create_mr_path(project.default_branch, event.ref_name, project) do
Create Merge Request
- elsif event.rm_ref?
- - repository = project.repository
- - last_commit = repository.commit(event.commit_from)
- - if last_commit
- .event-body
- %ul.well-list.event_commits
- = render "events/commit", commit: last_commit, project: project, event: event
+ .event-body
+ %ul.well-list.event_commits
+ = render "events/commit", project: project, event: event
diff --git a/app/views/projects/_export.html.haml b/app/views/projects/_export.html.haml
new file mode 100644
index 00000000000..623d3bc91c6
--- /dev/null
+++ b/app/views/projects/_export.html.haml
@@ -0,0 +1,41 @@
+- return unless current_application_settings.project_export_enabled?
+
+- project = local_assigns.fetch(:project)
+- expanded = Rails.env.test?
+
+%section.settings
+ .settings-header
+ %h4
+ Export project
+ %button.btn.js-settings-toggle
+ = expanded ? 'Collapse' : 'Expand'
+ %p
+ Export this project with all its related data in order to move your project to a new GitLab instance. Once the export is finished, you can import the file from the "New Project" page.
+ .settings-content.no-animate{ class: ('expanded' if expanded) }
+ .bs-callout.bs-callout-info
+ %p.append-bottom-0
+ %p
+ The following items will be exported:
+ %ul
+ %li Project and wiki repositories
+ %li Project uploads
+ %li Project configuration including web hooks and services
+ %li Issues with comments, merge requests with diffs and comments, labels, milestones, snippets, and other project entities
+ %p
+ The following items will NOT be exported:
+ %ul
+ %li Job traces and artifacts
+ %li LFS objects
+ %li Container registry images
+ %li CI variables
+ %li Any encrypted tokens
+ %p
+ Once the exported file is ready, you will receive a notification email with a download link.
+ - if project.export_project_path
+ = link_to 'Download export', download_export_project_path(project),
+ rel: 'nofollow', download: '', method: :get, class: "btn btn-default"
+ = link_to 'Generate new export', generate_new_export_project_path(project),
+ method: :post, class: "btn btn-default"
+ - else
+ = link_to 'Export project', export_project_path(project),
+ method: :post, class: "btn btn-default"
diff --git a/app/views/projects/_md_preview.html.haml b/app/views/projects/_md_preview.html.haml
index 6e13bf47ff6..97041b87c48 100644
--- a/app/views/projects/_md_preview.html.haml
+++ b/app/views/projects/_md_preview.html.haml
@@ -1,11 +1,9 @@
- referenced_users = local_assigns.fetch(:referenced_users, nil)
- if defined?(@issue) && @issue.confidential?
- %li.confidential-issue-warning
+ .confidential-issue-warning
= confidential_icon(@issue)
%span This is a confidential issue. Your comment will not be visible to the public.
-- else
- %li.confidential-issue-warning.not-confidential
.md-area
.md-header
diff --git a/app/views/projects/edit.html.haml b/app/views/projects/edit.html.haml
index c2794f8aaa8..6178abe9160 100644
--- a/app/views/projects/edit.html.haml
+++ b/app/views/projects/edit.html.haml
@@ -161,42 +161,7 @@
= render 'merge_request_settings', form: f
= f.submit 'Save changes', class: "btn btn-save"
- %section.settings
- .settings-header
- %h4
- Export project
- %button.btn.js-settings-toggle
- = expanded ? 'Collapse' : 'Expand'
- %p
- Export this project with all its related data in order to move your project to a new GitLab instance. Once the export is finished, you can import the file from the "New Project" page.
- .settings-content.no-animate{ class: ('expanded' if expanded) }
- .bs-callout.bs-callout-info
- %p.append-bottom-0
- %p
- The following items will be exported:
- %ul
- %li Project and wiki repositories
- %li Project uploads
- %li Project configuration including web hooks and services
- %li Issues with comments, merge requests with diffs and comments, labels, milestones, snippets, and other project entities
- %p
- The following items will NOT be exported:
- %ul
- %li Job traces and artifacts
- %li LFS objects
- %li Container registry images
- %li CI variables
- %li Any encrypted tokens
- %p
- Once the exported file is ready, you will receive a notification email with a download link.
- - if @project.export_project_path
- = link_to 'Download export', download_export_project_path(@project),
- rel: 'nofollow', download: '', method: :get, class: "btn btn-default"
- = link_to 'Generate new export', generate_new_export_project_path(@project),
- method: :post, class: "btn btn-default"
- - else
- = link_to 'Export project', export_project_path(@project),
- method: :post, class: "btn btn-default"
+ = render 'export', project: @project
%section.settings.advanced-settings
.settings-header
diff --git a/changelogs/unreleased/13325-bugfix-silence-on-disabled-notifications.yml b/changelogs/unreleased/13325-bugfix-silence-on-disabled-notifications.yml
new file mode 100644
index 00000000000..90b169390d2
--- /dev/null
+++ b/changelogs/unreleased/13325-bugfix-silence-on-disabled-notifications.yml
@@ -0,0 +1,6 @@
+---
+title: Disabling notifications globally now properly turns off group/project added
+  emails
+merge_request: 13325
+author: @jneen
+type: fixed
diff --git a/changelogs/unreleased/34492-firefox-job.yml b/changelogs/unreleased/34492-firefox-job.yml
deleted file mode 100644
index 881b8f649ea..00000000000
--- a/changelogs/unreleased/34492-firefox-job.yml
+++ /dev/null
@@ -1,4 +0,0 @@
----
-title: Use jQuery to control scroll behavior in job log for cross browser consistency
-merge_request:
-author:
diff --git a/changelogs/unreleased/35052-please-select-a-file-when-attempting-to-upload-or-replace-from-the-ui.yml b/changelogs/unreleased/35052-please-select-a-file-when-attempting-to-upload-or-replace-from-the-ui.yml
deleted file mode 100644
index 5925da14f89..00000000000
--- a/changelogs/unreleased/35052-please-select-a-file-when-attempting-to-upload-or-replace-from-the-ui.yml
+++ /dev/null
@@ -1,4 +0,0 @@
----
-title: improve file upload/replace experience
-merge_request:
-author:
diff --git a/changelogs/unreleased/35232-next-unresolved.yml b/changelogs/unreleased/35232-next-unresolved.yml
deleted file mode 100644
index 45f3fb429a8..00000000000
--- a/changelogs/unreleased/35232-next-unresolved.yml
+++ /dev/null
@@ -1,4 +0,0 @@
----
-title: fix jump to next discussion button
-merge_request:
-author:
diff --git a/changelogs/unreleased/35697-allow-logged-in-user-to-read-user-list.yml b/changelogs/unreleased/35697-allow-logged-in-user-to-read-user-list.yml
deleted file mode 100644
index 54b2e71bef9..00000000000
--- a/changelogs/unreleased/35697-allow-logged-in-user-to-read-user-list.yml
+++ /dev/null
@@ -1,4 +0,0 @@
----
-title: Allow any logged in users to read_users_list even if it's restricted
-merge_request: 13201
-author:
diff --git a/changelogs/unreleased/36158-new-issue-button.yml b/changelogs/unreleased/36158-new-issue-button.yml
deleted file mode 100644
index df61fa06af7..00000000000
--- a/changelogs/unreleased/36158-new-issue-button.yml
+++ /dev/null
@@ -1,4 +0,0 @@
----
-title: Fixes new issue button for failed job returning 404
-merge_request:
-author:
diff --git a/changelogs/unreleased/36213-return-is_admin-in-users-api-when-current_user-is-admin.yml b/changelogs/unreleased/36213-return-is_admin-in-users-api-when-current_user-is-admin.yml
new file mode 100644
index 00000000000..b51b5e58b39
--- /dev/null
+++ b/changelogs/unreleased/36213-return-is_admin-in-users-api-when-current_user-is-admin.yml
@@ -0,0 +1,6 @@
+---
+title: Include the `is_admin` field in the `GET /users/:id` API when current user
+ is an admin
+merge_request:
+author:
+type: fixed
diff --git a/changelogs/unreleased/appearances-caching-and-schema.yml b/changelogs/unreleased/appearances-caching-and-schema.yml
new file mode 100644
index 00000000000..5743f6e0f2d
--- /dev/null
+++ b/changelogs/unreleased/appearances-caching-and-schema.yml
@@ -0,0 +1,4 @@
+---
+title: Cache Appearance instances in Redis
+merge_request:
+author:
diff --git a/changelogs/unreleased/broadcast-messages-cache.yml b/changelogs/unreleased/broadcast-messages-cache.yml
new file mode 100644
index 00000000000..a3c9e1ff465
--- /dev/null
+++ b/changelogs/unreleased/broadcast-messages-cache.yml
@@ -0,0 +1,4 @@
+---
+title: Better caching and indexing of broadcast messages
+merge_request:
+author:
diff --git a/changelogs/unreleased/bvl-rollback-renamed-system-namespace.yml b/changelogs/unreleased/bvl-rollback-renamed-system-namespace.yml
new file mode 100644
index 00000000000..a24cc7a1c43
--- /dev/null
+++ b/changelogs/unreleased/bvl-rollback-renamed-system-namespace.yml
@@ -0,0 +1,4 @@
+---
+title: Don't rename namespace called system when upgrading from 9.1.x to 9.5
+merge_request: 13228
+author:
diff --git a/changelogs/unreleased/disable-project-export.yml b/changelogs/unreleased/disable-project-export.yml
new file mode 100644
index 00000000000..d7ca9f46193
--- /dev/null
+++ b/changelogs/unreleased/disable-project-export.yml
@@ -0,0 +1,4 @@
+---
+title: Add option to disable project export on instance
+merge_request: 13211
+author: Robin Bobbitt
diff --git a/changelogs/unreleased/fix-group-milestone-link-in-issuable-sidebar.yml b/changelogs/unreleased/fix-group-milestone-link-in-issuable-sidebar.yml
deleted file mode 100644
index 1558e575e6d..00000000000
--- a/changelogs/unreleased/fix-group-milestone-link-in-issuable-sidebar.yml
+++ /dev/null
@@ -1,4 +0,0 @@
----
-title: Fix links to group milestones from issue and merge request sidebar
-merge_request:
-author:
diff --git a/changelogs/unreleased/fix-oauth-checkboxes.yml b/changelogs/unreleased/fix-oauth-checkboxes.yml
deleted file mode 100644
index 2839ccc42cb..00000000000
--- a/changelogs/unreleased/fix-oauth-checkboxes.yml
+++ /dev/null
@@ -1,4 +0,0 @@
----
-title: Fixed sign-in restrictions buttons not toggling active state
-merge_request:
-author:
diff --git a/changelogs/unreleased/fix-sm-34547-cannot-connect-to-ci-server-error-messages.yml b/changelogs/unreleased/fix-sm-34547-cannot-connect-to-ci-server-error-messages.yml
deleted file mode 100644
index ddaec4f19f9..00000000000
--- a/changelogs/unreleased/fix-sm-34547-cannot-connect-to-ci-server-error-messages.yml
+++ /dev/null
@@ -1,5 +0,0 @@
----
-title: Fix an order of operations for CI connection error message in merge request
- widget
-merge_request: 13252
-author:
diff --git a/changelogs/unreleased/fix-sm-35931-active-ci-pipelineschedule-have-nullified-next_run_at.yml b/changelogs/unreleased/fix-sm-35931-active-ci-pipelineschedule-have-nullified-next_run_at.yml
deleted file mode 100644
index 07840205b6e..00000000000
--- a/changelogs/unreleased/fix-sm-35931-active-ci-pipelineschedule-have-nullified-next_run_at.yml
+++ /dev/null
@@ -1,4 +0,0 @@
----
-title: Fix pipeline_schedules pages when active schedule has an abnormal state
-merge_request: 13286
-author:
diff --git a/changelogs/unreleased/mattermost_fixes.yml b/changelogs/unreleased/mattermost_fixes.yml
deleted file mode 100644
index 667109a0bb4..00000000000
--- a/changelogs/unreleased/mattermost_fixes.yml
+++ /dev/null
@@ -1,4 +0,0 @@
----
-title: Fix Mattermost integration
-merge_request:
-author:
diff --git a/changelogs/unreleased/migrate-events-into-a-new-format.yml b/changelogs/unreleased/migrate-events-into-a-new-format.yml
new file mode 100644
index 00000000000..8a29f75323f
--- /dev/null
+++ b/changelogs/unreleased/migrate-events-into-a-new-format.yml
@@ -0,0 +1,4 @@
+---
+title: Migrate events into a new format to reduce the storage necessary and improve performance
+merge_request:
+author:
diff --git a/changelogs/unreleased/mk-fix-case-insensitive-redirect-matching.yml b/changelogs/unreleased/mk-fix-case-insensitive-redirect-matching.yml
deleted file mode 100644
index c539480c65f..00000000000
--- a/changelogs/unreleased/mk-fix-case-insensitive-redirect-matching.yml
+++ /dev/null
@@ -1,4 +0,0 @@
----
-title: Fix destroy of case-insensitive conflicting redirects
-merge_request: 13357
-author:
diff --git a/changelogs/unreleased/mk-fix-deploy-key-deletion.yml b/changelogs/unreleased/mk-fix-deploy-key-deletion.yml
deleted file mode 100644
index 9ff2e49b14c..00000000000
--- a/changelogs/unreleased/mk-fix-deploy-key-deletion.yml
+++ /dev/null
@@ -1,4 +0,0 @@
----
-title: Fix deletion of deploy keys linked to other projects
-merge_request: 13162
-author:
diff --git a/changelogs/unreleased/mk-validate-username-change-with-container-registry-tags.yml b/changelogs/unreleased/mk-validate-username-change-with-container-registry-tags.yml
deleted file mode 100644
index 425d5231e14..00000000000
--- a/changelogs/unreleased/mk-validate-username-change-with-container-registry-tags.yml
+++ /dev/null
@@ -1,4 +0,0 @@
----
-title: Add missing validation error for username change with container registry tags
-merge_request: 13356
-author:
diff --git a/changelogs/unreleased/project-foreign-keys-without-errors.yml b/changelogs/unreleased/project-foreign-keys-without-errors.yml
deleted file mode 100644
index 63c53c8ad8f..00000000000
--- a/changelogs/unreleased/project-foreign-keys-without-errors.yml
+++ /dev/null
@@ -1,4 +0,0 @@
----
-title: Change project FK migration to skip existing FKs
-merge_request:
-author:
diff --git a/changelogs/unreleased/search-flickering.yml b/changelogs/unreleased/search-flickering.yml
deleted file mode 100644
index 951a5a0292a..00000000000
--- a/changelogs/unreleased/search-flickering.yml
+++ /dev/null
@@ -1,4 +0,0 @@
----
-title: Fix search box losing focus when typing
-merge_request:
-author:
diff --git a/changelogs/unreleased/tc-fix-wildcard-protected-delete-merged.yml b/changelogs/unreleased/tc-fix-wildcard-protected-delete-merged.yml
deleted file mode 100644
index 9ca5f81cf79..00000000000
--- a/changelogs/unreleased/tc-fix-wildcard-protected-delete-merged.yml
+++ /dev/null
@@ -1,4 +0,0 @@
----
-title: Make Delete Merged Branches handle wildcard protected branches correctly
-merge_request: 13251
-author:
diff --git a/changelogs/unreleased/use-a-specialized-class-for-querying-events.yml b/changelogs/unreleased/use-a-specialized-class-for-querying-events.yml
new file mode 100644
index 00000000000..6c1ec10aa12
--- /dev/null
+++ b/changelogs/unreleased/use-a-specialized-class-for-querying-events.yml
@@ -0,0 +1,4 @@
+---
+title: Use a specialized class for querying events to improve performance
+merge_request:
+author:
diff --git a/changelogs/unreleased/zj-ref-path-monospace.yml b/changelogs/unreleased/zj-ref-path-monospace.yml
deleted file mode 100644
index 638a29eb90e..00000000000
--- a/changelogs/unreleased/zj-ref-path-monospace.yml
+++ /dev/null
@@ -1,4 +0,0 @@
----
-title: Use project_ref_path to create the link to a branch to fix links that 404
-merge_request:
-author:
diff --git a/config/routes/uploads.rb b/config/routes/uploads.rb
index e9c9aa8b2f9..d7bca8310e4 100644
--- a/config/routes/uploads.rb
+++ b/config/routes/uploads.rb
@@ -5,12 +5,12 @@ scope path: :uploads do
constraints: { model: /note|user|group|project/, mounted_as: /avatar|attachment/, filename: /[^\/]+/ }
# show uploads for models, snippets (notes) available for now
- get 'system/:model/:id/:secret/:filename',
+ get '-/system/:model/:id/:secret/:filename',
to: 'uploads#show',
constraints: { model: /personal_snippet/, id: /\d+/, filename: /[^\/]+/ }
# show temporary uploads
- get 'system/temp/:secret/:filename',
+ get '-/system/temp/:secret/:filename',
to: 'uploads#show',
constraints: { filename: /[^\/]+/ }
diff --git a/db/migrate/20170316163800_rename_system_namespaces.rb b/db/migrate/20170316163800_rename_system_namespaces.rb
deleted file mode 100644
index 9e9fb5ac225..00000000000
--- a/db/migrate/20170316163800_rename_system_namespaces.rb
+++ /dev/null
@@ -1,231 +0,0 @@
-# See http://doc.gitlab.com/ce/development/migration_style_guide.html
-# for more information on how to write migrations for GitLab.
-class RenameSystemNamespaces < ActiveRecord::Migration
- include Gitlab::Database::MigrationHelpers
- include Gitlab::ShellAdapter
- disable_ddl_transaction!
-
- class User < ActiveRecord::Base
- self.table_name = 'users'
- end
-
- class Namespace < ActiveRecord::Base
- self.table_name = 'namespaces'
- belongs_to :parent, class_name: 'RenameSystemNamespaces::Namespace'
- has_one :route, as: :source
- has_many :children, class_name: 'RenameSystemNamespaces::Namespace', foreign_key: :parent_id
- belongs_to :owner, class_name: 'RenameSystemNamespaces::User'
-
- # Overridden to have the correct `source_type` for the `route` relation
- def self.name
- 'Namespace'
- end
-
- def full_path
- if route && route.path.present?
- @full_path ||= route.path
- else
- update_route if persisted?
-
- build_full_path
- end
- end
-
- def build_full_path
- if parent && path
- parent.full_path + '/' + path
- else
- path
- end
- end
-
- def update_route
- prepare_route
- route.save
- end
-
- def prepare_route
- route || build_route(source: self)
- route.path = build_full_path
- route.name = build_full_name
- @full_path = nil
- @full_name = nil
- end
-
- def build_full_name
- if parent && name
- parent.human_name + ' / ' + name
- else
- name
- end
- end
-
- def human_name
- owner&.name
- end
- end
-
- class Route < ActiveRecord::Base
- self.table_name = 'routes'
- belongs_to :source, polymorphic: true
- end
-
- class Project < ActiveRecord::Base
- self.table_name = 'projects'
-
- def repository_storage_path
- Gitlab.config.repositories.storages[repository_storage]['path']
- end
- end
-
- DOWNTIME = false
-
- def up
- return unless system_namespace
-
- old_path = system_namespace.path
- old_full_path = system_namespace.full_path
- # Only remove the last occurrence of the path name to get the parent namespace path
- namespace_path = remove_last_occurrence(old_full_path, old_path)
- new_path = rename_path(namespace_path, old_path)
- new_full_path = join_namespace_path(namespace_path, new_path)
-
- Namespace.where(id: system_namespace).update_all(path: new_path) # skips callbacks & validations
-
- replace_statement = replace_sql(Route.arel_table[:path], old_full_path, new_full_path)
- route_matches = [old_full_path, "#{old_full_path}/%"]
-
- update_column_in_batches(:routes, :path, replace_statement) do |table, query|
- query.where(Route.arel_table[:path].matches_any(route_matches))
- end
-
- clear_cache_for_namespace(system_namespace)
-
- # tasks here are based on `Namespace#move_dir`
- move_repositories(system_namespace, old_full_path, new_full_path)
- move_namespace_folders(uploads_dir, old_full_path, new_full_path) if file_storage?
- move_namespace_folders(pages_dir, old_full_path, new_full_path)
- end
-
- def down
- # nothing to do
- end
-
- def remove_last_occurrence(string, pattern)
- string.reverse.sub(pattern.reverse, "").reverse
- end
-
- def move_namespace_folders(directory, old_relative_path, new_relative_path)
- old_path = File.join(directory, old_relative_path)
- return unless File.directory?(old_path)
-
- new_path = File.join(directory, new_relative_path)
- FileUtils.mv(old_path, new_path)
- end
-
- def move_repositories(namespace, old_full_path, new_full_path)
- repo_paths_for_namespace(namespace).each do |repository_storage_path|
- # Ensure old directory exists before moving it
- gitlab_shell.add_namespace(repository_storage_path, old_full_path)
-
- unless gitlab_shell.mv_namespace(repository_storage_path, old_full_path, new_full_path)
- say "Exception moving path #{repository_storage_path} from #{old_full_path} to #{new_full_path}"
- end
- end
- end
-
- def rename_path(namespace_path, path_was)
- counter = 0
- path = "#{path_was}#{counter}"
-
- while route_exists?(join_namespace_path(namespace_path, path))
- counter += 1
- path = "#{path_was}#{counter}"
- end
-
- path
- end
-
- def route_exists?(full_path)
- Route.where(Route.arel_table[:path].matches(full_path)).any?
- end
-
- def join_namespace_path(namespace_path, path)
- if namespace_path.present?
- File.join(namespace_path, path)
- else
- path
- end
- end
-
- def system_namespace
- @system_namespace ||= Namespace.where(parent_id: nil)
- .where(arel_table[:path].matches(system_namespace_path))
- .first
- end
-
- def system_namespace_path
- "system"
- end
-
- def clear_cache_for_namespace(namespace)
- project_ids = projects_for_namespace(namespace).pluck(:id)
-
- update_column_in_batches(:projects, :description_html, nil) do |table, query|
- query.where(table[:id].in(project_ids))
- end
-
- update_column_in_batches(:issues, :description_html, nil) do |table, query|
- query.where(table[:project_id].in(project_ids))
- end
-
- update_column_in_batches(:merge_requests, :description_html, nil) do |table, query|
- query.where(table[:target_project_id].in(project_ids))
- end
-
- update_column_in_batches(:notes, :note_html, nil) do |table, query|
- query.where(table[:project_id].in(project_ids))
- end
-
- update_column_in_batches(:milestones, :description_html, nil) do |table, query|
- query.where(table[:project_id].in(project_ids))
- end
- end
-
- def projects_for_namespace(namespace)
- namespace_ids = child_ids_for_parent(namespace, ids: [namespace.id])
- namespace_or_children = Project.arel_table[:namespace_id].in(namespace_ids)
- Project.unscoped.where(namespace_or_children)
- end
-
- # This won't scale to huge trees, but it should do for a handful of namespaces
- # called `system`.
- def child_ids_for_parent(namespace, ids: [])
- namespace.children.each do |child|
- ids << child.id
- child_ids_for_parent(child, ids: ids) if child.children.any?
- end
- ids
- end
-
- def repo_paths_for_namespace(namespace)
- projects_for_namespace(namespace).distinct
- .select(:repository_storage).map(&:repository_storage_path)
- end
-
- def uploads_dir
- File.join(Rails.root, "public", "uploads")
- end
-
- def pages_dir
- Settings.pages.path
- end
-
- def file_storage?
- CarrierWave::Uploader::Base.storage == CarrierWave::Storage::File
- end
-
- def arel_table
- Namespace.arel_table
- end
-end
diff --git a/db/migrate/20170316163845_move_uploads_to_system_dir.rb b/db/migrate/20170316163845_move_uploads_to_system_dir.rb
index 564ee10b5ab..cfcb909ddaf 100644
--- a/db/migrate/20170316163845_move_uploads_to_system_dir.rb
+++ b/db/migrate/20170316163845_move_uploads_to_system_dir.rb
@@ -54,6 +54,6 @@ class MoveUploadsToSystemDir < ActiveRecord::Migration
end
def new_upload_dir
- File.join(base_directory, "public", "uploads", "system")
+ File.join(base_directory, "public", "uploads", "-", "system")
end
end
diff --git a/db/migrate/20170608152747_prepare_events_table_for_push_events_migration.rb b/db/migrate/20170608152747_prepare_events_table_for_push_events_migration.rb
new file mode 100644
index 00000000000..f4f03bbabaf
--- /dev/null
+++ b/db/migrate/20170608152747_prepare_events_table_for_push_events_migration.rb
@@ -0,0 +1,51 @@
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class PrepareEventsTableForPushEventsMigration < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ # Set this constant to true if this migration requires downtime.
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ # The order of these columns is deliberate and results in the following
+ # columns and sizes:
+ #
+ # * id (4 bytes)
+ # * project_id (4 bytes)
+ # * author_id (4 bytes)
+ # * target_id (4 bytes)
+ # * created_at (8 bytes)
+ # * updated_at (8 bytes)
+ # * action (2 bytes)
+ # * target_type (variable)
+ #
+ # Unfortunately we can't make the "id" column a bigint/bigserial as Rails 4
+ # does not support this properly.
+ create_table :events_for_migration do |t|
+ t.references :project,
+ index: true,
+ foreign_key: { on_delete: :cascade }
+
+ t.integer :author_id, index: true, null: false
+ t.integer :target_id
+
+ t.timestamps_with_timezone null: false
+
+ t.integer :action, null: false, limit: 2, index: true
+ t.string :target_type
+
+ t.index %i[target_type target_id]
+ end
+
+    # t.references doesn't like it when the column name doesn't match the
+    # table name, so we have to add the foreign key separately.
+ add_concurrent_foreign_key(:events_for_migration, :users, column: :author_id)
+ end
+
+ def down
+ drop_table :events_for_migration
+ end
+end
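
A back-of-the-envelope tally of the fixed-width columns listed in the comment above (the variable-length `target_type` and per-row storage overhead come on top of this):

    sizes = { id: 4, project_id: 4, author_id: 4, target_id: 4,
              created_at: 8, updated_at: 8, action: 2 }
    sizes.values.sum # => 34 bytes of fixed-width data per row
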
diff --git a/db/migrate/20170608152748_create_push_event_payloads_tables.rb b/db/migrate/20170608152748_create_push_event_payloads_tables.rb
new file mode 100644
index 00000000000..6c55ad1f2f7
--- /dev/null
+++ b/db/migrate/20170608152748_create_push_event_payloads_tables.rb
@@ -0,0 +1,46 @@
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class CreatePushEventPayloadsTables < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ # Set this constant to true if this migration requires downtime.
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ create_table :push_event_payloads, id: false do |t|
+ t.bigint :commit_count, null: false
+
+ t.integer :event_id, null: false
+ t.integer :action, null: false, limit: 2
+ t.integer :ref_type, null: false, limit: 2
+
+ t.binary :commit_from
+ t.binary :commit_to
+
+ t.text :ref
+ t.string :commit_title, limit: 70
+
+ t.index :event_id, unique: true
+ end
+
+ # We're adding a foreign key to the _shadow_ table, and this is deliberate.
+ # By using the shadow table we don't have to recreate/revalidate this
+ # foreign key after swapping the "events_for_migration" and "events" tables.
+ #
+ # The "events_for_migration" table has a foreign key to "projects.id"
+ # ensuring that project removals also remove events from the shadow table
+ # (and thus also from this table).
+ add_concurrent_foreign_key(
+ :push_event_payloads,
+ :events_for_migration,
+ column: :event_id
+ )
+ end
+
+ def down
+ drop_table :push_event_payloads
+ end
+end
diff --git a/db/migrate/20170717074009_move_system_upload_folder.rb b/db/migrate/20170717074009_move_system_upload_folder.rb
index cce31794115..d3caa53a7a4 100644
--- a/db/migrate/20170717074009_move_system_upload_folder.rb
+++ b/db/migrate/20170717074009_move_system_upload_folder.rb
@@ -15,6 +15,11 @@ class MoveSystemUploadFolder < ActiveRecord::Migration
return
end
+ if File.directory?(new_directory)
+ say "#{new_directory} already exists. No need to redo the move."
+ return
+ end
+
FileUtils.mkdir_p(File.join(base_directory, '-'))
say "Moving #{old_directory} -> #{new_directory}"
@@ -33,6 +38,11 @@ class MoveSystemUploadFolder < ActiveRecord::Migration
return
end
+ if !File.symlink?(old_directory) && File.directory?(old_directory)
+ say "#{old_directory} already exists and is not a symlink, no need to revert."
+ return
+ end
+
if File.symlink?(old_directory)
say "Removing #{old_directory} -> #{new_directory} symlink"
FileUtils.rm(old_directory)
diff --git a/db/migrate/20170727123534_add_index_on_events_project_id_id.rb b/db/migrate/20170727123534_add_index_on_events_project_id_id.rb
new file mode 100644
index 00000000000..1c4aaaf9dd6
--- /dev/null
+++ b/db/migrate/20170727123534_add_index_on_events_project_id_id.rb
@@ -0,0 +1,37 @@
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class AddIndexOnEventsProjectIdId < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ COLUMNS = %i[project_id id].freeze
+ TABLES = %i[events events_for_migration].freeze
+
+ disable_ddl_transaction!
+
+ def up
+ TABLES.each do |table|
+ add_concurrent_index(table, COLUMNS) unless index_exists?(table, COLUMNS)
+
+ # We remove the index _after_ adding the new one since MySQL doesn't let
+ # you remove an index when a foreign key exists for the same column.
+ if index_exists?(table, :project_id)
+ remove_concurrent_index(table, :project_id)
+ end
+ end
+ end
+
+ def down
+ TABLES.each do |table|
+ unless index_exists?(table, :project_id)
+ add_concurrent_index(table, :project_id)
+ end
+
+ unless index_exists?(table, COLUMNS)
+ remove_concurrent_index(table, COLUMNS)
+ end
+ end
+ end
+end
diff --git a/db/migrate/20170809133343_add_broadcast_messages_index.rb b/db/migrate/20170809133343_add_broadcast_messages_index.rb
new file mode 100644
index 00000000000..4ab2ddb059d
--- /dev/null
+++ b/db/migrate/20170809133343_add_broadcast_messages_index.rb
@@ -0,0 +1,21 @@
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class AddBroadcastMessagesIndex < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ # Set this constant to true if this migration requires downtime.
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ COLUMNS = %i[starts_at ends_at id].freeze
+
+ def up
+ add_concurrent_index :broadcast_messages, COLUMNS
+ end
+
+ def down
+ remove_concurrent_index :broadcast_messages, COLUMNS
+ end
+end
diff --git a/db/migrate/20170809134534_add_broadcast_message_not_null_constraints.rb b/db/migrate/20170809134534_add_broadcast_message_not_null_constraints.rb
new file mode 100644
index 00000000000..13e8ef52f22
--- /dev/null
+++ b/db/migrate/20170809134534_add_broadcast_message_not_null_constraints.rb
@@ -0,0 +1,17 @@
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class AddBroadcastMessageNotNullConstraints < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ # Set this constant to true if this migration requires downtime.
+ DOWNTIME = false
+
+ COLUMNS = %i[starts_at ends_at created_at updated_at message_html]
+
+ def change
+ COLUMNS.each do |column|
+ change_column_null :broadcast_messages, column, false
+ end
+ end
+end
diff --git a/db/migrate/20170809142252_cleanup_appearances_schema.rb b/db/migrate/20170809142252_cleanup_appearances_schema.rb
new file mode 100644
index 00000000000..90d12925ba2
--- /dev/null
+++ b/db/migrate/20170809142252_cleanup_appearances_schema.rb
@@ -0,0 +1,33 @@
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class CleanupAppearancesSchema < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ # Set this constant to true if this migration requires downtime.
+ DOWNTIME = false
+
+ NOT_NULL_COLUMNS = %i[title description description_html created_at updated_at]
+
+ TIME_COLUMNS = %i[created_at updated_at]
+
+ def up
+ NOT_NULL_COLUMNS.each do |column|
+ change_column_null :appearances, column, false
+ end
+
+ TIME_COLUMNS.each do |column|
+ change_column :appearances, column, :datetime_with_timezone
+ end
+ end
+
+ def down
+ NOT_NULL_COLUMNS.each do |column|
+ change_column_null :appearances, column, true
+ end
+
+ TIME_COLUMNS.each do |column|
+ change_column :appearances, column, :datetime # rubocop: disable Migration/Datetime
+ end
+ end
+end
diff --git a/db/migrate/20170809161910_add_project_export_enabled_to_application_settings.rb b/db/migrate/20170809161910_add_project_export_enabled_to_application_settings.rb
new file mode 100644
index 00000000000..4baba1ade6d
--- /dev/null
+++ b/db/migrate/20170809161910_add_project_export_enabled_to_application_settings.rb
@@ -0,0 +1,14 @@
+class AddProjectExportEnabledToApplicationSettings < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+ disable_ddl_transaction!
+
+ DOWNTIME = false
+
+ def up
+ add_column_with_default(:application_settings, :project_export_enabled, :boolean, default: true)
+ end
+
+ def down
+ remove_column(:application_settings, :project_export_enabled)
+ end
+end
diff --git a/db/post_migrate/20170317162059_update_upload_paths_to_system.rb b/db/post_migrate/20170317162059_update_upload_paths_to_system.rb
index ca2912f8dce..92e33848bf0 100644
--- a/db/post_migrate/20170317162059_update_upload_paths_to_system.rb
+++ b/db/post_migrate/20170317162059_update_upload_paths_to_system.rb
@@ -48,7 +48,7 @@ class UpdateUploadPathsToSystem < ActiveRecord::Migration
end
def new_upload_dir
- File.join(base_directory, "system")
+ File.join(base_directory, "-", "system")
end
def arel_table
diff --git a/db/post_migrate/20170406111121_clean_upload_symlinks.rb b/db/post_migrate/20170406111121_clean_upload_symlinks.rb
index fc3a4acc0bb..f2ce25d4524 100644
--- a/db/post_migrate/20170406111121_clean_upload_symlinks.rb
+++ b/db/post_migrate/20170406111121_clean_upload_symlinks.rb
@@ -47,6 +47,6 @@ class CleanUploadSymlinks < ActiveRecord::Migration
end
def new_upload_dir
- File.join(base_directory, "public", "uploads", "system")
+ File.join(base_directory, "public", "uploads", "-", "system")
end
end
diff --git a/db/post_migrate/20170606202615_move_appearance_to_system_dir.rb b/db/post_migrate/20170606202615_move_appearance_to_system_dir.rb
index 561de59ec69..07935ab8a52 100644
--- a/db/post_migrate/20170606202615_move_appearance_to_system_dir.rb
+++ b/db/post_migrate/20170606202615_move_appearance_to_system_dir.rb
@@ -52,6 +52,6 @@ class MoveAppearanceToSystemDir < ActiveRecord::Migration
end
def new_upload_dir
- File.join(base_directory, "public", "uploads", "system")
+ File.join(base_directory, "public", "uploads", "-", "system")
end
end
diff --git a/db/post_migrate/20170612071012_move_personal_snippets_files.rb b/db/post_migrate/20170612071012_move_personal_snippets_files.rb
index 33043364bde..2b79a87ccd8 100644
--- a/db/post_migrate/20170612071012_move_personal_snippets_files.rb
+++ b/db/post_migrate/20170612071012_move_personal_snippets_files.rb
@@ -10,7 +10,7 @@ class MovePersonalSnippetsFiles < ActiveRecord::Migration
return unless file_storage?
@source_relative_location = File.join('/uploads', 'personal_snippet')
- @destination_relative_location = File.join('/uploads', 'system', 'personal_snippet')
+ @destination_relative_location = File.join('/uploads', '-', 'system', 'personal_snippet')
move_personal_snippet_files
end
@@ -18,7 +18,7 @@ class MovePersonalSnippetsFiles < ActiveRecord::Migration
def down
return unless file_storage?
- @source_relative_location = File.join('/uploads', 'system', 'personal_snippet')
+ @source_relative_location = File.join('/uploads', '-', 'system', 'personal_snippet')
@destination_relative_location = File.join('/uploads', 'personal_snippet')
move_personal_snippet_files
diff --git a/db/post_migrate/20170627101016_schedule_event_migrations.rb b/db/post_migrate/20170627101016_schedule_event_migrations.rb
new file mode 100644
index 00000000000..1f34375ff0d
--- /dev/null
+++ b/db/post_migrate/20170627101016_schedule_event_migrations.rb
@@ -0,0 +1,40 @@
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class ScheduleEventMigrations < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+ BUFFER_SIZE = 1000
+
+ disable_ddl_transaction!
+
+ class Event < ActiveRecord::Base
+ include EachBatch
+
+ self.table_name = 'events'
+ end
+
+ def up
+ jobs = []
+
+ Event.each_batch(of: 1000) do |relation|
+ min, max = relation.pluck('MIN(id), MAX(id)').first
+
+ if jobs.length == BUFFER_SIZE
+ # We push multiple jobs at a time to reduce the time spent in
+ # Sidekiq/Redis operations. We're using this buffer based approach so we
+ # don't need to run additional queries for every range.
+ BackgroundMigrationWorker.perform_bulk(jobs)
+ jobs.clear
+ end
+
+ jobs << ['MigrateEventsToPushEventPayloads', [min, max]]
+ end
+
+ BackgroundMigrationWorker.perform_bulk(jobs) unless jobs.empty?
+ end
+
+ def down
+ end
+end
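
For illustration only, the buffer handed to `BackgroundMigrationWorker.perform_bulk` ends up looking roughly like this (the ID ranges are invented; real ranges depend on the gaps in the `events` table):

    jobs = [
      ['MigrateEventsToPushEventPayloads', [1, 1000]],
      ['MigrateEventsToPushEventPayloads', [1001, 2000]]
      # ... up to BUFFER_SIZE entries before a single perform_bulk call
    ]
    BackgroundMigrationWorker.perform_bulk(jobs)
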
diff --git a/db/post_migrate/20170807190736_move_personal_snippet_files_into_correct_folder.rb b/db/post_migrate/20170807190736_move_personal_snippet_files_into_correct_folder.rb
new file mode 100644
index 00000000000..e3d2446b897
--- /dev/null
+++ b/db/post_migrate/20170807190736_move_personal_snippet_files_into_correct_folder.rb
@@ -0,0 +1,29 @@
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class MovePersonalSnippetFilesIntoCorrectFolder < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+ disable_ddl_transaction!
+
+ DOWNTIME = false
+ NEW_DIRECTORY = File.join('/uploads', '-', 'system', 'personal_snippet')
+ OLD_DIRECTORY = File.join('/uploads', 'system', 'personal_snippet')
+
+ def up
+ return unless file_storage?
+
+ BackgroundMigrationWorker.perform_async('MovePersonalSnippetFiles',
+ [OLD_DIRECTORY, NEW_DIRECTORY])
+ end
+
+ def down
+ return unless file_storage?
+
+ BackgroundMigrationWorker.perform_async('MovePersonalSnippetFiles',
+ [NEW_DIRECTORY, OLD_DIRECTORY])
+ end
+
+ def file_storage?
+ CarrierWave::Uploader::Base.storage == CarrierWave::Storage::File
+ end
+end
diff --git a/db/schema.rb b/db/schema.rb
index ed3cf70bcdd..3206e106552 100644
--- a/db/schema.rb
+++ b/db/schema.rb
@@ -11,7 +11,7 @@
#
# It's strongly recommended that you check this file into your version control system.
-ActiveRecord::Schema.define(version: 20170807160457) do
+ActiveRecord::Schema.define(version: 20170809161910) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
@@ -28,13 +28,13 @@ ActiveRecord::Schema.define(version: 20170807160457) do
end
create_table "appearances", force: :cascade do |t|
- t.string "title"
- t.text "description"
+ t.string "title", null: false
+ t.text "description", null: false
t.string "header_logo"
t.string "logo"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
- t.text "description_html"
+ t.text "description_html", null: false
t.integer "cached_markdown_version"
end
@@ -127,6 +127,7 @@ ActiveRecord::Schema.define(version: 20170807160457) do
t.string "help_page_support_url"
t.integer "performance_bar_allowed_group_id"
t.boolean "password_authentication_enabled"
+ t.boolean "project_export_enabled", default: true, null: false
end
create_table "audit_events", force: :cascade do |t|
@@ -163,16 +164,18 @@ ActiveRecord::Schema.define(version: 20170807160457) do
create_table "broadcast_messages", force: :cascade do |t|
t.text "message", null: false
- t.datetime "starts_at"
- t.datetime "ends_at"
- t.datetime "created_at"
- t.datetime "updated_at"
+ t.datetime "starts_at", null: false
+ t.datetime "ends_at", null: false
+ t.datetime "created_at", null: false
+ t.datetime "updated_at", null: false
t.string "color"
t.string "font"
- t.text "message_html"
+ t.text "message_html", null: false
t.integer "cached_markdown_version"
end
+ add_index "broadcast_messages", ["starts_at", "ends_at", "id"], name: "index_broadcast_messages_on_starts_at_and_ends_at_and_id", using: :btree
+
create_table "chat_names", force: :cascade do |t|
t.integer "user_id", null: false
t.integer "service_id", null: false
@@ -530,10 +533,25 @@ ActiveRecord::Schema.define(version: 20170807160457) do
add_index "events", ["action"], name: "index_events_on_action", using: :btree
add_index "events", ["author_id"], name: "index_events_on_author_id", using: :btree
add_index "events", ["created_at"], name: "index_events_on_created_at", using: :btree
- add_index "events", ["project_id"], name: "index_events_on_project_id", using: :btree
+ add_index "events", ["project_id", "id"], name: "index_events_on_project_id_and_id", using: :btree
add_index "events", ["target_id"], name: "index_events_on_target_id", using: :btree
add_index "events", ["target_type"], name: "index_events_on_target_type", using: :btree
+ create_table "events_for_migration", force: :cascade do |t|
+ t.integer "project_id"
+ t.integer "author_id", null: false
+ t.integer "target_id"
+ t.datetime "created_at", null: false
+ t.datetime "updated_at", null: false
+ t.integer "action", limit: 2, null: false
+ t.string "target_type"
+ end
+
+ add_index "events_for_migration", ["action"], name: "index_events_for_migration_on_action", using: :btree
+ add_index "events_for_migration", ["author_id"], name: "index_events_for_migration_on_author_id", using: :btree
+ add_index "events_for_migration", ["project_id", "id"], name: "index_events_for_migration_on_project_id_and_id", using: :btree
+ add_index "events_for_migration", ["target_type", "target_id"], name: "index_events_for_migration_on_target_type_and_target_id", using: :btree
+
create_table "feature_gates", force: :cascade do |t|
t.string "feature_key", null: false
t.string "key", null: false
@@ -1254,6 +1272,19 @@ ActiveRecord::Schema.define(version: 20170807160457) do
add_index "protected_tags", ["project_id"], name: "index_protected_tags_on_project_id", using: :btree
+ create_table "push_event_payloads", id: false, force: :cascade do |t|
+ t.integer "commit_count", limit: 8, null: false
+ t.integer "event_id", null: false
+ t.integer "action", limit: 2, null: false
+ t.integer "ref_type", limit: 2, null: false
+ t.binary "commit_from"
+ t.binary "commit_to"
+ t.text "ref"
+ t.string "commit_title", limit: 70
+ end
+
+ add_index "push_event_payloads", ["event_id"], name: "index_push_event_payloads_on_event_id", unique: true, using: :btree
+
create_table "redirect_routes", force: :cascade do |t|
t.integer "source_id", null: false
t.string "source_type", null: false
@@ -1654,6 +1685,8 @@ ActiveRecord::Schema.define(version: 20170807160457) do
add_foreign_key "deployments", "projects", name: "fk_b9a3851b82", on_delete: :cascade
add_foreign_key "environments", "projects", name: "fk_d1c8c1da6a", on_delete: :cascade
add_foreign_key "events", "projects", name: "fk_0434b48643", on_delete: :cascade
+ add_foreign_key "events_for_migration", "projects", on_delete: :cascade
+ add_foreign_key "events_for_migration", "users", column: "author_id", name: "fk_edfd187b6f", on_delete: :cascade
add_foreign_key "forked_project_links", "projects", column: "forked_to_project_id", name: "fk_434510edb0", on_delete: :cascade
add_foreign_key "gpg_keys", "users", on_delete: :cascade
add_foreign_key "gpg_signatures", "gpg_keys", on_delete: :nullify
@@ -1696,6 +1729,7 @@ ActiveRecord::Schema.define(version: 20170807160457) do
add_foreign_key "protected_tag_create_access_levels", "protected_tags"
add_foreign_key "protected_tag_create_access_levels", "users"
add_foreign_key "protected_tags", "projects", name: "fk_8e4af87648", on_delete: :cascade
+ add_foreign_key "push_event_payloads", "events_for_migration", column: "event_id", name: "fk_36c74129da", on_delete: :cascade
add_foreign_key "releases", "projects", name: "fk_47fe2a0596", on_delete: :cascade
add_foreign_key "services", "projects", name: "fk_71cce407f9", on_delete: :cascade
add_foreign_key "snippets", "projects", name: "fk_be41fd4bb7", on_delete: :cascade
diff --git a/doc/api/events.md b/doc/api/events.md
index 3d5170f3f1e..129af0afa35 100644
--- a/doc/api/events.md
+++ b/doc/api/events.md
@@ -79,7 +79,6 @@ Example response:
"target_id":160,
"target_type":"Issue",
"author_id":25,
- "data":null,
"target_title":"Qui natus eos odio tempore et quaerat consequuntur ducimus cupiditate quis.",
"created_at":"2017-02-09T10:43:19.667Z",
"author":{
@@ -99,7 +98,6 @@ Example response:
"target_id":159,
"target_type":"Issue",
"author_id":21,
- "data":null,
"target_title":"Nostrum enim non et sed optio illo deleniti non.",
"created_at":"2017-02-09T10:43:19.426Z",
"author":{
@@ -151,7 +149,6 @@ Example response:
"target_id": 830,
"target_type": "Issue",
"author_id": 1,
- "data": null,
"target_title": "Public project search field",
"author": {
"name": "Dmitriy Zaporozhets",
@@ -166,7 +163,7 @@ Example response:
{
"title": null,
"project_id": 15,
- "action_name": "opened",
+ "action_name": "pushed",
"target_id": null,
"target_type": null,
"author_id": 1,
@@ -179,31 +176,14 @@ Example response:
"web_url": "http://localhost:3000/root"
},
"author_username": "john",
- "data": {
- "before": "50d4420237a9de7be1304607147aec22e4a14af7",
- "after": "c5feabde2d8cd023215af4d2ceeb7a64839fc428",
- "ref": "refs/heads/master",
- "user_id": 1,
- "user_name": "Dmitriy Zaporozhets",
- "repository": {
- "name": "gitlabhq",
- "url": "git@dev.gitlab.org:gitlab/gitlabhq.git",
- "description": "GitLab: self hosted Git management software. \r\nDistributed under the MIT License.",
- "homepage": "https://dev.gitlab.org/gitlab/gitlabhq"
- },
- "commits": [
- {
- "id": "c5feabde2d8cd023215af4d2ceeb7a64839fc428",
- "message": "Add simple search to projects in public area",
- "timestamp": "2013-05-13T18:18:08+00:00",
- "url": "https://dev.gitlab.org/gitlab/gitlabhq/commit/c5feabde2d8cd023215af4d2ceeb7a64839fc428",
- "author": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- }
- }
- ],
- "total_commits_count": 1
+ "push_data": {
+ "commit_count": 1,
+ "action": "pushed",
+ "ref_type": "branch",
+ "commit_from": "50d4420237a9de7be1304607147aec22e4a14af7",
+ "commit_to": "c5feabde2d8cd023215af4d2ceeb7a64839fc428",
+ "ref": "master",
+ "commit_title": "Add simple search to projects in public area"
},
"target_title": null
},
@@ -214,7 +194,6 @@ Example response:
"target_id": 840,
"target_type": "Issue",
"author_id": 1,
- "data": null,
"target_title": "Finish & merge Code search PR",
"author": {
"name": "Dmitriy Zaporozhets",
@@ -233,7 +212,6 @@ Example response:
"target_id": 1312,
"target_type": "Note",
"author_id": 1,
- "data": null,
"target_title": null,
"created_at": "2015-12-04T10:33:58.089Z",
"note": {
@@ -305,7 +283,6 @@ Example response:
"target_iid":160,
"target_type":"Issue",
"author_id":25,
- "data":null,
"target_title":"Qui natus eos odio tempore et quaerat consequuntur ducimus cupiditate quis.",
"created_at":"2017-02-09T10:43:19.667Z",
"author":{
@@ -326,7 +303,6 @@ Example response:
"target_iid":159,
"target_type":"Issue",
"author_id":21,
- "data":null,
"target_title":"Nostrum enim non et sed optio illo deleniti non.",
"created_at":"2017-02-09T10:43:19.426Z",
"author":{
diff --git a/doc/development/testing.md b/doc/development/testing.md
index ea94c87d8c6..c7eac3cf40c 100644
--- a/doc/development/testing.md
+++ b/doc/development/testing.md
@@ -157,8 +157,9 @@ trade-off:
- Unit tests are usually cheap, and you should consider them like the basement
of your house: you need them to be confident that your code is behaving
- correctly. However if you run only unit tests without integration / system tests, you might [miss] the [big] [picture]!
-- Integration tests are a bit more expensive, but don't abuse them. A feature test
+ correctly. However if you run only unit tests without integration / system
+ tests, you might [miss] the [big] [picture]!
+- Integration tests are a bit more expensive, but don't abuse them. A system test
is often better than an integration test that is stubbing a lot of internals.
- System tests are expensive (compared to unit tests), even more if they require
a JavaScript driver. Make sure to follow the guidelines in the [Speed](#test-speed)
@@ -195,11 +196,27 @@ Please consult the [dedicated "Frontend testing" guide](./fe_guide/testing.md).
- Try to match the ordering of tests to the ordering within the class.
- Try to follow the [Four-Phase Test][four-phase-test] pattern, using newlines
to separate phases.
-- Try to use `Gitlab.config.gitlab.host` rather than hard coding `'localhost'`
+- Use `Gitlab.config.gitlab.host` rather than hard coding `'localhost'`
+- Don't assert against the absolute value of a sequence-generated attribute (see
+ [Gotchas](gotchas.md#dont-assert-against-the-absolute-value-of-a-sequence-generated-attribute)).
+- Don't supply the `:each` argument to hooks since it's the default.
- On `before` and `after` hooks, prefer it scoped to `:context` over `:all`
[four-phase-test]: https://robots.thoughtbot.com/four-phase-test
+### Automatic retries and flaky tests detection
+
+On our CI, we use [rspec-retry] to automatically retry a failing example a few
+times (see [`spec/spec_helper.rb`] for the precise retry count).
+
+We also use a custom `RspecFlaky::Listener` that records flaky examples in a
+JSON report file on `master` (`retrieve-tests-metadata` and
+`update-tests-metadata` jobs), and warns when a new flaky example is detected
+in any other branch (`flaky-examples-check` job). In the future, the
+`flaky-examples-check` job will not be allowed to fail.
+
+[rspec-retry]: https://github.com/NoRedInk/rspec-retry
+[`spec/spec_helper.rb`]: https://gitlab.com/gitlab-org/gitlab-ce/blob/master/spec/spec_helper.rb
+
### `let` variables
GitLab's RSpec suite has made extensive use of `let` variables to reduce
complexity of RSpec expectations. They should be placed under
a certain type of specs only (e.g. features, requests etc.) but shouldn't be if
they apply to multiple type of specs.
+#### have_gitlab_http_status
+
+Prefer `have_gitlab_http_status` over `have_http_status` because the former
+also shows the response body when the status does not match. This is very
+useful when tests start breaking and we want to know why without editing the
+source and rerunning the tests.
+
+It is especially helpful when the response is a 500 internal server error.
+
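
A minimal illustrative spec using the preferred matcher (the route and example are hypothetical):

    it 'responds with 200 OK' do
      get root_path

      # On mismatch, have_gitlab_http_status also prints the response body,
      # unlike have_http_status.
      expect(response).to have_gitlab_http_status(200)
    end
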
### Shared contexts
All shared contexts should be placed under `spec/support/shared_contexts/`.
diff --git a/doc/install/requirements.md b/doc/install/requirements.md
index 141df55f6bc..175dfc62096 100644
--- a/doc/install/requirements.md
+++ b/doc/install/requirements.md
@@ -104,6 +104,10 @@ features of GitLab work with MySQL/MariaDB:
See [issue #30472][30472] for more information.
1. GitLab Geo does [not support MySQL](https://docs.gitlab.com/ee/gitlab-geo/database.html#mysql-replication).
1. [Zero downtime migrations][zero] do not work with MySQL
+1. GitLab [optimizes the loading of dashboard events](https://gitlab.com/gitlab-org/gitlab-ce/issues/31806) using [PostgreSQL LATERAL JOINs](https://blog.heapanalytics.com/postgresqls-powerful-new-join-type-lateral/).
+1. In general, SQL optimized for PostgreSQL may run much slower in MySQL due to
+ differences in query planners. For example, subqueries that work well in PostgreSQL
+   may not be [performant in MySQL](https://dev.mysql.com/doc/refman/5.7/en/optimizing-subqueries.html).
1. We expect this list to grow over time.
Existing users using GitLab with MySQL/MariaDB are advised to
diff --git a/doc/update/patch_versions.md b/doc/update/patch_versions.md
index 12408123158..30107360446 100644
--- a/doc/update/patch_versions.md
+++ b/doc/update/patch_versions.md
@@ -56,21 +56,17 @@ sudo -u git -H bundle clean
# Run database migrations
sudo -u git -H bundle exec rake db:migrate RAILS_ENV=production
-### 4. Compile GetText PO files
-
-Internationalization was added in `v9.2.0` so these commands are only
-required for versions equal or major to it.
-
-```bash
+# Compile GetText PO files
+# Internationalization was added in `v9.2.0` so these commands are only
+# required for versions equal to or newer than it.
sudo -u git -H bundle exec rake gettext:pack RAILS_ENV=production
sudo -u git -H bundle exec rake gettext:po_to_json RAILS_ENV=production
-```
# Clean up assets and cache
sudo -u git -H bundle exec rake yarn:install gitlab:assets:clean gitlab:assets:compile cache:clear RAILS_ENV=production NODE_ENV=production
```
-### 5. Update gitlab-workhorse to the corresponding version
+### 4. Update gitlab-workhorse to the corresponding version
```bash
cd /home/git/gitlab
@@ -78,7 +74,7 @@ cd /home/git/gitlab
sudo -u git -H bundle exec rake "gitlab:workhorse:install[/home/git/gitlab-workhorse]" RAILS_ENV=production
```
-### 6. Update gitlab-shell to the corresponding version
+### 5. Update gitlab-shell to the corresponding version
```bash
cd /home/git/gitlab-shell
@@ -88,14 +84,14 @@ sudo -u git -H git checkout v`cat /home/git/gitlab/GITLAB_SHELL_VERSION` -b v`ca
sudo -u git -H sh -c 'if [ -x bin/compile ]; then bin/compile; fi'
```
-### 7. Start application
+### 6. Start application
```bash
sudo service gitlab start
sudo service nginx restart
```
-### 8. Check application status
+### 7. Check application status
Check if GitLab and its environment are configured correctly:
diff --git a/doc/user/project/settings/import_export.md b/doc/user/project/settings/import_export.md
index 35960ade3d4..97cca3007b1 100644
--- a/doc/user/project/settings/import_export.md
+++ b/doc/user/project/settings/import_export.md
@@ -9,6 +9,9 @@
> application settings (`/admin/application_settings`) under 'Import sources'.
> Ask your administrator if you don't see the **GitLab export** button when
> creating a new project.
+> - Starting with GitLab 10.0, administrators can disable the project export option
+> on the GitLab instance in application settings (`/admin/application_settings`)
+> under 'Visibility and Access Controls'.
> - You can find some useful raketasks if you are an administrator in the
> [import_export](../../../administration/raketasks/project_import_export.md)
> raketask.
diff --git a/features/steps/shared/project.rb b/features/steps/shared/project.rb
index 00f7cded2ae..605c9a3ab71 100644
--- a/features/steps/shared/project.rb
+++ b/features/steps/shared/project.rb
@@ -71,28 +71,14 @@ module SharedProject
step 'project "Shop" has push event' do
@project = Project.find_by(name: "Shop")
-
- data = {
- before: Gitlab::Git::BLANK_SHA,
- after: "6d394385cf567f80a8fd85055db1ab4c5295806f",
- ref: "refs/heads/fix",
- user_id: @user.id,
- user_name: @user.name,
- repository: {
- name: @project.name,
- url: "localhost/rubinius",
- description: "",
- homepage: "localhost/rubinius",
- private: true
- }
- }
-
- @event = Event.create(
- project: @project,
- action: Event::PUSHED,
- data: data,
- author_id: @user.id
- )
+ @event = create(:push_event, project: @project, author: @user)
+
+ create(:push_event_payload,
+ event: @event,
+ action: :created,
+ commit_to: '6d394385cf567f80a8fd85055db1ab4c5295806f',
+ ref: 'fix',
+ commit_count: 1)
end
step 'I should see project "Shop" activity feed' do
diff --git a/lib/api/entities.rb b/lib/api/entities.rb
index 3bb1910a441..18cd604a216 100644
--- a/lib/api/entities.rb
+++ b/lib/api/entities.rb
@@ -497,14 +497,24 @@ module API
expose :author, using: Entities::UserBasic
end
+ class PushEventPayload < Grape::Entity
+ expose :commit_count, :action, :ref_type, :commit_from, :commit_to
+ expose :ref, :commit_title
+ end
+
class Event < Grape::Entity
expose :title, :project_id, :action_name
expose :target_id, :target_iid, :target_type, :author_id
- expose :data, :target_title
+ expose :target_title
expose :created_at
expose :note, using: Entities::Note, if: ->(event, options) { event.note? }
expose :author, using: Entities::UserBasic, if: ->(event, options) { event.author }
+ expose :push_event_payload,
+ as: :push_data,
+ using: PushEventPayload,
+ if: -> (event, _) { event.push? }
+
expose :author_username do |event, options|
event.author&.username
end
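With this change an API event no longer exposes the raw `data` hash; push events instead carry a structured `push_data` member built from `PushEventPayload`. A sketch of the resulting JSON shape, rendered here as a Ruby hash with invented values:

```ruby
# Field names come from the PushEventPayload entity above; the values are made up.
{
  'action_name' => 'pushed',
  'push_data' => {
    'commit_count' => 1,
    'action'       => 'pushed',
    'ref_type'     => 'branch',
    'commit_from'  => '50d4420237a9de7be1304607147aec22e4a14af7',
    'commit_to'    => 'c5feabde2d8cd023215af4d2ceeb7a64839fc428',
    'ref'          => 'master',
    'commit_title' => 'Add a new file'
  }
}
```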
diff --git a/lib/api/settings.rb b/lib/api/settings.rb
index d55a61fa638..667ba468ce6 100644
--- a/lib/api/settings.rb
+++ b/lib/api/settings.rb
@@ -29,6 +29,7 @@ module API
desc: 'Enabled sources for code import during project creation. OmniAuth must be configured for GitHub, Bitbucket, and GitLab.com'
optional :disabled_oauth_sign_in_sources, type: Array[String], desc: 'Disable certain OAuth sign-in sources'
optional :enabled_git_access_protocol, type: String, values: %w[ssh http nil], desc: 'Allow only the selected protocols to be used for Git access.'
+ optional :project_export_enabled, type: Boolean, desc: 'Enable project export'
optional :gravatar_enabled, type: Boolean, desc: 'Flag indicating if the Gravatar service is enabled'
optional :default_projects_limit, type: Integer, desc: 'The maximum number of personal projects'
optional :max_attachment_size, type: Integer, desc: 'Maximum attachment size in MB'
diff --git a/lib/api/users.rb b/lib/api/users.rb
index a590f2692a2..e2019d6d512 100644
--- a/lib/api/users.rb
+++ b/lib/api/users.rb
@@ -79,22 +79,17 @@ module API
end
desc 'Get a single user' do
- success Entities::UserBasic
+ success Entities::User
end
params do
requires :id, type: Integer, desc: 'The ID of the user'
end
get ":id" do
user = User.find_by(id: params[:id])
- not_found!('User') unless user
+ not_found!('User') unless user && can?(current_user, :read_user, user)
- if current_user && current_user.admin?
- present user, with: Entities::UserPublic
- elsif can?(current_user, :read_user, user)
- present user, with: Entities::User
- else
- render_api_error!("User not found.", 404)
- end
+ opts = current_user&.admin? ? { with: Entities::UserWithAdmin } : {}
+ present user, opts
end
desc 'Create a user. Available only for admins.' do
diff --git a/lib/api/v3/entities.rb b/lib/api/v3/entities.rb
index 4a2e9c9cbb0..a9a35f2a4bd 100644
--- a/lib/api/v3/entities.rb
+++ b/lib/api/v3/entities.rb
@@ -25,14 +25,24 @@ module API
expose(:downvote?) { |note| false }
end
+ class PushEventPayload < Grape::Entity
+ expose :commit_count, :action, :ref_type, :commit_from, :commit_to
+ expose :ref, :commit_title
+ end
+
class Event < Grape::Entity
expose :title, :project_id, :action_name
expose :target_id, :target_type, :author_id
- expose :data, :target_title
+ expose :target_title
expose :created_at
expose :note, using: Entities::Note, if: ->(event, options) { event.note? }
expose :author, using: ::API::Entities::UserBasic, if: ->(event, options) { event.author }
+ expose :push_event_payload,
+ as: :push_data,
+ using: PushEventPayload,
+ if: -> (event, _) { event.push? }
+
expose :author_username do |event, options|
event.author&.username
end
diff --git a/lib/file_streamer.rb b/lib/file_streamer.rb
deleted file mode 100644
index 4e3c6d3c773..00000000000
--- a/lib/file_streamer.rb
+++ /dev/null
@@ -1,16 +0,0 @@
-class FileStreamer #:nodoc:
- attr_reader :to_path
-
- def initialize(path)
- @to_path = path
- end
-
- # Stream the file's contents if Rack::Sendfile isn't present.
- def each
- File.open(to_path, 'rb') do |file|
- while chunk = file.read(16384)
- yield chunk
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/migrate_events_to_push_event_payloads.rb b/lib/gitlab/background_migration/migrate_events_to_push_event_payloads.rb
new file mode 100644
index 00000000000..432f7c3e706
--- /dev/null
+++ b/lib/gitlab/background_migration/migrate_events_to_push_event_payloads.rb
@@ -0,0 +1,176 @@
+module Gitlab
+ module BackgroundMigration
+ # Class that migrates events for the new push event payloads setup. All
+ # events are copied to a shadow table, and push events will also have a row
+ # created in the push_event_payloads table.
+ class MigrateEventsToPushEventPayloads
+ class Event < ActiveRecord::Base
+ self.table_name = 'events'
+
+ serialize :data
+
+ BLANK_REF = ('0' * 40).freeze
+ TAG_REF_PREFIX = 'refs/tags/'.freeze
+ MAX_INDEX = 69
+ PUSHED = 5
+
+ def push_event?
+ action == PUSHED && data.present?
+ end
+
+ def commit_title
+ commit = commits.last
+
+ return nil unless commit && commit[:message]
+
+ index = commit[:message].index("\n")
+ message = index ? commit[:message][0..index] : commit[:message]
+
+ message.strip.truncate(70)
+ end
+
+ def commit_from_sha
+ if create?
+ nil
+ else
+ data[:before]
+ end
+ end
+
+ def commit_to_sha
+ if remove?
+ nil
+ else
+ data[:after]
+ end
+ end
+
+ def data
+ super || {}
+ end
+
+ def commits
+ data[:commits] || []
+ end
+
+ def commit_count
+ data[:total_commits_count] || 0
+ end
+
+ def ref
+ data[:ref]
+ end
+
+ def trimmed_ref_name
+ if ref_type == :tag
+ ref[10..-1]
+ else
+ ref[11..-1]
+ end
+ end
+
+ def create?
+ data[:before] == BLANK_REF
+ end
+
+ def remove?
+ data[:after] == BLANK_REF
+ end
+
+ def push_action
+ if create?
+ :created
+ elsif remove?
+ :removed
+ else
+ :pushed
+ end
+ end
+
+ def ref_type
+ if ref.start_with?(TAG_REF_PREFIX)
+ :tag
+ else
+ :branch
+ end
+ end
+ end
+
+ class EventForMigration < ActiveRecord::Base
+ self.table_name = 'events_for_migration'
+ end
+
+ class PushEventPayload < ActiveRecord::Base
+ self.table_name = 'push_event_payloads'
+
+ enum action: {
+ created: 0,
+ removed: 1,
+ pushed: 2
+ }
+
+ enum ref_type: {
+ branch: 0,
+ tag: 1
+ }
+ end
+
+ # start_id - The start ID of the range of events to process
+ # end_id - The end ID of the range to process.
+ def perform(start_id, end_id)
+ return unless migrate?
+
+ find_events(start_id, end_id).each { |event| process_event(event) }
+ end
+
+ def process_event(event)
+ replicate_event(event)
+ create_push_event_payload(event) if event.push_event?
+ end
+
+ def replicate_event(event)
+ new_attributes = event.attributes
+ .with_indifferent_access.except(:title, :data)
+
+ EventForMigration.create!(new_attributes)
+ rescue ActiveRecord::InvalidForeignKey
+ # A foreign key error means the associated event was removed. In this
+ # case we'll just skip migrating the event.
+ end
+
+ def create_push_event_payload(event)
+ commit_from = pack(event.commit_from_sha)
+ commit_to = pack(event.commit_to_sha)
+
+ PushEventPayload.create!(
+ event_id: event.id,
+ commit_count: event.commit_count,
+ ref_type: event.ref_type,
+ action: event.push_action,
+ commit_from: commit_from,
+ commit_to: commit_to,
+ ref: event.trimmed_ref_name,
+ commit_title: event.commit_title
+ )
+ rescue ActiveRecord::InvalidForeignKey
+ # A foreign key error means the associated event was removed. In this
+ # case we'll just skip migrating the event.
+ end
+
+ def find_events(start_id, end_id)
+ Event
+ .where('NOT EXISTS (SELECT true FROM events_for_migration WHERE events_for_migration.id = events.id)')
+ .where(id: start_id..end_id)
+ end
+
+ def migrate?
+ Event.table_exists? && PushEventPayload.table_exists? &&
+ EventForMigration.table_exists?
+ end
+
+ def pack(value)
+ value ? [value].pack('H*') : nil
+ end
+ end
+ end
+end
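For context, a sketch of how this migration could be invoked; the ID range is invented and the scheduling post-deployment migration is not part of this hunk:

```ruby
# Run a range synchronously (e.g. from a Rails console):
Gitlab::BackgroundMigration::MigrateEventsToPushEventPayloads.new.perform(1, 10_000)

# Or schedule it through the generic background migration worker:
BackgroundMigrationWorker.perform_async('MigrateEventsToPushEventPayloads', [1, 10_000])
```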
diff --git a/lib/gitlab/background_migration/move_personal_snippet_files.rb b/lib/gitlab/background_migration/move_personal_snippet_files.rb
new file mode 100644
index 00000000000..07cec96bcc3
--- /dev/null
+++ b/lib/gitlab/background_migration/move_personal_snippet_files.rb
@@ -0,0 +1,79 @@
+module Gitlab
+ module BackgroundMigration
+ class MovePersonalSnippetFiles
+ delegate :select_all, :execute, :quote_string, to: :connection
+
+ def perform(relative_source, relative_destination)
+ @source_relative_location = relative_source
+ @destination_relative_location = relative_destination
+
+ move_personal_snippet_files
+ end
+
+ def move_personal_snippet_files
+ query = "SELECT uploads.path, uploads.model_id FROM uploads "\
+ "INNER JOIN snippets ON snippets.id = uploads.model_id WHERE uploader = 'PersonalFileUploader'"
+ select_all(query).each do |upload|
+ secret = upload['path'].split('/')[0]
+ file_name = upload['path'].split('/')[1]
+
+ move_file(upload['model_id'], secret, file_name)
+ update_markdown(upload['model_id'], secret, file_name)
+ end
+ end
+
+ def move_file(snippet_id, secret, file_name)
+ source_dir = File.join(base_directory, @source_relative_location, snippet_id.to_s, secret)
+ destination_dir = File.join(base_directory, @destination_relative_location, snippet_id.to_s, secret)
+
+ source_file_path = File.join(source_dir, file_name)
+ destination_file_path = File.join(destination_dir, file_name)
+
+ unless File.exist?(source_file_path)
+ say "Source file `#{source_file_path}` doesn't exist. Skipping."
+ return
+ end
+
+ say "Moving file #{source_file_path} -> #{destination_file_path}"
+
+ FileUtils.mkdir_p(destination_dir)
+ FileUtils.move(source_file_path, destination_file_path)
+ end
+
+ def update_markdown(snippet_id, secret, file_name)
+ source_markdown_path = File.join(@source_relative_location, snippet_id.to_s, secret, file_name)
+ destination_markdown_path = File.join(@destination_relative_location, snippet_id.to_s, secret, file_name)
+
+ source_markdown = "](#{source_markdown_path})"
+ destination_markdown = "](#{destination_markdown_path})"
+ quoted_source = quote_string(source_markdown)
+ quoted_destination = quote_string(destination_markdown)
+
+ execute("UPDATE snippets "\
+ "SET description = replace(snippets.description, '#{quoted_source}', '#{quoted_destination}'), description_html = NULL "\
+ "WHERE id = #{snippet_id}")
+
+ query = "SELECT id, note FROM notes WHERE noteable_id = #{snippet_id} "\
+ "AND noteable_type = 'Snippet' AND note IS NOT NULL"
+ select_all(query).each do |note|
+ text = note['note'].gsub(source_markdown, destination_markdown)
+ quoted_text = quote_string(text)
+
+ execute("UPDATE notes SET note = '#{quoted_text}', note_html = NULL WHERE id = #{note['id']}")
+ end
+ end
+
+ def base_directory
+ File.join(Rails.root, 'public')
+ end
+
+ def connection
+ ActiveRecord::Base.connection
+ end
+
+ def say(message)
+ Rails.logger.debug(message)
+ end
+ end
+ end
+end
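A usage sketch, matching the relative paths exercised in the spec further down in this diff (both paths are relative to `Rails.root/public`):

```ruby
old_dir = File.join('uploads', 'system', 'personal_snippet')
new_dir = File.join('uploads', '-', 'system', 'personal_snippet')

Gitlab::BackgroundMigration::MovePersonalSnippetFiles.new.perform(old_dir, new_dir)
```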
diff --git a/lib/gitlab/checks/force_push.rb b/lib/gitlab/checks/force_push.rb
index 1e73f89158d..714464fd5e7 100644
--- a/lib/gitlab/checks/force_push.rb
+++ b/lib/gitlab/checks/force_push.rb
@@ -5,12 +5,19 @@ module Gitlab
return false if project.empty_repo?
# Created or deleted branch
- if Gitlab::Git.blank_ref?(oldrev) || Gitlab::Git.blank_ref?(newrev)
- false
- else
- Gitlab::Git::RevList.new(
- path_to_repo: project.repository.path_to_repo,
- oldrev: oldrev, newrev: newrev).missed_ref.present?
+ return false if Gitlab::Git.blank_ref?(oldrev) || Gitlab::Git.blank_ref?(newrev)
+
+ GitalyClient.migrate(:force_push) do |is_enabled|
+ if is_enabled
+ !project
+ .repository
+ .gitaly_commit_client
+ .is_ancestor(oldrev, newrev)
+ else
+ Gitlab::Git::RevList.new(
+ path_to_repo: project.repository.path_to_repo,
+ oldrev: oldrev, newrev: newrev).missed_ref.present?
+ end
end
end
end
diff --git a/lib/gitlab/database.rb b/lib/gitlab/database.rb
index d7dab584a44..e001d25e7b7 100644
--- a/lib/gitlab/database.rb
+++ b/lib/gitlab/database.rb
@@ -25,6 +25,10 @@ module Gitlab
database_version.match(/\A(?:PostgreSQL |)([^\s]+).*\z/)[1]
end
+ def self.join_lateral_supported?
+ postgresql? && version.to_f >= 9.3
+ end
+
def self.nulls_last_order(field, direction = 'ASC')
order = "#{field} #{direction}"
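A sketch of how a caller might branch on this predicate; the method and symbol names below are assumptions, not code from this diff:

```ruby
# LATERAL JOINs are only available on PostgreSQL 9.3 and newer, so queries
# optimized for them need a fallback path elsewhere (e.g. MySQL).
def dashboard_events_strategy
  if Gitlab::Database.join_lateral_supported?
    :lateral_join   # one LATERAL JOIN limiting events per project
  else
    :union          # plain UNION of per-project queries
  end
end
```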
diff --git a/lib/gitlab/gitaly_client/util.rb b/lib/gitlab/gitaly_client/util.rb
index f5a4c5493ef..8fc937496af 100644
--- a/lib/gitlab/gitaly_client/util.rb
+++ b/lib/gitlab/gitaly_client/util.rb
@@ -5,7 +5,9 @@ module Gitlab
def repository(repository_storage, relative_path)
Gitaly::Repository.new(
storage_name: repository_storage,
- relative_path: relative_path
+ relative_path: relative_path,
+ git_object_directory: Gitlab::Git::Env['GIT_OBJECT_DIRECTORY'].to_s,
+ git_alternate_object_directories: Array.wrap(Gitlab::Git::Env['GIT_ALTERNATE_OBJECT_DIRECTORIES'])
)
end
end
diff --git a/lib/gitlab/import_export/import_export.yml b/lib/gitlab/import_export/import_export.yml
index c5c05bfe2fb..9d9ebcb389a 100644
--- a/lib/gitlab/import_export/import_export.yml
+++ b/lib/gitlab/import_export/import_export.yml
@@ -3,18 +3,22 @@ project_tree:
- labels:
:priorities
- milestones:
- - :events
+ - events:
+ - :push_event_payload
- issues:
- - :events
+ - events:
+ - :push_event_payload
- :timelogs
- notes:
- :author
- - :events
+ - events:
+ - :push_event_payload
- label_links:
- label:
:priorities
- milestone:
- - :events
+ - events:
+ - :push_event_payload
- snippets:
- :award_emoji
- notes:
@@ -25,21 +29,25 @@ project_tree:
- merge_requests:
- notes:
- :author
- - :events
+ - events:
+ - :push_event_payload
- merge_request_diff:
- :merge_request_diff_commits
- :merge_request_diff_files
- - :events
+ - events:
+ - :push_event_payload
- :timelogs
- label_links:
- label:
:priorities
- milestone:
- - :events
+ - events:
+ - :push_event_payload
- pipelines:
- notes:
- :author
- - :events
+ - events:
+ - :push_event_payload
- :stages
- :statuses
- :triggers
@@ -107,6 +115,8 @@ excluded_attributes:
statuses:
- :trace
- :token
+ push_event_payload:
+ - :event_id
methods:
labels:
diff --git a/lib/gitlab/middleware/webpack_proxy.rb b/lib/gitlab/middleware/webpack_proxy.rb
index 6105d165810..6aecf63231f 100644
--- a/lib/gitlab/middleware/webpack_proxy.rb
+++ b/lib/gitlab/middleware/webpack_proxy.rb
@@ -1,6 +1,7 @@
# This Rack middleware is intended to proxy the webpack assets directory to the
# webpack-dev-server. It is only intended for use in development.
+# :nocov:
module Gitlab
module Middleware
class WebpackProxy < Rack::Proxy
@@ -22,3 +23,4 @@ module Gitlab
end
end
end
+# :nocov:
diff --git a/lib/gitlab/o_auth/session.rb b/lib/gitlab/o_auth/session.rb
index f33bfd0bd0e..30739f2a2c5 100644
--- a/lib/gitlab/o_auth/session.rb
+++ b/lib/gitlab/o_auth/session.rb
@@ -1,3 +1,4 @@
+# :nocov:
module Gitlab
module OAuth
module Session
@@ -15,3 +16,4 @@ module Gitlab
end
end
end
+# :nocov:
diff --git a/lib/gitlab/seeder.rb b/lib/gitlab/seeder.rb
index 823f697f51c..f9ab9bd466f 100644
--- a/lib/gitlab/seeder.rb
+++ b/lib/gitlab/seeder.rb
@@ -1,3 +1,4 @@
+# :nocov:
module DeliverNever
def deliver_later
self
@@ -21,3 +22,4 @@ module Gitlab
end
end
end
+# :nocov:
diff --git a/lib/rspec_flaky/example.rb b/lib/rspec_flaky/example.rb
new file mode 100644
index 00000000000..b6e790cbbab
--- /dev/null
+++ b/lib/rspec_flaky/example.rb
@@ -0,0 +1,46 @@
+module RspecFlaky
+ # This is a wrapper class for RSpec::Core::Example
+ class Example
+ delegate :status, :exception, to: :execution_result
+
+ def initialize(rspec_example)
+ @rspec_example = rspec_example.try(:example) || rspec_example
+ end
+
+ def uid
+ @uid ||= Digest::MD5.hexdigest("#{description}-#{file}")
+ end
+
+ def example_id
+ rspec_example.id
+ end
+
+ def file
+ metadata[:file_path]
+ end
+
+ def line
+ metadata[:line_number]
+ end
+
+ def description
+ metadata[:full_description]
+ end
+
+ def attempts
+ rspec_example.try(:attempts) || 1
+ end
+
+ private
+
+ attr_reader :rspec_example
+
+ def metadata
+ rspec_example.metadata
+ end
+
+ def execution_result
+ rspec_example.execution_result
+ end
+ end
+end
diff --git a/lib/rspec_flaky/flaky_example.rb b/lib/rspec_flaky/flaky_example.rb
new file mode 100644
index 00000000000..f81fb90e870
--- /dev/null
+++ b/lib/rspec_flaky/flaky_example.rb
@@ -0,0 +1,39 @@
+module RspecFlaky
+ # This represents a flaky RSpec example and is mainly meant to be saved in a JSON file
+ class FlakyExample < OpenStruct
+ def initialize(example)
+ if example.respond_to?(:example_id)
+ super(
+ example_id: example.example_id,
+ file: example.file,
+ line: example.line,
+ description: example.description,
+ last_attempts_count: example.attempts,
+ flaky_reports: 1)
+ else
+ super
+ end
+ end
+
+ def first_flaky_at
+ self[:first_flaky_at] || Time.now
+ end
+
+ def last_flaky_at
+ Time.now
+ end
+
+ def last_flaky_job
+ return unless ENV['CI_PROJECT_URL'] && ENV['CI_JOB_ID']
+
+ "#{ENV['CI_PROJECT_URL']}/-/jobs/#{ENV['CI_JOB_ID']}"
+ end
+
+ def to_h
+ super.merge(
+ first_flaky_at: first_flaky_at,
+ last_flaky_at: last_flaky_at,
+ last_flaky_job: last_flaky_job)
+ end
+ end
+end
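A sketch of how a single flaky example ends up in the JSON report; the RSpec `notification` object and the literal values are illustrative:

```ruby
current = RspecFlaky::Example.new(notification.example)
flaky   = RspecFlaky::FlakyExample.new(current)

flaky.to_h
# => { example_id: './spec/foo_spec.rb[1:1]',
#      file: './spec/foo_spec.rb', line: 12, description: 'Foo does bar',
#      last_attempts_count: 2, flaky_reports: 1,
#      first_flaky_at: ..., last_flaky_at: ..., last_flaky_job: nil }
```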
diff --git a/lib/rspec_flaky/listener.rb b/lib/rspec_flaky/listener.rb
new file mode 100644
index 00000000000..ec2fbd9e36c
--- /dev/null
+++ b/lib/rspec_flaky/listener.rb
@@ -0,0 +1,75 @@
+require 'json'
+
+module RspecFlaky
+ class Listener
+ attr_reader :all_flaky_examples, :new_flaky_examples
+
+ def initialize
+ @new_flaky_examples = {}
+ @all_flaky_examples = init_all_flaky_examples
+ end
+
+ def example_passed(notification)
+ current_example = RspecFlaky::Example.new(notification.example)
+
+ return unless current_example.attempts > 1
+
+ flaky_example_hash = all_flaky_examples[current_example.uid]
+
+ all_flaky_examples[current_example.uid] =
+ if flaky_example_hash
+ FlakyExample.new(flaky_example_hash).tap do |ex|
+ ex.last_attempts_count = current_example.attempts
+ ex.flaky_reports += 1
+ end
+ else
+ FlakyExample.new(current_example).tap do |ex|
+ new_flaky_examples[current_example.uid] = ex
+ end
+ end
+ end
+
+ def dump_summary(_)
+ write_report_file(all_flaky_examples, all_flaky_examples_report_path)
+
+ if new_flaky_examples.any?
+ Rails.logger.warn "\nNew flaky examples detected:\n"
+ Rails.logger.warn JSON.pretty_generate(to_report(new_flaky_examples))
+
+ write_report_file(new_flaky_examples, new_flaky_examples_report_path)
+ end
+ end
+
+ def to_report(examples)
+ Hash[examples.map { |k, ex| [k, ex.to_h] }]
+ end
+
+ private
+
+ def init_all_flaky_examples
+ return {} unless File.exist?(all_flaky_examples_report_path)
+
+ all_flaky_examples = JSON.parse(File.read(all_flaky_examples_report_path))
+
+ Hash[(all_flaky_examples || {}).map { |k, ex| [k, FlakyExample.new(ex)] }]
+ end
+
+ def write_report_file(examples, file_path)
+ return unless ENV['FLAKY_RSPEC_GENERATE_REPORT'] == 'true'
+
+ report_path_dir = File.dirname(file_path)
+ FileUtils.mkdir_p(report_path_dir) unless Dir.exist?(report_path_dir)
+ File.write(file_path, JSON.pretty_generate(to_report(examples)))
+ end
+
+ def all_flaky_examples_report_path
+ @all_flaky_examples_report_path ||= ENV['ALL_FLAKY_RSPEC_REPORT_PATH'] ||
+ Rails.root.join("rspec_flaky/all-report.json")
+ end
+
+ def new_flaky_examples_report_path
+ @new_flaky_examples_report_path ||= ENV['NEW_FLAKY_RSPEC_REPORT_PATH'] ||
+ Rails.root.join("rspec_flaky/new-report.json")
+ end
+ end
+end
diff --git a/scripts/detect-new-flaky-examples b/scripts/detect-new-flaky-examples
new file mode 100755
index 00000000000..3bee4f9a34b
--- /dev/null
+++ b/scripts/detect-new-flaky-examples
@@ -0,0 +1,21 @@
+#!/usr/bin/env ruby
+
+require 'json'
+
+report_file = ARGV.shift
+unless report_file
+ puts 'usage: detect-new-flaky-examples <report-file>'
+ exit 1
+end
+
+puts "Loading #{report_file}..."
+report = JSON.parse(File.read(report_file))
+
+if report.any?
+ puts "New flaky examples were detected!\n"
+ puts JSON.pretty_generate(report)
+ exit 1
+else
+ puts "No new flaky examples detected.\n"
+ exit 0
+end
diff --git a/scripts/merge-reports b/scripts/merge-reports
index aad76bcc327..3a421f1f1fc 100755
--- a/scripts/merge-reports
+++ b/scripts/merge-reports
@@ -4,7 +4,7 @@ require 'json'
main_report_file = ARGV.shift
unless main_report_file
- puts 'usage: merge_reports <main-report> [extra reports...]'
+ puts 'usage: merge-reports <main-report> [extra reports...]'
exit 1
end
diff --git a/spec/controllers/admin/projects_controller_spec.rb b/spec/controllers/admin/projects_controller_spec.rb
index 65587064eb1..373260b3978 100644
--- a/spec/controllers/admin/projects_controller_spec.rb
+++ b/spec/controllers/admin/projects_controller_spec.rb
@@ -12,12 +12,24 @@ describe Admin::ProjectsController do
it 'retrieves the project for the given visibility level' do
get :index, visibility_level: [Gitlab::VisibilityLevel::PUBLIC]
+
expect(response.body).to match(project.name)
end
it 'does not retrieve the project' do
get :index, visibility_level: [Gitlab::VisibilityLevel::INTERNAL]
+
expect(response.body).not_to match(project.name)
end
+
+ it 'does not respond with projects pending deletion' do
+ pending_delete_project = create(:project, pending_delete: true)
+
+ get :index
+
+ expect(response).to have_http_status(200)
+ expect(response.body).not_to match(pending_delete_project.name)
+ expect(response.body).to match(project.name)
+ end
end
end
diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb
index 8ecd8b6ca71..c0e48046937 100644
--- a/spec/controllers/projects_controller_spec.rb
+++ b/spec/controllers/projects_controller_spec.rb
@@ -578,6 +578,118 @@ describe ProjectsController do
end
end
+ describe '#export' do
+ before do
+ sign_in(user)
+
+ project.add_master(user)
+ end
+
+ context 'when project export is enabled' do
+ it 'returns 302' do
+ get :export, namespace_id: project.namespace, id: project
+
+ expect(response).to have_http_status(302)
+ end
+ end
+
+ context 'when project export is disabled' do
+ before do
+ stub_application_setting(project_export_enabled?: false)
+ end
+
+ it 'returns 404' do
+ get :export, namespace_id: project.namespace, id: project
+
+ expect(response).to have_http_status(404)
+ end
+ end
+ end
+
+ describe '#download_export' do
+ before do
+ sign_in(user)
+
+ project.add_master(user)
+ end
+
+ context 'when project export is enabled' do
+ it 'returns 302' do
+ get :download_export, namespace_id: project.namespace, id: project
+
+ expect(response).to have_http_status(302)
+ end
+ end
+
+ context 'when project export is disabled' do
+ before do
+ stub_application_setting(project_export_enabled?: false)
+ end
+
+ it 'returns 404' do
+ get :download_export, namespace_id: project.namespace, id: project
+
+ expect(response).to have_http_status(404)
+ end
+ end
+ end
+
+ describe '#remove_export' do
+ before do
+ sign_in(user)
+
+ project.add_master(user)
+ end
+
+ context 'when project export is enabled' do
+ it 'returns 302' do
+ post :remove_export, namespace_id: project.namespace, id: project
+
+ expect(response).to have_http_status(302)
+ end
+ end
+
+ context 'when project export is disabled' do
+ before do
+ stub_application_setting(project_export_enabled?: false)
+ end
+
+ it 'returns 404' do
+ post :remove_export, namespace_id: project.namespace, id: project
+
+ expect(response).to have_http_status(404)
+ end
+ end
+ end
+
+ describe '#generate_new_export' do
+ before do
+ sign_in(user)
+
+ project.add_master(user)
+ end
+
+ context 'when project export is enabled' do
+ it 'returns 302' do
+ post :generate_new_export, namespace_id: project.namespace, id: project
+
+ expect(response).to have_http_status(302)
+ end
+ end
+
+ context 'when project export is disabled' do
+ before do
+ stub_application_setting(project_export_enabled?: false)
+ end
+
+ it 'returns 404' do
+ post :generate_new_export, namespace_id: project.namespace, id: project
+
+ expect(response).to have_http_status(404)
+ end
+ end
+ end
+
def project_moved_message(redirect_route, project)
"Project '#{redirect_route.path}' was moved to '#{project.full_path}'. Please update any links and bookmarks that may still have the old path."
end
diff --git a/spec/controllers/snippets_controller_spec.rb b/spec/controllers/snippets_controller_spec.rb
index 475ceda11fe..7c5d059760f 100644
--- a/spec/controllers/snippets_controller_spec.rb
+++ b/spec/controllers/snippets_controller_spec.rb
@@ -186,8 +186,8 @@ describe SnippetsController do
end
context 'when the snippet description contains a file' do
- let(:picture_file) { '/system/temp/secret56/picture.jpg' }
- let(:text_file) { '/system/temp/secret78/text.txt' }
+ let(:picture_file) { '/-/system/temp/secret56/picture.jpg' }
+ let(:text_file) { '/-/system/temp/secret78/text.txt' }
let(:description) do
"Description with picture: ![picture](/uploads#{picture_file}) and "\
"text: [text.txt](/uploads#{text_file})"
@@ -208,8 +208,8 @@ describe SnippetsController do
snippet = subject
expected_description = "Description with picture: "\
- "![picture](/uploads/system/personal_snippet/#{snippet.id}/secret56/picture.jpg) and "\
- "text: [text.txt](/uploads/system/personal_snippet/#{snippet.id}/secret78/text.txt)"
+ "![picture](/uploads/-/system/personal_snippet/#{snippet.id}/secret56/picture.jpg) and "\
+ "text: [text.txt](/uploads/-/system/personal_snippet/#{snippet.id}/secret78/text.txt)"
expect(snippet.description).to eq(expected_description)
end
diff --git a/spec/controllers/uploads_controller_spec.rb b/spec/controllers/uploads_controller_spec.rb
index b3a40f5d15c..b29f3d861be 100644
--- a/spec/controllers/uploads_controller_spec.rb
+++ b/spec/controllers/uploads_controller_spec.rb
@@ -102,7 +102,7 @@ describe UploadsController do
subject
expect(response.body).to match '\"alt\":\"rails_sample\"'
- expect(response.body).to match "\"url\":\"/uploads/system/temp"
+ expect(response.body).to match "\"url\":\"/uploads/-/system/temp"
end
it 'does not create an Upload record' do
@@ -119,7 +119,7 @@ describe UploadsController do
subject
expect(response.body).to match '\"alt\":\"doc_sample.txt\"'
- expect(response.body).to match "\"url\":\"/uploads/system/temp"
+ expect(response.body).to match "\"url\":\"/uploads/-/system/temp"
end
it 'does not create an Upload record' do
diff --git a/spec/controllers/users_controller_spec.rb b/spec/controllers/users_controller_spec.rb
index a64ad73cba8..2cecd2646fc 100644
--- a/spec/controllers/users_controller_spec.rb
+++ b/spec/controllers/users_controller_spec.rb
@@ -92,8 +92,14 @@ describe UsersController do
before do
sign_in(user)
project.team << [user, :developer]
- EventCreateService.new.push(project, user, [])
- EventCreateService.new.push(forked_project, user, [])
+
+ push_data = Gitlab::DataBuilder::Push.build_sample(project, user)
+
+ fork_push_data = Gitlab::DataBuilder::Push
+ .build_sample(forked_project, user)
+
+ EventCreateService.new.push(project, user, push_data)
+ EventCreateService.new.push(forked_project, user, fork_push_data)
end
it 'includes forked projects' do
diff --git a/spec/factories/events.rb b/spec/factories/events.rb
index 11d2016955c..ad9f7e2caef 100644
--- a/spec/factories/events.rb
+++ b/spec/factories/events.rb
@@ -2,6 +2,7 @@ FactoryGirl.define do
factory :event do
project
author factory: :user
+ action Event::JOINED
trait(:created) { action Event::CREATED }
trait(:updated) { action Event::UPDATED }
@@ -20,4 +21,19 @@ FactoryGirl.define do
target factory: :closed_issue
end
end
+
+ factory :push_event, class: PushEvent do
+ project factory: :project_empty_repo
+ author factory: :user
+ action Event::PUSHED
+ end
+
+ factory :push_event_payload do
+ event
+ commit_count 1
+ action :pushed
+ ref_type :branch
+ ref 'master'
+ commit_to '3cdce97ed87c91368561584e7358f4d46e3e173c'
+ end
end
diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb
index c9591a7d854..5db42175c15 100644
--- a/spec/features/admin/admin_settings_spec.rb
+++ b/spec/features/admin/admin_settings_spec.rb
@@ -35,6 +35,7 @@ feature 'Admin updates settings' do
fill_in 'Help page text', with: 'Example text'
check 'Hide marketing-related entries from help'
fill_in 'Support page URL', with: 'http://example.com/help'
+ uncheck 'Project export enabled'
click_button 'Save'
expect(current_application_settings.gravatar_enabled).to be_falsey
@@ -42,6 +43,7 @@ feature 'Admin updates settings' do
expect(current_application_settings.help_page_text).to eq "Example text"
expect(current_application_settings.help_page_hide_commercial_content).to be_truthy
expect(current_application_settings.help_page_support_url).to eq "http://example.com/help"
+ expect(current_application_settings.project_export_enabled).to be_falsey
expect(page).to have_content "Application settings saved successfully"
end
diff --git a/spec/features/boards/sidebar_spec.rb b/spec/features/boards/sidebar_spec.rb
index 8d3d4ff8773..c3bf50ef9d1 100644
--- a/spec/features/boards/sidebar_spec.rb
+++ b/spec/features/boards/sidebar_spec.rb
@@ -15,10 +15,12 @@ describe 'Issue Boards', js: true do
let!(:list) { create(:list, board: board, label: development, position: 0) }
let(:card) { find('.board:nth-child(2)').first('.card') }
- before do
- Timecop.freeze
+ around do |example|
+ Timecop.freeze { example.run }
+ end
- project.team << [user, :master]
+ before do
+ project.add_master(user)
sign_in(user)
@@ -26,10 +28,6 @@ describe 'Issue Boards', js: true do
wait_for_requests
end
- after do
- Timecop.return
- end
-
it 'shows sidebar when clicking issue' do
click_card(card)
diff --git a/spec/features/calendar_spec.rb b/spec/features/calendar_spec.rb
index 64fbc80cb81..9a597a2d690 100644
--- a/spec/features/calendar_spec.rb
+++ b/spec/features/calendar_spec.rb
@@ -42,14 +42,14 @@ feature 'Contributions Calendar', :js do
end
def push_code_contribution
- push_params = {
- project: contributed_project,
- action: Event::PUSHED,
- author_id: user.id,
- data: { commit_count: 3 }
- }
-
- Event.create(push_params)
+ event = create(:push_event, project: contributed_project, author: user)
+
+ create(:push_event_payload,
+ event: event,
+ commit_from: '11f9ac0a48b62cef25eedede4c1819964f08d5ce',
+ commit_to: '1cf19a015df3523caf0a1f9d40c98a267d6a2fc2',
+ commit_count: 3,
+ ref: 'master')
end
def note_comment_contribution
diff --git a/spec/features/dashboard/activity_spec.rb b/spec/features/dashboard/activity_spec.rb
index 4917dfcf1d1..582868bac1e 100644
--- a/spec/features/dashboard/activity_spec.rb
+++ b/spec/features/dashboard/activity_spec.rb
@@ -23,27 +23,19 @@ feature 'Dashboard > Activity' do
create(:merge_request, author: user, source_project: project, target_project: project)
end
- let(:push_event_data) do
- {
- before: Gitlab::Git::BLANK_SHA,
- after: '0220c11b9a3e6c69dc8fd35321254ca9a7b98f7e',
- ref: 'refs/heads/new_design',
- user_id: user.id,
- user_name: user.name,
- repository: {
- name: project.name,
- url: 'localhost/rubinius',
- description: '',
- homepage: 'localhost/rubinius',
- private: true
- }
- }
- end
-
let(:note) { create(:note, project: project, noteable: merge_request) }
let!(:push_event) do
- create(:event, :pushed, data: push_event_data, project: project, author: user)
+ event = create(:push_event, project: project, author: user)
+
+ create(:push_event_payload,
+ event: event,
+ action: :created,
+ commit_to: '0220c11b9a3e6c69dc8fd35321254ca9a7b98f7e',
+ ref: 'new_design',
+ commit_count: 1)
+
+ event
end
let!(:merged_event) do
diff --git a/spec/features/groups/milestone_spec.rb b/spec/features/groups/milestone_spec.rb
index 574bbe0e0e1..32b3e13c624 100644
--- a/spec/features/groups/milestone_spec.rb
+++ b/spec/features/groups/milestone_spec.rb
@@ -5,14 +5,12 @@ feature 'Group milestones', :js do
let!(:project) { create(:project_empty_repo, group: group) }
let(:user) { create(:group_member, :master, user: create(:user), group: group ).user }
- before do
- Timecop.freeze
-
- sign_in(user)
+ around do |example|
+ Timecop.freeze { example.run }
end
- after do
- Timecop.return
+ before do
+ sign_in(user)
end
context 'create a milestone' do
diff --git a/spec/features/issues_spec.rb b/spec/features/issues_spec.rb
index a5bb642221c..3c8e37ff920 100644
--- a/spec/features/issues_spec.rb
+++ b/spec/features/issues_spec.rb
@@ -729,7 +729,6 @@ describe 'Issues' do
visit project_issue_path(project, issue)
expect(page).not_to have_css('.is-confidential')
- expect(page).to have_css('.is-not-confidential')
end
end
end
diff --git a/spec/features/projects/user_edits_files_spec.rb b/spec/features/projects/user_edits_files_spec.rb
index 8c9fc8821e6..3129aad8473 100644
--- a/spec/features/projects/user_edits_files_spec.rb
+++ b/spec/features/projects/user_edits_files_spec.rb
@@ -20,6 +20,9 @@ describe 'User edits files' do
it 'inserts a content of a file', js: true do
click_link('.gitignore')
find('.js-edit-blob').click
+
+ wait_for_requests
+
execute_script("ace.edit('editor').setValue('*.rbca')")
expect(evaluate_script('ace.edit("editor").getValue()')).to eq('*.rbca')
@@ -35,6 +38,9 @@ describe 'User edits files' do
it 'commits an edited file', js: true do
click_link('.gitignore')
find('.js-edit-blob').click
+
+ wait_for_requests
+
execute_script("ace.edit('editor').setValue('*.rbca')")
fill_in(:commit_message, with: 'New commit message', visible: true)
click_button('Commit changes')
@@ -49,6 +55,9 @@ describe 'User edits files' do
it 'commits an edited file to a new branch', js: true do
click_link('.gitignore')
find('.js-edit-blob').click
+
+ wait_for_requests
+
execute_script("ace.edit('editor').setValue('*.rbca')")
fill_in(:commit_message, with: 'New commit message', visible: true)
fill_in(:branch_name, with: 'new_branch_name', visible: true)
@@ -65,6 +74,9 @@ describe 'User edits files' do
it 'shows the diff of an edited file', js: true do
click_link('.gitignore')
find('.js-edit-blob').click
+
+ wait_for_requests
+
execute_script("ace.edit('editor').setValue('*.rbca')")
click_link('Preview changes')
@@ -92,6 +104,8 @@ describe 'User edits files' do
"A fork of this project has been created that you can make changes in, so you can submit a merge request."
)
+ wait_for_requests
+
execute_script("ace.edit('editor').setValue('*.rbca')")
expect(evaluate_script('ace.edit("editor").getValue()')).to eq('*.rbca')
@@ -105,6 +119,9 @@ describe 'User edits files' do
expect(page).to have_button('Cancel')
click_link('Fork')
+
+ wait_for_requests
+
execute_script("ace.edit('editor').setValue('*.rbca')")
fill_in(:commit_message, with: 'New commit message', visible: true)
click_button('Commit changes')
diff --git a/spec/features/snippets/user_creates_snippet_spec.rb b/spec/features/snippets/user_creates_snippet_spec.rb
index a919f5fa20b..d732383a1e1 100644
--- a/spec/features/snippets/user_creates_snippet_spec.rb
+++ b/spec/features/snippets/user_creates_snippet_spec.rb
@@ -41,7 +41,7 @@ feature 'User creates snippet', :js do
expect(page).to have_content('My Snippet')
link = find('a.no-attachment-icon img[alt="banana_sample"]')['src']
- expect(link).to match(%r{/uploads/system/temp/\h{32}/banana_sample\.gif\z})
+ expect(link).to match(%r{/uploads/-/system/temp/\h{32}/banana_sample\.gif\z})
visit(link)
expect(page.status_code).to eq(200)
@@ -59,7 +59,7 @@ feature 'User creates snippet', :js do
wait_for_requests
link = find('a.no-attachment-icon img[alt="banana_sample"]')['src']
- expect(link).to match(%r{/uploads/system/personal_snippet/#{Snippet.last.id}/\h{32}/banana_sample\.gif\z})
+ expect(link).to match(%r{/uploads/-/system/personal_snippet/#{Snippet.last.id}/\h{32}/banana_sample\.gif\z})
visit(link)
expect(page.status_code).to eq(200)
@@ -84,7 +84,7 @@ feature 'User creates snippet', :js do
end
expect(page).to have_content('Hello World!')
link = find('a.no-attachment-icon img[alt="banana_sample"]')['src']
- expect(link).to match(%r{/uploads/system/personal_snippet/#{Snippet.last.id}/\h{32}/banana_sample\.gif\z})
+ expect(link).to match(%r{/uploads/-/system/personal_snippet/#{Snippet.last.id}/\h{32}/banana_sample\.gif\z})
visit(link)
expect(page.status_code).to eq(200)
diff --git a/spec/features/snippets/user_edits_snippet_spec.rb b/spec/features/snippets/user_edits_snippet_spec.rb
index 26070e508e2..71de6b6bd1c 100644
--- a/spec/features/snippets/user_edits_snippet_spec.rb
+++ b/spec/features/snippets/user_edits_snippet_spec.rb
@@ -33,7 +33,7 @@ feature 'User edits snippet', :js do
wait_for_requests
link = find('a.no-attachment-icon img[alt="banana_sample"]')['src']
- expect(link).to match(%r{/uploads/system/personal_snippet/#{snippet.id}/\h{32}/banana_sample\.gif\z})
+ expect(link).to match(%r{/uploads/-/system/personal_snippet/#{snippet.id}/\h{32}/banana_sample\.gif\z})
end
it 'updates the snippet to make it internal' do
diff --git a/spec/finders/admin/projects_finder_spec.rb b/spec/finders/admin/projects_finder_spec.rb
index 4e367d39cf3..28e36330029 100644
--- a/spec/finders/admin/projects_finder_spec.rb
+++ b/spec/finders/admin/projects_finder_spec.rb
@@ -38,6 +38,12 @@ describe Admin::ProjectsFinder do
it { is_expected.to match_array([shared_project, public_project, internal_project, private_project]) }
end
+ context 'with pending delete project' do
+ let!(:pending_delete_project) { create(:project, pending_delete: true) }
+
+ it { is_expected.not_to include(pending_delete_project) }
+ end
+
context 'filter by namespace_id' do
let(:namespace) { create(:namespace) }
let!(:project_in_namespace) { create(:project, namespace: namespace) }
diff --git a/spec/finders/contributed_projects_finder_spec.rb b/spec/finders/contributed_projects_finder_spec.rb
index 2d079ea83b4..60ea98e61c7 100644
--- a/spec/finders/contributed_projects_finder_spec.rb
+++ b/spec/finders/contributed_projects_finder_spec.rb
@@ -14,8 +14,8 @@ describe ContributedProjectsFinder do
private_project.add_developer(current_user)
public_project.add_master(source_user)
- create(:event, :pushed, project: public_project, target: public_project, author: source_user)
- create(:event, :pushed, project: private_project, target: private_project, author: source_user)
+ create(:push_event, project: public_project, author: source_user)
+ create(:push_event, project: private_project, author: source_user)
end
describe 'without a current user' do
diff --git a/spec/lib/event_filter_spec.rb b/spec/lib/event_filter_spec.rb
index b0efcab47fb..87ae6b6cf01 100644
--- a/spec/lib/event_filter_spec.rb
+++ b/spec/lib/event_filter_spec.rb
@@ -5,7 +5,7 @@ describe EventFilter do
let(:source_user) { create(:user) }
let!(:public_project) { create(:project, :public) }
- let!(:push_event) { create(:event, :pushed, project: public_project, target: public_project, author: source_user) }
+ let!(:push_event) { create(:push_event, project: public_project, author: source_user) }
let!(:merged_event) { create(:event, :merged, project: public_project, target: public_project, author: source_user) }
let!(:created_event) { create(:event, :created, project: public_project, target: public_project, author: source_user) }
let!(:updated_event) { create(:event, :updated, project: public_project, target: public_project, author: source_user) }
diff --git a/spec/lib/gitlab/background_migration/migrate_events_to_push_event_payloads_spec.rb b/spec/lib/gitlab/background_migration/migrate_events_to_push_event_payloads_spec.rb
new file mode 100644
index 00000000000..87f45619e7a
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/migrate_events_to_push_event_payloads_spec.rb
@@ -0,0 +1,423 @@
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::MigrateEventsToPushEventPayloads::Event do
+ describe '#commit_title' do
+ it 'returns nil when there are no commits' do
+ expect(described_class.new.commit_title).to be_nil
+ end
+
+ it 'returns nil when there are commits without commit messages' do
+ event = described_class.new
+
+ allow(event).to receive(:commits).and_return([{ id: '123' }])
+
+ expect(event.commit_title).to be_nil
+ end
+
+ it 'returns the commit message when it is less than 70 characters long' do
+ event = described_class.new
+
+ allow(event).to receive(:commits).and_return([{ message: 'Hello world' }])
+
+ expect(event.commit_title).to eq('Hello world')
+ end
+
+ it 'returns the first line of a commit message if multiple lines are present' do
+ event = described_class.new
+
+ allow(event).to receive(:commits).and_return([{ message: "Hello\n\nworld" }])
+
+ expect(event.commit_title).to eq('Hello')
+ end
+
+ it 'truncates the commit to 70 characters when it is too long' do
+ event = described_class.new
+
+ allow(event).to receive(:commits).and_return([{ message: 'a' * 100 }])
+
+ expect(event.commit_title).to eq(('a' * 67) + '...')
+ end
+ end
+
+ describe '#commit_from_sha' do
+ it 'returns nil when pushing to a new ref' do
+ event = described_class.new
+
+ allow(event).to receive(:create?).and_return(true)
+
+ expect(event.commit_from_sha).to be_nil
+ end
+
+ it 'returns the ID of the first commit when pushing to an existing ref' do
+ event = described_class.new
+
+ allow(event).to receive(:create?).and_return(false)
+ allow(event).to receive(:data).and_return(before: '123')
+
+ expect(event.commit_from_sha).to eq('123')
+ end
+ end
+
+ describe '#commit_to_sha' do
+ it 'returns nil when removing an existing ref' do
+ event = described_class.new
+
+ allow(event).to receive(:remove?).and_return(true)
+
+ expect(event.commit_to_sha).to be_nil
+ end
+
+ it 'returns the ID of the last commit when pushing to an existing ref' do
+ event = described_class.new
+
+ allow(event).to receive(:remove?).and_return(false)
+ allow(event).to receive(:data).and_return(after: '123')
+
+ expect(event.commit_to_sha).to eq('123')
+ end
+ end
+
+ describe '#data' do
+ it 'returns the deserialized data' do
+ event = described_class.new(data: { before: '123' })
+
+ expect(event.data).to eq(before: '123')
+ end
+
+ it 'returns an empty hash when no data is present' do
+ event = described_class.new
+
+ expect(event.data).to eq({})
+ end
+ end
+
+ describe '#commits' do
+ it 'returns an Array of commits' do
+ event = described_class.new(data: { commits: [{ id: '123' }] })
+
+ expect(event.commits).to eq([{ id: '123' }])
+ end
+
+ it 'returns an empty array when no data is present' do
+ event = described_class.new
+
+ expect(event.commits).to eq([])
+ end
+ end
+
+ describe '#commit_count' do
+ it 'returns the number of commits' do
+ event = described_class.new(data: { total_commits_count: 2 })
+
+ expect(event.commit_count).to eq(2)
+ end
+
+ it 'returns 0 when no data is present' do
+ event = described_class.new
+
+ expect(event.commit_count).to eq(0)
+ end
+ end
+
+ describe '#ref' do
+ it 'returns the name of the ref' do
+ event = described_class.new(data: { ref: 'refs/heads/master' })
+
+ expect(event.ref).to eq('refs/heads/master')
+ end
+ end
+
+ describe '#trimmed_ref_name' do
+ it 'returns the trimmed ref name for a branch' do
+ event = described_class.new(data: { ref: 'refs/heads/master' })
+
+ expect(event.trimmed_ref_name).to eq('master')
+ end
+
+ it 'returns the trimmed ref name for a tag' do
+ event = described_class.new(data: { ref: 'refs/tags/v1.2' })
+
+ expect(event.trimmed_ref_name).to eq('v1.2')
+ end
+ end
+
+ describe '#create?' do
+ it 'returns true when creating a new ref' do
+ event = described_class.new(data: { before: described_class::BLANK_REF })
+
+ expect(event.create?).to eq(true)
+ end
+
+ it 'returns false when pushing to an existing ref' do
+ event = described_class.new(data: { before: '123' })
+
+ expect(event.create?).to eq(false)
+ end
+ end
+
+ describe '#remove?' do
+ it 'returns true when removing an existing ref' do
+ event = described_class.new(data: { after: described_class::BLANK_REF })
+
+ expect(event.remove?).to eq(true)
+ end
+
+ it 'returns false when pushing to an existing ref' do
+ event = described_class.new(data: { after: '123' })
+
+ expect(event.remove?).to eq(false)
+ end
+ end
+
+ describe '#push_action' do
+ let(:event) { described_class.new }
+
+ it 'returns :created when creating a new ref' do
+ allow(event).to receive(:create?).and_return(true)
+
+ expect(event.push_action).to eq(:created)
+ end
+
+ it 'returns :removed when removing an existing ref' do
+ allow(event).to receive(:create?).and_return(false)
+ allow(event).to receive(:remove?).and_return(true)
+
+ expect(event.push_action).to eq(:removed)
+ end
+
+ it 'returns :pushed when pushing to an existing ref' do
+ allow(event).to receive(:create?).and_return(false)
+ allow(event).to receive(:remove?).and_return(false)
+
+ expect(event.push_action).to eq(:pushed)
+ end
+ end
+
+ describe '#ref_type' do
+ let(:event) { described_class.new }
+
+ it 'returns :tag for a tag' do
+ allow(event).to receive(:ref).and_return('refs/tags/1.2')
+
+ expect(event.ref_type).to eq(:tag)
+ end
+
+ it 'returns :branch for a branch' do
+ allow(event).to receive(:ref).and_return('refs/heads/1.2')
+
+ expect(event.ref_type).to eq(:branch)
+ end
+ end
+end
+
+describe Gitlab::BackgroundMigration::MigrateEventsToPushEventPayloads do
+ let(:migration) { described_class.new }
+ let(:project) { create(:project_empty_repo) }
+ let(:author) { create(:user) }
+
+ # We can not rely on FactoryGirl as the state of Event may change in ways that
+ # the background migration does not expect, hence we use the Event class of
+ # the migration itself.
+ def create_push_event(project, author, data = nil)
+ klass = Gitlab::BackgroundMigration::MigrateEventsToPushEventPayloads::Event
+
+ klass.create!(
+ action: klass::PUSHED,
+ project_id: project.id,
+ author_id: author.id,
+ data: data
+ )
+ end
+
+ # The background migration relies on a temporary table, hence we're migrating
+ # to a specific version of the database where said table is still present.
+ before :all do
+ ActiveRecord::Migration.verbose = false
+
+ ActiveRecord::Migrator
+ .migrate(ActiveRecord::Migrator.migrations_paths, 20170608152748)
+ end
+
+ after :all do
+ ActiveRecord::Migrator.migrate(ActiveRecord::Migrator.migrations_paths)
+
+ ActiveRecord::Migration.verbose = true
+ end
+
+ describe '#perform' do
+ it 'returns if data should not be migrated' do
+ allow(migration).to receive(:migrate?).and_return(false)
+
+ expect(migration).not_to receive(:find_events)
+
+ migration.perform(1, 10)
+ end
+
+ it 'migrates the range of events if data is to be migrated' do
+ event1 = create_push_event(project, author, { commits: [] })
+ event2 = create_push_event(project, author, { commits: [] })
+
+ allow(migration).to receive(:migrate?).and_return(true)
+
+ expect(migration).to receive(:process_event).twice
+
+ migration.perform(event1.id, event2.id)
+ end
+ end
+
+ describe '#process_event' do
+ it 'processes a regular event' do
+ event = double(:event, push_event?: false)
+
+ expect(migration).to receive(:replicate_event)
+ expect(migration).not_to receive(:create_push_event_payload)
+
+ migration.process_event(event)
+ end
+
+ it 'processes a push event' do
+ event = double(:event, push_event?: true)
+
+ expect(migration).to receive(:replicate_event)
+ expect(migration).to receive(:create_push_event_payload)
+
+ migration.process_event(event)
+ end
+ end
+
+ describe '#replicate_event' do
+ it 'replicates the event to the "events_for_migration" table' do
+ event = create_push_event(
+ project,
+ author,
+ data: { commits: [] },
+ title: 'bla'
+ )
+
+ attributes = event
+ .attributes.with_indifferent_access.except(:title, :data)
+
+ expect(described_class::EventForMigration)
+ .to receive(:create!)
+ .with(attributes)
+
+ migration.replicate_event(event)
+ end
+ end
+
+ describe '#create_push_event_payload' do
+ let(:push_data) do
+ {
+ commits: [],
+ ref: 'refs/heads/master',
+ before: '156e0e9adc587a383a7eeb5b21ddecb9044768a8',
+ after: '0' * 40,
+ total_commits_count: 1
+ }
+ end
+
+ let(:event) do
+ create_push_event(project, author, push_data)
+ end
+
+ before do
+ # The foreign key in push_event_payloads at this point points to the
+ # "events_for_migration" table so we need to make sure a row exists in
+ # said table.
+ migration.replicate_event(event)
+ end
+
+ it 'creates a push event payload for an event' do
+ payload = migration.create_push_event_payload(event)
+
+ expect(PushEventPayload.count).to eq(1)
+ expect(payload.valid?).to eq(true)
+ end
+
+ it 'does not create push event payloads for removed events' do
+ allow(event).to receive(:id).and_return(-1)
+
+ payload = migration.create_push_event_payload(event)
+
+ expect(payload).to be_nil
+ expect(PushEventPayload.count).to eq(0)
+ end
+
+ it 'encodes and decodes the commit IDs from and to binary data' do
+ payload = migration.create_push_event_payload(event)
+ packed = migration.pack(push_data[:before])
+
+ expect(payload.commit_from).to eq(packed)
+ expect(payload.commit_to).to be_nil
+ end
+ end
+
+ describe '#find_events' do
+ it 'returns the events for the given ID range' do
+ event1 = create_push_event(project, author, { commits: [] })
+ event2 = create_push_event(project, author, { commits: [] })
+ event3 = create_push_event(project, author, { commits: [] })
+ events = migration.find_events(event1.id, event2.id)
+
+ expect(events.length).to eq(2)
+ expect(events.pluck(:id)).not_to include(event3.id)
+ end
+ end
+
+ describe '#migrate?' do
+ it 'returns true when data should be migrated' do
+ allow(described_class::Event)
+ .to receive(:table_exists?).and_return(true)
+
+ allow(described_class::PushEventPayload)
+ .to receive(:table_exists?).and_return(true)
+
+ allow(described_class::EventForMigration)
+ .to receive(:table_exists?).and_return(true)
+
+ expect(migration.migrate?).to eq(true)
+ end
+
+ it 'returns false if the "events" table does not exist' do
+ allow(described_class::Event)
+ .to receive(:table_exists?).and_return(false)
+
+ expect(migration.migrate?).to eq(false)
+ end
+
+ it 'returns false if the "push_event_payloads" table does not exist' do
+ allow(described_class::Event)
+ .to receive(:table_exists?).and_return(true)
+
+ allow(described_class::PushEventPayload)
+ .to receive(:table_exists?).and_return(false)
+
+ expect(migration.migrate?).to eq(false)
+ end
+
+ it 'returns false when the "events_for_migration" table does not exist' do
+ allow(described_class::Event)
+ .to receive(:table_exists?).and_return(true)
+
+ allow(described_class::PushEventPayload)
+ .to receive(:table_exists?).and_return(true)
+
+ allow(described_class::EventForMigration)
+ .to receive(:table_exists?).and_return(false)
+
+ expect(migration.migrate?).to eq(false)
+ end
+ end
+
+ describe '#pack' do
+ it 'packs a SHA1 into a 20 byte binary string' do
+ packed = migration.pack('156e0e9adc587a383a7eeb5b21ddecb9044768a8')
+
+ expect(packed.bytesize).to eq(20)
+ end
+
+ it 'returns nil if the input value is nil' do
+ expect(migration.pack(nil)).to be_nil
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/move_personal_snippet_files_spec.rb b/spec/lib/gitlab/background_migration/move_personal_snippet_files_spec.rb
new file mode 100644
index 00000000000..ee60e498b59
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/move_personal_snippet_files_spec.rb
@@ -0,0 +1,72 @@
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::MovePersonalSnippetFiles do
+ let(:test_dir) { File.join(Rails.root, 'tmp', 'tests', 'move_snippet_files_test') }
+ let(:old_uploads_dir) { File.join('uploads', 'system', 'personal_snippet') }
+ let(:new_uploads_dir) { File.join('uploads', '-', 'system', 'personal_snippet') }
+ let(:snippet) do
+ snippet = create(:personal_snippet)
+ create_upload_for_snippet(snippet)
+ snippet.update_attributes!(description: markdown_linking_file(snippet))
+ snippet
+ end
+
+ let(:migration) { described_class.new }
+
+ before do
+ allow(migration).to receive(:base_directory) { test_dir }
+ end
+
+ describe '#perform' do
+ it 'moves the file on the disk' do
+ expected_path = File.join(test_dir, new_uploads_dir, snippet.id.to_s, "secret#{snippet.id}", 'upload.txt')
+
+ migration.perform(old_uploads_dir, new_uploads_dir)
+
+ expect(File.exist?(expected_path)).to be_truthy
+ end
+
+ it 'updates the markdown of the snippet' do
+ expected_path = File.join(new_uploads_dir, snippet.id.to_s, "secret#{snippet.id}", 'upload.txt')
+ expected_markdown = "[an upload](#{expected_path})"
+
+ migration.perform(old_uploads_dir, new_uploads_dir)
+
+ expect(snippet.reload.description).to eq(expected_markdown)
+ end
+
+ it 'updates the markdown of notes' do
+ expected_path = File.join(new_uploads_dir, snippet.id.to_s, "secret#{snippet.id}", 'upload.txt')
+ expected_markdown = "with [an upload](#{expected_path})"
+
+ note = create(:note_on_personal_snippet, noteable: snippet, note: "with #{markdown_linking_file(snippet)}")
+
+ migration.perform(old_uploads_dir, new_uploads_dir)
+
+ expect(note.reload.note).to eq(expected_markdown)
+ end
+ end
+
+ def create_upload_for_snippet(snippet)
+ snippet_path = path_for_file_in_snippet(snippet)
+ path = File.join(old_uploads_dir, snippet.id.to_s, snippet_path)
+ absolute_path = File.join(test_dir, path)
+
+ FileUtils.mkdir_p(File.dirname(absolute_path))
+ FileUtils.touch(absolute_path)
+
+ create(:upload, model: snippet, path: snippet_path, uploader: PersonalFileUploader)
+ end
+
+ def path_for_file_in_snippet(snippet)
+ secret = "secret#{snippet.id}"
+ filename = 'upload.txt'
+
+ File.join(secret, filename)
+ end
+
+ def markdown_linking_file(snippet)
+ path = File.join(old_uploads_dir, snippet.id.to_s, path_for_file_in_snippet(snippet))
+ "[an upload](#{path})"
+ end
+end
diff --git a/spec/lib/gitlab/checks/force_push_spec.rb b/spec/lib/gitlab/checks/force_push_spec.rb
index 6c4cfa1203e..f8c8b83a3ac 100644
--- a/spec/lib/gitlab/checks/force_push_spec.rb
+++ b/spec/lib/gitlab/checks/force_push_spec.rb
@@ -3,7 +3,7 @@ require 'spec_helper'
describe Gitlab::Checks::ForcePush do
let(:project) { create(:project, :repository) }
- context "exit code checking" do
+ context "exit code checking", skip_gitaly_mock: true do
it "does not raise a runtime error if the `popen` call to git returns a zero exit code" do
allow(Gitlab::Popen).to receive(:popen).and_return(['normal output', 0])
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index c5f9aecd867..5fa94999d25 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -51,6 +51,28 @@ describe Gitlab::Database do
end
end
+ describe '.join_lateral_supported?' do
+ it 'returns false when using MySQL' do
+ allow(described_class).to receive(:postgresql?).and_return(false)
+
+ expect(described_class.join_lateral_supported?).to eq(false)
+ end
+
+ it 'returns false when using PostgreSQL 9.2' do
+ allow(described_class).to receive(:postgresql?).and_return(true)
+ allow(described_class).to receive(:version).and_return('9.2.1')
+
+ expect(described_class.join_lateral_supported?).to eq(false)
+ end
+
+ it 'returns true when using PostgreSQL 9.3.0 or newer' do
+ allow(described_class).to receive(:postgresql?).and_return(true)
+ allow(described_class).to receive(:version).and_return('9.3.0')
+
+ expect(described_class.join_lateral_supported?).to eq(true)
+ end
+ end
+
describe '.nulls_last_order' do
context 'when using PostgreSQL' do
before do
diff --git a/spec/lib/gitlab/git/storage/circuit_breaker_spec.rb b/spec/lib/gitlab/git/storage/circuit_breaker_spec.rb
index 7256402b010..9d1763b96ad 100644
--- a/spec/lib/gitlab/git/storage/circuit_breaker_spec.rb
+++ b/spec/lib/gitlab/git/storage/circuit_breaker_spec.rb
@@ -175,11 +175,7 @@ describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state:
describe '#track_storage_inaccessible' do
around do |example|
- Timecop.freeze
-
- example.run
-
- Timecop.return
+ Timecop.freeze { example.run }
end
it 'records the failure time in redis' do
diff --git a/spec/lib/gitlab/health_checks/fs_shards_check_spec.rb b/spec/lib/gitlab/health_checks/fs_shards_check_spec.rb
index a0e5e401359..f5c9680bf59 100644
--- a/spec/lib/gitlab/health_checks/fs_shards_check_spec.rb
+++ b/spec/lib/gitlab/health_checks/fs_shards_check_spec.rb
@@ -106,12 +106,6 @@ describe Gitlab::HealthChecks::FsShardsCheck do
}.with_indifferent_access
end
- # Unsolved intermittent failure in CI https://gitlab.com/gitlab-org/gitlab-ce/issues/31128
- around do |example| # rubocop:disable RSpec/AroundBlock
- times_to_try = ENV['CI'] ? 4 : 1
- example.run_with_retry retry: times_to_try
- end
-
it 'provides metrics' do
metrics = described_class.metrics
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 6a41afe0c25..8da02b0cf00 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -22,6 +22,7 @@ events:
- author
- project
- target
+- push_event_payload
notes:
- award_emoji
- project
@@ -272,3 +273,5 @@ timelogs:
- issue
- merge_request
- user
+push_event_payload:
+- event
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index 4dce48f8079..ae3b0173160 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -36,6 +36,14 @@ Event:
- updated_at
- action
- author_id
+PushEventPayload:
+- commit_count
+- action
+- ref_type
+- commit_from
+- commit_to
+- ref
+- commit_title
Note:
- id
- note
diff --git a/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb b/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb
index 461b1e4182a..ebe66948a91 100644
--- a/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb
+++ b/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb
@@ -4,10 +4,6 @@ describe Gitlab::Metrics::RequestsRackMiddleware do
let(:app) { double('app') }
subject { described_class.new(app) }
- around do |example|
- Timecop.freeze { example.run }
- end
-
describe '#call' do
let(:status) { 100 }
let(:env) { { 'REQUEST_METHOD' => 'GET' } }
@@ -28,16 +24,14 @@ describe Gitlab::Metrics::RequestsRackMiddleware do
subject.call(env)
end
- it 'measures execution time' do
- execution_time = 10
- allow(app).to receive(:call) do |*args|
- Timecop.freeze(execution_time.seconds)
- [200, nil, nil]
- end
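+ # Instead of asserting an exact duration, the custom matcher below only
+ # checks that a positive execution time is reported (the call runs under
+ # Timecop.scale, so a measurable amount of time passes).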
+ RSpec::Matchers.define :a_positive_execution_time do
+ match { |actual| actual > 0 }
+ end
- expect(described_class).to receive_message_chain(:http_request_duration_seconds, :observe).with({ status: 200, method: 'get' }, execution_time)
+ it 'measures execution time' do
+ expect(described_class).to receive_message_chain(:http_request_duration_seconds, :observe).with({ status: 200, method: 'get' }, a_positive_execution_time)
- subject.call(env)
+ Timecop.scale(3600) { subject.call(env) }
end
end
diff --git a/spec/lib/gitlab/workhorse_spec.rb b/spec/lib/gitlab/workhorse_spec.rb
index 654397ccffb..e78892d4232 100644
--- a/spec/lib/gitlab/workhorse_spec.rb
+++ b/spec/lib/gitlab/workhorse_spec.rb
@@ -217,7 +217,9 @@ describe Gitlab::Workhorse do
it 'includes a Repository param' do
repo_param = { Repository: {
storage_name: 'default',
- relative_path: project.full_path + '.git'
+ relative_path: project.full_path + '.git',
+ git_object_directory: '',
+ git_alternate_object_directories: []
} }
expect(subject).to include(repo_param)
diff --git a/spec/lib/rspec_flaky/example_spec.rb b/spec/lib/rspec_flaky/example_spec.rb
new file mode 100644
index 00000000000..5b4fd5ddf3e
--- /dev/null
+++ b/spec/lib/rspec_flaky/example_spec.rb
@@ -0,0 +1,89 @@
+require 'spec_helper'
+
+describe RspecFlaky::Example do
+ let(:example_attrs) do
+ {
+ id: 'spec/foo/bar_spec.rb:2',
+ metadata: {
+ file_path: 'spec/foo/bar_spec.rb',
+ line_number: 2,
+ full_description: 'hello world'
+ },
+ execution_result: double(status: 'passed', exception: 'BOOM!'),
+ attempts: 1
+ }
+ end
+ let(:rspec_example) { double(example_attrs) }
+
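+ # rspec_example is a double standing in for an RSpec::Core::Example,
+ # exposing only the attributes the wrapper reads.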
+ describe '#initialize' do
+ shared_examples 'a valid Example instance' do
+ it 'returns valid attributes' do
+ example = described_class.new(args)
+
+ expect(example.example_id).to eq(example_attrs[:id])
+ end
+ end
+
+ context 'when given an Rspec::Core::Example that responds to #example' do
+ let(:args) { double(example: rspec_example) }
+
+ it_behaves_like 'a valid Example instance'
+ end
+
+ context 'when given an Rspec::Core::Example that does not respond to #example' do
+ let(:args) { rspec_example }
+
+ it_behaves_like 'a valid Example instance'
+ end
+ end
+
+ subject { described_class.new(rspec_example) }
+
+ describe '#uid' do
+ it 'returns a hash of the full description' do
+ expect(subject.uid).to eq(Digest::MD5.hexdigest("#{subject.description}-#{subject.file}"))
+ end
+ end
+
+ describe '#example_id' do
+ it 'returns the ID of the RSpec::Core::Example' do
+ expect(subject.example_id).to eq(rspec_example.id)
+ end
+ end
+
+ describe '#attempts' do
+ it 'returns the attempts of the RSpec::Core::Example' do
+ expect(subject.attempts).to eq(rspec_example.attempts)
+ end
+ end
+
+ describe '#file' do
+ it 'returns the metadata[:file_path] of the RSpec::Core::Example' do
+ expect(subject.file).to eq(rspec_example.metadata[:file_path])
+ end
+ end
+
+ describe '#line' do
+ it 'returns the metadata[:line_number] of the RSpec::Core::Example' do
+ expect(subject.line).to eq(rspec_example.metadata[:line_number])
+ end
+ end
+
+ describe '#description' do
+ it 'returns the metadata[:full_description] of the RSpec::Core::Example' do
+ expect(subject.description).to eq(rspec_example.metadata[:full_description])
+ end
+ end
+
+ describe '#status' do
+ it 'returns the execution_result.status of the RSpec::Core::Example' do
+ expect(subject.status).to eq(rspec_example.execution_result.status)
+ end
+ end
+
+ describe '#exception' do
+ it 'returns the execution_result.exception of the RSpec::Core::Example' do
+ expect(subject.exception).to eq(rspec_example.execution_result.exception)
+ end
+ end
+end
diff --git a/spec/lib/rspec_flaky/flaky_example_spec.rb b/spec/lib/rspec_flaky/flaky_example_spec.rb
new file mode 100644
index 00000000000..cbfc1e538ab
--- /dev/null
+++ b/spec/lib/rspec_flaky/flaky_example_spec.rb
@@ -0,0 +1,104 @@
+require 'spec_helper'
+
+describe RspecFlaky::FlakyExample do
+ let(:flaky_example_attrs) do
+ {
+ example_id: 'spec/foo/bar_spec.rb:2',
+ file: 'spec/foo/bar_spec.rb',
+ line: 2,
+ description: 'hello world',
+ first_flaky_at: 1234,
+ last_flaky_at: 2345,
+ last_attempts_count: 2,
+ flaky_reports: 1
+ }
+ end
+ let(:example_attrs) do
+ {
+ uid: 'abc123',
+ example_id: flaky_example_attrs[:example_id],
+ file: flaky_example_attrs[:file],
+ line: flaky_example_attrs[:line],
+ description: flaky_example_attrs[:description],
+ status: 'passed',
+ exception: 'BOOM!',
+ attempts: flaky_example_attrs[:last_attempts_count]
+ }
+ end
+ let(:example) { double(example_attrs) }
+
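+ # FlakyExample can be built either from an RspecFlaky::Example (or a double
+ # of one) or from a plain attributes hash, so both input shapes are covered.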
+ describe '#initialize' do
+ shared_examples 'a valid FlakyExample instance' do
+ it 'returns valid attributes' do
+ flaky_example = described_class.new(args)
+
+ expect(flaky_example.uid).to eq(flaky_example_attrs[:uid])
+ expect(flaky_example.example_id).to eq(flaky_example_attrs[:example_id])
+ end
+ end
+
+ context 'when given an Rspec::Example' do
+ let(:args) { example }
+
+ it_behaves_like 'a valid FlakyExample instance'
+ end
+
+ context 'when given a hash' do
+ let(:args) { flaky_example_attrs }
+
+ it_behaves_like 'a valid FlakyExample instance'
+ end
+ end
+
+ describe '#to_h' do
+ before do
+ # Stub these env variables, otherwise the specs don't behave the same on CI
+ stub_env('CI_PROJECT_URL', nil)
+ stub_env('CI_JOB_ID', nil)
+ end
+
+ shared_examples 'a valid FlakyExample hash' do
+ let(:additional_attrs) { {} }
+
+ it 'returns a valid hash' do
+ flaky_example = described_class.new(args)
+ final_hash = flaky_example_attrs
+ .merge(last_flaky_at: instance_of(Time), last_flaky_job: nil)
+ .merge(additional_attrs)
+
+ expect(flaky_example.to_h).to match(hash_including(final_hash))
+ end
+ end
+
+ context 'when given an Rspec::Example' do
+ let(:args) { example }
+
+ context 'when run locally' do
+ it_behaves_like 'a valid FlakyExample hash' do
+ let(:additional_attrs) do
+ { first_flaky_at: instance_of(Time) }
+ end
+ end
+ end
+
+ context 'when run on the CI' do
+ before do
+ stub_env('CI_PROJECT_URL', 'https://gitlab.com/gitlab-org/gitlab-ce')
+ stub_env('CI_JOB_ID', 42)
+ end
+
+ it_behaves_like 'a valid FlakyExample hash' do
+ let(:additional_attrs) do
+ { first_flaky_at: instance_of(Time), last_flaky_job: "https://gitlab.com/gitlab-org/gitlab-ce/-/jobs/42" }
+ end
+ end
+ end
+ end
+
+ context 'when given a hash' do
+ let(:args) { flaky_example_attrs }
+
+ it_behaves_like 'a valid FlakyExample hash'
+ end
+ end
+end
diff --git a/spec/lib/rspec_flaky/listener_spec.rb b/spec/lib/rspec_flaky/listener_spec.rb
new file mode 100644
index 00000000000..0e193bf408b
--- /dev/null
+++ b/spec/lib/rspec_flaky/listener_spec.rb
@@ -0,0 +1,178 @@
+require 'spec_helper'
+
+describe RspecFlaky::Listener do
+ let(:flaky_example_report) do
+ {
+ 'abc123' => {
+ example_id: 'spec/foo/bar_spec.rb:2',
+ file: 'spec/foo/bar_spec.rb',
+ line: 2,
+ description: 'hello world',
+ first_flaky_at: 1234,
+ last_flaky_at: instance_of(Time),
+ last_attempts_count: 2,
+ flaky_reports: 1,
+ last_flaky_job: nil
+ }
+ }
+ end
+ let(:example_attrs) do
+ {
+ id: 'spec/foo/baz_spec.rb:3',
+ metadata: {
+ file_path: 'spec/foo/baz_spec.rb',
+ line_number: 3,
+ full_description: 'hello GitLab'
+ },
+ execution_result: double(status: 'passed', exception: nil)
+ }
+ end
+
+ before do
+ # Stub these env variables, otherwise the specs don't behave the same on CI
+ stub_env('CI_PROJECT_URL', nil)
+ stub_env('CI_JOB_ID', nil)
+ end
+
+ describe '#initialize' do
+ shared_examples 'a valid Listener instance' do
+ let(:expected_all_flaky_examples) { {} }
+
+ it 'returns a valid Listener instance' do
+ listener = described_class.new
+
+ expect(listener.to_report(listener.all_flaky_examples))
+ .to match(hash_including(expected_all_flaky_examples))
+ expect(listener.new_flaky_examples).to eq({})
+ end
+ end
+
+ context 'when no report file exists' do
+ it_behaves_like 'a valid Listener instance'
+ end
+
+ context 'when a report file exists and is set by ALL_FLAKY_RSPEC_REPORT_PATH' do
+ let(:report_file) do
+ Tempfile.new(%w[rspec_flaky_report .json]).tap do |f|
+ f.write(JSON.pretty_generate(flaky_example_report))
+ f.rewind
+ end
+ end
+
+ before do
+ stub_env('ALL_FLAKY_RSPEC_REPORT_PATH', report_file.path)
+ end
+
+ after do
+ report_file.close
+ report_file.unlink
+ end
+
+ it_behaves_like 'a valid Listener instance' do
+ let(:expected_all_flaky_examples) { flaky_example_report }
+ end
+ end
+ end
+
+ describe '#example_passed' do
+ let(:rspec_example) { double(example_attrs) }
+ let(:notification) { double(example: rspec_example) }
+
+ shared_examples 'a non-flaky example' do
+ it 'does not change the flaky examples hash' do
+ expect { subject.example_passed(notification) }
+ .not_to change { subject.all_flaky_examples }
+ end
+ end
+
+ describe 'when the RSpec example does not respond to attempts' do
+ it_behaves_like 'a non-flaky example'
+ end
+
+ describe 'when the RSpec example has 1 attempt' do
+ let(:rspec_example) { double(example_attrs.merge(attempts: 1)) }
+
+ it_behaves_like 'a non-flaky example'
+ end
+
+ describe 'when the RSpec example has 2 attempts' do
+ let(:rspec_example) { double(example_attrs.merge(attempts: 2)) }
+ let(:expected_new_flaky_example) do
+ {
+ example_id: 'spec/foo/baz_spec.rb:3',
+ file: 'spec/foo/baz_spec.rb',
+ line: 3,
+ description: 'hello GitLab',
+ first_flaky_at: instance_of(Time),
+ last_flaky_at: instance_of(Time),
+ last_attempts_count: 2,
+ flaky_reports: 1,
+ last_flaky_job: nil
+ }
+ end
+
+ it 'changes the flaky examples hash' do
+ expect { subject.example_passed(notification) }
+ .to change { subject.all_flaky_examples }
+
+ new_example = RspecFlaky::Example.new(rspec_example)
+
+ expect(subject.all_flaky_examples[new_example.uid].to_h)
+ .to match(hash_including(expected_new_flaky_example))
+ end
+ end
+ end
+
+ describe '#dump_summary' do
+ let(:rspec_example) { double(example_attrs) }
+ let(:notification) { double(example: rspec_example) }
+
+ context 'when a report file path is set by ALL_FLAKY_RSPEC_REPORT_PATH' do
+ let(:report_file_path) { Rails.root.join('tmp', 'rspec_flaky_report.json') }
+
+ before do
+ stub_env('ALL_FLAKY_RSPEC_REPORT_PATH', report_file_path)
+ FileUtils.rm(report_file_path) if File.exist?(report_file_path)
+ end
+
+ after do
+ FileUtils.rm(report_file_path) if File.exist?(report_file_path)
+ end
+
+ context 'when FLAKY_RSPEC_GENERATE_REPORT == "false"' do
+ before do
+ stub_env('FLAKY_RSPEC_GENERATE_REPORT', 'false')
+ end
+
+ it 'does not write the report file' do
+ subject.example_passed(notification)
+
+ subject.dump_summary(nil)
+
+ expect(File.exist?(report_file_path)).to be(false)
+ end
+ end
+
+ context 'when FLAKY_RSPEC_GENERATE_REPORT == "true"' do
+ before do
+ stub_env('FLAKY_RSPEC_GENERATE_REPORT', 'true')
+ end
+
+ it 'writes the report file' do
+ subject.example_passed(notification)
+
+ subject.dump_summary(nil)
+
+ expect(File.exist?(report_file_path)).to be(true)
+ end
+ end
+ end
+ end
+
+ describe '#to_report' do
+ it 'transforms the internal hash to a JSON-ready hash' do
+ expect(subject.to_report('abc123' => RspecFlaky::FlakyExample.new(flaky_example_report['abc123'])))
+ .to match(hash_including(flaky_example_report))
+ end
+ end
+end
diff --git a/spec/migrations/clean_upload_symlinks_spec.rb b/spec/migrations/clean_upload_symlinks_spec.rb
index cecb3ddac53..26653b9c008 100644
--- a/spec/migrations/clean_upload_symlinks_spec.rb
+++ b/spec/migrations/clean_upload_symlinks_spec.rb
@@ -5,7 +5,7 @@ describe CleanUploadSymlinks do
let(:migration) { described_class.new }
let(:test_dir) { File.join(Rails.root, "tmp", "tests", "move_uploads_test") }
let(:uploads_dir) { File.join(test_dir, "public", "uploads") }
- let(:new_uploads_dir) { File.join(uploads_dir, "system") }
+ let(:new_uploads_dir) { File.join(uploads_dir, "-", "system") }
let(:original_path) { File.join(new_uploads_dir, 'user') }
let(:symlink_path) { File.join(uploads_dir, 'user') }
diff --git a/spec/migrations/move_personal_snippets_files_spec.rb b/spec/migrations/move_personal_snippets_files_spec.rb
index 8505c7bf3e3..1a319eccc0d 100644
--- a/spec/migrations/move_personal_snippets_files_spec.rb
+++ b/spec/migrations/move_personal_snippets_files_spec.rb
@@ -5,7 +5,7 @@ describe MovePersonalSnippetsFiles do
let(:migration) { described_class.new }
let(:test_dir) { File.join(Rails.root, "tmp", "tests", "move_snippet_files_test") }
let(:uploads_dir) { File.join(test_dir, 'uploads') }
- let(:new_uploads_dir) { File.join(uploads_dir, 'system') }
+ let(:new_uploads_dir) { File.join(uploads_dir, '-', 'system') }
before do
allow(CarrierWave).to receive(:root).and_return(test_dir)
@@ -42,7 +42,7 @@ describe MovePersonalSnippetsFiles do
describe 'updating the markdown' do
it 'includes the new path when the file exists' do
secret = "secret#{snippet.id}"
- file_location = "/uploads/system/personal_snippet/#{snippet.id}/#{secret}/picture.jpg"
+ file_location = "/uploads/-/system/personal_snippet/#{snippet.id}/#{secret}/picture.jpg"
migration.up
@@ -60,7 +60,7 @@ describe MovePersonalSnippetsFiles do
it 'updates the note markdown' do
secret = "secret#{snippet.id}"
- file_location = "/uploads/system/personal_snippet/#{snippet.id}/#{secret}/picture.jpg"
+ file_location = "/uploads/-/system/personal_snippet/#{snippet.id}/#{secret}/picture.jpg"
markdown = markdown_linking_file('picture.jpg', snippet)
note = create(:note_on_personal_snippet, noteable: snippet, note: "with #{markdown}")
@@ -108,7 +108,7 @@ describe MovePersonalSnippetsFiles do
it 'keeps the markdown as is when the file is missing' do
secret = "secret#{snippet_with_missing_file.id}"
- file_location = "/uploads/system/personal_snippet/#{snippet_with_missing_file.id}/#{secret}/picture.jpg"
+ file_location = "/uploads/-/system/personal_snippet/#{snippet_with_missing_file.id}/#{secret}/picture.jpg"
migration.down
@@ -167,7 +167,7 @@ describe MovePersonalSnippetsFiles do
def markdown_linking_file(filename, snippet, in_new_path: false)
markdown = "![#{filename.split('.')[0]}]"
markdown += '(/uploads'
- markdown += '/system' if in_new_path
+ markdown += '/-/system' if in_new_path
markdown += "/#{model_file_path(filename, snippet)})"
markdown
end
diff --git a/spec/migrations/move_system_upload_folder_spec.rb b/spec/migrations/move_system_upload_folder_spec.rb
index b622b4e9536..d3180477db3 100644
--- a/spec/migrations/move_system_upload_folder_spec.rb
+++ b/spec/migrations/move_system_upload_folder_spec.rb
@@ -33,6 +33,15 @@ describe MoveSystemUploadFolder do
expect(File.symlink?(File.join(test_base, 'system'))).to be_truthy
expect(File.exist?(File.join(test_base, 'system', 'file'))).to be_truthy
end
+
+ it 'does not move if the target directory already exists' do
+ FileUtils.mkdir_p(File.join(test_base, '-', 'system'))
+
+ expect(FileUtils).not_to receive(:mv)
+ expect(migration).to receive(:say).with(/already exists. No need to redo the move/)
+
+ migration.up
+ end
end
describe '#down' do
@@ -58,5 +67,14 @@ describe MoveSystemUploadFolder do
expect(File.directory?(File.join(test_base, 'system'))).to be_truthy
expect(File.symlink?(File.join(test_base, 'system'))).to be_falsey
end
+
+ it 'does not move if the old directory already exists' do
+ FileUtils.mkdir_p(File.join(test_base, 'system'))
+
+ expect(FileUtils).not_to receive(:mv)
+ expect(migration).to receive(:say).with(/already exists and is not a symlink, no need to revert/)
+
+ migration.down
+ end
end
end
diff --git a/spec/migrations/move_uploads_to_system_dir_spec.rb b/spec/migrations/move_uploads_to_system_dir_spec.rb
index 37d66452447..ca11a2004c5 100644
--- a/spec/migrations/move_uploads_to_system_dir_spec.rb
+++ b/spec/migrations/move_uploads_to_system_dir_spec.rb
@@ -5,7 +5,7 @@ describe MoveUploadsToSystemDir do
let(:migration) { described_class.new }
let(:test_dir) { File.join(Rails.root, "tmp", "move_uploads_test") }
let(:uploads_dir) { File.join(test_dir, "public", "uploads") }
- let(:new_uploads_dir) { File.join(uploads_dir, "system") }
+ let(:new_uploads_dir) { File.join(uploads_dir, "-", "system") }
before do
FileUtils.remove_dir(test_dir) if File.directory?(test_dir)
diff --git a/spec/migrations/rename_system_namespaces_spec.rb b/spec/migrations/rename_system_namespaces_spec.rb
deleted file mode 100644
index 747694cbe33..00000000000
--- a/spec/migrations/rename_system_namespaces_spec.rb
+++ /dev/null
@@ -1,254 +0,0 @@
-require "spec_helper"
-require Rails.root.join("db", "migrate", "20170316163800_rename_system_namespaces.rb")
-
-describe RenameSystemNamespaces, truncate: true do
- let(:migration) { described_class.new }
- let(:test_dir) { File.join(Rails.root, "tmp", "tests", "rename_namespaces_test") }
- let(:uploads_dir) { File.join(test_dir, "public", "uploads") }
- let(:system_namespace) do
- namespace = build(:namespace, path: "system")
- namespace.save(validate: false)
- namespace
- end
-
- def save_invalid_routable(routable)
- routable.__send__(:prepare_route)
- routable.save(validate: false)
- end
-
- before do
- FileUtils.remove_dir(test_dir) if File.directory?(test_dir)
- FileUtils.mkdir_p(uploads_dir)
- FileUtils.remove_dir(TestEnv.repos_path) if File.directory?(TestEnv.repos_path)
- allow(migration).to receive(:say)
- allow(migration).to receive(:uploads_dir).and_return(uploads_dir)
- end
-
- describe "#system_namespace" do
- it "only root namespaces called with path `system`" do
- system_namespace
- system_namespace_with_parent = build(:namespace, path: 'system', parent: create(:namespace))
- system_namespace_with_parent.save(validate: false)
-
- expect(migration.system_namespace.id).to eq(system_namespace.id)
- end
- end
-
- describe "#up" do
- before do
- system_namespace
- end
-
- it "doesn't break if there are no namespaces called system" do
- Namespace.delete_all
-
- migration.up
- end
-
- it "renames namespaces called system" do
- migration.up
-
- expect(system_namespace.reload.path).to eq("system0")
- end
-
- it "renames the route to the namespace" do
- migration.up
-
- expect(system_namespace.reload.full_path).to eq("system0")
- end
-
- it "renames the route for projects of the namespace" do
- project = build(:project, :repository, path: "project-path", namespace: system_namespace)
- save_invalid_routable(project)
-
- migration.up
-
- expect(project.route.reload.path).to eq("system0/project-path")
- end
-
- it "doesn't touch routes of namespaces that look like system" do
- namespace = create(:group, path: 'systemlookalike')
- project = create(:project, :repository, namespace: namespace, path: 'the-project')
-
- migration.up
-
- expect(project.route.reload.path).to eq('systemlookalike/the-project')
- expect(namespace.route.reload.path).to eq('systemlookalike')
- end
-
- it "moves the the repository for a project in the namespace" do
- project = build(:project, :repository, namespace: system_namespace, path: "system-project")
- save_invalid_routable(project)
- TestEnv.copy_repo(project,
- bare_repo: TestEnv.factory_repo_path_bare,
- refs: TestEnv::BRANCH_SHA)
- expected_repo = File.join(TestEnv.repos_path, "system0", "system-project.git")
-
- migration.up
-
- expect(File.directory?(expected_repo)).to be(true)
- end
-
- it "moves the uploads for the namespace" do
- allow(migration).to receive(:move_namespace_folders).with(Settings.pages.path, "system", "system0")
- expect(migration).to receive(:move_namespace_folders).with(uploads_dir, "system", "system0")
-
- migration.up
- end
-
- it "moves the pages for the namespace" do
- allow(migration).to receive(:move_namespace_folders).with(uploads_dir, "system", "system0")
- expect(migration).to receive(:move_namespace_folders).with(Settings.pages.path, "system", "system0")
-
- migration.up
- end
-
- describe "clears the markdown cache for projects in the system namespace" do
- let!(:project) do
- project = build(:project, :repository, namespace: system_namespace)
- save_invalid_routable(project)
- project
- end
-
- it 'removes description_html from projects' do
- migration.up
-
- expect(project.reload.description_html).to be_nil
- end
-
- it 'removes issue descriptions' do
- issue = create(:issue, project: project, description_html: 'Issue description')
-
- migration.up
-
- expect(issue.reload.description_html).to be_nil
- end
-
- it 'removes merge request descriptions' do
- merge_request = create(:merge_request,
- source_project: project,
- target_project: project,
- description_html: 'MergeRequest description')
-
- migration.up
-
- expect(merge_request.reload.description_html).to be_nil
- end
-
- it 'removes note html' do
- note = create(:note,
- project: project,
- noteable: create(:issue, project: project),
- note_html: 'note description')
-
- migration.up
-
- expect(note.reload.note_html).to be_nil
- end
-
- it 'removes milestone description' do
- milestone = create(:milestone,
- project: project,
- description_html: 'milestone description')
-
- migration.up
-
- expect(milestone.reload.description_html).to be_nil
- end
- end
-
- context "system namespace -> subgroup -> system0 project" do
- it "updates the route of the project correctly" do
- subgroup = build(:group, path: "subgroup", parent: system_namespace)
- save_invalid_routable(subgroup)
- project = build(:project, :repository, path: "system0", namespace: subgroup)
- save_invalid_routable(project)
-
- migration.up
-
- expect(project.route.reload.path).to eq("system0/subgroup/system0")
- end
- end
- end
-
- describe "#move_repositories" do
- let(:namespace) { create(:group, name: "hello-group") }
- it "moves a project for a namespace" do
- create(:project, :repository, namespace: namespace, path: "hello-project")
- expected_path = File.join(TestEnv.repos_path, "bye-group", "hello-project.git")
-
- migration.move_repositories(namespace, "hello-group", "bye-group")
-
- expect(File.directory?(expected_path)).to be(true)
- end
-
- it "moves a namespace in a subdirectory correctly" do
- child_namespace = create(:group, name: "sub-group", parent: namespace)
- create(:project, :repository, namespace: child_namespace, path: "hello-project")
-
- expected_path = File.join(TestEnv.repos_path, "hello-group", "renamed-sub-group", "hello-project.git")
-
- migration.move_repositories(child_namespace, "hello-group/sub-group", "hello-group/renamed-sub-group")
-
- expect(File.directory?(expected_path)).to be(true)
- end
-
- it "moves a parent namespace with subdirectories" do
- child_namespace = create(:group, name: "sub-group", parent: namespace)
- create(:project, :repository, namespace: child_namespace, path: "hello-project")
- expected_path = File.join(TestEnv.repos_path, "renamed-group", "sub-group", "hello-project.git")
-
- migration.move_repositories(child_namespace, "hello-group", "renamed-group")
-
- expect(File.directory?(expected_path)).to be(true)
- end
- end
-
- describe "#move_namespace_folders" do
- it "moves a namespace with files" do
- source = File.join(uploads_dir, "parent-group", "sub-group")
- FileUtils.mkdir_p(source)
- destination = File.join(uploads_dir, "parent-group", "moved-group")
- FileUtils.touch(File.join(source, "test.txt"))
- expected_file = File.join(destination, "test.txt")
-
- migration.move_namespace_folders(uploads_dir, File.join("parent-group", "sub-group"), File.join("parent-group", "moved-group"))
-
- expect(File.exist?(expected_file)).to be(true)
- end
-
- it "moves a parent namespace uploads" do
- source = File.join(uploads_dir, "parent-group", "sub-group")
- FileUtils.mkdir_p(source)
- destination = File.join(uploads_dir, "moved-parent", "sub-group")
- FileUtils.touch(File.join(source, "test.txt"))
- expected_file = File.join(destination, "test.txt")
-
- migration.move_namespace_folders(uploads_dir, "parent-group", "moved-parent")
-
- expect(File.exist?(expected_file)).to be(true)
- end
- end
-
- describe "#child_ids_for_parent" do
- it "collects child ids for all levels" do
- parent = create(:group)
- first_child = create(:group, parent: parent)
- second_child = create(:group, parent: parent)
- third_child = create(:group, parent: second_child)
- all_ids = [parent.id, first_child.id, second_child.id, third_child.id]
-
- collected_ids = migration.child_ids_for_parent(parent, ids: [parent.id])
-
- expect(collected_ids).to contain_exactly(*all_ids)
- end
- end
-
- describe "#remove_last_ocurrence" do
- it "removes only the last occurance of a string" do
- input = "this/is/system/namespace/with/system"
-
- expect(migration.remove_last_occurrence(input, "system")).to eq("this/is/system/namespace/with/")
- end
- end
-end
diff --git a/spec/migrations/update_upload_paths_to_system_spec.rb b/spec/migrations/update_upload_paths_to_system_spec.rb
index 11412005b72..0a45c5ea32d 100644
--- a/spec/migrations/update_upload_paths_to_system_spec.rb
+++ b/spec/migrations/update_upload_paths_to_system_spec.rb
@@ -11,7 +11,7 @@ describe UpdateUploadPathsToSystem do
describe "#uploads_to_switch_to_new_path" do
it "contains only uploads with the old path for the correct models" do
_upload_for_other_type = create(:upload, model: create(:ci_pipeline), path: "uploads/ci_pipeline/avatar.jpg")
- _upload_with_system_path = create(:upload, model: create(:project), path: "uploads/system/project/avatar.jpg")
+ _upload_with_system_path = create(:upload, model: create(:project), path: "uploads/-/system/project/avatar.jpg")
_upload_with_other_path = create(:upload, model: create(:project), path: "thelongsecretforafileupload/avatar.jpg")
old_upload = create(:upload, model: create(:project), path: "uploads/project/avatar.jpg")
group_upload = create(:upload, model: create(:group), path: "uploads/group/avatar.jpg")
@@ -23,7 +23,7 @@ describe UpdateUploadPathsToSystem do
describe "#uploads_to_switch_to_old_path" do
it "contains only uploads with the new path for the correct models" do
_upload_for_other_type = create(:upload, model: create(:ci_pipeline), path: "uploads/ci_pipeline/avatar.jpg")
- upload_with_system_path = create(:upload, model: create(:project), path: "uploads/system/project/avatar.jpg")
+ upload_with_system_path = create(:upload, model: create(:project), path: "uploads/-/system/project/avatar.jpg")
_upload_with_other_path = create(:upload, model: create(:project), path: "thelongsecretforafileupload/avatar.jpg")
_old_upload = create(:upload, model: create(:project), path: "uploads/project/avatar.jpg")
@@ -37,13 +37,13 @@ describe UpdateUploadPathsToSystem do
migration.up
- expect(old_upload.reload.path).to eq("uploads/system/project/avatar.jpg")
+ expect(old_upload.reload.path).to eq("uploads/-/system/project/avatar.jpg")
end
end
describe "#down", truncate: true do
it "updates the new system patsh to the old paths" do
- new_upload = create(:upload, model: create(:project), path: "uploads/system/project/avatar.jpg")
+ new_upload = create(:upload, model: create(:project), path: "uploads/-/system/project/avatar.jpg")
migration.down
diff --git a/spec/models/appearance_spec.rb b/spec/models/appearance_spec.rb
index 7cd3a84d592..b5d5d58697b 100644
--- a/spec/models/appearance_spec.rb
+++ b/spec/models/appearance_spec.rb
@@ -9,4 +9,39 @@ RSpec.describe Appearance do
it { is_expected.to validate_presence_of(:description) }
it { is_expected.to have_many(:uploads).dependent(:destroy) }
+
+ describe '.current', :use_clean_rails_memory_store_caching do
+ let!(:appearance) { create(:appearance) }
+
+ it 'returns the current appearance row' do
+ expect(described_class.current).to eq(appearance)
+ end
+
+ it 'caches the result' do
+ expect(described_class).to receive(:first).once
+
+ 2.times { described_class.current }
+ end
+ end
+
+ describe '#flush_redis_cache' do
+ it 'flushes the cache in Redis' do
+ appearance = create(:appearance)
+
+ expect(Rails.cache).to receive(:delete).with(described_class::CACHE_KEY)
+
+ appearance.flush_redis_cache
+ end
+ end
+
+ describe '#single_appearance_row' do
+ it 'adds an error when more than 1 row exists' do
+ create(:appearance)
+
+ new_row = build(:appearance)
+ new_row.save
+
+ expect(new_row.valid?).to eq(false)
+ end
+ end
end
diff --git a/spec/models/broadcast_message_spec.rb b/spec/models/broadcast_message_spec.rb
index a8ca1d110e4..3369aef1d3e 100644
--- a/spec/models/broadcast_message_spec.rb
+++ b/spec/models/broadcast_message_spec.rb
@@ -20,7 +20,7 @@ describe BroadcastMessage do
it { is_expected.not_to allow_value('000').for(:font) }
end
- describe '.current' do
+ describe '.current', :use_clean_rails_memory_store_caching do
it 'returns message if time match' do
message = create(:broadcast_message)
@@ -45,6 +45,14 @@ describe BroadcastMessage do
expect(described_class.current).to be_empty
end
+
+ it 'caches the output of the query' do
+ create(:broadcast_message)
+
+ expect(described_class).to receive(:where).and_call_original.once
+
+ 2.times { described_class.current }
+ end
end
describe '#active?' do
@@ -102,4 +110,14 @@ describe BroadcastMessage do
end
end
end
+
+ describe '#flush_redis_cache' do
+ it 'flushes the Redis cache' do
+ message = create(:broadcast_message)
+
+ expect(Rails.cache).to receive(:delete).with(described_class::CACHE_KEY)
+
+ message.flush_redis_cache
+ end
+ end
end
diff --git a/spec/models/event_collection_spec.rb b/spec/models/event_collection_spec.rb
new file mode 100644
index 00000000000..e0a87c18cc7
--- /dev/null
+++ b/spec/models/event_collection_spec.rb
@@ -0,0 +1,51 @@
+require 'spec_helper'
+
+describe EventCollection do
+ describe '#to_a' do
+ let(:project) { create(:project_empty_repo) }
+ let(:projects) { Project.where(id: project.id) }
+ let(:user) { create(:user) }
+
+ before do
+ 20.times do
+ event = create(:push_event, project: project, author: user)
+
+ create(:push_event_payload, event: event)
+ end
+
+ create(:closed_issue_event, project: project, author: user)
+ end
+
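+ # 21 events exist in total (20 pushes plus one closed issue), which lets the
+ # examples below exercise the default per-page limit of 20, offset-based
+ # pagination, and filtering by EventFilter.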
+ it 'returns an Array of events' do
+ events = described_class.new(projects).to_a
+
+ expect(events).to be_an_instance_of(Array)
+ end
+
+ it 'applies a limit to the number of events' do
+ events = described_class.new(projects).to_a
+
+ expect(events.length).to eq(20)
+ end
+
+ it 'can paginate through events' do
+ events = described_class.new(projects, offset: 20).to_a
+
+ expect(events.length).to eq(1)
+ end
+
+ it 'returns an empty Array when crossing the maximum page number' do
+ events = described_class.new(projects, limit: 1, offset: 15).to_a
+
+ expect(events).to be_empty
+ end
+
+ it 'allows filtering of events using an EventFilter' do
+ filter = EventFilter.new(EventFilter.issue)
+ events = described_class.new(projects, filter: filter).to_a
+
+ expect(events.length).to eq(1)
+ expect(events[0].action).to eq(Event::CLOSED)
+ end
+ end
+end
diff --git a/spec/models/event_spec.rb b/spec/models/event_spec.rb
index d86bf1a90a9..ff3224dd298 100644
--- a/spec/models/event_spec.rb
+++ b/spec/models/event_spec.rb
@@ -304,27 +304,15 @@ describe Event do
end
end
- def create_push_event(project, user, attrs = {})
- data = {
- before: Gitlab::Git::BLANK_SHA,
- after: "0220c11b9a3e6c69dc8fd35321254ca9a7b98f7e",
- ref: "refs/heads/master",
- user_id: user.id,
- user_name: user.name,
- repository: {
- name: project.name,
- url: "localhost/rubinius",
- description: "",
- homepage: "localhost/rubinius",
- private: true
- }
- }
-
- described_class.create({
- project: project,
- action: described_class::PUSHED,
- data: data,
- author_id: user.id
- }.merge!(attrs))
+ def create_push_event(project, user)
+ event = create(:push_event, project: project, author: user)
+
+ create(:push_event_payload,
+ event: event,
+ commit_to: '1cf19a015df3523caf0a1f9d40c98a267d6a2fc2',
+ commit_count: 0,
+ ref: 'master')
+
+ event
end
end
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index 6d825ba68d1..9203f6562f2 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -57,18 +57,14 @@ describe Issue do
end
describe '#closed_at' do
- after do
- Timecop.return
- end
-
- let!(:now) { Timecop.freeze(Time.now) }
-
it 'sets closed_at to Time.now when issue is closed' do
issue = create(:issue, state: 'opened')
+ expect(issue.closed_at).to be_nil
+
issue.close
- expect(issue.closed_at).to eq(now)
+ expect(issue.closed_at).to be_present
end
end
diff --git a/spec/models/members/project_member_spec.rb b/spec/models/members/project_member_spec.rb
index f1d1f37c78a..fa3e80ba062 100644
--- a/spec/models/members/project_member_spec.rb
+++ b/spec/models/members/project_member_spec.rb
@@ -149,7 +149,7 @@ describe ProjectMember do
describe 'notifications' do
describe '#after_accept_request' do
it 'calls NotificationService.new_project_member' do
- member = create(:project_member, user: build_stubbed(:user), requested_at: Time.now)
+ member = create(:project_member, user: create(:user), requested_at: Time.now)
expect_any_instance_of(NotificationService).to receive(:new_project_member)
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index a28e92446ea..d9ab44dc49f 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -485,7 +485,7 @@ describe Project do
describe 'last_activity' do
it 'alias last_activity to last_event' do
- last_event = create(:event, project: project)
+ last_event = create(:event, :closed, project: project)
expect(project.last_activity).to eq(last_event)
end
@@ -493,7 +493,7 @@ describe Project do
describe 'last_activity_date' do
it 'returns the creation date of the project\'s last event if present' do
- new_event = create(:event, project: project, created_at: Time.now)
+ new_event = create(:event, :closed, project: project, created_at: Time.now)
project.reload
expect(project.last_activity_at.to_i).to eq(new_event.created_at.to_i)
diff --git a/spec/models/push_event_payload_spec.rb b/spec/models/push_event_payload_spec.rb
new file mode 100644
index 00000000000..a049ad35584
--- /dev/null
+++ b/spec/models/push_event_payload_spec.rb
@@ -0,0 +1,16 @@
+require 'spec_helper'
+
+describe PushEventPayload do
+ describe 'saving payloads' do
+ it 'does not allow commit titles longer than 70 characters' do
+ event = create(:push_event)
+ payload = build(:push_event_payload, event: event)
+
+ expect(payload).to be_valid
+
+ payload.commit_title = 'a' * 100
+
+ expect(payload).not_to be_valid
+ end
+ end
+end
diff --git a/spec/models/push_event_spec.rb b/spec/models/push_event_spec.rb
new file mode 100644
index 00000000000..532fb024261
--- /dev/null
+++ b/spec/models/push_event_spec.rb
@@ -0,0 +1,202 @@
+require 'spec_helper'
+
+describe PushEvent do
+ let(:payload) { PushEventPayload.new }
+
+ let(:event) do
+ event = described_class.new
+
+ allow(event).to receive(:push_event_payload).and_return(payload)
+
+ event
+ end
+
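+ # The event is built in memory with a stubbed payload so each example can
+ # control exactly what the delegated payload methods return.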
+ describe '.sti_name' do
+ it 'returns Event::PUSHED' do
+ expect(described_class.sti_name).to eq(Event::PUSHED)
+ end
+ end
+
+ describe '#push?' do
+ it 'returns true' do
+ expect(event).to be_push
+ end
+ end
+
+ describe '#push_with_commits?' do
+ it 'returns true when both the first and last commit are present' do
+ allow(event).to receive(:commit_from).and_return('123')
+ allow(event).to receive(:commit_to).and_return('456')
+
+ expect(event).to be_push_with_commits
+ end
+
+ it 'returns false when the first commit is missing' do
+ allow(event).to receive(:commit_to).and_return('456')
+
+ expect(event).not_to be_push_with_commits
+ end
+
+ it 'returns false when the last commit is missing' do
+ allow(event).to receive(:commit_from).and_return('123')
+
+ expect(event).not_to be_push_with_commits
+ end
+ end
+
+ describe '#tag?' do
+ it 'returns true when pushing to a tag' do
+ allow(payload).to receive(:tag?).and_return(true)
+
+ expect(event).to be_tag
+ end
+
+ it 'returns false when pushing to a branch' do
+ allow(payload).to receive(:tag?).and_return(false)
+
+ expect(event).not_to be_tag
+ end
+ end
+
+ describe '#branch?' do
+ it 'returns true when pushing to a branch' do
+ allow(payload).to receive(:branch?).and_return(true)
+
+ expect(event).to be_branch
+ end
+
+ it 'returns false when pushing to a tag' do
+ allow(payload).to receive(:branch?).and_return(false)
+
+ expect(event).not_to be_branch
+ end
+ end
+
+ describe '#valid_push?' do
+ it 'returns true if a ref exists' do
+ allow(payload).to receive(:ref).and_return('master')
+
+ expect(event).to be_valid_push
+ end
+
+ it 'returns false when no ref is present' do
+ expect(event).not_to be_valid_push
+ end
+ end
+
+ describe '#new_ref?' do
+ it 'returns true when pushing a new ref' do
+ allow(payload).to receive(:created?).and_return(true)
+
+ expect(event).to be_new_ref
+ end
+
+ it 'returns false when pushing to an existing ref' do
+ allow(payload).to receive(:created?).and_return(false)
+
+ expect(event).not_to be_new_ref
+ end
+ end
+
+ describe '#rm_ref?' do
+ it 'returns true when removing an existing ref' do
+ allow(payload).to receive(:removed?).and_return(true)
+
+ expect(event).to be_rm_ref
+ end
+
+ it 'returns false when pushing to an existing ref' do
+ allow(payload).to receive(:removed?).and_return(false)
+
+ expect(event).not_to be_rm_ref
+ end
+ end
+
+ describe '#commit_from' do
+ it 'returns the first commit SHA' do
+ allow(payload).to receive(:commit_from).and_return('123')
+
+ expect(event.commit_from).to eq('123')
+ end
+ end
+
+ describe '#commit_to' do
+ it 'returns the last commit SHA' do
+ allow(payload).to receive(:commit_to).and_return('123')
+
+ expect(event.commit_to).to eq('123')
+ end
+ end
+
+ describe '#ref_name' do
+ it 'returns the name of the ref' do
+ allow(payload).to receive(:ref).and_return('master')
+
+ expect(event.ref_name).to eq('master')
+ end
+ end
+
+ describe '#ref_type' do
+ it 'returns the type of the ref' do
+ allow(payload).to receive(:ref_type).and_return('branch')
+
+ expect(event.ref_type).to eq('branch')
+ end
+ end
+
+ describe '#branch_name' do
+ it 'returns the name of the branch' do
+ allow(payload).to receive(:ref).and_return('master')
+
+ expect(event.branch_name).to eq('master')
+ end
+ end
+
+ describe '#tag_name' do
+ it 'returns the name of the tag' do
+ allow(payload).to receive(:ref).and_return('1.2')
+
+ expect(event.tag_name).to eq('1.2')
+ end
+ end
+
+ describe '#commit_title' do
+ it 'returns the commit title' do
+ allow(payload).to receive(:commit_title).and_return('foo')
+
+ expect(event.commit_title).to eq('foo')
+ end
+ end
+
+ describe '#commit_id' do
+ it 'returns the SHA of the last commit if present' do
+ allow(event).to receive(:commit_to).and_return('123')
+
+ expect(event.commit_id).to eq('123')
+ end
+
+ it 'returns the SHA of the first commit if the last commit is not present' do
+ allow(event).to receive(:commit_to).and_return(nil)
+ allow(event).to receive(:commit_from).and_return('123')
+
+ expect(event.commit_id).to eq('123')
+ end
+ end
+
+ describe '#commits_count' do
+ it 'returns the number of commits' do
+ allow(payload).to receive(:commit_count).and_return(1)
+
+ expect(event.commits_count).to eq(1)
+ end
+ end
+
+ describe '#validate_push_action' do
+ it 'adds an error when the action is not PUSHED' do
+ event.action = Event::CREATED
+ event.validate_push_action
+
+ expect(event.errors.count).to eq(1)
+ end
+ end
+end
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 6c8248eeb40..97bb91a6ac8 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -1291,7 +1291,7 @@ describe User do
let!(:project2) { create(:project, forked_from_project: project3) }
let!(:project3) { create(:project) }
let!(:merge_request) { create(:merge_request, source_project: project2, target_project: project3, author: subject) }
- let!(:push_event) { create(:event, :pushed, project: project1, target: project1, author: subject) }
+ let!(:push_event) { create(:push_event, project: project1, author: subject) }
let!(:merge_event) { create(:event, :created, project: project3, target: merge_request, author: subject) }
before do
@@ -1333,10 +1333,18 @@ describe User do
subject { create(:user) }
let!(:project1) { create(:project, :repository) }
let!(:project2) { create(:project, :repository, forked_from_project: project1) }
- let!(:push_data) do
- Gitlab::DataBuilder::Push.build_sample(project2, subject)
+
+ let!(:push_event) do
+ event = create(:push_event, project: project2, author: subject)
+
+ create(:push_event_payload,
+ event: event,
+ commit_to: '1cf19a015df3523caf0a1f9d40c98a267d6a2fc2',
+ commit_count: 0,
+ ref: 'master')
+
+ event
end
- let!(:push_event) { create(:event, :pushed, project: project2, target: project1, author: subject, data: push_data) }
before do
project1.team << [subject, :master]
@@ -1363,8 +1371,13 @@ describe User do
expect(subject.recent_push(project1)).to eq(nil)
expect(subject.recent_push(project2)).to eq(push_event)
- push_data1 = Gitlab::DataBuilder::Push.build_sample(project1, subject)
- push_event1 = create(:event, :pushed, project: project1, target: project1, author: subject, data: push_data1)
+ push_event1 = create(:push_event, project: project1, author: subject)
+
+ create(:push_event_payload,
+ event: push_event1,
+ commit_to: '1cf19a015df3523caf0a1f9d40c98a267d6a2fc2',
+ commit_count: 0,
+ ref: 'master')
expect(subject.recent_push([project1, project2])).to eq(push_event1) # Newest
end
diff --git a/spec/requests/api/events_spec.rb b/spec/requests/api/events_spec.rb
index f1a26b6ce6c..a23d28994ce 100644
--- a/spec/requests/api/events_spec.rb
+++ b/spec/requests/api/events_spec.rb
@@ -59,6 +59,34 @@ describe API::Events do
expect(json_response.size).to eq(1)
end
+ context 'when the list of events includes push events' do
+ let(:event) do
+ create(:push_event, author: user, project: private_project)
+ end
+
+ let!(:payload) { create(:push_event_payload, event: event) }
+ let(:payload_hash) { json_response[0]['push_data'] }
+
+ before do
+ get api("/users/#{user.id}/events?action=pushed", user)
+ end
+
+ it 'responds with HTTP 200 OK' do
+ expect(response).to have_http_status(200)
+ end
+
+ it 'includes the push payload as a Hash' do
+ expect(payload_hash).to be_an_instance_of(Hash)
+ end
+
+ it 'includes the push payload details' do
+ expect(payload_hash['commit_count']).to eq(payload.commit_count)
+ expect(payload_hash['action']).to eq(payload.action)
+ expect(payload_hash['ref_type']).to eq(payload.ref_type)
+ expect(payload_hash['commit_to']).to eq(payload.commit_to)
+ end
+ end
+
context 'when there are multiple events from different projects' do
let(:second_note) { create(:note_on_issue, project: create(:project)) }
diff --git a/spec/requests/api/internal_spec.rb b/spec/requests/api/internal_spec.rb
index 8a2de23716f..e9c30dba8d4 100644
--- a/spec/requests/api/internal_spec.rb
+++ b/spec/requests/api/internal_spec.rb
@@ -181,13 +181,12 @@ describe API::Internal do
describe "POST /internal/allowed", :clean_gitlab_redis_shared_state do
context "access granted" do
- before do
- project.team << [user, :developer]
- Timecop.freeze
+ around do |example|
+ Timecop.freeze { example.run }
end
- after do
- Timecop.return
+ before do
+ project.team << [user, :developer]
end
context 'with env passed as a JSON' do
diff --git a/spec/requests/api/settings_spec.rb b/spec/requests/api/settings_spec.rb
index c3ed5cd8ece..97275b80d03 100644
--- a/spec/requests/api/settings_spec.rb
+++ b/spec/requests/api/settings_spec.rb
@@ -43,7 +43,9 @@ describe API::Settings, 'Settings' do
default_artifacts_expire_in: '2 days',
help_page_text: 'custom help text',
help_page_hide_commercial_content: true,
- help_page_support_url: 'http://example.com/help'
+ help_page_support_url: 'http://example.com/help',
+ project_export_enabled: false
+
expect(response).to have_http_status(200)
expect(json_response['default_projects_limit']).to eq(3)
expect(json_response['password_authentication_enabled']).to be_falsey
@@ -58,6 +60,7 @@ describe API::Settings, 'Settings' do
expect(json_response['help_page_text']).to eq('custom help text')
expect(json_response['help_page_hide_commercial_content']).to be_truthy
expect(json_response['help_page_support_url']).to eq('http://example.com/help')
+ expect(json_response['project_export_enabled']).to be_falsey
end
end
diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb
index 2dc7be22f8f..49739a1601a 100644
--- a/spec/requests/api/users_spec.rb
+++ b/spec/requests/api/users_spec.rb
@@ -217,9 +217,19 @@ describe API::Users do
it "does not return the user's `is_admin` flag" do
get api("/users/#{user.id}", user)
+ expect(response).to have_http_status(200)
expect(json_response['is_admin']).to be_nil
end
+ context 'when authenticated as admin' do
+ it 'includes the `is_admin` field' do
+ get api("/users/#{user.id}", admin)
+
+ expect(response).to have_http_status(200)
+ expect(json_response['is_admin']).to be(false)
+ end
+ end
+
context 'for an anonymous user' do
it "returns a user by id" do
get api("/users/#{user.id}")
diff --git a/spec/requests/api/v3/users_spec.rb b/spec/requests/api/v3/users_spec.rb
index bc0a4ab20a3..227b8d1b0c1 100644
--- a/spec/requests/api/v3/users_spec.rb
+++ b/spec/requests/api/v3/users_spec.rb
@@ -252,6 +252,31 @@ describe API::V3::Users do
end
context "as a user than can see the event's project" do
+ context 'when the list of events includes push events' do
+ let(:event) { create(:push_event, author: user, project: project) }
+ let!(:payload) { create(:push_event_payload, event: event) }
+ let(:payload_hash) { json_response[0]['push_data'] }
+
+ before do
+ get api("/users/#{user.id}/events?action=pushed", user)
+ end
+
+ it 'responds with HTTP 200 OK' do
+ expect(response).to have_http_status(200)
+ end
+
+ it 'includes the push payload as a Hash' do
+ expect(payload_hash).to be_an_instance_of(Hash)
+ end
+
+ it 'includes the push payload details' do
+ expect(payload_hash['commit_count']).to eq(payload.commit_count)
+ expect(payload_hash['action']).to eq(payload.action)
+ expect(payload_hash['ref_type']).to eq(payload.ref_type)
+ expect(payload_hash['commit_to']).to eq(payload.commit_to)
+ end
+ end
+
context 'joined event' do
it 'returns the "joined" event' do
get v3_api("/users/#{user.id}/events", user)
diff --git a/spec/serializers/analytics_build_entity_spec.rb b/spec/serializers/analytics_build_entity_spec.rb
index 9f26d5cd09a..1ff4908972a 100644
--- a/spec/serializers/analytics_build_entity_spec.rb
+++ b/spec/serializers/analytics_build_entity_spec.rb
@@ -13,12 +13,8 @@ describe AnalyticsBuildEntity do
subject { entity.as_json }
- before do
- Timecop.freeze
- end
-
- after do
- Timecop.return
+ around do |example|
+ Timecop.freeze { example.run }
end
it 'contains the URL' do
diff --git a/spec/services/event_create_service_spec.rb b/spec/services/event_create_service_spec.rb
index 42adb044190..02d7ddeb86b 100644
--- a/spec/services/event_create_service_spec.rb
+++ b/spec/services/event_create_service_spec.rb
@@ -117,12 +117,52 @@ describe EventCreateService do
let(:project) { create(:project) }
let(:user) { create(:user) }
+ let(:push_data) do
+ {
+ commits: [
+ {
+ id: '1cf19a015df3523caf0a1f9d40c98a267d6a2fc2',
+ message: 'This is a commit'
+ }
+ ],
+ before: '0000000000000000000000000000000000000000',
+ after: '1cf19a015df3523caf0a1f9d40c98a267d6a2fc2',
+ total_commits_count: 1,
+ ref: 'refs/heads/my-branch'
+ }
+ end
+
it 'creates a new event' do
- expect { service.push(project, user, {}) }.to change { Event.count }
+ expect { service.push(project, user, push_data) }.to change { Event.count }
+ end
+
+ it 'creates the push event payload' do
+ expect(PushEventPayloadService).to receive(:new)
+ .with(an_instance_of(PushEvent), push_data)
+ .and_call_original
+
+ service.push(project, user, push_data)
end
it 'updates user last activity' do
- expect { service.push(project, user, {}) }.to change { user_activity(user) }
+ expect { service.push(project, user, push_data) }
+ .to change { user_activity(user) }
+ end
+
+ it 'does not create any event data when an error is raised' do
+ payload_service = double(:service)
+
+ allow(payload_service).to receive(:execute)
+ .and_raise(RuntimeError)
+
+ allow(PushEventPayloadService).to receive(:new)
+ .and_return(payload_service)
+
+ expect { service.push(project, user, push_data) }
+ .to raise_error(RuntimeError)
+
+ expect(Event.count).to eq(0)
+ expect(PushEventPayload.count).to eq(0)
end
end
diff --git a/spec/services/git_push_service_spec.rb b/spec/services/git_push_service_spec.rb
index a6449a3c9f5..8485605b398 100644
--- a/spec/services/git_push_service_spec.rb
+++ b/spec/services/git_push_service_spec.rb
@@ -141,10 +141,13 @@ describe GitPushService, services: true do
let!(:push_data) { push_data_from_service(project, user, oldrev, newrev, ref) }
let(:event) { Event.find_by_action(Event::PUSHED) }
- it { expect(event).not_to be_nil }
+ it { expect(event).to be_an_instance_of(PushEvent) }
it { expect(event.project).to eq(project) }
it { expect(event.action).to eq(Event::PUSHED) }
- it { expect(event.data).to eq(push_data) }
+ it { expect(event.push_event_payload).to be_an_instance_of(PushEventPayload) }
+ it { expect(event.push_event_payload.commit_from).to eq(oldrev) }
+ it { expect(event.push_event_payload.commit_to).to eq(newrev) }
+ it { expect(event.push_event_payload.ref).to eq('master') }
context "Updates merge requests" do
it "when pushing a new branch for the first time" do
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index 64981c199e4..44b2d28d1d4 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -80,12 +80,16 @@ describe NotificationService, :mailer do
describe 'Keys' do
describe '#new_key' do
- let!(:key) { create(:personal_key) }
+ let(:key_options) { {} }
+ let!(:key) { create(:personal_key, key_options) }
it { expect(notification.new_key(key)).to be_truthy }
+ it { should_email(key.user) }
- it 'sends email to key owner' do
- expect { notification.new_key(key) }.to change { ActionMailer::Base.deliveries.size }.by(1)
+ describe 'never emails the ghost user' do
+ let(:key_options) { { user: User.ghost } }
+
+ it { should_not_email_anyone }
end
end
end
@@ -1173,19 +1177,39 @@ describe NotificationService, :mailer do
end
end
- describe '#project_exported' do
- it do
- notification.project_exported(project, @u_disabled)
+ context 'user with notifications disabled' do
+ describe '#project_exported' do
+ it do
+ notification.project_exported(project, @u_disabled)
+
+ should_not_email_anyone
+ end
+ end
+
+ describe '#project_not_exported' do
+ it do
+ notification.project_not_exported(project, @u_disabled, ['error'])
- should_only_email(@u_disabled)
+ should_not_email_anyone
+ end
end
end
- describe '#project_not_exported' do
- it do
- notification.project_not_exported(project, @u_disabled, ['error'])
+ context 'user with notifications enabled' do
+ describe '#project_exported' do
+ it do
+ notification.project_exported(project, @u_participating)
- should_only_email(@u_disabled)
+ should_only_email(@u_participating)
+ end
+ end
+
+ describe '#project_not_exported' do
+ it do
+ notification.project_not_exported(project, @u_participating, ['error'])
+
+ should_only_email(@u_participating)
+ end
end
end
end
@@ -1209,6 +1233,35 @@ describe NotificationService, :mailer do
end.to change { ActionMailer::Base.deliveries.size }.by(1)
end
end
+
+ describe '#new_group_member' do
+ let(:group) { create(:group) }
+ let(:added_user) { create(:user) }
+
+ def create_member!
+ GroupMember.create(
+ group: group,
+ user: added_user,
+ access_level: Gitlab::Access::GUEST
+ )
+ end
+
+ it 'sends a notification' do
+ create_member!
+ should_only_email(added_user)
+ end
+
+ describe 'when notifications are disabled' do
+ before do
+ create_global_setting_for(added_user, :disabled)
+ end
+
+ it 'does not send a notification' do
+ create_member!
+ should_not_email_anyone
+ end
+ end
+ end
end
describe 'ProjectMember' do
@@ -1228,6 +1281,31 @@ describe NotificationService, :mailer do
end.to change { ActionMailer::Base.deliveries.size }.by(1)
end
end
+
+ describe '#new_project_member' do
+ let(:project) { create(:project) }
+ let(:added_user) { create(:user) }
+
+ def create_member!
+ create(:project_member, user: added_user, project: project)
+ end
+
+ it do
+ create_member!
+ should_only_email(added_user)
+ end
+
+ describe 'when notifications are disabled' do
+ before do
+ create_global_setting_for(added_user, :disabled)
+ end
+
+ it do
+ create_member!
+ should_not_email_anyone
+ end
+ end
+ end
end
context 'guest user in private project' do
diff --git a/spec/services/push_event_payload_service_spec.rb b/spec/services/push_event_payload_service_spec.rb
new file mode 100644
index 00000000000..81956200bff
--- /dev/null
+++ b/spec/services/push_event_payload_service_spec.rb
@@ -0,0 +1,218 @@
+require 'spec_helper'
+
+describe PushEventPayloadService do
+ let(:event) { create(:push_event) }
+
+ describe '#execute' do
+ let(:push_data) do
+ {
+ commits: [
+ {
+ id: '1cf19a015df3523caf0a1f9d40c98a267d6a2fc2',
+ message: 'This is a commit'
+ }
+ ],
+ before: '0000000000000000000000000000000000000000',
+ after: '1cf19a015df3523caf0a1f9d40c98a267d6a2fc2',
+ total_commits_count: 1,
+ ref: 'refs/heads/my-branch'
+ }
+ end
+
+ it 'creates a new PushEventPayload row' do
+ payload = described_class.new(event, push_data).execute
+
+ expect(payload.commit_count).to eq(1)
+ expect(payload.action).to eq('created')
+ expect(payload.ref_type).to eq('branch')
+ expect(payload.commit_from).to be_nil
+ expect(payload.commit_to).to eq(push_data[:after])
+ expect(payload.ref).to eq('my-branch')
+ expect(payload.commit_title).to eq('This is a commit')
+ expect(payload.event_id).to eq(event.id)
+ end
+
+ it 'sets the push_event_payload association of the used event' do
+ payload = described_class.new(event, push_data).execute
+
+ expect(event.push_event_payload).to eq(payload)
+ end
+ end
+
+ describe '#commit_title' do
+ it 'returns nil if no commits were pushed' do
+ service = described_class.new(event, commits: [])
+
+ expect(service.commit_title).to be_nil
+ end
+
+ it 'returns a String limited to 70 characters' do
+ service = described_class.new(event, commits: [{ message: 'a' * 100 }])
+
+ expect(service.commit_title).to eq(('a' * 67) + '...')
+ end
+
+ it 'does not truncate the commit message if it is shorter than 70 characters' do
+ service = described_class.new(event, commits: [{ message: 'Hello' }])
+
+ expect(service.commit_title).to eq('Hello')
+ end
+
+ it 'includes the first line of a commit message if the message spans multiple lines' do
+ service = described_class
+ .new(event, commits: [{ message: "Hello\n\nworld" }])
+
+ expect(service.commit_title).to eq('Hello')
+ end
+ end
+
+ describe '#commit_from_id' do
+ it 'returns nil when creating a new ref' do
+ service = described_class.new(event, before: Gitlab::Git::BLANK_SHA)
+
+ expect(service.commit_from_id).to be_nil
+ end
+
+ it 'returns the ID of the first commit when pushing to an existing ref' do
+ service = described_class.new(event, before: '123')
+
+ expect(service.commit_from_id).to eq('123')
+ end
+ end
+
+ describe '#commit_to_id' do
+ it 'returns nil when removing an existing ref' do
+ service = described_class.new(event, after: Gitlab::Git::BLANK_SHA)
+
+ expect(service.commit_to_id).to be_nil
+ end
+ end
+
+ describe '#commit_count' do
+ it 'returns the number of commits' do
+ service = described_class.new(event, total_commits_count: 1)
+
+ expect(service.commit_count).to eq(1)
+ end
+
+ it 'raises when the push data does not contain the commits count' do
+ service = described_class.new(event, {})
+
+ expect { service.commit_count }.to raise_error(KeyError)
+ end
+ end
+
+ describe '#ref' do
+ it 'returns the name of the ref' do
+ service = described_class.new(event, ref: 'refs/heads/foo')
+
+ expect(service.ref).to eq('refs/heads/foo')
+ end
+
+ it 'raises when the push data does not contain the ref name' do
+ service = described_class.new(event, {})
+
+ expect { service.ref }.to raise_error(KeyError)
+ end
+ end
+
+ describe '#revision_before' do
+ it 'returns the revision from before the push' do
+ service = described_class.new(event, before: 'foo')
+
+ expect(service.revision_before).to eq('foo')
+ end
+
+ it 'raises when the push data does not contain the before revision' do
+ service = described_class.new(event, {})
+
+ expect { service.revision_before }.to raise_error(KeyError)
+ end
+ end
+
+ describe '#revision_after' do
+ it 'returns the revision from after the push' do
+ service = described_class.new(event, after: 'foo')
+
+ expect(service.revision_after).to eq('foo')
+ end
+
+ it 'raises when the push data does not contain the after revision' do
+ service = described_class.new(event, {})
+
+ expect { service.revision_after }.to raise_error(KeyError)
+ end
+ end
+
+ describe '#trimmed_ref' do
+ it 'returns the ref name without its prefix' do
+ service = described_class.new(event, ref: 'refs/heads/foo')
+
+ expect(service.trimmed_ref).to eq('foo')
+ end
+ end
+
+ describe '#create?' do
+ it 'returns true when creating a new ref' do
+ service = described_class.new(event, before: Gitlab::Git::BLANK_SHA)
+
+ expect(service.create?).to eq(true)
+ end
+
+ it 'returns false when pushing to an existing ref' do
+ service = described_class.new(event, before: 'foo')
+
+ expect(service.create?).to eq(false)
+ end
+ end
+
+ describe '#remove?' do
+ it 'returns true when removing an existing ref' do
+ service = described_class.new(event, after: Gitlab::Git::BLANK_SHA)
+
+ expect(service.remove?).to eq(true)
+ end
+
+ it 'returns false when pushing to an existing ref' do
+ service = described_class.new(event, after: 'foo')
+
+ expect(service.remove?).to eq(false)
+ end
+ end
+
+ describe '#action' do
+ it 'returns :created when creating a ref' do
+ service = described_class.new(event, before: Gitlab::Git::BLANK_SHA)
+
+ expect(service.action).to eq(:created)
+ end
+
+ it 'returns :removed when removing an existing ref' do
+ service = described_class.new(event,
+ before: '123',
+ after: Gitlab::Git::BLANK_SHA)
+
+ expect(service.action).to eq(:removed)
+ end
+
+ it 'returns :pushed when pushing to an existing ref' do
+ service = described_class.new(event, before: '123', after: '456')
+
+ expect(service.action).to eq(:pushed)
+ end
+ end
+
+ describe '#ref_type' do
+ it 'returns :tag for a tag' do
+ service = described_class.new(event, ref: 'refs/tags/1.2')
+
+ expect(service.ref_type).to eq(:tag)
+ end
+
+ it 'returns :branch for a branch' do
+ service = described_class.new(event, ref: 'refs/heads/master')
+
+ expect(service.ref_type).to eq(:branch)
+ end
+ end
+end
diff --git a/spec/simplecov_env.rb b/spec/simplecov_env.rb
index ac2c89b3ff9..25ddf932d42 100644
--- a/spec/simplecov_env.rb
+++ b/spec/simplecov_env.rb
@@ -36,18 +36,25 @@ module SimpleCovEnv
track_files '{app,lib}/**/*.rb'
add_filter '/vendor/ruby/'
+ add_filter 'app/controllers/sherlock/'
add_filter 'config/initializers/'
+ add_filter 'db/fixtures/'
+ add_filter 'lib/gitlab/sidekiq_middleware/'
+ add_filter 'lib/system_check/'
add_group 'Controllers', 'app/controllers'
- add_group 'Models', 'app/models'
- add_group 'Mailers', 'app/mailers'
- add_group 'Helpers', 'app/helpers'
- add_group 'Workers', %w(app/jobs app/workers)
- add_group 'Libraries', 'lib'
- add_group 'Services', 'app/services'
- add_group 'Finders', 'app/finders'
- add_group 'Uploaders', 'app/uploaders'
- add_group 'Validators', 'app/validators'
+ add_group 'Finders', 'app/finders'
+ add_group 'Helpers', 'app/helpers'
+ add_group 'Libraries', 'lib'
+ add_group 'Mailers', 'app/mailers'
+ add_group 'Models', 'app/models'
+ add_group 'Policies', 'app/policies'
+ add_group 'Presenters', 'app/presenters'
+ add_group 'Serializers', 'app/serializers'
+ add_group 'Services', 'app/services'
+ add_group 'Uploaders', 'app/uploaders'
+ add_group 'Validators', 'app/validators'
+ add_group 'Workers', %w(app/jobs app/workers)
merge_timeout 365.days
end
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index 0ba6ed56314..0cddbe89833 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -69,7 +69,14 @@ RSpec.configure do |config|
config.raise_errors_for_deprecations!
+ if ENV['CI']
+ # This includes the first try, i.e. tests will be run 4 times before failing.
+ config.default_retry_count = 4
+ config.reporter.register_listener(RspecFlaky::Listener.new, :example_passed, :dump_summary)
+ end
+
config.before(:suite) do
+ Timecop.safe_mode = true
TestEnv.init
end
@@ -97,12 +104,6 @@ RSpec.configure do |config|
reset_delivered_emails!
end
- if ENV['CI']
- config.around(:each) do |ex|
- ex.run_with_retry retry: 2
- end
- end
-
config.around(:each, :use_clean_rails_memory_store_caching) do |example|
caching_store = Rails.cache
Rails.cache = ActiveSupport::Cache::MemoryStore.new
diff --git a/spec/uploaders/file_mover_spec.rb b/spec/uploaders/file_mover_spec.rb
index d7c1b390f9a..0cf462e9553 100644
--- a/spec/uploaders/file_mover_spec.rb
+++ b/spec/uploaders/file_mover_spec.rb
@@ -4,11 +4,11 @@ describe FileMover do
let(:filename) { 'banana_sample.gif' }
let(:file) { fixture_file_upload(Rails.root.join('spec', 'fixtures', filename)) }
let(:temp_description) do
- 'test ![banana_sample](/uploads/system/temp/secret55/banana_sample.gif) same ![banana_sample]'\
- '(/uploads/system/temp/secret55/banana_sample.gif)'
+ 'test ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif) same ![banana_sample]'\
+ '(/uploads/-/system/temp/secret55/banana_sample.gif)'
end
let(:temp_file_path) { File.join('secret55', filename).to_s }
- let(:file_path) { File.join('uploads', 'system', 'personal_snippet', snippet.id.to_s, 'secret55', filename).to_s }
+ let(:file_path) { File.join('uploads', '-', 'system', 'personal_snippet', snippet.id.to_s, 'secret55', filename).to_s }
let(:snippet) { create(:personal_snippet, description: temp_description) }
@@ -28,8 +28,8 @@ describe FileMover do
expect(snippet.reload.description)
.to eq(
- "test ![banana_sample](/uploads/system/personal_snippet/#{snippet.id}/secret55/banana_sample.gif)"\
- " same ![banana_sample](/uploads/system/personal_snippet/#{snippet.id}/secret55/banana_sample.gif)"
+ "test ![banana_sample](/uploads/-/system/personal_snippet/#{snippet.id}/secret55/banana_sample.gif)"\
+ " same ![banana_sample](/uploads/-/system/personal_snippet/#{snippet.id}/secret55/banana_sample.gif)"
)
end
@@ -50,8 +50,8 @@ describe FileMover do
expect(snippet.reload.description)
.to eq(
- "test ![banana_sample](/uploads/system/temp/secret55/banana_sample.gif)"\
- " same ![banana_sample](/uploads/system/temp/secret55/banana_sample.gif)"
+ "test ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif)"\
+ " same ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif)"
)
end
diff --git a/spec/uploaders/personal_file_uploader_spec.rb b/spec/uploaders/personal_file_uploader_spec.rb
index e505edc75ce..cbafa9f478d 100644
--- a/spec/uploaders/personal_file_uploader_spec.rb
+++ b/spec/uploaders/personal_file_uploader_spec.rb
@@ -10,7 +10,7 @@ describe PersonalFileUploader do
dynamic_segment = "personal_snippet/#{snippet.id}"
- expect(described_class.absolute_path(upload)).to end_with("/system/#{dynamic_segment}/secret/foo.jpg")
+ expect(described_class.absolute_path(upload)).to end_with("/-/system/#{dynamic_segment}/secret/foo.jpg")
end
end
@@ -19,7 +19,7 @@ describe PersonalFileUploader do
uploader = described_class.new(snippet, 'secret')
allow(uploader).to receive(:file).and_return(double(extension: 'txt', filename: 'file_name'))
- expected_url = "/uploads/system/personal_snippet/#{snippet.id}/secret/file_name"
+ expected_url = "/uploads/-/system/personal_snippet/#{snippet.id}/secret/file_name"
expect(uploader.to_h).to eq(
alt: 'file_name',
diff --git a/spec/views/projects/edit.html.haml_spec.rb b/spec/views/projects/edit.html.haml_spec.rb
index 94899e26292..1af422941d7 100644
--- a/spec/views/projects/edit.html.haml_spec.rb
+++ b/spec/views/projects/edit.html.haml_spec.rb
@@ -11,14 +11,26 @@ describe 'projects/edit' do
allow(controller).to receive(:current_user).and_return(user)
allow(view).to receive_messages(current_user: user, can?: true)
- allow(Gitlab.config.lfs).to receive(:enabled).and_return(true)
end
context 'LFS enabled setting' do
it 'displays the correct elements' do
+ allow(Gitlab.config.lfs).to receive(:enabled).and_return(true)
+
render
+
expect(rendered).to have_select('project_lfs_enabled')
expect(rendered).to have_content('Git Large File Storage')
end
end
+
+ context 'project export disabled' do
+ it 'does not display the project export option' do
+ stub_application_setting(project_export_enabled?: false)
+
+ render
+
+ expect(rendered).not_to have_content('Export project')
+ end
+ end
end
diff --git a/spec/workers/prune_old_events_worker_spec.rb b/spec/workers/prune_old_events_worker_spec.rb
index 35e1518a35e..ea974355050 100644
--- a/spec/workers/prune_old_events_worker_spec.rb
+++ b/spec/workers/prune_old_events_worker_spec.rb
@@ -2,9 +2,11 @@ require 'spec_helper'
describe PruneOldEventsWorker do
describe '#perform' do
- let!(:expired_event) { create(:event, author_id: 0, created_at: 13.months.ago) }
- let!(:not_expired_event) { create(:event, author_id: 0, created_at: 1.day.ago) }
- let!(:exactly_12_months_event) { create(:event, author_id: 0, created_at: 12.months.ago) }
+ let(:user) { create(:user) }
+
+ let!(:expired_event) { create(:event, :closed, author: user, created_at: 13.months.ago) }
+ let!(:not_expired_event) { create(:event, :closed, author: user, created_at: 1.day.ago) }
+ let!(:exactly_12_months_event) { create(:event, :closed, author: user, created_at: 12.months.ago) }
it 'prunes events older than 12 months' do
expect { subject.perform }.to change { Event.count }.by(-1)