author     Grzegorz Bizon <grzegorz@gitlab.com>  2018-03-29 11:07:54 +0000
committer  Grzegorz Bizon <grzegorz@gitlab.com>  2018-03-29 11:07:54 +0000
commit     5501dfd13ffda6abe8ff191a65964abbd4f3bb30 (patch)
tree       3b25faf2d41893bc67e972fa56a89b6efd3ac0dc
parent     f4d81536ac26f75e0aad248ad95c31c9e1f2680b (diff)
parent     8230b774b5a6375d9b84c71e72c8e41e3bfd7fad (diff)
download   gitlab-ce-5501dfd13ffda6abe8ff191a65964abbd4f3bb30.tar.gz
Merge branch 'master' into 'feature/gb/variables-expressions-in-only-except'
Conflicts: app/models/ci/build.rb
-rw-r--r--.flayignore1
-rw-r--r--.gitignore1
-rw-r--r--.gitlab/merge_request_templates/Documentation.md27
-rw-r--r--CHANGELOG.md20
-rw-r--r--GITALY_SERVER_VERSION2
-rw-r--r--Gemfile18
-rw-r--r--Gemfile.lock40
-rw-r--r--app/assets/images/ci_favicons/canary/favicon_status_canceled.icobin0 -> 4286 bytes
-rw-r--r--app/assets/images/ci_favicons/canary/favicon_status_created.icobin0 -> 4286 bytes
-rw-r--r--app/assets/images/ci_favicons/canary/favicon_status_failed.icobin0 -> 4286 bytes
-rw-r--r--app/assets/images/ci_favicons/canary/favicon_status_manual.icobin0 -> 4286 bytes
-rw-r--r--app/assets/images/ci_favicons/canary/favicon_status_not_found.icobin0 -> 4286 bytes
-rw-r--r--app/assets/images/ci_favicons/canary/favicon_status_pending.icobin0 -> 4286 bytes
-rw-r--r--app/assets/images/ci_favicons/canary/favicon_status_running.icobin0 -> 4286 bytes
-rw-r--r--app/assets/images/ci_favicons/canary/favicon_status_skipped.icobin0 -> 4286 bytes
-rw-r--r--app/assets/images/ci_favicons/canary/favicon_status_success.icobin0 -> 4286 bytes
-rw-r--r--app/assets/images/ci_favicons/canary/favicon_status_warning.icobin0 -> 4286 bytes
-rw-r--r--app/assets/images/favicon-yellow.icobin0 -> 5430 bytes
-rw-r--r--app/assets/javascripts/behaviors/markdown/render_math.js2
-rw-r--r--app/assets/javascripts/gl_dropdown.js2
-rw-r--r--app/assets/javascripts/ide/lib/editor_options.js1
-rw-r--r--app/assets/javascripts/jobs/components/sidebar_detail_row.vue24
-rw-r--r--app/assets/javascripts/jobs/components/sidebar_details_block.vue27
-rw-r--r--app/assets/javascripts/jobs/job_details_bundle.js1
-rw-r--r--app/assets/javascripts/monitoring/components/dashboard.vue291
-rw-r--r--app/assets/javascripts/monitoring/components/empty_state.vue159
-rw-r--r--app/assets/javascripts/monitoring/components/graph.vue413
-rw-r--r--app/assets/javascripts/monitoring/components/graph/deployment.vue48
-rw-r--r--app/assets/javascripts/monitoring/components/graph/flag.vue226
-rw-r--r--app/assets/javascripts/monitoring/components/graph/legend.vue228
-rw-r--r--app/assets/javascripts/monitoring/components/graph/path.vue60
-rw-r--r--app/assets/javascripts/monitoring/components/graph_group.vue24
-rw-r--r--app/assets/javascripts/notes/components/noteable_discussion.vue6
-rw-r--r--app/assets/javascripts/pages/milestones/shared/components/promote_milestone_modal.vue8
-rw-r--r--app/assets/javascripts/pages/milestones/shared/promote_milestone_modal_init.js2
-rw-r--r--app/assets/javascripts/pages/projects/ci/lints/ci_lint_editor.js (renamed from app/assets/javascripts/pages/ci/lints/ci_lint_editor.js)0
-rw-r--r--app/assets/javascripts/pages/projects/ci/lints/new/index.js (renamed from app/assets/javascripts/pages/ci/lints/new/index.js)0
-rw-r--r--app/assets/javascripts/pages/projects/ci/lints/show/index.js (renamed from app/assets/javascripts/pages/ci/lints/show/index.js)0
-rw-r--r--app/assets/javascripts/pages/projects/labels/components/promote_label_modal.vue17
-rw-r--r--app/assets/javascripts/pages/projects/labels/index/index.js2
-rw-r--r--app/assets/javascripts/sidebar/components/time_tracking/comparison_pane.vue (renamed from app/assets/javascripts/sidebar/components/time_tracking/comparison_pane.js)76
-rw-r--r--app/assets/javascripts/sidebar/components/time_tracking/time_tracker.vue4
-rw-r--r--app/assets/javascripts/vue_merge_request_widget/components/states/sha_mismatch.vue4
-rw-r--r--app/assets/stylesheets/framework/gitlab_theme.scss113
-rw-r--r--app/assets/stylesheets/framework/modal.scss6
-rw-r--r--app/assets/stylesheets/pages/lint.scss21
-rw-r--r--app/assets/stylesheets/pages/projects.scss22
-rw-r--r--app/assets/stylesheets/pages/repo.scss34
-rw-r--r--app/controllers/ci/lints_controller.rb15
-rw-r--r--app/controllers/concerns/send_file_upload.rb17
-rw-r--r--app/controllers/concerns/uploads_actions.rb30
-rw-r--r--app/controllers/projects/artifacts_controller.rb12
-rw-r--r--app/controllers/projects/branches_controller.rb16
-rw-r--r--app/controllers/projects/ci/lints_controller.rb27
-rw-r--r--app/controllers/projects/jobs_controller.rb32
-rw-r--r--app/controllers/projects/labels_controller.rb2
-rw-r--r--app/controllers/projects/lfs_storage_controller.rb56
-rw-r--r--app/controllers/projects/milestones_controller.rb8
-rw-r--r--app/controllers/projects/protected_branches_controller.rb8
-rw-r--r--app/controllers/projects/protected_refs_controller.rb14
-rw-r--r--app/controllers/projects/protected_tags_controller.rb8
-rw-r--r--app/controllers/projects/raw_controller.rb3
-rw-r--r--app/controllers/projects/settings/ci_cd_controller.rb6
-rw-r--r--app/controllers/root_controller.rb4
-rw-r--r--app/helpers/namespaces_helper.rb2
-rw-r--r--app/helpers/page_layout_helper.rb5
-rw-r--r--app/helpers/preferences_helper.rb14
-rw-r--r--app/models/appearance.rb2
-rw-r--r--app/models/ci/build.rb33
-rw-r--r--app/models/ci/build_metadata.rb35
-rw-r--r--app/models/ci/job_artifact.rb10
-rw-r--r--app/models/ci/runner.rb9
-rw-r--r--app/models/clusters/cluster.rb2
-rw-r--r--app/models/concerns/avatarable.rb1
-rw-r--r--app/models/concerns/chronic_duration_attribute.rb39
-rw-r--r--app/models/concerns/deployment_platform.rb22
-rw-r--r--app/models/deploy_key.rb4
-rw-r--r--app/models/lfs_object.rb15
-rw-r--r--app/models/merge_request.rb27
-rw-r--r--app/models/project_services/gemnasium_service.rb2
-rw-r--r--app/models/redirect_route.rb28
-rw-r--r--app/models/repository.rb8
-rw-r--r--app/models/route.rb26
-rw-r--r--app/models/service.rb1
-rw-r--r--app/models/upload.rb19
-rw-r--r--app/models/user.rb9
-rw-r--r--app/policies/protected_branch_policy.rb9
-rw-r--r--app/presenters/ci/build_metadata_presenter.rb18
-rw-r--r--app/serializers/build_details_entity.rb2
-rw-r--r--app/serializers/build_metadata_entity.rb9
-rw-r--r--app/serializers/status_entity.rb10
-rw-r--r--app/services/projects/create_service.rb23
-rw-r--r--app/services/projects/update_pages_service.rb18
-rw-r--r--app/services/protected_branches/create_service.rb17
-rw-r--r--app/services/protected_branches/destroy_service.rb9
-rw-r--r--app/services/protected_branches/update_service.rb2
-rw-r--r--app/services/protected_tags/destroy_service.rb7
-rw-r--r--app/services/verify_pages_domain_service.rb10
-rw-r--r--app/uploaders/attachment_uploader.rb6
-rw-r--r--app/uploaders/avatar_uploader.rb8
-rw-r--r--app/uploaders/file_mover.rb9
-rw-r--r--app/uploaders/file_uploader.rb56
-rw-r--r--app/uploaders/gitlab_uploader.rb10
-rw-r--r--app/uploaders/job_artifact_uploader.rb9
-rw-r--r--app/uploaders/legacy_artifact_uploader.rb1
-rw-r--r--app/uploaders/lfs_object_uploader.rb6
-rw-r--r--app/uploaders/namespace_file_uploader.rb11
-rw-r--r--app/uploaders/object_storage.rb434
-rw-r--r--app/uploaders/personal_file_uploader.rb17
-rw-r--r--app/uploaders/records_uploads.rb3
-rw-r--r--app/views/admin/application_settings/_ci_cd.html.haml47
-rw-r--r--app/views/admin/application_settings/_form.html.haml134
-rw-r--r--app/views/admin/application_settings/_influx.html.haml68
-rw-r--r--app/views/admin/application_settings/_prometheus.html.haml28
-rw-r--r--app/views/admin/application_settings/show.html.haml33
-rw-r--r--app/views/admin/projects/show.html.haml10
-rw-r--r--app/views/ci/lints/show.html.haml36
-rw-r--r--app/views/ci/variables/_variable_row.html.haml2
-rw-r--r--app/views/import/github/new.html.haml3
-rw-r--r--app/views/layouts/nav/sidebar/_project.html.haml16
-rw-r--r--app/views/projects/ci/lints/_create.html.haml (renamed from app/views/ci/lints/_create.html.haml)0
-rw-r--r--app/views/projects/ci/lints/show.html.haml27
-rw-r--r--app/views/projects/clusters/user/_header.html.haml2
-rw-r--r--app/views/projects/jobs/_sidebar.html.haml2
-rw-r--r--app/views/projects/jobs/show.html.haml2
-rw-r--r--app/views/projects/milestones/show.html.haml1
-rw-r--r--app/views/projects/new.html.haml8
-rw-r--r--app/views/projects/pages_domains/new.html.haml2
-rw-r--r--app/views/projects/pipelines/index.html.haml2
-rw-r--r--app/views/projects/runners/_form.html.haml6
-rw-r--r--app/views/projects/runners/show.html.haml3
-rw-r--r--app/views/shared/_import_form.html.haml3
-rw-r--r--app/views/shared/_label.html.haml1
-rw-r--r--app/views/shared/milestones/_milestone.html.haml1
-rw-r--r--app/workers/all_queues.yml4
-rw-r--r--app/workers/concerns/object_storage_queue.rb8
-rw-r--r--app/workers/git_garbage_collect_worker.rb9
-rw-r--r--app/workers/object_storage/background_move_worker.rb29
-rw-r--r--app/workers/object_storage/migrate_uploads_worker.rb202
-rw-r--r--app/workers/object_storage_upload_worker.rb21
-rw-r--r--app/workers/repository_fork_worker.rb1
-rw-r--r--changelogs/unreleased-ee/39118-dynamic-pipeline-variables-fe.yml6
-rw-r--r--changelogs/unreleased-ee/4378-fix-cluster-js-not-running-on-update-page.yml5
-rw-r--r--changelogs/unreleased-ee/bvl-external-policy-classification.yml5
-rw-r--r--changelogs/unreleased/27210-add-cancel-btn-to-new-page-domain.yml5
-rw-r--r--changelogs/unreleased/33803-drop-json-support-in-project-milestone.yml5
-rw-r--r--changelogs/unreleased/38167-ui-bug-when-creating-new-branch.yml5
-rw-r--r--changelogs/unreleased/40781-os-to-ce.yml5
-rw-r--r--changelogs/unreleased/43512-add-support-for-omniauth-jwt-provider.yml5
-rw-r--r--changelogs/unreleased/43603-ci-lint-support.yml5
-rw-r--r--changelogs/unreleased/43794-fix-domain-verification-validation-errors.yml5
-rw-r--r--changelogs/unreleased/44232-docs-for-runner-ip-address.yml5
-rw-r--r--changelogs/unreleased/44392-resolve-projects-creation-silently-failing-on-after-create-error.yml5
-rw-r--r--changelogs/unreleased/44508-fix-fork-namespace-images.yml5
-rw-r--r--changelogs/unreleased/44564-error-500-while-attempting-to-resolve-conflicts-due-to-utf-8-conversion-error.yml5
-rw-r--r--changelogs/unreleased/44649-reference-parsing-conflicting-with-auto-linking.yml5
-rw-r--r--changelogs/unreleased/44657-reuse-root_ref_hash-on-branches.yml5
-rw-r--r--changelogs/unreleased/44712-update-asciidoctor-from-1-5-3-to-1-5-6-2.yml5
-rw-r--r--changelogs/unreleased/44717-no-resolve-issue.yml5
-rw-r--r--changelogs/unreleased/ab-44446-add-indexes-for-user-activity-queries.yml5
-rw-r--r--changelogs/unreleased/ab-44467-remove-index.yml5
-rw-r--r--changelogs/unreleased/ac-fix-use_file-race.yml5
-rw-r--r--changelogs/unreleased/ac-lfs-direct-upload-ee-to-ce.yml5
-rw-r--r--changelogs/unreleased/add-canary-favicon.yml5
-rw-r--r--changelogs/unreleased/add-per-runner-job-timeout.yml5
-rw-r--r--changelogs/unreleased/bvl-no-permanent-redirect.yml5
-rw-r--r--changelogs/unreleased/dashboard-view-user-choices-issues-merge-requests.yml5
-rw-r--r--changelogs/unreleased/dm-deploy-keys-default-user.yml5
-rw-r--r--changelogs/unreleased/dm-refs-contains-sha-encoding.yml5
-rw-r--r--changelogs/unreleased/dz-improve-app-settings-2.yml5
-rw-r--r--changelogs/unreleased/expose-commits-mr-api.yml5
-rw-r--r--changelogs/unreleased/fix-ci-job-auto-retry.yml5
-rw-r--r--changelogs/unreleased/fix-gb-fix-background-pipeline-stages-migration.yml5
-rw-r--r--changelogs/unreleased/ide-file-row-hover-style.yml5
-rw-r--r--changelogs/unreleased/jivl-change-copy-text-promote-milestones-labels.yml5
-rw-r--r--changelogs/unreleased/move-registry-after-cicd-project-nav-sidebar.yml5
-rw-r--r--changelogs/unreleased/poc-upload-hashing-path.yml5
-rw-r--r--changelogs/unreleased/reduce-query-count-for-mergerequestscontroller-show.yml5
-rw-r--r--changelogs/unreleased/refactor-move-time-tracking-comparison-pane-vue-component.yml5
-rw-r--r--changelogs/unreleased/sh-update-loofah.yml5
-rw-r--r--changelogs/unreleased/update-unresolved-discussions-vue-component.yml5
-rw-r--r--changelogs/unreleased/workhorse-gitaly-mandatory.yml5
-rw-r--r--changelogs/unreleased/zj-remote-repo-exists.yml5
-rw-r--r--config/gitlab.yml.example89
-rw-r--r--config/initializers/1_settings.rb22
-rw-r--r--config/initializers/carrierwave.rb12
-rw-r--r--config/initializers/fog_google_https_private_urls.rb20
-rw-r--r--config/routes/ci.rb2
-rw-r--r--config/routes/project.rb4
-rw-r--r--config/sidekiq_queues.yml2
-rw-r--r--config/webpack.config.js2
-rw-r--r--db/migrate/20170601163708_add_artifacts_store_to_ci_build.rb10
-rw-r--r--db/migrate/20170825015534_add_file_store_to_lfs_objects.rb31
-rw-r--r--db/migrate/20170918072949_add_file_store_job_artifacts.rb10
-rw-r--r--db/migrate/20171214144320_add_store_column_to_uploads.rb12
-rw-r--r--db/migrate/20180219153455_add_maximum_timeout_to_ci_runners.rb9
-rw-r--r--db/migrate/20180301010859_create_ci_builds_metadata_table.rb20
-rw-r--r--db/migrate/20180327101207_remove_index_from_events_table.rb18
-rw-r--r--db/post_migrate/20180305100050_remove_permanent_from_redirect_routes.rb37
-rw-r--r--db/post_migrate/20180306164012_add_path_index_to_redirect_routes.rb38
-rw-r--r--db/schema.rb22
-rw-r--r--doc/README.md6
-rw-r--r--doc/administration/auth/jwt.md72
-rw-r--r--doc/administration/index.md5
-rw-r--r--doc/administration/job_artifacts.md122
-rw-r--r--doc/administration/job_traces.md42
-rw-r--r--doc/administration/raketasks/uploads/migrate.md74
-rw-r--r--doc/administration/uploads.md209
-rw-r--r--doc/api/commits.md68
-rw-r--r--doc/api/runners.md7
-rw-r--r--doc/ci/docker/using_docker_build.md91
-rw-r--r--doc/ci/examples/README.md4
-rw-r--r--doc/ci/examples/code_climate.md7
-rw-r--r--doc/ci/quick_start/README.md4
-rw-r--r--doc/ci/runners/README.md45
-rw-r--r--doc/ci/yaml/README.md5
-rw-r--r--doc/development/ee_features.md272
-rw-r--r--doc/development/new_fe_guide/style/javascript.md194
-rw-r--r--doc/integration/omniauth.md1
-rw-r--r--doc/raketasks/README.md1
-rw-r--r--doc/topics/autodevops/index.md18
-rw-r--r--doc/user/profile/preferences.md4
-rw-r--r--doc/user/project/clusters/index.md4
-rw-r--r--doc/user/project/index.md14
-rw-r--r--doc/user/project/integrations/prometheus_library/kubernetes.md15
-rw-r--r--doc/user/project/issue_board.md2
-rw-r--r--doc/user/project/merge_requests/img/remove_source_branch_status.pngbin0 -> 32649 bytes
-rw-r--r--doc/user/project/merge_requests/index.md16
-rw-r--r--doc/user/project/pipelines/settings.md9
-rw-r--r--doc/workflow/lfs/lfs_administration.md116
-rw-r--r--features/groups.feature73
-rw-r--r--features/steps/groups.rb147
-rw-r--r--lib/api/commits.rb14
-rw-r--r--lib/api/deploy_keys.rb23
-rw-r--r--lib/api/entities.rb3
-rw-r--r--lib/api/helpers.rb16
-rw-r--r--lib/api/job_artifacts.rb4
-rw-r--r--lib/api/jobs.rb2
-rw-r--r--lib/api/project_export.rb2
-rw-r--r--lib/api/protected_branches.rb5
-rw-r--r--lib/api/runner.rb6
-rw-r--r--lib/api/runners.rb1
-rw-r--r--lib/api/v3/builds.rb6
-rw-r--r--lib/banzai/filter/autolink_filter.rb8
-rw-r--r--lib/gitlab/background_migration/migrate_build_stage.rb1
-rw-r--r--lib/gitlab/bare_repository_import/importer.rb2
-rw-r--r--lib/gitlab/checks/project_moved.rb20
-rw-r--r--lib/gitlab/ci/build/step.rb4
-rw-r--r--lib/gitlab/ci/config.rb3
-rw-r--r--lib/gitlab/ci/trace/http_io.rb187
-rw-r--r--lib/gitlab/ci/trace/stream.rb2
-rw-r--r--lib/gitlab/ci/yaml_processor.rb8
-rw-r--r--lib/gitlab/database/migration_helpers.rb39
-rw-r--r--lib/gitlab/git/repository.rb3
-rw-r--r--lib/gitlab/git_access.rb18
-rw-r--r--lib/gitlab/gitaly_client.rb4
-rw-r--r--lib/gitlab/gitaly_client/remote_service.rb11
-rw-r--r--lib/gitlab/github_import/importer/repository_importer.rb8
-rw-r--r--lib/gitlab/verify/lfs_objects.rb2
-rw-r--r--lib/gitlab/verify/uploads.rb2
-rw-r--r--lib/gitlab/workhorse.rb37
-rw-r--r--lib/tasks/gitlab/artifacts/migrate.rake25
-rw-r--r--lib/tasks/gitlab/lfs/migrate.rake22
-rw-r--r--lib/tasks/gitlab/uploads/migrate.rake33
-rw-r--r--lib/tasks/migrate/setup_postgresql.rake4
-rw-r--r--package.json3
-rw-r--r--qa/qa/scenario/bootable.rb2
-rw-r--r--qa/qa/scenario/test/instance.rb8
-rw-r--r--qa/qa/scenario/test/integration/mattermost.rb4
-rw-r--r--qa/qa/specs/runner.rb6
-rw-r--r--qa/spec/scenario/test/instance_spec.rb4
-rw-r--r--spec/controllers/concerns/send_file_upload_spec.rb89
-rw-r--r--spec/controllers/projects/artifacts_controller_spec.rb14
-rw-r--r--spec/controllers/projects/ci/lints_controller_spec.rb123
-rw-r--r--spec/controllers/projects/clusters_controller_spec.rb4
-rw-r--r--spec/controllers/projects/jobs_controller_spec.rb63
-rw-r--r--spec/controllers/projects/milestones_controller_spec.rb15
-rw-r--r--spec/controllers/projects/protected_branches_controller_spec.rb97
-rw-r--r--spec/controllers/projects/raw_controller_spec.rb56
-rw-r--r--spec/controllers/root_controller_spec.rb24
-rw-r--r--spec/factories/appearances.rb17
-rw-r--r--spec/factories/ci/build_metadata.rb9
-rw-r--r--spec/factories/ci/job_artifacts.rb4
-rw-r--r--spec/factories/clusters/clusters.rb4
-rw-r--r--spec/factories/lfs_objects.rb4
-rw-r--r--spec/factories/redirect_routes.rb9
-rw-r--r--spec/factories/uploads.rb5
-rw-r--r--spec/features/admin/admin_settings_spec.rb30
-rw-r--r--spec/features/groups/activity_spec.rb25
-rw-r--r--spec/features/groups/group_settings_spec.rb21
-rw-r--r--spec/features/groups/issues_spec.rb20
-rw-r--r--spec/features/groups/merge_requests_spec.rb16
-rw-r--r--spec/features/groups/show_spec.rb36
-rw-r--r--spec/features/groups/user_browse_projects_group_page_spec.rb29
-rw-r--r--spec/features/projects/ci/lint_spec.rb (renamed from spec/features/ci_lint_spec.rb)8
-rw-r--r--spec/features/projects/hook_logs/user_reads_log_spec.rb21
-rw-r--r--spec/finders/clusters_finder_spec.rb2
-rw-r--r--spec/helpers/page_layout_helper_spec.rb5
-rw-r--r--spec/helpers/preferences_helper_spec.rb4
-rw-r--r--spec/initializers/fog_google_https_private_urls_spec.rb24
-rw-r--r--spec/javascripts/fixtures/gl_dropdown.html.haml3
-rw-r--r--spec/javascripts/fixtures/projects.rb2
-rw-r--r--spec/javascripts/gl_dropdown_spec.js25
-rw-r--r--spec/javascripts/helpers/vue_component_helper.js3
-rw-r--r--spec/javascripts/ide/lib/editor_spec.js40
-rw-r--r--spec/javascripts/jobs/mock_data.js4
-rw-r--r--spec/javascripts/jobs/sidebar_detail_row_spec.js21
-rw-r--r--spec/javascripts/jobs/sidebar_details_block_spec.js6
-rw-r--r--spec/javascripts/notes/components/noteable_discussion_spec.js18
-rw-r--r--spec/javascripts/pages/labels/components/promote_label_modal_spec.js3
-rw-r--r--spec/javascripts/pages/milestones/shared/components/promote_milestone_modal_spec.js3
-rw-r--r--spec/javascripts/vue_mr_widget/components/states/mr_widget_conflicts_spec.js6
-rw-r--r--spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js5
-rw-r--r--spec/javascripts/vue_mr_widget/components/states/mr_widget_sha_mismatch_spec.js28
-rw-r--r--spec/lib/backup/manager_spec.rb4
-rw-r--r--spec/lib/banzai/filter/autolink_filter_spec.rb9
-rw-r--r--spec/lib/gitlab/asciidoc_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/migrate_build_stage_spec.rb16
-rw-r--r--spec/lib/gitlab/checks/project_moved_spec.rb43
-rw-r--r--spec/lib/gitlab/ci/build/step_spec.rb12
-rw-r--r--spec/lib/gitlab/ci/trace/http_io_spec.rb315
-rw-r--r--spec/lib/gitlab/database/migration_helpers_spec.rb29
-rw-r--r--spec/lib/gitlab/git/repository_spec.rb5
-rw-r--r--spec/lib/gitlab/git_access_spec.rb59
-rw-r--r--spec/lib/gitlab/gitaly_client/remote_service_spec.rb10
-rw-r--r--spec/lib/gitlab/github_import/importer/repository_importer_spec.rb6
-rw-r--r--spec/lib/gitlab/import_export/safe_model_attributes.yml2
-rw-r--r--spec/lib/gitlab/verify/lfs_objects_spec.rb16
-rw-r--r--spec/lib/gitlab/verify/uploads_spec.rb16
-rw-r--r--spec/lib/gitlab/workhorse_spec.rb10
-rw-r--r--spec/mailers/previews/notify_preview.rb2
-rw-r--r--spec/migrations/remove_empty_fork_networks_spec.rb4
-rw-r--r--spec/models/ci/build_metadata_spec.rb61
-rw-r--r--spec/models/ci/build_spec.rb100
-rw-r--r--spec/models/ci/job_artifact_spec.rb44
-rw-r--r--spec/models/concerns/chronic_duration_attribute_spec.rb115
-rw-r--r--spec/models/deploy_key_spec.rb21
-rw-r--r--spec/models/lfs_object_spec.rb85
-rw-r--r--spec/models/merge_request_spec.rb11
-rw-r--r--spec/models/repository_spec.rb22
-rw-r--r--spec/models/route_spec.rb159
-rw-r--r--spec/models/service_spec.rb15
-rw-r--r--spec/models/user_spec.rb47
-rw-r--r--spec/policies/protected_branch_policy_spec.rb22
-rw-r--r--spec/presenters/project_presenter_spec.rb2
-rw-r--r--spec/requests/api/commits_spec.rb29
-rw-r--r--spec/requests/api/deploy_keys_spec.rb4
-rw-r--r--spec/requests/api/jobs_spec.rb84
-rw-r--r--spec/requests/api/protected_branches_spec.rb34
-rw-r--r--spec/requests/api/runner_spec.rb129
-rw-r--r--spec/requests/api/runners_spec.rb5
-rw-r--r--spec/requests/api/v3/builds_spec.rb32
-rw-r--r--spec/requests/git_http_spec.rb28
-rw-r--r--spec/requests/lfs_http_spec.rb208
-rw-r--r--spec/serializers/pipeline_serializer_spec.rb1
-rw-r--r--spec/serializers/status_entity_spec.rb5
-rw-r--r--spec/services/ci/retry_build_service_spec.rb4
-rw-r--r--spec/services/clusters/create_service_spec.rb2
-rw-r--r--spec/services/groups/transfer_service_spec.rb42
-rw-r--r--spec/services/issues/move_service_spec.rb2
-rw-r--r--spec/services/projects/create_service_spec.rb17
-rw-r--r--spec/services/protected_branches/create_service_spec.rb13
-rw-r--r--spec/services/protected_branches/destroy_service_spec.rb30
-rw-r--r--spec/services/protected_branches/update_service_spec.rb11
-rw-r--r--spec/services/protected_tags/destroy_service_spec.rb17
-rw-r--r--spec/services/verify_pages_domain_service_spec.rb19
-rw-r--r--spec/spec_helper.rb4
-rw-r--r--spec/support/gitaly.rb6
-rw-r--r--spec/support/http_io/http_io_helpers.rb64
-rw-r--r--spec/support/migrations_helpers.rb5
-rw-r--r--spec/support/shared_examples/uploaders/object_storage_shared_examples.rb138
-rw-r--r--spec/support/stub_object_storage.rb48
-rw-r--r--spec/tasks/gitlab/artifacts/migrate_rake_spec.rb118
-rw-r--r--spec/tasks/gitlab/lfs/migrate_rake_spec.rb37
-rw-r--r--spec/tasks/gitlab/uploads/migrate_rake_spec.rb28
-rw-r--r--spec/uploaders/attachment_uploader_spec.rb22
-rw-r--r--spec/uploaders/avatar_uploader_spec.rb27
-rw-r--r--spec/uploaders/file_mover_spec.rb6
-rw-r--r--spec/uploaders/file_uploader_spec.rb61
-rw-r--r--spec/uploaders/gitlab_uploader_spec.rb4
-rw-r--r--spec/uploaders/job_artifact_uploader_spec.rb35
-rw-r--r--spec/uploaders/legacy_artifact_uploader_spec.rb14
-rw-r--r--spec/uploaders/lfs_object_uploader_spec.rb58
-rw-r--r--spec/uploaders/namespace_file_uploader_spec.rb22
-rw-r--r--spec/uploaders/object_storage_spec.rb654
-rw-r--r--spec/uploaders/personal_file_uploader_spec.rb22
-rw-r--r--spec/views/projects/ci/lints/show.html.haml_spec.rb (renamed from spec/views/ci/lints/show.html.haml_spec.rb)33
-rw-r--r--spec/workers/object_storage_upload_worker_spec.rb108
-rw-r--r--yarn.lock2
389 files changed, 8740 insertions, 2387 deletions
diff --git a/.flayignore b/.flayignore
index 87cb3507b05..3d69bb2c985 100644
--- a/.flayignore
+++ b/.flayignore
@@ -8,3 +8,4 @@ lib/gitlab/redis/*.rb
lib/gitlab/gitaly_client/operation_service.rb
lib/gitlab/background_migration/*
app/models/project_services/kubernetes_service.rb
+lib/gitlab/workhorse.rb
diff --git a/.gitignore b/.gitignore
index 447fb71bd64..e9ff0048c1c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -50,6 +50,7 @@ eslint-report.html
/db/data.yml
/doc/code/*
/dump.rdb
+/jsconfig.json
/log/*.log*
/node_modules/
/nohup.out
diff --git a/.gitlab/merge_request_templates/Documentation.md b/.gitlab/merge_request_templates/Documentation.md
index 102eb7e7953..da38a703c3c 100644
--- a/.gitlab/merge_request_templates/Documentation.md
+++ b/.gitlab/merge_request_templates/Documentation.md
@@ -1,16 +1,29 @@
-See the general Documentation guidelines http://docs.gitlab.com/ce/development/doc_styleguide.html
+<!--See the general Documentation guidelines https://docs.gitlab.com/ce/development/writing_documentation.html -->
## What does this MR do?
-(briefly describe what this MR is about)
+<!-- Briefly describe what this MR is about -->
+
+## Related issues
+
+<!-- Mention the issue(s) this MR closes or is related to -->
+
+Closes
## Moving docs to a new location?
-See the guidelines: http://docs.gitlab.com/ce/development/doc_styleguide.html#changing-document-location
+Read the guidelines:
+https://docs.gitlab.com/ce/development/writing_documentation.html#changing-document-location
-- [ ] Make sure the old link is not removed and has its contents replaced with a link to the new location.
+- [ ] Make sure the old link is not removed and has its contents replaced with
+ a link to the new location.
- [ ] Make sure internal links pointing to the document in question are not broken.
-- [ ] Search and replace any links referring to old docs in GitLab Rails app, specifically under the `app/views/` directory.
-- [ ] Make sure to add [`redirect_from`](https://docs.gitlab.com/ee/development/doc_styleguide.html#redirections-for-pages-with-disqus-comments) to the new document if there are any Disqus comments on the old document thread.
-- [ ] If working on CE, submit an MR to EE with the changes as well.
+- [ ] Search and replace any links referring to old docs in GitLab Rails app,
+ specifically under the `app/views/` and `ee/app/views` (for GitLab EE) directories.
+- [ ] Make sure to add [`redirect_from`](https://docs.gitlab.com/ce/development/writing_documentation.html#redirections-for-pages-with-disqus-comments)
+ to the new document if there are any Disqus comments on the old document thread.
+- [ ] If working on CE and the `ee-compat-check` jobs fails, submit an MR to EE
+ with the changes as well (https://docs.gitlab.com/ce/development/writing_documentation.html#cherry-picking-from-ce-to-ee).
- [ ] Ping one of the technical writers for review.
+
+/label ~Documentation
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 4426cd20732..adb0ec9f5b1 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,26 @@
documentation](doc/development/changelog.md) for instructions on adding your own
entry.
+## 10.6.1 (2018-03-27)
+
+### Security (1 change)
+
+- Bump rails-html-sanitizer to 1.0.4.
+
+### Fixed (2 changes)
+
+- Prevent auto-retry AccessDenied error from stopping transition to failed. !17862
+- Fix 500 error when trying to resolve non-ASCII conflicts in the editor. !17962
+
+### Performance (1 change)
+
+- Add indexes for user activity queries. !17890
+
+### Other (1 change)
+
+- Add documentation for runner IP address (#44232). !17837
+
+
## 10.6.0 (2018-03-22)
### Security (4 changes)
diff --git a/GITALY_SERVER_VERSION b/GITALY_SERVER_VERSION
index 8f63f4f9a10..36545ad338e 100644
--- a/GITALY_SERVER_VERSION
+++ b/GITALY_SERVER_VERSION
@@ -1 +1 @@
-0.91.0
+0.92.0
diff --git a/Gemfile b/Gemfile
index a670d86104c..a1e43700260 100644
--- a/Gemfile
+++ b/Gemfile
@@ -28,7 +28,7 @@ gem 'default_value_for', gem_versions['default_value_for']
gem 'mysql2', '~> 0.4.10', group: :mysql
gem 'pg', '~> 0.18.2', group: :postgres
-gem 'rugged', '~> 0.26.0'
+gem 'rugged', '~> 0.27'
gem 'grape-route-helpers', '~> 2.1.0'
gem 'faraday', '~> 0.12'
@@ -44,7 +44,7 @@ gem 'omniauth-cas3', '~> 1.1.4'
gem 'omniauth-facebook', '~> 4.0.0'
gem 'omniauth-github', '~> 1.1.1'
gem 'omniauth-gitlab', '~> 1.0.2'
-gem 'omniauth-google-oauth2', '~> 0.5.2'
+gem 'omniauth-google-oauth2', '~> 0.5.3'
gem 'omniauth-kerberos', '~> 0.3.0', group: :kerberos
gem 'omniauth-oauth2-generic', '~> 0.2.2'
gem 'omniauth-saml', '~> 1.10'
@@ -52,6 +52,7 @@ gem 'omniauth-shibboleth', '~> 1.2.0'
gem 'omniauth-twitter', '~> 1.4'
gem 'omniauth_crowd', '~> 2.2.0'
gem 'omniauth-authentiq', '~> 0.3.1'
+gem 'omniauth-jwt', '~> 0.0.2'
gem 'rack-oauth2', '~> 1.2.1'
gem 'jwt', '~> 1.5.6'
@@ -117,9 +118,9 @@ gem 'carrierwave', '~> 1.2'
gem 'dropzonejs-rails', '~> 0.7.1'
# for backups
-gem 'fog-aws', '~> 2.0'
+gem 'fog-aws', '~> 2.0.1'
gem 'fog-core', '~> 1.44'
-gem 'fog-google', '~> 0.5'
+gem 'fog-google', '~> 1.3.3'
gem 'fog-local', '~> 0.3'
gem 'fog-openstack', '~> 0.1'
gem 'fog-rackspace', '~> 0.1.1'
@@ -145,8 +146,8 @@ gem 'rdoc', '~> 4.2'
gem 'org-ruby', '~> 0.9.12'
gem 'creole', '~> 0.5.0'
gem 'wikicloth', '0.8.1'
-gem 'asciidoctor', '~> 1.5.2'
-gem 'asciidoctor-plantuml', '0.0.7'
+gem 'asciidoctor', '~> 1.5.6'
+gem 'asciidoctor-plantuml', '0.0.8'
gem 'rouge', '~> 2.0'
gem 'truncato', '~> 0.7.9'
gem 'bootstrap_form', '~> 2.7.0'
@@ -375,6 +376,8 @@ group :development, :test do
gem 'stackprof', '~> 0.2.10', require: false
gem 'simple_po_parser', '~> 1.1.2', require: false
+
+ gem 'timecop', '~> 0.8.0'
end
group :test do
@@ -384,7 +387,6 @@ group :test do
gem 'webmock', '~> 2.3.2'
gem 'test_after_commit', '~> 1.1'
gem 'sham_rack', '~> 1.3.6'
- gem 'timecop', '~> 0.8.0'
gem 'concurrent-ruby', '~> 1.0.5'
gem 'test-prof', '~> 0.2.5'
end
@@ -420,7 +422,7 @@ group :ed25519 do
end
# Gitaly GRPC client
-gem 'gitaly-proto', '~> 0.88.0', require: 'gitaly'
+gem 'gitaly-proto', '~> 0.91.0', require: 'gitaly'
gem 'grpc', '~> 1.10.0'
# Locked until https://github.com/google/protobuf/issues/4210 is closed
diff --git a/Gemfile.lock b/Gemfile.lock
index 61107a2130b..bb510f6b539 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -56,8 +56,8 @@ GEM
faraday_middleware (~> 0.9)
faraday_middleware-multi_json (~> 0.0)
oauth2 (~> 1.0)
- asciidoctor (1.5.3)
- asciidoctor-plantuml (0.0.7)
+ asciidoctor (1.5.6.2)
+ asciidoctor-plantuml (0.0.8)
asciidoctor (~> 1.5)
asset_sync (2.2.0)
activemodel (>= 4.1.0)
@@ -244,10 +244,11 @@ GEM
builder
excon (~> 0.58)
formatador (~> 0.2)
- fog-google (0.5.3)
+ fog-google (1.3.3)
fog-core
fog-json
fog-xml
+ google-api-client (~> 0.19.1)
fog-json (1.0.2)
fog-core (~> 1.0)
multi_json (~> 1.10)
@@ -289,7 +290,7 @@ GEM
po_to_json (>= 1.0.0)
rails (>= 3.2.0)
gherkin-ruby (0.3.2)
- gitaly-proto (0.88.0)
+ gitaly-proto (0.91.0)
google-protobuf (~> 3.1)
grpc (~> 1.0)
github-linguist (5.3.3)
@@ -549,11 +550,13 @@ GEM
omniauth-gitlab (1.0.2)
omniauth (~> 1.0)
omniauth-oauth2 (~> 1.0)
- omniauth-google-oauth2 (0.5.2)
- jwt (~> 1.5)
- multi_json (~> 1.3)
+ omniauth-google-oauth2 (0.5.3)
+ jwt (>= 1.5)
omniauth (>= 1.1.1)
- omniauth-oauth2 (>= 1.3.1)
+ omniauth-oauth2 (>= 1.5)
+ omniauth-jwt (0.0.2)
+ jwt
+ omniauth (~> 1.1)
omniauth-kerberos (0.3.0)
omniauth-multipassword
timfel-krb5-auth (~> 0.8)
@@ -562,8 +565,8 @@ GEM
omniauth-oauth (1.1.0)
oauth
omniauth (~> 1.0)
- omniauth-oauth2 (1.4.0)
- oauth2 (~> 1.0)
+ omniauth-oauth2 (1.5.0)
+ oauth2 (~> 1.1)
omniauth (~> 1.2)
omniauth-oauth2-generic (0.2.2)
omniauth-oauth2 (~> 1.0)
@@ -810,7 +813,7 @@ GEM
rubyzip (1.2.1)
rufus-scheduler (3.4.0)
et-orbi (~> 1.0)
- rugged (0.26.0)
+ rugged (0.27.0)
safe_yaml (1.0.4)
sanitize (2.1.0)
nokogiri (>= 1.4.4)
@@ -995,8 +998,8 @@ DEPENDENCIES
akismet (~> 2.0)
allocations (~> 1.0)
asana (~> 0.6.0)
- asciidoctor (~> 1.5.2)
- asciidoctor-plantuml (= 0.0.7)
+ asciidoctor (~> 1.5.6)
+ asciidoctor-plantuml (= 0.0.8)
asset_sync (~> 2.2.0)
attr_encrypted (~> 3.0.0)
awesome_print (~> 1.2.0)
@@ -1044,9 +1047,9 @@ DEPENDENCIES
flipper-active_record (~> 0.13.0)
flipper-active_support_cache_store (~> 0.13.0)
fog-aliyun (~> 0.2.0)
- fog-aws (~> 2.0)
+ fog-aws (~> 2.0.1)
fog-core (~> 1.44)
- fog-google (~> 0.5)
+ fog-google (~> 1.3.3)
fog-local (~> 0.3)
fog-openstack (~> 0.1)
fog-rackspace (~> 0.1.1)
@@ -1058,7 +1061,7 @@ DEPENDENCIES
gettext (~> 3.2.2)
gettext_i18n_rails (~> 1.8.0)
gettext_i18n_rails_js (~> 1.3)
- gitaly-proto (~> 0.88.0)
+ gitaly-proto (~> 0.91.0)
github-linguist (~> 5.3.3)
gitlab-flowdock-git-hook (~> 1.0.1)
gitlab-markup (~> 1.6.2)
@@ -1114,7 +1117,8 @@ DEPENDENCIES
omniauth-facebook (~> 4.0.0)
omniauth-github (~> 1.1.1)
omniauth-gitlab (~> 1.0.2)
- omniauth-google-oauth2 (~> 0.5.2)
+ omniauth-google-oauth2 (~> 0.5.3)
+ omniauth-jwt (~> 0.0.2)
omniauth-kerberos (~> 0.3.0)
omniauth-oauth2-generic (~> 0.2.2)
omniauth-saml (~> 1.10)
@@ -1169,7 +1173,7 @@ DEPENDENCIES
ruby-prof (~> 0.17.0)
ruby_parser (~> 3.8)
rufus-scheduler (~> 3.4)
- rugged (~> 0.26.0)
+ rugged (~> 0.27)
sanitize (~> 2.0)
sass-rails (~> 5.0.6)
scss_lint (~> 0.56.0)
diff --git a/app/assets/images/ci_favicons/canary/favicon_status_canceled.ico b/app/assets/images/ci_favicons/canary/favicon_status_canceled.ico
new file mode 100644
index 00000000000..48b1095370d
--- /dev/null
+++ b/app/assets/images/ci_favicons/canary/favicon_status_canceled.ico
Binary files differ
diff --git a/app/assets/images/ci_favicons/canary/favicon_status_created.ico b/app/assets/images/ci_favicons/canary/favicon_status_created.ico
new file mode 100644
index 00000000000..623c728faf6
--- /dev/null
+++ b/app/assets/images/ci_favicons/canary/favicon_status_created.ico
Binary files differ
diff --git a/app/assets/images/ci_favicons/canary/favicon_status_failed.ico b/app/assets/images/ci_favicons/canary/favicon_status_failed.ico
new file mode 100644
index 00000000000..3073fe5a761
--- /dev/null
+++ b/app/assets/images/ci_favicons/canary/favicon_status_failed.ico
Binary files differ
diff --git a/app/assets/images/ci_favicons/canary/favicon_status_manual.ico b/app/assets/images/ci_favicons/canary/favicon_status_manual.ico
new file mode 100644
index 00000000000..6c713d7b675
--- /dev/null
+++ b/app/assets/images/ci_favicons/canary/favicon_status_manual.ico
Binary files differ
diff --git a/app/assets/images/ci_favicons/canary/favicon_status_not_found.ico b/app/assets/images/ci_favicons/canary/favicon_status_not_found.ico
new file mode 100644
index 00000000000..dbf855fdafd
--- /dev/null
+++ b/app/assets/images/ci_favicons/canary/favicon_status_not_found.ico
Binary files differ
diff --git a/app/assets/images/ci_favicons/canary/favicon_status_pending.ico b/app/assets/images/ci_favicons/canary/favicon_status_pending.ico
new file mode 100644
index 00000000000..ccd00606aeb
--- /dev/null
+++ b/app/assets/images/ci_favicons/canary/favicon_status_pending.ico
Binary files differ
diff --git a/app/assets/images/ci_favicons/canary/favicon_status_running.ico b/app/assets/images/ci_favicons/canary/favicon_status_running.ico
new file mode 100644
index 00000000000..968e7c4c2d4
--- /dev/null
+++ b/app/assets/images/ci_favicons/canary/favicon_status_running.ico
Binary files differ
diff --git a/app/assets/images/ci_favicons/canary/favicon_status_skipped.ico b/app/assets/images/ci_favicons/canary/favicon_status_skipped.ico
new file mode 100644
index 00000000000..7e3be35cc3a
--- /dev/null
+++ b/app/assets/images/ci_favicons/canary/favicon_status_skipped.ico
Binary files differ
diff --git a/app/assets/images/ci_favicons/canary/favicon_status_success.ico b/app/assets/images/ci_favicons/canary/favicon_status_success.ico
new file mode 100644
index 00000000000..a1fb6e91d65
--- /dev/null
+++ b/app/assets/images/ci_favicons/canary/favicon_status_success.ico
Binary files differ
diff --git a/app/assets/images/ci_favicons/canary/favicon_status_warning.ico b/app/assets/images/ci_favicons/canary/favicon_status_warning.ico
new file mode 100644
index 00000000000..5d931619fb2
--- /dev/null
+++ b/app/assets/images/ci_favicons/canary/favicon_status_warning.ico
Binary files differ
diff --git a/app/assets/images/favicon-yellow.ico b/app/assets/images/favicon-yellow.ico
new file mode 100644
index 00000000000..b650f277fb6
--- /dev/null
+++ b/app/assets/images/favicon-yellow.ico
Binary files differ
diff --git a/app/assets/javascripts/behaviors/markdown/render_math.js b/app/assets/javascripts/behaviors/markdown/render_math.js
index 7dcf1aeed17..eb4e59d12b1 100644
--- a/app/assets/javascripts/behaviors/markdown/render_math.js
+++ b/app/assets/javascripts/behaviors/markdown/render_math.js
@@ -31,7 +31,7 @@ export default function renderMath($els) {
if (!$els.length) return;
Promise.all([
import(/* webpackChunkName: 'katex' */ 'katex'),
- import(/* webpackChunkName: 'katex' */ 'katex/dist/katex.css'),
+ import(/* webpackChunkName: 'katex' */ 'katex/dist/katex.min.css'),
]).then(([katex]) => {
renderWithKaTeX($els, katex);
}).catch(() => flash(__('An error occurred while rendering KaTeX')));
diff --git a/app/assets/javascripts/gl_dropdown.js b/app/assets/javascripts/gl_dropdown.js
index 86b34a6e360..fa48d7d1915 100644
--- a/app/assets/javascripts/gl_dropdown.js
+++ b/app/assets/javascripts/gl_dropdown.js
@@ -753,7 +753,7 @@ GitLabDropdown = (function() {
}
if (this.options.isSelectable && !this.options.isSelectable(selectedObject, el)) {
- return;
+ return [selectedObject];
}
if (el.hasClass(ACTIVE_CLASS) && value !== 0) {
diff --git a/app/assets/javascripts/ide/lib/editor_options.js b/app/assets/javascripts/ide/lib/editor_options.js
index d69d4b8c615..a213862f9b3 100644
--- a/app/assets/javascripts/ide/lib/editor_options.js
+++ b/app/assets/javascripts/ide/lib/editor_options.js
@@ -6,6 +6,7 @@ export const defaultEditorOptions = {
minimap: {
enabled: false,
},
+ wordWrap: 'bounded',
};
export default [
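
The wordWrap: 'bounded' default added above is a standard Monaco editor option. A minimal sketch of how shared defaults like these reach a Monaco instance (assumes the monaco-editor package, the relative import path, and an illustrative element id; the IDE's own editor wrapper is not shown):

  // Sketch only: spread the shared defaults into monaco.editor.create().
  import * as monaco from 'monaco-editor';
  import { defaultEditorOptions } from './editor_options';

  const editor = monaco.editor.create(document.getElementById('ide'), {
    ...defaultEditorOptions, // includes minimap: { enabled: false } and wordWrap: 'bounded'
    wordWrapColumn: 100,     // with 'bounded', wrapping happens at min(viewport width, wordWrapColumn)
  });
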
diff --git a/app/assets/javascripts/jobs/components/sidebar_detail_row.vue b/app/assets/javascripts/jobs/components/sidebar_detail_row.vue
index a6819aaeb12..dfe87d89a39 100644
--- a/app/assets/javascripts/jobs/components/sidebar_detail_row.vue
+++ b/app/assets/javascripts/jobs/components/sidebar_detail_row.vue
@@ -11,11 +11,19 @@
type: String,
required: true,
},
+ helpUrl: {
+ type: String,
+ required: false,
+ default: '',
+ },
},
computed: {
hasTitle() {
return this.title.length > 0;
},
+ hasHelpURL() {
+ return this.helpUrl.length > 0;
+ },
},
};
</script>
@@ -28,5 +36,21 @@
{{ title }}:
</span>
{{ value }}
+
+ <span
+ v-if="hasHelpURL"
+ class="help-button pull-right"
+ >
+ <a
+ :href="helpUrl"
+ target="_blank"
+ rel="noopener noreferrer nofollow"
+ >
+ <i
+ class="fa fa-question-circle"
+ aria-hidden="true"
+ ></i>
+ </a>
+ </span>
</p>
</template>
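
For reference, a short usage sketch of the row component with the new helpUrl prop; the sidebar_details_block.vue diff that follows consumes it essentially like this (prop values as used there):

  <detail-row
    v-if="hasTimeout"
    title="Timeout"
    :value="timeout"
    :help-url="runnerHelpUrl"
  />
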
diff --git a/app/assets/javascripts/jobs/components/sidebar_details_block.vue b/app/assets/javascripts/jobs/components/sidebar_details_block.vue
index 56814a52525..172de6b3679 100644
--- a/app/assets/javascripts/jobs/components/sidebar_details_block.vue
+++ b/app/assets/javascripts/jobs/components/sidebar_details_block.vue
@@ -22,6 +22,11 @@
type: Boolean,
required: true,
},
+ runnerHelpUrl: {
+ type: String,
+ required: false,
+ default: '',
+ },
},
computed: {
shouldRenderContent() {
@@ -39,6 +44,21 @@
runnerId() {
return `#${this.job.runner.id}`;
},
+ hasTimeout() {
+ return this.job.metadata != null && this.job.metadata.timeout_human_readable !== '';
+ },
+ timeout() {
+ if (this.job.metadata == null) {
+ return '';
+ }
+
+ let t = this.job.metadata.timeout_human_readable;
+ if (this.job.metadata.timeout_source !== '') {
+ t += ` (from ${this.job.metadata.timeout_source})`;
+ }
+
+ return t;
+ },
renderBlock() {
return this.job.merge_request ||
this.job.duration ||
@@ -115,6 +135,13 @@
:value="queued"
/>
<detail-row
+ class="js-job-timeout"
+ v-if="hasTimeout"
+ title="Timeout"
+ :help-url="runnerHelpUrl"
+ :value="timeout"
+ />
+ <detail-row
class="js-job-runner"
v-if="job.runner"
title="Runner"
diff --git a/app/assets/javascripts/jobs/job_details_bundle.js b/app/assets/javascripts/jobs/job_details_bundle.js
index 85a88ae409b..656676ead91 100644
--- a/app/assets/javascripts/jobs/job_details_bundle.js
+++ b/app/assets/javascripts/jobs/job_details_bundle.js
@@ -51,6 +51,7 @@ export default () => {
props: {
isLoading: this.mediator.state.isLoading,
job: this.mediator.store.state.job,
+ runnerHelpUrl: dataset.runnerHelpUrl,
},
});
},
diff --git a/app/assets/javascripts/monitoring/components/dashboard.vue b/app/assets/javascripts/monitoring/components/dashboard.vue
index 10b3a4d2fee..f5572be5fbf 100644
--- a/app/assets/javascripts/monitoring/components/dashboard.vue
+++ b/app/assets/javascripts/monitoring/components/dashboard.vue
@@ -1,162 +1,155 @@
<script>
- import _ from 'underscore';
- import Flash from '../../flash';
- import MonitoringService from '../services/monitoring_service';
- import GraphGroup from './graph_group.vue';
- import Graph from './graph.vue';
- import EmptyState from './empty_state.vue';
- import MonitoringStore from '../stores/monitoring_store';
- import eventHub from '../event_hub';
+import _ from 'underscore';
+import Flash from '../../flash';
+import MonitoringService from '../services/monitoring_service';
+import GraphGroup from './graph_group.vue';
+import Graph from './graph.vue';
+import EmptyState from './empty_state.vue';
+import MonitoringStore from '../stores/monitoring_store';
+import eventHub from '../event_hub';
- export default {
- components: {
- Graph,
- GraphGroup,
- EmptyState,
+export default {
+ components: {
+ Graph,
+ GraphGroup,
+ EmptyState,
+ },
+ props: {
+ hasMetrics: {
+ type: Boolean,
+ required: false,
+ default: true,
},
-
- props: {
- hasMetrics: {
- type: Boolean,
- required: false,
- default: true,
- },
- showLegend: {
- type: Boolean,
- required: false,
- default: true,
- },
- showPanels: {
- type: Boolean,
- required: false,
- default: true,
- },
- forceSmallGraph: {
- type: Boolean,
- required: false,
- default: false,
- },
- documentationPath: {
- type: String,
- required: true,
- },
- settingsPath: {
- type: String,
- required: true,
- },
- clustersPath: {
- type: String,
- required: true,
- },
- tagsPath: {
- type: String,
- required: true,
- },
- projectPath: {
- type: String,
- required: true,
- },
- metricsEndpoint: {
- type: String,
- required: true,
- },
- deploymentEndpoint: {
- type: String,
- required: false,
- default: null,
- },
- emptyGettingStartedSvgPath: {
- type: String,
- required: true,
- },
- emptyLoadingSvgPath: {
- type: String,
- required: true,
- },
- emptyNoDataSvgPath: {
- type: String,
- required: true,
- },
- emptyUnableToConnectSvgPath: {
- type: String,
- required: true,
- },
+ showLegend: {
+ type: Boolean,
+ required: false,
+ default: true,
},
-
- data() {
- return {
- store: new MonitoringStore(),
- state: 'gettingStarted',
- showEmptyState: true,
- updateAspectRatio: false,
- updatedAspectRatios: 0,
- hoverData: {},
- resizeThrottled: {},
- };
+ showPanels: {
+ type: Boolean,
+ required: false,
+ default: true,
},
-
- created() {
- this.service = new MonitoringService({
- metricsEndpoint: this.metricsEndpoint,
- deploymentEndpoint: this.deploymentEndpoint,
- });
- eventHub.$on('toggleAspectRatio', this.toggleAspectRatio);
- eventHub.$on('hoverChanged', this.hoverChanged);
+ forceSmallGraph: {
+ type: Boolean,
+ required: false,
+ default: false,
},
-
- beforeDestroy() {
- eventHub.$off('toggleAspectRatio', this.toggleAspectRatio);
- eventHub.$off('hoverChanged', this.hoverChanged);
- window.removeEventListener('resize', this.resizeThrottled, false);
+ documentationPath: {
+ type: String,
+ required: true,
},
-
- mounted() {
- this.resizeThrottled = _.throttle(this.resize, 600);
- if (!this.hasMetrics) {
- this.state = 'gettingStarted';
- } else {
- this.getGraphsData();
- window.addEventListener('resize', this.resizeThrottled, false);
+ settingsPath: {
+ type: String,
+ required: true,
+ },
+ clustersPath: {
+ type: String,
+ required: true,
+ },
+ tagsPath: {
+ type: String,
+ required: true,
+ },
+ projectPath: {
+ type: String,
+ required: true,
+ },
+ metricsEndpoint: {
+ type: String,
+ required: true,
+ },
+ deploymentEndpoint: {
+ type: String,
+ required: false,
+ default: null,
+ },
+ emptyGettingStartedSvgPath: {
+ type: String,
+ required: true,
+ },
+ emptyLoadingSvgPath: {
+ type: String,
+ required: true,
+ },
+ emptyNoDataSvgPath: {
+ type: String,
+ required: true,
+ },
+ emptyUnableToConnectSvgPath: {
+ type: String,
+ required: true,
+ },
+ },
+ data() {
+ return {
+ store: new MonitoringStore(),
+ state: 'gettingStarted',
+ showEmptyState: true,
+ updateAspectRatio: false,
+ updatedAspectRatios: 0,
+ hoverData: {},
+ resizeThrottled: {},
+ };
+ },
+ created() {
+ this.service = new MonitoringService({
+ metricsEndpoint: this.metricsEndpoint,
+ deploymentEndpoint: this.deploymentEndpoint,
+ });
+ eventHub.$on('toggleAspectRatio', this.toggleAspectRatio);
+ eventHub.$on('hoverChanged', this.hoverChanged);
+ },
+ beforeDestroy() {
+ eventHub.$off('toggleAspectRatio', this.toggleAspectRatio);
+ eventHub.$off('hoverChanged', this.hoverChanged);
+ window.removeEventListener('resize', this.resizeThrottled, false);
+ },
+ mounted() {
+ this.resizeThrottled = _.throttle(this.resize, 600);
+ if (!this.hasMetrics) {
+ this.state = 'gettingStarted';
+ } else {
+ this.getGraphsData();
+ window.addEventListener('resize', this.resizeThrottled, false);
+ }
+ },
+ methods: {
+ getGraphsData() {
+ this.state = 'loading';
+ Promise.all([
+ this.service.getGraphsData().then(data => this.store.storeMetrics(data)),
+ this.service
+ .getDeploymentData()
+ .then(data => this.store.storeDeploymentData(data))
+ .catch(() => new Flash('Error getting deployment information.')),
+ ])
+ .then(() => {
+ if (this.store.groups.length < 1) {
+ this.state = 'noData';
+ return;
+ }
+ this.showEmptyState = false;
+ })
+ .catch(() => {
+ this.state = 'unableToConnect';
+ });
+ },
+ resize() {
+ this.updateAspectRatio = true;
+ },
+ toggleAspectRatio() {
+ this.updatedAspectRatios = this.updatedAspectRatios += 1;
+ if (this.store.getMetricsCount() === this.updatedAspectRatios) {
+ this.updateAspectRatio = !this.updateAspectRatio;
+ this.updatedAspectRatios = 0;
}
},
-
- methods: {
- getGraphsData() {
- this.state = 'loading';
- Promise.all([
- this.service.getGraphsData()
- .then(data => this.store.storeMetrics(data)),
- this.service.getDeploymentData()
- .then(data => this.store.storeDeploymentData(data))
- .catch(() => new Flash('Error getting deployment information.')),
- ])
- .then(() => {
- if (this.store.groups.length < 1) {
- this.state = 'noData';
- return;
- }
- this.showEmptyState = false;
- })
- .catch(() => { this.state = 'unableToConnect'; });
- },
-
- resize() {
- this.updateAspectRatio = true;
- },
-
- toggleAspectRatio() {
- this.updatedAspectRatios = this.updatedAspectRatios += 1;
- if (this.store.getMetricsCount() === this.updatedAspectRatios) {
- this.updateAspectRatio = !this.updateAspectRatio;
- this.updatedAspectRatios = 0;
- }
- },
-
- hoverChanged(data) {
- this.hoverData = data;
- },
+ hoverChanged(data) {
+ this.hoverData = data;
},
- };
+ },
+};
</script>
<template>
diff --git a/app/assets/javascripts/monitoring/components/empty_state.vue b/app/assets/javascripts/monitoring/components/empty_state.vue
index fbf451fce68..c77f451c2d3 100644
--- a/app/assets/javascripts/monitoring/components/empty_state.vue
+++ b/app/assets/javascripts/monitoring/components/empty_state.vue
@@ -1,91 +1,90 @@
<script>
- export default {
- props: {
- documentationPath: {
- type: String,
- required: true,
- },
- settingsPath: {
- type: String,
- required: false,
- default: '',
- },
- clustersPath: {
- type: String,
- required: false,
- default: '',
- },
- selectedState: {
- type: String,
- required: true,
- },
- emptyGettingStartedSvgPath: {
- type: String,
- required: true,
- },
- emptyLoadingSvgPath: {
- type: String,
- required: true,
- },
- emptyNoDataSvgPath: {
- type: String,
- required: true,
- },
- emptyUnableToConnectSvgPath: {
- type: String,
- required: true,
- },
+export default {
+ props: {
+ documentationPath: {
+ type: String,
+ required: true,
+ },
+ settingsPath: {
+ type: String,
+ required: false,
+ default: '',
+ },
+ clustersPath: {
+ type: String,
+ required: false,
+ default: '',
+ },
+ selectedState: {
+ type: String,
+ required: true,
},
- data() {
- return {
- states: {
- gettingStarted: {
- svgUrl: this.emptyGettingStartedSvgPath,
- title: 'Get started with performance monitoring',
- description: `Stay updated about the performance and health
+ emptyGettingStartedSvgPath: {
+ type: String,
+ required: true,
+ },
+ emptyLoadingSvgPath: {
+ type: String,
+ required: true,
+ },
+ emptyNoDataSvgPath: {
+ type: String,
+ required: true,
+ },
+ emptyUnableToConnectSvgPath: {
+ type: String,
+ required: true,
+ },
+ },
+ data() {
+ return {
+ states: {
+ gettingStarted: {
+ svgUrl: this.emptyGettingStartedSvgPath,
+ title: 'Get started with performance monitoring',
+ description: `Stay updated about the performance and health
of your environment by configuring Prometheus to monitor your deployments.`,
- buttonText: 'Install Prometheus on clusters',
- buttonPath: this.clustersPath,
- secondaryButtonText: 'Configure existing Prometheus',
- secondaryButtonPath: this.settingsPath,
- },
- loading: {
- svgUrl: this.emptyLoadingSvgPath,
- title: 'Waiting for performance data',
- description: `Creating graphs uses the data from the Prometheus server.
+ buttonText: 'Install Prometheus on clusters',
+ buttonPath: this.clustersPath,
+ secondaryButtonText: 'Configure existing Prometheus',
+ secondaryButtonPath: this.settingsPath,
+ },
+ loading: {
+ svgUrl: this.emptyLoadingSvgPath,
+ title: 'Waiting for performance data',
+ description: `Creating graphs uses the data from the Prometheus server.
If this takes a long time, ensure that data is available.`,
- buttonText: 'View documentation',
- buttonPath: this.documentationPath,
- },
- noData: {
- svgUrl: this.emptyNoDataSvgPath,
- title: 'No data found',
- description: `You are connected to the Prometheus server, but there is currently
+ buttonText: 'View documentation',
+ buttonPath: this.documentationPath,
+ },
+ noData: {
+ svgUrl: this.emptyNoDataSvgPath,
+ title: 'No data found',
+ description: `You are connected to the Prometheus server, but there is currently
no data to display.`,
- buttonText: 'Configure Prometheus',
- buttonPath: this.settingsPath,
- },
- unableToConnect: {
- svgUrl: this.emptyUnableToConnectSvgPath,
- title: 'Unable to connect to Prometheus server',
- description: 'Ensure connectivity is available from the GitLab server to the ',
- buttonText: 'View documentation',
- buttonPath: this.documentationPath,
- },
+ buttonText: 'Configure Prometheus',
+ buttonPath: this.settingsPath,
+ },
+ unableToConnect: {
+ svgUrl: this.emptyUnableToConnectSvgPath,
+ title: 'Unable to connect to Prometheus server',
+ description: 'Ensure connectivity is available from the GitLab server to the ',
+ buttonText: 'View documentation',
+ buttonPath: this.documentationPath,
},
- };
- },
- computed: {
- currentState() {
- return this.states[this.selectedState];
- },
-
- showButtonDescription() {
- if (this.selectedState === 'unableToConnect') return true;
- return false;
},
+ };
+ },
+ computed: {
+ currentState() {
+ return this.states[this.selectedState];
+ },
+ showButtonDescription() {
+ if (this.selectedState === 'unableToConnect') return true;
+ return false;
},
- };
+ },
+};
</script>
<template>
diff --git a/app/assets/javascripts/monitoring/components/graph.vue b/app/assets/javascripts/monitoring/components/graph.vue
index 42615d2bb8e..04d546fafa0 100644
--- a/app/assets/javascripts/monitoring/components/graph.vue
+++ b/app/assets/javascripts/monitoring/components/graph.vue
@@ -1,236 +1,229 @@
<script>
- import { scaleLinear, scaleTime } from 'd3-scale';
- import { axisLeft, axisBottom } from 'd3-axis';
- import { max, extent } from 'd3-array';
- import { select } from 'd3-selection';
- import GraphLegend from './graph/legend.vue';
- import GraphFlag from './graph/flag.vue';
- import GraphDeployment from './graph/deployment.vue';
- import GraphPath from './graph/path.vue';
- import MonitoringMixin from '../mixins/monitoring_mixins';
- import eventHub from '../event_hub';
- import measurements from '../utils/measurements';
- import { bisectDate, timeScaleFormat } from '../utils/date_time_formatters';
- import createTimeSeries from '../utils/multiple_time_series';
- import bp from '../../breakpoints';
+import { scaleLinear, scaleTime } from 'd3-scale';
+import { axisLeft, axisBottom } from 'd3-axis';
+import { max, extent } from 'd3-array';
+import { select } from 'd3-selection';
+import GraphLegend from './graph/legend.vue';
+import GraphFlag from './graph/flag.vue';
+import GraphDeployment from './graph/deployment.vue';
+import GraphPath from './graph/path.vue';
+import MonitoringMixin from '../mixins/monitoring_mixins';
+import eventHub from '../event_hub';
+import measurements from '../utils/measurements';
+import { bisectDate, timeScaleFormat } from '../utils/date_time_formatters';
+import createTimeSeries from '../utils/multiple_time_series';
+import bp from '../../breakpoints';
- const d3 = { scaleLinear, scaleTime, axisLeft, axisBottom, max, extent, select };
+const d3 = { scaleLinear, scaleTime, axisLeft, axisBottom, max, extent, select };
- export default {
- components: {
- GraphLegend,
- GraphFlag,
- GraphDeployment,
- GraphPath,
+export default {
+ components: {
+ GraphLegend,
+ GraphFlag,
+ GraphDeployment,
+ GraphPath,
+ },
+ mixins: [MonitoringMixin],
+ props: {
+ graphData: {
+ type: Object,
+ required: true,
},
-
- mixins: [MonitoringMixin],
-
- props: {
- graphData: {
- type: Object,
- required: true,
- },
- updateAspectRatio: {
- type: Boolean,
- required: true,
- },
- deploymentData: {
- type: Array,
- required: true,
- },
- hoverData: {
- type: Object,
- required: false,
- default: () => ({}),
- },
- projectPath: {
- type: String,
- required: true,
- },
- tagsPath: {
- type: String,
- required: true,
- },
- showLegend: {
- type: Boolean,
- required: false,
- default: true,
- },
- smallGraph: {
- type: Boolean,
- required: false,
- default: false,
+ updateAspectRatio: {
+ type: Boolean,
+ required: true,
+ },
+ deploymentData: {
+ type: Array,
+ required: true,
+ },
+ hoverData: {
+ type: Object,
+ required: false,
+ default: () => ({}),
+ },
+ projectPath: {
+ type: String,
+ required: true,
+ },
+ tagsPath: {
+ type: String,
+ required: true,
+ },
+ showLegend: {
+ type: Boolean,
+ required: false,
+ default: true,
+ },
+ smallGraph: {
+ type: Boolean,
+ required: false,
+ default: false,
+ },
+ },
+ data() {
+ return {
+ baseGraphHeight: 450,
+ baseGraphWidth: 600,
+ graphHeight: 450,
+ graphWidth: 600,
+ graphHeightOffset: 120,
+ margin: {},
+ unitOfDisplay: '',
+ yAxisLabel: '',
+ legendTitle: '',
+ reducedDeploymentData: [],
+ measurements: measurements.large,
+ currentData: {
+ time: new Date(),
+ value: 0,
},
+ currentDataIndex: 0,
+ currentXCoordinate: 0,
+ currentFlagPosition: 0,
+ showFlag: false,
+ showFlagContent: false,
+ timeSeries: [],
+ realPixelRatio: 1,
+ };
+ },
+ computed: {
+ outerViewBox() {
+ return `0 0 ${this.baseGraphWidth} ${this.baseGraphHeight}`;
},
-
- data() {
+ innerViewBox() {
+ return `0 0 ${this.baseGraphWidth - 150} ${this.baseGraphHeight}`;
+ },
+ axisTransform() {
+ return `translate(70, ${this.graphHeight - 100})`;
+ },
+ paddingBottomRootSvg() {
return {
- baseGraphHeight: 450,
- baseGraphWidth: 600,
- graphHeight: 450,
- graphWidth: 600,
- graphHeightOffset: 120,
- margin: {},
- unitOfDisplay: '',
- yAxisLabel: '',
- legendTitle: '',
- reducedDeploymentData: [],
- measurements: measurements.large,
- currentData: {
- time: new Date(),
- value: 0,
- },
- currentDataIndex: 0,
- currentXCoordinate: 0,
- currentFlagPosition: 0,
- showFlag: false,
- showFlagContent: false,
- timeSeries: [],
- realPixelRatio: 1,
+ paddingBottom: `${Math.ceil(this.baseGraphHeight * 100) / this.baseGraphWidth || 0}%`,
};
},
-
- computed: {
- outerViewBox() {
- return `0 0 ${this.baseGraphWidth} ${this.baseGraphHeight}`;
- },
-
- innerViewBox() {
- return `0 0 ${this.baseGraphWidth - 150} ${this.baseGraphHeight}`;
- },
-
- axisTransform() {
- return `translate(70, ${this.graphHeight - 100})`;
- },
-
- paddingBottomRootSvg() {
- return {
- paddingBottom: `${(Math.ceil(this.baseGraphHeight * 100) / this.baseGraphWidth) || 0}%`,
- };
- },
-
- deploymentFlagData() {
- return this.reducedDeploymentData.find(deployment => deployment.showDeploymentFlag);
- },
+ deploymentFlagData() {
+ return this.reducedDeploymentData.find(deployment => deployment.showDeploymentFlag);
},
-
- watch: {
- updateAspectRatio() {
- if (this.updateAspectRatio) {
- this.graphHeight = 450;
- this.graphWidth = 600;
- this.measurements = measurements.large;
- this.draw();
- eventHub.$emit('toggleAspectRatio');
- }
- },
-
- hoverData() {
- this.positionFlag();
- },
+ },
+ watch: {
+ updateAspectRatio() {
+ if (this.updateAspectRatio) {
+ this.graphHeight = 450;
+ this.graphWidth = 600;
+ this.measurements = measurements.large;
+ this.draw();
+ eventHub.$emit('toggleAspectRatio');
+ }
},
-
- mounted() {
- this.draw();
+ hoverData() {
+ this.positionFlag();
},
+ },
+ mounted() {
+ this.draw();
+ },
+ methods: {
+ draw() {
+ const breakpointSize = bp.getBreakpointSize();
+ const query = this.graphData.queries[0];
+ this.margin = measurements.large.margin;
+ if (this.smallGraph || breakpointSize === 'xs' || breakpointSize === 'sm') {
+ this.graphHeight = 300;
+ this.margin = measurements.small.margin;
+ this.measurements = measurements.small;
+ }
+ this.unitOfDisplay = query.unit || '';
+ this.yAxisLabel = this.graphData.y_label || 'Values';
+ this.legendTitle = query.label || 'Average';
+ this.graphWidth = this.$refs.baseSvg.clientWidth - this.margin.left - this.margin.right;
+ this.graphHeight = this.graphHeight - this.margin.top - this.margin.bottom;
+ this.baseGraphHeight = this.graphHeight;
+ this.baseGraphWidth = this.graphWidth;
- methods: {
- draw() {
- const breakpointSize = bp.getBreakpointSize();
- const query = this.graphData.queries[0];
- this.margin = measurements.large.margin;
- if (this.smallGraph || breakpointSize === 'xs' || breakpointSize === 'sm') {
- this.graphHeight = 300;
- this.margin = measurements.small.margin;
- this.measurements = measurements.small;
- }
- this.unitOfDisplay = query.unit || '';
- this.yAxisLabel = this.graphData.y_label || 'Values';
- this.legendTitle = query.label || 'Average';
- this.graphWidth = this.$refs.baseSvg.clientWidth -
- this.margin.left - this.margin.right;
- this.graphHeight = this.graphHeight - this.margin.top - this.margin.bottom;
- this.baseGraphHeight = this.graphHeight;
- this.baseGraphWidth = this.graphWidth;
-
- // pixel offsets inside the svg and outside are not 1:1
- this.realPixelRatio = (this.$refs.baseSvg.clientWidth / this.baseGraphWidth);
-
- this.renderAxesPaths();
- this.formatDeployments();
- },
-
- handleMouseOverGraph(e) {
- let point = this.$refs.graphData.createSVGPoint();
- point.x = e.clientX;
- point.y = e.clientY;
- point = point.matrixTransform(this.$refs.graphData.getScreenCTM().inverse());
- point.x = point.x += 7;
- const firstTimeSeries = this.timeSeries[0];
- const timeValueOverlay = firstTimeSeries.timeSeriesScaleX.invert(point.x);
- const overlayIndex = bisectDate(firstTimeSeries.values, timeValueOverlay, 1);
- const d0 = firstTimeSeries.values[overlayIndex - 1];
- const d1 = firstTimeSeries.values[overlayIndex];
- if (d0 === undefined || d1 === undefined) return;
- const evalTime = timeValueOverlay - d0[0] > d1[0] - timeValueOverlay;
- const hoveredDataIndex = evalTime ? overlayIndex : (overlayIndex - 1);
- const hoveredDate = firstTimeSeries.values[hoveredDataIndex].time;
- const currentDeployXPos = this.mouseOverDeployInfo(point.x);
+ // pixel offsets inside the svg and outside are not 1:1
+ this.realPixelRatio = this.$refs.baseSvg.clientWidth / this.baseGraphWidth;
- eventHub.$emit('hoverChanged', {
- hoveredDate,
- currentDeployXPos,
- });
- },
+ this.renderAxesPaths();
+ this.formatDeployments();
+ },
+ handleMouseOverGraph(e) {
+ let point = this.$refs.graphData.createSVGPoint();
+ point.x = e.clientX;
+ point.y = e.clientY;
+ point = point.matrixTransform(this.$refs.graphData.getScreenCTM().inverse());
+ point.x = point.x += 7;
+ const firstTimeSeries = this.timeSeries[0];
+ const timeValueOverlay = firstTimeSeries.timeSeriesScaleX.invert(point.x);
+ const overlayIndex = bisectDate(firstTimeSeries.values, timeValueOverlay, 1);
+ const d0 = firstTimeSeries.values[overlayIndex - 1];
+ const d1 = firstTimeSeries.values[overlayIndex];
+ if (d0 === undefined || d1 === undefined) return;
+ const evalTime = timeValueOverlay - d0[0] > d1[0] - timeValueOverlay;
+ const hoveredDataIndex = evalTime ? overlayIndex : overlayIndex - 1;
+ const hoveredDate = firstTimeSeries.values[hoveredDataIndex].time;
+ const currentDeployXPos = this.mouseOverDeployInfo(point.x);
- renderAxesPaths() {
- this.timeSeries = createTimeSeries(
- this.graphData.queries,
- this.graphWidth,
- this.graphHeight,
- this.graphHeightOffset,
- );
+ eventHub.$emit('hoverChanged', {
+ hoveredDate,
+ currentDeployXPos,
+ });
+ },
+ renderAxesPaths() {
+ this.timeSeries = createTimeSeries(
+ this.graphData.queries,
+ this.graphWidth,
+ this.graphHeight,
+ this.graphHeightOffset,
+ );
- if (!this.showLegend) {
- this.baseGraphHeight -= 50;
- } else if (this.timeSeries.length > 3) {
- this.baseGraphHeight = this.baseGraphHeight += (this.timeSeries.length - 3) * 20;
- }
+ if (!this.showLegend) {
+ this.baseGraphHeight -= 50;
+ } else if (this.timeSeries.length > 3) {
+ this.baseGraphHeight = this.baseGraphHeight += (this.timeSeries.length - 3) * 20;
+ }
- const axisXScale = d3.scaleTime()
- .range([0, this.graphWidth - 70]);
- const axisYScale = d3.scaleLinear()
- .range([this.graphHeight - this.graphHeightOffset, 0]);
+ const axisXScale = d3.scaleTime().range([0, this.graphWidth - 70]);
+ const axisYScale = d3.scaleLinear().range([this.graphHeight - this.graphHeightOffset, 0]);
- const allValues = this.timeSeries.reduce((all, { values }) => all.concat(values), []);
- axisXScale.domain(d3.extent(allValues, d => d.time));
- axisYScale.domain([0, d3.max(allValues.map(d => d.value))]);
+ const allValues = this.timeSeries.reduce((all, { values }) => all.concat(values), []);
+ axisXScale.domain(d3.extent(allValues, d => d.time));
+ axisYScale.domain([0, d3.max(allValues.map(d => d.value))]);
- const xAxis = d3.axisBottom()
- .scale(axisXScale)
- .ticks(this.graphWidth / 120)
- .tickFormat(timeScaleFormat);
+ const xAxis = d3
+ .axisBottom()
+ .scale(axisXScale)
+ .ticks(this.graphWidth / 120)
+ .tickFormat(timeScaleFormat);
- const yAxis = d3.axisLeft()
- .scale(axisYScale)
- .ticks(measurements.yTicks);
+ const yAxis = d3
+ .axisLeft()
+ .scale(axisYScale)
+ .ticks(measurements.yTicks);
- d3.select(this.$refs.baseSvg).select('.x-axis').call(xAxis);
+ d3
+ .select(this.$refs.baseSvg)
+ .select('.x-axis')
+ .call(xAxis);
- const width = this.graphWidth;
- d3.select(this.$refs.baseSvg).select('.y-axis').call(yAxis)
- .selectAll('.tick')
- .each(function createTickLines(d, i) {
- if (i > 0) {
- d3.select(this).select('line')
- .attr('x2', width)
- .attr('class', 'axis-tick');
- } // Avoid adding the class to the first tick, to prevent coloring
- }); // This will select all of the ticks once they're rendered
- },
+ const width = this.graphWidth;
+ d3
+ .select(this.$refs.baseSvg)
+ .select('.y-axis')
+ .call(yAxis)
+ .selectAll('.tick')
+ .each(function createTickLines(d, i) {
+ if (i > 0) {
+ d3
+ .select(this)
+ .select('line')
+ .attr('x2', width)
+ .attr('class', 'axis-tick');
+ } // Avoid adding the class to the first tick, to prevent coloring
+ }); // This will select all of the ticks once they're rendered
},
- };
+ },
+};
</script>
<template>
diff --git a/app/assets/javascripts/monitoring/components/graph/deployment.vue b/app/assets/javascripts/monitoring/components/graph/deployment.vue
index 98c25307b74..4012191ceb9 100644
--- a/app/assets/javascripts/monitoring/components/graph/deployment.vue
+++ b/app/assets/javascripts/monitoring/components/graph/deployment.vue
@@ -1,32 +1,30 @@
<script>
- export default {
- props: {
- deploymentData: {
- type: Array,
- required: true,
- },
- graphHeight: {
- type: Number,
- required: true,
- },
- graphHeightOffset: {
- type: Number,
- required: true,
- },
+export default {
+ props: {
+ deploymentData: {
+ type: Array,
+ required: true,
},
-
- computed: {
- calculatedHeight() {
- return this.graphHeight - this.graphHeightOffset;
- },
+ graphHeight: {
+ type: Number,
+ required: true,
},
-
- methods: {
- transformDeploymentGroup(deployment) {
- return `translate(${Math.floor(deployment.xPos) - 5}, 20)`;
- },
+ graphHeightOffset: {
+ type: Number,
+ required: true,
},
- };
+ },
+ computed: {
+ calculatedHeight() {
+ return this.graphHeight - this.graphHeightOffset;
+ },
+ },
+ methods: {
+ transformDeploymentGroup(deployment) {
+ return `translate(${Math.floor(deployment.xPos) - 5}, 20)`;
+ },
+ },
+};
</script>
<template>
<g class="deploy-info">
diff --git a/app/assets/javascripts/monitoring/components/graph/flag.vue b/app/assets/javascripts/monitoring/components/graph/flag.vue
index 07aa6a3e5de..906c7c51f52 100644
--- a/app/assets/javascripts/monitoring/components/graph/flag.vue
+++ b/app/assets/javascripts/monitoring/components/graph/flag.vue
@@ -1,127 +1,119 @@
<script>
- import { dateFormat, timeFormat } from '../../utils/date_time_formatters';
- import { formatRelevantDigits } from '../../../lib/utils/number_utils';
- import icon from '../../../vue_shared/components/icon.vue';
+import { dateFormat, timeFormat } from '../../utils/date_time_formatters';
+import { formatRelevantDigits } from '../../../lib/utils/number_utils';
+import icon from '../../../vue_shared/components/icon.vue';
- export default {
- components: {
- icon,
- },
- props: {
- currentXCoordinate: {
- type: Number,
- required: true,
- },
- currentData: {
- type: Object,
- required: true,
- },
- deploymentFlagData: {
- type: Object,
- required: false,
- default: null,
- },
- graphHeight: {
- type: Number,
- required: true,
- },
- graphHeightOffset: {
- type: Number,
- required: true,
- },
- realPixelRatio: {
- type: Number,
- required: true,
- },
- showFlagContent: {
- type: Boolean,
- required: true,
- },
- timeSeries: {
- type: Array,
- required: true,
- },
- unitOfDisplay: {
- type: String,
- required: true,
- },
- currentDataIndex: {
- type: Number,
- required: true,
- },
- legendTitle: {
- type: String,
- required: true,
- },
+export default {
+ components: {
+ icon,
+ },
+ props: {
+ currentXCoordinate: {
+ type: Number,
+ required: true,
},
-
- computed: {
- formatTime() {
- return this.deploymentFlagData ?
- timeFormat(this.deploymentFlagData.time) :
- timeFormat(this.currentData.time);
- },
-
- formatDate() {
- return this.deploymentFlagData ?
- dateFormat(this.deploymentFlagData.time) :
- dateFormat(this.currentData.time);
- },
-
- cursorStyle() {
- const xCoordinate = this.deploymentFlagData ?
- this.deploymentFlagData.xPos :
- this.currentXCoordinate;
-
- const offsetTop = 20 * this.realPixelRatio;
- const offsetLeft = (70 + xCoordinate) * this.realPixelRatio;
- const height = (this.graphHeight - this.graphHeightOffset) * this.realPixelRatio;
-
- return {
- top: `${offsetTop}px`,
- left: `${offsetLeft}px`,
- height: `${height}px`,
- };
- },
-
- flagOrientation() {
- if (this.currentXCoordinate * this.realPixelRatio > 120) {
- return 'left';
- }
- return 'right';
- },
+ currentData: {
+ type: Object,
+ required: true,
},
+ deploymentFlagData: {
+ type: Object,
+ required: false,
+ default: null,
+ },
+ graphHeight: {
+ type: Number,
+ required: true,
+ },
+ graphHeightOffset: {
+ type: Number,
+ required: true,
+ },
+ realPixelRatio: {
+ type: Number,
+ required: true,
+ },
+ showFlagContent: {
+ type: Boolean,
+ required: true,
+ },
+ timeSeries: {
+ type: Array,
+ required: true,
+ },
+ unitOfDisplay: {
+ type: String,
+ required: true,
+ },
+ currentDataIndex: {
+ type: Number,
+ required: true,
+ },
+ legendTitle: {
+ type: String,
+ required: true,
+ },
+ },
+ computed: {
+ formatTime() {
+ return this.deploymentFlagData
+ ? timeFormat(this.deploymentFlagData.time)
+ : timeFormat(this.currentData.time);
+ },
+ formatDate() {
+ return this.deploymentFlagData
+ ? dateFormat(this.deploymentFlagData.time)
+ : dateFormat(this.currentData.time);
+ },
+ cursorStyle() {
+ const xCoordinate = this.deploymentFlagData
+ ? this.deploymentFlagData.xPos
+ : this.currentXCoordinate;
- methods: {
- seriesMetricValue(series) {
- const index = this.deploymentFlagData ?
- this.deploymentFlagData.seriesIndex :
- this.currentDataIndex;
- const value = series.values[index] &&
- series.values[index].value;
- if (isNaN(value)) {
- return '-';
- }
- return `${formatRelevantDigits(value)}${this.unitOfDisplay}`;
- },
-
- seriesMetricLabel(index, series) {
- if (this.timeSeries.length < 2) {
- return this.legendTitle;
- }
- if (series.metricTag) {
- return series.metricTag;
- }
- return `series ${index + 1}`;
- },
+ const offsetTop = 20 * this.realPixelRatio;
+ const offsetLeft = (70 + xCoordinate) * this.realPixelRatio;
+ const height = (this.graphHeight - this.graphHeightOffset) * this.realPixelRatio;
- strokeDashArray(type) {
- if (type === 'dashed') return '6, 3';
- if (type === 'dotted') return '3, 3';
- return null;
- },
+ return {
+ top: `${offsetTop}px`,
+ left: `${offsetLeft}px`,
+ height: `${height}px`,
+ };
+ },
+ flagOrientation() {
+ if (this.currentXCoordinate * this.realPixelRatio > 120) {
+ return 'left';
+ }
+ return 'right';
+ },
+ },
+ methods: {
+ seriesMetricValue(series) {
+ const index = this.deploymentFlagData
+ ? this.deploymentFlagData.seriesIndex
+ : this.currentDataIndex;
+ const value = series.values[index] && series.values[index].value;
+ if (isNaN(value)) {
+ return '-';
+ }
+ return `${formatRelevantDigits(value)}${this.unitOfDisplay}`;
+ },
+ seriesMetricLabel(index, series) {
+ if (this.timeSeries.length < 2) {
+ return this.legendTitle;
+ }
+ if (series.metricTag) {
+ return series.metricTag;
+ }
+ return `series ${index + 1}`;
+ },
+ strokeDashArray(type) {
+ if (type === 'dashed') return '6, 3';
+ if (type === 'dotted') return '3, 3';
+ return null;
},
- };
+ },
+};
</script>
<template>
diff --git a/app/assets/javascripts/monitoring/components/graph/legend.vue b/app/assets/javascripts/monitoring/components/graph/legend.vue
index 3149397b61f..a7a058a9203 100644
--- a/app/assets/javascripts/monitoring/components/graph/legend.vue
+++ b/app/assets/javascripts/monitoring/components/graph/legend.vue
@@ -1,127 +1,119 @@
<script>
- import { formatRelevantDigits } from '../../../lib/utils/number_utils';
-
- export default {
- props: {
- graphWidth: {
- type: Number,
- required: true,
- },
- graphHeight: {
- type: Number,
- required: true,
- },
- margin: {
- type: Object,
- required: true,
- },
- measurements: {
- type: Object,
- required: true,
- },
- legendTitle: {
- type: String,
- required: true,
- },
- yAxisLabel: {
- type: String,
- required: true,
- },
- timeSeries: {
- type: Array,
- required: true,
- },
- unitOfDisplay: {
- type: String,
- required: true,
- },
- currentDataIndex: {
- type: Number,
- required: true,
- },
- showLegendGroup: {
- type: Boolean,
- required: false,
- default: true,
- },
- },
- data() {
- return {
- yLabelWidth: 0,
- yLabelHeight: 0,
- seriesXPosition: 0,
- metricUsageXPosition: 0,
- };
- },
- computed: {
- textTransform() {
- const yCoordinate = (((this.graphHeight - this.margin.top)
- + this.measurements.axisLabelLineOffset) / 2) || 0;
-
- return `translate(15, ${yCoordinate}) rotate(-90)`;
- },
-
- rectTransform() {
- const yCoordinate = (((this.graphHeight - this.margin.top)
- + this.measurements.axisLabelLineOffset) / 2)
- + (this.yLabelWidth / 2) || 0;
-
- return `translate(0, ${yCoordinate}) rotate(-90)`;
- },
-
- xPosition() {
- return (((this.graphWidth + this.measurements.axisLabelLineOffset) / 2)
- - this.margin.right) || 0;
- },
-
- yPosition() {
- return ((this.graphHeight - this.margin.top) + this.measurements.axisLabelLineOffset) || 0;
- },
+import { formatRelevantDigits } from '../../../lib/utils/number_utils';
+export default {
+ props: {
+ graphWidth: {
+ type: Number,
+ required: true,
},
- mounted() {
- this.$nextTick(() => {
- const bbox = this.$refs.ylabel.getBBox();
- this.metricUsageXPosition = 0;
- this.seriesXPosition = 0;
- if (this.$refs.legendTitleSvg != null) {
- this.seriesXPosition = this.$refs.legendTitleSvg[0].getBBox().width;
- }
- if (this.$refs.seriesTitleSvg != null) {
- this.metricUsageXPosition = this.$refs.seriesTitleSvg[0].getBBox().width;
- }
- this.yLabelWidth = bbox.width + 10; // Added some padding
- this.yLabelHeight = bbox.height + 5;
- });
- },
- methods: {
- translateLegendGroup(index) {
- return `translate(0, ${12 * (index)})`;
- },
-
- formatMetricUsage(series) {
- const value = series.values[this.currentDataIndex] &&
- series.values[this.currentDataIndex].value;
- if (isNaN(value)) {
- return '-';
- }
- return `${formatRelevantDigits(value)} ${this.unitOfDisplay}`;
- },
+ graphHeight: {
+ type: Number,
+ required: true,
+ },
+ margin: {
+ type: Object,
+ required: true,
+ },
+ measurements: {
+ type: Object,
+ required: true,
+ },
+ legendTitle: {
+ type: String,
+ required: true,
+ },
+ yAxisLabel: {
+ type: String,
+ required: true,
+ },
+ timeSeries: {
+ type: Array,
+ required: true,
+ },
+ unitOfDisplay: {
+ type: String,
+ required: true,
+ },
+ currentDataIndex: {
+ type: Number,
+ required: true,
+ },
+ showLegendGroup: {
+ type: Boolean,
+ required: false,
+ default: true,
+ },
+ },
+ data() {
+ return {
+ yLabelWidth: 0,
+ yLabelHeight: 0,
+ seriesXPosition: 0,
+ metricUsageXPosition: 0,
+ };
+ },
+ computed: {
+ textTransform() {
+ const yCoordinate =
+ (this.graphHeight - this.margin.top + this.measurements.axisLabelLineOffset) / 2 || 0;
- createSeriesString(index, series) {
- if (series.metricTag) {
- return `${series.metricTag} ${this.formatMetricUsage(series)}`;
- }
- return `${this.legendTitle} series ${index + 1} ${this.formatMetricUsage(series)}`;
- },
+ return `translate(15, ${yCoordinate}) rotate(-90)`;
+ },
+ rectTransform() {
+ const yCoordinate =
+ (this.graphHeight - this.margin.top + this.measurements.axisLabelLineOffset) / 2 +
+ this.yLabelWidth / 2 || 0;
- strokeDashArray(type) {
- if (type === 'dashed') return '6, 3';
- if (type === 'dotted') return '3, 3';
- return null;
- },
+ return `translate(0, ${yCoordinate}) rotate(-90)`;
+ },
+ xPosition() {
+ return (this.graphWidth + this.measurements.axisLabelLineOffset) / 2 - this.margin.right || 0;
+ },
+ yPosition() {
+ return this.graphHeight - this.margin.top + this.measurements.axisLabelLineOffset || 0;
+ },
+ },
+ mounted() {
+ this.$nextTick(() => {
+ const bbox = this.$refs.ylabel.getBBox();
+ this.metricUsageXPosition = 0;
+ this.seriesXPosition = 0;
+ if (this.$refs.legendTitleSvg != null) {
+ this.seriesXPosition = this.$refs.legendTitleSvg[0].getBBox().width;
+ }
+ if (this.$refs.seriesTitleSvg != null) {
+ this.metricUsageXPosition = this.$refs.seriesTitleSvg[0].getBBox().width;
+ }
+ this.yLabelWidth = bbox.width + 10; // Added some padding
+ this.yLabelHeight = bbox.height + 5;
+ });
+ },
+ methods: {
+ translateLegendGroup(index) {
+ return `translate(0, ${12 * index})`;
+ },
+ formatMetricUsage(series) {
+ const value =
+ series.values[this.currentDataIndex] && series.values[this.currentDataIndex].value;
+ if (isNaN(value)) {
+ return '-';
+ }
+ return `${formatRelevantDigits(value)} ${this.unitOfDisplay}`;
+ },
+ createSeriesString(index, series) {
+ if (series.metricTag) {
+ return `${series.metricTag} ${this.formatMetricUsage(series)}`;
+ }
+ return `${this.legendTitle} series ${index + 1} ${this.formatMetricUsage(series)}`;
+ },
+ strokeDashArray(type) {
+ if (type === 'dashed') return '6, 3';
+ if (type === 'dotted') return '3, 3';
+ return null;
},
- };
+ },
+};
</script>
<template>
<g class="axis-label-container">
diff --git a/app/assets/javascripts/monitoring/components/graph/path.vue b/app/assets/javascripts/monitoring/components/graph/path.vue
index c9721c4cb01..881560124a5 100644
--- a/app/assets/javascripts/monitoring/components/graph/path.vue
+++ b/app/assets/javascripts/monitoring/components/graph/path.vue
@@ -1,36 +1,36 @@
<script>
- export default {
- props: {
- generatedLinePath: {
- type: String,
- required: true,
- },
- generatedAreaPath: {
- type: String,
- required: true,
- },
- lineStyle: {
- type: String,
- required: false,
- default: '',
- },
- lineColor: {
- type: String,
- required: true,
- },
- areaColor: {
- type: String,
- required: true,
- },
+export default {
+ props: {
+ generatedLinePath: {
+ type: String,
+ required: true,
},
- computed: {
- strokeDashArray() {
- if (this.lineStyle === 'dashed') return '3, 1';
- if (this.lineStyle === 'dotted') return '1, 1';
- return null;
- },
+ generatedAreaPath: {
+ type: String,
+ required: true,
},
- };
+ lineStyle: {
+ type: String,
+ required: false,
+ default: '',
+ },
+ lineColor: {
+ type: String,
+ required: true,
+ },
+ areaColor: {
+ type: String,
+ required: true,
+ },
+ },
+ computed: {
+ strokeDashArray() {
+ if (this.lineStyle === 'dashed') return '3, 1';
+ if (this.lineStyle === 'dotted') return '1, 1';
+ return null;
+ },
+ },
+};
</script>
<template>
<g>
diff --git a/app/assets/javascripts/monitoring/components/graph_group.vue b/app/assets/javascripts/monitoring/components/graph_group.vue
index f71cf614552..a6dbe42a8f0 100644
--- a/app/assets/javascripts/monitoring/components/graph_group.vue
+++ b/app/assets/javascripts/monitoring/components/graph_group.vue
@@ -1,17 +1,17 @@
<script>
- export default {
- props: {
- name: {
- type: String,
- required: true,
- },
- showPanels: {
- type: Boolean,
- required: false,
- default: true,
- },
+export default {
+ props: {
+ name: {
+ type: String,
+ required: true,
},
- };
+ showPanels: {
+ type: Boolean,
+ required: false,
+ default: true,
+ },
+ },
+};
</script>
<template>
diff --git a/app/assets/javascripts/notes/components/noteable_discussion.vue b/app/assets/javascripts/notes/components/noteable_discussion.vue
index cf579c5d4dc..e0f883a8e08 100644
--- a/app/assets/javascripts/notes/components/noteable_discussion.vue
+++ b/app/assets/javascripts/notes/components/noteable_discussion.vue
@@ -292,10 +292,12 @@ Please check your network connection and try again.`;
</button>
</div>
<div
+ v-if="note.resolvable"
class="btn-group discussion-actions"
- role="group">
+ role="group"
+ >
<div
- v-if="note.resolvable && !discussionResolved"
+ v-if="!discussionResolved"
class="btn-group"
role="group">
<a
diff --git a/app/assets/javascripts/pages/milestones/shared/components/promote_milestone_modal.vue b/app/assets/javascripts/pages/milestones/shared/components/promote_milestone_modal.vue
index 22248418c41..2bda2aeb3a1 100644
--- a/app/assets/javascripts/pages/milestones/shared/components/promote_milestone_modal.vue
+++ b/app/assets/javascripts/pages/milestones/shared/components/promote_milestone_modal.vue
@@ -19,15 +19,19 @@
type: String,
required: true,
},
+ groupName: {
+ type: String,
+ required: true,
+ },
},
computed: {
title() {
return sprintf(s__('Milestones|Promote %{milestoneTitle} to group milestone?'), { milestoneTitle: this.milestoneTitle });
},
text() {
- return s__(`Milestones|Promoting this milestone will make it available for all projects inside the group.
+ return sprintf(s__(`Milestones|Promoting %{milestoneTitle} will make it available for all projects inside %{groupName}.
Existing project milestones with the same title will be merged.
- This action cannot be reversed.`);
+ This action cannot be reversed.`), { milestoneTitle: this.milestoneTitle, groupName: this.groupName });
},
},
methods: {
diff --git a/app/assets/javascripts/pages/milestones/shared/promote_milestone_modal_init.js b/app/assets/javascripts/pages/milestones/shared/promote_milestone_modal_init.js
index d00f81c9094..8e79341e96a 100644
--- a/app/assets/javascripts/pages/milestones/shared/promote_milestone_modal_init.js
+++ b/app/assets/javascripts/pages/milestones/shared/promote_milestone_modal_init.js
@@ -25,6 +25,7 @@ export default () => {
const modalProps = {
milestoneTitle: button.dataset.milestoneTitle,
url: button.dataset.url,
+ groupName: button.dataset.groupName,
};
eventHub.$once('promoteMilestoneModal.requestStarted', onRequestStarted);
eventHub.$emit('promoteMilestoneModal.props', modalProps);
@@ -54,6 +55,7 @@ export default () => {
return {
modalProps: {
milestoneTitle: '',
+ groupName: '',
url: '',
},
};
diff --git a/app/assets/javascripts/pages/ci/lints/ci_lint_editor.js b/app/assets/javascripts/pages/projects/ci/lints/ci_lint_editor.js
index 9ab73be80a0..9ab73be80a0 100644
--- a/app/assets/javascripts/pages/ci/lints/ci_lint_editor.js
+++ b/app/assets/javascripts/pages/projects/ci/lints/ci_lint_editor.js
diff --git a/app/assets/javascripts/pages/ci/lints/new/index.js b/app/assets/javascripts/pages/projects/ci/lints/new/index.js
index 8e8a843da0b..8e8a843da0b 100644
--- a/app/assets/javascripts/pages/ci/lints/new/index.js
+++ b/app/assets/javascripts/pages/projects/ci/lints/new/index.js
diff --git a/app/assets/javascripts/pages/ci/lints/show/index.js b/app/assets/javascripts/pages/projects/ci/lints/show/index.js
index 8e8a843da0b..8e8a843da0b 100644
--- a/app/assets/javascripts/pages/ci/lints/show/index.js
+++ b/app/assets/javascripts/pages/projects/ci/lints/show/index.js
diff --git a/app/assets/javascripts/pages/projects/labels/components/promote_label_modal.vue b/app/assets/javascripts/pages/projects/labels/components/promote_label_modal.vue
index 54695dfeb99..ad6df51bb7a 100644
--- a/app/assets/javascripts/pages/projects/labels/components/promote_label_modal.vue
+++ b/app/assets/javascripts/pages/projects/labels/components/promote_label_modal.vue
@@ -1,4 +1,5 @@
<script>
+ import _ from 'underscore';
import axios from '~/lib/utils/axios_utils';
import createFlash from '~/flash';
import GlModal from '~/vue_shared/components/gl_modal.vue';
@@ -27,19 +28,26 @@
type: String,
required: true,
},
+ groupName: {
+ type: String,
+ required: true,
+ },
},
computed: {
text() {
- return s__(`Milestones|Promoting this label will make it available for all projects inside the group.
- Existing project labels with the same title will be merged. This action cannot be reversed.`);
+ return sprintf(s__(`Labels|Promoting %{labelTitle} will make it available for all projects inside %{groupName}.
+ Existing project labels with the same title will be merged. This action cannot be reversed.`), {
+ labelTitle: this.labelTitle,
+ groupName: this.groupName,
+ });
},
title() {
const label = `<span
class="label color-label"
style="background-color: ${this.labelColor}; color: ${this.labelTextColor};"
- >${this.labelTitle}</span>`;
+ >${_.escape(this.labelTitle)}</span>`;
- return sprintf(s__('Labels|Promote label %{labelTitle} to Group Label?'), {
+ return sprintf(s__('Labels|<span>Promote label</span> %{labelTitle} <span>to Group Label?</span>'), {
labelTitle: label,
}, false);
},
@@ -69,6 +77,7 @@
>
<div
slot="title"
+ class="modal-title-with-label"
v-html="title"
>
{{ title }}
diff --git a/app/assets/javascripts/pages/projects/labels/index/index.js b/app/assets/javascripts/pages/projects/labels/index/index.js
index 2abcbfab1ed..03cfef61311 100644
--- a/app/assets/javascripts/pages/projects/labels/index/index.js
+++ b/app/assets/javascripts/pages/projects/labels/index/index.js
@@ -30,6 +30,7 @@ const initLabelIndex = () => {
labelColor: button.dataset.labelColor,
labelTextColor: button.dataset.labelTextColor,
url: button.dataset.url,
+ groupName: button.dataset.groupName,
};
eventHub.$once('promoteLabelModal.requestStarted', onRequestStarted);
eventHub.$emit('promoteLabelModal.props', modalProps);
@@ -62,6 +63,7 @@ const initLabelIndex = () => {
labelColor: '',
labelTextColor: '',
url: '',
+ groupName: '',
},
};
},
diff --git a/app/assets/javascripts/sidebar/components/time_tracking/comparison_pane.js b/app/assets/javascripts/sidebar/components/time_tracking/comparison_pane.vue
index b5ebccd3795..82c4562f9a9 100644
--- a/app/assets/javascripts/sidebar/components/time_tracking/comparison_pane.js
+++ b/app/assets/javascripts/sidebar/components/time_tracking/comparison_pane.vue
@@ -1,7 +1,8 @@
+<script>
import { parseSeconds, stringifyTime } from '../../../lib/utils/pretty_time';
export default {
- name: 'time-tracking-comparison-pane',
+ name: 'TimeTrackingComparisonPane',
props: {
timeSpent: {
type: Number,
@@ -43,47 +44,50 @@ export default {
return this.timeEstimate >= this.timeSpent ? 'within_estimate' : 'over_estimate';
},
},
- template: `
- <div class="time-tracking-comparison-pane">
+};
+</script>
+
+<template>
+ <div class="time-tracking-comparison-pane">
+ <div
+ class="compare-meter"
+ data-toggle="tooltip"
+ data-placement="top"
+ role="timeRemainingDisplay"
+ :aria-valuenow="timeRemainingTooltip"
+ :title="timeRemainingTooltip"
+ :data-original-title="timeRemainingTooltip"
+ :class="timeRemainingStatusClass"
+ >
<div
- class="compare-meter"
- data-toggle="tooltip"
- data-placement="top"
- role="timeRemainingDisplay"
- :aria-valuenow="timeRemainingTooltip"
- :title="timeRemainingTooltip"
- :data-original-title="timeRemainingTooltip"
- :class="timeRemainingStatusClass"
+ class="meter-container"
+ role="timeSpentPercent"
+ :aria-valuenow="timeRemainingPercent"
>
<div
- class="meter-container"
- role="timeSpentPercent"
- :aria-valuenow="timeRemainingPercent"
+ :style="{ width: timeRemainingPercent }"
+ class="meter-fill"
>
- <div
- :style="{ width: timeRemainingPercent }"
- class="meter-fill"
- />
</div>
- <div class="compare-display-container">
- <div class="compare-display pull-left">
- <span class="compare-label">
+ </div>
+ <div class="compare-display-container">
+ <div class="compare-display pull-left">
+ <span class="compare-label">
{{ s__('TimeTracking|Spent') }}
- </span>
- <span class="compare-value spent">
- {{ timeSpentHumanReadable }}
- </span>
- </div>
- <div class="compare-display estimated pull-right">
- <span class="compare-label">
- {{ s__('TimeTrackingEstimated|Est') }}
- </span>
- <span class="compare-value">
- {{ timeEstimateHumanReadable }}
- </span>
- </div>
+ </span>
+ <span class="compare-value spent">
+ {{ timeSpentHumanReadable }}
+ </span>
+ </div>
+ <div class="compare-display estimated pull-right">
+ <span class="compare-label">
+ {{ s__('TimeTrackingEstimated|Est') }}
+ </span>
+ <span class="compare-value">
+ {{ timeEstimateHumanReadable }}
+ </span>
</div>
</div>
</div>
- `,
-};
+ </div>
+</template>
diff --git a/app/assets/javascripts/sidebar/components/time_tracking/time_tracker.vue b/app/assets/javascripts/sidebar/components/time_tracking/time_tracker.vue
index 28240468d2c..1c641c73ea3 100644
--- a/app/assets/javascripts/sidebar/components/time_tracking/time_tracker.vue
+++ b/app/assets/javascripts/sidebar/components/time_tracking/time_tracker.vue
@@ -4,7 +4,7 @@ import TimeTrackingCollapsedState from './collapsed_state.vue';
import timeTrackingSpentOnlyPane from './spent_only_pane';
import timeTrackingNoTrackingPane from './no_tracking_pane';
import timeTrackingEstimateOnlyPane from './estimate_only_pane';
-import timeTrackingComparisonPane from './comparison_pane';
+import TimeTrackingComparisonPane from './comparison_pane.vue';
import eventHub from '../../event_hub';
@@ -15,7 +15,7 @@ export default {
'time-tracking-estimate-only-pane': timeTrackingEstimateOnlyPane,
'time-tracking-spent-only-pane': timeTrackingSpentOnlyPane,
'time-tracking-no-tracking-pane': timeTrackingNoTrackingPane,
- 'time-tracking-comparison-pane': timeTrackingComparisonPane,
+ TimeTrackingComparisonPane,
'time-tracking-help-state': timeTrackingHelpState,
},
props: {
diff --git a/app/assets/javascripts/vue_merge_request_widget/components/states/sha_mismatch.vue b/app/assets/javascripts/vue_merge_request_widget/components/states/sha_mismatch.vue
index 04100871a94..7cc07401911 100644
--- a/app/assets/javascripts/vue_merge_request_widget/components/states/sha_mismatch.vue
+++ b/app/assets/javascripts/vue_merge_request_widget/components/states/sha_mismatch.vue
@@ -17,8 +17,8 @@ export default {
/>
<div class="media-body space-children">
<span class="bold">
- The source branch HEAD has recently changed.
- Please reload the page and review the changes before merging.
+ {{ s__(`mrWidget|The source branch HEAD has recently changed.
+Please reload the page and review the changes before merging`) }}
</span>
</div>
</div>
diff --git a/app/assets/stylesheets/framework/gitlab_theme.scss b/app/assets/stylesheets/framework/gitlab_theme.scss
index db36e27fa74..05cb0196ced 100644
--- a/app/assets/stylesheets/framework/gitlab_theme.scss
+++ b/app/assets/stylesheets/framework/gitlab_theme.scss
@@ -2,7 +2,15 @@
* Styles the GitLab application with a specific color theme
*/
-@mixin gitlab-theme($color-100, $color-200, $color-500, $color-700, $color-800, $color-900, $color-alternate) {
+@mixin gitlab-theme(
+ $color-100,
+ $color-200,
+ $color-500,
+ $color-700,
+ $color-800,
+ $color-900,
+ $color-alternate
+) {
// Header
.navbar-gitlab {
@@ -23,7 +31,7 @@
> li {
> a:hover,
> a:focus {
- background-color: rgba($color-200, .2);
+ background-color: rgba($color-200, 0.2);
}
&.active > a,
@@ -33,7 +41,7 @@
}
&.line-separator {
- border-left: 1px solid rgba($color-200, .2);
+ border-left: 1px solid rgba($color-200, 0.2);
}
}
}
@@ -56,7 +64,7 @@
&:hover,
&:focus {
@media (min-width: $screen-sm-min) {
- background-color: rgba($color-200, .2);
+ background-color: rgba($color-200, 0.2);
}
svg {
@@ -91,34 +99,34 @@
> a {
&:hover,
&:focus {
- background-color: rgba($color-200, .2);
+ background-color: rgba($color-200, 0.2);
}
}
}
.search {
form {
- background-color: rgba($color-200, .2);
+ background-color: rgba($color-200, 0.2);
&:hover {
- background-color: rgba($color-200, .3);
+ background-color: rgba($color-200, 0.3);
}
}
.location-badge {
color: $color-100;
- background-color: rgba($color-200, .1);
+ background-color: rgba($color-200, 0.1);
border-right: 1px solid $color-800;
}
.search-input::placeholder {
- color: rgba($color-200, .8);
+ color: rgba($color-200, 0.8);
}
.search-input-wrap {
.search-icon,
.clear-icon {
- fill: rgba($color-200, .8);
+ fill: rgba($color-200, 0.8);
}
}
@@ -133,7 +141,7 @@
.search-input-wrap {
.search-icon {
- fill: rgba($color-200, .8);
+ fill: rgba($color-200, 0.8);
}
}
}
@@ -144,7 +152,6 @@
color: $color-900;
}
-
// Sidebar
.nav-sidebar li.active {
box-shadow: inset 4px 0 0 $color-700;
@@ -169,28 +176,94 @@
font-weight: $gl-font-weight-bold;
}
}
-}
+ // Web IDE
+ .ide-sidebar-link {
+ color: $color-200;
+ background-color: $color-700;
+
+ &:hover,
+ &:focus {
+ background-color: $color-500;
+ }
+
+ &:active {
+ background: $color-800;
+ }
+ }
+
+ .branch-container {
+ border-left-color: $color-700;
+ }
+
+ .branch-header-title {
+ color: $color-700;
+ }
+
+ .ide-file-list .file.file-active {
+ color: $color-700;
+ }
+}
body {
&.ui_indigo {
- @include gitlab-theme($indigo-100, $indigo-200, $indigo-500, $indigo-700, $indigo-800, $indigo-900, $white-light);
+ @include gitlab-theme(
+ $indigo-100,
+ $indigo-200,
+ $indigo-500,
+ $indigo-700,
+ $indigo-800,
+ $indigo-900,
+ $white-light
+ );
}
&.ui_dark {
- @include gitlab-theme($theme-gray-100, $theme-gray-200, $theme-gray-500, $theme-gray-700, $theme-gray-800, $theme-gray-900, $white-light);
+ @include gitlab-theme(
+ $theme-gray-100,
+ $theme-gray-200,
+ $theme-gray-500,
+ $theme-gray-700,
+ $theme-gray-800,
+ $theme-gray-900,
+ $white-light
+ );
}
&.ui_blue {
- @include gitlab-theme($theme-blue-100, $theme-blue-200, $theme-blue-500, $theme-blue-700, $theme-blue-800, $theme-blue-900, $white-light);
+ @include gitlab-theme(
+ $theme-blue-100,
+ $theme-blue-200,
+ $theme-blue-500,
+ $theme-blue-700,
+ $theme-blue-800,
+ $theme-blue-900,
+ $white-light
+ );
}
&.ui_green {
- @include gitlab-theme($theme-green-100, $theme-green-200, $theme-green-500, $theme-green-700, $theme-green-800, $theme-green-900, $white-light);
+ @include gitlab-theme(
+ $theme-green-100,
+ $theme-green-200,
+ $theme-green-500,
+ $theme-green-700,
+ $theme-green-800,
+ $theme-green-900,
+ $white-light
+ );
}
&.ui_light {
- @include gitlab-theme($theme-gray-900, $theme-gray-700, $theme-gray-800, $theme-gray-700, $theme-gray-700, $theme-gray-100, $theme-gray-700);
+ @include gitlab-theme(
+ $theme-gray-900,
+ $theme-gray-700,
+ $theme-gray-800,
+ $theme-gray-700,
+ $theme-gray-700,
+ $theme-gray-100,
+ $theme-gray-700
+ );
.navbar-gitlab {
background-color: $theme-gray-100;
@@ -270,5 +343,9 @@ body {
.sidebar-top-level-items > li.active .badge {
color: $theme-gray-900;
}
+
+ .ide-sidebar-link {
+ color: $white-light;
+ }
}
}
diff --git a/app/assets/stylesheets/framework/modal.scss b/app/assets/stylesheets/framework/modal.scss
index 48b981dd31f..eb789cc64b0 100644
--- a/app/assets/stylesheets/framework/modal.scss
+++ b/app/assets/stylesheets/framework/modal.scss
@@ -4,9 +4,15 @@
.page-title,
.modal-title {
+ .modal-title-with-label span {
+ vertical-align: middle;
+ display: inline-block;
+ }
+
.color-label {
font-size: $gl-font-size;
padding: $gl-vert-padding $label-padding-modal;
+ vertical-align: middle;
}
}
diff --git a/app/assets/stylesheets/pages/lint.scss b/app/assets/stylesheets/pages/lint.scss
deleted file mode 100644
index 68b6c5ecbd4..00000000000
--- a/app/assets/stylesheets/pages/lint.scss
+++ /dev/null
@@ -1,21 +0,0 @@
-.ci-body {
- .incorrect-syntax {
- font-size: 18px;
- color: $lint-incorrect-color;
- }
-
- .correct-syntax {
- font-size: 18px;
- color: $lint-correct-color;
- }
-}
-
-.ci-linter {
- .ci-editor {
- height: 400px;
- }
-
- .ci-template pre {
- white-space: pre-wrap;
- }
-}
diff --git a/app/assets/stylesheets/pages/projects.scss b/app/assets/stylesheets/pages/projects.scss
index 584b0579b72..9a770d77685 100644
--- a/app/assets/stylesheets/pages/projects.scss
+++ b/app/assets/stylesheets/pages/projects.scss
@@ -1121,3 +1121,25 @@ pre.light-well {
padding-top: $gl-padding;
padding-bottom: 37px;
}
+
+.project-ci-body {
+ .incorrect-syntax {
+ font-size: 18px;
+ color: $lint-incorrect-color;
+ }
+
+ .correct-syntax {
+ font-size: 18px;
+ color: $lint-correct-color;
+ }
+}
+
+.project-ci-linter {
+ .ci-editor {
+ height: 400px;
+ }
+
+ .ci-template pre {
+ white-space: pre-wrap;
+ }
+}
diff --git a/app/assets/stylesheets/pages/repo.scss b/app/assets/stylesheets/pages/repo.scss
index 57b995adb64..34340853165 100644
--- a/app/assets/stylesheets/pages/repo.scss
+++ b/app/assets/stylesheets/pages/repo.scss
@@ -20,7 +20,6 @@
display: flex;
height: calc(100vh - #{$header-height});
margin-top: 40px;
- color: $almost-black;
border-top: 1px solid $white-dark;
border-bottom: 1px solid $white-dark;
@@ -43,7 +42,11 @@
cursor: pointer;
&.file-open {
- background: $white-normal;
+ background: $link-active-background;
+ }
+
+ &.file-active {
+ font-weight: $gl-font-weight-bold;
}
.ide-file-name {
@@ -72,7 +75,10 @@
margin-right: -8px;
}
- &:hover {
+ &:hover,
+ &:focus {
+ background: $link-active-background;
+
.ide-new-btn {
display: block;
}
@@ -402,7 +408,7 @@
}
.branch-container {
- border-left: 4px solid $indigo-700;
+ border-left: 4px solid;
margin-bottom: $gl-bar-padding;
}
@@ -414,7 +420,6 @@
.branch-header-title {
flex: 1;
padding: $grid-size $gl-padding;
- color: $indigo-700;
font-weight: $gl-font-weight-bold;
svg {
@@ -721,9 +726,7 @@
}
.ide-view {
- height: calc(
- 100vh - #{$header-height + $performance-bar-height + $flash-height}
- );
+ height: calc(100vh - #{$header-height + $performance-bar-height + $flash-height});
}
}
}
@@ -767,20 +770,7 @@
.ide-sidebar-link {
padding: $gl-padding-8 $gl-padding;
- background: $indigo-700;
- color: $white-light;
- text-decoration: none;
display: flex;
align-items: center;
-
- &:focus,
- &:hover {
- color: $white-light;
- text-decoration: underline;
- background: $indigo-500;
- }
-
- &:active {
- background: $indigo-800;
- }
+ font-weight: $gl-font-weight-bold;
}
diff --git a/app/controllers/ci/lints_controller.rb b/app/controllers/ci/lints_controller.rb
index e9bd1689a1e..738a6a5173e 100644
--- a/app/controllers/ci/lints_controller.rb
+++ b/app/controllers/ci/lints_controller.rb
@@ -4,20 +4,5 @@ module Ci
def show
end
-
- def create
- @content = params[:content]
- @error = Gitlab::Ci::YamlProcessor.validation_message(@content)
- @status = @error.blank?
-
- if @error.blank?
- @config_processor = Gitlab::Ci::YamlProcessor.new(@content)
- @stages = @config_processor.stages
- @builds = @config_processor.builds
- @jobs = @config_processor.jobs
- end
-
- render :show
- end
end
end
diff --git a/app/controllers/concerns/send_file_upload.rb b/app/controllers/concerns/send_file_upload.rb
new file mode 100644
index 00000000000..55011c89886
--- /dev/null
+++ b/app/controllers/concerns/send_file_upload.rb
@@ -0,0 +1,17 @@
+module SendFileUpload
+ def send_upload(file_upload, send_params: {}, redirect_params: {}, attachment: nil, disposition: 'attachment')
+ if attachment
+ redirect_params[:query] = { "response-content-disposition" => "#{disposition};filename=#{attachment.inspect}" }
+ send_params.merge!(filename: attachment, disposition: disposition)
+ end
+
+ if file_upload.file_storage?
+ send_file file_upload.path, send_params
+ elsif file_upload.class.proxy_download_enabled?
+ headers.store(*Gitlab::Workhorse.send_url(file_upload.url(**redirect_params)))
+ head :ok
+ else
+ redirect_to file_upload.url(**redirect_params)
+ end
+ end
+end
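
A minimal usage sketch of the SendFileUpload concern introduced above (not part of the patch): the controller name, the current_upload helper, and the inline disposition are illustrative assumptions; only send_upload and its keyword arguments come from the concern itself.

class ExampleAttachmentsController < ApplicationController
  include SendFileUpload

  def show
    # Stand-in for any CarrierWave-style uploader (avatar, artifact archive,
    # LFS object file, ...) exposing #exists?, #filename, #path and #url.
    attachment = current_upload
    return head :not_found unless attachment&.exists?

    # Locally stored files are streamed with send_file; remote files are
    # either proxied through Workhorse or redirected, depending on
    # proxy_download_enabled? on the uploader class.
    send_upload(attachment, attachment: attachment.filename, disposition: 'inline')
  end
end
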
diff --git a/app/controllers/concerns/uploads_actions.rb b/app/controllers/concerns/uploads_actions.rb
index 3dbfabcae8a..b9b9b6e4e88 100644
--- a/app/controllers/concerns/uploads_actions.rb
+++ b/app/controllers/concerns/uploads_actions.rb
@@ -1,5 +1,6 @@
module UploadsActions
include Gitlab::Utils::StrongMemoize
+ include SendFileUpload
UPLOAD_MOUNTS = %w(avatar attachment file logo header_logo).freeze
@@ -26,14 +27,11 @@ module UploadsActions
def show
return render_404 unless uploader&.exists?
- if uploader.file_storage?
- disposition = uploader.image_or_video? ? 'inline' : 'attachment'
- expires_in 0.seconds, must_revalidate: true, private: true
+ expires_in 0.seconds, must_revalidate: true, private: true
- send_file uploader.file.path, disposition: disposition
- else
- redirect_to uploader.url
- end
+ disposition = uploader.image_or_video? ? 'inline' : 'attachment'
+
+ send_upload(uploader, attachment: uploader.filename, disposition: disposition)
end
private
@@ -62,19 +60,27 @@ module UploadsActions
end
def build_uploader_from_upload
- return nil unless params[:secret] && params[:filename]
+ return unless uploader = build_uploader
- upload_path = uploader_class.upload_path(params[:secret], params[:filename])
- upload = Upload.find_by(uploader: uploader_class.to_s, path: upload_path)
+ upload_paths = uploader.upload_paths(params[:filename])
+ upload = Upload.find_by(uploader: uploader_class.to_s, path: upload_paths)
upload&.build_uploader
end
def build_uploader_from_params
+ return unless uploader = build_uploader
+
+ uploader.retrieve_from_store!(params[:filename])
+ uploader
+ end
+
+ def build_uploader
+ return unless params[:secret] && params[:filename]
+
uploader = uploader_class.new(model, secret: params[:secret])
- return nil unless uploader.model_valid?
+ return unless uploader.model_valid?
- uploader.retrieve_from_store!(params[:filename])
uploader
end
diff --git a/app/controllers/projects/artifacts_controller.rb b/app/controllers/projects/artifacts_controller.rb
index 0837451cc49..abc283d7aa9 100644
--- a/app/controllers/projects/artifacts_controller.rb
+++ b/app/controllers/projects/artifacts_controller.rb
@@ -1,6 +1,7 @@
class Projects::ArtifactsController < Projects::ApplicationController
include ExtractsPath
include RendersBlob
+ include SendFileUpload
layout 'project'
before_action :authorize_read_build!
@@ -10,11 +11,7 @@ class Projects::ArtifactsController < Projects::ApplicationController
before_action :entry, only: [:file]
def download
- if artifacts_file.file_storage?
- send_file artifacts_file.path, disposition: 'attachment'
- else
- redirect_to artifacts_file.url
- end
+ send_upload(artifacts_file, attachment: artifacts_file.filename)
end
def browse
@@ -45,8 +42,7 @@ class Projects::ArtifactsController < Projects::ApplicationController
end
def raw
- path = Gitlab::Ci::Build::Artifacts::Path
- .new(params[:path])
+ path = Gitlab::Ci::Build::Artifacts::Path.new(params[:path])
send_artifacts_entry(build, path)
end
@@ -75,7 +71,7 @@ class Projects::ArtifactsController < Projects::ApplicationController
end
def validate_artifacts!
- render_404 unless build && build.artifacts?
+ render_404 unless build&.artifacts?
end
def build
diff --git a/app/controllers/projects/branches_controller.rb b/app/controllers/projects/branches_controller.rb
index 965cece600e..176679f0849 100644
--- a/app/controllers/projects/branches_controller.rb
+++ b/app/controllers/projects/branches_controller.rb
@@ -21,17 +21,13 @@ class Projects::BranchesController < Projects::ApplicationController
fetch_branches_by_mode
@refs_pipelines = @project.pipelines.latest_successful_for_refs(@branches.map(&:name))
- @merged_branch_names =
- repository.merged_branch_names(@branches.map(&:name))
- # n+1: https://gitlab.com/gitlab-org/gitlab-ce/issues/37429
- Gitlab::GitalyClient.allow_n_plus_1_calls do
- @max_commits = @branches.reduce(0) do |memo, branch|
- diverging_commit_counts = repository.diverging_commit_counts(branch)
- [memo, diverging_commit_counts[:behind], diverging_commit_counts[:ahead]].max
- end
-
- render
+ @merged_branch_names = repository.merged_branch_names(@branches.map(&:name))
+ @max_commits = @branches.reduce(0) do |memo, branch|
+ diverging_commit_counts = repository.diverging_commit_counts(branch)
+ [memo, diverging_commit_counts[:behind], diverging_commit_counts[:ahead]].max
end
+
+ render
end
format.json do
branches = BranchesFinder.new(@repository, params).execute
diff --git a/app/controllers/projects/ci/lints_controller.rb b/app/controllers/projects/ci/lints_controller.rb
new file mode 100644
index 00000000000..a2185572a20
--- /dev/null
+++ b/app/controllers/projects/ci/lints_controller.rb
@@ -0,0 +1,27 @@
+class Projects::Ci::LintsController < Projects::ApplicationController
+ before_action :authorize_create_pipeline!
+
+ def show
+ end
+
+ def create
+ @content = params[:content]
+ @error = Gitlab::Ci::YamlProcessor.validation_message(@content, yaml_processor_options)
+ @status = @error.blank?
+
+ if @error.blank?
+ @config_processor = Gitlab::Ci::YamlProcessor.new(@content, yaml_processor_options)
+ @stages = @config_processor.stages
+ @builds = @config_processor.builds
+ @jobs = @config_processor.jobs
+ end
+
+ render :show
+ end
+
+ private
+
+ def yaml_processor_options
+ { project: @project, sha: project.repository.commit.sha }
+ end
+end
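
For reference, a console-style sketch of the YamlProcessor calls made by the new project-scoped lint controller above; the inline YAML and the project variable are assumptions, while validation_message, new, stages, builds and jobs are used exactly as in the controller.

# Assumes `project` is a Project whose repository has at least one commit.
content = <<~YAML
  rspec:
    script: bundle exec rspec
YAML

options = { project: project, sha: project.repository.commit.sha }

error = Gitlab::Ci::YamlProcessor.validation_message(content, options)

if error.blank?
  processor = Gitlab::Ci::YamlProcessor.new(content, options)
  processor.stages # => e.g. ["build", "test", "deploy"]
  processor.jobs   # => hash of job definitions keyed by job name
else
  error            # => human-readable validation message
end
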
diff --git a/app/controllers/projects/jobs_controller.rb b/app/controllers/projects/jobs_controller.rb
index 8b54ba3ad7c..85e972d9731 100644
--- a/app/controllers/projects/jobs_controller.rb
+++ b/app/controllers/projects/jobs_controller.rb
@@ -1,4 +1,6 @@
class Projects::JobsController < Projects::ApplicationController
+ include SendFileUpload
+
before_action :build, except: [:index, :cancel_all]
before_action :authorize_read_build!,
@@ -117,11 +119,17 @@ class Projects::JobsController < Projects::ApplicationController
end
def raw
- build.trace.read do |stream|
- if stream.file?
- send_file stream.path, type: 'text/plain; charset=utf-8', disposition: 'inline'
- else
- render_404
+ if trace_artifact_file
+ send_upload(trace_artifact_file,
+ send_params: raw_send_params,
+ redirect_params: raw_redirect_params)
+ else
+ build.trace.read do |stream|
+ if stream.file?
+ send_file stream.path, type: 'text/plain; charset=utf-8', disposition: 'inline'
+ else
+ render_404
+ end
end
end
end
@@ -136,9 +144,21 @@ class Projects::JobsController < Projects::ApplicationController
return access_denied! unless can?(current_user, :erase_build, build)
end
+ def raw_send_params
+ { type: 'text/plain; charset=utf-8', disposition: 'inline' }
+ end
+
+ def raw_redirect_params
+ { query: { 'response-content-type' => 'text/plain; charset=utf-8', 'response-content-disposition' => 'inline' } }
+ end
+
+ def trace_artifact_file
+ @trace_artifact_file ||= build.job_artifacts_trace&.file
+ end
+
def build
@build ||= project.builds.find(params[:id])
- .present(current_user: current_user)
+ .present(current_user: current_user)
end
def build_path(build)
diff --git a/app/controllers/projects/labels_controller.rb b/app/controllers/projects/labels_controller.rb
index 99790b8e7e8..516198b1b8a 100644
--- a/app/controllers/projects/labels_controller.rb
+++ b/app/controllers/projects/labels_controller.rb
@@ -112,7 +112,7 @@ class Projects::LabelsController < Projects::ApplicationController
begin
return render_404 unless promote_service.execute(@label)
- flash[:notice] = "#{@label.title} promoted to group label."
+ flash[:notice] = "#{@label.title} promoted to <a href=\"#{group_labels_path(@project.group)}\">group label</a>.".html_safe
respond_to do |format|
format.html do
redirect_to(project_labels_path(@project), status: 303)
diff --git a/app/controllers/projects/lfs_storage_controller.rb b/app/controllers/projects/lfs_storage_controller.rb
index 941638db427..2515e4b9a17 100644
--- a/app/controllers/projects/lfs_storage_controller.rb
+++ b/app/controllers/projects/lfs_storage_controller.rb
@@ -1,6 +1,7 @@
class Projects::LfsStorageController < Projects::GitHttpClientController
include LfsRequest
include WorkhorseRequest
+ include SendFileUpload
skip_before_action :verify_workhorse_api!, only: [:download, :upload_finalize]
@@ -11,25 +12,28 @@ class Projects::LfsStorageController < Projects::GitHttpClientController
return
end
- send_file lfs_object.file.path, content_type: "application/octet-stream"
+ send_upload(lfs_object.file, send_params: { content_type: "application/octet-stream" })
end
def upload_authorize
set_workhorse_internal_api_content_type
- render json: Gitlab::Workhorse.lfs_upload_ok(oid, size)
+
+ authorized = LfsObjectUploader.workhorse_authorize
+ authorized.merge!(LfsOid: oid, LfsSize: size)
+
+ render json: authorized
end
def upload_finalize
- unless tmp_filename
- render_lfs_forbidden
- return
- end
-
- if store_file(oid, size, tmp_filename)
+ if store_file!(oid, size)
head 200
else
render plain: 'Unprocessable entity', status: 422
end
+ rescue ActiveRecord::RecordInvalid
+ render_400
+ rescue ObjectStorage::RemoteStoreError
+ render_lfs_forbidden
end
private
@@ -50,38 +54,28 @@ class Projects::LfsStorageController < Projects::GitHttpClientController
params[:size].to_i
end
- def tmp_filename
- name = request.headers['X-Gitlab-Lfs-Tmp']
- return if name.include?('/')
- return unless oid.present? && name.start_with?(oid)
-
- name
- end
+ def store_file!(oid, size)
+ object = LfsObject.find_by(oid: oid, size: size)
+ unless object&.file&.exists?
+ object = create_file!(oid, size)
+ end
- def store_file(oid, size, tmp_file)
- # Define tmp_file_path early because we use it in "ensure"
- tmp_file_path = File.join(LfsObjectUploader.workhorse_upload_path, tmp_file)
+ return unless object
- object = LfsObject.find_or_create_by(oid: oid, size: size)
- file_exists = object.file.exists? || move_tmp_file_to_storage(object, tmp_file_path)
- file_exists && link_to_project(object)
- ensure
- FileUtils.rm_f(tmp_file_path)
+ link_to_project!(object)
end
- def move_tmp_file_to_storage(object, path)
- File.open(path) do |f|
- object.file = f
+ def create_file!(oid, size)
+ LfsObject.new(oid: oid, size: size).tap do |object|
+ object.file.store_workhorse_file!(params, :file)
+ object.save!
end
-
- object.file.store!
- object.save
end
- def link_to_project(object)
+ def link_to_project!(object)
if object && !object.projects.exists?(storage_project.id)
object.projects << storage_project
- object.save
+ object.save!
end
end
end
diff --git a/app/controllers/projects/milestones_controller.rb b/app/controllers/projects/milestones_controller.rb
index ff93147d00f..e898136d203 100644
--- a/app/controllers/projects/milestones_controller.rb
+++ b/app/controllers/projects/milestones_controller.rb
@@ -42,6 +42,10 @@ class Projects::MilestonesController < Projects::ApplicationController
def show
@project_namespace = @project.namespace.becomes(Namespace)
+
+ respond_to do |format|
+ format.html
+ end
end
def create
@@ -70,9 +74,9 @@ class Projects::MilestonesController < Projects::ApplicationController
end
def promote
- Milestones::PromoteService.new(project, current_user).execute(milestone)
+ promoted_milestone = Milestones::PromoteService.new(project, current_user).execute(milestone)
- flash[:notice] = "#{milestone.title} promoted to group milestone"
+ flash[:notice] = "#{milestone.title} promoted to <a href=\"#{group_milestone_path(project.group, promoted_milestone.iid)}\">group milestone</a>.".html_safe
respond_to do |format|
format.html do
redirect_to project_milestones_path(project)
diff --git a/app/controllers/projects/protected_branches_controller.rb b/app/controllers/projects/protected_branches_controller.rb
index d1719f12072..64954ac9a42 100644
--- a/app/controllers/projects/protected_branches_controller.rb
+++ b/app/controllers/projects/protected_branches_controller.rb
@@ -5,12 +5,8 @@ class Projects::ProtectedBranchesController < Projects::ProtectedRefsController
@project.repository.branches
end
- def create_service_class
- ::ProtectedBranches::CreateService
- end
-
- def update_service_class
- ::ProtectedBranches::UpdateService
+ def service_namespace
+ ::ProtectedBranches
end
def load_protected_ref
diff --git a/app/controllers/projects/protected_refs_controller.rb b/app/controllers/projects/protected_refs_controller.rb
index b51bdf7aa78..9e757a8d25f 100644
--- a/app/controllers/projects/protected_refs_controller.rb
+++ b/app/controllers/projects/protected_refs_controller.rb
@@ -37,7 +37,7 @@ class Projects::ProtectedRefsController < Projects::ApplicationController
end
def destroy
- @protected_ref.destroy
+ destroy_service_class.new(@project, current_user).execute(@protected_ref)
respond_to do |format|
format.html { redirect_to_repository_settings(@project) }
@@ -47,6 +47,18 @@ class Projects::ProtectedRefsController < Projects::ApplicationController
protected
+ def create_service_class
+ service_namespace::CreateService
+ end
+
+ def update_service_class
+ service_namespace::UpdateService
+ end
+
+ def destroy_service_class
+ service_namespace::DestroyService
+ end
+
def access_level_attributes
%i(access_level id)
end
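
The protected-refs change above (and the matching branches/tags controller changes around it) replaces per-subclass create/update service methods with a single service_namespace hook. A self-contained sketch of that constant-lookup pattern follows, with generic names (ProtectedThings, ThingsController) standing in for the real classes.

class BaseRefsController
  def create_service_class
    service_namespace::CreateService
  end

  def update_service_class
    service_namespace::UpdateService
  end

  def destroy_service_class
    service_namespace::DestroyService
  end
end

module ProtectedThings
  class CreateService; end
  class UpdateService; end
  class DestroyService; end
end

class ThingsController < BaseRefsController
  private

  # One hook per subclass instead of one method per service class.
  def service_namespace
    ProtectedThings
  end
end

ThingsController.new.destroy_service_class # => ProtectedThings::DestroyService
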
diff --git a/app/controllers/projects/protected_tags_controller.rb b/app/controllers/projects/protected_tags_controller.rb
index a5dbd7e46ae..198c938ff35 100644
--- a/app/controllers/projects/protected_tags_controller.rb
+++ b/app/controllers/projects/protected_tags_controller.rb
@@ -5,12 +5,8 @@ class Projects::ProtectedTagsController < Projects::ProtectedRefsController
@project.repository.tags
end
- def create_service_class
- ::ProtectedTags::CreateService
- end
-
- def update_service_class
- ::ProtectedTags::UpdateService
+ def service_namespace
+ ::ProtectedTags
end
def load_protected_ref
diff --git a/app/controllers/projects/raw_controller.rb b/app/controllers/projects/raw_controller.rb
index a02cc477e08..9bc774b7636 100644
--- a/app/controllers/projects/raw_controller.rb
+++ b/app/controllers/projects/raw_controller.rb
@@ -2,6 +2,7 @@
class Projects::RawController < Projects::ApplicationController
include ExtractsPath
include BlobHelper
+ include SendFileUpload
before_action :require_non_empty_project
before_action :assign_ref_vars
@@ -31,7 +32,7 @@ class Projects::RawController < Projects::ApplicationController
lfs_object = find_lfs_object
if lfs_object && lfs_object.project_allowed_access?(@project)
- send_file lfs_object.file.path, filename: @blob.name, disposition: 'attachment'
+ send_upload(lfs_object.file, attachment: @blob.name)
else
render_404
end
diff --git a/app/controllers/projects/settings/ci_cd_controller.rb b/app/controllers/projects/settings/ci_cd_controller.rb
index 259809f3429..96125b549b7 100644
--- a/app/controllers/projects/settings/ci_cd_controller.rb
+++ b/app/controllers/projects/settings/ci_cd_controller.rb
@@ -29,12 +29,12 @@ module Projects
@project_runners = @project.runners.ordered
@assignable_runners = current_user.ci_authorized_runners
.assignable_for(project).ordered.page(params[:page]).per(20)
- @shared_runners = Ci::Runner.shared.active
+ @shared_runners = ::Ci::Runner.shared.active
@shared_runners_count = @shared_runners.count(:all)
end
def define_secret_variables
- @variable = Ci::Variable.new(project: project)
+ @variable = ::Ci::Variable.new(project: project)
.present(current_user: current_user)
@variables = project.variables.order_key_asc
.map { |variable| variable.present(current_user: current_user) }
@@ -42,7 +42,7 @@ module Projects
def define_triggers_variables
@triggers = @project.triggers
- @trigger = Ci::Trigger.new
+ @trigger = ::Ci::Trigger.new
end
def define_badges_variables
diff --git a/app/controllers/root_controller.rb b/app/controllers/root_controller.rb
index 8acefd58e77..651b82f04f4 100644
--- a/app/controllers/root_controller.rb
+++ b/app/controllers/root_controller.rb
@@ -42,6 +42,10 @@ class RootController < Dashboard::ProjectsController
redirect_to(dashboard_groups_path)
when 'todos'
redirect_to(dashboard_todos_path)
+ when 'issues'
+ redirect_to(issues_dashboard_path(assignee_id: current_user.id))
+ when 'merge_requests'
+ redirect_to(merge_requests_dashboard_path(assignee_id: current_user.id))
end
end
diff --git a/app/helpers/namespaces_helper.rb b/app/helpers/namespaces_helper.rb
index 40ca666f1bf..9be93fa69ae 100644
--- a/app/helpers/namespaces_helper.rb
+++ b/app/helpers/namespaces_helper.rb
@@ -31,7 +31,7 @@ module NamespacesHelper
def namespace_icon(namespace, size = 40)
if namespace.is_a?(Group)
- group_icon(namespace)
+ group_icon_url(namespace)
else
avatar_icon_for_user(namespace.owner, size)
end
diff --git a/app/helpers/page_layout_helper.rb b/app/helpers/page_layout_helper.rb
index 18b9bf214a3..a8397b03d63 100644
--- a/app/helpers/page_layout_helper.rb
+++ b/app/helpers/page_layout_helper.rb
@@ -39,7 +39,10 @@ module PageLayoutHelper
end
def favicon
- Rails.env.development? ? 'favicon-blue.ico' : 'favicon.ico'
+ return 'favicon-yellow.ico' if Gitlab::Utils.to_boolean(ENV['CANARY'])
+ return 'favicon-blue.ico' if Rails.env.development?
+
+ 'favicon.ico'
end
def page_image
diff --git a/app/helpers/preferences_helper.rb b/app/helpers/preferences_helper.rb
index 373dfd457f7..fb523cb865b 100644
--- a/app/helpers/preferences_helper.rb
+++ b/app/helpers/preferences_helper.rb
@@ -9,12 +9,14 @@ module PreferencesHelper
# Maps `dashboard` values to more user-friendly option text
DASHBOARD_CHOICES = {
- projects: 'Your Projects (default)',
- stars: 'Starred Projects',
- project_activity: "Your Projects' Activity",
- starred_project_activity: "Starred Projects' Activity",
- groups: "Your Groups",
- todos: "Your Todos"
+ projects: _("Your Projects (default)"),
+ stars: _("Starred Projects"),
+ project_activity: _("Your Projects' Activity"),
+ starred_project_activity: _("Starred Projects' Activity"),
+ groups: _("Your Groups"),
+ todos: _("Your Todos"),
+ issues: _("Assigned Issues"),
+ merge_requests: _("Assigned Merge Requests")
}.with_indifferent_access.freeze
# Returns an Array usable by a select field for more user-friendly option text
diff --git a/app/models/appearance.rb b/app/models/appearance.rb
index dcd14c08f3c..2a6406d63c7 100644
--- a/app/models/appearance.rb
+++ b/app/models/appearance.rb
@@ -1,5 +1,7 @@
class Appearance < ActiveRecord::Base
include CacheMarkdownField
+ include AfterCommitQueue
+ include ObjectStorage::BackgroundMove
cache_markdown_field :description
cache_markdown_field :new_project_guidelines
diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb
index 68aafdd7304..7e724de9e77 100644
--- a/app/models/ci/build.rb
+++ b/app/models/ci/build.rb
@@ -3,6 +3,7 @@ module Ci
prepend ArtifactMigratable
include TokenAuthenticatable
include AfterCommitQueue
+ include ObjectStorage::BackgroundMove
include Presentable
include Importable
@@ -23,6 +24,9 @@ module Ci
has_one :job_artifacts_metadata, -> { where(file_type: Ci::JobArtifact.file_types[:metadata]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
has_one :job_artifacts_trace, -> { where(file_type: Ci::JobArtifact.file_types[:trace]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
+ has_one :metadata, class_name: 'Ci::BuildMetadata'
+ delegate :timeout, to: :metadata, prefix: true, allow_nil: true
+
##
# The "environment" field for builds is a String, and is the unexpanded name!
#
@@ -47,6 +51,7 @@ module Ci
where('(artifacts_file IS NOT NULL AND artifacts_file <> ?) OR EXISTS (?)',
'', Ci::JobArtifact.select(1).where('ci_builds.id = ci_job_artifacts.job_id').archive)
end
+ scope :with_artifacts_stored_locally, -> { with_artifacts_archive.where(artifacts_file_store: [nil, LegacyArtifactUploader::Store::LOCAL]) }
scope :with_artifacts_not_expired, ->() { with_artifacts_archive.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.now) }
scope :with_expired_artifacts, ->() { with_artifacts_archive.where('artifacts_expire_at < ?', Time.now) }
scope :last_month, ->() { where('created_at > ?', Date.today - 1.month) }
@@ -153,6 +158,14 @@ module Ci
before_transition any => [:running] do |build|
build.validates_dependencies! unless Feature.enabled?('ci_disable_validates_dependencies')
end
+
+ before_transition pending: :running do |build|
+ build.ensure_metadata.update_timeout_state
+ end
+ end
+
+ def ensure_metadata
+ metadata || build_metadata(project: project)
end
def detailed_status(current_user)
@@ -233,10 +246,6 @@ module Ci
latest_builds.where('stage_idx < ?', stage_idx)
end
- def timeout
- project.build_timeout
- end
-
def triggered_by?(current_user)
user == current_user
end
@@ -382,13 +391,19 @@ module Ci
project.running_or_pending_build_count(force: true)
end
+ def browsable_artifacts?
+ artifacts_metadata?
+ end
+
def artifacts_metadata_entry(path, **options)
- metadata = Gitlab::Ci::Build::Artifacts::Metadata.new(
- artifacts_metadata.path,
- path,
- **options)
+ artifacts_metadata.use_file do |metadata_path|
+ metadata = Gitlab::Ci::Build::Artifacts::Metadata.new(
+ metadata_path,
+ path,
+ **options)
- metadata.to_entry
+ metadata.to_entry
+ end
end
def erase_artifacts!
diff --git a/app/models/ci/build_metadata.rb b/app/models/ci/build_metadata.rb
new file mode 100644
index 00000000000..96762f8845c
--- /dev/null
+++ b/app/models/ci/build_metadata.rb
@@ -0,0 +1,35 @@
+module Ci
+ # The purpose of this class is to store build-related data that can be disposed of.
+ # Data that should be persisted forever should be stored with the Ci::Build model.
+ class BuildMetadata < ActiveRecord::Base
+ extend Gitlab::Ci::Model
+ include Presentable
+ include ChronicDurationAttribute
+
+ self.table_name = 'ci_builds_metadata'
+
+ belongs_to :build, class_name: 'Ci::Build'
+ belongs_to :project
+
+ validates :build, presence: true
+ validates :project, presence: true
+
+ chronic_duration_attr_reader :timeout_human_readable, :timeout
+
+ enum timeout_source: {
+ unknown_timeout_source: 1,
+ project_timeout_source: 2,
+ runner_timeout_source: 3
+ }
+
+ def update_timeout_state
+ return unless build.runner.present?
+
+ project_timeout = project&.build_timeout
+ timeout = [project_timeout, build.runner.maximum_timeout].compact.min
+ timeout_source = timeout < project_timeout ? :runner_timeout_source : :project_timeout_source
+
+ update(timeout: timeout, timeout_source: timeout_source)
+ end
+ end
+end
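A minimal sketch of the timeout resolution implemented in update_timeout_state above; the numeric values are assumptions chosen only to show which source wins.

    # Assumed values: project build_timeout 3600s, runner maximum_timeout 1800s.
    project_timeout = 3600                                          # project&.build_timeout
    runner_maximum  = 1800                                          # build.runner.maximum_timeout
    timeout         = [project_timeout, runner_maximum].compact.min # => 1800
    timeout_source  = timeout < project_timeout ? :runner_timeout_source : :project_timeout_source
    # => :runner_timeout_source; persisted via update(timeout: 1800, timeout_source: :runner_timeout_source)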
diff --git a/app/models/ci/job_artifact.rb b/app/models/ci/job_artifact.rb
index 0a599f72bc7..df57b4f65e3 100644
--- a/app/models/ci/job_artifact.rb
+++ b/app/models/ci/job_artifact.rb
@@ -1,5 +1,7 @@
module Ci
class JobArtifact < ActiveRecord::Base
+ include AfterCommitQueue
+ include ObjectStorage::BackgroundMove
extend Gitlab::Ci::Model
belongs_to :project
@@ -7,9 +9,11 @@ module Ci
before_save :set_size, if: :file_changed?
+ scope :with_files_stored_locally, -> { where(file_store: [nil, ::JobArtifactUploader::Store::LOCAL]) }
+
mount_uploader :file, JobArtifactUploader
- delegate :open, :exists?, to: :file
+ delegate :exists?, :open, to: :file
enum file_type: {
archive: 1,
@@ -21,6 +25,10 @@ module Ci
self.where(project: project).sum(:size)
end
+ def local_store?
+ [nil, ::JobArtifactUploader::Store::LOCAL].include?(self.file_store)
+ end
+
def set_size
self.size = file.size
end
diff --git a/app/models/ci/runner.rb b/app/models/ci/runner.rb
index 7173f88f1c7..5a4c56ec0dc 100644
--- a/app/models/ci/runner.rb
+++ b/app/models/ci/runner.rb
@@ -3,12 +3,13 @@ module Ci
extend Gitlab::Ci::Model
include Gitlab::SQL::Pattern
include RedisCacheable
+ include ChronicDurationAttribute
RUNNER_QUEUE_EXPIRY_TIME = 60.minutes
ONLINE_CONTACT_TIMEOUT = 1.hour
UPDATE_DB_RUNNER_INFO_EVERY = 40.minutes
AVAILABLE_SCOPES = %w[specific shared active paused online].freeze
- FORM_EDITABLE = %i[description tag_list active run_untagged locked access_level].freeze
+ FORM_EDITABLE = %i[description tag_list active run_untagged locked access_level maximum_timeout_human_readable].freeze
has_many :builds
has_many :runner_projects, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
@@ -51,6 +52,12 @@ module Ci
cached_attr_reader :version, :revision, :platform, :architecture, :contacted_at, :ip_address
+ chronic_duration_attr :maximum_timeout_human_readable, :maximum_timeout
+
+ validates :maximum_timeout, allow_nil: true,
+ numericality: { greater_than_or_equal_to: 600,
+ message: 'needs to be at least 10 minutes' }
+
# Searches for runners matching the given query.
#
# This method uses ILIKE on PostgreSQL and LIKE on MySQL.
diff --git a/app/models/clusters/cluster.rb b/app/models/clusters/cluster.rb
index 49eb069016a..bfdfc5ae6fe 100644
--- a/app/models/clusters/cluster.rb
+++ b/app/models/clusters/cluster.rb
@@ -10,6 +10,7 @@ module Clusters
Applications::Prometheus.application_name => Applications::Prometheus,
Applications::Runner.application_name => Applications::Runner
}.freeze
+ DEFAULT_ENVIRONMENT = '*'.freeze
belongs_to :user
@@ -50,6 +51,7 @@ module Clusters
scope :enabled, -> { where(enabled: true) }
scope :disabled, -> { where(enabled: false) }
+ scope :default_environment, -> { where(environment_scope: DEFAULT_ENVIRONMENT) }
def status_name
if provider
diff --git a/app/models/concerns/avatarable.rb b/app/models/concerns/avatarable.rb
index 318df11727e..7677891b9ce 100644
--- a/app/models/concerns/avatarable.rb
+++ b/app/models/concerns/avatarable.rb
@@ -3,6 +3,7 @@ module Avatarable
included do
prepend ShadowMethods
+ include ObjectStorage::BackgroundMove
validate :avatar_type, if: ->(user) { user.avatar.present? && user.avatar_changed? }
validates :avatar, file_size: { maximum: 200.kilobytes.to_i }
diff --git a/app/models/concerns/chronic_duration_attribute.rb b/app/models/concerns/chronic_duration_attribute.rb
new file mode 100644
index 00000000000..fa1eafb1d7a
--- /dev/null
+++ b/app/models/concerns/chronic_duration_attribute.rb
@@ -0,0 +1,39 @@
+module ChronicDurationAttribute
+ extend ActiveSupport::Concern
+
+ class_methods do
+ def chronic_duration_attr_reader(virtual_attribute, source_attribute)
+ define_method(virtual_attribute) do
+ chronic_duration_attributes[virtual_attribute] || output_chronic_duration_attribute(source_attribute)
+ end
+ end
+
+ def chronic_duration_attr_writer(virtual_attribute, source_attribute)
+ chronic_duration_attr_reader(virtual_attribute, source_attribute)
+
+ define_method("#{virtual_attribute}=") do |value|
+ chronic_duration_attributes[virtual_attribute] = value.presence || ''
+
+ begin
+ new_value = ChronicDuration.parse(value).to_i if value.present?
+ assign_attributes(source_attribute => new_value)
+ rescue ChronicDuration::DurationParseError
+ # ignore error as it will be caught by validation
+ end
+ end
+
+ validates virtual_attribute, allow_nil: true, duration: true
+ end
+
+ alias_method :chronic_duration_attr, :chronic_duration_attr_writer
+ end
+
+ def chronic_duration_attributes
+ @chronic_duration_attributes ||= {}
+ end
+
+ def output_chronic_duration_attribute(source_attribute)
+ value = attributes[source_attribute.to_s]
+ ChronicDuration.output(value, format: :short) if value
+ end
+end
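A hedged sketch of the reader/writer pair this concern generates, using the Ci::Runner declaration added later in this merge request (chronic_duration_attr :maximum_timeout_human_readable, :maximum_timeout); the sample duration string is an assumption.

    runner = Ci::Runner.new
    runner.maximum_timeout_human_readable = '1h 30m'  # ChronicDuration.parse('1h 30m') => 5400
    runner.maximum_timeout                            # => 5400 (seconds, written to the source attribute)
    runner.maximum_timeout_human_readable             # => '1h 30m' (cached input; a fresh instance falls
                                                      #    back to ChronicDuration.output(5400, format: :short))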
diff --git a/app/models/concerns/deployment_platform.rb b/app/models/concerns/deployment_platform.rb
index faa94204e33..52851b3d0b2 100644
--- a/app/models/concerns/deployment_platform.rb
+++ b/app/models/concerns/deployment_platform.rb
@@ -1,16 +1,24 @@
module DeploymentPlatform
- # EE would override this and utilize the extra argument
+ # EE would override this and utilize the environment argument
+ # rubocop:disable Gitlab/ModuleWithInstanceVariables
def deployment_platform(environment: nil)
- @deployment_platform ||=
- find_cluster_platform_kubernetes ||
- find_kubernetes_service_integration ||
- build_cluster_and_deployment_platform
+ @deployment_platform ||= {}
+
+ @deployment_platform[environment] ||= find_deployment_platform(environment)
end
private
- def find_cluster_platform_kubernetes
- clusters.find_by(enabled: true)&.platform_kubernetes
+ def find_deployment_platform(environment)
+ find_cluster_platform_kubernetes(environment: environment) ||
+ find_kubernetes_service_integration ||
+ build_cluster_and_deployment_platform
+ end
+
+ # EE would override this and utilize the environment argument
+ def find_cluster_platform_kubernetes(environment: nil)
+ clusters.enabled.default_environment
+ .last&.platform_kubernetes
end
def find_kubernetes_service_integration
diff --git a/app/models/deploy_key.rb b/app/models/deploy_key.rb
index c2e0a5fa126..89a74b7dcb1 100644
--- a/app/models/deploy_key.rb
+++ b/app/models/deploy_key.rb
@@ -27,6 +27,10 @@ class DeployKey < Key
self.private?
end
+ def user
+ super || User.ghost
+ end
+
def has_access_to?(project)
deploy_keys_project_for(project).present?
end
diff --git a/app/models/lfs_object.rb b/app/models/lfs_object.rb
index b444812a4cf..b7de46fa202 100644
--- a/app/models/lfs_object.rb
+++ b/app/models/lfs_object.rb
@@ -1,15 +1,30 @@
class LfsObject < ActiveRecord::Base
+ include AfterCommitQueue
+ include ObjectStorage::BackgroundMove
+
has_many :lfs_objects_projects, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :projects, through: :lfs_objects_projects
+ scope :with_files_stored_locally, -> { where(file_store: [nil, LfsObjectUploader::Store::LOCAL]) }
+
validates :oid, presence: true, uniqueness: true
mount_uploader :file, LfsObjectUploader
+ before_save :update_file_store
+
+ def update_file_store
+ self.file_store = file.object_store
+ end
+
def project_allowed_access?(project)
projects.exists?(project.lfs_storage_project.id)
end
+ def local_store?
+ [nil, LfsObjectUploader::Store::LOCAL].include?(self.file_store)
+ end
+
def self.destroy_unreferenced
joins("LEFT JOIN lfs_objects_projects ON lfs_objects_projects.lfs_object_id = #{table_name}.id")
.where(lfs_objects_projects: { id: nil })
diff --git a/app/models/merge_request.rb b/app/models/merge_request.rb
index 7e6d89ec9c7..91d8be5559b 100644
--- a/app/models/merge_request.rb
+++ b/app/models/merge_request.rb
@@ -536,18 +536,25 @@ class MergeRequest < ActiveRecord::Base
merge_request_diff(true)
end
+ def viewable_diffs
+ @viewable_diffs ||= merge_request_diffs.viewable.to_a
+ end
+
def merge_request_diff_for(diff_refs_or_sha)
- @merge_request_diffs_by_diff_refs_or_sha ||= Hash.new do |h, diff_refs_or_sha|
- diffs = merge_request_diffs.viewable
- h[diff_refs_or_sha] =
- if diff_refs_or_sha.is_a?(Gitlab::Diff::DiffRefs)
- diffs.find_by_diff_refs(diff_refs_or_sha)
- else
- diffs.find_by(head_commit_sha: diff_refs_or_sha)
- end
- end
+ matcher =
+ if diff_refs_or_sha.is_a?(Gitlab::Diff::DiffRefs)
+ {
+ 'start_commit_sha' => diff_refs_or_sha.start_sha,
+ 'head_commit_sha' => diff_refs_or_sha.head_sha,
+ 'base_commit_sha' => diff_refs_or_sha.base_sha
+ }
+ else
+ { 'head_commit_sha' => diff_refs_or_sha }
+ end
- @merge_request_diffs_by_diff_refs_or_sha[diff_refs_or_sha]
+ viewable_diffs.find do |diff|
+ diff.attributes.slice(*matcher.keys) == matcher
+ end
end
def version_params_for(diff_refs)
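The lookup above now scans the memoized viewable diffs in memory instead of issuing a query per SHA; a small sketch with a placeholder SHA:

    # Placeholder SHA; the attribute names come from the matcher built above.
    matcher = { 'head_commit_sha' => 'deadbeef' }
    viewable_diffs.find { |diff| diff.attributes.slice(*matcher.keys) == matcher }
    # With a Gitlab::Diff::DiffRefs argument the matcher also pins start_commit_sha and
    # base_commit_sha, so only a diff matching all three SHAs is returned.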
diff --git a/app/models/project_services/gemnasium_service.rb b/app/models/project_services/gemnasium_service.rb
index 017a9b2df6e..26cbfd784ad 100644
--- a/app/models/project_services/gemnasium_service.rb
+++ b/app/models/project_services/gemnasium_service.rb
@@ -36,7 +36,7 @@ class GemnasiumService < Service
after: data[:after],
token: token,
api_key: api_key,
- repo: project.repository.path_to_repo
+ repo: project.repository.path_to_repo # Gitaly: fixed by https://gitlab.com/gitlab-org/security-products/gemnasium-migration/issues/9
)
end
end
diff --git a/app/models/redirect_route.rb b/app/models/redirect_route.rb
index 20532527346..31de204d824 100644
--- a/app/models/redirect_route.rb
+++ b/app/models/redirect_route.rb
@@ -17,32 +17,4 @@ class RedirectRoute < ActiveRecord::Base
where(wheres, path, "#{sanitize_sql_like(path)}/%")
end
-
- scope :permanent, -> do
- if column_permanent_exists?
- where(permanent: true)
- else
- none
- end
- end
-
- scope :temporary, -> do
- if column_permanent_exists?
- where(permanent: [false, nil])
- else
- all
- end
- end
-
- default_value_for :permanent, false
-
- def permanent=(value)
- if self.class.column_permanent_exists?
- super
- end
- end
-
- def self.column_permanent_exists?
- ActiveRecord::Base.connection.column_exists?(:redirect_routes, :permanent)
- end
end
diff --git a/app/models/repository.rb b/app/models/repository.rb
index 42f1ac43e29..fd1afafe4df 100644
--- a/app/models/repository.rb
+++ b/app/models/repository.rb
@@ -93,10 +93,6 @@ class Repository
"#<#{self.class.name}:#{@disk_path}>"
end
- def create_hooks
- Gitlab::Git::Repository.create_hooks(path_to_repo, Gitlab.config.gitlab_shell.hooks_path)
- end
-
def commit(ref = 'HEAD')
return nil unless exists?
return ref if ref.is_a?(::Commit)
@@ -253,13 +249,13 @@ class Repository
end
def diverging_commit_counts(branch)
- root_ref_hash = raw_repository.commit(root_ref).id
+ @root_ref_hash ||= raw_repository.commit(root_ref).id
cache.fetch(:"diverging_commit_counts_#{branch.name}") do
# Rugged seems to throw a `ReferenceError` when given branch_names rather
# than SHA-1 hashes
number_commits_behind, number_commits_ahead =
raw_repository.count_commits_between(
- root_ref_hash,
+ @root_ref_hash,
branch.dereferenced_target.sha,
left_right: true,
max_count: MAX_DIVERGING_COUNT)
diff --git a/app/models/route.rb b/app/models/route.rb
index 07d96c21cf1..2d609920051 100644
--- a/app/models/route.rb
+++ b/app/models/route.rb
@@ -10,8 +10,6 @@ class Route < ActiveRecord::Base
presence: true,
uniqueness: { case_sensitive: false }
- validate :ensure_permanent_paths, if: :path_changed?
-
before_validation :delete_conflicting_orphaned_routes
after_create :delete_conflicting_redirects
after_update :delete_conflicting_redirects, if: :path_changed?
@@ -45,7 +43,7 @@ class Route < ActiveRecord::Base
# We are not calling route.delete_conflicting_redirects here, in hopes
# of avoiding deadlocks. The parent (self, in this method) already
# called it, which deletes conflicts for all descendants.
- route.create_redirect(old_path, permanent: permanent_redirect?) if attributes[:path]
+ route.create_redirect(old_path) if attributes[:path]
end
end
end
@@ -55,31 +53,17 @@ class Route < ActiveRecord::Base
end
def conflicting_redirects
- RedirectRoute.temporary.matching_path_and_descendants(path)
+ RedirectRoute.matching_path_and_descendants(path)
end
- def create_redirect(path, permanent: false)
- RedirectRoute.create(source: source, path: path, permanent: permanent)
+ def create_redirect(path)
+ RedirectRoute.create(source: source, path: path)
end
private
def create_redirect_for_old_path
- create_redirect(path_was, permanent: permanent_redirect?) if path_changed?
- end
-
- def permanent_redirect?
- source_type != "Project"
- end
-
- def ensure_permanent_paths
- return if path.nil?
-
- errors.add(:path, "has been taken before") if conflicting_redirect_exists?
- end
-
- def conflicting_redirect_exists?
- RedirectRoute.permanent.matching_path_and_descendants(path).exists?
+ create_redirect(path_was) if path_changed?
end
def delete_conflicting_orphaned_routes
diff --git a/app/models/service.rb b/app/models/service.rb
index 1dcb79157a2..7424cef0fc0 100644
--- a/app/models/service.rb
+++ b/app/models/service.rb
@@ -273,6 +273,7 @@ class Service < ActiveRecord::Base
def self.build_from_template(project_id, template)
service = template.dup
+ service.active = false unless service.valid?
service.template = false
service.project_id = project_id
service
diff --git a/app/models/upload.rb b/app/models/upload.rb
index 99ad37dc892..cf71a7b76fc 100644
--- a/app/models/upload.rb
+++ b/app/models/upload.rb
@@ -9,6 +9,8 @@ class Upload < ActiveRecord::Base
validates :model, presence: true
validates :uploader, presence: true
+ scope :with_files_stored_locally, -> { where(store: [nil, ObjectStorage::Store::LOCAL]) }
+
before_save :calculate_checksum!, if: :foreground_checksummable?
after_commit :schedule_checksum, if: :checksummable?
@@ -21,6 +23,7 @@ class Upload < ActiveRecord::Base
end
def absolute_path
+ raise ObjectStorage::RemoteStoreError, "Remote object has no absolute path." unless local?
return path unless relative_path?
uploader_class.absolute_path(self)
@@ -30,11 +33,11 @@ class Upload < ActiveRecord::Base
self.checksum = nil
return unless checksummable?
- self.checksum = self.class.hexdigest(absolute_path)
+ self.checksum = Digest::SHA256.file(absolute_path).hexdigest
end
- def build_uploader
- uploader_class.new(model, mount_point, **uploader_context).tap do |uploader|
+ def build_uploader(mounted_as = nil)
+ uploader_class.new(model, mounted_as || mount_point).tap do |uploader|
uploader.upload = self
uploader.retrieve_from_store!(identifier)
end
@@ -51,6 +54,12 @@ class Upload < ActiveRecord::Base
}.compact
end
+ def local?
+ return true if store.nil?
+
+ store == ObjectStorage::Store::LOCAL
+ end
+
private
def delete_file!
@@ -61,10 +70,6 @@ class Upload < ActiveRecord::Base
checksum.nil? && local? && exist?
end
- def local?
- true
- end
-
def foreground_checksummable?
checksummable? && size <= CHECKSUM_THRESHOLD
end
diff --git a/app/models/user.rb b/app/models/user.rb
index fa54581d220..f934b654225 100644
--- a/app/models/user.rb
+++ b/app/models/user.rb
@@ -82,11 +82,8 @@ class User < ActiveRecord::Base
has_one :namespace, -> { where(type: nil) }, dependent: :destroy, foreign_key: :owner_id, inverse_of: :owner, autosave: true # rubocop:disable Cop/ActiveRecordDependent
# Profile
- has_many :keys, -> do
- type = Key.arel_table[:type]
- where(type.not_eq('DeployKey').or(type.eq(nil)))
- end, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
- has_many :deploy_keys, -> { where(type: 'DeployKey') }, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
+ has_many :keys, -> { where(type: ['Key', nil]) }, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
+ has_many :deploy_keys, -> { where(type: 'DeployKey') }, dependent: :nullify # rubocop:disable Cop/ActiveRecordDependent
has_many :gpg_keys
has_many :emails, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
@@ -187,7 +184,7 @@ class User < ActiveRecord::Base
# User's Dashboard preference
# Note: When adding an option, it MUST go on the end of the array.
- enum dashboard: [:projects, :stars, :project_activity, :starred_project_activity, :groups, :todos]
+ enum dashboard: [:projects, :stars, :project_activity, :starred_project_activity, :groups, :todos, :issues, :merge_requests]
# User's Project preference
# Note: When adding an option, it MUST go on the end of the array.
diff --git a/app/policies/protected_branch_policy.rb b/app/policies/protected_branch_policy.rb
new file mode 100644
index 00000000000..1a7faa4db40
--- /dev/null
+++ b/app/policies/protected_branch_policy.rb
@@ -0,0 +1,9 @@
+class ProtectedBranchPolicy < BasePolicy
+ delegate { @subject.project }
+
+ rule { can?(:admin_project) }.policy do
+ enable :create_protected_branch
+ enable :update_protected_branch
+ enable :destroy_protected_branch
+ end
+end
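A short sketch of how the new abilities are consumed; it mirrors the ProtectedBranches service changes further down in this diff rather than introducing new behaviour.

    can?(current_user, :create_protected_branch, protected_branch)   # CreateService#authorized?
    can?(current_user, :update_protected_branch, protected_branch)   # UpdateService#execute guard
    can?(current_user, :destroy_protected_branch, protected_branch)  # DestroyService#execute guard
    # All three abilities are granted to users who hold :admin_project on the branch's project.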
diff --git a/app/presenters/ci/build_metadata_presenter.rb b/app/presenters/ci/build_metadata_presenter.rb
new file mode 100644
index 00000000000..5048f967ea8
--- /dev/null
+++ b/app/presenters/ci/build_metadata_presenter.rb
@@ -0,0 +1,18 @@
+module Ci
+ class BuildMetadataPresenter < Gitlab::View::Presenter::Delegated
+ TIMEOUT_SOURCES = {
+ unknown_timeout_source: nil,
+ project_timeout_source: 'project',
+ runner_timeout_source: 'runner'
+ }.freeze
+
+ presents :metadata
+
+ def timeout_source
+ return unless metadata.timeout_source?
+
+ TIMEOUT_SOURCES[metadata.timeout_source.to_sym] ||
+ metadata.timeout_source
+ end
+ end
+end
diff --git a/app/serializers/build_details_entity.rb b/app/serializers/build_details_entity.rb
index 69d46f5ec14..ca4480fe2b1 100644
--- a/app/serializers/build_details_entity.rb
+++ b/app/serializers/build_details_entity.rb
@@ -5,6 +5,8 @@ class BuildDetailsEntity < JobEntity
expose :runner, using: RunnerEntity
expose :pipeline, using: PipelineEntity
+ expose :metadata, using: BuildMetadataEntity
+
expose :erased_by, if: -> (*) { build.erased? }, using: UserEntity
expose :erase_path, if: -> (*) { build.erasable? && can?(current_user, :erase_build, build) } do |build|
erase_project_job_path(project, build)
diff --git a/app/serializers/build_metadata_entity.rb b/app/serializers/build_metadata_entity.rb
new file mode 100644
index 00000000000..39f429aa6c3
--- /dev/null
+++ b/app/serializers/build_metadata_entity.rb
@@ -0,0 +1,9 @@
+class BuildMetadataEntity < Grape::Entity
+ expose :timeout_human_readable do |metadata|
+ metadata.timeout_human_readable unless metadata.timeout.nil?
+ end
+
+ expose :timeout_source do |metadata|
+ metadata.present.timeout_source
+ end
+end
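An assumed example of the payload this entity produces for a build whose timeout came from the runner; the concrete values are illustrative, while the shape follows the two exposures above and the presenter's TIMEOUT_SOURCES mapping.

    # { "timeout_human_readable" => "1h", "timeout_source" => "runner" }
    # The first block returns nil when metadata.timeout is nil, so that key serializes as null.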
diff --git a/app/serializers/status_entity.rb b/app/serializers/status_entity.rb
index 3e40ecf1c1c..a7c2e21e92b 100644
--- a/app/serializers/status_entity.rb
+++ b/app/serializers/status_entity.rb
@@ -7,8 +7,14 @@ class StatusEntity < Grape::Entity
expose :details_path
expose :favicon do |status|
- dir = 'ci_favicons'
- dir = File.join(dir, 'dev') if Rails.env.development?
+ dir =
+ if Gitlab::Utils.to_boolean(ENV['CANARY'])
+ File.join('ci_favicons', 'canary')
+ elsif Rails.env.development?
+ File.join('ci_favicons', 'dev')
+ else
+ 'ci_favicons'
+ end
ActionController::Base.helpers.image_path(File.join(dir, "#{status.favicon}.ico"))
end
diff --git a/app/services/projects/create_service.rb b/app/services/projects/create_service.rb
index 7fa1387084c..633e2c8236c 100644
--- a/app/services/projects/create_service.rb
+++ b/app/services/projects/create_service.rb
@@ -90,9 +90,6 @@ module Projects
unless @project.gitlab_project_import?
@project.write_repository_config
@project.create_wiki unless skip_wiki?
- create_services_from_active_templates(@project)
-
- @project.create_labels
end
event_service.create_project(@project, current_user)
@@ -121,21 +118,29 @@ module Projects
Project.transaction do
@project.create_or_update_import_data(data: import_data[:data], credentials: import_data[:credentials]) if import_data
- if @project.save && !@project.import?
- raise 'Failed to create repository' unless @project.create_repository
+ if @project.save
+ unless @project.gitlab_project_import?
+ create_services_from_active_templates(@project)
+ @project.create_labels
+ end
+
+ unless @project.import?
+ raise 'Failed to create repository' unless @project.create_repository
+ end
end
end
end
def fail(error:)
message = "Unable to save project. Error: #{error}"
- message << "Project ID: #{@project.id}" if @project && @project.id
+ log_message = message.dup
- Rails.logger.error(message)
+ log_message << " Project ID: #{@project.id}" if @project&.id
+ Rails.logger.error(log_message)
- if @project && @project.import?
+ if @project
@project.errors.add(:base, message)
- @project.mark_import_as_failed(message)
+ @project.mark_import_as_failed(message) if @project.import?
end
@project
diff --git a/app/services/projects/update_pages_service.rb b/app/services/projects/update_pages_service.rb
index 00fdd047208..5bf8208e035 100644
--- a/app/services/projects/update_pages_service.rb
+++ b/app/services/projects/update_pages_service.rb
@@ -81,11 +81,13 @@ module Projects
end
def extract_tar_archive!(temp_path)
- results = Open3.pipeline(%W(gunzip -c #{artifacts}),
- %W(dd bs=#{BLOCK_SIZE} count=#{blocks}),
- %W(tar -x -C #{temp_path} #{SITE_PATH}),
- err: '/dev/null')
- raise FailedToExtractError, 'pages failed to extract' unless results.compact.all?(&:success?)
+ build.artifacts_file.use_file do |artifacts_path|
+ results = Open3.pipeline(%W(gunzip -c #{artifacts_path}),
+ %W(dd bs=#{BLOCK_SIZE} count=#{blocks}),
+ %W(tar -x -C #{temp_path} #{SITE_PATH}),
+ err: '/dev/null')
+ raise FailedToExtractError, 'pages failed to extract' unless results.compact.all?(&:success?)
+ end
end
def extract_zip_archive!(temp_path)
@@ -103,8 +105,10 @@ module Projects
# -n never overwrite existing files
# We add * to end of SITE_PATH, because we want to extract SITE_PATH and all subdirectories
site_path = File.join(SITE_PATH, '*')
- unless system(*%W(unzip -qq -n #{artifacts} #{site_path} -d #{temp_path}))
- raise FailedToExtractError, 'pages failed to extract'
+ build.artifacts_file.use_file do |artifacts_path|
+ unless system(*%W(unzip -n #{artifacts_path} #{site_path} -d #{temp_path}))
+ raise FailedToExtractError, 'pages failed to extract'
+ end
end
end
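A brief sketch of the use_file contract relied on above, as defined by the ObjectStorage concern added later in this diff; the block body is only an illustration.

    build.artifacts_file.use_file do |artifacts_path|
      # Local storage: yields the path of the stored file itself.
      # Remote storage: yields a temporary cached copy, removed once the block returns.
      File.size(artifacts_path)
    end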
diff --git a/app/services/protected_branches/create_service.rb b/app/services/protected_branches/create_service.rb
index 6212fd69077..9d947f73af1 100644
--- a/app/services/protected_branches/create_service.rb
+++ b/app/services/protected_branches/create_service.rb
@@ -1,11 +1,20 @@
module ProtectedBranches
class CreateService < BaseService
- attr_reader :protected_branch
-
def execute(skip_authorization: false)
- raise Gitlab::Access::AccessDeniedError unless skip_authorization || can?(current_user, :admin_project, project)
+ raise Gitlab::Access::AccessDeniedError unless skip_authorization || authorized?
+
+ protected_branch.save
+ protected_branch
+ end
+
+ def authorized?
+ can?(current_user, :create_protected_branch, protected_branch)
+ end
+
+ private
- project.protected_branches.create(params)
+ def protected_branch
+ @protected_branch ||= project.protected_branches.new(params)
end
end
end
diff --git a/app/services/protected_branches/destroy_service.rb b/app/services/protected_branches/destroy_service.rb
new file mode 100644
index 00000000000..8172c896e76
--- /dev/null
+++ b/app/services/protected_branches/destroy_service.rb
@@ -0,0 +1,9 @@
+module ProtectedBranches
+ class DestroyService < BaseService
+ def execute(protected_branch)
+ raise Gitlab::Access::AccessDeniedError unless can?(current_user, :destroy_protected_branch, protected_branch)
+
+ protected_branch.destroy
+ end
+ end
+end
diff --git a/app/services/protected_branches/update_service.rb b/app/services/protected_branches/update_service.rb
index 4b3337a5c9d..95e46645374 100644
--- a/app/services/protected_branches/update_service.rb
+++ b/app/services/protected_branches/update_service.rb
@@ -1,7 +1,7 @@
module ProtectedBranches
class UpdateService < BaseService
def execute(protected_branch)
- raise Gitlab::Access::AccessDeniedError unless can?(current_user, :admin_project, project)
+ raise Gitlab::Access::AccessDeniedError unless can?(current_user, :update_protected_branch, protected_branch)
protected_branch.update(params)
protected_branch
diff --git a/app/services/protected_tags/destroy_service.rb b/app/services/protected_tags/destroy_service.rb
new file mode 100644
index 00000000000..c868d7ad8e6
--- /dev/null
+++ b/app/services/protected_tags/destroy_service.rb
@@ -0,0 +1,7 @@
+module ProtectedTags
+ class DestroyService < BaseService
+ def execute(protected_tag)
+ protected_tag.destroy
+ end
+ end
+end
diff --git a/app/services/verify_pages_domain_service.rb b/app/services/verify_pages_domain_service.rb
index 86166047302..13cb53dee01 100644
--- a/app/services/verify_pages_domain_service.rb
+++ b/app/services/verify_pages_domain_service.rb
@@ -34,7 +34,8 @@ class VerifyPagesDomainService < BaseService
# Prevent any pre-existing grace period from being truncated
reverify = [domain.enabled_until, VERIFICATION_PERIOD.from_now].compact.max
- domain.update!(verified_at: Time.now, enabled_until: reverify)
+ domain.assign_attributes(verified_at: Time.now, enabled_until: reverify)
+ domain.save!(validate: false)
if was_disabled
notify(:enabled)
@@ -47,7 +48,9 @@ class VerifyPagesDomainService < BaseService
def unverify_domain!
if domain.verified?
- domain.update!(verified_at: nil)
+ domain.assign_attributes(verified_at: nil)
+ domain.save!(validate: false)
+
notify(:verification_failed)
end
@@ -55,7 +58,8 @@ class VerifyPagesDomainService < BaseService
end
def disable_domain!
- domain.update!(verified_at: nil, enabled_until: nil)
+ domain.assign_attributes(verified_at: nil, enabled_until: nil)
+ domain.save!(validate: false)
notify(:disabled)
diff --git a/app/uploaders/attachment_uploader.rb b/app/uploaders/attachment_uploader.rb
index 4930fb2fca7..cd819dc9bff 100644
--- a/app/uploaders/attachment_uploader.rb
+++ b/app/uploaders/attachment_uploader.rb
@@ -1,8 +1,8 @@
class AttachmentUploader < GitlabUploader
- include UploaderHelper
include RecordsUploads::Concern
-
- storage :file
+ include ObjectStorage::Concern
+ prepend ObjectStorage::Extension::RecordsUploads
+ include UploaderHelper
private
diff --git a/app/uploaders/avatar_uploader.rb b/app/uploaders/avatar_uploader.rb
index 5c8e1cea62e..5848e6c6994 100644
--- a/app/uploaders/avatar_uploader.rb
+++ b/app/uploaders/avatar_uploader.rb
@@ -1,18 +1,18 @@
class AvatarUploader < GitlabUploader
include UploaderHelper
include RecordsUploads::Concern
-
- storage :file
+ include ObjectStorage::Concern
+ prepend ObjectStorage::Extension::RecordsUploads
def exists?
model.avatar.file && model.avatar.file.present?
end
- def move_to_cache
+ def move_to_store
false
end
- def move_to_store
+ def move_to_cache
false
end
diff --git a/app/uploaders/file_mover.rb b/app/uploaders/file_mover.rb
index 8f56f09c9f7..bd7736ad74e 100644
--- a/app/uploaders/file_mover.rb
+++ b/app/uploaders/file_mover.rb
@@ -10,7 +10,11 @@ class FileMover
def execute
move
- uploader.record_upload if update_markdown
+
+ if update_markdown
+ uploader.record_upload
+ uploader.schedule_background_upload
+ end
end
private
@@ -24,11 +28,8 @@ class FileMover
updated_text = model.read_attribute(update_field)
.gsub(temp_file_uploader.markdown_link, uploader.markdown_link)
model.update_attribute(update_field, updated_text)
-
- true
rescue
revert
-
false
end
diff --git a/app/uploaders/file_uploader.rb b/app/uploaders/file_uploader.rb
index bde1161dfa8..133fdf6684d 100644
--- a/app/uploaders/file_uploader.rb
+++ b/app/uploaders/file_uploader.rb
@@ -9,14 +9,18 @@
class FileUploader < GitlabUploader
include UploaderHelper
include RecordsUploads::Concern
+ include ObjectStorage::Concern
+ prepend ObjectStorage::Extension::RecordsUploads
MARKDOWN_PATTERN = %r{\!?\[.*?\]\(/uploads/(?<secret>[0-9a-f]{32})/(?<file>.*?)\)}
DYNAMIC_PATH_PATTERN = %r{(?<secret>\h{32})/(?<identifier>.*)}
- storage :file
-
after :remove, :prune_store_dir
+ # FileUploader does not run in a model transaction, so we can simply
+ # enqueue a job after the :store hook.
+ after :store, :schedule_background_upload
+
def self.root
File.join(options.storage_path, 'uploads')
end
@@ -28,8 +32,11 @@ class FileUploader < GitlabUploader
)
end
- def self.base_dir(model)
- model_path_segment(model)
+ def self.base_dir(model, store = Store::LOCAL)
+ decorated_model = model
+ decorated_model = Storage::HashedProject.new(model) if store == Store::REMOTE
+
+ model_path_segment(decorated_model)
end
# used in migrations and import/exports
@@ -47,21 +54,24 @@ class FileUploader < GitlabUploader
#
# Returns a String without a trailing slash
def self.model_path_segment(model)
- if model.hashed_storage?(:attachments)
- model.disk_path
+ case model
+ when Storage::HashedProject then model.disk_path
else
- model.full_path
+ model.hashed_storage?(:attachments) ? model.disk_path : model.full_path
end
end
- def self.upload_path(secret, identifier)
- File.join(secret, identifier)
- end
-
def self.generate_secret
SecureRandom.hex
end
+ def upload_paths(filename)
+ [
+ File.join(secret, filename),
+ File.join(base_dir(Store::REMOTE), secret, filename)
+ ]
+ end
+
attr_accessor :model
def initialize(model, mounted_as = nil, **uploader_context)
@@ -71,8 +81,10 @@ class FileUploader < GitlabUploader
apply_context!(uploader_context)
end
- def base_dir
- self.class.base_dir(@model)
+ # Enforce the usage of Hashed storage when storing to the
+ # remote store, as the FileMover doesn't support object storage.
+ def base_dir(store = nil)
+ self.class.base_dir(@model, store || object_store)
end
# we don't need to know the actual path, an uploader instance should be
@@ -82,15 +94,19 @@ class FileUploader < GitlabUploader
end
def upload_path
- self.class.upload_path(dynamic_segment, identifier)
- end
-
- def model_path_segment
- self.class.model_path_segment(@model)
+ if file_storage?
+ # Legacy path relative to project.full_path
+ File.join(dynamic_segment, identifier)
+ else
+ File.join(store_dir, identifier)
+ end
end
- def store_dir
- File.join(base_dir, dynamic_segment)
+ def store_dirs
+ {
+ Store::LOCAL => File.join(base_dir, dynamic_segment),
+ Store::REMOTE => File.join(base_dir(ObjectStorage::Store::REMOTE), dynamic_segment)
+ }
end
def markdown_link
diff --git a/app/uploaders/gitlab_uploader.rb b/app/uploaders/gitlab_uploader.rb
index 010100f2da1..f12f0466a1d 100644
--- a/app/uploaders/gitlab_uploader.rb
+++ b/app/uploaders/gitlab_uploader.rb
@@ -37,12 +37,10 @@ class GitlabUploader < CarrierWave::Uploader::Base
cache_storage.is_a?(CarrierWave::Storage::File)
end
- # Reduce disk IO
def move_to_cache
file_storage?
end
- # Reduce disk IO
def move_to_store
file_storage?
end
@@ -51,10 +49,6 @@ class GitlabUploader < CarrierWave::Uploader::Base
file.present?
end
- def store_dir
- File.join(base_dir, dynamic_segment)
- end
-
def cache_dir
File.join(root, base_dir, 'tmp/cache')
end
@@ -76,6 +70,10 @@ class GitlabUploader < CarrierWave::Uploader::Base
# Designed to be overridden by child uploaders that have a dynamic path
# segment -- that is, a path that changes based on mutable attributes of its
# associated model
+ #
+ # For example, `FileUploader` builds the storage path based on the associated
+ # project model's `path_with_namespace` value, which can change when the
+ # project or its containing namespace is moved or renamed.
def dynamic_segment
raise(NotImplementedError)
end
diff --git a/app/uploaders/job_artifact_uploader.rb b/app/uploaders/job_artifact_uploader.rb
index ad5385f45a4..ef0f8acefd6 100644
--- a/app/uploaders/job_artifact_uploader.rb
+++ b/app/uploaders/job_artifact_uploader.rb
@@ -1,5 +1,6 @@
class JobArtifactUploader < GitlabUploader
extend Workhorse::UploadPath
+ include ObjectStorage::Concern
storage_options Gitlab.config.artifacts
@@ -14,9 +15,11 @@ class JobArtifactUploader < GitlabUploader
end
def open
- raise 'Only File System is supported' unless file_storage?
-
- File.open(path, "rb") if path
+ if file_storage?
+ File.open(path, "rb") if path
+ else
+ ::Gitlab::Ci::Trace::HttpIO.new(url, size) if url
+ end
end
private
diff --git a/app/uploaders/legacy_artifact_uploader.rb b/app/uploaders/legacy_artifact_uploader.rb
index 28c458d3ff1..b726b053493 100644
--- a/app/uploaders/legacy_artifact_uploader.rb
+++ b/app/uploaders/legacy_artifact_uploader.rb
@@ -1,5 +1,6 @@
class LegacyArtifactUploader < GitlabUploader
extend Workhorse::UploadPath
+ include ObjectStorage::Concern
storage_options Gitlab.config.artifacts
diff --git a/app/uploaders/lfs_object_uploader.rb b/app/uploaders/lfs_object_uploader.rb
index e04c97ce179..eb521a22ebc 100644
--- a/app/uploaders/lfs_object_uploader.rb
+++ b/app/uploaders/lfs_object_uploader.rb
@@ -1,10 +1,6 @@
class LfsObjectUploader < GitlabUploader
extend Workhorse::UploadPath
-
- # LfsObject are in `tmp/upload` instead of `tmp/uploads`
- def self.workhorse_upload_path
- File.join(root, 'tmp/upload')
- end
+ include ObjectStorage::Concern
storage_options Gitlab.config.lfs
diff --git a/app/uploaders/namespace_file_uploader.rb b/app/uploaders/namespace_file_uploader.rb
index 993e85fbc13..1085ecb1700 100644
--- a/app/uploaders/namespace_file_uploader.rb
+++ b/app/uploaders/namespace_file_uploader.rb
@@ -4,7 +4,7 @@ class NamespaceFileUploader < FileUploader
options.storage_path
end
- def self.base_dir(model)
+ def self.base_dir(model, _store = nil)
File.join(options.base_dir, 'namespace', model_path_segment(model))
end
@@ -14,6 +14,13 @@ class NamespaceFileUploader < FileUploader
# Re-Override
def store_dir
- File.join(base_dir, dynamic_segment)
+ store_dirs[object_store]
+ end
+
+ def store_dirs
+ {
+ Store::LOCAL => File.join(base_dir, dynamic_segment),
+ Store::REMOTE => File.join('namespace', self.class.model_path_segment(model), dynamic_segment)
+ }
end
end
diff --git a/app/uploaders/object_storage.rb b/app/uploaders/object_storage.rb
new file mode 100644
index 00000000000..4028b052768
--- /dev/null
+++ b/app/uploaders/object_storage.rb
@@ -0,0 +1,434 @@
+require 'fog/aws'
+require 'carrierwave/storage/fog'
+
+#
+# This concern should add object storage support
+# to the GitlabUploader class
+#
+module ObjectStorage
+ RemoteStoreError = Class.new(StandardError)
+ UnknownStoreError = Class.new(StandardError)
+ ObjectStorageUnavailable = Class.new(StandardError)
+
+ DIRECT_UPLOAD_TIMEOUT = 4.hours
+ TMP_UPLOAD_PATH = 'tmp/upload'.freeze
+
+ module Store
+ LOCAL = 1
+ REMOTE = 2
+ end
+
+ module Extension
+ # this extension is the glue between the ObjectStorage::Concern and RecordsUploads::Concern
+ module RecordsUploads
+ extend ActiveSupport::Concern
+
+ def prepended(base)
+ raise "#{base} must include ObjectStorage::Concern to use extensions." unless base < Concern
+
+ base.include(RecordsUploads::Concern)
+ end
+
+ def retrieve_from_store!(identifier)
+ paths = store_dirs.map { |store, path| File.join(path, identifier) }
+
+ unless current_upload_satisfies?(paths, model)
+ # the upload we already have isn't right, find the correct one
+ self.upload = uploads.find_by(model: model, path: paths)
+ end
+
+ super
+ end
+
+ def build_upload
+ super.tap do |upload|
+ upload.store = object_store
+ end
+ end
+
+ def upload=(upload)
+ return unless upload
+
+ self.object_store = upload.store
+ super
+ end
+
+ def schedule_background_upload(*args)
+ return unless schedule_background_upload?
+ return unless upload
+
+ ObjectStorage::BackgroundMoveWorker.perform_async(self.class.name,
+ upload.class.to_s,
+ mounted_as,
+ upload.id)
+ end
+
+ private
+
+ def current_upload_satisfies?(paths, model)
+ return false unless upload
+ return false unless model
+
+ paths.include?(upload.path) &&
+ upload.model_id == model.id &&
+ upload.model_type == model.class.base_class.sti_name
+ end
+ end
+ end
+
+ # Add support for automatic background uploading after the file is stored.
+ #
+ module BackgroundMove
+ extend ActiveSupport::Concern
+
+ def background_upload(mount_points = [])
+ return unless mount_points.any?
+
+ run_after_commit do
+ mount_points.each { |mount| send(mount).schedule_background_upload } # rubocop:disable GitlabSecurity/PublicSend
+ end
+ end
+
+ def changed_mounts
+ self.class.uploaders.select do |mount, uploader_class|
+ mounted_as = uploader_class.serialization_column(self.class, mount)
+ uploader = send(:"#{mounted_as}") # rubocop:disable GitlabSecurity/PublicSend
+
+ next unless uploader
+ next unless uploader.exists?
+ next unless send(:"#{mounted_as}_changed?") # rubocop:disable GitlabSecurity/PublicSend
+
+ mount
+ end.keys
+ end
+
+ included do
+ after_save on: [:create, :update] do
+ background_upload(changed_mounts)
+ end
+ end
+ end
+
+ module Concern
+ extend ActiveSupport::Concern
+
+ included do |base|
+ base.include(ObjectStorage)
+
+ after :migrate, :delete_migrated_file
+ end
+
+ class_methods do
+ def object_store_options
+ options.object_store
+ end
+
+ def object_store_enabled?
+ object_store_options.enabled
+ end
+
+ def direct_upload_enabled?
+ object_store_options.direct_upload
+ end
+
+ def background_upload_enabled?
+ object_store_options.background_upload
+ end
+
+ def proxy_download_enabled?
+ object_store_options.proxy_download
+ end
+
+ def direct_download_enabled?
+ !proxy_download_enabled?
+ end
+
+ def object_store_credentials
+ object_store_options.connection.to_hash.deep_symbolize_keys
+ end
+
+ def remote_store_path
+ object_store_options.remote_directory
+ end
+
+ def serialization_column(model_class, mount_point)
+ model_class.uploader_options.dig(mount_point, :mount_on) || mount_point
+ end
+
+ def workhorse_authorize
+ if options = workhorse_remote_upload_options
+ { RemoteObject: options }
+ else
+ { TempPath: workhorse_local_upload_path }
+ end
+ end
+
+ def workhorse_local_upload_path
+ File.join(self.root, TMP_UPLOAD_PATH)
+ end
+
+ def workhorse_remote_upload_options
+ return unless self.object_store_enabled?
+ return unless self.direct_upload_enabled?
+
+ id = [CarrierWave.generate_cache_id, SecureRandom.hex].join('-')
+ upload_path = File.join(TMP_UPLOAD_PATH, id)
+ connection = ::Fog::Storage.new(self.object_store_credentials)
+ expire_at = Time.now + DIRECT_UPLOAD_TIMEOUT
+ options = { 'Content-Type' => 'application/octet-stream' }
+
+ {
+ ID: id,
+ GetURL: connection.get_object_url(remote_store_path, upload_path, expire_at),
+ DeleteURL: connection.delete_object_url(remote_store_path, upload_path, expire_at),
+ StoreURL: connection.put_object_url(remote_store_path, upload_path, expire_at, options)
+ }
+ end
+ end
+
+ # allow configuring and overwriting the filename
+ def filename
+ @filename || super || file&.filename # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ end
+
+ def filename=(filename)
+ @filename = filename # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ end
+
+ def file_storage?
+ storage.is_a?(CarrierWave::Storage::File)
+ end
+
+ def file_cache_storage?
+ cache_storage.is_a?(CarrierWave::Storage::File)
+ end
+
+ def object_store
+ @object_store ||= model.try(store_serialization_column) || Store::LOCAL
+ end
+
+ # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ def object_store=(value)
+ @object_store = value || Store::LOCAL
+ @storage = storage_for(object_store)
+ end
+ # rubocop:enable Gitlab/ModuleWithInstanceVariables
+
+ # Returns true if the current file is part of the model (i.e. is mounted in the model)
+ #
+ def persist_object_store?
+ model.respond_to?(:"#{store_serialization_column}=")
+ end
+
+ # Save the current @object_store to the model <mounted_as>_store column
+ def persist_object_store!
+ return unless persist_object_store?
+
+ updated = model.update_column(store_serialization_column, object_store)
+ raise 'Failed to update object store' unless updated
+ end
+
+ def use_file(&blk)
+ with_exclusive_lease do
+ unsafe_use_file(&blk)
+ end
+ end
+
+ #
+ # Move the file to another store
+ #
+ # new_store: Enum (Store::LOCAL, Store::REMOTE)
+ #
+ def migrate!(new_store)
+ with_exclusive_lease do
+ unsafe_migrate!(new_store)
+ end
+ end
+
+ def schedule_background_upload(*args)
+ return unless schedule_background_upload?
+
+ ObjectStorage::BackgroundMoveWorker.perform_async(self.class.name,
+ model.class.name,
+ mounted_as,
+ model.id)
+ end
+
+ def fog_directory
+ self.class.remote_store_path
+ end
+
+ def fog_credentials
+ self.class.object_store_credentials
+ end
+
+ def fog_public
+ false
+ end
+
+ def delete_migrated_file(migrated_file)
+ migrated_file.delete if exists?
+ end
+
+ def exists?
+ file.present?
+ end
+
+ def store_dir(store = nil)
+ store_dirs[store || object_store]
+ end
+
+ def store_dirs
+ {
+ Store::LOCAL => File.join(base_dir, dynamic_segment),
+ Store::REMOTE => File.join(dynamic_segment)
+ }
+ end
+
+ def store_workhorse_file!(params, identifier)
+ filename = params["#{identifier}.name"]
+
+ if remote_object_id = params["#{identifier}.remote_id"]
+ store_remote_file!(remote_object_id, filename)
+ elsif local_path = params["#{identifier}.path"]
+ store_local_file!(local_path, filename)
+ else
+ raise RemoteStoreError, 'Bad file'
+ end
+ end
+
+ private
+
+ def schedule_background_upload?
+ self.class.object_store_enabled? &&
+ self.class.background_upload_enabled? &&
+ self.file_storage?
+ end
+
+ def store_remote_file!(remote_object_id, filename)
+ raise RemoteStoreError, 'Missing filename' unless filename
+
+ file_path = File.join(TMP_UPLOAD_PATH, remote_object_id)
+ file_path = Pathname.new(file_path).cleanpath.to_s
+ raise RemoteStoreError, 'Bad file path' unless file_path.start_with?(TMP_UPLOAD_PATH + '/')
+
+ self.object_store = Store::REMOTE
+
+ # TODO:
+ # This should be changed to make use of the `tmp/cache` mechanism
+ # instead of a custom upload directory;
+ # using tmp/cache would make this implementation much simpler than it is today.
+ CarrierWave::Storage::Fog::File.new(self, storage, file_path).tap do |file|
+ raise RemoteStoreError, 'Missing file' unless file.exists?
+
+ self.filename = filename
+ self.file = storage.store!(file)
+ end
+ end
+
+ def store_local_file!(local_path, filename)
+ raise RemoteStoreError, 'Missing filename' unless filename
+
+ root_path = File.realpath(self.class.workhorse_local_upload_path)
+ file_path = File.realpath(local_path)
+ raise RemoteStoreError, 'Bad file path' unless file_path.start_with?(root_path)
+
+ self.object_store = Store::LOCAL
+ self.store!(UploadedFile.new(file_path, filename))
+ end
+
+ # This is a hack around CarrierWave: the #migrate method needs to be
+ # able to point the current file at the migrated file upon success.
+ def file=(file)
+ @file = file # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ end
+
+ def serialization_column
+ self.class.serialization_column(model.class, mounted_as)
+ end
+
+ # Returns the column where the 'store' is saved
+ # defaults to 'store'
+ def store_serialization_column
+ [serialization_column, 'store'].compact.join('_').to_sym
+ end
+
+ def storage
+ @storage ||= storage_for(object_store)
+ end
+
+ def storage_for(store)
+ case store
+ when Store::REMOTE
+ raise 'Object Storage is not enabled' unless self.class.object_store_enabled?
+
+ CarrierWave::Storage::Fog.new(self)
+ when Store::LOCAL
+ CarrierWave::Storage::File.new(self)
+ else
+ raise UnknownStoreError
+ end
+ end
+
+ def exclusive_lease_key
+ "object_storage_migrate:#{model.class}:#{model.id}"
+ end
+
+ def with_exclusive_lease
+ uuid = Gitlab::ExclusiveLease.new(exclusive_lease_key, timeout: 1.hour.to_i).try_obtain
+ raise 'exclusive lease already taken' unless uuid
+
+ yield uuid
+ ensure
+ Gitlab::ExclusiveLease.cancel(exclusive_lease_key, uuid)
+ end
+
+ #
+ # Move the file to another store
+ #
+ # new_store: Enum (Store::LOCAL, Store::REMOTE)
+ #
+ def unsafe_migrate!(new_store)
+ return unless object_store != new_store
+ return unless file
+
+ new_file = nil
+ file_to_delete = file
+ from_object_store = object_store
+ self.object_store = new_store # changes the storage and file
+
+ cache_stored_file! if file_storage?
+
+ with_callbacks(:migrate, file_to_delete) do
+ with_callbacks(:store, file_to_delete) do # for #store_versions!
+ new_file = storage.store!(file)
+ persist_object_store!
+ self.file = new_file
+ end
+ end
+
+ file
+ rescue => e
+ # in case of failure delete new file
+ new_file.delete unless new_file.nil?
+ # revert back to the old file
+ self.object_store = from_object_store
+ self.file = file_to_delete
+ raise e
+ end
+ end
+
+ def unsafe_use_file
+ if file_storage?
+ return yield path
+ end
+
+ begin
+ cache_stored_file!
+ yield cache_path
+ ensure
+ FileUtils.rm_f(cache_path)
+ cache_storage.delete_dir!(cache_path(nil))
+ end
+ end
+end
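A hedged sketch of how an uploader opts into this concern and how a file is moved between stores; ExampleUploader is a hypothetical name, and the calls mirror the uploader changes and concern methods elsewhere in this diff.

    class ExampleUploader < GitlabUploader          # hypothetical uploader
      include ObjectStorage::Concern
      storage_options Gitlab.config.artifacts       # supplies the object_store settings
    end

    uploader = job_artifact.file                    # any mount that includes the concern
    uploader.object_store                           # => ObjectStorage::Store::LOCAL or ::REMOTE
    uploader.migrate!(ObjectStorage::Store::REMOTE) # moves the file under an exclusive lease
    uploader.use_file { |path| File.size(path) }    # always yields a local filesystem path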
diff --git a/app/uploaders/personal_file_uploader.rb b/app/uploaders/personal_file_uploader.rb
index f2ad0badd53..e3898b07730 100644
--- a/app/uploaders/personal_file_uploader.rb
+++ b/app/uploaders/personal_file_uploader.rb
@@ -4,7 +4,7 @@ class PersonalFileUploader < FileUploader
options.storage_path
end
- def self.base_dir(model)
+ def self.base_dir(model, _store = nil)
File.join(options.base_dir, model_path_segment(model))
end
@@ -14,6 +14,12 @@ class PersonalFileUploader < FileUploader
File.join(model.class.to_s.underscore, model.id.to_s)
end
+ def object_store
+ return Store::LOCAL unless model
+
+ super
+ end
+
# model_path_segment does not require a model to be passed, so we can always
# generate a path, even when there's no model.
def model_valid?
@@ -22,7 +28,14 @@ class PersonalFileUploader < FileUploader
# Revert-Override
def store_dir
- File.join(base_dir, dynamic_segment)
+ store_dirs[object_store]
+ end
+
+ def store_dirs
+ {
+ Store::LOCAL => File.join(base_dir, dynamic_segment),
+ Store::REMOTE => File.join(self.class.model_path_segment(model), dynamic_segment)
+ }
end
private
diff --git a/app/uploaders/records_uploads.rb b/app/uploaders/records_uploads.rb
index 458928bc067..89c74a78835 100644
--- a/app/uploaders/records_uploads.rb
+++ b/app/uploaders/records_uploads.rb
@@ -24,8 +24,7 @@ module RecordsUploads
uploads.where(path: upload_path).delete_all
upload.destroy! if upload
- self.upload = build_upload
- upload.save!
+ self.upload = build_upload.tap(&:save!)
end
end
diff --git a/app/views/admin/application_settings/_ci_cd.html.haml b/app/views/admin/application_settings/_ci_cd.html.haml
new file mode 100644
index 00000000000..b4d2a789df0
--- /dev/null
+++ b/app/views/admin/application_settings/_ci_cd.html.haml
@@ -0,0 +1,47 @@
+= form_for @application_setting, url: admin_application_settings_path, html: { class: 'form-horizontal fieldset-form' } do |f|
+ = form_errors(@application_setting)
+
+ %fieldset
+ .form-group
+ .col-sm-offset-2.col-sm-10
+ .checkbox
+ = f.label :auto_devops_enabled do
+ = f.check_box :auto_devops_enabled
+ Enable Auto DevOps (Beta) for projects by default
+ .help-block
+ It will automatically build, test, and deploy applications based on a predefined CI/CD configuration
+ = link_to icon('question-circle'), help_page_path('topics/autodevops/index.md')
+ .form-group
+ = f.label :auto_devops_domain, class: 'control-label col-sm-2'
+ .col-sm-10
+ = f.text_field :auto_devops_domain, class: 'form-control', placeholder: 'domain.com'
+ .help-block
+ = s_("AdminSettings|Specify a domain to use by default for every project's Auto Review Apps and Auto Deploy stages.")
+ .form-group
+ .col-sm-offset-2.col-sm-10
+ .checkbox
+ = f.label :shared_runners_enabled do
+ = f.check_box :shared_runners_enabled
+ Enable shared runners for new projects
+ .form-group
+ = f.label :shared_runners_text, class: 'control-label col-sm-2'
+ .col-sm-10
+ = f.text_area :shared_runners_text, class: 'form-control', rows: 4
+ .help-block Markdown enabled
+ .form-group
+ = f.label :max_artifacts_size, 'Maximum artifacts size (MB)', class: 'control-label col-sm-2'
+ .col-sm-10
+ = f.number_field :max_artifacts_size, class: 'form-control'
+ .help-block
+ Set the maximum file size for each job's artifacts
+ = link_to icon('question-circle'), help_page_path('user/admin_area/settings/continuous_integration', anchor: 'maximum-artifacts-size')
+ .form-group
+ = f.label :default_artifacts_expire_in, 'Default artifacts expiration', class: 'control-label col-sm-2'
+ .col-sm-10
+ = f.text_field :default_artifacts_expire_in, class: 'form-control'
+ .help-block
+ Set the default expiration time for each job's artifacts.
+ 0 for unlimited.
+ = link_to icon('question-circle'), help_page_path('user/admin_area/settings/continuous_integration', anchor: 'default-artifacts-expiration')
+
+ = f.submit 'Save changes', class: "btn btn-success"
diff --git a/app/views/admin/application_settings/_form.html.haml b/app/views/admin/application_settings/_form.html.haml
index 0f75db3f6ae..636535fba84 100644
--- a/app/views/admin/application_settings/_form.html.haml
+++ b/app/views/admin/application_settings/_form.html.haml
@@ -1,50 +1,6 @@
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'form-horizontal fieldset-form' } do |f|
= form_errors(@application_setting)
- %fieldset
- %legend Continuous Integration and Deployment
- .form-group
- .col-sm-offset-2.col-sm-10
- .checkbox
- = f.label :auto_devops_enabled do
- = f.check_box :auto_devops_enabled
- Enabled Auto DevOps (Beta) for projects by default
- .help-block
- It will automatically build, test, and deploy applications based on a predefined CI/CD configuration
- = link_to icon('question-circle'), help_page_path('topics/autodevops/index.md')
- .form-group
- = f.label :auto_devops_domain, class: 'control-label col-sm-2'
- .col-sm-10
- = f.text_field :auto_devops_domain, class: 'form-control', placeholder: 'domain.com'
- .help-block
- = s_("AdminSettings|Specify a domain to use by default for every project's Auto Review Apps and Auto Deploy stages.")
- .form-group
- .col-sm-offset-2.col-sm-10
- .checkbox
- = f.label :shared_runners_enabled do
- = f.check_box :shared_runners_enabled
- Enable shared runners for new projects
- .form-group
- = f.label :shared_runners_text, class: 'control-label col-sm-2'
- .col-sm-10
- = f.text_area :shared_runners_text, class: 'form-control', rows: 4
- .help-block Markdown enabled
- .form-group
- = f.label :max_artifacts_size, 'Maximum artifacts size (MB)', class: 'control-label col-sm-2'
- .col-sm-10
- = f.number_field :max_artifacts_size, class: 'form-control'
- .help-block
- Set the maximum file size for each job's artifacts
- = link_to icon('question-circle'), help_page_path('user/admin_area/settings/continuous_integration', anchor: 'maximum-artifacts-size')
- .form-group
- = f.label :default_artifacts_expire_in, 'Default artifacts expiration', class: 'control-label col-sm-2'
- .col-sm-10
- = f.text_field :default_artifacts_expire_in, class: 'form-control'
- .help-block
- Set the default expiration time for each job's artifacts.
- 0 for unlimited.
- = link_to icon('question-circle'), help_page_path('user/admin_area/settings/continuous_integration', anchor: 'default-artifacts-expiration')
-
- if Gitlab.config.registry.enabled
%fieldset
%legend Container Registry
@@ -54,96 +10,6 @@
= f.number_field :container_registry_token_expire_delay, class: 'form-control'
%fieldset
- %legend Metrics - Influx
- %p
- Setup InfluxDB to measure a wide variety of statistics like the time spent
- in running SQL queries. These settings require a
- = link_to 'restart', help_page_path('administration/restart_gitlab')
- to take effect.
- = link_to icon('question-circle'), help_page_path('administration/monitoring/performance/introduction')
- .form-group
- .col-sm-offset-2.col-sm-10
- .checkbox
- = f.label :metrics_enabled do
- = f.check_box :metrics_enabled
- Enable InfluxDB Metrics
- .form-group
- = f.label :metrics_host, 'InfluxDB host', class: 'control-label col-sm-2'
- .col-sm-10
- = f.text_field :metrics_host, class: 'form-control', placeholder: 'influxdb.example.com'
- .form-group
- = f.label :metrics_port, 'InfluxDB port', class: 'control-label col-sm-2'
- .col-sm-10
- = f.text_field :metrics_port, class: 'form-control', placeholder: '8089'
- .help-block
- The UDP port to use for connecting to InfluxDB. InfluxDB requires that
- your server configuration specifies a database to store data in when
- sending messages to this port, without it metrics data will not be
- saved.
- .form-group
- = f.label :metrics_pool_size, 'Connection pool size', class: 'control-label col-sm-2'
- .col-sm-10
- = f.number_field :metrics_pool_size, class: 'form-control'
- .help-block
- The amount of InfluxDB connections to open. Connections are opened
- lazily. Users using multi-threaded application servers should ensure
- enough connections are available (at minimum the amount of application
- server threads).
- .form-group
- = f.label :metrics_timeout, 'Connection timeout', class: 'control-label col-sm-2'
- .col-sm-10
- = f.number_field :metrics_timeout, class: 'form-control'
- .help-block
- The amount of seconds after which an InfluxDB connection will time
- out.
- .form-group
- = f.label :metrics_method_call_threshold, 'Method Call Threshold (ms)', class: 'control-label col-sm-2'
- .col-sm-10
- = f.number_field :metrics_method_call_threshold, class: 'form-control'
- .help-block
- A method call is only tracked when it takes longer to complete than
- the given amount of milliseconds.
- .form-group
- = f.label :metrics_sample_interval, 'Sampler Interval (sec)', class: 'control-label col-sm-2'
- .col-sm-10
- = f.number_field :metrics_sample_interval, class: 'form-control'
- .help-block
- The sampling interval in seconds. Sampled data includes memory usage,
- retained Ruby objects, file descriptors and so on.
- .form-group
- = f.label :metrics_packet_size, 'Metrics per packet', class: 'control-label col-sm-2'
- .col-sm-10
- = f.number_field :metrics_packet_size, class: 'form-control'
- .help-block
- The amount of points to store in a single UDP packet. More points
- results in fewer but larger UDP packets being sent.
-
- %fieldset
- %legend Metrics - Prometheus
- %p
- Enable a Prometheus metrics endpoint at
- %code= metrics_path
- to expose a variety of statistics on the health and performance of GitLab. Additional information on authenticating and connecting to the metrics endpoint is available
- = link_to 'here', admin_health_check_path
- \. This setting requires a
- = link_to 'restart', help_page_path('administration/restart_gitlab')
- to take effect.
- = link_to icon('question-circle'), help_page_path('administration/monitoring/prometheus/index')
- .form-group
- .col-sm-offset-2.col-sm-10
- .checkbox
- = f.label :prometheus_metrics_enabled do
- = f.check_box :prometheus_metrics_enabled
- Enable Prometheus Metrics
- - unless Gitlab::Metrics.metrics_folder_present?
- .help-block
- %strong.cred WARNING:
- Environment variable
- %code prometheus_multiproc_dir
- does not exist or is not pointing to a valid directory.
- = link_to icon('question-circle'), help_page_path('administration/monitoring/prometheus/gitlab_metrics', anchor: 'metrics-shared-directory')
-
- %fieldset
%legend Profiling - Performance Bar
%p
Enable the Performance Bar for a given group.
diff --git a/app/views/admin/application_settings/_influx.html.haml b/app/views/admin/application_settings/_influx.html.haml
new file mode 100644
index 00000000000..a173fd38a9c
--- /dev/null
+++ b/app/views/admin/application_settings/_influx.html.haml
@@ -0,0 +1,68 @@
+= form_for @application_setting, url: admin_application_settings_path, html: { class: 'form-horizontal fieldset-form' } do |f|
+ = form_errors(@application_setting)
+
+ %fieldset
+ %p
+ Setup InfluxDB to measure a wide variety of statistics like the time spent
+ in running SQL queries. These settings require a
+ = link_to 'restart', help_page_path('administration/restart_gitlab')
+ to take effect.
+ = link_to icon('question-circle'), help_page_path('administration/monitoring/performance/introduction')
+ .form-group
+ .col-sm-offset-2.col-sm-10
+ .checkbox
+ = f.label :metrics_enabled do
+ = f.check_box :metrics_enabled
+ Enable InfluxDB Metrics
+ .form-group
+ = f.label :metrics_host, 'InfluxDB host', class: 'control-label col-sm-2'
+ .col-sm-10
+ = f.text_field :metrics_host, class: 'form-control', placeholder: 'influxdb.example.com'
+ .form-group
+ = f.label :metrics_port, 'InfluxDB port', class: 'control-label col-sm-2'
+ .col-sm-10
+ = f.text_field :metrics_port, class: 'form-control', placeholder: '8089'
+ .help-block
+ The UDP port to use for connecting to InfluxDB. InfluxDB requires that
+ your server configuration specifies a database to store data in when
+ sending messages to this port, without it metrics data will not be
+ saved.
+ .form-group
+ = f.label :metrics_pool_size, 'Connection pool size', class: 'control-label col-sm-2'
+ .col-sm-10
+ = f.number_field :metrics_pool_size, class: 'form-control'
+ .help-block
+ The amount of InfluxDB connections to open. Connections are opened
+ lazily. Users using multi-threaded application servers should ensure
+ enough connections are available (at minimum the amount of application
+ server threads).
+ .form-group
+ = f.label :metrics_timeout, 'Connection timeout', class: 'control-label col-sm-2'
+ .col-sm-10
+ = f.number_field :metrics_timeout, class: 'form-control'
+ .help-block
+ The amount of seconds after which an InfluxDB connection will time
+ out.
+ .form-group
+ = f.label :metrics_method_call_threshold, 'Method Call Threshold (ms)', class: 'control-label col-sm-2'
+ .col-sm-10
+ = f.number_field :metrics_method_call_threshold, class: 'form-control'
+ .help-block
+ A method call is only tracked when it takes longer to complete than
+ the given amount of milliseconds.
+ .form-group
+ = f.label :metrics_sample_interval, 'Sampler Interval (sec)', class: 'control-label col-sm-2'
+ .col-sm-10
+ = f.number_field :metrics_sample_interval, class: 'form-control'
+ .help-block
+ The sampling interval in seconds. Sampled data includes memory usage,
+ retained Ruby objects, file descriptors and so on.
+ .form-group
+ = f.label :metrics_packet_size, 'Metrics per packet', class: 'control-label col-sm-2'
+ .col-sm-10
+ = f.number_field :metrics_packet_size, class: 'form-control'
+ .help-block
+ The amount of points to store in a single UDP packet. More points
+ results in fewer but larger UDP packets being sent.
+
+ = f.submit 'Save changes', class: "btn btn-success"
diff --git a/app/views/admin/application_settings/_prometheus.html.haml b/app/views/admin/application_settings/_prometheus.html.haml
new file mode 100644
index 00000000000..48745db2991
--- /dev/null
+++ b/app/views/admin/application_settings/_prometheus.html.haml
@@ -0,0 +1,28 @@
+= form_for @application_setting, url: admin_application_settings_path, html: { class: 'form-horizontal fieldset-form' } do |f|
+ = form_errors(@application_setting)
+
+ %fieldset
+ %p
+ Enable a Prometheus metrics endpoint at
+ %code= metrics_path
+ to expose a variety of statistics on the health and performance of GitLab. Additional information on authenticating and connecting to the metrics endpoint is available
+ = link_to 'here', admin_health_check_path
+ \. This setting requires a
+ = link_to 'restart', help_page_path('administration/restart_gitlab')
+ to take effect.
+ = link_to icon('question-circle'), help_page_path('administration/monitoring/prometheus/index')
+ .form-group
+ .col-sm-offset-2.col-sm-10
+ .checkbox
+ = f.label :prometheus_metrics_enabled do
+ = f.check_box :prometheus_metrics_enabled
+ Enable Prometheus Metrics
+ - unless Gitlab::Metrics.metrics_folder_present?
+ .help-block
+ %strong.cred WARNING:
+ Environment variable
+ %code prometheus_multiproc_dir
+ does not exist or is not pointing to a valid directory.
+ = link_to icon('question-circle'), help_page_path('administration/monitoring/prometheus/gitlab_metrics', anchor: 'metrics-shared-directory')
+
+ = f.submit 'Save changes', class: "btn btn-success"
diff --git a/app/views/admin/application_settings/show.html.haml b/app/views/admin/application_settings/show.html.haml
index 82d97f90248..17f2f37d24e 100644
--- a/app/views/admin/application_settings/show.html.haml
+++ b/app/views/admin/application_settings/show.html.haml
@@ -69,5 +69,38 @@
.settings-content
= render 'pages'
+%section.settings.as-ci-cd.no-animate#js-ci-cd-settings{ class: ('expanded' if expanded) }
+ .settings-header
+ %h4
+ = _('Continuous Integration and Deployment')
+ %button.btn.js-settings-toggle
+ = expanded ? 'Collapse' : 'Expand'
+ %p
+    = _('Auto DevOps, runners and job artifacts')
+ .settings-content
+ = render 'ci_cd'
+
+%section.settings.as-influx.no-animate#js-influx-settings{ class: ('expanded' if expanded) }
+ .settings-header
+ %h4
+ = _('Metrics - Influx')
+ %button.btn.js-settings-toggle
+ = expanded ? 'Collapse' : 'Expand'
+ %p
+ = _('Enable and configure InfluxDB metrics.')
+ .settings-content
+ = render 'influx'
+
+%section.settings.as-prometheus.no-animate#js-prometheus-settings{ class: ('expanded' if expanded) }
+ .settings-header
+ %h4
+ = _('Metrics - Prometheus')
+ %button.btn.js-settings-toggle
+ = expanded ? 'Collapse' : 'Expand'
+ %p
+ = _('Enable and configure Prometheus metrics.')
+ .settings-content
+ = render 'prometheus'
+
.prepend-top-20
= render 'form'
diff --git a/app/views/admin/projects/show.html.haml b/app/views/admin/projects/show.html.haml
index c02ddafe108..c47b8a88f56 100644
--- a/app/views/admin/projects/show.html.haml
+++ b/app/views/admin/projects/show.html.haml
@@ -62,12 +62,16 @@
= link_to @project.ssh_url_to_repo, project_path(@project)
- if @project.repository.exists?
%li
- %span.light fs:
+ %span.light Gitaly storage name:
%strong
- = @project.repository.path_to_repo
+ = @project.repository.storage
+ %li
+ %span.light Gitaly relative path:
+ %strong
+ = @project.repository.relative_path
%li
- %span.light Storage:
+ %span.light Storage used:
%strong= storage_counter(@project.statistics.storage_size)
(
= storage_counter(@project.statistics.repository_size)
diff --git a/app/views/ci/lints/show.html.haml b/app/views/ci/lints/show.html.haml
index 3c0881caa06..22f149d1caa 100644
--- a/app/views/ci/lints/show.html.haml
+++ b/app/views/ci/lints/show.html.haml
@@ -1,27 +1,9 @@
-- page_title "CI Lint"
-- page_description "Validate your GitLab CI configuration file"
-- content_for :library_javascripts do
- = page_specific_javascript_tag('lib/ace.js')
-
-%h2 Check your .gitlab-ci.yml
-
-.ci-linter
- .row
- = form_tag ci_lint_path, method: :post do
- .form-group
- .col-sm-12
- .file-holder
- .js-file-title.file-title.clearfix
- Content of .gitlab-ci.yml
- #ci-editor.ci-editor= @content
- = text_area_tag(:content, @content, class: 'hidden form-control span1', rows: 7, require: true)
- .col-sm-12
- .pull-left.prepend-top-10
- = submit_tag('Validate', class: 'btn btn-success submit-yml')
- .pull-right.prepend-top-10
- = button_tag('Clear', type: 'button', class: 'btn btn-default clear-yml')
-
- .row.prepend-top-20
- .col-sm-12
- .results.ci-template
- = render partial: 'create' if defined?(@status)
+.row.empty-state
+ .col-xs-12
+ .svg-content
+ = image_tag 'illustrations/feature_moved.svg'
+ .col-xs-12
+ .text-content.text-center
+ %h4= _("GitLab CI Linter has been moved")
+ %p
+ = _("To validate your GitLab CI configurations, go to 'CI/CD → Pipelines' inside your project, and click on the 'CI Lint' button.")
diff --git a/app/views/ci/variables/_variable_row.html.haml b/app/views/ci/variables/_variable_row.html.haml
index 5d4229c80af..440623b34f5 100644
--- a/app/views/ci/variables/_variable_row.html.haml
+++ b/app/views/ci/variables/_variable_row.html.haml
@@ -43,7 +43,5 @@
%span.toggle-icon
= sprite_icon('status_success_borderless', size: 16, css_class: 'toggle-icon-svg toggle-status-checked')
= sprite_icon('status_failed_borderless', size: 16, css_class: 'toggle-icon-svg toggle-status-unchecked')
- -# EE-specific start
- -# EE-specific end
%button.js-row-remove-button.ci-variable-row-remove-button{ type: 'button', 'aria-label': s_('CiVariables|Remove variable row') }
= icon('minus-circle')
diff --git a/app/views/import/github/new.html.haml b/app/views/import/github/new.html.haml
index 54ef51b30e3..c63cf2b31cb 100644
--- a/app/views/import/github/new.html.haml
+++ b/app/views/import/github/new.html.haml
@@ -22,9 +22,6 @@
= text_field_tag :personal_access_token, '', class: 'form-control', placeholder: _('Personal Access Token'), size: 40
= submit_tag _('List your GitHub repositories'), class: 'btn btn-success'
- -# EE-specific start
- -# EE-specific end
-
- unless github_import_configured?
%hr
%p
diff --git a/app/views/layouts/nav/sidebar/_project.html.haml b/app/views/layouts/nav/sidebar/_project.html.haml
index 059571f795f..5c90d13420f 100644
--- a/app/views/layouts/nav/sidebar/_project.html.haml
+++ b/app/views/layouts/nav/sidebar/_project.html.haml
@@ -80,14 +80,6 @@
= link_to charts_project_graph_path(@project, current_ref) do
#{ _('Charts') }
- - if project_nav_tab? :container_registry
- = nav_link(controller: %w[projects/registry/repositories]) do
- = link_to project_container_registry_index_path(@project), class: 'shortcuts-container-registry' do
- .nav-icon-container
- = sprite_icon('disk')
- %span.nav-item-name
- Registry
-
- if project_nav_tab? :issues
= nav_link(controller: @project.issues_enabled? ? [:issues, :labels, :milestones, :boards] : :issues) do
= link_to project_issues_path(@project), class: 'shortcuts-issues' do
@@ -231,6 +223,14 @@
%span
Charts
+ - if project_nav_tab? :container_registry
+ = nav_link(controller: %w[projects/registry/repositories]) do
+ = link_to project_container_registry_index_path(@project), class: 'shortcuts-container-registry' do
+ .nav-icon-container
+ = sprite_icon('disk')
+ %span.nav-item-name
+ Registry
+
- if project_nav_tab? :wiki
= nav_link(controller: :wikis) do
= link_to get_project_wiki_path(@project), class: 'shortcuts-wiki' do
diff --git a/app/views/ci/lints/_create.html.haml b/app/views/projects/ci/lints/_create.html.haml
index 30bf1384b22..30bf1384b22 100644
--- a/app/views/ci/lints/_create.html.haml
+++ b/app/views/projects/ci/lints/_create.html.haml
diff --git a/app/views/projects/ci/lints/show.html.haml b/app/views/projects/ci/lints/show.html.haml
new file mode 100644
index 00000000000..6ca8152183d
--- /dev/null
+++ b/app/views/projects/ci/lints/show.html.haml
@@ -0,0 +1,27 @@
+- page_title "CI Lint"
+- page_description "Validate your GitLab CI configuration file"
+- content_for :library_javascripts do
+ = page_specific_javascript_tag('lib/ace.js')
+
+%h2 Check your .gitlab-ci.yml
+
+.project-ci-linter
+ .row
+ = form_tag project_ci_lint_path(@project), method: :post do
+ .form-group
+ .col-sm-12
+ .file-holder
+ .js-file-title.file-title.clearfix
+ Content of .gitlab-ci.yml
+ #ci-editor.ci-editor= @content
+ = text_area_tag(:content, @content, class: 'hidden form-control span1', rows: 7, require: true)
+ .col-sm-12
+ .pull-left.prepend-top-10
+ = submit_tag('Validate', class: 'btn btn-success submit-yml')
+ .pull-right.prepend-top-10
+ = button_tag('Clear', type: 'button', class: 'btn btn-default clear-yml')
+
+ .row.prepend-top-20
+ .col-sm-12
+ .results.project-ci-template
+ = render partial: 'create' if defined?(@status)
diff --git a/app/views/projects/clusters/user/_header.html.haml b/app/views/projects/clusters/user/_header.html.haml
index 04c7ce96a4b..37f6a788518 100644
--- a/app/views/projects/clusters/user/_header.html.haml
+++ b/app/views/projects/clusters/user/_header.html.haml
@@ -1,5 +1,5 @@
%h4.prepend-top-20
= s_('ClusterIntegration|Enter the details for your Kubernetes cluster')
%p
- - link_to_help_page = link_to(s_('ClusterIntegration|documentation'), help_page_path('user/project/clusters/index'), target: '_blank', rel: 'noopener noreferrer')
+ - link_to_help_page = link_to(s_('ClusterIntegration|documentation'), help_page_path('user/project/clusters/index', anchor: 'adding-an-existing-kubernetes-cluster'), target: '_blank', rel: 'noopener noreferrer')
= s_('ClusterIntegration|Please enter access information for your Kubernetes cluster. If you need help, you can read our %{link_to_help_page} on Kubernetes').html_safe % { link_to_help_page: link_to_help_page }
diff --git a/app/views/projects/jobs/_sidebar.html.haml b/app/views/projects/jobs/_sidebar.html.haml
index e779473c239..ecf186e3dc8 100644
--- a/app/views/projects/jobs/_sidebar.html.haml
+++ b/app/views/projects/jobs/_sidebar.html.haml
@@ -35,7 +35,7 @@
= link_to download_project_job_artifacts_path(@project, @build), rel: 'nofollow', download: '', class: 'btn btn-sm btn-default' do
Download
- - if @build.artifacts_metadata?
+ - if @build.browsable_artifacts?
= link_to browse_project_job_artifacts_path(@project, @build), class: 'btn btn-sm btn-default' do
Browse
diff --git a/app/views/projects/jobs/show.html.haml b/app/views/projects/jobs/show.html.haml
index 849c273db8c..fa27ded7cc2 100644
--- a/app/views/projects/jobs/show.html.haml
+++ b/app/views/projects/jobs/show.html.haml
@@ -111,4 +111,4 @@
.js-build-options{ data: javascript_build_options }
-#js-job-details-vue{ data: { endpoint: project_job_path(@project, @build, format: :json) } }
+#js-job-details-vue{ data: { endpoint: project_job_path(@project, @build, format: :json), runner_help_url: help_page_path('ci/runners/README.html', anchor: 'setting-maximum-job-timeout-for-a-runner') } }
diff --git a/app/views/projects/milestones/show.html.haml b/app/views/projects/milestones/show.html.haml
index b423888c875..5ec219fdf00 100644
--- a/app/views/projects/milestones/show.html.haml
+++ b/app/views/projects/milestones/show.html.haml
@@ -30,6 +30,7 @@
%button.js-promote-project-milestone-button.btn.btn-grouped{ data: { toggle: 'modal',
target: '#promote-milestone-modal',
milestone_title: @milestone.title,
+ group_name: @project.group.name,
url: promote_project_milestone_path(@milestone.project, @milestone),
container: 'body' },
disabled: true,
diff --git a/app/views/projects/new.html.haml b/app/views/projects/new.html.haml
index 8cdb0a6aff4..b66e0559603 100644
--- a/app/views/projects/new.html.haml
+++ b/app/views/projects/new.html.haml
@@ -18,8 +18,6 @@
= _('A project is where you house your files (repository), plan your work (issues), and publish your documentation (wiki), %{among_other_things_link}.').html_safe % { among_other_things_link: among_other_things_link }
%p
= _('All features are enabled for blank projects, from templates, or when importing, but you can disable them afterward in the project settings.')
- -# EE-specific start
- -# EE-specific end
.md
= brand_new_project_guidelines
%p
@@ -43,8 +41,6 @@
%a{ href: '#import-project-pane', id: 'import-project-tab', data: { toggle: 'tab' }, role: 'tab' }
%span.hidden-xs Import project
%span.visible-xs Import
- -# EE-specific start
- -# EE-specific end
.tab-content.gitlab-tab-content
.tab-pane{ id: 'blank-project-pane', class: active_when(active_tab == 'blank'), role: 'tabpanel' }
@@ -110,10 +106,6 @@
= render "shared/import_form", f: f
= render 'new_project_fields', f: f, project_name_id: "import-url-name"
-
- -# EE-specific start
- -# EE-specific end
-
.save-project-loader.hide
.center
%h2
diff --git a/app/views/projects/pages_domains/new.html.haml b/app/views/projects/pages_domains/new.html.haml
index 5a397c9d3c7..e49163880c7 100644
--- a/app/views/projects/pages_domains/new.html.haml
+++ b/app/views/projects/pages_domains/new.html.haml
@@ -8,3 +8,5 @@
= render 'form', { f: f }
.form-actions
= f.submit 'Create New Domain', class: "btn btn-save"
+ .pull-right
+ = link_to _('Cancel'), project_pages_path(@project), class: 'btn btn-cancel'
diff --git a/app/views/projects/pipelines/index.html.haml b/app/views/projects/pipelines/index.html.haml
index 3e6b3346787..c0ee81fe28d 100644
--- a/app/views/projects/pipelines/index.html.haml
+++ b/app/views/projects/pipelines/index.html.haml
@@ -10,6 +10,6 @@
"no-pipelines-svg-path" => image_path('illustrations/pipelines_pending.svg'),
"can-create-pipeline" => can?(current_user, :create_pipeline, @project).to_s,
"new-pipeline-path" => can?(current_user, :create_pipeline, @project) && new_project_pipeline_path(@project),
- "ci-lint-path" => can?(current_user, :create_pipeline, @project) && ci_lint_path,
+ "ci-lint-path" => can?(current_user, :create_pipeline, @project) && project_ci_lint_path(@project),
"reset-cache-path" => can?(current_user, :admin_pipeline, @project) && reset_cache_project_settings_ci_cd_path(@project) ,
"has-gitlab-ci" => (@project.has_ci? && @project.builds_enabled?).to_s } }
diff --git a/app/views/projects/runners/_form.html.haml b/app/views/projects/runners/_form.html.haml
index 49c90869146..6a681736b6f 100644
--- a/app/views/projects/runners/_form.html.haml
+++ b/app/views/projects/runners/_form.html.haml
@@ -40,6 +40,12 @@
.col-sm-10
= f.text_field :description, class: 'form-control'
.form-group
+ = label_tag :maximum_timeout_human_readable, class: 'control-label' do
+ Maximum job timeout
+ .col-sm-10
+ = f.text_field :maximum_timeout_human_readable, class: 'form-control'
+    .help-block This timeout will take precedence when it is lower than the project-defined timeout
+ .form-group
= label_tag :tag_list, class: 'control-label' do
Tags
.col-sm-10
diff --git a/app/views/projects/runners/show.html.haml b/app/views/projects/runners/show.html.haml
index 4e57f5f844d..f33e7e25b68 100644
--- a/app/views/projects/runners/show.html.haml
+++ b/app/views/projects/runners/show.html.haml
@@ -56,6 +56,9 @@
%td Description
%td= @runner.description
%tr
+ %td Maximum job timeout
+ %td= @runner.maximum_timeout_human_readable
+ %tr
%td Last contact
%td
- if @runner.contacted_at
diff --git a/app/views/shared/_import_form.html.haml b/app/views/shared/_import_form.html.haml
index 5eaaa1448d5..3806ead6c87 100644
--- a/app/views/shared/_import_form.html.haml
+++ b/app/views/shared/_import_form.html.haml
@@ -17,6 +17,3 @@
= import_will_timeout_message(ci_cd_only)
%li
= import_svn_message(ci_cd_only)
-
--# EE-specific start
--# EE-specific end
diff --git a/app/views/shared/_label.html.haml b/app/views/shared/_label.html.haml
index 5afbc78df53..56403907844 100644
--- a/app/views/shared/_label.html.haml
+++ b/app/views/shared/_label.html.haml
@@ -55,6 +55,7 @@
label_title: label.title,
label_color: label.color,
label_text_color: label.text_color,
+ group_name: label.project.group.name,
target: '#promote-label-modal',
container: 'body',
toggle: 'modal' } }
diff --git a/app/views/shared/milestones/_milestone.html.haml b/app/views/shared/milestones/_milestone.html.haml
index 5926867e2d7..ac494814f55 100644
--- a/app/views/shared/milestones/_milestone.html.haml
+++ b/app/views/shared/milestones/_milestone.html.haml
@@ -56,6 +56,7 @@
type: 'button',
data: { url: promote_project_milestone_path(milestone.project, milestone),
milestone_title: milestone.title,
+ group_name: @project.group.name,
target: '#promote-milestone-modal',
container: 'body',
toggle: 'modal' } }
diff --git a/app/workers/all_queues.yml b/app/workers/all_queues.yml
index f65e8385ac8..9a11cdb121e 100644
--- a/app/workers/all_queues.yml
+++ b/app/workers/all_queues.yml
@@ -39,6 +39,10 @@
- github_importer:github_import_stage_import_pull_requests
- github_importer:github_import_stage_import_repository
+- object_storage_upload
+- object_storage:object_storage_background_move
+- object_storage:object_storage_migrate_uploads
+
- pipeline_cache:expire_job_cache
- pipeline_cache:expire_pipeline_cache
- pipeline_creation:create_pipeline
diff --git a/app/workers/concerns/object_storage_queue.rb b/app/workers/concerns/object_storage_queue.rb
new file mode 100644
index 00000000000..a80f473a6d4
--- /dev/null
+++ b/app/workers/concerns/object_storage_queue.rb
@@ -0,0 +1,8 @@
+# Concern for setting Sidekiq settings for the various GitLab ObjectStorage workers.
+module ObjectStorageQueue
+ extend ActiveSupport::Concern
+
+ included do
+ queue_namespace :object_storage
+ end
+end
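# A short sketch of what including the concern above does in practice: the
# BackgroundMoveWorker added later in this diff mixes it in, which is why the
# app/workers/all_queues.yml hunk gains the namespaced queue entry shown here.
module ObjectStorage
  class BackgroundMoveWorker
    include ApplicationWorker
    include ObjectStorageQueue  # => Sidekiq queue "object_storage:object_storage_background_move"
  end
end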
diff --git a/app/workers/git_garbage_collect_worker.rb b/app/workers/git_garbage_collect_worker.rb
index 55fb817ca6e..be4203bc7ad 100644
--- a/app/workers/git_garbage_collect_worker.rb
+++ b/app/workers/git_garbage_collect_worker.rb
@@ -28,16 +28,17 @@ class GitGarbageCollectWorker
task = task.to_sym
cmd = command(task)
- repo_path = project.repository.path_to_repo
- description = "'#{cmd.join(' ')}' in #{repo_path}"
-
- Gitlab::GitLogger.info(description)
gitaly_migrate(GITALY_MIGRATED_TASKS[task]) do |is_enabled|
if is_enabled
gitaly_call(task, project.repository.raw_repository)
else
+ repo_path = project.repository.path_to_repo
+ description = "'#{cmd.join(' ')}' in #{repo_path}"
+ Gitlab::GitLogger.info(description)
+
output, status = Gitlab::Popen.popen(cmd, repo_path)
+
Gitlab::GitLogger.error("#{description} failed:\n#{output}") unless status.zero?
end
end
diff --git a/app/workers/object_storage/background_move_worker.rb b/app/workers/object_storage/background_move_worker.rb
new file mode 100644
index 00000000000..9c4d72e0ecf
--- /dev/null
+++ b/app/workers/object_storage/background_move_worker.rb
@@ -0,0 +1,29 @@
+module ObjectStorage
+ class BackgroundMoveWorker
+ include ApplicationWorker
+ include ObjectStorageQueue
+
+ sidekiq_options retry: 5
+
+ def perform(uploader_class_name, subject_class_name, file_field, subject_id)
+ uploader_class = uploader_class_name.constantize
+ subject_class = subject_class_name.constantize
+
+ return unless uploader_class < ObjectStorage::Concern
+ return unless uploader_class.object_store_enabled?
+ return unless uploader_class.background_upload_enabled?
+
+ subject = subject_class.find(subject_id)
+ uploader = build_uploader(subject, file_field&.to_sym)
+ uploader.migrate!(ObjectStorage::Store::REMOTE)
+ end
+
+ def build_uploader(subject, mount_point)
+ case subject
+ when Upload then subject.build_uploader(mount_point)
+ else
+ subject.send(mount_point) # rubocop:disable GitlabSecurity/PublicSend
+ end
+ end
+ end
+end
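# A hedged example of how this worker might be enqueued. The AvatarUploader /
# User / 'avatar' names and the id 42 are placeholders for illustration, not
# taken from this patch; the arguments follow the perform signature above.
ObjectStorage::BackgroundMoveWorker.perform_async(
  'AvatarUploader',  # uploader_class_name (must include ObjectStorage::Concern)
  'User',            # subject_class_name
  'avatar',          # file_field, i.e. the mount point on the model
  42                 # subject_id
)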
diff --git a/app/workers/object_storage/migrate_uploads_worker.rb b/app/workers/object_storage/migrate_uploads_worker.rb
new file mode 100644
index 00000000000..01ed123e6c8
--- /dev/null
+++ b/app/workers/object_storage/migrate_uploads_worker.rb
@@ -0,0 +1,202 @@
+# frozen_string_literal: true
+# rubocop:disable Metrics/LineLength
+# rubocop:disable Style/Documentation
+
+module ObjectStorage
+ class MigrateUploadsWorker
+ include ApplicationWorker
+ include ObjectStorageQueue
+
+ SanityCheckError = Class.new(StandardError)
+
+ class Upload < ActiveRecord::Base
+ # Upper limit for foreground checksum processing
+ CHECKSUM_THRESHOLD = 100.megabytes
+
+ belongs_to :model, polymorphic: true # rubocop:disable Cop/PolymorphicAssociations
+
+ validates :size, presence: true
+ validates :path, presence: true
+ validates :model, presence: true
+ validates :uploader, presence: true
+
+ before_save :calculate_checksum!, if: :foreground_checksummable?
+ after_commit :schedule_checksum, if: :checksummable?
+
+ scope :stored_locally, -> { where(store: [nil, ObjectStorage::Store::LOCAL]) }
+ scope :stored_remotely, -> { where(store: ObjectStorage::Store::REMOTE) }
+
+ def self.hexdigest(path)
+ Digest::SHA256.file(path).hexdigest
+ end
+
+ def absolute_path
+ raise ObjectStorage::RemoteStoreError, "Remote object has no absolute path." unless local?
+ return path unless relative_path?
+
+ uploader_class.absolute_path(self)
+ end
+
+ def calculate_checksum!
+ self.checksum = nil
+ return unless checksummable?
+
+ self.checksum = self.class.hexdigest(absolute_path)
+ end
+
+ def build_uploader(mounted_as = nil)
+ uploader_class.new(model, mounted_as).tap do |uploader|
+ uploader.upload = self
+ uploader.retrieve_from_store!(identifier)
+ end
+ end
+
+ def exist?
+ File.exist?(absolute_path)
+ end
+
+ def local?
+ return true if store.nil?
+
+ store == ObjectStorage::Store::LOCAL
+ end
+
+ private
+
+ def checksummable?
+ checksum.nil? && local? && exist?
+ end
+
+ def foreground_checksummable?
+ checksummable? && size <= CHECKSUM_THRESHOLD
+ end
+
+ def schedule_checksum
+ UploadChecksumWorker.perform_async(id)
+ end
+
+ def relative_path?
+ !path.start_with?('/')
+ end
+
+ def identifier
+ File.basename(path)
+ end
+
+ def uploader_class
+ Object.const_get(uploader)
+ end
+ end
+
+ class MigrationResult
+ attr_reader :upload
+ attr_accessor :error
+
+ def initialize(upload, error = nil)
+ @upload, @error = upload, error
+ end
+
+ def success?
+ error.nil?
+ end
+
+ def to_s
+ success? ? "Migration successful." : "Error while migrating #{upload.id}: #{error.message}"
+ end
+ end
+
+ module Report
+ class MigrationFailures < StandardError
+ attr_reader :errors
+
+ def initialize(errors)
+ @errors = errors
+ end
+
+ def message
+ errors.map(&:message).join("\n")
+ end
+ end
+
+ def report!(results)
+ success, failures = results.partition(&:success?)
+
+ Rails.logger.info header(success, failures)
+ Rails.logger.warn failures(failures)
+
+ raise MigrationFailures.new(failures.map(&:error)) if failures.any?
+ end
+
+ def header(success, failures)
+ "Migrated #{success.count}/#{success.count + failures.count} files."
+ end
+
+ def failures(failures)
+      failures.map { |f| "\t#{f}" }.join("\n")
+ end
+ end
+
+ include Report
+
+ def self.enqueue!(uploads, mounted_as, to_store)
+ sanity_check!(uploads, mounted_as)
+
+ perform_async(uploads.ids, mounted_as, to_store)
+ end
+
+ # We need to be sure all the uploads are for the same uploader and model type
+ # and that the mount point exists if provided.
+ #
+ def self.sanity_check!(uploads, mounted_as)
+ upload = uploads.first
+
+ uploader_class = upload.uploader.constantize
+ model_class = uploads.first.model_type.constantize
+
+ uploader_types = uploads.map(&:uploader).uniq
+ model_types = uploads.map(&:model_type).uniq
+ model_has_mount = mounted_as.nil? || model_class.uploaders[mounted_as] == uploader_class
+
+ raise(SanityCheckError, "Multiple uploaders found: #{uploader_types}") unless uploader_types.count == 1
+ raise(SanityCheckError, "Multiple model types found: #{model_types}") unless model_types.count == 1
+ raise(SanityCheckError, "Mount point #{mounted_as} not found in #{model_class}.") unless model_has_mount
+ end
+
+ def perform(ids, mounted_as, to_store)
+ @mounted_as = mounted_as&.to_sym
+ @to_store = to_store
+
+ uploads = Upload.preload(:model).where(id: ids)
+
+ sanity_check!(uploads)
+ results = migrate(uploads)
+
+ report!(results)
+ rescue SanityCheckError => e
+ # do not retry: the job is insane
+ Rails.logger.warn "#{self.class}: Sanity check error (#{e.message})"
+ end
+
+ def sanity_check!(uploads)
+ self.class.sanity_check!(uploads, @mounted_as)
+ end
+
+ def build_uploaders(uploads)
+ uploads.map { |upload| upload.build_uploader(@mounted_as) }
+ end
+
+ def migrate(uploads)
+ build_uploaders(uploads).map(&method(:process_uploader))
+ end
+
+ def process_uploader(uploader)
+ MigrationResult.new(uploader.upload).tap do |result|
+ begin
+ uploader.migrate!(@to_store)
+ rescue => e
+ result.error = e
+ end
+ end
+ end
+ end
+end
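# A sketch of how a batch migration could be kicked off with the class above.
# The AvatarUploader / :avatar pair is an assumption for illustration; the
# Upload scope relies on the same uploader/model_type columns that
# sanity_check! reads, and enqueue! raises SanityCheckError up front if the
# batch mixes uploaders, mixes model types, or names an unknown mount point.
uploads = Upload.where(uploader: 'AvatarUploader').limit(100)
ObjectStorage::MigrateUploadsWorker.enqueue!(uploads, :avatar, ObjectStorage::Store::REMOTE)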
diff --git a/app/workers/object_storage_upload_worker.rb b/app/workers/object_storage_upload_worker.rb
new file mode 100644
index 00000000000..5c80f34069c
--- /dev/null
+++ b/app/workers/object_storage_upload_worker.rb
@@ -0,0 +1,21 @@
+# @Deprecated - remove once the `object_storage_upload` queue is empty
+# The queue has been renamed `object_storage:object_storage_background_move`
+#
+class ObjectStorageUploadWorker
+ include ApplicationWorker
+
+ sidekiq_options retry: 5
+
+ def perform(uploader_class_name, subject_class_name, file_field, subject_id)
+ uploader_class = uploader_class_name.constantize
+ subject_class = subject_class_name.constantize
+
+ return unless uploader_class < ObjectStorage::Concern
+ return unless uploader_class.object_store_enabled?
+ return unless uploader_class.background_upload_enabled?
+
+ subject = subject_class.find(subject_id)
+ uploader = subject.public_send(file_field) # rubocop:disable GitlabSecurity/PublicSend
+ uploader.migrate!(ObjectStorage::Store::REMOTE)
+ end
+end
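# One way (standard Sidekiq API, not part of this patch) to confirm the legacy
# queue has drained before this deprecated worker class is finally deleted.
require 'sidekiq/api'
Sidekiq::Queue.new('object_storage_upload').size  # => 0 once it is safe to remove the class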
diff --git a/app/workers/repository_fork_worker.rb b/app/workers/repository_fork_worker.rb
index 07584fab7c8..712a63af532 100644
--- a/app/workers/repository_fork_worker.rb
+++ b/app/workers/repository_fork_worker.rb
@@ -1,3 +1,4 @@
+# Gitaly issue: https://gitlab.com/gitlab-org/gitaly/issues/1110
class RepositoryForkWorker
include ApplicationWorker
include Gitlab::ShellAdapter
diff --git a/changelogs/unreleased-ee/39118-dynamic-pipeline-variables-fe.yml b/changelogs/unreleased-ee/39118-dynamic-pipeline-variables-fe.yml
deleted file mode 100644
index a38b447e345..00000000000
--- a/changelogs/unreleased-ee/39118-dynamic-pipeline-variables-fe.yml
+++ /dev/null
@@ -1,6 +0,0 @@
----
-title: Update CI/CD secret variables list to be dynamic and save without reloading
- the page
-merge_request: 4110
-author:
-type: added
diff --git a/changelogs/unreleased-ee/4378-fix-cluster-js-not-running-on-update-page.yml b/changelogs/unreleased-ee/4378-fix-cluster-js-not-running-on-update-page.yml
deleted file mode 100644
index bbb6cbd05be..00000000000
--- a/changelogs/unreleased-ee/4378-fix-cluster-js-not-running-on-update-page.yml
+++ /dev/null
@@ -1,5 +0,0 @@
----
-title: Fix JavaScript bundle running on Cluster update/destroy pages
-merge_request:
-author:
-type: fixed
diff --git a/changelogs/unreleased-ee/bvl-external-policy-classification.yml b/changelogs/unreleased-ee/bvl-external-policy-classification.yml
deleted file mode 100644
index 074629c8c12..00000000000
--- a/changelogs/unreleased-ee/bvl-external-policy-classification.yml
+++ /dev/null
@@ -1,5 +0,0 @@
----
-title: Authorize project access with an external service
-merge_request: 4675
-author:
-type: added
diff --git a/changelogs/unreleased/27210-add-cancel-btn-to-new-page-domain.yml b/changelogs/unreleased/27210-add-cancel-btn-to-new-page-domain.yml
new file mode 100644
index 00000000000..d96f7e54c8d
--- /dev/null
+++ b/changelogs/unreleased/27210-add-cancel-btn-to-new-page-domain.yml
@@ -0,0 +1,5 @@
+---
+title: Adds cancel btn to new pages domain page
+merge_request: 18026
+author: Jacopo Beschi @jacopo-beschi
+type: added
diff --git a/changelogs/unreleased/33803-drop-json-support-in-project-milestone.yml b/changelogs/unreleased/33803-drop-json-support-in-project-milestone.yml
new file mode 100644
index 00000000000..0382ede4565
--- /dev/null
+++ b/changelogs/unreleased/33803-drop-json-support-in-project-milestone.yml
@@ -0,0 +1,5 @@
+---
+title: Drop JSON response in Project Milestone along with avoiding error
+merge_request: 17977
+author: Takuya Noguchi
+type: fixed
diff --git a/changelogs/unreleased/38167-ui-bug-when-creating-new-branch.yml b/changelogs/unreleased/38167-ui-bug-when-creating-new-branch.yml
new file mode 100644
index 00000000000..cec06bf2dfe
--- /dev/null
+++ b/changelogs/unreleased/38167-ui-bug-when-creating-new-branch.yml
@@ -0,0 +1,5 @@
+---
+title: Fixed bug in dropdown selector when selecting the same selection again
+merge_request: 14631
+author: bitsapien
+type: fixed
diff --git a/changelogs/unreleased/40781-os-to-ce.yml b/changelogs/unreleased/40781-os-to-ce.yml
new file mode 100644
index 00000000000..4a364292c60
--- /dev/null
+++ b/changelogs/unreleased/40781-os-to-ce.yml
@@ -0,0 +1,5 @@
+---
+title: Add object storage support for LFS objects, CI artifacts, and uploads.
+merge_request: 17358
+author:
+type: added
diff --git a/changelogs/unreleased/43512-add-support-for-omniauth-jwt-provider.yml b/changelogs/unreleased/43512-add-support-for-omniauth-jwt-provider.yml
new file mode 100644
index 00000000000..039d3de7168
--- /dev/null
+++ b/changelogs/unreleased/43512-add-support-for-omniauth-jwt-provider.yml
@@ -0,0 +1,5 @@
+---
+title: Adds support for OmniAuth JWT provider
+merge_request: 17774
+author:
+type: added
diff --git a/changelogs/unreleased/43603-ci-lint-support.yml b/changelogs/unreleased/43603-ci-lint-support.yml
new file mode 100644
index 00000000000..8e4a92c0287
--- /dev/null
+++ b/changelogs/unreleased/43603-ci-lint-support.yml
@@ -0,0 +1,5 @@
+---
+title: Move ci/lint under project's namespace
+merge_request: 17729
+author:
+type: added
diff --git a/changelogs/unreleased/43794-fix-domain-verification-validation-errors.yml b/changelogs/unreleased/43794-fix-domain-verification-validation-errors.yml
new file mode 100644
index 00000000000..861820c7538
--- /dev/null
+++ b/changelogs/unreleased/43794-fix-domain-verification-validation-errors.yml
@@ -0,0 +1,5 @@
+---
+title: Avoid validation errors when running the Pages domain verification service
+merge_request: 17992
+author:
+type: fixed
diff --git a/changelogs/unreleased/44232-docs-for-runner-ip-address.yml b/changelogs/unreleased/44232-docs-for-runner-ip-address.yml
deleted file mode 100644
index 82485d31b24..00000000000
--- a/changelogs/unreleased/44232-docs-for-runner-ip-address.yml
+++ /dev/null
@@ -1,5 +0,0 @@
----
-title: Add documentation for runner IP address (#44232)
-merge_request: 17837
-author:
-type: other
diff --git a/changelogs/unreleased/44392-resolve-projects-creation-silently-failing-on-after-create-error.yml b/changelogs/unreleased/44392-resolve-projects-creation-silently-failing-on-after-create-error.yml
new file mode 100644
index 00000000000..3bbd5a05b98
--- /dev/null
+++ b/changelogs/unreleased/44392-resolve-projects-creation-silently-failing-on-after-create-error.yml
@@ -0,0 +1,5 @@
+---
+title: Project creation will now raise an error if a service template is invalid
+merge_request: 18013
+author:
+type: fixed
diff --git a/changelogs/unreleased/44508-fix-fork-namespace-images.yml b/changelogs/unreleased/44508-fix-fork-namespace-images.yml
new file mode 100644
index 00000000000..63b4b9a5e56
--- /dev/null
+++ b/changelogs/unreleased/44508-fix-fork-namespace-images.yml
@@ -0,0 +1,5 @@
+---
+title: Fix bug rendering group icons when forking
+merge_request:
+author:
+type: fixed
diff --git a/changelogs/unreleased/44564-error-500-while-attempting-to-resolve-conflicts-due-to-utf-8-conversion-error.yml b/changelogs/unreleased/44564-error-500-while-attempting-to-resolve-conflicts-due-to-utf-8-conversion-error.yml
deleted file mode 100644
index 3fb96153b9c..00000000000
--- a/changelogs/unreleased/44564-error-500-while-attempting-to-resolve-conflicts-due-to-utf-8-conversion-error.yml
+++ /dev/null
@@ -1,5 +0,0 @@
----
-title: Fix 500 error when trying to resolve non-ASCII conflicts in the editor
-merge_request: 17962
-author:
-type: fixed
diff --git a/changelogs/unreleased/44649-reference-parsing-conflicting-with-auto-linking.yml b/changelogs/unreleased/44649-reference-parsing-conflicting-with-auto-linking.yml
new file mode 100644
index 00000000000..a64b0efa1ed
--- /dev/null
+++ b/changelogs/unreleased/44649-reference-parsing-conflicting-with-auto-linking.yml
@@ -0,0 +1,5 @@
+---
+title: Fix autolinking URLs containing ampersands
+merge_request: 18045
+author:
+type: fixed
diff --git a/changelogs/unreleased/44657-reuse-root_ref_hash-on-branches.yml b/changelogs/unreleased/44657-reuse-root_ref_hash-on-branches.yml
new file mode 100644
index 00000000000..4f21aadd86b
--- /dev/null
+++ b/changelogs/unreleased/44657-reuse-root_ref_hash-on-branches.yml
@@ -0,0 +1,5 @@
+---
+title: Reuse root_ref_hash for performance on Branches
+merge_request: 17998
+author: Takuya Noguchi
+type: performance
diff --git a/changelogs/unreleased/44712-update-asciidoctor-from-1-5-3-to-1-5-6-2.yml b/changelogs/unreleased/44712-update-asciidoctor-from-1-5-3-to-1-5-6-2.yml
new file mode 100644
index 00000000000..bdfed89d2ea
--- /dev/null
+++ b/changelogs/unreleased/44712-update-asciidoctor-from-1-5-3-to-1-5-6-2.yml
@@ -0,0 +1,5 @@
+---
+title: Update asciidoctor-plantuml to 0.0.8
+merge_request: 18022
+author: Takuya Noguchi
+type: performance
diff --git a/changelogs/unreleased/44717-no-resolve-issue.yml b/changelogs/unreleased/44717-no-resolve-issue.yml
new file mode 100644
index 00000000000..ce23f4e6e9f
--- /dev/null
+++ b/changelogs/unreleased/44717-no-resolve-issue.yml
@@ -0,0 +1,5 @@
+---
+title: Don't show Jump to Discussion button on Issues
+merge_request:
+author:
+type: fixed
diff --git a/changelogs/unreleased/ab-44446-add-indexes-for-user-activity-queries.yml b/changelogs/unreleased/ab-44446-add-indexes-for-user-activity-queries.yml
deleted file mode 100644
index 0f89c06fcee..00000000000
--- a/changelogs/unreleased/ab-44446-add-indexes-for-user-activity-queries.yml
+++ /dev/null
@@ -1,5 +0,0 @@
----
-title: Add indexes for user activity queries.
-merge_request: 17890
-author:
-type: performance
diff --git a/changelogs/unreleased/ab-44467-remove-index.yml b/changelogs/unreleased/ab-44467-remove-index.yml
new file mode 100644
index 00000000000..fb772ce85d5
--- /dev/null
+++ b/changelogs/unreleased/ab-44467-remove-index.yml
@@ -0,0 +1,5 @@
+---
+title: Remove unused index from events table.
+merge_request: 18014
+author:
+type: other
diff --git a/changelogs/unreleased/ac-fix-use_file-race.yml b/changelogs/unreleased/ac-fix-use_file-race.yml
new file mode 100644
index 00000000000..f1315d5d50e
--- /dev/null
+++ b/changelogs/unreleased/ac-fix-use_file-race.yml
@@ -0,0 +1,5 @@
+---
+title: Fix data race between ObjectStorage background_upload and Pages publishing
+merge_request:
+author:
+type: fixed
diff --git a/changelogs/unreleased/ac-lfs-direct-upload-ee-to-ce.yml b/changelogs/unreleased/ac-lfs-direct-upload-ee-to-ce.yml
new file mode 100644
index 00000000000..4db7f76e0af
--- /dev/null
+++ b/changelogs/unreleased/ac-lfs-direct-upload-ee-to-ce.yml
@@ -0,0 +1,5 @@
+---
+title: Port direct upload of LFS artifacts from EE
+merge_request: 17752
+author:
+type: added
diff --git a/changelogs/unreleased/add-canary-favicon.yml b/changelogs/unreleased/add-canary-favicon.yml
new file mode 100644
index 00000000000..1af6572588d
--- /dev/null
+++ b/changelogs/unreleased/add-canary-favicon.yml
@@ -0,0 +1,5 @@
+---
+title: Add yellow favicon when `CANARY=true` to differentiate the canary environment
+merge_request: 12477
+author:
+type: changed
diff --git a/changelogs/unreleased/add-per-runner-job-timeout.yml b/changelogs/unreleased/add-per-runner-job-timeout.yml
new file mode 100644
index 00000000000..336b4d15ddf
--- /dev/null
+++ b/changelogs/unreleased/add-per-runner-job-timeout.yml
@@ -0,0 +1,5 @@
+---
+title: Add per-runner configured job timeout
+merge_request: 17221
+author:
+type: added
diff --git a/changelogs/unreleased/bvl-no-permanent-redirect.yml b/changelogs/unreleased/bvl-no-permanent-redirect.yml
new file mode 100644
index 00000000000..c34a3789b58
--- /dev/null
+++ b/changelogs/unreleased/bvl-no-permanent-redirect.yml
@@ -0,0 +1,5 @@
+---
+title: Don't create permanent redirect routes
+merge_request: 17521
+author:
+type: changed
diff --git a/changelogs/unreleased/dashboard-view-user-choices-issues-merge-requests.yml b/changelogs/unreleased/dashboard-view-user-choices-issues-merge-requests.yml
new file mode 100644
index 00000000000..92a03070d78
--- /dev/null
+++ b/changelogs/unreleased/dashboard-view-user-choices-issues-merge-requests.yml
@@ -0,0 +1,5 @@
+---
+title: Add 'Assigned Issues' and 'Assigned Merge Requests' as dashboard view choices for users
+merge_request: 17860
+author: Elias Werberich
+type: added
diff --git a/changelogs/unreleased/dm-deploy-keys-default-user.yml b/changelogs/unreleased/dm-deploy-keys-default-user.yml
new file mode 100644
index 00000000000..b82d67d028c
--- /dev/null
+++ b/changelogs/unreleased/dm-deploy-keys-default-user.yml
@@ -0,0 +1,5 @@
+---
+title: Ensure hooks run when a deploy key without a user pushes
+merge_request:
+author:
+type: fixed
diff --git a/changelogs/unreleased/dm-refs-contains-sha-encoding.yml b/changelogs/unreleased/dm-refs-contains-sha-encoding.yml
new file mode 100644
index 00000000000..cdd9ead5a65
--- /dev/null
+++ b/changelogs/unreleased/dm-refs-contains-sha-encoding.yml
@@ -0,0 +1,5 @@
+---
+title: Fix listing commit branch/tags that contain special characters
+merge_request:
+author:
+type: fixed
diff --git a/changelogs/unreleased/dz-improve-app-settings-2.yml b/changelogs/unreleased/dz-improve-app-settings-2.yml
new file mode 100644
index 00000000000..ebe571decb8
--- /dev/null
+++ b/changelogs/unreleased/dz-improve-app-settings-2.yml
@@ -0,0 +1,5 @@
+---
+title: Redesign application settings to match project settings
+merge_request: 18019
+author:
+type: changed
diff --git a/changelogs/unreleased/expose-commits-mr-api.yml b/changelogs/unreleased/expose-commits-mr-api.yml
new file mode 100644
index 00000000000..77ea2f27431
--- /dev/null
+++ b/changelogs/unreleased/expose-commits-mr-api.yml
@@ -0,0 +1,5 @@
+---
+title: Allow merge requests related to a commit to be found via API
+merge_request:
+author:
+type: added
diff --git a/changelogs/unreleased/fix-ci-job-auto-retry.yml b/changelogs/unreleased/fix-ci-job-auto-retry.yml
deleted file mode 100644
index 442126461f0..00000000000
--- a/changelogs/unreleased/fix-ci-job-auto-retry.yml
+++ /dev/null
@@ -1,5 +0,0 @@
----
-title: Prevent auto-retry AccessDenied error from stopping transition to failed
-merge_request: 17862
-author:
-type: fixed
diff --git a/changelogs/unreleased/fix-gb-fix-background-pipeline-stages-migration.yml b/changelogs/unreleased/fix-gb-fix-background-pipeline-stages-migration.yml
new file mode 100644
index 00000000000..63948f0c196
--- /dev/null
+++ b/changelogs/unreleased/fix-gb-fix-background-pipeline-stages-migration.yml
@@ -0,0 +1,5 @@
+---
+title: Fix exceptions raised when migrating pipeline stages in the background
+merge_request: 18076
+author:
+type: fixed
diff --git a/changelogs/unreleased/ide-file-row-hover-style.yml b/changelogs/unreleased/ide-file-row-hover-style.yml
new file mode 100644
index 00000000000..158379a5aef
--- /dev/null
+++ b/changelogs/unreleased/ide-file-row-hover-style.yml
@@ -0,0 +1,5 @@
+---
+title: Added hover background color to IDE file list rows
+merge_request:
+author:
+type: changed
diff --git a/changelogs/unreleased/jivl-change-copy-text-promote-milestones-labels.yml b/changelogs/unreleased/jivl-change-copy-text-promote-milestones-labels.yml
new file mode 100644
index 00000000000..fb3095552d3
--- /dev/null
+++ b/changelogs/unreleased/jivl-change-copy-text-promote-milestones-labels.yml
@@ -0,0 +1,5 @@
+---
+title: Correct copy text for the promote milestone and label modals
+merge_request: 17726
+author:
+type: fixed
diff --git a/changelogs/unreleased/move-registry-after-cicd-project-nav-sidebar.yml b/changelogs/unreleased/move-registry-after-cicd-project-nav-sidebar.yml
new file mode 100644
index 00000000000..03a6fd42228
--- /dev/null
+++ b/changelogs/unreleased/move-registry-after-cicd-project-nav-sidebar.yml
@@ -0,0 +1,5 @@
+---
+title: Move 'Registry' after 'CI/CD' in project navigation sidebar
+merge_request: 18018
+author: Elias Werberich
+type: changed
diff --git a/changelogs/unreleased/poc-upload-hashing-path.yml b/changelogs/unreleased/poc-upload-hashing-path.yml
new file mode 100644
index 00000000000..7970405bea1
--- /dev/null
+++ b/changelogs/unreleased/poc-upload-hashing-path.yml
@@ -0,0 +1,5 @@
+---
+title: File uploads in remote storage now support project renaming.
+merge_request: 4597
+author:
+type: fixed
diff --git a/changelogs/unreleased/reduce-query-count-for-mergerequestscontroller-show.yml b/changelogs/unreleased/reduce-query-count-for-mergerequestscontroller-show.yml
new file mode 100644
index 00000000000..1f793fe5e7c
--- /dev/null
+++ b/changelogs/unreleased/reduce-query-count-for-mergerequestscontroller-show.yml
@@ -0,0 +1,5 @@
+---
+title: Reduce number of queries when viewing a merge request
+merge_request:
+author:
+type: performance
diff --git a/changelogs/unreleased/refactor-move-time-tracking-comparison-pane-vue-component.yml b/changelogs/unreleased/refactor-move-time-tracking-comparison-pane-vue-component.yml
new file mode 100644
index 00000000000..88a4b8ec8c1
--- /dev/null
+++ b/changelogs/unreleased/refactor-move-time-tracking-comparison-pane-vue-component.yml
@@ -0,0 +1,5 @@
+---
+title: Move TimeTrackingComparisonPane vue component
+merge_request: 17931
+author: George Tsiolis
+type: performance
diff --git a/changelogs/unreleased/sh-update-loofah.yml b/changelogs/unreleased/sh-update-loofah.yml
deleted file mode 100644
index 6aff0f91939..00000000000
--- a/changelogs/unreleased/sh-update-loofah.yml
+++ /dev/null
@@ -1,5 +0,0 @@
----
-title: Bump rails-html-sanitizer to 1.0.4
-merge_request:
-author:
-type: security
diff --git a/changelogs/unreleased/update-unresolved-discussions-vue-component.yml b/changelogs/unreleased/update-unresolved-discussions-vue-component.yml
new file mode 100644
index 00000000000..246eaaae2bd
--- /dev/null
+++ b/changelogs/unreleased/update-unresolved-discussions-vue-component.yml
@@ -0,0 +1,5 @@
+---
+title: Add i18n and update specs for ShaMismatch vue component
+merge_request: 17870
+author: George Tsiolis
+type: performance
diff --git a/changelogs/unreleased/workhorse-gitaly-mandatory.yml b/changelogs/unreleased/workhorse-gitaly-mandatory.yml
new file mode 100644
index 00000000000..77b62302e86
--- /dev/null
+++ b/changelogs/unreleased/workhorse-gitaly-mandatory.yml
@@ -0,0 +1,5 @@
+---
+title: Make all workhorse gitaly calls opt-out, take 2
+merge_request: 18043
+author:
+type: other
diff --git a/changelogs/unreleased/zj-remote-repo-exists.yml b/changelogs/unreleased/zj-remote-repo-exists.yml
new file mode 100644
index 00000000000..f024b83159b
--- /dev/null
+++ b/changelogs/unreleased/zj-remote-repo-exists.yml
@@ -0,0 +1,5 @@
+---
+title: Test if remote repository exists when importing wikis
+merge_request:
+author:
+type: fixed
diff --git a/config/gitlab.yml.example b/config/gitlab.yml.example
index bd696a7f2c5..8db66037d61 100644
--- a/config/gitlab.yml.example
+++ b/config/gitlab.yml.example
@@ -145,18 +145,55 @@ production: &base
enabled: true
# The location where build artifacts are stored (default: shared/artifacts).
# path: shared/artifacts
+ # object_store:
+ # enabled: false
+ # remote_directory: artifacts # The bucket name
+ # background_upload: false # Temporary option to limit automatic upload (Default: true)
+ # proxy_download: false # Passthrough all downloads via GitLab instead of using Redirects to Object Storage
+ # connection:
+ # provider: AWS # Only AWS supported at the moment
+ # aws_access_key_id: AWS_ACCESS_KEY_ID
+ # aws_secret_access_key: AWS_SECRET_ACCESS_KEY
+ # region: eu-central-1
## Git LFS
lfs:
enabled: true
# The location where LFS objects are stored (default: shared/lfs-objects).
# storage_path: shared/lfs-objects
+ object_store:
+ enabled: false
+ remote_directory: lfs-objects # Bucket name
+ # background_upload: false # Temporary option to limit automatic upload (Default: true)
+ # proxy_download: false # Passthrough all downloads via GitLab instead of using Redirects to Object Storage
+ connection:
+ provider: AWS
+ aws_access_key_id: AWS_ACCESS_KEY_ID
+ aws_secret_access_key: AWS_SECRET_ACCESS_KEY
+ region: eu-central-1
+ # Use the following options to configure an AWS compatible host
+ # host: 'localhost' # default: s3.amazonaws.com
+ # endpoint: 'http://127.0.0.1:9000' # default: nil
+ # path_style: true # Use 'host/bucket_name/object' instead of 'bucket_name.host/object'
## Uploads (attachments, avatars, etc...)
uploads:
# The location where uploads objects are stored (default: public/).
# storage_path: public/
# base_dir: uploads/-/system
+ object_store:
+ enabled: false
+ # remote_directory: uploads # Bucket name
+ # background_upload: false # Temporary option to limit automatic upload (Default: true)
+ # proxy_download: false # Passthrough all downloads via GitLab instead of using Redirects to Object Storage
+ # connection:
+ # provider: AWS
+ # aws_access_key_id: AWS_ACCESS_KEY_ID
+ # aws_secret_access_key: AWS_SECRET_ACCESS_KEY
+ # region: eu-central-1
+ # host: 'localhost' # default: s3.amazonaws.com
+ # endpoint: 'http://127.0.0.1:9000' # default: nil
+ # path_style: true # Use 'host/bucket_name/object' instead of 'bucket_name.host/object'
## GitLab Pages
pages:
@@ -481,7 +518,17 @@ production: &base
# - { name: 'twitter',
# app_id: 'YOUR_APP_ID',
# app_secret: 'YOUR_APP_SECRET' }
- #
+ # - { name: 'jwt',
+ # app_secret: 'YOUR_APP_SECRET',
+ # args: {
+ # algorithm: 'HS256',
+ # uid_claim: 'email',
+ # required_claims: ["name", "email"],
+ # info_map: { name: "name", email: "email" },
+ # auth_url: 'https://example.com/',
+ # valid_within: nil,
+ # }
+ # }
# - { name: 'saml',
# label: 'Our SAML Provider',
# groups_attribute: 'Groups',
@@ -655,10 +702,39 @@ test:
enabled: true
lfs:
enabled: false
+ # The location where LFS objects are stored (default: shared/lfs-objects).
+ # storage_path: shared/lfs-objects
+ object_store:
+ enabled: false
+ remote_directory: lfs-objects # The bucket name
+ connection:
+ provider: AWS # Only AWS supported at the moment
+ aws_access_key_id: AWS_ACCESS_KEY_ID
+ aws_secret_access_key: AWS_SECRET_ACCESS_KEY
+ region: us-east-1
artifacts:
path: tmp/tests/artifacts
+ enabled: true
+ # The location where build artifacts are stored (default: shared/artifacts).
+ # path: shared/artifacts
+ object_store:
+ enabled: false
+ remote_directory: artifacts # The bucket name
+ background_upload: false
+ connection:
+ provider: AWS # Only AWS supported at the moment
+ aws_access_key_id: AWS_ACCESS_KEY_ID
+ aws_secret_access_key: AWS_SECRET_ACCESS_KEY
+ region: us-east-1
uploads:
storage_path: tmp/tests/public
+ object_store:
+ enabled: false
+ connection:
+ provider: AWS # Only AWS supported at the moment
+ aws_access_key_id: AWS_ACCESS_KEY_ID
+ aws_secret_access_key: AWS_SECRET_ACCESS_KEY
+ region: us-east-1
gitlab:
host: localhost
port: 80
@@ -733,6 +809,17 @@ test:
- { name: 'twitter',
app_id: 'YOUR_APP_ID',
app_secret: 'YOUR_APP_SECRET' }
+ - { name: 'jwt',
+ app_secret: 'YOUR_APP_SECRET',
+ args: {
+ algorithm: 'HS256',
+ uid_claim: 'email',
+ required_claims: ["name", "email"],
+ info_map: { name: "name", email: "email" },
+ auth_url: 'https://example.com/',
+ valid_within: nil,
+ }
+ }
- { name: 'auth0',
args: {
client_id: 'YOUR_AUTH0_CLIENT_ID',
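# A hedged illustration of where the object_store `connection:` hash above
# ends up: it is handed to Fog more or less verbatim (compare the Fog::Storage
# call removed from config/initializers/carrierwave.rb later in this diff).
# The credential values are the same placeholders used in the YAML example.
require 'fog/aws'

Fog::Storage.new(
  provider:              'AWS',
  aws_access_key_id:     'AWS_ACCESS_KEY_ID',
  aws_secret_access_key: 'AWS_SECRET_ACCESS_KEY',
  region:                'eu-central-1'
)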
diff --git a/config/initializers/1_settings.rb b/config/initializers/1_settings.rb
index 53cf0010d8e..69b59b26d8c 100644
--- a/config/initializers/1_settings.rb
+++ b/config/initializers/1_settings.rb
@@ -305,6 +305,13 @@ Settings.artifacts['storage_path'] = Settings.absolute(Settings.artifacts.values
# Settings.artifact['path'] is deprecated, use `storage_path` instead
Settings.artifacts['path'] = Settings.artifacts['storage_path']
Settings.artifacts['max_size'] ||= 100 # in megabytes
+Settings.artifacts['object_store'] ||= Settingslogic.new({})
+Settings.artifacts['object_store']['enabled'] = false if Settings.artifacts['object_store']['enabled'].nil?
+Settings.artifacts['object_store']['remote_directory'] ||= nil
+Settings.artifacts['object_store']['background_upload'] = true if Settings.artifacts['object_store']['background_upload'].nil?
+Settings.artifacts['object_store']['proxy_download'] = false if Settings.artifacts['object_store']['proxy_download'].nil?
+# Convert upload connection settings to use string keys, to make Fog happy
+Settings.artifacts['object_store']['connection']&.deep_stringify_keys!
#
# Registry
@@ -340,6 +347,14 @@ Settings.pages['artifacts_server'] ||= Settings.pages['enabled'] if Settings.pa
Settings['lfs'] ||= Settingslogic.new({})
Settings.lfs['enabled'] = true if Settings.lfs['enabled'].nil?
Settings.lfs['storage_path'] = Settings.absolute(Settings.lfs['storage_path'] || File.join(Settings.shared['path'], "lfs-objects"))
+Settings.lfs['object_store'] ||= Settingslogic.new({})
+Settings.lfs['object_store']['enabled'] = false if Settings.lfs['object_store']['enabled'].nil?
+Settings.lfs['object_store']['remote_directory'] ||= nil
+Settings.lfs['object_store']['direct_upload'] = false if Settings.lfs['object_store']['direct_upload'].nil?
+Settings.lfs['object_store']['background_upload'] = true if Settings.lfs['object_store']['background_upload'].nil?
+Settings.lfs['object_store']['proxy_download'] = false if Settings.lfs['object_store']['proxy_download'].nil?
+# Convert upload connection settings to use string keys, to make Fog happy
+Settings.lfs['object_store']['connection']&.deep_stringify_keys!
#
# Uploads
@@ -347,6 +362,13 @@ Settings.lfs['storage_path'] = Settings.absolute(Settings.lfs['storage_path'] ||
Settings['uploads'] ||= Settingslogic.new({})
Settings.uploads['storage_path'] = Settings.absolute(Settings.uploads['storage_path'] || 'public')
Settings.uploads['base_dir'] = Settings.uploads['base_dir'] || 'uploads/-/system'
+Settings.uploads['object_store'] ||= Settingslogic.new({})
+Settings.uploads['object_store']['enabled'] = false if Settings.uploads['object_store']['enabled'].nil?
+Settings.uploads['object_store']['remote_directory'] ||= 'uploads'
+Settings.uploads['object_store']['background_upload'] = true if Settings.uploads['object_store']['background_upload'].nil?
+Settings.uploads['object_store']['proxy_download'] = false if Settings.uploads['object_store']['proxy_download'].nil?
+# Convert upload connection settings to use string keys, to make Fog happy
+Settings.uploads['object_store']['connection']&.deep_stringify_keys!
#
# Mattermost
diff --git a/config/initializers/carrierwave.rb b/config/initializers/carrierwave.rb
index cd7df44351a..5cde6cbb0ff 100644
--- a/config/initializers/carrierwave.rb
+++ b/config/initializers/carrierwave.rb
@@ -28,16 +28,4 @@ if File.exist?(aws_file)
# when fog_public is false and provider is AWS or Google, defaults to 600
config.fog_authenticated_url_expiration = 1 << 29
end
-
- # Mocking Fog requests, based on: https://github.com/carrierwaveuploader/carrierwave/wiki/How-to%3A-Test-Fog-based-uploaders
- if Rails.env.test?
- Fog.mock!
- connection = ::Fog::Storage.new(
- aws_access_key_id: AWS_CONFIG['access_key_id'],
- aws_secret_access_key: AWS_CONFIG['secret_access_key'],
- provider: 'AWS',
- region: AWS_CONFIG['region']
- )
- connection.directories.create(key: AWS_CONFIG['bucket'])
- end
end
diff --git a/config/initializers/fog_google_https_private_urls.rb b/config/initializers/fog_google_https_private_urls.rb
new file mode 100644
index 00000000000..f92e623a5d2
--- /dev/null
+++ b/config/initializers/fog_google_https_private_urls.rb
@@ -0,0 +1,20 @@
+#
+# Monkey patching the https support for private urls
+# See https://gitlab.com/gitlab-org/gitlab-ee/issues/4879
+#
+module Fog
+ module Storage
+ class GoogleXML
+ class File < Fog::Model
+ module MonkeyPatch
+ def url(expires)
+ requires :key
+ collection.get_https_url(key, expires)
+ end
+ end
+
+ prepend MonkeyPatch
+ end
+ end
+ end
+end
diff --git a/config/routes/ci.rb b/config/routes/ci.rb
index 60c1724bc05..ebd321ed097 100644
--- a/config/routes/ci.rb
+++ b/config/routes/ci.rb
@@ -1,5 +1,5 @@
namespace :ci do
- resource :lint, only: [:show, :create]
+ resource :lint, only: :show
root to: redirect('')
end
diff --git a/config/routes/project.rb b/config/routes/project.rb
index f50b9aded8d..48ba8ef06f9 100644
--- a/config/routes/project.rb
+++ b/config/routes/project.rb
@@ -280,6 +280,10 @@ constraints(::Constraints::ProjectUrlConstrainer.new) do
post :keep
end
end
+
+ namespace :ci do
+ resource :lint, only: [:show, :create]
+ end
end
draw :legacy_builds
diff --git a/config/sidekiq_queues.yml b/config/sidekiq_queues.yml
index 554502c5d83..c811034b29d 100644
--- a/config/sidekiq_queues.yml
+++ b/config/sidekiq_queues.yml
@@ -68,5 +68,7 @@
- [project_migrate_hashed_storage, 1]
- [storage_migrator, 1]
- [pages_domain_verification, 1]
+ - [object_storage_upload, 1]
+ - [object_storage, 1]
- [plugin, 1]
- [pipeline_background, 1]
diff --git a/config/webpack.config.js b/config/webpack.config.js
index b74d9dde494..39e9fbbd530 100644
--- a/config/webpack.config.js
+++ b/config/webpack.config.js
@@ -104,7 +104,7 @@ const config = {
},
},
{
- test: /katex.css$/,
+ test: /katex.min.css$/,
include: /node_modules\/katex\/dist/,
use: [
{ loader: 'style-loader' },
diff --git a/db/migrate/20170601163708_add_artifacts_store_to_ci_build.rb b/db/migrate/20170601163708_add_artifacts_store_to_ci_build.rb
new file mode 100644
index 00000000000..e82109190a7
--- /dev/null
+++ b/db/migrate/20170601163708_add_artifacts_store_to_ci_build.rb
@@ -0,0 +1,10 @@
+class AddArtifactsStoreToCiBuild < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def change
+ add_column(:ci_builds, :artifacts_file_store, :integer)
+ add_column(:ci_builds, :artifacts_metadata_store, :integer)
+ end
+end
diff --git a/db/migrate/20170825015534_add_file_store_to_lfs_objects.rb b/db/migrate/20170825015534_add_file_store_to_lfs_objects.rb
new file mode 100644
index 00000000000..41bb031014f
--- /dev/null
+++ b/db/migrate/20170825015534_add_file_store_to_lfs_objects.rb
@@ -0,0 +1,31 @@
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class AddFileStoreToLfsObjects < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ # Set this constant to true if this migration requires downtime.
+ DOWNTIME = false
+
+ # When a migration requires downtime you **must** uncomment the following
+ # constant and define a short and easy to understand explanation as to why the
+ # migration requires downtime.
+ # DOWNTIME_REASON = ''
+
+ # When using the methods "add_concurrent_index", "remove_concurrent_index" or
+ # "add_column_with_default" you must disable the use of transactions
+ # as these methods can not run in an existing transaction.
+ # When using "add_concurrent_index" or "remove_concurrent_index" methods make sure
+ # that either of them is the _only_ method called in the migration,
+ # any other changes should go in a separate migration.
+ # This ensures that upon failure _only_ the index creation or removing fails
+ # and can be retried or reverted easily.
+ #
+ # To disable transactions uncomment the following line and remove these
+ # comments:
+ # disable_ddl_transaction!
+
+ def change
+ add_column(:lfs_objects, :file_store, :integer)
+ end
+end
diff --git a/db/migrate/20170918072949_add_file_store_job_artifacts.rb b/db/migrate/20170918072949_add_file_store_job_artifacts.rb
new file mode 100644
index 00000000000..b1f1bea6deb
--- /dev/null
+++ b/db/migrate/20170918072949_add_file_store_job_artifacts.rb
@@ -0,0 +1,10 @@
+class AddFileStoreJobArtifacts < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ disable_ddl_transaction!
+ DOWNTIME = false
+
+ def change
+ add_column(:ci_job_artifacts, :file_store, :integer)
+ end
+end
diff --git a/db/migrate/20171214144320_add_store_column_to_uploads.rb b/db/migrate/20171214144320_add_store_column_to_uploads.rb
new file mode 100644
index 00000000000..e35798e2c41
--- /dev/null
+++ b/db/migrate/20171214144320_add_store_column_to_uploads.rb
@@ -0,0 +1,12 @@
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class AddStoreColumnToUploads < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def change
+ add_column(:uploads, :store, :integer)
+ end
+end
diff --git a/db/migrate/20180219153455_add_maximum_timeout_to_ci_runners.rb b/db/migrate/20180219153455_add_maximum_timeout_to_ci_runners.rb
new file mode 100644
index 00000000000..072e696a43e
--- /dev/null
+++ b/db/migrate/20180219153455_add_maximum_timeout_to_ci_runners.rb
@@ -0,0 +1,9 @@
+class AddMaximumTimeoutToCiRunners < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def change
+ add_column :ci_runners, :maximum_timeout, :integer
+ end
+end
diff --git a/db/migrate/20180301010859_create_ci_builds_metadata_table.rb b/db/migrate/20180301010859_create_ci_builds_metadata_table.rb
new file mode 100644
index 00000000000..ce737444092
--- /dev/null
+++ b/db/migrate/20180301010859_create_ci_builds_metadata_table.rb
@@ -0,0 +1,20 @@
+class CreateCiBuildsMetadataTable < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def change
+ create_table :ci_builds_metadata do |t|
+ t.integer :build_id, null: false
+ t.integer :project_id, null: false
+ t.integer :timeout
+ t.integer :timeout_source, null: false, default: 1
+
+ t.foreign_key :ci_builds, column: :build_id, on_delete: :cascade
+ t.foreign_key :projects, column: :project_id, on_delete: :cascade
+
+ t.index :build_id, unique: true
+ t.index :project_id
+ end
+ end
+end
diff --git a/db/migrate/20180327101207_remove_index_from_events_table.rb b/db/migrate/20180327101207_remove_index_from_events_table.rb
new file mode 100644
index 00000000000..172441da65b
--- /dev/null
+++ b/db/migrate/20180327101207_remove_index_from_events_table.rb
@@ -0,0 +1,18 @@
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class RemoveIndexFromEventsTable < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ remove_concurrent_index :events, :author_id
+ end
+
+ def down
+ add_concurrent_index :events, :author_id
+ end
+end
diff --git a/db/post_migrate/20180305100050_remove_permanent_from_redirect_routes.rb b/db/post_migrate/20180305100050_remove_permanent_from_redirect_routes.rb
new file mode 100644
index 00000000000..db5165dbe70
--- /dev/null
+++ b/db/post_migrate/20180305100050_remove_permanent_from_redirect_routes.rb
@@ -0,0 +1,37 @@
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class RemovePermanentFromRedirectRoutes < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+ disable_ddl_transaction!
+
+ INDEX_NAME_PERM = "index_redirect_routes_on_path_text_pattern_ops_where_permanent"
+ INDEX_NAME_TEMP = "index_redirect_routes_on_path_text_pattern_ops_where_temporary"
+
+ def up
+ # These indexes were created on Postgres only in:
+ # ReworkRedirectRoutesIndexes:
+ # https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/16211
+ if Gitlab::Database.postgresql?
+ disable_statement_timeout
+
+ execute "DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME_PERM};"
+ execute "DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME_TEMP};"
+ end
+
+ remove_column(:redirect_routes, :permanent)
+ end
+
+ def down
+ add_column(:redirect_routes, :permanent, :boolean)
+
+ if Gitlab::Database.postgresql?
+ disable_statement_timeout
+
+ execute("CREATE INDEX CONCURRENTLY #{INDEX_NAME_PERM} ON redirect_routes (lower(path) varchar_pattern_ops) where (permanent);")
+ execute("CREATE INDEX CONCURRENTLY #{INDEX_NAME_TEMP} ON redirect_routes (lower(path) varchar_pattern_ops) where (not permanent or permanent is null) ;")
+ end
+ end
+end
diff --git a/db/post_migrate/20180306164012_add_path_index_to_redirect_routes.rb b/db/post_migrate/20180306164012_add_path_index_to_redirect_routes.rb
new file mode 100644
index 00000000000..d6fb4f06695
--- /dev/null
+++ b/db/post_migrate/20180306164012_add_path_index_to_redirect_routes.rb
@@ -0,0 +1,38 @@
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class AddPathIndexToRedirectRoutes < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ # Set this constant to true if this migration requires downtime.
+ DOWNTIME = false
+ disable_ddl_transaction!
+
+ INDEX_NAME = 'index_redirect_routes_on_path_unique_text_pattern_ops'
+
+ # Indexing on LOWER(path) varchar_pattern_ops speeds up the LIKE query in
+ # RedirectRoute.matching_path_and_descendants
+ #
+ # This same index is also added in the `ReworkRedirectRoutesIndexes` so this
+ # is a no-op in most cases. But this migration is also called from the
+ # `setup_postgresql.rake` task when setting up a new database, in which case
+ # we want to create the index.
+ def up
+ return unless Gitlab::Database.postgresql?
+
+ disable_statement_timeout
+
+ unless index_exists_by_name?(:redirect_routes, INDEX_NAME)
+ execute("CREATE UNIQUE INDEX CONCURRENTLY #{INDEX_NAME} ON redirect_routes (lower(path) varchar_pattern_ops);")
+ end
+ end
+
+ def down
+ # Do nothing in the DOWN, since the index above is originally created in
+ # `ReworkRedirectRoutesIndexes`; this migration wouldn't have actually
+ # created any new index.
+ #
+ # This migration is only here to be called from `setup_postgresql.rake` so
+ # any newly created database would have this index.
+ end
+end
diff --git a/db/schema.rb b/db/schema.rb
index b6adc3fe1f4..9aaefcf1c8d 100644
--- a/db/schema.rb
+++ b/db/schema.rb
@@ -11,7 +11,7 @@
#
# It's strongly recommended that you check this file into your version control system.
-ActiveRecord::Schema.define(version: 20180323150945) do
+ActiveRecord::Schema.define(version: 20180327101207) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
@@ -307,6 +307,8 @@ ActiveRecord::Schema.define(version: 20180323150945) do
t.integer "auto_canceled_by_id"
t.boolean "retried"
t.integer "stage_id"
+ t.integer "artifacts_file_store"
+ t.integer "artifacts_metadata_store"
t.boolean "protected"
t.integer "failure_reason"
end
@@ -327,6 +329,16 @@ ActiveRecord::Schema.define(version: 20180323150945) do
add_index "ci_builds", ["updated_at"], name: "index_ci_builds_on_updated_at", using: :btree
add_index "ci_builds", ["user_id"], name: "index_ci_builds_on_user_id", using: :btree
+ create_table "ci_builds_metadata", force: :cascade do |t|
+ t.integer "build_id", null: false
+ t.integer "project_id", null: false
+ t.integer "timeout"
+ t.integer "timeout_source", default: 1, null: false
+ end
+
+ add_index "ci_builds_metadata", ["build_id"], name: "index_ci_builds_metadata_on_build_id", unique: true, using: :btree
+ add_index "ci_builds_metadata", ["project_id"], name: "index_ci_builds_metadata_on_project_id", using: :btree
+
create_table "ci_group_variables", force: :cascade do |t|
t.string "key", null: false
t.text "value"
@@ -345,6 +357,7 @@ ActiveRecord::Schema.define(version: 20180323150945) do
t.integer "project_id", null: false
t.integer "job_id", null: false
t.integer "file_type", null: false
+ t.integer "file_store"
t.integer "size", limit: 8
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
@@ -456,6 +469,7 @@ ActiveRecord::Schema.define(version: 20180323150945) do
t.boolean "locked", default: false, null: false
t.integer "access_level", default: 0, null: false
t.string "ip_address"
+ t.integer "maximum_timeout"
end
add_index "ci_runners", ["contacted_at"], name: "index_ci_runners_on_contacted_at", using: :btree
@@ -729,7 +743,6 @@ ActiveRecord::Schema.define(version: 20180323150945) do
add_index "events", ["action"], name: "index_events_on_action", using: :btree
add_index "events", ["author_id", "project_id"], name: "index_events_on_author_id_and_project_id", using: :btree
- add_index "events", ["author_id"], name: "index_events_on_author_id", using: :btree
add_index "events", ["project_id", "id"], name: "index_events_on_project_id_and_id", using: :btree
add_index "events", ["target_type", "target_id"], name: "index_events_on_target_type_and_target_id", using: :btree
@@ -1009,6 +1022,7 @@ ActiveRecord::Schema.define(version: 20180323150945) do
t.datetime "created_at"
t.datetime "updated_at"
t.string "file"
+ t.integer "file_store"
end
add_index "lfs_objects", ["oid"], name: "index_lfs_objects_on_oid", unique: true, using: :btree
@@ -1602,7 +1616,6 @@ ActiveRecord::Schema.define(version: 20180323150945) do
t.string "path", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
- t.boolean "permanent"
end
add_index "redirect_routes", ["path"], name: "index_redirect_routes_on_path", unique: true, using: :btree
@@ -1824,6 +1837,7 @@ ActiveRecord::Schema.define(version: 20180323150945) do
t.datetime "created_at", null: false
t.string "mount_point"
t.string "secret"
+ t.integer "store"
end
add_index "uploads", ["checksum"], name: "index_uploads_on_checksum", using: :btree
@@ -2024,6 +2038,8 @@ ActiveRecord::Schema.define(version: 20180323150945) do
add_foreign_key "ci_builds", "ci_pipelines", column: "auto_canceled_by_id", name: "fk_a2141b1522", on_delete: :nullify
add_foreign_key "ci_builds", "ci_stages", column: "stage_id", name: "fk_3a9eaa254d", on_delete: :cascade
add_foreign_key "ci_builds", "projects", name: "fk_befce0568a", on_delete: :cascade
+ add_foreign_key "ci_builds_metadata", "ci_builds", column: "build_id", on_delete: :cascade
+ add_foreign_key "ci_builds_metadata", "projects", on_delete: :cascade
add_foreign_key "ci_group_variables", "namespaces", column: "group_id", name: "fk_33ae4d58d8", on_delete: :cascade
add_foreign_key "ci_job_artifacts", "ci_builds", column: "job_id", on_delete: :cascade
add_foreign_key "ci_job_artifacts", "projects", on_delete: :cascade
diff --git a/doc/README.md b/doc/README.md
index be805a2ccc4..604f7244a34 100644
--- a/doc/README.md
+++ b/doc/README.md
@@ -211,9 +211,9 @@ straight away.
### GitLab self-hosted
-With GitLab self-hosted, you deploy your own GitLab instance on-premises or on a private cloud of your choice. GitLab self-hosted is available for [free and with paid subscriptions](https://about.gitlab.com/products/): Libre, Starter, Premium, and Ultimate.
+With GitLab self-hosted, you deploy your own GitLab instance on-premises or on a private cloud of your choice. GitLab self-hosted is available for [free and with paid subscriptions](https://about.gitlab.com/products/): Core, Starter, Premium, and Ultimate.
-Every feature available in Libre is also available in Starter, Premium, and Ultimate.
+Every feature available in Core is also available in Starter, Premium, and Ultimate.
Starter features are also available in Premium and Ultimate, and Premium features are also
available in Ultimate.
@@ -227,7 +227,7 @@ GitLab.com subscriptions grants access
to the same features available in GitLab self-hosted, **expect
[administration](administration/index.md) tools and settings**:
-- GitLab.com Free includes the same features available in GitLab Libre
+- GitLab.com Free includes the same features available in Core
- GitLab.com Bronze includes the same features available in GitLab Starter
- GitLab.com Silver includes the same features available in GitLab Premium
- GitLab.com Gold includes the same features available in GitLab Ultimate
diff --git a/doc/administration/auth/jwt.md b/doc/administration/auth/jwt.md
new file mode 100644
index 00000000000..b51e705ab52
--- /dev/null
+++ b/doc/administration/auth/jwt.md
@@ -0,0 +1,72 @@
+# JWT OmniAuth provider
+
+To enable the JWT OmniAuth provider, you must register your application with JWT.
+JWT will provide you with a secret key for you to use.
+
+1. On your GitLab server, open the configuration file.
+
+ For Omnibus GitLab:
+
+ ```sh
+ sudo editor /etc/gitlab/gitlab.rb
+ ```
+
+ For installations from source:
+
+ ```sh
+ cd /home/git/gitlab
+ sudo -u git -H editor config/gitlab.yml
+ ```
+
+1. See [Initial OmniAuth Configuration](../../integration/omniauth.md#initial-omniauth-configuration) for initial settings.
+1. Add the provider configuration.
+
+ For Omnibus GitLab:
+
+ ```ruby
+ gitlab_rails['omniauth_providers'] = [
+ { name: 'jwt',
+ app_secret: 'YOUR_APP_SECRET',
+ args: {
+ algorithm: 'HS256',
+ uid_claim: 'email',
+ required_claims: ["name", "email"],
+      info_map: { name: "name", email: "email" },
+ auth_url: 'https://example.com/',
+ valid_within: nil,
+ }
+ }
+ ]
+ ```
+
+ For installation from source:
+
+ ```
+ - { name: 'jwt',
+ app_secret: 'YOUR_APP_SECRET',
+ args: {
+ algorithm: 'HS256',
+ uid_claim: 'email',
+ required_claims: ["name", "email"],
+ info_map: { name: "name", email: "email" },
+ auth_url: 'https://example.com/',
+ valid_within: nil,
+ }
+ }
+ ```
+
+ NOTE: **Note:** For more information on each configuration option refer to
+ the [OmniAuth JWT usage documentation](https://github.com/mbleigh/omniauth-jwt#usage).
+
+1. Change `YOUR_APP_SECRET` to the client secret and set `auth_url` to your redirect URL.
+1. Save the configuration file.
+1. [Reconfigure GitLab][] or [restart GitLab][] for the changes to take effect if you
+ installed GitLab via Omnibus or from source respectively.
+
+On the sign in page there should now be a JWT icon below the regular sign in form.
+Click the icon to begin the authentication process. JWT will ask the user to
+sign in and authorize the GitLab application. If everything goes well, the user
+will be redirected to GitLab and will be signed in.
+
+[reconfigure GitLab]: ../restart_gitlab.md#omnibus-gitlab-reconfigure
+[restart GitLab]: ../restart_gitlab.md#installations-from-source
diff --git a/doc/administration/index.md b/doc/administration/index.md
index 69efaf75140..60a45426636 100644
--- a/doc/administration/index.md
+++ b/doc/administration/index.md
@@ -11,8 +11,8 @@ available through [different subscriptions](https://about.gitlab.com/products/).
You can [install GitLab CE or GitLab EE](https://about.gitlab.com/installation/ce-or-ee/),
but the features you'll have access to depend on the subscription you choose
-(Libre, Starter, Premium, or Ultimate). GitLab Community Edition installations
-only have access to Libre features.
+(Core, Starter, Premium, or Ultimate). GitLab Community Edition installations
+only have access to Core features.
GitLab.com is administered by GitLab, Inc., therefore, only GitLab team members have
access to its admin configurations. If you're a GitLab.com user, please check the
@@ -111,6 +111,7 @@ server with IMAP authentication on Ubuntu, to be used with Reply by email.
- [Enable/disable GitLab CI/CD](../ci/enable_or_disable_ci.md#site-wide-admin-setting): Enable or disable GitLab CI/CD for your instance.
- [GitLab CI/CD admin settings](../user/admin_area/settings/continuous_integration.md): Define max artifacts size and expiration time.
- [Job artifacts](job_artifacts.md): Enable, disable, and configure job artifacts (a set of files and directories which are outputted by a job when it completes successfully).
+- [Job traces](job_traces.md): Information about the job traces (logs).
- [Artifacts size and expiration](../user/admin_area/settings/continuous_integration.md#maximum-artifacts-size): Define maximum artifacts limits and expiration date.
- [Register Shared and specific Runners](../ci/runners/README.md#registering-a-shared-runner): Learn how to register and configure Shared and specific Runners to your own instance.
- [Shared Runners pipelines quota](../user/admin_area/settings/continuous_integration.md#shared-runners-pipeline-minutes-quota): Limit the usage of pipeline minutes for Shared Runners.
diff --git a/doc/administration/job_artifacts.md b/doc/administration/job_artifacts.md
index d86a54daadd..ac3a12930c3 100644
--- a/doc/administration/job_artifacts.md
+++ b/doc/administration/job_artifacts.md
@@ -87,10 +87,124 @@ _The artifacts are stored by default in
### Using object storage
+>**Notes:**
+- [Introduced][ee-1762] in [GitLab Premium][eep] 9.4.
+- Since version 9.5, artifacts are [browsable] when object storage is enabled;
+  9.4 lacks this feature.
> Available in [GitLab Premium](https://about.gitlab.com/products/) and
[GitLab.com Silver](https://about.gitlab.com/gitlab-com/).
+> Since version 10.6, available in [GitLab CE](https://about.gitlab.com/products/)
+
+If you don't want to use the local disk where GitLab is installed to store the
+artifacts, you can use an [object storage option][os] like AWS S3 instead.
+This configuration relies on valid AWS credentials already being configured.
+
+### Object Storage Settings
+
+For source installations the following settings are nested under `artifacts:` and then `object_store:`. On omnibus installs they are prefixed by `artifacts_object_store_`.
+
+| Setting | Description | Default |
+|---------|-------------|---------|
+| `enabled` | Enable/disable object storage | `false` |
+| `remote_directory` | The bucket name where Artifacts will be stored | |
+| `background_upload` | Set to false to disable automatic upload. Option may be removed once upload is direct to S3 | `true` |
+| `proxy_download` | Set to `true` to proxy all files served through GitLab. Leaving it as `false` reduces egress traffic, because clients download directly from the remote storage instead of having all data proxied | `false` |
+| `connection` | Various connection options described below | |
+
+#### S3 compatible connection settings
+
+The connection settings match those provided by [Fog](https://github.com/fog), and are as follows:
+
+| Setting | Description | Default |
+|---------|-------------|---------|
+| `provider` | Always `AWS` for compatible hosts | AWS |
+| `aws_access_key_id` | AWS credentials, or compatible | |
+| `aws_secret_access_key` | AWS credentials, or compatible | |
+| `region` | AWS region | us-east-1 |
+| `host` | S3 compatible host for when not using AWS, e.g. `localhost` or `storage.example.com` | s3.amazonaws.com |
+| `endpoint` | Can be used when configuring an S3 compatible service such as [Minio](https://www.minio.io), by entering a URL such as `http://127.0.0.1:9000` | (optional) |
+| `path_style` | Set to true to use `host/bucket_name/object` style paths instead of `bucket_name.host/object`. Leave as false for AWS S3 | false |
+
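+For example, when using an S3-compatible service such as [Minio](https://www.minio.io),
+the connection hash might look like the following sketch (the endpoint and
+credentials are placeholders for your own service):
+
+```ruby
+gitlab_rails['artifacts_object_store_connection'] = {
+  'provider' => 'AWS',                    # always AWS for S3-compatible hosts
+  'aws_access_key_id' => 'MINIO_ACCESS_KEY',
+  'aws_secret_access_key' => 'MINIO_SECRET_KEY',
+  'region' => 'us-east-1',
+  'endpoint' => 'http://127.0.0.1:9000',  # URL of the S3-compatible service
+  'path_style' => true                    # use host/bucket_name/object style paths
+}
+```
+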
+**In Omnibus installations:**
+
+_The artifacts are stored by default in
+`/var/opt/gitlab/gitlab-rails/shared/artifacts`._
+
+1. Edit `/etc/gitlab/gitlab.rb` and add the following lines by replacing with
+ the values you want:
+
+ ```ruby
+ gitlab_rails['artifacts_enabled'] = true
+ gitlab_rails['artifacts_object_store_enabled'] = true
+ gitlab_rails['artifacts_object_store_remote_directory'] = "artifacts"
+ gitlab_rails['artifacts_object_store_connection'] = {
+ 'provider' => 'AWS',
+ 'region' => 'eu-central-1',
+ 'aws_access_key_id' => 'AWS_ACCESS_KEY_ID',
+ 'aws_secret_access_key' => 'AWS_SECRET_ACCESS_KEY'
+ }
+ ```
+
+ NOTE: For GitLab 9.4+, if you are using AWS IAM profiles, be sure to omit the
+ AWS access key and secret access key/value pairs. For example:
+
+ ```ruby
+ gitlab_rails['artifacts_object_store_connection'] = {
+ 'provider' => 'AWS',
+ 'region' => 'eu-central-1',
+ 'use_iam_profile' => true
+ }
+ ```
+
+1. Save the file and [reconfigure GitLab][] for the changes to take effect.
+1. Migrate any existing local artifacts to the object storage:
+
+ ```bash
+ gitlab-rake gitlab:artifacts:migrate
+ ```
+
+ Currently this has to be executed manually and it will allow you to
+ migrate the existing artifacts to the object storage, but all new
+ artifacts will still be stored on the local disk. In the future
+ you will be given an option to define a default storage for all
+ new artifacts.
+
+---
+
+**In installations from source:**
+
+_The artifacts are stored by default in
+`/home/git/gitlab/shared/artifacts`._
+
+1. Edit `/home/git/gitlab/config/gitlab.yml` and add or amend the following
+ lines:
+
+ ```yaml
+ artifacts:
+ enabled: true
+ object_store:
+ enabled: true
+ remote_directory: "artifacts" # The bucket name
+ connection:
+ provider: AWS # Only AWS supported at the moment
+ aws_access_key_id: AWS_ACCESS_KEY_ID
+ aws_secret_access_key: AWS_SECRET_ACCESS_KEY
+ region: eu-central-1
+ ```
+
+1. Save the file and [restart GitLab][] for the changes to take effect.
+1. Migrate any existing local artifacts to the object storage:
+
+ ```bash
+ sudo -u git -H bundle exec rake gitlab:artifacts:migrate RAILS_ENV=production
+ ```
-Use an [Object storage option][ee-os] like AWS S3 to store job artifacts.
+ Currently this has to be executed manually and it will allow you to
+ migrate the existing artifacts to the object storage, but all new
+ artifacts will still be stored on the local disk. In the future
+ you will be given an option to define a default storage for all
+ new artifacts.
## Expiring artifacts
@@ -194,7 +308,7 @@ When clicking on a specific file, [GitLab Workhorse] extracts it
from the archive and the download begins. This implementation saves space,
memory and disk I/O.
-[reconfigure gitlab]: restart_gitlab.md "How to restart GitLab"
-[restart gitlab]: restart_gitlab.md "How to restart GitLab"
+[reconfigure gitlab]: restart_gitlab.md#omnibus-gitlab-reconfigure "How to reconfigure Omnibus GitLab"
+[restart gitlab]: restart_gitlab.md#installations-from-source "How to restart GitLab"
[gitlab workhorse]: https://gitlab.com/gitlab-org/gitlab-workhorse "GitLab Workhorse repository"
-[ee-os]: https://docs.gitlab.com/ee/administration/job_artifacts.html#using-object-storage
+[os]: https://docs.gitlab.com/administration/job_artifacts.html#using-object-storage
diff --git a/doc/administration/job_traces.md b/doc/administration/job_traces.md
new file mode 100644
index 00000000000..84a1ffeec98
--- /dev/null
+++ b/doc/administration/job_traces.md
@@ -0,0 +1,42 @@
+# Job traces (logs)
+
+By default, all job traces (logs) are saved to `/var/opt/gitlab/gitlab-ci/builds`
+and `/home/git/gitlab/builds` for Omnibus packages and installations from source
+respectively. The job logs are organized by year and month (for example, `2017_03`),
+and then by project ID.
+
+There isn't a way to automatically expire old job logs, but it's safe to remove
+them if they're taking up too much space. If you remove the logs manually, the
+job output in the UI will be empty.
+
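+If you do want to clean up old traces by hand, one option is a `find` command
+scoped to the year/month directories described above. A minimal sketch for an
+Omnibus installation (adjust the path for installations from source, and
+double-check what you are deleting first):
+
+```sh
+# Remove all job logs from 2016 on an Omnibus installation
+sudo find /var/opt/gitlab/gitlab-ci/builds -maxdepth 1 -type d -name '2016_*' -exec rm -rf {} +
+```
+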
+## Changing the job traces location
+
+To change the location where the job logs will be stored, follow the steps below.
+
+**In Omnibus installations:**
+
+1. Edit `/etc/gitlab/gitlab.rb` and add or amend the following line:
+
+ ```
+ gitlab_ci['builds_directory'] = '/mnt/to/gitlab-ci/builds'
+ ```
+
+1. Save the file and [reconfigure GitLab][] for the changes to take effect.
+
+---
+
+**In installations from source:**
+
+1. Edit `/home/git/gitlab/config/gitlab.yml` and add or amend the following lines:
+
+ ```yaml
+ gitlab_ci:
+ # The location where build traces are stored (default: builds/).
+ # Relative paths are relative to Rails.root.
+ builds_path: path/to/builds/
+ ```
+
+1. Save the file and [restart GitLab][] for the changes to take effect.
+
+[reconfigure gitlab]: restart_gitlab.md#omnibus-gitlab-reconfigure "How to reconfigure Omnibus GitLab"
+[restart gitlab]: restart_gitlab.md#installations-from-source "How to restart GitLab"
diff --git a/doc/administration/raketasks/uploads/migrate.md b/doc/administration/raketasks/uploads/migrate.md
new file mode 100644
index 00000000000..0cd33ffc122
--- /dev/null
+++ b/doc/administration/raketasks/uploads/migrate.md
@@ -0,0 +1,74 @@
+# Uploads Migrate Rake Task
+
+## Migrate to Object Storage
+
+After [configuring the object storage](../../uploads.md#using-object-storage) for GitLab's uploads, you may use this task to migrate existing uploads from the local storage to the remote storage.
+
+>**Note:**
+All of the processing will be done in a background worker and requires **no downtime**.
+
+This task uses 3 parameters to find uploads to migrate.
+
+>**Note:**
+These parameters are mainly internal to GitLab's structure; you may want to refer to the task list below instead.
+
+Parameter | Type | Description
+--------- | ---- | -----------
+`uploader_class` | string | Type of the uploader to migrate from
+`model_class` | string | Type of the model to migrate from
+`mount_point` | string/symbol | Name of the model's column on which the uploader is mounted.
+
+This task also accepts some environment variables which you can use to override
+certain values:
+
+Variable | Type | Description
+-------- | ---- | -----------
+`BATCH` | integer | Specifies the size of the batch. Defaults to 200.
+
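+For example, to migrate project avatars in batches of 500 instead of the
+default 200 (a usage sketch for an Omnibus installation):
+
+```bash
+gitlab-rake "gitlab:uploads:migrate[AvatarUploader, Project, :avatar]" BATCH=500
+```
+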
+**Omnibus Installation**
+
+```bash
+# gitlab-rake gitlab:uploads:migrate[uploader_class, model_class, mount_point]
+
+# Avatars
+gitlab-rake "gitlab:uploads:migrate[AvatarUploader, Project, :avatar]"
+gitlab-rake "gitlab:uploads:migrate[AvatarUploader, Group, :avatar]"
+gitlab-rake "gitlab:uploads:migrate[AvatarUploader, User, :avatar]"
+
+# Attachments
+gitlab-rake "gitlab:uploads:migrate[AttachmentUploader, Note, :attachment]"
+gitlab-rake "gitlab:uploads:migrate[AttachmentUploader, Appearance, :logo]"
+gitlab-rake "gitlab:uploads:migrate[AttachmentUploader, Appearance, :header_logo]"
+
+# Markdown
+gitlab-rake "gitlab:uploads:migrate[FileUploader, Project]"
+gitlab-rake "gitlab:uploads:migrate[PersonalFileUploader, Snippet]"
+gitlab-rake "gitlab:uploads:migrate[NamespaceFileUploader, Snippet]"
+gitlab-rake "gitlab:uploads:migrate[FileUploader, MergeRequest]"
+```
+
+**Source Installation**
+
+>**Note:**
+Use `RAILS_ENV=production` for every task.
+
+```bash
+# sudo -u git -H bundle exec rake gitlab:uploads:migrate
+
+# Avatars
+sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AvatarUploader, Project, :avatar]"
+sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AvatarUploader, Group, :avatar]"
+sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AvatarUploader, User, :avatar]"
+
+# Attachments
+sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AttachmentUploader, Note, :attachment]"
+sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AttachmentUploader, Appearance, :logo]"
+sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AttachmentUploader, Appearance, :header_logo]"
+
+# Markdown
+sudo -u git -H bundle exec rake "gitlab:uploads:migrate[FileUploader, Project]"
+sudo -u git -H bundle exec rake "gitlab:uploads:migrate[PersonalFileUploader, Snippet]"
+sudo -u git -H bundle exec rake "gitlab:uploads:migrate[NamespaceFileUploader, Snippet]"
+sudo -u git -H bundle exec rake "gitlab:uploads:migrate[FileUploader, MergeRequest]"
+
+```
diff --git a/doc/administration/uploads.md b/doc/administration/uploads.md
new file mode 100644
index 00000000000..a82735cc72c
--- /dev/null
+++ b/doc/administration/uploads.md
@@ -0,0 +1,209 @@
+# Uploads administration
+
+>**Notes:**
+Uploads represent all user data that may be sent to GitLab as a single file. As an example, avatars and notes' attachments are uploads. Uploads are integral to GitLab functionality, and therefore cannot be disabled.
+
+### Using local storage
+
+>**Notes:**
+This is the default configuration
+
+To change the location where the uploads are stored locally, follow the steps
+below.
+
+---
+
+**In Omnibus installations:**
+
+>**Notes:**
+For historical reasons, uploads are stored in a base directory, which by default is `uploads/-/system`. Changing this configuration option on an existing GitLab installation is strongly discouraged.
+
+_The uploads are stored by default in `/var/opt/gitlab/gitlab-rails/public/uploads/-/system`._
+
+1. To change the storage path for example to `/mnt/storage/uploads`, edit
+ `/etc/gitlab/gitlab.rb` and add the following line:
+
+ ```ruby
+ gitlab_rails['uploads_storage_path'] = "/mnt/storage/"
+ gitlab_rails['uploads_base_dir'] = "uploads"
+ ```
+
+1. Save the file and [reconfigure GitLab][] for the changes to take effect.
+
+---
+
+**In installations from source:**
+
+_The uploads are stored by default in
+`/home/git/gitlab/public/uploads/-/system`._
+
+1. To change the storage path for example to `/mnt/storage/uploads`, edit
+ `/home/git/gitlab/config/gitlab.yml` and add or amend the following lines:
+
+ ```yaml
+ uploads:
+ storage_path: /mnt/storage
+ base_dir: uploads
+ ```
+
+1. Save the file and [restart GitLab][] for the changes to take effect.
+
+### Using object storage
+
+>**Notes:**
+- [Introduced][ee-3867] in [GitLab Enterprise Edition Premium][eep] 10.5.
+
+If you don't want to use the local disk where GitLab is installed to store the
+uploads, you can use an object storage provider like AWS S3 instead.
+This configuration relies on valid AWS credentials to be configured already.
+
+### Object Storage Settings
+
+For source installations the following settings are nested under `uploads:` and then `object_store:`. On omnibus installs they are prefixed by `uploads_object_store_`.
+
+| Setting | Description | Default |
+|---------|-------------|---------|
+| `enabled` | Enable/disable object storage | `false` |
+| `remote_directory` | The bucket name where Uploads will be stored| |
+| `background_upload` | Set to false to disable automatic upload. Option may be removed once upload is direct to S3 | `true` |
+| `proxy_download` | Set to `true` to proxy all files served through GitLab. Leaving it as `false` reduces egress traffic, because clients download directly from the remote storage instead of having all data proxied | `false` |
+| `connection` | Various connection options described below | |
+
+#### S3 compatible connection settings
+
+The connection settings match those provided by [Fog](https://github.com/fog), and are as follows:
+
+| Setting | Description | Default |
+|---------|-------------|---------|
+| `provider` | Always `AWS` for compatible hosts | AWS |
+| `aws_access_key_id` | AWS credentials, or compatible | |
+| `aws_secret_access_key` | AWS credentials, or compatible | |
+| `region` | AWS region | us-east-1 |
+| `host` | S3 compatible host for when not using AWS, e.g. `localhost` or `storage.example.com` | s3.amazonaws.com |
+| `endpoint` | Can be used when configuring an S3 compatible service such as [Minio](https://www.minio.io), by entering a URL such as `http://127.0.0.1:9000` | (optional) |
+| `path_style` | Set to true to use `host/bucket_name/object` style paths instead of `bucket_name.host/object`. Leave as false for AWS S3 | false |
+
+**In Omnibus installations:**
+
+_The uploads are stored by default in
+`/var/opt/gitlab/gitlab-rails/public/uploads/-/system`._
+
+1. Edit `/etc/gitlab/gitlab.rb` and add the following lines by replacing with
+ the values you want:
+
+ ```ruby
+ gitlab_rails['uploads_object_store_enabled'] = true
+ gitlab_rails['uploads_object_store_remote_directory'] = "uploads"
+ gitlab_rails['uploads_object_store_connection'] = {
+ 'provider' => 'AWS',
+ 'region' => 'eu-central-1',
+ 'aws_access_key_id' => 'AWS_ACCESS_KEY_ID',
+ 'aws_secret_access_key' => 'AWS_SECRET_ACCESS_KEY'
+ }
+ ```
+
+>**Note:**
+If you are using AWS IAM profiles, be sure to omit the AWS access key and secret access key/value pairs.
+
+ ```ruby
+ gitlab_rails['uploads_object_store_connection'] = {
+ 'provider' => 'AWS',
+ 'region' => 'eu-central-1',
+ 'use_iam_profile' => true
+ }
+ ```
+
+1. Save the file and [reconfigure GitLab][] for the changes to take effect.
+1. Migrate any existing local uploads to the object storage:
+
+>**Notes:**
+This task complies with the `BATCH` environment variable to process uploads in batches (200 by default). All of the processing will be done in a background worker and requires **no downtime**.
+
+ ```bash
+ # gitlab-rake gitlab:uploads:migrate[uploader_class, model_class, mount_point]
+
+ # Avatars
+ gitlab-rake "gitlab:uploads:migrate[AvatarUploader, Project, :avatar]"
+ gitlab-rake "gitlab:uploads:migrate[AvatarUploader, Group, :avatar]"
+ gitlab-rake "gitlab:uploads:migrate[AvatarUploader, User, :avatar]"
+
+ # Attachments
+ gitlab-rake "gitlab:uploads:migrate[AttachmentUploader, Note, :attachment]"
+ gitlab-rake "gitlab:uploads:migrate[AttachmentUploader, Appearance, :logo]"
+ gitlab-rake "gitlab:uploads:migrate[AttachmentUploader, Appearance, :header_logo]"
+
+ # Markdown
+ gitlab-rake "gitlab:uploads:migrate[FileUploader, Project]"
+ gitlab-rake "gitlab:uploads:migrate[PersonalFileUploader, Snippet]"
+ gitlab-rake "gitlab:uploads:migrate[NamespaceFileUploader, Snippet]"
+ gitlab-rake "gitlab:uploads:migrate[FileUploader, MergeRequest]"
+ ```
+
+ Currently this has to be executed manually and it will allow you to
+ migrate the existing uploads to the object storage, but all new
+ uploads will still be stored on the local disk. In the future
+ you will be given an option to define a default storage for all
+ new files.
+
+---
+
+**In installations from source:**
+
+_The uploads are stored by default in
+`/home/git/gitlab/public/uploads/-/system`._
+
+1. Edit `/home/git/gitlab/config/gitlab.yml` and add or amend the following
+ lines:
+
+ ```yaml
+ uploads:
+ object_store:
+ enabled: true
+ remote_directory: "uploads" # The bucket name
+ connection:
+ provider: AWS # Only AWS supported at the moment
+ aws_access_key_id: AWS_ACCESS_KEY_ID
+ aws_secret_access_key: AWS_SECRET_ACCESS_KEY
+ region: eu-central-1
+ ```
+
+1. Save the file and [restart GitLab][] for the changes to take effect.
+1. Migrate any existing local uploads to the object storage:
+
+>**Notes:**
+
+- This task complies with the `BATCH` environment variable to process uploads in batches (200 by default). All of the processing will be done in a background worker and requires **no downtime**.
+
+- To migrate in production, use the `RAILS_ENV=production` environment variable.
+
+ ```bash
+ # sudo -u git -H bundle exec rake gitlab:uploads:migrate
+
+ # Avatars
+ sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AvatarUploader, Project, :avatar]"
+ sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AvatarUploader, Group, :avatar]"
+ sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AvatarUploader, User, :avatar]"
+
+ # Attachments
+ sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AttachmentUploader, Note, :attachment]"
+ sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AttachmentUploader, Appearance, :logo]"
+ sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AttachmentUploader, Appearance, :header_logo]"
+
+ # Markdown
+ sudo -u git -H bundle exec rake "gitlab:uploads:migrate[FileUploader, Project]"
+ sudo -u git -H bundle exec rake "gitlab:uploads:migrate[PersonalFileUploader, Snippet]"
+ sudo -u git -H bundle exec rake "gitlab:uploads:migrate[NamespaceFileUploader, Snippet]"
+ sudo -u git -H bundle exec rake "gitlab:uploads:migrate[FileUploader, MergeRequest]"
+
+ ```
+
+ Currently this has to be executed manually and it will allow you to
+ migrate the existing uploads to the object storage, but all new
+ uploads will still be stored on the local disk. In the future
+ you will be given an option to define a default storage for all
+ new files.
+
+[reconfigure gitlab]: restart_gitlab.md#omnibus-gitlab-reconfigure "How to reconfigure Omnibus GitLab"
+[restart gitlab]: restart_gitlab.md#installations-from-source "How to restart GitLab"
+[eep]: https://about.gitlab.com/gitlab-ee/ "GitLab Enterprise Edition Premium"
+[ee-3867]: https://gitlab.com/gitlab-org/gitlab-ee/merge_requests/3867
diff --git a/doc/api/commits.md b/doc/api/commits.md
index 55c673fd06a..a6b96ba539f 100644
--- a/doc/api/commits.md
+++ b/doc/api/commits.md
@@ -536,6 +536,74 @@ Example response:
}
```
+## List Merge Requests associated with a commit
+
+Get a list of Merge Requests related to the specified commit.
+
+```
+GET /projects/:id/repository/commits/:sha/merge_requests
+```
+
+| Attribute | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user
+| `sha` | string | yes | The commit SHA
+
+
+```bash
+curl --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" "https://gitlab.example.com/api/v4/projects/5/repository/commits/af5b13261899fb2c0db30abdd0af8b07cb44fdc5/merge_requests"
+```
+
+Example response:
+
+```json
+[
+ {
+ "id":45,
+ "iid":1,
+ "project_id":35,
+ "title":"Add new file",
+ "description":"",
+ "state":"opened",
+ "created_at":"2018-03-26T17:26:30.916Z",
+ "updated_at":"2018-03-26T17:26:30.916Z",
+ "target_branch":"master",
+ "source_branch":"test-branch",
+ "upvotes":0,
+ "downvotes":0,
+ "author" : {
+ "web_url" : "https://gitlab.example.com/thedude",
+ "name" : "Jeff Lebowski",
+ "avatar_url" : "https://gitlab.example.com/uploads/user/avatar/28/The-Big-Lebowski-400-400.png",
+ "username" : "thedude",
+ "state" : "active",
+ "id" : 28
+ },
+ "assignee":null,
+ "source_project_id":35,
+ "target_project_id":35,
+ "labels":[ ],
+ "work_in_progress":false,
+ "milestone":null,
+ "merge_when_pipeline_succeeds":false,
+ "merge_status":"can_be_merged",
+ "sha":"af5b13261899fb2c0db30abdd0af8b07cb44fdc5",
+ "merge_commit_sha":null,
+ "user_notes_count":0,
+ "discussion_locked":null,
+ "should_remove_source_branch":null,
+ "force_remove_source_branch":false,
+ "web_url":"https://gitlab.example.com/root/test-project/merge_requests/1",
+ "time_stats":{
+ "time_estimate":0,
+ "total_time_spent":0,
+ "human_time_estimate":null,
+ "human_total_time_spent":null
+ }
+ }
+]
+```
+
[ce-6096]: https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/6096 "Multi-file commit"
[ce-8047]: https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/8047
[ce-15026]: https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/15026
diff --git a/doc/api/runners.md b/doc/api/runners.md
index 7495c6cdedb..f384ac57bfe 100644
--- a/doc/api/runners.md
+++ b/doc/api/runners.md
@@ -153,7 +153,8 @@ Example response:
"mysql"
],
"version": null,
- "access_level": "ref_protected"
+ "access_level": "ref_protected",
+ "maximum_timeout": 3600
}
```
@@ -174,6 +175,7 @@ PUT /runners/:id
| `run_untagged` | boolean | no | Flag indicating the runner can execute untagged jobs |
| `locked` | boolean | no | Flag indicating the runner is locked |
| `access_level` | string | no | The access_level of the runner; `not_protected` or `ref_protected` |
+| `maximum_timeout` | integer | no | Maximum timeout set when this Runner will handle the job |
```
curl --request PUT --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" "https://gitlab.example.com/api/v4/runners/6" --form "description=test-1-20150125-test" --form "tag_list=ruby,mysql,tag1,tag2"
@@ -211,7 +213,8 @@ Example response:
"tag2"
],
"version": null,
- "access_level": "ref_protected"
+ "access_level": "ref_protected",
+ "maximum_timeout": null
}
```
diff --git a/doc/ci/docker/using_docker_build.md b/doc/ci/docker/using_docker_build.md
index 22afcb9199d..183808641c0 100644
--- a/doc/ci/docker/using_docker_build.md
+++ b/doc/ci/docker/using_docker_build.md
@@ -1,26 +1,29 @@
-# Using Docker Build
+# Building Docker images with GitLab CI/CD
-GitLab CI allows you to use Docker Engine to build and test docker-based projects.
+GitLab CI/CD allows you to use Docker Engine to build and test docker-based projects.
-**This also allows to you to use `docker-compose` and other docker-enabled tools.**
+TIP: **Tip:**
+This also allows you to use `docker-compose` and other docker-enabled tools.
One of the new trends in Continuous Integration/Deployment is to:
-1. create an application image,
-1. run tests against the created image,
-1. push image to a remote registry, and
-1. deploy to a server from the pushed image.
+1. Create an application image
+1. Run tests against the created image
+1. Push image to a remote registry
+1. Deploy to a server from the pushed image
-It's also useful when your application already has the `Dockerfile` that can be used to create and test an image:
+It's also useful when your application already has the `Dockerfile` that can be
+used to create and test an image:
```bash
-$ docker build -t my-image dockerfiles/
-$ docker run my-docker-image /script/to/run/tests
-$ docker tag my-image my-registry:5000/my-image
-$ docker push my-registry:5000/my-image
+docker build -t my-image dockerfiles/
+docker run my-docker-image /script/to/run/tests
+docker tag my-image my-registry:5000/my-image
+docker push my-registry:5000/my-image
```
-This requires special configuration of GitLab Runner to enable `docker` support during jobs.
+This requires special configuration of GitLab Runner to enable `docker` support
+during jobs.
## Runner Configuration
@@ -74,8 +77,8 @@ GitLab Runner then executes job scripts as the `gitlab-runner` user.
5. You can now use `docker` command and install `docker-compose` if needed.
-> **Note:**
-* By adding `gitlab-runner` to the `docker` group you are effectively granting `gitlab-runner` full root permissions.
+NOTE: **Note:**
+By adding `gitlab-runner` to the `docker` group you are effectively granting `gitlab-runner` full root permissions.
For more information please read [On Docker security: `docker` group considered harmful](https://www.andreas-jung.com/contents/on-docker-security-docker-group-considered-harmful).
### Use docker-in-docker executor
@@ -259,8 +262,66 @@ aware of the following implications:
docker run --rm -t -i -v $(pwd)/src:/home/app/src test-image:latest run_app_tests
```
+## Making docker-in-docker builds faster with Docker layer caching
+
+When using docker-in-docker, Docker will download all layers of your image every
+time you create a build. Recent versions of Docker (Docker 1.13 and above) can
+use a pre-existing image as a cache during the `docker build` step, considerably
+speeding up the build process.
+
+### How Docker caching works
+
+When running `docker build`, each command in `Dockerfile` results in a layer.
+These layers are kept around as a cache and can be reused if there haven't been
+any changes. Change in one layer causes all subsequent layers to be recreated.
+
+You can specify a tagged image to be used as a cache source for the `docker build`
+command by using the `--cache-from` argument. Multiple images can be specified
+as a cache source by using multiple `--cache-from` arguments. Keep in mind that
+any image that's used with the `--cache-from` argument must first be pulled
+(using `docker pull`) before it can be used as a cache source.
+
+### Using Docker caching
+
+Here's a simple `.gitlab-ci.yml` file showing how Docker caching can be utilized:
+
+```yaml
+image: docker:latest
+
+services:
+ - docker:dind
+
+variables:
+ CONTAINER_IMAGE: registry.gitlab.com/$CI_PROJECT_PATH
+ DOCKER_DRIVER: overlay2
+
+before_script:
+ - docker login -u gitlab-ci-token -p $CI_JOB_TOKEN registry.gitlab.com
+
+build:
+ stage: build
+ script:
+ - docker pull $CONTAINER_IMAGE:latest || true
+ - docker build --cache-from $CONTAINER_IMAGE:latest --tag $CONTAINER_IMAGE:$CI_BUILD_REF --tag $CONTAINER_IMAGE:latest .
+ - docker push $CONTAINER_IMAGE:$CI_BUILD_REF
+ - docker push $CONTAINER_IMAGE:latest
+```
+
+The steps in the `script` section for the `build` stage can be summed up as follows:
+
+1. The first command tries to pull the image from the registry so that it can be
+ used as a cache for the `docker build` command.
+1. The second command builds a Docker image using the pulled image as a
+ cache (notice the `--cache-from $CONTAINER_IMAGE:latest` argument) if
+ available, and tags it.
+1. The last two commands push the tagged Docker images to the container registry
+ so that they may also be used as cache for subsequent builds.
+
## Using the OverlayFS driver
+NOTE: **Note:**
+The shared Runners on GitLab.com use the `overlay2` driver by default.
+
By default, when using `docker:dind`, Docker uses the `vfs` storage driver which
copies the filesystem on every run. This is a very disk-intensive operation
which can be avoided if a different driver is used, for example `overlay2`.
diff --git a/doc/ci/examples/README.md b/doc/ci/examples/README.md
index c1e258aedca..de60cd27cd1 100644
--- a/doc/ci/examples/README.md
+++ b/doc/ci/examples/README.md
@@ -49,6 +49,10 @@ There's also a collection of repositories with [example projects](https://gitlab
**(Ultimate)** [Scan your code for vulnerabilities](https://docs.gitlab.com/ee/ci/examples/sast.html)
+## Dependency Scanning
+
+**(Ultimate)** [Scan your dependencies for vulnerabilities](https://docs.gitlab.com/ee/ci/examples/dependency_scanning.html)
+
## Container Scanning
[Scan your Docker images for vulnerabilities](container_scanning.md)
diff --git a/doc/ci/examples/code_climate.md b/doc/ci/examples/code_climate.md
index ec5e5afb8c6..64a759a9a99 100644
--- a/doc/ci/examples/code_climate.md
+++ b/doc/ci/examples/code_climate.md
@@ -15,13 +15,8 @@ codequality:
services:
- docker:dind
script:
- - docker pull codeclimate/codeclimate
- export SP_VERSION=$(echo "$CI_SERVER_VERSION" | sed 's/^\([0-9]*\)\.\([0-9]*\).*/\1-\2-stable/')
- - docker run
- --env SOURCE_CODE="$PWD" \
- --volume "$PWD":/code \
- --volume /var/run/docker.sock:/var/run/docker.sock \
- "registry.gitlab.com/gitlab-org/security-products/codequality:$SP_VERSION" /code
+ - docker run --env SOURCE_CODE="$PWD" --volume "$PWD":/code --volume /var/run/docker.sock:/var/run/docker.sock "registry.gitlab.com/gitlab-org/security-products/codequality:$SP_VERSION" /code
artifacts:
paths: [codeclimate.json]
```
diff --git a/doc/ci/quick_start/README.md b/doc/ci/quick_start/README.md
index e504b81eae8..f64e868d390 100644
--- a/doc/ci/quick_start/README.md
+++ b/doc/ci/quick_start/README.md
@@ -104,8 +104,8 @@ Jobs are used to create jobs, which are then picked by
What is important is that each job is run independently from each other.
-If you want to check whether your `.gitlab-ci.yml` file is valid, there is a
-Lint tool under the page `/ci/lint` of your GitLab instance. You can also find
+If you want to check whether the `.gitlab-ci.yml` of your project is valid, there is a
+Lint tool available at `/ci/lint` under your project's namespace. You can also find
a "CI Lint" button to go to this page under **CI/CD âž” Pipelines** and
**Pipelines âž” Jobs** in your project.
diff --git a/doc/ci/runners/README.md b/doc/ci/runners/README.md
index 7a7b50b294d..b91aa334ff3 100644
--- a/doc/ci/runners/README.md
+++ b/doc/ci/runners/README.md
@@ -231,6 +231,38 @@ To make a Runner pick tagged/untagged jobs:
1. Check the **Run untagged jobs** option
1. Click **Save changes** for the changes to take effect
+### Setting maximum job timeout for a Runner
+
+For each Runner you can specify a _maximum job timeout_. Such a timeout,
+if smaller than the [project defined timeout], takes precedence. This
+feature can be used to prevent a Shared Runner from being monopolized by
+a project that sets an unreasonably long timeout (e.g. one week).
+
+When not configured, the Runner will not override the project timeout.
+
+How this feature will work:
+
+**Example 1 - Runner timeout bigger than project timeout**
+
+1. You set the _maximum job timeout_ for a Runner to 24 hours
+1. You set the _CI/CD Timeout_ for a project to **2 hours**
+1. You start a job
+1. The job, if it runs longer, will time out after **2 hours**
+
+**Example 2 - Runner timeout not configured**
+
+1. You remove the _maximum job timeout_ configuration from a Runner
+1. You set the _CI/CD Timeout_ for a project to **2 hours**
+1. You start a job
+1. The job, if it runs longer, will time out after **2 hours**
+
+**Example 3 - Runner timeout smaller than project timeout**
+
+1. You set the _maximum job timeout_ for a Runner to **30 minutes**
+1. You set the _CI/CD Timeout_ for a project to 2 hours
+1. You start a job
+1. The job, if it runs longer, will time out after **30 minutes**
+
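+A Runner's maximum job timeout can also be set through the
+[Runners API](../../api/runners.md) by passing the `maximum_timeout`
+parameter (in seconds) to `PUT /runners/:id`. A minimal sketch, reusing the
+placeholder token from the API examples:
+
+```bash
+# Set a 24 hour (86400 second) maximum job timeout for Runner 6
+curl --request PUT --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" \
+     "https://gitlab.example.com/api/v4/runners/6" --form "maximum_timeout=86400"
+```
+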
### Be careful with sensitive information
With some [Runner Executors](https://docs.gitlab.com/runner/executors/README.html),
@@ -259,12 +291,6 @@ Mentioned briefly earlier, but the following things of Runners can be exploited.
We're always looking for contributions that can mitigate these
[Security Considerations](https://docs.gitlab.com/runner/security/).
-[install]: http://docs.gitlab.com/runner/install/
-[fifo]: https://en.wikipedia.org/wiki/FIFO_(computing_and_electronics)
-[register]: http://docs.gitlab.com/runner/register/
-[protected branches]: ../../user/project/protected_branches.md
-[protected tags]: ../../user/project/protected_tags.md
-
## Determining the IP address of a Runner
> [Introduced](https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/17286) in GitLab 10.6.
@@ -297,3 +323,10 @@ You can find the IP address of a Runner for a specific project by:
1. On the details page you should see a row for "IP Address"
![specific Runner IP address](img/specific_runner_ip_address.png)
+
+[install]: http://docs.gitlab.com/runner/install/
+[fifo]: https://en.wikipedia.org/wiki/FIFO_(computing_and_electronics)
+[register]: http://docs.gitlab.com/runner/register/
+[protected branches]: ../../user/project/protected_branches.md
+[protected tags]: ../../user/project/protected_tags.md
+[project defined timeout]: ../../user/project/pipelines/settings.html#timeout
diff --git a/doc/ci/yaml/README.md b/doc/ci/yaml/README.md
index 3382fbc2d12..9aa443fa69d 100644
--- a/doc/ci/yaml/README.md
+++ b/doc/ci/yaml/README.md
@@ -1545,8 +1545,9 @@ capitalization, the commit will be created but the pipeline will be skipped.
## Validate the .gitlab-ci.yml
-Each instance of GitLab CI has an embedded debug tool called Lint.
-You can find the link under `/ci/lint` of your gitlab instance.
+Each instance of GitLab CI has an embedded debug tool called Lint, which validates the
+content of your `.gitlab-ci.yml` files. You can find the Lint tool at `ci/lint` under your
+project namespace (e.g. `http://gitlab-example.com/gitlab-org/project-123/ci/lint`).
## Using reserved keywords
diff --git a/doc/development/ee_features.md b/doc/development/ee_features.md
index fea92e740cb..3ba03d2d591 100644
--- a/doc/development/ee_features.md
+++ b/doc/development/ee_features.md
@@ -33,6 +33,26 @@ rest of the code should be as close to the CE files as possible.
[single code base]: https://gitlab.com/gitlab-org/gitlab-ee/issues/2952#note_41016454
+### EE-specific comments
+
+When complete separation can't be achieved with the `ee/` directory, you can wrap
+code in EE-specific comments to mark the difference between CE and EE and add
+some context for someone resolving a conflict.
+
+```rb
+# EE-specific start
+stub_licensed_features(variable_environment_scope: true)
+# EE-specific end
+```
+
+```haml
+-# EE-specific start
+= render 'ci/variables/environment_scope', form_field: form_field, variable: variable
+-# EE-specific end
+```
+
+EE-specific comments should not be backported to CE.
+
### Detection of EE-only files
For each commit (except on `master`), the `ee-files-location-check` CI job tries
@@ -350,6 +370,255 @@ class beneath the `EE` module just as you would normally.
For example, if CE has LDAP classes in `lib/gitlab/ldap/` then you would place
EE-specific LDAP classes in `ee/lib/ee/gitlab/ldap`.
+### Code in `lib/api/`
+
+It can be very tricky to extend EE features with a single `prepend` line,
+and each [Grape](https://github.com/ruby-grape/grape) feature might need a
+different strategy to extend it. To apply those strategies easily, we use
+`extend ActiveSupport::Concern` in the EE module.
+
+Put the EE module files following
+[EE features based on CE features](#ee-features-based-on-ce-features).
+
+#### EE API routes
+
+For EE API routes, we put them in a `prepended` block:
+
+``` ruby
+module EE
+ module API
+ module MergeRequests
+ extend ActiveSupport::Concern
+
+ prepended do
+ params do
+ requires :id, type: String, desc: 'The ID of a project'
+ end
+ resource :projects, requirements: ::API::API::PROJECT_ENDPOINT_REQUIREMENTS do
+ # ...
+ end
+ end
+ end
+ end
+end
+```
+
+Note that due to namespace differences, we need to use the full qualifier for some
+constants.
+
+#### EE params
+
+We can define `params` and utilize `use` in another `params` definition to
+include params defined in EE. However, we need to define the "interface" first
+in CE in order for EE to override it. We don't have to do this elsewhere
+thanks to `prepend`, but Grape is complex internally and we couldn't easily do
+that here, so we follow regular object-oriented practice and define the
+interface first.
+
+For example, suppose we have a few more optional params for EE, given this CE
+API code:
+
+``` ruby
+module API
+ class MergeRequests < Grape::API
+ # EE::API::MergeRequests would override the following helpers
+ helpers do
+ params :optional_params_ee do
+ end
+ end
+
+ prepend EE::API::MergeRequests
+
+ params :optional_params do
+ # CE specific params go here...
+
+ use :optional_params_ee
+ end
+ end
+end
+```
+
+And then we could override it in the EE module:
+
+``` ruby
+module EE
+ module API
+ module MergeRequests
+ extend ActiveSupport::Concern
+
+ prepended do
+ helpers do
+ params :optional_params_ee do
+ # EE specific params go here...
+ end
+ end
+ end
+ end
+ end
+end
+```
+
+This way, the only difference between CE and EE for that API file would be
+`prepend EE::API::MergeRequests`.
+
+#### EE helpers
+
+To make it easy for an EE module to override the CE helpers, we need to define
+the helpers we want to extend first. Try to do that immediately after the
+class definition to make them easy to find:
+
+``` ruby
+module API
+ class JobArtifacts < Grape::API
+ # EE::API::JobArtifacts would override the following helpers
+ helpers do
+ def authorize_download_artifacts!
+ authorize_read_builds!
+ end
+ end
+
+ prepend EE::API::JobArtifacts
+ end
+end
+```
+
+And then we can follow regular object-oriented practices to override it:
+
+``` ruby
+module EE
+ module API
+ module JobArtifacts
+ extend ActiveSupport::Concern
+
+ prepended do
+ helpers do
+ def authorize_download_artifacts!
+ super
+ check_cross_project_pipelines_feature!
+ end
+ end
+ end
+ end
+ end
+end
+```
+
+#### EE-specific behaviour
+
+Sometimes we need EE-specific behaviour in some of the APIs. Normally we could
+use EE methods to override CE methods; however, API routes are not methods and
+therefore can't simply be overridden. We need to extract the logic into a
+standalone method, or introduce "hooks" where we can inject behaviour into the
+CE route. Something like this:
+
+``` ruby
+module API
+ class MergeRequests < Grape::API
+ helpers do
+ # EE::API::MergeRequests would override the following helpers
+ def update_merge_request_ee(merge_request)
+ end
+ end
+
+ prepend EE::API::MergeRequests
+
+ put ':id/merge_requests/:merge_request_iid/merge' do
+ merge_request = find_project_merge_request(params[:merge_request_iid])
+
+ # ...
+
+ update_merge_request_ee(merge_request)
+
+ # ...
+ end
+ end
+end
+```
+
+Note that `update_merge_request_ee` doesn't do anything in CE, but
+we can then override it in EE:
+
+``` ruby
+module EE
+ module API
+ module MergeRequests
+ extend ActiveSupport::Concern
+
+ prepended do
+ helpers do
+ def update_merge_request_ee(merge_request)
+ # ...
+ end
+ end
+ end
+ end
+ end
+end
+```
+
+#### EE `route_setting`
+
+`route_setting` is very hard to extend from an EE module, and it only stores
+some metadata for a particular route. Given that, we can simply leave the
+EE `route_setting` in CE: it won't hurt, and CE just won't use that
+metadata.
+
+We could revisit this policy once we use `route_setting` more often and need to
+decide whether it's worth extending from EE. For now we're not using it much.
+
+#### Utilizing class methods for setting up EE-specific data
+
+Sometimes we need to use different arguments for a particular API route, and we
+can't easily extend it with an EE module because Grape has a different context
+in different blocks. To overcome this, we can use class methods from the
+API class.
+
+For example, in one place we need to pass an extra argument to
+`at_least_one_of` so that the API accepts an EE-only argument as one that
+satisfies the `at_least_one_of` requirement. It's not pretty, but it works:
+
+``` ruby
+module API
+ class MergeRequests < Grape::API
+ def self.update_params_at_least_one_of
+ %i[
+ assignee_id
+ description
+ ]
+ end
+
+ prepend EE::API::MergeRequests
+
+ params do
+ at_least_one_of(*::API::MergeRequests.update_params_at_least_one_of)
+ end
+ end
+end
+```
+
+And then we could easily extend that argument in the EE class method:
+
+``` ruby
+module EE
+ module API
+ module MergeRequests
+ extend ActiveSupport::Concern
+
+ class_methods do
+ def update_params_at_least_one_of
+ super.push(*%i[
+ squash
+ ])
+ end
+ end
+ end
+ end
+end
+```
+
+It could be annoying if we need this for a lot of routes, but it might be the
+simplest solution right now.
+
### Code in `spec/`
When you're testing EE-only features, avoid adding examples to the
@@ -405,12 +674,13 @@ to avoid conflicts during CE to EE merge.
}
}
-/* EE-specific styles */
+// EE-specific start
.section-body.ee-section-body {
.section-title {
background: $gl-header-color-cyan;
}
}
+// EE-specific end
```
## gitlab-svgs
diff --git a/doc/development/new_fe_guide/style/javascript.md b/doc/development/new_fe_guide/style/javascript.md
index 480d50a211f..57efd9353bc 100644
--- a/doc/development/new_fe_guide/style/javascript.md
+++ b/doc/development/new_fe_guide/style/javascript.md
@@ -1,3 +1,195 @@
# JavaScript style guide
-> TODO: Add content
+We use [Airbnb's JavaScript Style Guide][airbnb-style-guide] and its accompanying linter to manage most of our JavaScript style guidelines.
+
+In addition to the style guidelines set by Airbnb, we also have a few specific rules listed below.
+
+> **Tip:**
+You can run ESLint locally by running `yarn eslint`.
+
+## Arrays
+
+<a name="avoid-foreach"></a><a name="1.1"></a>
+- [1.1](#avoid-foreach) **Avoid `forEach` when mutating data** Use `map`, `reduce` or `filter` instead of `forEach` when mutating data. This will minimize mutations in functions ([which is aligned with Airbnb's style guide][airbnb-minimize-mutations]).
+
+```javascript
+// bad
+users.forEach((user, index) => {
+ user.id = index;
+});
+
+// good
+const usersWithId = users.map((user, index) => {
+ return Object.assign({}, user, { id: index });
+});
+```
+
+## Functions
+
+<a name="limit-params"></a><a name="2.1"></a>
+- [2.1](#limit-params) **Limit number of parameters** If your function or method has more than 3 parameters, use an object as a parameter instead.
+
+```javascript
+// bad
+function a(p1, p2, p3, p4) {
+  // ...
+}
+
+// good
+function a(p) {
+  // ...
+}
+```
+
+## Classes & constructors
+
+<a name="avoid-constructor-side-effects"></a><a name="3.1"></a>
+- [3.1](#avoid-constructor-side-effects) **Avoid side effects in constructors** Avoid performing operations such as asynchronous calls, API requests and DOM manipulation in the `constructor`. Prefer moving them into separate functions. This will make tests easier to write and the code easier to maintain.
+
+ ```javascript
+ // bad
+ class myClass {
+ constructor(config) {
+ this.config = config;
+ axios.get(this.config.endpoint)
+ }
+ }
+
+ // good
+ class myClass {
+ constructor(config) {
+ this.config = config;
+ }
+
+ makeRequest() {
+ axios.get(this.config.endpoint)
+ }
+ }
+ const instance = new myClass();
+ instance.makeRequest();
+
+ ```
+
+<a name="avoid-classes-to-handle-dom-events"></a><a name="3.2"></a>
+- [3.2](#avoid-classes-to-handle-dom-events) **Avoid classes to handle DOM events** If the only purpose of the class is to bind a DOM event and handle the callback, prefer using a function.
+
+```javascript
+// bad
+class myClass {
+ constructor(config) {
+ this.config = config;
+ }
+
+ init() {
+ document.addEventListener('click', () => {});
+ }
+}
+
+// good
+
+const myFunction = () => {
+ document.addEventListener('click', () => {
+ // handle callback here
+ });
+}
+```
+
+<a name="element-container"></a><a name="3.3"></a>
+- [3.3](#element-container) **Pass element container to constructor** When your class manipulates the DOM, receive the element container as a parameter.
+This is more maintainable and performant.
+
+```javascript
+// bad
+class a {
+ constructor() {
+ document.querySelector('.b');
+ }
+}
+
+// good
+class a {
+ constructor(options) {
+ options.container.querySelector('.b');
+ }
+}
+```
+
+## Type Casting & Coercion
+
+<a name="use-parseint"></a><a name="4.1"></a>
+- [4.1](#use-parseint) **Use parseInt** Use `parseInt` when converting a numeric string into a number.
+
+```javascript
+// bad
+Number('10')
+
+
+// good
+parseInt('10', 10);
+```
+
+## CSS Selectors
+
+<a name="use-js-prefix"></a><a name="5.1"></a>
+- [5.1](#use-js-prefix) **Use js prefix** If a CSS class is only being used in JavaScript as a reference to the element, prefix the class name with `js-`
+
+```
+// bad
+<button class="add-user"></button>
+
+// good
+<button class="js-add-user"></button>
+```
+
+## Modules
+
+<a name="use-absolute-paths"></a><a name="6.1"></a>
+- [6.1](#use-absolute-paths) **Use absolute paths for nearby modules** Use absolute paths if the module you are importing is less than two levels up.
+
+```javascript
+// bad
+import GitLabStyleGuide from '~/guides/GitLabStyleGuide';
+
+// good
+import GitLabStyleGuide from '../GitLabStyleGuide';
+```
+
+<a name="use-relative-paths"></a><a name="6.2"></a>
+- [6.2](#use-relative-paths) **Use relative paths for distant modules** If the module you are importing is two or more levels up, use a relative path instead of an absolute path.
+
+```javascript
+// bad
+import GitLabStyleGuide from '../../../guides/GitLabStyleGuide';
+
+// good
+import GitLabStyleGuide from '~/GitLabStyleGuide';
+```
+
+<a name="global-namespace"></a><a name="6.3"></a>
+- [6.3](#global-namespace) **Do not add to global namespace**
+
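+For instance, a minimal sketch of what this means in practice (the `formatUserName` helper is hypothetical):
+
+```javascript
+// bad - attaches the helper to the global window object
+window.formatUserName = user => `${user.firstName} ${user.lastName}`;
+
+// good - export the helper and import it where it is needed
+export const formatUserName = user => `${user.firstName} ${user.lastName}`;
+```
+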
+<a name="domcontentloaded"></a><a name="6.4"></a>
+- [6.4](#domcontentloaded) **Do not use DOMContentLoaded in non-page modules** Imported modules should act the same each time they are loaded. `DOMContentLoaded` events are only allowed on modules loaded in the `/pages/*` directory because those are loaded dynamically with webpack.
+
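+A small sketch of the difference (the `initDropdowns` helper and the `dropdowns-ready` class are hypothetical):
+
+```javascript
+// bad - a shared module runs a DOMContentLoaded side effect as soon as it is imported
+document.addEventListener('DOMContentLoaded', () => {
+  document.body.classList.add('dropdowns-ready');
+});
+
+// good - the shared module only exports a function...
+export function initDropdowns() {
+  document.body.classList.add('dropdowns-ready');
+}
+
+// ...and a module under /pages/* decides when to call it, for example:
+// document.addEventListener('DOMContentLoaded', initDropdowns);
+```
+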
+## Security
+
+<a name="avoid-xss"></a><a name="7.1"></a>
+- [7.1](#avoid-xss) **Avoid XSS** Do not use `innerHTML`, `append()` or `html()` to set content. It opens up too many vulnerabilities.
+
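+For example, a minimal sketch of the safer alternative (the `.js-user-bio` element is hypothetical):
+
+```javascript
+const userInput = '<img src=x onerror=alert(1)>';
+const container = document.querySelector('.js-user-bio');
+
+// bad - the string is parsed as HTML and the payload runs
+container.innerHTML = userInput;
+
+// good - the string is rendered as plain text
+container.textContent = userInput;
+```
+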
+## ESLint
+
+<a name="disable-eslint-file"></a><a name="8.1"></a>
+- [8.1](#disable-eslint-file) **Disabling ESLint in new files** Do not disable ESLint when creating new files. Existing files may have rules disabled for legacy compatibility reasons, but they are in the process of being refactored.
+
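+In other words, a new file should not start like this (a sketch of the anti-pattern; `addUser` is hypothetical):
+
+```javascript
+/* eslint-disable */
+// bad - the comment above silences every rule for the entire new file
+export function addUser(user) {
+  return Object.assign({}, user, { active: true });
+}
+```
+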
+<a name="disable-eslint-rule"></a><a name="8.2"></a>
+- [8.2](#disable-eslint-rule) **Disabling ESLint rules** Do not disable specific ESLint rules. Due to technical debt, you may disable the following rules only when invoking or instantiating existing code modules:
+
+ - [no-new][no-new]
+  - [class-methods-use-this][class-method-use-this]
+
+> Note: Disable these rules on a per-line basis. This makes it easier to refactor in the future. E.g. use `eslint-disable-next-line` or `eslint-disable-line`.
+
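+A short sketch of per-line disabling for the two permitted rules (`LegacyFlash` and `LegacyDropdown` are hypothetical legacy modules):
+
+```javascript
+class LegacyFlash {
+  constructor(message) {
+    document.body.insertAdjacentText('beforeend', message);
+  }
+}
+
+// eslint-disable-next-line no-new
+new LegacyFlash('Saved successfully');
+
+class LegacyDropdown {
+  // eslint-disable-next-line class-methods-use-this
+  defaultOptions() {
+    return { filterable: true };
+  }
+}
+```
+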
+[airbnb-style-guide]: https://github.com/airbnb/javascript
+[airbnb-minimize-mutations]: https://github.com/airbnb/javascript#testing--for-real
+[no-new]: http://eslint.org/docs/rules/no-new
+[class-method-use-this]: http://eslint.org/docs/rules/class-methods-use-this
diff --git a/doc/integration/omniauth.md b/doc/integration/omniauth.md
index 20087a981f9..3edde3de83d 100644
--- a/doc/integration/omniauth.md
+++ b/doc/integration/omniauth.md
@@ -32,6 +32,7 @@ contains some settings that are common for all providers.
- [Auth0](auth0.md)
- [Authentiq](../administration/auth/authentiq.md)
- [OAuth2Generic](oauth2_generic.md)
+- [JWT](../administration/auth/jwt.md)
## Initial OmniAuth Configuration
diff --git a/doc/raketasks/README.md b/doc/raketasks/README.md
index 2f916f5dea7..90187617c41 100644
--- a/doc/raketasks/README.md
+++ b/doc/raketasks/README.md
@@ -14,3 +14,4 @@ comments: false
- [Webhooks](web_hooks.md)
- [Import](import.md) of git repositories in bulk
- [Rebuild authorized_keys file](http://docs.gitlab.com/ce/raketasks/maintenance.html#rebuild-authorized_keys-file) task for administrators
+- [Migrate Uploads](../administration/raketasks/uploads/migrate.md)
diff --git a/doc/topics/autodevops/index.md b/doc/topics/autodevops/index.md
index 4dc3adc1441..e88b787187c 100644
--- a/doc/topics/autodevops/index.md
+++ b/doc/topics/autodevops/index.md
@@ -20,6 +20,7 @@ project in an easy and automatic way:
1. [Auto Test](#auto-test)
1. [Auto Code Quality](#auto-code-quality)
1. [Auto SAST (Static Application Security Testing)](#auto-sast)
+1. [Auto Dependency Scanning](#auto-dependency-scanning)
1. [Auto Container Scanning](#auto-container-scanning)
1. [Auto Review Apps](#auto-review-apps)
1. [Auto DAST (Dynamic Application Security Testing)](#auto-dast)
@@ -95,7 +96,7 @@ Auto Deploy, and Auto Monitoring will be silently skipped.
The Auto DevOps base domain is required if you want to make use of [Auto
Review Apps](#auto-review-apps) and [Auto Deploy](#auto-deploy). It is defined
-either under the project's CI/CD settings while
+either under the project's CI/CD settings while
[enabling Auto DevOps](#enabling-auto-devops) or in instance-wide settings in
the CI/CD section.
It can also be set at the project or group level as a variable, `AUTO_DEVOPS_DOMAIN`.
@@ -209,7 +210,7 @@ target branches are also
> Introduced in [GitLab Ultimate][ee] 10.3.
Static Application Security Testing (SAST) uses the
-[gl-sast Docker image](https://gitlab.com/gitlab-org/gl-sast) to run static
+[SAST Docker image](https://gitlab.com/gitlab-org/security-products/sast) to run static
analysis on the current code and checks for potential security issues. Once the
report is created, it's uploaded as an artifact which you can later download and
check out.
@@ -217,6 +218,19 @@ check out.
In GitLab Ultimate, any security warnings are also
[shown in the merge request widget](https://docs.gitlab.com/ee/user/project/merge_requests/sast.html).
+### Auto Dependency Scanning
+
+> Introduced in [GitLab Ultimate][ee] 10.7.
+
+Dependency Scanning uses the
+[Dependency Scanning Docker image](https://gitlab.com/gitlab-org/security-products/dependency-scanning)
+to run analysis on the project dependencies and checks for potential security issues. Once the
+report is created, it's uploaded as an artifact which you can later download and
+check out.
+
+In GitLab Ultimate, any security warnings are also
+[shown in the merge request widget](https://docs.gitlab.com/ee/user/project/merge_requests/dependency_scanning.html).
+
### Auto Container Scanning
> Introduced in GitLab 10.4.
diff --git a/doc/user/profile/preferences.md b/doc/user/profile/preferences.md
index 022d6317555..930e506802a 100644
--- a/doc/user/profile/preferences.md
+++ b/doc/user/profile/preferences.md
@@ -41,7 +41,7 @@ select few, the amount of activity on the default Dashboard page can be
overwhelming. Changing this setting allows you to redefine what your default
dashboard will be.
-You have 6 options here that you can use for your default dashboard view:
+You have 8 options here that you can use for your default dashboard view:
- Your projects (default)
- Starred projects
@@ -49,6 +49,8 @@ You have 6 options here that you can use for your default dashboard view:
- Starred projects' activity
- Your groups
- Your [Todos]
+- Assigned Issues
+- Assigned Merge Requests
### Project home page content
diff --git a/doc/user/project/clusters/index.md b/doc/user/project/clusters/index.md
index bd9bcfadb99..716787532fc 100644
--- a/doc/user/project/clusters/index.md
+++ b/doc/user/project/clusters/index.md
@@ -71,7 +71,7 @@ You need Master [permissions] and above to access the Kubernetes page.
To add an existing Kubernetes cluster to your project:
1. Navigate to your project's **CI/CD > Kubernetes** page.
-1. Click on **Add Kuberntes cluster**.
+1. Click on **Add Kubernetes cluster**.
1. Click on **Add an existing Kubernetes cluster** and fill in the details:
- **Kubernetes cluster name** (required) - The name you wish to give the cluster.
- **Environment scope** (required)- The
@@ -101,7 +101,7 @@ To add an existing Kubernetes cluster to your project:
- If you or someone created a secret specifically for the project, usually
with limited permissions, the secret's namespace and project namespace may
be the same.
-1. Finally, click the **Create Kuberntes cluster** button.
+1. Finally, click the **Create Kubernetes cluster** button.
After a few moments, your cluster should be created. If something goes wrong,
you will be notified.
diff --git a/doc/user/project/index.md b/doc/user/project/index.md
index 175a8975ae1..f94e93dd7d8 100644
--- a/doc/user/project/index.md
+++ b/doc/user/project/index.md
@@ -128,11 +128,9 @@ and Git push/pull redirects.
Depending on the situation, different things apply.
-When [renaming a user](../profile/index.md#changing-your-username) or
-[changing a group path](../group/index.md#changing-a-group-s-path):
+When [renaming a user](../profile/index.md#changing-your-username),
+[changing a group path](../group/index.md#changing-a-group-s-path) or [renaming a repository](settings/index.md#renaming-a-repository):
-- **The redirect to the new URL is permanent**, which means that the original
- namespace can't be claimed again by any group or user.
- Existing web URLs for the namespace and anything under it (e.g., projects) will
redirect to the new URLs.
- Starting with GitLab 10.3, existing Git remote URLs for projects under the
@@ -141,9 +139,5 @@ When [renaming a user](../profile/index.md#changing-your-username) or
your remote will be displayed instead of rejecting your action.
This means that any automation scripts, or Git clients will continue to
work after a rename, making any transition a lot smoother.
- To avoid pulling from or pushing to an entirely incorrect repository, the old
- path will be reserved.
-
-When [renaming-a-repository](settings/index.md#renaming-a-repository), the same
-things apply, except for the Git push/pull actions which will be rejected with a
-warning message to change to the new remote URL.
+- The redirects will be available as long as the original path is not claimed by
+ another group, user or project.
diff --git a/doc/user/project/integrations/prometheus_library/kubernetes.md b/doc/user/project/integrations/prometheus_library/kubernetes.md
index 8ac753c07bf..6b190deaa6c 100644
--- a/doc/user/project/integrations/prometheus_library/kubernetes.md
+++ b/doc/user/project/integrations/prometheus_library/kubernetes.md
@@ -11,10 +11,17 @@ integration services must be enabled.
## Metrics supported
-| Name | Query |
-| ---- | ----- |
-| Average Memory Usage (MB) | avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024 |
-| Average CPU Utilization (%) | avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job) / count(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}[15m])) by (pod_name)) |
+- Average Memory Usage (MB):
+
+ ```
+ avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024
+ ```
+
+- Average CPU Utilization (%):
+
+ ```
+ avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job) / count(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}[15m])) by (pod_name))
+ ```
## Configuring Prometheus to monitor for Kubernetes metrics
diff --git a/doc/user/project/issue_board.md b/doc/user/project/issue_board.md
index d403d5698a9..b4a842f33d6 100644
--- a/doc/user/project/issue_board.md
+++ b/doc/user/project/issue_board.md
@@ -251,7 +251,7 @@ Different issue board features are available in different [GitLab tiers](https:/
| Tier | Number of project issue boards | Board with configuration in project issue boards | Number of group issue boards | Board with configuration in group issue boards |
| --- | --- | --- | --- | --- |
-| Libre | 1 | No | 1 | No |
+| Core | 1 | No | 1 | No |
| Starter | Multiple | Yes | 1 | No |
| Premium | Multiple | Yes | Multiple | Yes |
| Ultimate | Multiple | Yes | Multiple | Yes |
diff --git a/doc/user/project/merge_requests/img/remove_source_branch_status.png b/doc/user/project/merge_requests/img/remove_source_branch_status.png
new file mode 100644
index 00000000000..1377fab54ec
--- /dev/null
+++ b/doc/user/project/merge_requests/img/remove_source_branch_status.png
Binary files differ
diff --git a/doc/user/project/merge_requests/index.md b/doc/user/project/merge_requests/index.md
index 10d67729734..3640d236db4 100644
--- a/doc/user/project/merge_requests/index.md
+++ b/doc/user/project/merge_requests/index.md
@@ -77,6 +77,22 @@ You can [search and filter the results](../../search/index.md#issues-and-merge-r
![Group Issues list view](img/group_merge_requests_list_view.png)
+## Removing the source branch
+
+When creating a merge request, select the "Remove source branch when merge
+request accepted" option and the source branch will be removed when the merge
+request is merged.
+
+This option is also visible in an existing merge request next to the merge
+request button and can be selected/deselected before merging. It's only visible
+to users with [Master permissions](../../permissions.md) in the source project.
+
+If the user viewing the merge request does not have the correct permissions to
+remove the source branch and the source branch is set for removal, the merge
+request widget will show the "Removes source branch" text.
+
+![Remove source branch status](img/remove_source_branch_status.png)
+
## Authorization for merge requests
There are two main ways to have a merge request flow with GitLab:
diff --git a/doc/user/project/pipelines/settings.md b/doc/user/project/pipelines/settings.md
index 43451844f2d..1052c9efa25 100644
--- a/doc/user/project/pipelines/settings.md
+++ b/doc/user/project/pipelines/settings.md
@@ -27,6 +27,13 @@ The default value is 60 minutes. Decrease the time limit if you want to impose
a hard limit on your jobs' running time or increase it otherwise. In any case,
if the job surpasses the threshold, it is marked as failed.
+### Timeout overriding on Runner level
+
+> - [Introduced][ce-17221] in GitLab 10.6.
+
+The project-defined timeout (either the specific timeout set by the user or the
+default 60-minute timeout) may be [overridden at the Runner level][timeout overriding].
+
## Custom CI config path
> - [Introduced][ce-12509] in GitLab 9.4.
@@ -152,5 +159,7 @@ into your `README.md`:
[var]: ../../../ci/yaml/README.md#git-strategy
[coverage report]: #test-coverage-parsing
+[timeout overriding]: ../../../ci/runners/README.html#setting-maximum-job-timeout-for-a-runner
[ce-9362]: https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/9362
[ce-12509]: https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/12509
+[ce-17221]: https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/17221
diff --git a/doc/workflow/lfs/lfs_administration.md b/doc/workflow/lfs/lfs_administration.md
index d768b73286d..cac3cb599dd 100644
--- a/doc/workflow/lfs/lfs_administration.md
+++ b/doc/workflow/lfs/lfs_administration.md
@@ -5,6 +5,7 @@ Documentation on how to use Git LFS are under [Managing large binary files with
## Requirements
* Git LFS is supported in GitLab starting with version 8.2.
+* Support for object storage, such as AWS S3, was introduced in 10.0.
* Users need to install [Git LFS client](https://git-lfs.github.com) version 1.0.1 and up.
## Configuration
@@ -12,16 +13,18 @@ Documentation on how to use Git LFS are under [Managing large binary files with
Git LFS objects can be large in size. By default, they are stored on the server
GitLab is installed on.
-There are two configuration options to help GitLab server administrators:
+There are various configuration options to help GitLab server administrators:
* Enabling/disabling Git LFS support
* Changing the location of LFS object storage
+* Setting up AWS S3 compatible object storage
### Omnibus packages
In `/etc/gitlab/gitlab.rb`:
```ruby
+# Change to true to enable lfs
gitlab_rails['lfs_enabled'] = false
# Optionally, change the storage path location. Defaults to
@@ -35,11 +38,115 @@ gitlab_rails['lfs_storage_path'] = "/mnt/storage/lfs-objects"
In `config/gitlab.yml`:
```yaml
+# Change to true to enable lfs
lfs:
enabled: false
storage_path: /mnt/storage/lfs-objects
```
+## Setting up S3 compatible object storage
+
+> **Note:** [Introduced][ee-2760] in [GitLab Premium][eep] 10.0.
+> Available in [GitLab CE][ce] 10.7
+
+It is possible to store LFS objects on remote object storage instead of on a local disk.
+
+This allows you to offload storage to an external AWS S3 compatible service, freeing up disk space locally. You can also host your own S3 compatible storage decoupled from GitLab, with a service such as [Minio](https://www.minio.io/).
+
+Object storage currently transfers files first to GitLab, and then to the object storage in a second stage. This can be done either by using a rake task to transfer existing objects, or in a background job after each file is received.
+
+### Object Storage Settings
+
+For source installations, the following settings are nested under `lfs:` and then `object_store:`. On Omnibus installs, they are prefixed by `lfs_object_store_`.
+
+| Setting | Description | Default |
+|---------|-------------|---------|
+| `enabled` | Enable/disable object storage | `false` |
+| `remote_directory` | The bucket name where LFS objects will be stored| |
+| `direct_upload` | Set to true to enable direct upload of LFS without the need for local shared storage. Option may be removed once we decide to support only single storage for all files. | `false` |
+| `background_upload` | Set to false to disable automatic upload. Option may be removed once upload is direct to S3. | `true` |
+| `proxy_download` | Set to true to proxy all files served through GitLab. Leaving it as `false` reduces egress traffic from GitLab, because clients download directly from remote storage instead of having all data proxied. | `false` |
+| `connection` | Various connection options described below | |
+
+#### S3 compatible connection settings
+
+The connection settings match those provided by [Fog](https://github.com/fog), and are as follows:
+
+| Setting | Description | Default |
+|---------|-------------|---------|
+| `provider` | Always `AWS` for compatible hosts | AWS |
+| `aws_access_key_id` | AWS credentials, or compatible | |
+| `aws_secret_access_key` | AWS credentials, or compatible | |
+| `region` | AWS region | us-east-1 |
+| `host` | S3 compatible host for when not using AWS, e.g. `localhost` or `storage.example.com` | s3.amazonaws.com |
+| `endpoint` | Can be used when configuring an S3 compatible service such as [Minio](https://www.minio.io), by entering a URL such as `http://127.0.0.1:9000` | (optional) |
+| `path_style` | Set to true to use `host/bucket_name/object` style paths instead of `bucket_name.host/object`. Leave as false for AWS S3 | false |
+
+
+### From source
+
+1. Edit `/home/git/gitlab/config/gitlab.yml` and add or amend the following
+ lines:
+
+ ```yaml
+ lfs:
+ enabled: true
+ object_store:
+       enabled: true
+ remote_directory: lfs-objects # Bucket name
+ connection:
+ provider: AWS
+ aws_access_key_id: 1ABCD2EFGHI34JKLM567N
+ aws_secret_access_key: abcdefhijklmnopQRSTUVwxyz0123456789ABCDE
+ region: eu-central-1
+ # Use the following options to configure an AWS compatible host such as Minio
+ host: 'localhost'
+ endpoint: 'http://127.0.0.1:9000'
+ path_style: true
+ ```
+
+1. Save the file and [restart GitLab][] for the changes to take effect.
+1. Migrate any existing local LFS objects to the object storage:
+
+ ```bash
+ sudo -u git -H bundle exec rake gitlab:lfs:migrate RAILS_ENV=production
+ ```
+
+    This will migrate existing LFS objects to object storage. New LFS objects
+    will be forwarded to object storage unless `background_upload` under
+    `object_store:` is set to false.
+
+### In Omnibus
+
+1. Edit `/etc/gitlab/gitlab.rb` and add the following lines by replacing with
+ the values you want:
+
+ ```ruby
+ gitlab_rails['lfs_object_store_enabled'] = true
+ gitlab_rails['lfs_object_store_remote_directory'] = "lfs-objects"
+ gitlab_rails['lfs_object_store_connection'] = {
+ 'provider' => 'AWS',
+ 'region' => 'eu-central-1',
+ 'aws_access_key_id' => '1ABCD2EFGHI34JKLM567N',
+ 'aws_secret_access_key' => 'abcdefhijklmnopQRSTUVwxyz0123456789ABCDE',
+ # The below options configure an S3 compatible host instead of AWS
+ 'host' => 'localhost',
+ 'endpoint' => 'http://127.0.0.1:9000',
+ 'path_style' => true
+ }
+ ```
+
+1. Save the file and [reconfigure GitLab][] for the changes to take effect.
+1. Migrate any existing local LFS objects to the object storage:
+
+ ```bash
+ gitlab-rake gitlab:lfs:migrate
+ ```
+
+ This will migrate existing LFS objects to object storage. New LFS objects
+ will be forwarded to object storage unless
+ `gitlab_rails['lfs_object_store_background_upload']` is set to false.
+
## Storage statistics
You can see the total storage used for LFS objects on groups and projects
@@ -48,10 +155,13 @@ and [projects APIs](../../api/projects.md).
## Known limitations
-* Currently, storing GitLab Git LFS objects on a non-local storage (like S3 buckets)
- is not supported
* Support for removing unreferenced LFS objects was added in 8.14.
* LFS authentication via SSH was added with GitLab 8.12.
* Only compatible with the Git LFS client versions 1.1.0 and up, or 1.0.2.
* The storage statistics currently count each LFS object multiple times for
every project linking to it
+
+[reconfigure gitlab]: ../../administration/restart_gitlab.md#omnibus-gitlab-reconfigure "How to reconfigure Omnibus GitLab"
+[restart gitlab]: ../../administration/restart_gitlab.md#installations-from-source "How to restart GitLab"
+[eep]: https://about.gitlab.com/products/ "GitLab Premium"
+[ee-2760]: https://gitlab.com/gitlab-org/gitlab-ee/merge_requests/2760
diff --git a/features/groups.feature b/features/groups.feature
deleted file mode 100644
index 4044bd9be79..00000000000
--- a/features/groups.feature
+++ /dev/null
@@ -1,73 +0,0 @@
-Feature: Groups
- Background:
- Given I sign in as "John Doe"
- And "John Doe" is owner of group "Owned"
-
- Scenario: I should not see a group if it does not exist
- When I visit group "NonExistentGroup" page
- Then page status code should be 404
-
- @javascript
- Scenario: I should see group "Owned" dashboard list
- When I visit group "Owned" page
- Then I should see group "Owned" projects list
-
- @javascript
- Scenario: I should see group "Owned" activity feed
- When I visit group "Owned" activity page
- And I should see projects activity feed
-
- Scenario: I should see group "Owned" issues list
- Given project from group "Owned" has issues assigned to me
- When I visit group "Owned" issues page
- Then I should see issues from group "Owned" assigned to me
-
- Scenario: I should not see issues from archived project in "Owned" group issues list
- Given Group "Owned" has archived project
- And the archived project have some issues
- When I visit group "Owned" issues page
- Then I should not see issues from the archived project
-
- Scenario: I should see group "Owned" merge requests list
- Given project from group "Owned" has merge requests assigned to me
- When I visit group "Owned" merge requests page
- Then I should see merge requests from group "Owned" assigned to me
-
- Scenario: I should not see merge requests from archived project in "Owned" group merge requests list
- Given Group "Owned" has archived project
- And the archived project have some merge_requests
- When I visit group "Owned" merge requests page
- Then I should not see merge requests from the archived project
-
- Scenario: I edit group "Owned" avatar
- When I visit group "Owned" settings page
- And I change group "Owned" avatar
- And I visit group "Owned" settings page
- Then I should see new group "Owned" avatar
- And I should see the "Remove avatar" button
-
- Scenario: I remove group "Owned" avatar
- When I visit group "Owned" settings page
- And I have group "Owned" avatar
- And I visit group "Owned" settings page
- And I remove group "Owned" avatar
- Then I should not see group "Owned" avatar
- And I should not see the "Remove avatar" button
-
- # Group projects in settings
- Scenario: I should see all projects in the project list in settings
- Given Group "Owned" has archived project
- When I visit group "Owned" projects page
- Then I should see group "Owned" projects list
- And I should see "archived" label
-
- # Public group
- @javascript
- Scenario: Signed out user should see group
- Given "Mary Jane" is owner of group "Owned"
- And I am a signed out user
- And Group "Owned" has a public project "Public-project"
- When I visit group "Owned" page
- Then I should see group "Owned"
- Then I should see project "Public-project"
-
diff --git a/features/steps/groups.rb b/features/steps/groups.rb
deleted file mode 100644
index 753694a5392..00000000000
--- a/features/steps/groups.rb
+++ /dev/null
@@ -1,147 +0,0 @@
-class Spinach::Features::Groups < Spinach::FeatureSteps
- include SharedAuthentication
- include SharedPaths
- include SharedGroup
- include SharedUser
-
- step 'I should see group "Owned"' do
- expect(page).to have_content 'Owned'
- end
-
- step 'I am a signed out user' do
- logout
- end
-
- step 'Group "Owned" has a public project "Public-project"' do
- group = owned_group
-
- @project = create :project, :public,
- group: group,
- name: "Public-project"
- end
-
- step 'I should see project "Public-project"' do
- expect(page).to have_content 'Public-project'
- end
-
- step 'I should see group "Owned" projects list' do
- owned_group.projects.each do |project|
- expect(page).to have_link project.name
- end
- end
-
- step 'I should see projects activity feed' do
- expect(page).to have_content 'joined project'
- end
-
- step 'I should see issues from group "Owned" assigned to me' do
- assigned_to_me(:issues).each do |issue|
- expect(page).to have_content issue.title
- end
- end
-
- step 'I should not see issues from the archived project' do
- @archived_project.issues.each do |issue|
- expect(page).not_to have_content issue.title
- end
- end
-
- step 'I should not see merge requests from the archived project' do
- @archived_project.merge_requests.each do |mr|
- expect(page).not_to have_content mr.title
- end
- end
-
- step 'I should see merge requests from group "Owned" assigned to me' do
- assigned_to_me(:merge_requests).each do |issue|
- expect(page).to have_content issue.title[0..80]
- end
- end
-
- step 'project from group "Owned" has issues assigned to me' do
- create :issue,
- project: project,
- assignees: [current_user],
- author: current_user
- end
-
- step 'project from group "Owned" has merge requests assigned to me' do
- create :merge_request,
- source_project: project,
- target_project: project,
- assignee: current_user,
- author: current_user
- end
-
- step 'I change group "Owned" avatar' do
- attach_file(:group_avatar, File.join(Rails.root, 'spec', 'fixtures', 'banana_sample.gif'))
- click_button "Save group"
- owned_group.reload
- end
-
- step 'I should see new group "Owned" avatar' do
- expect(owned_group.avatar).to be_instance_of AvatarUploader
- expect(owned_group.avatar.url).to eq "/uploads/-/system/group/avatar/#{Group.find_by(name: "Owned").id}/banana_sample.gif"
- end
-
- step 'I should see the "Remove avatar" button' do
- expect(page).to have_link("Remove avatar")
- end
-
- step 'I have group "Owned" avatar' do
- attach_file(:group_avatar, File.join(Rails.root, 'spec', 'fixtures', 'banana_sample.gif'))
- click_button "Save group"
- owned_group.reload
- end
-
- step 'I remove group "Owned" avatar' do
- click_link "Remove avatar"
- owned_group.reload
- end
-
- step 'I should not see group "Owned" avatar' do
- expect(owned_group.avatar?).to eq false
- end
-
- step 'I should not see the "Remove avatar" button' do
- expect(page).not_to have_link("Remove avatar")
- end
-
- step 'Group "Owned" has archived project' do
- group = Group.find_by(name: 'Owned')
- @archived_project = create(:project, :archived, namespace: group, path: "archived-project")
- end
-
- step 'I should see "archived" label' do
- expect(page).to have_xpath("//span[@class='label label-warning']", text: 'archived')
- end
-
- step 'I visit group "NonExistentGroup" page' do
- visit group_path("NonExistentGroup")
- end
-
- step 'the archived project have some issues' do
- create :issue,
- project: @archived_project,
- assignees: [current_user],
- author: current_user
- end
-
- step 'the archived project have some merge requests' do
- create :merge_request,
- source_project: @archived_project,
- target_project: @archived_project,
- assignee: current_user,
- author: current_user
- end
-
- private
-
- def assigned_to_me(key)
- project.send(key).assigned_to(current_user)
- end
-
- def project
- owned_group.projects.first
- end
-end
diff --git a/lib/api/commits.rb b/lib/api/commits.rb
index 982f45425a3..684955a1b24 100644
--- a/lib/api/commits.rb
+++ b/lib/api/commits.rb
@@ -231,6 +231,20 @@ module API
render_api_error!("Failed to save note #{note.errors.messages}", 400)
end
end
+
+ desc 'Get Merge Requests associated with a commit' do
+ success Entities::MergeRequestBasic
+ end
+ params do
+ requires :sha, type: String, desc: 'A commit sha, or the name of a branch or tag on which to find Merge Requests'
+ use :pagination
+ end
+ get ':id/repository/commits/:sha/merge_requests', requirements: API::COMMIT_ENDPOINT_REQUIREMENTS do
+ commit = user_project.commit(params[:sha])
+ not_found! 'Commit' unless commit
+
+ present paginate(commit.merge_requests), with: Entities::MergeRequestBasic
+ end
end
end
end
diff --git a/lib/api/deploy_keys.rb b/lib/api/deploy_keys.rb
index b0b7b50998f..70d43ac1d79 100644
--- a/lib/api/deploy_keys.rb
+++ b/lib/api/deploy_keys.rb
@@ -54,7 +54,7 @@ module API
present key, with: Entities::DeployKeysProject
end
- desc 'Add new deploy key to currently authenticated user' do
+ desc 'Add new deploy key to a project' do
success Entities::DeployKeysProject
end
params do
@@ -66,33 +66,32 @@ module API
params[:key].strip!
# Check for an existing key joined to this project
- key = user_project.deploy_keys_projects
+ deploy_key_project = user_project.deploy_keys_projects
.joins(:deploy_key)
.find_by(keys: { key: params[:key] })
- if key
- present key, with: Entities::DeployKeysProject
+ if deploy_key_project
+ present deploy_key_project, with: Entities::DeployKeysProject
break
end
# Check for available deploy keys in other projects
key = current_user.accessible_deploy_keys.find_by(key: params[:key])
if key
- added_key = add_deploy_keys_project(user_project, deploy_key: key, can_push: !!params[:can_push])
+ deploy_key_project = add_deploy_keys_project(user_project, deploy_key: key, can_push: !!params[:can_push])
- present added_key, with: Entities::DeployKeysProject
+ present deploy_key_project, with: Entities::DeployKeysProject
break
end
# Create a new deploy key
- key_attributes = { can_push: !!params[:can_push],
- deploy_key_attributes: declared_params.except(:can_push) }
- key = add_deploy_keys_project(user_project, key_attributes)
+ deploy_key_attributes = declared_params.except(:can_push).merge(user: current_user)
+ deploy_key_project = add_deploy_keys_project(user_project, deploy_key_attributes: deploy_key_attributes, can_push: !!params[:can_push])
- if key.valid?
- present key, with: Entities::DeployKeysProject
+ if deploy_key_project.valid?
+ present deploy_key_project, with: Entities::DeployKeysProject
else
- render_validation_error!(key)
+ render_validation_error!(deploy_key_project)
end
end
diff --git a/lib/api/entities.rb b/lib/api/entities.rb
index 16147ee90c9..38161d1f127 100644
--- a/lib/api/entities.rb
+++ b/lib/api/entities.rb
@@ -951,6 +951,7 @@ module API
expose :tag_list
expose :run_untagged
expose :locked
+ expose :maximum_timeout
expose :access_level
expose :version, :revision, :platform, :architecture
expose :contacted_at
@@ -1119,7 +1120,7 @@ module API
end
class RunnerInfo < Grape::Entity
- expose :timeout
+ expose :metadata_timeout, as: :timeout
end
class Step < Grape::Entity
diff --git a/lib/api/helpers.rb b/lib/api/helpers.rb
index e4fca77ab5d..e59e8a45908 100644
--- a/lib/api/helpers.rb
+++ b/lib/api/helpers.rb
@@ -410,7 +410,7 @@ module API
)
end
- def present_file!(path, filename, content_type = 'application/octet-stream')
+ def present_disk_file!(path, filename, content_type = 'application/octet-stream')
filename ||= File.basename(path)
header['Content-Disposition'] = "attachment; filename=#{filename}"
header['Content-Transfer-Encoding'] = 'binary'
@@ -426,13 +426,17 @@ module API
end
end
- def present_artifacts!(artifacts_file)
- return not_found! unless artifacts_file.exists?
+ def present_carrierwave_file!(file, supports_direct_download: true)
+ return not_found! unless file.exists?
- if artifacts_file.file_storage?
- present_file!(artifacts_file.path, artifacts_file.filename)
+ if file.file_storage?
+ present_disk_file!(file.path, file.filename)
+ elsif supports_direct_download && file.class.direct_download_enabled?
+ redirect(file.url)
else
- redirect_to(artifacts_file.url)
+ header(*Gitlab::Workhorse.send_url(file.url))
+ status :ok
+ body
end
end
diff --git a/lib/api/job_artifacts.rb b/lib/api/job_artifacts.rb
index 47e5eeab31d..b1adef49d46 100644
--- a/lib/api/job_artifacts.rb
+++ b/lib/api/job_artifacts.rb
@@ -28,7 +28,7 @@ module API
builds = user_project.latest_successful_builds_for(params[:ref_name])
latest_build = builds.find_by!(name: params[:job])
- present_artifacts!(latest_build.artifacts_file)
+ present_carrierwave_file!(latest_build.artifacts_file)
end
desc 'Download the artifacts archive from a job' do
@@ -43,7 +43,7 @@ module API
build = find_build!(params[:job_id])
- present_artifacts!(build.artifacts_file)
+ present_carrierwave_file!(build.artifacts_file)
end
desc 'Download a specific file from artifacts archive' do
diff --git a/lib/api/jobs.rb b/lib/api/jobs.rb
index 9c205514b3a..60911c8d733 100644
--- a/lib/api/jobs.rb
+++ b/lib/api/jobs.rb
@@ -72,7 +72,7 @@ module API
present build, with: Entities::Job
end
- # TODO: We should use `present_file!` and leave this implementation for backward compatibility (when build trace
+ # TODO: We should use `present_disk_file!` and leave this implementation for backward compatibility (when build trace
# is saved in the DB instead of file). But before that, we need to consider how to replace the value of
# `runners_token` with some mask (like `xxxxxx`) when sending trace file directly by workhorse.
desc 'Get a trace of a specific job of a project'
diff --git a/lib/api/project_export.rb b/lib/api/project_export.rb
index b0a7fd6f4ab..efc4a33ae1b 100644
--- a/lib/api/project_export.rb
+++ b/lib/api/project_export.rb
@@ -25,7 +25,7 @@ module API
render_api_error!('404 Not found or has expired', 404) unless path
- present_file!(path, File.basename(path), 'application/gzip')
+ present_disk_file!(path, File.basename(path), 'application/gzip')
end
desc 'Start export' do
diff --git a/lib/api/protected_branches.rb b/lib/api/protected_branches.rb
index 33321db46e9..aa7cab4a741 100644
--- a/lib/api/protected_branches.rb
+++ b/lib/api/protected_branches.rb
@@ -70,7 +70,10 @@ module API
delete ':id/protected_branches/:name', requirements: BRANCH_ENDPOINT_REQUIREMENTS do
protected_branch = user_project.protected_branches.find_by!(name: params[:name])
- destroy_conditionally!(protected_branch)
+ destroy_conditionally!(protected_branch) do
+ destroy_service = ::ProtectedBranches::DestroyService.new(user_project, current_user)
+ destroy_service.execute(protected_branch)
+ end
end
end
end
diff --git a/lib/api/runner.rb b/lib/api/runner.rb
index 7e6c33ec33d..57c0a729535 100644
--- a/lib/api/runner.rb
+++ b/lib/api/runner.rb
@@ -14,9 +14,10 @@ module API
optional :locked, type: Boolean, desc: 'Should Runner be locked for current project'
optional :run_untagged, type: Boolean, desc: 'Should Runner handle untagged jobs'
optional :tag_list, type: Array[String], desc: %q(List of Runner's tags)
+ optional :maximum_timeout, type: Integer, desc: 'Maximum timeout set when this Runner will handle the job'
end
post '/' do
- attributes = attributes_for_keys([:description, :locked, :run_untagged, :tag_list])
+ attributes = attributes_for_keys([:description, :locked, :run_untagged, :tag_list, :maximum_timeout])
.merge(get_runner_details_from_request)
runner =
@@ -244,11 +245,12 @@ module API
params do
requires :id, type: Integer, desc: %q(Job's ID)
optional :token, type: String, desc: %q(Job's authentication token)
+ optional :direct_download, default: false, type: Boolean, desc: %q(Perform direct download from remote storage instead of proxying artifacts)
end
get '/:id/artifacts' do
job = authenticate_job!
- present_artifacts!(job.artifacts_file)
+ present_carrierwave_file!(job.artifacts_file, supports_direct_download: params[:direct_download])
end
end
end
diff --git a/lib/api/runners.rb b/lib/api/runners.rb
index 996457c5dfe..5f2a9567605 100644
--- a/lib/api/runners.rb
+++ b/lib/api/runners.rb
@@ -57,6 +57,7 @@ module API
optional :locked, type: Boolean, desc: 'Flag indicating the runner is locked'
optional :access_level, type: String, values: Ci::Runner.access_levels.keys,
desc: 'The access_level of the runner'
+ optional :maximum_timeout, type: Integer, desc: 'Maximum timeout set when this Runner will handle the job'
at_least_one_of :description, :active, :tag_list, :run_untagged, :locked, :access_level
end
put ':id' do
diff --git a/lib/api/v3/builds.rb b/lib/api/v3/builds.rb
index ac76fece931..683b9c993cb 100644
--- a/lib/api/v3/builds.rb
+++ b/lib/api/v3/builds.rb
@@ -85,7 +85,7 @@ module API
build = get_build!(params[:build_id])
- present_artifacts!(build.artifacts_file)
+ present_carrierwave_file!(build.artifacts_file)
end
desc 'Download the artifacts file from build' do
@@ -102,10 +102,10 @@ module API
builds = user_project.latest_successful_builds_for(params[:ref_name])
latest_build = builds.find_by!(name: params[:job])
- present_artifacts!(latest_build.artifacts_file)
+ present_carrierwave_file!(latest_build.artifacts_file)
end
- # TODO: We should use `present_file!` and leave this implementation for backward compatibility (when build trace
+ # TODO: We should use `present_disk_file!` and leave this implementation for backward compatibility (when build trace
# is saved in the DB instead of file). But before that, we need to consider how to replace the value of
# `runners_token` with some mask (like `xxxxxx`) when sending trace file directly by workhorse.
desc 'Get a trace of a specific build of a project'
diff --git a/lib/banzai/filter/autolink_filter.rb b/lib/banzai/filter/autolink_filter.rb
index ce401c1c31c..4a143baeef6 100644
--- a/lib/banzai/filter/autolink_filter.rb
+++ b/lib/banzai/filter/autolink_filter.rb
@@ -105,8 +105,12 @@ module Banzai
end
end
- options = link_options.merge(href: match)
- content_tag(:a, match.html_safe, options) + dropped
+ # match has come from node.to_html above, so we know it's encoded
+ # correctly.
+ html_safe_match = match.html_safe
+ options = link_options.merge(href: html_safe_match)
+
+ content_tag(:a, html_safe_match, options) + dropped
end
def autolink_filter(text)
diff --git a/lib/gitlab/background_migration/migrate_build_stage.rb b/lib/gitlab/background_migration/migrate_build_stage.rb
index 8fe4f1a2289..242e3143e71 100644
--- a/lib/gitlab/background_migration/migrate_build_stage.rb
+++ b/lib/gitlab/background_migration/migrate_build_stage.rb
@@ -12,6 +12,7 @@ module Gitlab
class Build < ActiveRecord::Base
self.table_name = 'ci_builds'
+ self.inheritance_column = :_type_disabled
def ensure_stage!(attempts: 2)
find_stage || create_stage!
diff --git a/lib/gitlab/bare_repository_import/importer.rb b/lib/gitlab/bare_repository_import/importer.rb
index 884a3de8f62..1a25138e7d6 100644
--- a/lib/gitlab/bare_repository_import/importer.rb
+++ b/lib/gitlab/bare_repository_import/importer.rb
@@ -63,7 +63,7 @@ module Gitlab
log " * Created #{project.name} (#{project_full_path})".color(:green)
project.write_repository_config
- project.repository.create_hooks
+ Gitlab::Git::Repository.create_hooks(project.repository.path_to_repo, Gitlab.config.gitlab_shell.hooks_path)
ProjectCacheWorker.perform_async(project.id)
else
diff --git a/lib/gitlab/checks/project_moved.rb b/lib/gitlab/checks/project_moved.rb
index 3263790a876..3a197078d08 100644
--- a/lib/gitlab/checks/project_moved.rb
+++ b/lib/gitlab/checks/project_moved.rb
@@ -9,20 +9,16 @@ module Gitlab
super(project, user, protocol)
end
- def message(rejected: false)
+ def message
<<~MESSAGE
Project '#{redirected_path}' was moved to '#{project.full_path}'.
Please update your Git remote:
- #{remote_url_message(rejected)}
+ git remote set-url origin #{url_to_repo}
MESSAGE
end
- def permanent_redirect?
- RedirectRoute.permanent.exists?(path: redirected_path)
- end
-
private
attr_reader :redirected_path
@@ -30,18 +26,6 @@ module Gitlab
def self.message_key(user_id, project_id)
"#{REDIRECT_NAMESPACE}:#{user_id}:#{project_id}"
end
-
- def remote_url_message(rejected)
- if rejected
- "git remote set-url origin #{url_to_repo} and try again."
- else
- "git remote set-url origin #{url_to_repo}"
- end
- end
-
- def url
- protocol == 'ssh' ? project.ssh_url_to_repo : project.http_url_to_repo
- end
end
end
end
diff --git a/lib/gitlab/ci/build/step.rb b/lib/gitlab/ci/build/step.rb
index 411f67f8ce7..0b1ebe4e048 100644
--- a/lib/gitlab/ci/build/step.rb
+++ b/lib/gitlab/ci/build/step.rb
@@ -14,7 +14,7 @@ module Gitlab
self.new(:script).tap do |step|
step.script = job.options[:before_script].to_a + job.options[:script].to_a
step.script = job.commands.split("\n") if step.script.empty?
- step.timeout = job.timeout
+ step.timeout = job.metadata_timeout
step.when = WHEN_ON_SUCCESS
end
end
@@ -25,7 +25,7 @@ module Gitlab
self.new(:after_script).tap do |step|
step.script = after_script
- step.timeout = job.timeout
+ step.timeout = job.metadata_timeout
step.when = WHEN_ALWAYS
step.allow_failure = true
end
diff --git a/lib/gitlab/ci/config.rb b/lib/gitlab/ci/config.rb
index f7ff7ea212e..66ac4a40616 100644
--- a/lib/gitlab/ci/config.rb
+++ b/lib/gitlab/ci/config.rb
@@ -4,7 +4,8 @@ module Gitlab
# Base GitLab CI Configuration facade
#
class Config
- def initialize(config)
+ # EE would override this and utilize opts argument
+ def initialize(config, opts = {})
@config = Loader.new(config).load!
@global = Entry::Global.new(@config)
diff --git a/lib/gitlab/ci/trace/http_io.rb b/lib/gitlab/ci/trace/http_io.rb
new file mode 100644
index 00000000000..ac4308f4e2c
--- /dev/null
+++ b/lib/gitlab/ci/trace/http_io.rb
@@ -0,0 +1,187 @@
+##
+# This class is compatible with IO class (https://ruby-doc.org/core-2.3.1/IO.html)
+# source: https://gitlab.com/snippets/1685610
+module Gitlab
+ module Ci
+ class Trace
+ class HttpIO
+ BUFFER_SIZE = 128.kilobytes
+
+ InvalidURLError = Class.new(StandardError)
+ FailedToGetChunkError = Class.new(StandardError)
+
+ attr_reader :uri, :size
+ attr_reader :tell
+ attr_reader :chunk, :chunk_range
+
+ alias_method :pos, :tell
+
+ def initialize(url, size)
+ raise InvalidURLError unless ::Gitlab::UrlSanitizer.valid?(url)
+
+ @uri = URI(url)
+ @size = size
+ @tell = 0
+ end
+
+ def close
+ # no-op
+ end
+
+ def binmode
+ # no-op
+ end
+
+ def binmode?
+ true
+ end
+
+ def path
+ nil
+ end
+
+ def url
+ @uri.to_s
+ end
+
+ def seek(pos, where = IO::SEEK_SET)
+ new_pos =
+ case where
+ when IO::SEEK_END
+ size + pos
+ when IO::SEEK_SET
+ pos
+ when IO::SEEK_CUR
+ tell + pos
+ else
+ -1
+ end
+
+ raise 'new position is outside of file' if new_pos < 0 || new_pos > size
+
+ @tell = new_pos
+ end
+
+ def eof?
+ tell == size
+ end
+
+ def each_line
+ until eof?
+ line = readline
+ break if line.nil?
+
+ yield(line)
+ end
+ end
+
+ def read(length = nil)
+ out = ""
+
+ until eof? || (length && out.length >= length)
+ data = get_chunk
+ break if data.empty?
+
+ out << data
+ @tell += data.bytesize
+ end
+
+ out = out[0, length] if length && out.length > length
+
+ out
+ end
+
+ def readline
+ out = ""
+
+ until eof?
+ data = get_chunk
+ new_line = data.index("\n")
+
+ if !new_line.nil?
+ out << data[0..new_line]
+ @tell += new_line + 1
+ break
+ else
+ out << data
+ @tell += data.bytesize
+ end
+ end
+
+ out
+ end
+
+ def write(data)
+ raise NotImplementedError
+ end
+
+ def truncate(offset)
+ raise NotImplementedError
+ end
+
+ def flush
+ raise NotImplementedError
+ end
+
+ def present?
+ true
+ end
+
+ private
+
+ ##
+ # The below methods are not implemented in IO class
+ #
+ def in_range?
+ @chunk_range&.include?(tell)
+ end
+
+ def get_chunk
+ unless in_range?
+ response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == 'https') do |http|
+ http.request(request)
+ end
+
+ raise FailedToGetChunkError unless response.code == '200' || response.code == '206'
+
+ @chunk = response.body.force_encoding(Encoding::BINARY)
+ @chunk_range = response.content_range
+
+ ##
+ # Note: If provider does not return content_range, then we set it as we requested
+ # Provider: minio
+ # - When the file size is larger than requested Content-range, the Content-range is included in responces with Net::HTTPPartialContent 206
+ # - When the file size is smaller than requested Content-range, the Content-range is included in responces with Net::HTTPPartialContent 206
+ # Provider: AWS
+ # - When the file size is larger than requested Content-range, the Content-range is included in responces with Net::HTTPPartialContent 206
+ # - When the file size is smaller than requested Content-range, the Content-range is included in responces with Net::HTTPPartialContent 206
+ # Provider: GCS
+ # - When the file size is larger than requested Content-range, the Content-range is included in responces with Net::HTTPPartialContent 206
+ # - When the file size is smaller than requested Content-range, the Content-range is included in responces with Net::HTTPOK 200
+ @chunk_range ||= (chunk_start...(chunk_start + @chunk.length))
+ end
+
+ @chunk[chunk_offset..BUFFER_SIZE]
+ end
+
+ def request
+ Net::HTTP::Get.new(uri).tap do |request|
+ request.set_range(chunk_start, BUFFER_SIZE)
+ end
+ end
+
+ def chunk_offset
+ tell % BUFFER_SIZE
+ end
+
+ def chunk_start
+ (tell / BUFFER_SIZE) * BUFFER_SIZE
+ end
+
+ def chunk_end
+ [chunk_start + BUFFER_SIZE, size].min
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/trace/stream.rb b/lib/gitlab/ci/trace/stream.rb
index d52194f688b..b3fe3ef1c4d 100644
--- a/lib/gitlab/ci/trace/stream.rb
+++ b/lib/gitlab/ci/trace/stream.rb
@@ -8,7 +8,7 @@ module Gitlab
attr_reader :stream
- delegate :close, :tell, :seek, :size, :path, :truncate, to: :stream, allow_nil: true
+ delegate :close, :tell, :seek, :size, :path, :url, :truncate, to: :stream, allow_nil: true
delegate :valid?, to: :stream, as: :present?, allow_nil: true
diff --git a/lib/gitlab/ci/yaml_processor.rb b/lib/gitlab/ci/yaml_processor.rb
index bc2a6f98dae..e829f2a95f8 100644
--- a/lib/gitlab/ci/yaml_processor.rb
+++ b/lib/gitlab/ci/yaml_processor.rb
@@ -7,8 +7,8 @@ module Gitlab
attr_reader :cache, :stages, :jobs
- def initialize(config)
- @ci_config = Gitlab::Ci::Config.new(config)
+ def initialize(config, opts = {})
+ @ci_config = Gitlab::Ci::Config.new(config, opts)
@config = @ci_config.to_hash
unless @ci_config.valid?
@@ -73,11 +73,11 @@ module Gitlab
end
end
- def self.validation_message(content)
+ def self.validation_message(content, opts = {})
return 'Please provide content of .gitlab-ci.yml' if content.blank?
begin
- Gitlab::Ci::YamlProcessor.new(content)
+ Gitlab::Ci::YamlProcessor.new(content, opts)
nil
rescue ValidationError => e
e.message
diff --git a/lib/gitlab/database/migration_helpers.rb b/lib/gitlab/database/migration_helpers.rb
index 44ca434056f..1634fe4e9cb 100644
--- a/lib/gitlab/database/migration_helpers.rb
+++ b/lib/gitlab/database/migration_helpers.rb
@@ -900,11 +900,42 @@ into similar problems in the future (e.g. when new tables are created).
end
end
- # Rails' index_exists? doesn't work when you only give it a table and index
- # name. As such we have to use some extra code to check if an index exists for
- # a given name.
+ # Fetches indexes on a column by name for postgres.
+ #
+ # This will include indexes using an expression on the column, for example:
+ # `CREATE INDEX CONCURRENTLY index_name ON table (LOWER(column));`
+ #
+ # For mysql, it falls back to the default ActiveRecord implementation that
+ # will not find custom indexes. But it will select by name without passing
+ # a column.
+ #
+ # We can remove this when upgrading to Rails 5 with an updated `index_exists?`:
+ # - https://github.com/rails/rails/commit/edc2b7718725016e988089b5fb6d6fb9d6e16882
+ #
+ # Or this can be removed when we no longer support postgres < 9.5, so we
+ # can use `CREATE INDEX IF NOT EXISTS`.
def index_exists_by_name?(table, index)
- indexes(table).map(&:name).include?(index)
+ # We can't fall back to the normal `index_exists?` method because that
+ # does not find indexes without passing a column name.
+ if indexes(table).map(&:name).include?(index.to_s)
+ true
+ elsif Gitlab::Database.postgresql?
+ postgres_exists_by_name?(table, index)
+ else
+ false
+ end
+ end
+
+ def postgres_exists_by_name?(table, name)
+ index_sql = <<~SQL
+ SELECT COUNT(*)
+ FROM pg_index
+ JOIN pg_class i ON (indexrelid=i.oid)
+ JOIN pg_class t ON (indrelid=t.oid)
+ WHERE i.relname = '#{name}' AND t.relname = '#{table}'
+ SQL
+
+ connection.select_value(index_sql).to_i > 0
end
end
end
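
To illustrate how the helper is meant to be used, here is a hypothetical migration (the table, index name, and expression are invented for the example); it skips creation when an index of that name already exists, even one defined with an expression that the stock index_exists? check cannot see:

    class AddLowerPathIndexExample < ActiveRecord::Migration
      include Gitlab::Database::MigrationHelpers

      INDEX_NAME = 'index_routes_on_path_lower'.freeze

      disable_ddl_transaction!

      def up
        # Skip creation when the index already exists under this name.
        return if index_exists_by_name?(:routes, INDEX_NAME)

        execute("CREATE INDEX CONCURRENTLY #{INDEX_NAME} ON routes (LOWER(path))") if Gitlab::Database.postgresql?
      end

      def down
        execute("DROP INDEX IF EXISTS #{INDEX_NAME}") if Gitlab::Database.postgresql?
      end
    end
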
diff --git a/lib/gitlab/git/repository.rb b/lib/gitlab/git/repository.rb
index 20b0647fce9..2d16a81c888 100644
--- a/lib/gitlab/git/repository.rb
+++ b/lib/gitlab/git/repository.rb
@@ -8,6 +8,7 @@ module Gitlab
class Repository
include Gitlab::Git::RepositoryMirroring
include Gitlab::Git::Popen
+ include Gitlab::EncodingHelper
ALLOWED_OBJECT_DIRECTORIES_VARIABLES = %w[
GIT_OBJECT_DIRECTORY
@@ -1479,7 +1480,7 @@ module Gitlab
names.lines.each do |line|
next unless line.start_with?(refs_prefix)
- refs << line.rstrip[left_slice_count..-1]
+ refs << encode_utf8(line.rstrip[left_slice_count..-1])
end
refs
diff --git a/lib/gitlab/git_access.rb b/lib/gitlab/git_access.rb
index 6400089a22f..ed0644f6cf1 100644
--- a/lib/gitlab/git_access.rb
+++ b/lib/gitlab/git_access.rb
@@ -53,7 +53,7 @@ module Gitlab
ensure_project_on_push!(cmd, changes)
check_project_accessibility!
- check_project_moved!
+ add_project_moved_message!
check_repository_existence!
case cmd
@@ -99,8 +99,6 @@ module Gitlab
end
def check_active_user!
- return if deploy_key?
-
if user && !user_access.allowed?
raise UnauthorizedError, ERROR_MESSAGES[:account_blocked]
end
@@ -125,16 +123,12 @@ module Gitlab
end
end
- def check_project_moved!
+ def add_project_moved_message!
return if redirected_path.nil?
project_moved = Checks::ProjectMoved.new(project, user, protocol, redirected_path)
- if project_moved.permanent_redirect?
- project_moved.add_message
- else
- raise ProjectMovedError, project_moved.message(rejected: true)
- end
+ project_moved.add_message
end
def check_command_disabled!(cmd)
@@ -219,7 +213,7 @@ module Gitlab
raise UnauthorizedError, ERROR_MESSAGES[:read_only]
end
- if deploy_key
+ if deploy_key?
unless deploy_key.can_push_to?(project)
raise UnauthorizedError, ERROR_MESSAGES[:deploy_key_upload]
end
@@ -309,8 +303,10 @@ module Gitlab
case actor
when User
actor
+ when DeployKey
+ nil
when Key
- actor.user unless actor.is_a?(DeployKey)
+ actor.user
when :ci
nil
end
diff --git a/lib/gitlab/gitaly_client.rb b/lib/gitlab/gitaly_client.rb
index 8ca30ffc232..0abae70c443 100644
--- a/lib/gitlab/gitaly_client.rb
+++ b/lib/gitlab/gitaly_client.rb
@@ -83,6 +83,10 @@ module Gitlab
end
end
+ def self.random_storage
+ Gitlab.config.repositories.storages.keys.sample
+ end
+
def self.address(storage)
params = Gitlab.config.repositories.storages[storage]
raise "storage not found: #{storage.inspect}" if params.nil?
diff --git a/lib/gitlab/gitaly_client/remote_service.rb b/lib/gitlab/gitaly_client/remote_service.rb
index 58c356edfd1..f2d699d9dfb 100644
--- a/lib/gitlab/gitaly_client/remote_service.rb
+++ b/lib/gitlab/gitaly_client/remote_service.rb
@@ -3,6 +3,17 @@ module Gitlab
class RemoteService
MAX_MSG_SIZE = 128.kilobytes.freeze
+ def self.exists?(remote_url)
+ request = Gitaly::FindRemoteRepositoryRequest.new(remote: remote_url)
+
+ response = GitalyClient.call(GitalyClient.random_storage,
+ :remote_service,
+ :find_remote_repository, request,
+ timeout: GitalyClient.medium_timeout)
+
+ response.exists
+ end
+
def initialize(repository)
@repository = repository
@gitaly_repo = repository.gitaly_repository
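
A quick sketch of the new class-level check; the URL is illustrative. It asks Gitaly, through a randomly chosen storage, whether the remote repository is reachable, which the importer change below uses to skip wikis that do not exist:

    wiki_url = 'https://gitlab.com/gitlab-org/gitlab-test.wiki.git' # hypothetical
    Gitlab::GitalyClient::RemoteService.exists?(wiki_url) # => true or false
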
diff --git a/lib/gitlab/github_import/importer/repository_importer.rb b/lib/gitlab/github_import/importer/repository_importer.rb
index ab0b751fe24..b1b283e98b5 100644
--- a/lib/gitlab/github_import/importer/repository_importer.rb
+++ b/lib/gitlab/github_import/importer/repository_importer.rb
@@ -16,7 +16,8 @@ module Gitlab
# Returns true if we should import the wiki for the project.
def import_wiki?
client.repository(project.import_source)&.has_wiki &&
- !project.wiki_repository_exists?
+ !project.wiki_repository_exists? &&
+ Gitlab::GitalyClient::RemoteService.exists?(wiki_url)
end
# Imports the repository data.
@@ -55,7 +56,6 @@ module Gitlab
def import_wiki_repository
wiki_path = "#{project.disk_path}.wiki"
- wiki_url = project.import_url.sub(/\.git\z/, '.wiki.git')
storage_path = project.repository_storage_path
gitlab_shell.import_repository(storage_path, wiki_path, wiki_url)
@@ -70,6 +70,10 @@ module Gitlab
end
end
+ def wiki_url
+ project.import_url.sub(/\.git\z/, '.wiki.git')
+ end
+
def update_clone_time
project.update_column(:last_repository_updated_at, Time.zone.now)
end
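
As a worked example of the wiki_url helper above, the substitution simply swaps the trailing .git suffix for .wiki.git:

    'https://github.com/octocat/Hello-World.git'.sub(/\.git\z/, '.wiki.git')
    # => "https://github.com/octocat/Hello-World.wiki.git"
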
diff --git a/lib/gitlab/verify/lfs_objects.rb b/lib/gitlab/verify/lfs_objects.rb
index fe51edbdeeb..970e2a7b718 100644
--- a/lib/gitlab/verify/lfs_objects.rb
+++ b/lib/gitlab/verify/lfs_objects.rb
@@ -12,7 +12,7 @@ module Gitlab
private
def relation
- LfsObject.all
+ LfsObject.with_files_stored_locally
end
def expected_checksum(lfs_object)
diff --git a/lib/gitlab/verify/uploads.rb b/lib/gitlab/verify/uploads.rb
index 6972e517ea5..0ffa71a6d72 100644
--- a/lib/gitlab/verify/uploads.rb
+++ b/lib/gitlab/verify/uploads.rb
@@ -12,7 +12,7 @@ module Gitlab
private
def relation
- Upload.all
+ Upload.with_files_stored_locally
end
def expected_checksum(upload)
diff --git a/lib/gitlab/workhorse.rb b/lib/gitlab/workhorse.rb
index 0b0d667d4fd..b102812ec12 100644
--- a/lib/gitlab/workhorse.rb
+++ b/lib/gitlab/workhorse.rb
@@ -21,29 +21,18 @@ module Gitlab
raise "Unsupported action: #{action}" unless ALLOWED_GIT_HTTP_ACTIONS.include?(action.to_s)
project = repository.project
- repo_path = repository.path_to_repo
- params = {
+
+ {
GL_ID: Gitlab::GlId.gl_id(user),
GL_REPOSITORY: Gitlab::GlRepository.gl_repository(project, is_wiki),
GL_USERNAME: user&.username,
- RepoPath: repo_path,
- ShowAllRefs: show_all_refs
- }
- server = {
- address: Gitlab::GitalyClient.address(project.repository_storage),
- token: Gitlab::GitalyClient.token(project.repository_storage)
- }
- params[:Repository] = repository.gitaly_repository.to_h
- params[:GitalyServer] = server
-
- params
- end
-
- def lfs_upload_ok(oid, size)
- {
- StoreLFSPath: LfsObjectUploader.workhorse_upload_path,
- LfsOid: oid,
- LfsSize: size
+ ShowAllRefs: show_all_refs,
+ Repository: repository.gitaly_repository.to_h,
+ RepoPath: 'ignored but not allowed to be empty in gitlab-workhorse',
+ GitalyServer: {
+ address: Gitlab::GitalyClient.address(project.repository_storage),
+ token: Gitlab::GitalyClient.token(project.repository_storage)
+ }
}
end
@@ -52,7 +41,7 @@ module Gitlab
end
def send_git_blob(repository, blob)
- params = if Gitlab::GitalyClient.feature_enabled?(:workhorse_raw_show)
+ params = if Gitlab::GitalyClient.feature_enabled?(:workhorse_raw_show, status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT)
{
'GitalyServer' => gitaly_server_hash(repository),
'GetBlobRequest' => {
@@ -80,7 +69,7 @@ module Gitlab
params = repository.archive_metadata(ref, Gitlab.config.gitlab.repository_downloads_path, format)
raise "Repository or ref not found" if params.empty?
- if Gitlab::GitalyClient.feature_enabled?(:workhorse_archive)
+ if Gitlab::GitalyClient.feature_enabled?(:workhorse_archive, status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT)
params.merge!(
'GitalyServer' => gitaly_server_hash(repository),
'GitalyRepository' => repository.gitaly_repository.to_h
@@ -97,7 +86,7 @@ module Gitlab
end
def send_git_diff(repository, diff_refs)
- params = if Gitlab::GitalyClient.feature_enabled?(:workhorse_send_git_diff)
+ params = if Gitlab::GitalyClient.feature_enabled?(:workhorse_send_git_diff, status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT)
{
'GitalyServer' => gitaly_server_hash(repository),
'RawDiffRequest' => Gitaly::RawDiffRequest.new(
@@ -115,7 +104,7 @@ module Gitlab
end
def send_git_patch(repository, diff_refs)
- params = if Gitlab::GitalyClient.feature_enabled?(:workhorse_send_git_patch)
+ params = if Gitlab::GitalyClient.feature_enabled?(:workhorse_send_git_patch, status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT)
{
'GitalyServer' => gitaly_server_hash(repository),
'RawPatchRequest' => Gitaly::RawPatchRequest.new(
diff --git a/lib/tasks/gitlab/artifacts/migrate.rake b/lib/tasks/gitlab/artifacts/migrate.rake
new file mode 100644
index 00000000000..bfca4bfb3f7
--- /dev/null
+++ b/lib/tasks/gitlab/artifacts/migrate.rake
@@ -0,0 +1,25 @@
+require 'logger'
+require 'resolv-replace'
+
+desc "GitLab | Migrate files for artifacts to comply with new storage format"
+namespace :gitlab do
+ namespace :artifacts do
+ task migrate: :environment do
+ logger = Logger.new(STDOUT)
+ logger.info('Starting transfer of artifacts')
+
+ Ci::Build.joins(:project)
+ .with_artifacts_stored_locally
+ .find_each(batch_size: 10) do |build|
+ begin
+ build.artifacts_file.migrate!(ObjectStorage::Store::REMOTE)
+ build.artifacts_metadata.migrate!(ObjectStorage::Store::REMOTE)
+
+ logger.info("Transferred artifacts of #{build.id} of #{build.artifacts_size} to object storage")
+ rescue => e
+ logger.error("Failed to transfer artifacts of #{build.id} with error: #{e.message}")
+ end
+ end
+ end
+ end
+end
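
This task (and the gitlab:lfs:migrate task added below, which follows the same pattern) is normally run as bundle exec rake gitlab:artifacts:migrate. A sketch of the equivalent call from a Rails runner session, assuming the application environment is already booted:

    require 'rake'
    Rails.application.load_tasks

    # Moves locally stored artifacts and their metadata to the remote object
    # store, 10 builds per batch, logging progress to STDOUT.
    Rake::Task['gitlab:artifacts:migrate'].invoke
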
diff --git a/lib/tasks/gitlab/lfs/migrate.rake b/lib/tasks/gitlab/lfs/migrate.rake
new file mode 100644
index 00000000000..a45e5ca91e0
--- /dev/null
+++ b/lib/tasks/gitlab/lfs/migrate.rake
@@ -0,0 +1,22 @@
+require 'logger'
+
+desc "GitLab | Migrate LFS objects to remote storage"
+namespace :gitlab do
+ namespace :lfs do
+ task migrate: :environment do
+ logger = Logger.new(STDOUT)
+ logger.info('Starting transfer of LFS files to object storage')
+
+ LfsObject.with_files_stored_locally
+ .find_each(batch_size: 10) do |lfs_object|
+ begin
+ lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
+
+ logger.info("Transferred LFS object #{lfs_object.oid} of size #{lfs_object.size.to_i.bytes} to object storage")
+ rescue => e
+ logger.error("Failed to transfer LFS object #{lfs_object.oid} with error: #{e.message}")
+ end
+ end
+ end
+ end
+end
diff --git a/lib/tasks/gitlab/uploads/migrate.rake b/lib/tasks/gitlab/uploads/migrate.rake
new file mode 100644
index 00000000000..c26c3ccb3be
--- /dev/null
+++ b/lib/tasks/gitlab/uploads/migrate.rake
@@ -0,0 +1,33 @@
+namespace :gitlab do
+ namespace :uploads do
+ desc 'GitLab | Uploads | Migrate the uploaded files to object storage'
+ task :migrate, [:uploader_class, :model_class, :mounted_as] => :environment do |task, args|
+ batch_size = ENV.fetch('BATCH', 200).to_i
+ @to_store = ObjectStorage::Store::REMOTE
+ @mounted_as = args.mounted_as&.gsub(':', '')&.to_sym
+ @uploader_class = args.uploader_class.constantize
+ @model_class = args.model_class.constantize
+
+ uploads.each_batch(of: batch_size, &method(:enqueue_batch)) # rubocop: disable Cop/InBatches
+ end
+
+ def enqueue_batch(batch, index)
+ job = ObjectStorage::MigrateUploadsWorker.enqueue!(batch,
+ @mounted_as,
+ @to_store)
+ puts "Enqueued job ##{index}: #{job}"
+ rescue ObjectStorage::MigrateUploadsWorker::SanityCheckError => e
+ # continue for the next batch
+ puts "Could not enqueue batch (#{batch.ids}) #{e.message}".color(:red)
+ end
+
+ def uploads
+ Upload.class_eval { include EachBatch } unless Upload < EachBatch
+
+ Upload
+ .where.not(store: @to_store)
+ .where(uploader: @uploader_class.to_s,
+ model_type: @model_class.base_class.sti_name)
+ end
+ end
+end
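
The uploads task is parameterized by uploader class, model class, and mount point, and reads an optional BATCH environment variable (default 200) for the batch size. A hypothetical invocation for user avatars from the shell would be BATCH=500 bundle exec rake "gitlab:uploads:migrate[AvatarUploader, User, :avatar]"; the same call expressed in Ruby:

    require 'rake'
    Rails.application.load_tasks

    ENV['BATCH'] = '500' # optional; 200 uploads per enqueued batch by default
    Rake::Task['gitlab:uploads:migrate'].invoke('AvatarUploader', 'User', ':avatar')
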
diff --git a/lib/tasks/migrate/setup_postgresql.rake b/lib/tasks/migrate/setup_postgresql.rake
index 1c7a8a90f5c..af30ecb0e9b 100644
--- a/lib/tasks/migrate/setup_postgresql.rake
+++ b/lib/tasks/migrate/setup_postgresql.rake
@@ -7,8 +7,8 @@ task setup_postgresql: :environment do
require Rails.root.join('db/migrate/20170724214302_add_lower_path_index_to_redirect_routes')
require Rails.root.join('db/migrate/20170503185032_index_redirect_routes_path_for_like')
require Rails.root.join('db/migrate/20171220191323_add_index_on_namespaces_lower_name.rb')
- require Rails.root.join('db/migrate/20180113220114_rework_redirect_routes_indexes.rb')
require Rails.root.join('db/migrate/20180215181245_users_name_lower_index.rb')
+ require Rails.root.join('db/post_migrate/20180306164012_add_path_index_to_redirect_routes.rb')
NamespacesProjectsPathLowerIndexes.new.up
AddUsersLowerUsernameEmailIndexes.new.up
@@ -17,6 +17,6 @@ task setup_postgresql: :environment do
AddLowerPathIndexToRedirectRoutes.new.up
IndexRedirectRoutesPathForLike.new.up
AddIndexOnNamespacesLowerName.new.up
- ReworkRedirectRoutesIndexes.new.up
UsersNameLowerIndex.new.up
+ AddPathIndexToRedirectRoutes.new.up
end
diff --git a/package.json b/package.json
index 56fd2575e91..31edc3a8016 100644
--- a/package.json
+++ b/package.json
@@ -121,5 +121,8 @@
"nodemon": "^1.15.1",
"prettier": "1.11.1",
"webpack-dev-server": "^2.11.2"
+ },
+ "optionalDependencies": {
+ "fsevents": "^1.1.3"
}
}
diff --git a/qa/qa/scenario/bootable.rb b/qa/qa/scenario/bootable.rb
index d6de4d404c8..dd12ea6d492 100644
--- a/qa/qa/scenario/bootable.rb
+++ b/qa/qa/scenario/bootable.rb
@@ -23,7 +23,7 @@ module QA
arguments.parse!(argv)
- self.perform(**Runtime::Scenario.attributes)
+ self.perform(Runtime::Scenario.attributes, *arguments.default_argv)
end
private
diff --git a/qa/qa/scenario/test/instance.rb b/qa/qa/scenario/test/instance.rb
index 0af9afd1ea4..567e5fd6cca 100644
--- a/qa/qa/scenario/test/instance.rb
+++ b/qa/qa/scenario/test/instance.rb
@@ -11,7 +11,7 @@ module QA
tags :core
- def perform(address, *files)
+ def perform(address, *rspec_options)
Runtime::Scenario.define(:gitlab_address, address)
##
@@ -22,9 +22,9 @@ module QA
Specs::Runner.perform do |specs|
specs.tty = true
specs.tags = self.class.focus
- specs.files =
- if files.any?
- files
+ specs.options =
+ if rspec_options.any?
+ rspec_options
else
File.expand_path('../../specs/features', __dir__)
end
diff --git a/qa/qa/scenario/test/integration/mattermost.rb b/qa/qa/scenario/test/integration/mattermost.rb
index d939f52ab16..13bfad28b0b 100644
--- a/qa/qa/scenario/test/integration/mattermost.rb
+++ b/qa/qa/scenario/test/integration/mattermost.rb
@@ -9,10 +9,10 @@ module QA
class Mattermost < Test::Instance
tags :core, :mattermost
- def perform(address, mattermost, *files)
+ def perform(address, mattermost, *rspec_options)
Runtime::Scenario.define(:mattermost_address, mattermost)
- super(address, *files)
+ super(address, *rspec_options)
end
end
end
diff --git a/qa/qa/specs/runner.rb b/qa/qa/specs/runner.rb
index 752e3e60b8c..f8f6fe65599 100644
--- a/qa/qa/specs/runner.rb
+++ b/qa/qa/specs/runner.rb
@@ -3,19 +3,19 @@ require 'rspec/core'
module QA
module Specs
class Runner < Scenario::Template
- attr_accessor :tty, :tags, :files
+ attr_accessor :tty, :tags, :options
def initialize
@tty = false
@tags = []
- @files = [File.expand_path('./features', __dir__)]
+ @options = [File.expand_path('./features', __dir__)]
end
def perform
args = []
args.push('--tty') if tty
tags.to_a.each { |tag| args.push(['-t', tag.to_s]) }
- args.push(files)
+ args.push(options)
Runtime::Browser.configure!
diff --git a/qa/spec/scenario/test/instance_spec.rb b/qa/spec/scenario/test/instance_spec.rb
index bd09c28e924..a74a9538be8 100644
--- a/qa/spec/scenario/test/instance_spec.rb
+++ b/qa/spec/scenario/test/instance_spec.rb
@@ -29,7 +29,7 @@ describe QA::Scenario::Test::Instance do
it 'should call runner with default arguments' do
subject.perform("test")
- expect(runner).to have_received(:files=)
+ expect(runner).to have_received(:options=)
.with(File.expand_path('../../../qa/specs/features', __dir__))
end
end
@@ -38,7 +38,7 @@ describe QA::Scenario::Test::Instance do
it 'should call runner with paths' do
subject.perform('test', 'path1', 'path2')
- expect(runner).to have_received(:files=).with(%w[path1 path2])
+ expect(runner).to have_received(:options=).with(%w[path1 path2])
end
end
end
diff --git a/spec/controllers/concerns/send_file_upload_spec.rb b/spec/controllers/concerns/send_file_upload_spec.rb
new file mode 100644
index 00000000000..f4c99ea4064
--- /dev/null
+++ b/spec/controllers/concerns/send_file_upload_spec.rb
@@ -0,0 +1,89 @@
+require 'spec_helper'
+
+describe SendFileUpload do
+ let(:uploader_class) do
+ Class.new(GitlabUploader) do
+ include ObjectStorage::Concern
+
+ storage_options Gitlab.config.uploads
+
+ private
+
+ # user/:id
+ def dynamic_segment
+ File.join(model.class.to_s.underscore, model.id.to_s)
+ end
+ end
+ end
+
+ let(:controller_class) do
+ Class.new do
+ include SendFileUpload
+ end
+ end
+
+ let(:object) { build_stubbed(:user) }
+ let(:uploader) { uploader_class.new(object, :file) }
+
+ describe '#send_upload' do
+ let(:controller) { controller_class.new }
+ let(:temp_file) { Tempfile.new('test') }
+
+ subject { controller.send_upload(uploader) }
+
+ before do
+ FileUtils.touch(temp_file)
+ end
+
+ after do
+ FileUtils.rm_f(temp_file)
+ end
+
+ context 'when local file is used' do
+ before do
+ uploader.store!(temp_file)
+ end
+
+ it 'sends a file' do
+ expect(controller).to receive(:send_file).with(uploader.path, anything)
+
+ subject
+ end
+ end
+
+ context 'when remote file is used' do
+ before do
+ stub_uploads_object_storage(uploader: uploader_class)
+ uploader.object_store = ObjectStorage::Store::REMOTE
+ uploader.store!(temp_file)
+ end
+
+ context 'and proxying is enabled' do
+ before do
+ allow(Gitlab.config.uploads.object_store).to receive(:proxy_download) { true }
+ end
+
+ it 'sends a file' do
+ headers = double
+ expect(headers).to receive(:store).with(Gitlab::Workhorse::SEND_DATA_HEADER, /^send-url:/)
+ expect(controller).to receive(:headers) { headers }
+ expect(controller).to receive(:head).with(:ok)
+
+ subject
+ end
+ end
+
+ context 'and proxying is disabled' do
+ before do
+ allow(Gitlab.config.uploads.object_store).to receive(:proxy_download) { false }
+ end
+
+ it 'sends a file' do
+ expect(controller).to receive(:redirect_to).with(/#{uploader.path}/)
+
+ subject
+ end
+ end
+ end
+ end
+end
diff --git a/spec/controllers/projects/artifacts_controller_spec.rb b/spec/controllers/projects/artifacts_controller_spec.rb
index 25a2e13fe1a..4ea6f869aa3 100644
--- a/spec/controllers/projects/artifacts_controller_spec.rb
+++ b/spec/controllers/projects/artifacts_controller_spec.rb
@@ -145,9 +145,23 @@ describe Projects::ArtifactsController do
context 'when using local file storage' do
it_behaves_like 'a valid file' do
let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
+ let(:store) { ObjectStorage::Store::LOCAL }
let(:archive_path) { JobArtifactUploader.root }
end
end
+
+ context 'when using remote file storage' do
+ before do
+ stub_artifacts_object_storage
+ end
+
+ it_behaves_like 'a valid file' do
+ let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
+ let!(:job) { create(:ci_build, :success, pipeline: pipeline) }
+ let(:store) { ObjectStorage::Store::REMOTE }
+ let(:archive_path) { 'https://' }
+ end
+ end
end
end
diff --git a/spec/controllers/projects/ci/lints_controller_spec.rb b/spec/controllers/projects/ci/lints_controller_spec.rb
new file mode 100644
index 00000000000..1249a5528a9
--- /dev/null
+++ b/spec/controllers/projects/ci/lints_controller_spec.rb
@@ -0,0 +1,123 @@
+require 'spec_helper'
+
+describe Projects::Ci::LintsController do
+ let(:project) { create(:project, :repository) }
+ let(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ end
+
+ describe 'GET #show' do
+ context 'with enough privileges' do
+ before do
+ project.add_developer(user)
+
+ get :show, namespace_id: project.namespace, project_id: project
+ end
+
+ it 'should be success' do
+ expect(response).to be_success
+ end
+
+ it 'should render show page' do
+ expect(response).to render_template :show
+ end
+
+ it 'should retrieve project' do
+ expect(assigns(:project)).to eq(project)
+ end
+ end
+
+ context 'without enough privileges' do
+ before do
+ project.add_guest(user)
+
+ get :show, namespace_id: project.namespace, project_id: project
+ end
+
+ it 'should respond with 404' do
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+
+ describe 'POST #create' do
+ let(:remote_file_path) { 'https://gitlab.com/gitlab-org/gitlab-ce/blob/1234/.gitlab-ci-1.yml' }
+
+ let(:remote_file_content) do
+ <<~HEREDOC
+ before_script:
+ - apt-get update -qq && apt-get install -y -qq sqlite3 libsqlite3-dev nodejs
+ - ruby -v
+ - which ruby
+ - gem install bundler --no-ri --no-rdoc
+ - bundle install --jobs $(nproc) "${FLAGS[@]}"
+ HEREDOC
+ end
+
+ let(:content) do
+ <<~HEREDOC
+ include:
+ - #{remote_file_path}
+
+ rubocop:
+ script:
+ - bundle exec rubocop
+ HEREDOC
+ end
+
+ context 'with a valid gitlab-ci.yml' do
+ before do
+ WebMock.stub_request(:get, remote_file_path).to_return(body: remote_file_content)
+ project.add_developer(user)
+
+ post :create, namespace_id: project.namespace, project_id: project, content: content
+ end
+
+ it 'should be success' do
+ expect(response).to be_success
+ end
+
+ it 'should render show page' do
+ expect(response).to render_template :show
+ end
+
+ it 'should retrieve project' do
+ expect(assigns(:project)).to eq(project)
+ end
+ end
+
+ context 'with an invalid gitlab-ci.yml' do
+ let(:content) do
+ <<~HEREDOC
+ rubocop:
+ scriptt:
+ - bundle exec rubocop
+ HEREDOC
+ end
+
+ before do
+ project.add_developer(user)
+
+ post :create, namespace_id: project.namespace, project_id: project, content: content
+ end
+
+ it 'should assign errors' do
+ expect(assigns[:error]).to eq('jobs:rubocop config contains unknown keys: scriptt')
+ end
+ end
+
+ context 'without enough privileges' do
+ before do
+ project.add_guest(user)
+
+ post :create, namespace_id: project.namespace, project_id: project, content: content
+ end
+
+ it 'should respond with 404' do
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+end
diff --git a/spec/controllers/projects/clusters_controller_spec.rb b/spec/controllers/projects/clusters_controller_spec.rb
index 15ce418d0d6..82b20e12850 100644
--- a/spec/controllers/projects/clusters_controller_spec.rb
+++ b/spec/controllers/projects/clusters_controller_spec.rb
@@ -18,7 +18,7 @@ describe Projects::ClustersController do
context 'when project has one or more clusters' do
let(:project) { create(:project) }
let!(:enabled_cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
- let!(:disabled_cluster) { create(:cluster, :disabled, :provided_by_gcp, projects: [project]) }
+ let!(:disabled_cluster) { create(:cluster, :disabled, :provided_by_gcp, :production_environment, projects: [project]) }
it 'lists available clusters' do
go
@@ -32,7 +32,7 @@ describe Projects::ClustersController do
before do
allow(Clusters::Cluster).to receive(:paginates_per).and_return(1)
- create_list(:cluster, 2, :provided_by_gcp, projects: [project])
+ create_list(:cluster, 2, :provided_by_gcp, :production_environment, projects: [project])
get :index, namespace_id: project.namespace, project_id: project, page: last_page
end
diff --git a/spec/controllers/projects/jobs_controller_spec.rb b/spec/controllers/projects/jobs_controller_spec.rb
index f3e303bb0fe..31046c202e6 100644
--- a/spec/controllers/projects/jobs_controller_spec.rb
+++ b/spec/controllers/projects/jobs_controller_spec.rb
@@ -1,7 +1,9 @@
+# coding: utf-8
require 'spec_helper'
describe Projects::JobsController do
include ApiHelpers
+ include HttpIOHelpers
let(:project) { create(:project, :public) }
let(:pipeline) { create(:ci_pipeline, project: project) }
@@ -203,6 +205,41 @@ describe Projects::JobsController do
end
end
+ context 'when trace artifact is in ObjectStorage' do
+ let!(:job) { create(:ci_build, :success, :trace_artifact, pipeline: pipeline) }
+
+ before do
+ allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
+ allow_any_instance_of(JobArtifactUploader).to receive(:url) { remote_trace_url }
+ allow_any_instance_of(JobArtifactUploader).to receive(:size) { remote_trace_size }
+ end
+
+ context 'when there are no network issues' do
+ before do
+ stub_remote_trace_206
+
+ get_trace
+ end
+
+ it 'returns a trace' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['id']).to eq job.id
+ expect(json_response['status']).to eq job.status
+ expect(json_response['html']).to eq(job.trace.html)
+ end
+ end
+
+ context 'when there is a network issue' do
+ before do
+ stub_remote_trace_500
+ end
+
+ it 'returns a trace' do
+ expect { get_trace }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError)
+ end
+ end
+ end
+
def get_trace
get :trace, namespace_id: project.namespace,
project_id: project,
@@ -446,14 +483,18 @@ describe Projects::JobsController do
end
describe 'GET raw' do
- before do
- get_raw
+ subject do
+ post :raw, namespace_id: project.namespace,
+ project_id: project,
+ id: job.id
end
context 'when job has a trace artifact' do
let(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
it 'returns a trace' do
+ response = subject
+
expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type).to eq 'text/plain; charset=utf-8'
expect(response.body).to eq job.job_artifacts_trace.open.read
@@ -464,6 +505,8 @@ describe Projects::JobsController do
let(:job) { create(:ci_build, :trace_live, pipeline: pipeline) }
it 'send a trace file' do
+ response = subject
+
expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type).to eq 'text/plain; charset=utf-8'
expect(response.body).to eq 'BUILD TRACE'
@@ -474,14 +517,22 @@ describe Projects::JobsController do
let(:job) { create(:ci_build, pipeline: pipeline) }
it 'returns not_found' do
+ response = subject
+
expect(response).to have_gitlab_http_status(:not_found)
end
end
- def get_raw
- post :raw, namespace_id: project.namespace,
- project_id: project,
- id: job.id
+ context 'when the trace artifact is in ObjectStorage' do
+ let!(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
+
+ before do
+ allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
+ end
+
+ it 'redirects to the trace file url' do
+ expect(subject).to redirect_to(job.job_artifacts_trace.file.url)
+ end
end
end
end
diff --git a/spec/controllers/projects/milestones_controller_spec.rb b/spec/controllers/projects/milestones_controller_spec.rb
index 306094f7ffb..548c5ef36e7 100644
--- a/spec/controllers/projects/milestones_controller_spec.rb
+++ b/spec/controllers/projects/milestones_controller_spec.rb
@@ -20,14 +20,23 @@ describe Projects::MilestonesController do
describe "#show" do
render_views
- def view_milestone
- get :show, namespace_id: project.namespace.id, project_id: project.id, id: milestone.iid
+ def view_milestone(options = {})
+ params = { namespace_id: project.namespace.id, project_id: project.id, id: milestone.iid }
+ get :show, params.merge(options)
end
it 'shows milestone page' do
view_milestone
expect(response).to have_gitlab_http_status(200)
+ expect(response.content_type).to eq 'text/html'
+ end
+
+ it 'returns milestone json' do
+ view_milestone format: :json
+
+ expect(response).to have_http_status(404)
+ expect(response.content_type).to eq 'application/json'
end
end
@@ -98,7 +107,7 @@ describe Projects::MilestonesController do
it 'shows group milestone' do
post :promote, namespace_id: project.namespace.id, project_id: project.id, id: milestone.iid
- expect(flash[:notice]).to eq("#{milestone.title} promoted to group milestone")
+ expect(flash[:notice]).to eq("#{milestone.title} promoted to <a href=\"#{group_milestone_path(project.group, milestone.iid)}\">group milestone</a>.")
expect(response).to redirect_to(project_milestones_path(project))
end
end
diff --git a/spec/controllers/projects/protected_branches_controller_spec.rb b/spec/controllers/projects/protected_branches_controller_spec.rb
index 80be135b5d8..096e29bc39f 100644
--- a/spec/controllers/projects/protected_branches_controller_spec.rb
+++ b/spec/controllers/projects/protected_branches_controller_spec.rb
@@ -1,6 +1,16 @@
require('spec_helper')
describe Projects::ProtectedBranchesController do
+ let(:project) { create(:project, :repository) }
+ let(:protected_branch) { create(:protected_branch, project: project) }
+ let(:project_params) { { namespace_id: project.namespace.to_param, project_id: project } }
+ let(:base_params) { project_params.merge(id: protected_branch.id) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+ end
+
describe "GET #index" do
let(:project) { create(:project_empty_repo, :public) }
@@ -8,4 +18,91 @@ describe Projects::ProtectedBranchesController do
get(:index, namespace_id: project.namespace.to_param, project_id: project)
end
end
+
+ describe "POST #create" do
+ let(:master_access_level) { [{ access_level: Gitlab::Access::MASTER }] }
+ let(:access_level_params) do
+ { merge_access_levels_attributes: master_access_level,
+ push_access_levels_attributes: master_access_level }
+ end
+ let(:create_params) { attributes_for(:protected_branch).merge(access_level_params) }
+
+ before do
+ sign_in(user)
+ end
+
+ it 'creates the protected branch rule' do
+ expect do
+ post(:create, project_params.merge(protected_branch: create_params))
+ end.to change(ProtectedBranch, :count).by(1)
+ end
+
+ context 'when a policy restricts rule deletion' do
+ before do
+ policy = instance_double(ProtectedBranchPolicy, can?: false)
+ allow(ProtectedBranchPolicy).to receive(:new).and_return(policy)
+ end
+
+ it "prevents creation of the protected branch rule" do
+ post(:create, project_params.merge(protected_branch: create_params))
+
+ expect(ProtectedBranch.count).to eq 0
+ end
+ end
+ end
+
+ describe "PUT #update" do
+ let(:update_params) { { name: 'new_name' } }
+
+ before do
+ sign_in(user)
+ end
+
+ it 'updates the protected branch rule' do
+ put(:update, base_params.merge(protected_branch: update_params))
+
+ expect(protected_branch.reload.name).to eq('new_name')
+ expect(json_response["name"]).to eq('new_name')
+ end
+
+ context 'when a policy restricts rule deletion' do
+ before do
+ policy = instance_double(ProtectedBranchPolicy, can?: false)
+ allow(ProtectedBranchPolicy).to receive(:new).and_return(policy)
+ end
+
+ it "prevents update of the protected branch rule" do
+ old_name = protected_branch.name
+
+ put(:update, base_params.merge(protected_branch: update_params))
+
+ expect(protected_branch.reload.name).to eq(old_name)
+ end
+ end
+ end
+
+ describe "DELETE #destroy" do
+ before do
+ sign_in(user)
+ end
+
+ it "deletes the protected branch rule" do
+ delete(:destroy, base_params)
+
+ expect { ProtectedBranch.find(protected_branch.id) }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+
+ context 'when a policy restricts rule deletion' do
+ before do
+ policy = instance_double(ProtectedBranchPolicy, can?: false)
+ allow(ProtectedBranchPolicy).to receive(:new).and_return(policy)
+ end
+
+ it "prevents deletion of the protected branch rule" do
+ delete(:destroy, base_params)
+
+ expect(response.status).to eq(403)
+ end
+ end
+ end
end
diff --git a/spec/controllers/projects/raw_controller_spec.rb b/spec/controllers/projects/raw_controller_spec.rb
index b7df42168e0..08e2ccf893a 100644
--- a/spec/controllers/projects/raw_controller_spec.rb
+++ b/spec/controllers/projects/raw_controller_spec.rb
@@ -8,10 +8,7 @@ describe Projects::RawController do
let(:id) { 'master/README.md' }
it 'delivers ASCII file' do
- get(:show,
- namespace_id: public_project.namespace.to_param,
- project_id: public_project,
- id: id)
+ get_show(public_project, id)
expect(response).to have_gitlab_http_status(200)
expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8')
@@ -25,10 +22,7 @@ describe Projects::RawController do
let(:id) { 'master/files/images/6049019_460s.jpg' }
it 'sets image content type header' do
- get(:show,
- namespace_id: public_project.namespace.to_param,
- project_id: public_project,
- id: id)
+ get_show(public_project, id)
expect(response).to have_gitlab_http_status(200)
expect(response.header['Content-Type']).to eq('image/jpeg')
@@ -54,21 +48,40 @@ describe Projects::RawController do
it 'serves the file' do
expect(controller).to receive(:send_file).with("#{LfsObjectUploader.root}/91/ef/f75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897", filename: 'lfs_object.iso', disposition: 'attachment')
- get(:show,
- namespace_id: public_project.namespace.to_param,
- project_id: public_project,
- id: id)
+ get_show(public_project, id)
expect(response).to have_gitlab_http_status(200)
end
+
+ context 'and lfs uses object storage' do
+ before do
+ lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png")
+ lfs_object.save!
+ stub_lfs_object_storage
+ lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
+ end
+
+ it 'responds with redirect to file' do
+ get_show(public_project, id)
+
+ expect(response).to have_gitlab_http_status(302)
+ expect(response.location).to include(lfs_object.reload.file.path)
+ end
+
+ it 'sets content disposition' do
+ get_show(public_project, id)
+
+ file_uri = URI.parse(response.location)
+ params = CGI.parse(file_uri.query)
+
+ expect(params["response-content-disposition"].first).to eq 'attachment;filename="lfs_object.iso"'
+ end
+ end
end
context 'when project does not have access' do
it 'does not serve the file' do
- get(:show,
- namespace_id: public_project.namespace.to_param,
- project_id: public_project,
- id: id)
+ get_show(public_project, id)
expect(response).to have_gitlab_http_status(404)
end
@@ -81,10 +94,7 @@ describe Projects::RawController do
end
it 'delivers ASCII file' do
- get(:show,
- namespace_id: public_project.namespace.to_param,
- project_id: public_project,
- id: id)
+ get_show(public_project, id)
expect(response).to have_gitlab_http_status(200)
expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8')
@@ -95,4 +105,10 @@ describe Projects::RawController do
end
end
end
+
+ def get_show(project, id)
+ get(:show, namespace_id: project.namespace.to_param,
+ project_id: project,
+ id: id)
+ end
end
diff --git a/spec/controllers/root_controller_spec.rb b/spec/controllers/root_controller_spec.rb
index b32eb39b1fb..7688538a468 100644
--- a/spec/controllers/root_controller_spec.rb
+++ b/spec/controllers/root_controller_spec.rb
@@ -90,6 +90,30 @@ describe RootController do
end
end
+ context 'who has customized their dashboard setting for assigned issues' do
+ before do
+ user.dashboard = 'issues'
+ end
+
+ it 'redirects to their assigned issues' do
+ get :index
+
+ expect(response).to redirect_to issues_dashboard_path(assignee_id: user.id)
+ end
+ end
+
+ context 'who has customized their dashboard setting for assigned merge requests' do
+ before do
+ user.dashboard = 'merge_requests'
+ end
+
+ it 'redirects to their assigned merge requests' do
+ get :index
+
+ expect(response).to redirect_to merge_requests_dashboard_path(assignee_id: user.id)
+ end
+ end
+
context 'who uses the default dashboard setting' do
it 'renders the default dashboard' do
get :index
diff --git a/spec/factories/appearances.rb b/spec/factories/appearances.rb
index 5f9c57c0c8d..18c7453bd1b 100644
--- a/spec/factories/appearances.rb
+++ b/spec/factories/appearances.rb
@@ -2,8 +2,21 @@
FactoryBot.define do
factory :appearance do
- title "MepMep"
- description "This is my Community Edition instance"
+ title "GitLab Community Edition"
+ description "Open source software to collaborate on code"
new_project_guidelines "Custom project guidelines"
end
+
+ trait :with_logo do
+ logo { fixture_file_upload('spec/fixtures/dk.png') }
+ end
+
+ trait :with_header_logo do
+ header_logo { fixture_file_upload('spec/fixtures/dk.png') }
+ end
+
+ trait :with_logos do
+ with_logo
+ with_header_logo
+ end
end
diff --git a/spec/factories/ci/build_metadata.rb b/spec/factories/ci/build_metadata.rb
new file mode 100644
index 00000000000..66bbd977b88
--- /dev/null
+++ b/spec/factories/ci/build_metadata.rb
@@ -0,0 +1,9 @@
+FactoryBot.define do
+ factory :ci_build_metadata, class: Ci::BuildMetadata do
+ build factory: :ci_build
+
+ after(:build) do |build_metadata, _|
+ build_metadata.project ||= build_metadata.build.project
+ end
+ end
+end
diff --git a/spec/factories/ci/job_artifacts.rb b/spec/factories/ci/job_artifacts.rb
index 8544d54ccaa..3d3287d8168 100644
--- a/spec/factories/ci/job_artifacts.rb
+++ b/spec/factories/ci/job_artifacts.rb
@@ -5,6 +5,10 @@ FactoryBot.define do
job factory: :ci_build
file_type :archive
+ trait :remote_store do
+ file_store JobArtifactUploader::Store::REMOTE
+ end
+
after :build do |artifact|
artifact.project ||= artifact.job.project
end
diff --git a/spec/factories/clusters/clusters.rb b/spec/factories/clusters/clusters.rb
index 20d5580f0c2..98566f907f9 100644
--- a/spec/factories/clusters/clusters.rb
+++ b/spec/factories/clusters/clusters.rb
@@ -32,5 +32,9 @@ FactoryBot.define do
trait :disabled do
enabled false
end
+
+ trait :production_environment do
+ sequence(:environment_scope) { |n| "production#{n}/*" }
+ end
end
end
diff --git a/spec/factories/lfs_objects.rb b/spec/factories/lfs_objects.rb
index caaed4d5246..eaf3a4ed497 100644
--- a/spec/factories/lfs_objects.rb
+++ b/spec/factories/lfs_objects.rb
@@ -15,4 +15,8 @@ FactoryBot.define do
trait :correct_oid do
oid 'b804383982bb89b00e828e3f44c038cc991d3d1768009fc39ba8e2c081b9fb75'
end
+
+ trait :object_storage do
+ file_store { LfsObjectUploader::Store::REMOTE }
+ end
end
diff --git a/spec/factories/redirect_routes.rb b/spec/factories/redirect_routes.rb
index c29c81c5df9..774232d0b34 100644
--- a/spec/factories/redirect_routes.rb
+++ b/spec/factories/redirect_routes.rb
@@ -2,14 +2,5 @@ FactoryBot.define do
factory :redirect_route do
sequence(:path) { |n| "redirect#{n}" }
source factory: :group
- permanent false
-
- trait :permanent do
- permanent true
- end
-
- trait :temporary do
- permanent false
- end
end
end
diff --git a/spec/factories/uploads.rb b/spec/factories/uploads.rb
index ff3a2a76acc..b45f6f30e40 100644
--- a/spec/factories/uploads.rb
+++ b/spec/factories/uploads.rb
@@ -5,6 +5,7 @@ FactoryBot.define do
uploader "AvatarUploader"
mount_point :avatar
secret nil
+ store ObjectStorage::Store::LOCAL
# we should build a mount agnostic upload by default
transient do
@@ -27,6 +28,10 @@ FactoryBot.define do
secret SecureRandom.hex
end
+ trait :object_storage do
+ store ObjectStorage::Store::REMOTE
+ end
+
trait :namespace_upload do
model { build(:group) }
path { File.join(secret, filename) }
diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb
index 34d45aec2fd..c89bc54cad4 100644
--- a/spec/features/admin/admin_settings_spec.rb
+++ b/spec/features/admin/admin_settings_spec.rb
@@ -88,16 +88,38 @@ feature 'Admin updates settings' do
expect(page).to have_content "Application settings saved successfully"
end
- scenario 'Change AutoDevOps settings' do
- check 'Enabled Auto DevOps (Beta) for projects by default'
- fill_in 'Auto devops domain', with: 'domain.com'
- click_button 'Save'
+ scenario 'Change CI/CD settings' do
+ page.within('.as-ci-cd') do
+ check 'Enabled Auto DevOps (Beta) for projects by default'
+ fill_in 'Auto devops domain', with: 'domain.com'
+ click_button 'Save changes'
+ end
expect(Gitlab::CurrentSettings.auto_devops_enabled?).to be true
expect(Gitlab::CurrentSettings.auto_devops_domain).to eq('domain.com')
expect(page).to have_content "Application settings saved successfully"
end
+ scenario 'Change Influx settings' do
+ page.within('.as-influx') do
+ check 'Enable InfluxDB Metrics'
+ click_button 'Save changes'
+ end
+
+ expect(Gitlab::CurrentSettings.metrics_enabled?).to be true
+ expect(page).to have_content "Application settings saved successfully"
+ end
+
+ scenario 'Change Prometheus settings' do
+ page.within('.as-prometheus') do
+ check 'Enable Prometheus Metrics'
+ click_button 'Save changes'
+ end
+
+ expect(Gitlab::CurrentSettings.prometheus_metrics_enabled?).to be true
+ expect(page).to have_content "Application settings saved successfully"
+ end
+
scenario 'Change Slack Notifications Service template settings' do
first(:link, 'Service Templates').click
click_link 'Slack notifications'
diff --git a/spec/features/groups/activity_spec.rb b/spec/features/groups/activity_spec.rb
index d3b25ec3d6c..7bc809b3104 100644
--- a/spec/features/groups/activity_spec.rb
+++ b/spec/features/groups/activity_spec.rb
@@ -8,11 +8,30 @@ feature 'Group activity page' do
context 'when signed in' do
before do
sign_in(user)
- visit path
end
- it_behaves_like "it has an RSS button with current_user's RSS token"
- it_behaves_like "an autodiscoverable RSS feed with current_user's RSS token"
+ describe 'RSS' do
+ before do
+ visit path
+ end
+
+ it_behaves_like "it has an RSS button with current_user's RSS token"
+ it_behaves_like "an autodiscoverable RSS feed with current_user's RSS token"
+ end
+
+ context 'when project is in the group', :js do
+ let(:project) { create(:project, :public, namespace: group) }
+
+ before do
+ project.add_master(user)
+
+ visit path
+ end
+
+ it 'renders user joined to project event' do
+ expect(page).to have_content 'joined project'
+ end
+ end
end
context 'when signed out' do
diff --git a/spec/features/groups/group_settings_spec.rb b/spec/features/groups/group_settings_spec.rb
index b83bad3befb..1ce30015e81 100644
--- a/spec/features/groups/group_settings_spec.rb
+++ b/spec/features/groups/group_settings_spec.rb
@@ -76,6 +76,27 @@ feature 'Edit group settings' do
end
end
end
+
+ describe 'edit group avatar' do
+ before do
+ visit edit_group_path(group)
+
+ attach_file(:group_avatar, Rails.root.join('spec', 'fixtures', 'banana_sample.gif'))
+
+ expect { click_button 'Save group' }.to change { group.reload.avatar? }.to(true)
+ end
+
+ it 'uploads new group avatar' do
+ expect(group.avatar).to be_instance_of AvatarUploader
+ expect(group.avatar.url).to eq "/uploads/-/system/group/avatar/#{group.id}/banana_sample.gif"
+ expect(page).to have_link('Remove avatar')
+ end
+
+ it 'removes group avatar' do
+ expect { click_link 'Remove avatar' }.to change { group.reload.avatar? }.to(false)
+ expect(page).not_to have_link('Remove avatar')
+ end
+ end
end
def update_path(new_group_path)
diff --git a/spec/features/groups/issues_spec.rb b/spec/features/groups/issues_spec.rb
index 450bc0ff8cf..90bf7ba49f6 100644
--- a/spec/features/groups/issues_spec.rb
+++ b/spec/features/groups/issues_spec.rb
@@ -3,8 +3,11 @@ require 'spec_helper'
feature 'Group issues page' do
include FilteredSearchHelpers
+ let(:group) { create(:group) }
+ let(:project) { create(:project, :public, group: group)}
+ let(:path) { issues_group_path(group) }
+
context 'with shared examples' do
- let(:path) { issues_group_path(group) }
let(:issuable) { create(:issue, project: project, title: "this is my created issuable")}
include_examples 'project features apply to issuables', Issue
@@ -31,7 +34,6 @@ feature 'Group issues page' do
let(:access_level) { ProjectFeature::ENABLED }
let(:user) { user_in_group }
let(:user2) { user_outside_group }
- let(:path) { issues_group_path(group) }
it 'filters by only group users' do
filtered_search.set('assignee:')
@@ -43,9 +45,7 @@ feature 'Group issues page' do
end
context 'issues list', :nested_groups do
- let(:group) { create(:group)}
let(:subgroup) { create(:group, parent: group) }
- let(:project) { create(:project, :public, group: group)}
let(:subgroup_project) { create(:project, :public, group: subgroup)}
let!(:issue) { create(:issue, project: project, title: 'root group issue') }
let!(:subgroup_issue) { create(:issue, project: subgroup_project, title: 'subgroup issue') }
@@ -59,5 +59,17 @@ feature 'Group issues page' do
expect(page).to have_content('subgroup issue')
end
end
+
+ context 'when project is archived' do
+ before do
+ project.archive!
+ end
+
+ it 'does not render issue' do
+ visit path
+
+ expect(page).not_to have_content issue.title[0..80]
+ end
+ end
end
end
diff --git a/spec/features/groups/merge_requests_spec.rb b/spec/features/groups/merge_requests_spec.rb
index 7ce6a61d50c..672ae785c2d 100644
--- a/spec/features/groups/merge_requests_spec.rb
+++ b/spec/features/groups/merge_requests_spec.rb
@@ -5,14 +5,14 @@ feature 'Group merge requests page' do
let(:path) { merge_requests_group_path(group) }
let(:issuable) { create(:merge_request, source_project: project, target_project: project, title: 'this is my created issuable') }
+ let(:access_level) { ProjectFeature::ENABLED }
+ let(:user) { user_in_group }
include_examples 'project features apply to issuables', MergeRequest
context 'archived issuable' do
let(:project_archived) { create(:project, :archived, :merge_requests_enabled, :repository, group: group) }
let(:issuable_archived) { create(:merge_request, source_project: project_archived, target_project: project_archived, title: 'issuable of an archived project') }
- let(:access_level) { ProjectFeature::ENABLED }
- let(:user) { user_in_group }
before do
issuable_archived
@@ -36,9 +36,17 @@ feature 'Group merge requests page' do
end
end
+ context 'when merge request is assigned to user' do
+ before do
+ issuable.update!(assignee: user)
+
+ visit path
+ end
+
+ it { expect(page).to have_content issuable.title[0..80] }
+ end
+
context 'group filtered search', :js do
- let(:access_level) { ProjectFeature::ENABLED }
- let(:user) { user_in_group }
let(:user2) { user_outside_group }
it 'filters by assignee only group users' do
diff --git a/spec/features/groups/show_spec.rb b/spec/features/groups/show_spec.rb
index ceccc471405..4ffadbbcd35 100644
--- a/spec/features/groups/show_spec.rb
+++ b/spec/features/groups/show_spec.rb
@@ -15,14 +15,44 @@ feature 'Group show page' do
end
it_behaves_like "an autodiscoverable RSS feed with current_user's RSS token"
+
+ context 'when group does not exist' do
+ let(:path) { group_path('not-exist') }
+
+ it { expect(status_code).to eq(404) }
+ end
end
context 'when signed out' do
- before do
- visit path
+ describe 'RSS' do
+ before do
+ visit path
+ end
+
+ it_behaves_like "an autodiscoverable RSS feed without an RSS token"
+ end
+
+ context 'when group has a public project', :js do
+ let!(:project) { create(:project, :public, namespace: group) }
+
+ it 'renders public project' do
+ visit path
+
+ expect(page).to have_link group.name
+ expect(page).to have_link project.name
+ end
end
- it_behaves_like "an autodiscoverable RSS feed without an RSS token"
+ context 'when group has a private project', :js do
+ let!(:project) { create(:project, :private, namespace: group) }
+
+ it 'does not render private project' do
+ visit path
+
+ expect(page).to have_link group.name
+ expect(page).not_to have_link project.name
+ end
+ end
end
context 'subgroup support' do
diff --git a/spec/features/groups/user_browse_projects_group_page_spec.rb b/spec/features/groups/user_browse_projects_group_page_spec.rb
new file mode 100644
index 00000000000..e81c3180e78
--- /dev/null
+++ b/spec/features/groups/user_browse_projects_group_page_spec.rb
@@ -0,0 +1,29 @@
+require 'rails_helper'
+
+describe 'User browse group projects page' do
+ let(:user) { create :user }
+ let(:group) { create :group }
+
+ context 'when user is owner' do
+ before do
+ group.add_owner(user)
+ end
+
+ context 'when user signed in' do
+ before do
+ sign_in(user)
+ end
+
+ context 'when group has archived project', :js do
+ let!(:project) { create :project, :archived, namespace: group }
+
+ it 'renders projects list' do
+ visit projects_group_path(group)
+
+ expect(page).to have_link project.name
+ expect(page).to have_xpath("//span[@class='label label-warning']", text: 'archived')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/features/ci_lint_spec.rb b/spec/features/projects/ci/lint_spec.rb
index 220b934154e..313950072e7 100644
--- a/spec/features/ci_lint_spec.rb
+++ b/spec/features/projects/ci/lint_spec.rb
@@ -1,10 +1,14 @@
require 'spec_helper'
describe 'CI Lint', :js do
+ let(:project) { create(:project, :repository) }
+ let(:user) { create(:user) }
+
before do
- sign_in(create(:user))
+ project.add_developer(user)
+ sign_in(user)
- visit ci_lint_path
+ visit project_ci_lint_path(project)
find('#ci-editor')
execute_script("ace.edit('ci-editor').setValue(#{yaml_content.to_json});")
diff --git a/spec/features/projects/hook_logs/user_reads_log_spec.rb b/spec/features/projects/hook_logs/user_reads_log_spec.rb
new file mode 100644
index 00000000000..18e975fa653
--- /dev/null
+++ b/spec/features/projects/hook_logs/user_reads_log_spec.rb
@@ -0,0 +1,21 @@
+require 'spec_helper'
+
+feature 'Hook logs' do
+ given(:web_hook_log) { create(:web_hook_log, response_body: '<script>') }
+ given(:project) { web_hook_log.web_hook.project }
+ given(:user) { create(:user) }
+
+ before do
+ project.add_master(user)
+
+ sign_in(user)
+ end
+
+ scenario 'user reads log without getting XSS' do
+ visit(
+ project_hook_hook_log_path(
+ project, web_hook_log.web_hook, web_hook_log))
+
+ expect(page).to have_content('<script>')
+ end
+end
diff --git a/spec/finders/clusters_finder_spec.rb b/spec/finders/clusters_finder_spec.rb
index c10efac2432..da529e0670f 100644
--- a/spec/finders/clusters_finder_spec.rb
+++ b/spec/finders/clusters_finder_spec.rb
@@ -6,7 +6,7 @@ describe ClustersFinder do
describe '#execute' do
let(:enabled_cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
- let(:disabled_cluster) { create(:cluster, :disabled, :provided_by_gcp, projects: [project]) }
+ let(:disabled_cluster) { create(:cluster, :disabled, :provided_by_gcp, :production_environment, projects: [project]) }
subject { described_class.new(project, user, scope).execute }
diff --git a/spec/helpers/page_layout_helper_spec.rb b/spec/helpers/page_layout_helper_spec.rb
index baf927a9acc..b77114a8152 100644
--- a/spec/helpers/page_layout_helper_spec.rb
+++ b/spec/helpers/page_layout_helper_spec.rb
@@ -50,6 +50,11 @@ describe PageLayoutHelper do
allow(Rails).to receive(:env).and_return(ActiveSupport::StringInquirer.new('development'))
expect(helper.favicon).to eq 'favicon-blue.ico'
end
+
+ it 'has yellow favicon for canary' do
+ stub_env('CANARY', 'true')
+ expect(helper.favicon).to eq 'favicon-yellow.ico'
+ end
end
describe 'page_image' do
diff --git a/spec/helpers/preferences_helper_spec.rb b/spec/helpers/preferences_helper_spec.rb
index e2a0c4322ff..c9d2ec8a4ae 100644
--- a/spec/helpers/preferences_helper_spec.rb
+++ b/spec/helpers/preferences_helper_spec.rb
@@ -21,7 +21,9 @@ describe PreferencesHelper do
["Your Projects' Activity", 'project_activity'],
["Starred Projects' Activity", 'starred_project_activity'],
["Your Groups", 'groups'],
- ["Your Todos", 'todos']
+ ["Your Todos", 'todos'],
+ ["Assigned Issues", 'issues'],
+ ["Assigned Merge Requests", 'merge_requests']
]
end
end
diff --git a/spec/initializers/fog_google_https_private_urls_spec.rb b/spec/initializers/fog_google_https_private_urls_spec.rb
new file mode 100644
index 00000000000..de3c157ab7b
--- /dev/null
+++ b/spec/initializers/fog_google_https_private_urls_spec.rb
@@ -0,0 +1,24 @@
+require 'spec_helper'
+
+describe 'Fog::Storage::GoogleXML::File' do
+ let(:storage) do
+ Fog.mock!
+ Fog::Storage.new({
+ google_storage_access_key_id: "asdf",
+ google_storage_secret_access_key: "asdf",
+ provider: "Google"
+ })
+ end
+
+ let(:file) do
+ directory = storage.directories.create(key: 'data')
+ directory.files.create(
+ body: 'Hello World!',
+ key: 'hello_world.txt'
+ )
+ end
+
+ it 'delegates to #get_https_url' do
+ expect(file.url(Time.now)).to start_with("https://")
+ end
+end
diff --git a/spec/javascripts/fixtures/gl_dropdown.html.haml b/spec/javascripts/fixtures/gl_dropdown.html.haml
index a20390c08ee..43d57c2c4dc 100644
--- a/spec/javascripts/fixtures/gl_dropdown.html.haml
+++ b/spec/javascripts/fixtures/gl_dropdown.html.haml
@@ -1,7 +1,8 @@
%div
.dropdown.inline
%button#js-project-dropdown.dropdown-menu-toggle{type: 'button', data: {toggle: 'dropdown'}}
- Projects
+ .dropdown-toggle-text
+ Projects
%i.fa.fa-chevron-down.dropdown-toggle-caret.js-projects-dropdown-toggle
.dropdown-menu.dropdown-select.dropdown-menu-selectable
.dropdown-title
diff --git a/spec/javascripts/fixtures/projects.rb b/spec/javascripts/fixtures/projects.rb
index b344b389241..e8865b04874 100644
--- a/spec/javascripts/fixtures/projects.rb
+++ b/spec/javascripts/fixtures/projects.rb
@@ -17,8 +17,6 @@ describe 'Projects (JavaScript fixtures)', type: :controller do
end
before do
- # EE-specific start
- # EE specific end
project.add_master(admin)
sign_in(admin)
end
diff --git a/spec/javascripts/gl_dropdown_spec.js b/spec/javascripts/gl_dropdown_spec.js
index 0e4a7017406..5393502196e 100644
--- a/spec/javascripts/gl_dropdown_spec.js
+++ b/spec/javascripts/gl_dropdown_spec.js
@@ -256,4 +256,29 @@ describe('glDropdown', function describeDropdown() {
});
});
});
+
+ it('should keep selected item after selecting a second time', () => {
+ const options = {
+ isSelectable(item, $el) {
+ return !$el.hasClass('is-active');
+ },
+ toggleLabel(item) {
+ return item && item.id;
+ },
+ };
+ initDropDown.call(this, false, false, options);
+ const $item = $(`${ITEM_SELECTOR}:first() a`, this.$dropdownMenuElement);
+
+ // select item the first time
+ this.dropdownButtonElement.click();
+ $item.click();
+ expect($item).toHaveClass('is-active');
+ // select item the second time
+ this.dropdownButtonElement.click();
+ $item.click();
+ expect($item).toHaveClass('is-active');
+
+ expect($('.dropdown-toggle-text')).toHaveText(this.projectsData[0].id.toString());
+ });
});
+
diff --git a/spec/javascripts/helpers/vue_component_helper.js b/spec/javascripts/helpers/vue_component_helper.js
new file mode 100644
index 00000000000..257c9f5526a
--- /dev/null
+++ b/spec/javascripts/helpers/vue_component_helper.js
@@ -0,0 +1,3 @@
+export default function removeBreakLine (data) {
+ return data.replace(/\r?\n|\r/g, ' ');
+}
diff --git a/spec/javascripts/ide/lib/editor_spec.js b/spec/javascripts/ide/lib/editor_spec.js
index 3c48d94d17a..2ccd87de1a7 100644
--- a/spec/javascripts/ide/lib/editor_spec.js
+++ b/spec/javascripts/ide/lib/editor_spec.js
@@ -64,22 +64,20 @@ describe('Multi-file editor library', () => {
instance.createDiffInstance(holder);
- expect(instance.monaco.editor.createDiffEditor).toHaveBeenCalledWith(
- holder,
- {
- model: null,
- contextmenu: true,
- minimap: {
- enabled: false,
- },
- readOnly: true,
- scrollBeyondLastLine: false,
- quickSuggestions: false,
- occurrencesHighlight: false,
- renderLineHighlight: 'none',
- hideCursorInOverviewRuler: true,
+ expect(instance.monaco.editor.createDiffEditor).toHaveBeenCalledWith(holder, {
+ model: null,
+ contextmenu: true,
+ minimap: {
+ enabled: false,
},
- );
+ readOnly: true,
+ scrollBeyondLastLine: false,
+ quickSuggestions: false,
+ occurrencesHighlight: false,
+ renderLineHighlight: 'none',
+ hideCursorInOverviewRuler: true,
+ wordWrap: 'bounded',
+ });
});
});
@@ -117,9 +115,7 @@ describe('Multi-file editor library', () => {
});
it('sets original & modified when diff editor', () => {
- spyOn(instance.instance, 'getEditorType').and.returnValue(
- 'vs.editor.IDiffEditor',
- );
+ spyOn(instance.instance, 'getEditorType').and.returnValue('vs.editor.IDiffEditor');
spyOn(instance.instance, 'setModel');
instance.attachModel(model);
@@ -135,9 +131,7 @@ describe('Multi-file editor library', () => {
instance.attachModel(model);
- expect(instance.dirtyDiffController.attachModel).toHaveBeenCalledWith(
- model,
- );
+ expect(instance.dirtyDiffController.attachModel).toHaveBeenCalledWith(model);
});
it('re-decorates with the dirty diff controller', () => {
@@ -145,9 +139,7 @@ describe('Multi-file editor library', () => {
instance.attachModel(model);
- expect(instance.dirtyDiffController.reDecorate).toHaveBeenCalledWith(
- model,
- );
+ expect(instance.dirtyDiffController.reDecorate).toHaveBeenCalledWith(model);
});
});
diff --git a/spec/javascripts/jobs/mock_data.js b/spec/javascripts/jobs/mock_data.js
index 43589d54be4..25ca8eb6c0b 100644
--- a/spec/javascripts/jobs/mock_data.js
+++ b/spec/javascripts/jobs/mock_data.js
@@ -115,6 +115,10 @@ export default {
commit_path: '/root/ci-mock/commit/c58647773a6b5faf066d4ad6ff2c9fbba5f180f6',
},
},
+ metadata: {
+ timeout_human_readable: '1m 40s',
+ timeout_source: 'runner',
+ },
merge_request: {
iid: 2,
path: '/root/ci-mock/merge_requests/2',
diff --git a/spec/javascripts/jobs/sidebar_detail_row_spec.js b/spec/javascripts/jobs/sidebar_detail_row_spec.js
index 3ac65709c4a..e6bfb0c4adc 100644
--- a/spec/javascripts/jobs/sidebar_detail_row_spec.js
+++ b/spec/javascripts/jobs/sidebar_detail_row_spec.js
@@ -37,4 +37,25 @@ describe('Sidebar detail row', () => {
vm.$el.textContent.replace(/\s+/g, ' ').trim(),
).toEqual('this is the title: this is the value');
});
+
+ describe('when helpUrl not provided', () => {
+ it('should not render help', () => {
+ expect(vm.$el.querySelector('.help-button')).toBeNull();
+ });
+ });
+
+ describe('when helpUrl provided', () => {
+ beforeEach(() => {
+ vm = new SidebarDetailRow({
+ propsData: {
+ helpUrl: 'help url',
+ value: 'foo',
+ },
+ }).$mount();
+ });
+
+ it('should render help', () => {
+ expect(vm.$el.querySelector('.help-button a').getAttribute('href')).toEqual('help url');
+ });
+ });
});
diff --git a/spec/javascripts/jobs/sidebar_details_block_spec.js b/spec/javascripts/jobs/sidebar_details_block_spec.js
index 95532ef5382..602dae514b1 100644
--- a/spec/javascripts/jobs/sidebar_details_block_spec.js
+++ b/spec/javascripts/jobs/sidebar_details_block_spec.js
@@ -96,6 +96,12 @@ describe('Sidebar details block', () => {
).toEqual('Runner: #1');
});
+ it('should render timeout information', () => {
+ expect(
+ trimWhitespace(vm.$el.querySelector('.js-job-timeout')),
+ ).toEqual('Timeout: 1m 40s (from runner)');
+ });
+
it('should render coverage', () => {
expect(
trimWhitespace(vm.$el.querySelector('.js-job-coverage')),
diff --git a/spec/javascripts/notes/components/noteable_discussion_spec.js b/spec/javascripts/notes/components/noteable_discussion_spec.js
index 19504e4f7c8..cda550760fe 100644
--- a/spec/javascripts/notes/components/noteable_discussion_spec.js
+++ b/spec/javascripts/notes/components/noteable_discussion_spec.js
@@ -25,26 +25,34 @@ describe('issue_discussion component', () => {
});
it('should render user avatar', () => {
- expect(vm.$el.querySelector('.user-avatar-link')).toBeDefined();
+ expect(vm.$el.querySelector('.user-avatar-link')).not.toBeNull();
});
it('should render discussion header', () => {
- expect(vm.$el.querySelector('.discussion-header')).toBeDefined();
+ expect(vm.$el.querySelector('.discussion-header')).not.toBeNull();
expect(vm.$el.querySelector('.notes').children.length).toEqual(discussionMock.notes.length);
});
describe('actions', () => {
it('should render reply button', () => {
- expect(vm.$el.querySelector('.js-vue-discussion-reply').textContent.trim()).toEqual('Reply...');
+ expect(vm.$el.querySelector('.js-vue-discussion-reply').textContent.trim()).toEqual(
+ 'Reply...',
+ );
});
- it('should toggle reply form', (done) => {
+ it('should toggle reply form', done => {
vm.$el.querySelector('.js-vue-discussion-reply').click();
Vue.nextTick(() => {
- expect(vm.$refs.noteForm).toBeDefined();
+ expect(vm.$refs.noteForm).not.toBeNull();
expect(vm.isReplying).toEqual(true);
done();
});
});
+
+ it('does not render jump to discussion button', () => {
+ expect(
+ vm.$el.querySelector('*[data-original-title="Jump to next unresolved discussion"]'),
+ ).toBeNull();
+ });
});
});
diff --git a/spec/javascripts/pages/labels/components/promote_label_modal_spec.js b/spec/javascripts/pages/labels/components/promote_label_modal_spec.js
index 080158a8ee0..a24f8204fe1 100644
--- a/spec/javascripts/pages/labels/components/promote_label_modal_spec.js
+++ b/spec/javascripts/pages/labels/components/promote_label_modal_spec.js
@@ -12,6 +12,7 @@ describe('Promote label modal', () => {
labelColor: '#5cb85c',
labelTextColor: '#ffffff',
url: `${gl.TEST_HOST}/dummy/promote/labels`,
+ groupName: 'group',
};
describe('Modal title and description', () => {
@@ -24,7 +25,7 @@ describe('Promote label modal', () => {
});
it('contains the proper description', () => {
- expect(vm.text).toContain('Promoting this label will make it available for all projects inside the group');
+ expect(vm.text).toContain(`Promoting ${labelMockData.labelTitle} will make it available for all projects inside ${labelMockData.groupName}`);
});
it('contains a label span with the color', () => {
diff --git a/spec/javascripts/pages/milestones/shared/components/promote_milestone_modal_spec.js b/spec/javascripts/pages/milestones/shared/components/promote_milestone_modal_spec.js
index 22956929e7b..8b220423637 100644
--- a/spec/javascripts/pages/milestones/shared/components/promote_milestone_modal_spec.js
+++ b/spec/javascripts/pages/milestones/shared/components/promote_milestone_modal_spec.js
@@ -10,6 +10,7 @@ describe('Promote milestone modal', () => {
const milestoneMockData = {
milestoneTitle: 'v1.0',
url: `${gl.TEST_HOST}/dummy/promote/milestones`,
+ groupName: 'group',
};
describe('Modal title and description', () => {
@@ -22,7 +23,7 @@ describe('Promote milestone modal', () => {
});
it('contains the proper description', () => {
- expect(vm.text).toContain('Promoting this milestone will make it available for all projects inside the group.');
+ expect(vm.text).toContain(`Promoting ${milestoneMockData.milestoneTitle} will make it available for all projects inside ${milestoneMockData.groupName}.`);
});
it('contains the correct title', () => {
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_conflicts_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_conflicts_spec.js
index 5323523abc0..fcbd8169bc7 100644
--- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_conflicts_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_conflicts_spec.js
@@ -1,6 +1,7 @@
import Vue from 'vue';
import conflictsComponent from '~/vue_merge_request_widget/components/states/mr_widget_conflicts.vue';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
+import removeBreakLine from 'spec/helpers/vue_component_helper';
describe('MRWidgetConflicts', () => {
let Component;
@@ -78,8 +79,9 @@ describe('MRWidgetConflicts', () => {
});
it('should tell you to rebase locally', () => {
- expect(vm.$el.textContent.trim().replace(/\s\s+/g, ' ')).toContain('Fast-forward merge is not possible.');
- expect(vm.$el.textContent.trim().replace(/\s\s+/g, ' ')).toContain('To merge this request, first rebase locally');
+ expect(
+ removeBreakLine(vm.$el.textContent).trim(),
+ ).toContain('Fast-forward merge is not possible. To merge this request, first rebase locally.');
});
});
});
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js
index baacbc03fb1..894dbe3382f 100644
--- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js
@@ -1,6 +1,7 @@
import Vue from 'vue';
import pipelineBlockedComponent from '~/vue_merge_request_widget/components/states/mr_widget_pipeline_blocked.vue';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
+import removeBreakLine from 'spec/helpers/vue_component_helper';
describe('MRWidgetPipelineBlocked', () => {
let vm;
@@ -18,6 +19,8 @@ describe('MRWidgetPipelineBlocked', () => {
});
it('renders information text', () => {
- expect(vm.$el.textContent.trim().replace(/[\r\n]+/g, ' ')).toContain('Pipeline blocked. The pipeline for this merge request requires a manual action to proceed');
+ expect(
+ removeBreakLine(vm.$el.textContent).trim(),
+ ).toContain('Pipeline blocked. The pipeline for this merge request requires a manual action to proceed');
});
});
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_sha_mismatch_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_sha_mismatch_spec.js
index 25684861724..b02af94d03a 100644
--- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_sha_mismatch_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_sha_mismatch_spec.js
@@ -1,17 +1,25 @@
import Vue from 'vue';
import ShaMismatch from '~/vue_merge_request_widget/components/states/sha_mismatch.vue';
+import mountComponent from 'spec/helpers/vue_mount_component_helper';
+import removeBreakLine from 'spec/helpers/vue_component_helper';
describe('ShaMismatch', () => {
- describe('template', () => {
+ let vm;
+
+ beforeEach(() => {
const Component = Vue.extend(ShaMismatch);
- const vm = new Component({
- el: document.createElement('div'),
- });
- it('should have correct elements', () => {
- expect(vm.$el.classList.contains('mr-widget-body')).toBeTruthy();
- expect(vm.$el.querySelector('button').getAttribute('disabled')).toBeTruthy();
- expect(vm.$el.innerText).toContain('The source branch HEAD has recently changed.');
- expect(vm.$el.innerText).toContain('Please reload the page and review the changes before merging.');
- });
+ vm = mountComponent(Component);
+ });
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ it('should render information message', () => {
+ expect(vm.$el.querySelector('button').disabled).toEqual(true);
+
+ expect(
+ removeBreakLine(vm.$el.textContent).trim(),
+ ).toContain('The source branch HEAD has recently changed. Please reload the page and review the changes before merging');
});
});
diff --git a/spec/lib/backup/manager_spec.rb b/spec/lib/backup/manager_spec.rb
index 5100f5737c2..84688845fa5 100644
--- a/spec/lib/backup/manager_spec.rb
+++ b/spec/lib/backup/manager_spec.rb
@@ -278,6 +278,10 @@ describe Backup::Manager do
connection.directories.create(key: Gitlab.config.backup.upload.remote_directory)
end
+ after do
+ Fog.unmock!
+ end
+
context 'target path' do
it 'uses the tar filename by default' do
expect_any_instance_of(Fog::Collection).to receive(:create)
diff --git a/spec/lib/banzai/filter/autolink_filter_spec.rb b/spec/lib/banzai/filter/autolink_filter_spec.rb
index cbb0089bde7..a50329473ad 100644
--- a/spec/lib/banzai/filter/autolink_filter_spec.rb
+++ b/spec/lib/banzai/filter/autolink_filter_spec.rb
@@ -167,6 +167,15 @@ describe Banzai::Filter::AutolinkFilter do
expect(actual).to eq(expected_complicated_link)
end
+ it 'does not double-encode HTML entities' do
+ encoded_link = "#{link}?foo=bar&amp;baz=quux"
+ expected_encoded_link = %Q{<a href="#{encoded_link}">#{encoded_link}</a>}
+ actual = unescape(filter(encoded_link).to_html)
+
+ expect(actual).to eq(Rinku.auto_link(encoded_link))
+ expect(actual).to eq(expected_encoded_link)
+ end
+
it 'does not include trailing HTML entities' do
doc = filter("See &lt;&lt;&lt;#{link}&gt;&gt;&gt;")
diff --git a/spec/lib/gitlab/asciidoc_spec.rb b/spec/lib/gitlab/asciidoc_spec.rb
index 2a0e19ae796..e1782cff81a 100644
--- a/spec/lib/gitlab/asciidoc_spec.rb
+++ b/spec/lib/gitlab/asciidoc_spec.rb
@@ -48,7 +48,7 @@ module Gitlab
},
'images' => {
input: 'image:https://localhost.com/image.png[Alt text" onerror="alert(7)]',
- output: "<img src=\"https://localhost.com/image.png\" alt=\"Alt text\">"
+ output: "<div>\n<p><span><img src=\"https://localhost.com/image.png\" alt='Alt text\" onerror=\"alert(7)'></span></p>\n</div>"
},
'pre' => {
input: '```mypre"><script>alert(3)</script>',
diff --git a/spec/lib/gitlab/background_migration/migrate_build_stage_spec.rb b/spec/lib/gitlab/background_migration/migrate_build_stage_spec.rb
index e112e9e9e3d..5ce84c61042 100644
--- a/spec/lib/gitlab/background_migration/migrate_build_stage_spec.rb
+++ b/spec/lib/gitlab/background_migration/migrate_build_stage_spec.rb
@@ -51,4 +51,20 @@ describe Gitlab::BackgroundMigration::MigrateBuildStage, :migration, schema: 201
expect { described_class.new.perform(1, 6) }
.to raise_error ActiveRecord::RecordNotUnique
end
+
+ context 'when invalid class can be loaded due to single table inheritance' do
+ let(:commit_status) do
+ jobs.create!(id: 7, commit_id: 1, project_id: 123, stage_idx: 4,
+ stage: 'post-deploy', status: :failed)
+ end
+
+ before do
+ commit_status.update_column(:type, 'SomeClass')
+ end
+
+    it 'ignores single table inheritance type' do
+ expect { described_class.new.perform(1, 7) }.not_to raise_error
+ expect(jobs.find(7)).to have_attributes(stage_id: (a_value > 0))
+ end
+ end
end
diff --git a/spec/lib/gitlab/checks/project_moved_spec.rb b/spec/lib/gitlab/checks/project_moved_spec.rb
index e263d29656c..8e9386b1ba1 100644
--- a/spec/lib/gitlab/checks/project_moved_spec.rb
+++ b/spec/lib/gitlab/checks/project_moved_spec.rb
@@ -44,44 +44,17 @@ describe Gitlab::Checks::ProjectMoved, :clean_gitlab_redis_shared_state do
end
describe '#message' do
- context 'when the push is rejected' do
- it 'returns a redirect message telling the user to try again' do
- project_moved = described_class.new(project, user, 'http', 'foo/bar')
- message = "Project 'foo/bar' was moved to '#{project.full_path}'." +
- "\n\nPlease update your Git remote:" +
- "\n\n git remote set-url origin #{project.http_url_to_repo} and try again.\n"
+ it 'returns a redirect message' do
+ project_moved = described_class.new(project, user, 'http', 'foo/bar')
+ message = <<~MSG
+ Project 'foo/bar' was moved to '#{project.full_path}'.
- expect(project_moved.message(rejected: true)).to eq(message)
- end
- end
+ Please update your Git remote:
- context 'when the push is not rejected' do
- it 'returns a redirect message' do
- project_moved = described_class.new(project, user, 'http', 'foo/bar')
- message = "Project 'foo/bar' was moved to '#{project.full_path}'." +
- "\n\nPlease update your Git remote:" +
- "\n\n git remote set-url origin #{project.http_url_to_repo}\n"
+ git remote set-url origin #{project.http_url_to_repo}
+ MSG
- expect(project_moved.message).to eq(message)
- end
- end
- end
-
- describe '#permanent_redirect?' do
- context 'with a permanent RedirectRoute' do
- it 'returns true' do
- project.route.create_redirect('foo/bar', permanent: true)
- project_moved = described_class.new(project, user, 'http', 'foo/bar')
- expect(project_moved.permanent_redirect?).to be_truthy
- end
- end
-
- context 'without a permanent RedirectRoute' do
- it 'returns false' do
- project.route.create_redirect('foo/bar')
- project_moved = described_class.new(project, user, 'http', 'foo/bar')
- expect(project_moved.permanent_redirect?).to be_falsy
- end
+ expect(project_moved.message).to eq(message)
end
end
end
diff --git a/spec/lib/gitlab/ci/build/step_spec.rb b/spec/lib/gitlab/ci/build/step_spec.rb
index 5a21282712a..cce4efaa069 100644
--- a/spec/lib/gitlab/ci/build/step_spec.rb
+++ b/spec/lib/gitlab/ci/build/step_spec.rb
@@ -5,10 +5,14 @@ describe Gitlab::Ci::Build::Step do
shared_examples 'has correct script' do
subject { described_class.from_commands(job) }
+ before do
+ job.run!
+ end
+
it 'fabricates an object' do
expect(subject.name).to eq(:script)
expect(subject.script).to eq(script)
- expect(subject.timeout).to eq(job.timeout)
+ expect(subject.timeout).to eq(job.metadata_timeout)
expect(subject.when).to eq('on_success')
expect(subject.allow_failure).to be_falsey
end
@@ -47,6 +51,10 @@ describe Gitlab::Ci::Build::Step do
subject { described_class.from_after_script(job) }
+ before do
+ job.run!
+ end
+
context 'when after_script is empty' do
it 'does not fabricate an object' do
is_expected.to be_nil
@@ -59,7 +67,7 @@ describe Gitlab::Ci::Build::Step do
it 'fabricates an object' do
expect(subject.name).to eq(:after_script)
expect(subject.script).to eq(['ls -la', 'date'])
- expect(subject.timeout).to eq(job.timeout)
+ expect(subject.timeout).to eq(job.metadata_timeout)
expect(subject.when).to eq('always')
expect(subject.allow_failure).to be_truthy
end
diff --git a/spec/lib/gitlab/ci/trace/http_io_spec.rb b/spec/lib/gitlab/ci/trace/http_io_spec.rb
new file mode 100644
index 00000000000..5474e2f518c
--- /dev/null
+++ b/spec/lib/gitlab/ci/trace/http_io_spec.rb
@@ -0,0 +1,315 @@
+require 'spec_helper'
+
+describe Gitlab::Ci::Trace::HttpIO do
+ include HttpIOHelpers
+
+ let(:http_io) { described_class.new(url, size) }
+ let(:url) { remote_trace_url }
+ let(:size) { remote_trace_size }
+
+ describe '#close' do
+ subject { http_io.close }
+
+ it { is_expected.to be_nil }
+ end
+
+ describe '#binmode' do
+ subject { http_io.binmode }
+
+ it { is_expected.to be_nil }
+ end
+
+ describe '#binmode?' do
+ subject { http_io.binmode? }
+
+ it { is_expected.to be_truthy }
+ end
+
+ describe '#path' do
+ subject { http_io.path }
+
+ it { is_expected.to be_nil }
+ end
+
+ describe '#url' do
+ subject { http_io.url }
+
+ it { is_expected.to eq(url) }
+ end
+
+ describe '#seek' do
+ subject { http_io.seek(pos, where) }
+
+    context 'when it moves pos to the end of the file' do
+ let(:pos) { 0 }
+ let(:where) { IO::SEEK_END }
+
+ it { is_expected.to eq(size) }
+ end
+
+    context 'when it moves pos to the middle of the file' do
+ let(:pos) { size / 2 }
+ let(:where) { IO::SEEK_SET }
+
+ it { is_expected.to eq(size / 2) }
+ end
+
+    context 'when it moves pos around' do
+ it 'matches the result' do
+ expect(http_io.seek(0)).to eq(0)
+ expect(http_io.seek(100, IO::SEEK_CUR)).to eq(100)
+ expect { http_io.seek(size + 1, IO::SEEK_CUR) }.to raise_error('new position is outside of file')
+ end
+ end
+ end
+
+ describe '#eof?' do
+ subject { http_io.eof? }
+
+    context 'when current pos is at the end of the file' do
+ before do
+ http_io.seek(size, IO::SEEK_SET)
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+    context 'when current pos is not at the end of the file' do
+ before do
+ http_io.seek(0, IO::SEEK_SET)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ describe '#each_line' do
+ subject { http_io.each_line }
+
+ let(:string_io) { StringIO.new(remote_trace_body) }
+
+ before do
+ stub_remote_trace_206
+ end
+
+ it 'yields lines' do
+ expect { |b| http_io.each_line(&b) }.to yield_successive_args(*string_io.each_line.to_a)
+ end
+
+    context 'when the bucket is on GCS' do
+ context 'when BUFFER_SIZE is larger than file size' do
+ before do
+ stub_remote_trace_200
+ set_larger_buffer_size_than(size)
+ end
+
+ it 'calls get_chunk only once' do
+ expect_any_instance_of(Net::HTTP).to receive(:request).once.and_call_original
+
+ http_io.each_line { |line| }
+ end
+ end
+ end
+ end
+
+ describe '#read' do
+ subject { http_io.read(length) }
+
+    context 'when there is no network issue' do
+ before do
+ stub_remote_trace_206
+ end
+
+      context 'when reading the whole size' do
+ let(:length) { nil }
+
+ context 'when BUFFER_SIZE is smaller than file size' do
+ before do
+ set_smaller_buffer_size_than(size)
+ end
+
+ it 'reads a trace' do
+ is_expected.to eq(remote_trace_body)
+ end
+ end
+
+ context 'when BUFFER_SIZE is larger than file size' do
+ before do
+ set_larger_buffer_size_than(size)
+ end
+
+ it 'reads a trace' do
+ is_expected.to eq(remote_trace_body)
+ end
+ end
+ end
+
+      context 'when reading only the first 100 bytes' do
+ let(:length) { 100 }
+
+ context 'when BUFFER_SIZE is smaller than file size' do
+ before do
+ set_smaller_buffer_size_than(size)
+ end
+
+ it 'reads a trace' do
+ is_expected.to eq(remote_trace_body[0, length])
+ end
+ end
+
+ context 'when BUFFER_SIZE is larger than file size' do
+ before do
+ set_larger_buffer_size_than(size)
+ end
+
+ it 'reads a trace' do
+ is_expected.to eq(remote_trace_body[0, length])
+ end
+ end
+ end
+
+      context 'when trying to read more than the file size' do
+ let(:length) { size + 1000 }
+
+ context 'when BUFFER_SIZE is smaller than file size' do
+ before do
+ set_smaller_buffer_size_than(size)
+ end
+
+ it 'reads a trace' do
+ is_expected.to eq(remote_trace_body)
+ end
+ end
+
+ context 'when BUFFER_SIZE is larger than file size' do
+ before do
+ set_larger_buffer_size_than(size)
+ end
+
+ it 'reads a trace' do
+ is_expected.to eq(remote_trace_body)
+ end
+ end
+ end
+
+      context 'when trying to read 0 bytes' do
+ let(:length) { 0 }
+
+ context 'when BUFFER_SIZE is smaller than file size' do
+ before do
+ set_smaller_buffer_size_than(size)
+ end
+
+ it 'reads a trace' do
+ is_expected.to be_empty
+ end
+ end
+
+ context 'when BUFFER_SIZE is larger than file size' do
+ before do
+ set_larger_buffer_size_than(size)
+ end
+
+ it 'reads a trace' do
+ is_expected.to be_empty
+ end
+ end
+ end
+ end
+
+    context 'when there is a network issue' do
+ let(:length) { nil }
+
+ before do
+ stub_remote_trace_500
+ end
+
+      it 'raises an error' do
+ expect { subject }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError)
+ end
+ end
+ end
+
+ describe '#readline' do
+ subject { http_io.readline }
+
+ let(:string_io) { StringIO.new(remote_trace_body) }
+
+ before do
+ stub_remote_trace_206
+ end
+
+ shared_examples 'all line matching' do
+ it 'reads a line' do
+ (0...remote_trace_body.lines.count).each do
+ expect(http_io.readline).to eq(string_io.readline)
+ end
+ end
+ end
+
+    context 'when there is a network issue' do
+ let(:length) { nil }
+
+ before do
+ stub_remote_trace_500
+ end
+
+      it 'raises an error' do
+ expect { subject }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError)
+ end
+ end
+
+ context 'when BUFFER_SIZE is smaller than file size' do
+ before do
+ set_smaller_buffer_size_than(size)
+ end
+
+ it_behaves_like 'all line matching'
+ end
+
+ context 'when BUFFER_SIZE is larger than file size' do
+ before do
+ set_larger_buffer_size_than(size)
+ end
+
+ it_behaves_like 'all line matching'
+ end
+
+ context 'when pos is at middle of the file' do
+ before do
+ set_smaller_buffer_size_than(size)
+
+ http_io.seek(size / 2)
+ string_io.seek(size / 2)
+ end
+
+ it 'reads from pos' do
+ expect(http_io.readline).to eq(string_io.readline)
+ end
+ end
+ end
+
+ describe '#write' do
+ subject { http_io.write(nil) }
+
+ it { expect { subject }.to raise_error(NotImplementedError) }
+ end
+
+ describe '#truncate' do
+ subject { http_io.truncate(nil) }
+
+ it { expect { subject }.to raise_error(NotImplementedError) }
+ end
+
+ describe '#flush' do
+ subject { http_io.flush }
+
+ it { expect { subject }.to raise_error(NotImplementedError) }
+ end
+
+ describe '#present?' do
+ subject { http_io.present? }
+
+ it { is_expected.to be_truthy }
+ end
+end
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index a41b7f4e104..280f799f2ab 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -1211,4 +1211,33 @@ describe Gitlab::Database::MigrationHelpers do
expect(model.perform_background_migration_inline?).to eq(false)
end
end
+
+ describe '#index_exists_by_name?' do
+ it 'returns true if an index exists' do
+ expect(model.index_exists_by_name?(:projects, 'index_projects_on_path'))
+ .to be_truthy
+ end
+
+ it 'returns false if the index does not exist' do
+ expect(model.index_exists_by_name?(:projects, 'this_does_not_exist'))
+ .to be_falsy
+ end
+
+ context 'when an index with a function exists', :postgresql do
+ before do
+ ActiveRecord::Base.connection.execute(
+ 'CREATE INDEX test_index ON projects (LOWER(path));'
+ )
+ end
+
+ after do
+        ActiveRecord::Base.connection.execute('DROP INDEX IF EXISTS test_index;')
+ end
+
+ it 'returns true if an index exists' do
+ expect(model.index_exists_by_name?(:projects, 'test_index'))
+ .to be_truthy
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index 54ada3e423f..0e315b3f49e 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -604,17 +604,20 @@ describe Gitlab::Git::Repository, seed_helper: true do
shared_examples 'returning the right branches' do
let(:head_id) { repository.rugged.head.target.oid }
let(:new_branch) { head_id }
+ let(:utf8_branch) { 'branch-é' }
before do
repository.create_branch(new_branch, 'master')
+ repository.create_branch(utf8_branch, 'master')
end
after do
repository.delete_branch(new_branch)
+ repository.delete_branch(utf8_branch)
end
it 'displays that branch' do
- expect(repository.branch_names_contains_sha(head_id)).to include('master', new_branch)
+ expect(repository.branch_names_contains_sha(head_id)).to include('master', new_branch, utf8_branch)
end
end
diff --git a/spec/lib/gitlab/git_access_spec.rb b/spec/lib/gitlab/git_access_spec.rb
index 6f07e423c1b..f8f09d29c73 100644
--- a/spec/lib/gitlab/git_access_spec.rb
+++ b/spec/lib/gitlab/git_access_spec.rb
@@ -240,14 +240,21 @@ describe Gitlab::GitAccess do
end
shared_examples 'check_project_moved' do
- it 'enqueues a redirected message' do
+ it 'enqueues a redirected message for pushing' do
push_access_check
expect(Gitlab::Checks::ProjectMoved.fetch_message(user.id, project.id)).not_to be_nil
end
+
+ it 'allows push and pull access' do
+ aggregate_failures do
+ expect { push_access_check }.not_to raise_error
+ expect { pull_access_check }.not_to raise_error
+ end
+ end
end
- describe '#check_project_moved!', :clean_gitlab_redis_shared_state do
+ describe '#add_project_moved_message!', :clean_gitlab_redis_shared_state do
before do
project.add_master(user)
end
@@ -261,62 +268,18 @@ describe Gitlab::GitAccess do
end
end
- context 'when a permanent redirect and ssh protocol' do
+ context 'with a redirect and ssh protocol' do
let(:redirected_path) { 'some/other-path' }
- before do
- allow_any_instance_of(Gitlab::Checks::ProjectMoved).to receive(:permanent_redirect?).and_return(true)
- end
-
- it 'allows push and pull access' do
- aggregate_failures do
- expect { push_access_check }.not_to raise_error
- end
- end
-
it_behaves_like 'check_project_moved'
end
- context 'with a permanent redirect and http protocol' do
+ context 'with a redirect and http protocol' do
let(:redirected_path) { 'some/other-path' }
let(:protocol) { 'http' }
- before do
- allow_any_instance_of(Gitlab::Checks::ProjectMoved).to receive(:permanent_redirect?).and_return(true)
- end
-
- it 'allows_push and pull access' do
- aggregate_failures do
- expect { push_access_check }.not_to raise_error
- end
- end
-
it_behaves_like 'check_project_moved'
end
-
- context 'with a temporal redirect and ssh protocol' do
- let(:redirected_path) { 'some/other-path' }
-
- it 'blocks push and pull access' do
- aggregate_failures do
- expect { push_access_check }.to raise_error(described_class::ProjectMovedError, /Project '#{redirected_path}' was moved to '#{project.full_path}'/)
- expect { push_access_check }.to raise_error(described_class::ProjectMovedError, /git remote set-url origin #{project.ssh_url_to_repo}/)
-
- expect { pull_access_check }.to raise_error(described_class::ProjectMovedError, /Project '#{redirected_path}' was moved to '#{project.full_path}'/)
- expect { pull_access_check }.to raise_error(described_class::ProjectMovedError, /git remote set-url origin #{project.ssh_url_to_repo}/)
- end
- end
- end
-
- context 'with a temporal redirect and http protocol' do
- let(:redirected_path) { 'some/other-path' }
- let(:protocol) { 'http' }
-
- it 'does not allow to push and pull access' do
- expect { push_access_check }.to raise_error(described_class::ProjectMovedError, /git remote set-url origin #{project.http_url_to_repo}/)
- expect { pull_access_check }.to raise_error(described_class::ProjectMovedError, /git remote set-url origin #{project.http_url_to_repo}/)
- end
- end
end
describe '#check_authentication_abilities!' do
diff --git a/spec/lib/gitlab/gitaly_client/remote_service_spec.rb b/spec/lib/gitlab/gitaly_client/remote_service_spec.rb
index 872377c93d8..f03c7e3f04b 100644
--- a/spec/lib/gitlab/gitaly_client/remote_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/remote_service_spec.rb
@@ -58,4 +58,14 @@ describe Gitlab::GitalyClient::RemoteService do
client.update_remote_mirror(ref_name, only_branches_matching)
end
end
+
+ describe '.exists?' do
+ context "when the remote doesn't exist" do
+ let(:url) { 'https://gitlab.com/gitlab-org/ik-besta-niet-of-ik-word-geplaagd.git' }
+
+ it 'returns false' do
+ expect(described_class.exists?(url)).to be(false)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb b/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
index 5bedfc79dd3..1f0f1fdd7da 100644
--- a/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
@@ -38,8 +38,12 @@ describe Gitlab::GithubImport::Importer::RepositoryImporter do
expect(project)
.to receive(:wiki_repository_exists?)
.and_return(false)
+ expect(Gitlab::GitalyClient::RemoteService)
+ .to receive(:exists?)
+ .with("foo.wiki.git")
+ .and_return(true)
- expect(importer.import_wiki?).to eq(true)
+ expect(importer.import_wiki?).to be(true)
end
it 'returns false if the GitHub wiki is disabled' do
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index 44e4c6ff94b..0716852f57f 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -265,7 +265,9 @@ CommitStatus:
- target_url
- description
- artifacts_file
+- artifacts_file_store
- artifacts_metadata
+- artifacts_metadata_store
- erased_by_id
- erased_at
- artifacts_expire_at
diff --git a/spec/lib/gitlab/verify/lfs_objects_spec.rb b/spec/lib/gitlab/verify/lfs_objects_spec.rb
index 64f3a9660e0..0f890e2c7ce 100644
--- a/spec/lib/gitlab/verify/lfs_objects_spec.rb
+++ b/spec/lib/gitlab/verify/lfs_objects_spec.rb
@@ -31,5 +31,21 @@ describe Gitlab::Verify::LfsObjects do
expect(failures.keys).to contain_exactly(lfs_object)
expect(failure.to_s).to include('Checksum mismatch')
end
+
+ context 'with remote files' do
+ before do
+ stub_lfs_object_storage
+ end
+
+ it 'skips LFS objects in object storage' do
+ local_failure = create(:lfs_object)
+ create(:lfs_object, :object_storage)
+
+ failures = {}
+ described_class.new(batch_size: 10).run_batches { |_, failed| failures.merge!(failed) }
+
+ expect(failures.keys).to contain_exactly(local_failure)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/verify/uploads_spec.rb b/spec/lib/gitlab/verify/uploads_spec.rb
index 6146ce61226..85768308edc 100644
--- a/spec/lib/gitlab/verify/uploads_spec.rb
+++ b/spec/lib/gitlab/verify/uploads_spec.rb
@@ -40,5 +40,21 @@ describe Gitlab::Verify::Uploads do
expect(failures.keys).to contain_exactly(upload)
expect(failure.to_s).to include('Checksum missing')
end
+
+ context 'with remote files' do
+ before do
+ stub_uploads_object_storage(AvatarUploader)
+ end
+
+ it 'skips uploads in object storage' do
+ local_failure = create(:upload)
+ create(:upload, :object_storage)
+
+ failures = {}
+ described_class.new(batch_size: 10).run_batches { |_, failed| failures.merge!(failed) }
+
+ expect(failures.keys).to contain_exactly(local_failure)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/workhorse_spec.rb b/spec/lib/gitlab/workhorse_spec.rb
index 37a0bf1ad36..2b3ffb2d7c0 100644
--- a/spec/lib/gitlab/workhorse_spec.rb
+++ b/spec/lib/gitlab/workhorse_spec.rb
@@ -55,7 +55,7 @@ describe Gitlab::Workhorse do
end
end
- context 'when Gitaly workhorse_archive feature is disabled', :skip_gitaly_mock do
+ context 'when Gitaly workhorse_archive feature is disabled', :disable_gitaly do
it 'sets the header correctly' do
key, command, params = decode_workhorse_header(subject)
@@ -100,7 +100,7 @@ describe Gitlab::Workhorse do
end
end
- context 'when Gitaly workhorse_send_git_patch feature is disabled', :skip_gitaly_mock do
+ context 'when Gitaly workhorse_send_git_patch feature is disabled', :disable_gitaly do
it 'sets the header correctly' do
key, command, params = decode_workhorse_header(subject)
@@ -173,7 +173,7 @@ describe Gitlab::Workhorse do
end
end
- context 'when Gitaly workhorse_send_git_diff feature is disabled', :skip_gitaly_mock do
+ context 'when Gitaly workhorse_send_git_diff feature is disabled', :disable_gitaly do
it 'sets the header correctly' do
key, command, params = decode_workhorse_header(subject)
@@ -275,7 +275,7 @@ describe Gitlab::Workhorse do
describe '.git_http_ok' do
let(:user) { create(:user) }
- let(:repo_path) { repository.path_to_repo }
+ let(:repo_path) { 'ignored but not allowed to be empty in gitlab-workhorse' }
let(:action) { 'info_refs' }
let(:params) do
{
@@ -455,7 +455,7 @@ describe Gitlab::Workhorse do
end
end
- context 'when Gitaly workhorse_raw_show feature is disabled', :skip_gitaly_mock do
+ context 'when Gitaly workhorse_raw_show feature is disabled', :disable_gitaly do
it 'sets the header correctly' do
key, command, params = decode_workhorse_header(subject)
diff --git a/spec/mailers/previews/notify_preview.rb b/spec/mailers/previews/notify_preview.rb
index 580f0d56a92..43c3c89f140 100644
--- a/spec/mailers/previews/notify_preview.rb
+++ b/spec/mailers/previews/notify_preview.rb
@@ -65,7 +65,7 @@ class NotifyPreview < ActionMailer::Preview
end
def merge_request
- @merge_request ||= project.merge_requests.find_by(source_branch: 'master', target_branch: 'feature')
+ @merge_request ||= project.merge_requests.first
end
def user
diff --git a/spec/migrations/remove_empty_fork_networks_spec.rb b/spec/migrations/remove_empty_fork_networks_spec.rb
index 7f7ce91378b..f6d030ab25c 100644
--- a/spec/migrations/remove_empty_fork_networks_spec.rb
+++ b/spec/migrations/remove_empty_fork_networks_spec.rb
@@ -19,6 +19,10 @@ describe RemoveEmptyForkNetworks, :migration do
deleted_project.destroy!
end
+ after do
+ Upload.reset_column_information
+ end
+
it 'deletes only the fork network without members' do
expect(fork_networks.count).to eq(2)
diff --git a/spec/models/ci/build_metadata_spec.rb b/spec/models/ci/build_metadata_spec.rb
new file mode 100644
index 00000000000..268561ee941
--- /dev/null
+++ b/spec/models/ci/build_metadata_spec.rb
@@ -0,0 +1,61 @@
+require 'spec_helper'
+
+describe Ci::BuildMetadata do
+ set(:user) { create(:user) }
+ set(:group) { create(:group, :access_requestable) }
+ set(:project) { create(:project, :repository, group: group, build_timeout: 2000) }
+
+ set(:pipeline) do
+ create(:ci_pipeline, project: project,
+ sha: project.commit.id,
+ ref: project.default_branch,
+ status: 'success')
+ end
+
+ let(:build) { create(:ci_build, pipeline: pipeline) }
+ let(:build_metadata) { create(:ci_build_metadata, build: build) }
+
+ describe '#update_timeout_state' do
+ subject { build_metadata }
+
+ context 'when runner is not assigned to the job' do
+ it "doesn't change timeout value" do
+ expect { subject.update_timeout_state }.not_to change { subject.reload.timeout }
+ end
+
+ it "doesn't change timeout_source value" do
+ expect { subject.update_timeout_state }.not_to change { subject.reload.timeout_source }
+ end
+ end
+
+ context 'when runner is assigned to the job' do
+ before do
+ build.update_attributes(runner: runner)
+ end
+
+ context 'when runner timeout is lower than project timeout' do
+ let(:runner) { create(:ci_runner, maximum_timeout: 1900) }
+
+ it 'sets runner timeout' do
+ expect { subject.update_timeout_state }.to change { subject.reload.timeout }.to(1900)
+ end
+
+ it 'sets runner_timeout_source' do
+ expect { subject.update_timeout_state }.to change { subject.reload.timeout_source }.to('runner_timeout_source')
+ end
+ end
+
+ context 'when runner timeout is higher than project timeout' do
+ let(:runner) { create(:ci_runner, maximum_timeout: 2100) }
+
+ it 'sets project timeout' do
+ expect { subject.update_timeout_state }.to change { subject.reload.timeout }.to(2000)
+ end
+
+ it 'sets project_timeout_source' do
+ expect { subject.update_timeout_state }.to change { subject.reload.timeout_source }.to('project_timeout_source')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 48eeca53f46..42b40ff91fc 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -198,6 +198,16 @@ describe Ci::Build do
end
context 'when legacy artifacts are used' do
+ let(:build) { create(:ci_build, :legacy_artifacts) }
+
+ subject { build.artifacts? }
+
+ context 'is expired' do
+ let(:build) { create(:ci_build, :legacy_artifacts, :expired) }
+
+ it { is_expected.to be_falsy }
+ end
+
context 'artifacts archive does not exist' do
let(:build) { create(:ci_build) }
@@ -208,13 +218,25 @@ describe Ci::Build do
let(:build) { create(:ci_build, :legacy_artifacts) }
it { is_expected.to be_truthy }
+ end
+ end
+ end
- context 'is expired' do
- let(:build) { create(:ci_build, :legacy_artifacts, :expired) }
+ describe '#browsable_artifacts?' do
+ subject { build.browsable_artifacts? }
- it { is_expected.to be_falsy }
- end
+ context 'artifacts metadata does not exist' do
+ before do
+ build.update_attributes(legacy_artifacts_metadata: nil)
end
+
+ it { is_expected.to be_falsy }
+ end
+
+    context 'artifacts metadata does exist' do
+ let(:build) { create(:ci_build, :artifacts) }
+
+ it { is_expected.to be_truthy }
end
end
@@ -1249,12 +1271,6 @@ describe Ci::Build do
end
describe 'project settings' do
- describe '#timeout' do
- it 'returns project timeout configuration' do
- expect(build.timeout).to eq(project.build_timeout)
- end
- end
-
describe '#allow_git_fetch' do
it 'return project allow_git_fetch configuration' do
expect(build.allow_git_fetch).to eq(project.build_allow_git_fetch)
@@ -2089,6 +2105,70 @@ describe Ci::Build do
end
end
+ describe 'state transition: pending: :running' do
+ let(:runner) { create(:ci_runner) }
+ let(:job) { create(:ci_build, :pending, runner: runner) }
+
+ before do
+ job.project.update_attribute(:build_timeout, 1800)
+ end
+
+ def run_job_without_exception
+ job.run!
+ rescue StateMachines::InvalidTransition
+ end
+
+ shared_examples 'saves data on transition' do
+ it 'saves timeout' do
+ expect { job.run! }.to change { job.reload.ensure_metadata.timeout }.from(nil).to(expected_timeout)
+ end
+
+ it 'saves timeout_source' do
+ expect { job.run! }.to change { job.reload.ensure_metadata.timeout_source }.from('unknown_timeout_source').to(expected_timeout_source)
+ end
+
+ context 'when Ci::BuildMetadata#update_timeout_state fails update' do
+ before do
+ allow_any_instance_of(Ci::BuildMetadata).to receive(:update_timeout_state).and_return(false)
+ end
+
+ it "doesn't save timeout" do
+        expect { run_job_without_exception }.not_to change { job.reload.ensure_metadata.timeout }
+ end
+
+ it "doesn't save timeout_source" do
+ expect { run_job_without_exception }.not_to change { job.reload.ensure_metadata.timeout_source }
+ end
+
+ it 'raises an exception' do
+ expect { job.run! }.to raise_error(StateMachines::InvalidTransition)
+ end
+ end
+ end
+
+ context 'when runner timeout overrides project timeout' do
+ let(:expected_timeout) { 900 }
+ let(:expected_timeout_source) { 'runner_timeout_source' }
+
+ before do
+ runner.update_attribute(:maximum_timeout, 900)
+ end
+
+ it_behaves_like 'saves data on transition'
+ end
+
+ context "when runner timeout doesn't override project timeout" do
+ let(:expected_timeout) { 1800 }
+ let(:expected_timeout_source) { 'project_timeout_source' }
+
+ before do
+ runner.update_attribute(:maximum_timeout, 3600)
+ end
+
+ it_behaves_like 'saves data on transition'
+ end
+ end
+
describe 'state transition: any => [:running]' do
shared_examples 'validation is active' do
context 'when depended job has not been completed yet' do
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index a2bd36537e6..1aa28434879 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -15,6 +15,50 @@ describe Ci::JobArtifact do
it { is_expected.to delegate_method(:open).to(:file) }
it { is_expected.to delegate_method(:exists?).to(:file) }
+ describe 'callbacks' do
+ subject { create(:ci_job_artifact, :archive) }
+
+ describe '#schedule_background_upload' do
+ context 'when object storage is disabled' do
+ before do
+ stub_artifacts_object_storage(enabled: false)
+ end
+
+ it 'does not schedule the migration' do
+ expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
+
+ subject
+ end
+ end
+
+ context 'when object storage is enabled' do
+ context 'when background upload is enabled' do
+ before do
+ stub_artifacts_object_storage(background_upload: true)
+ end
+
+ it 'schedules the model for migration' do
+ expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with('JobArtifactUploader', described_class.name, :file, kind_of(Numeric))
+
+ subject
+ end
+ end
+
+ context 'when background upload is disabled' do
+ before do
+ stub_artifacts_object_storage(background_upload: false)
+ end
+
+        it 'does not schedule the migration' do
+ expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
+
+ subject
+ end
+ end
+ end
+ end
+ end
+
describe '#set_size' do
it 'sets the size' do
expect(artifact.size).to eq(106365)
diff --git a/spec/models/concerns/chronic_duration_attribute_spec.rb b/spec/models/concerns/chronic_duration_attribute_spec.rb
new file mode 100644
index 00000000000..27c86e60e60
--- /dev/null
+++ b/spec/models/concerns/chronic_duration_attribute_spec.rb
@@ -0,0 +1,115 @@
+require 'spec_helper'
+
+shared_examples 'ChronicDurationAttribute reader' do
+ it 'contains dynamically created reader method' do
+ expect(subject.class).to be_public_method_defined(virtual_field)
+ end
+
+ it 'outputs chronic duration formatted value' do
+ subject.send("#{source_field}=", 120)
+
+ expect(subject.send(virtual_field)).to eq('2m')
+ end
+
+ context 'when value is set to nil' do
+ it 'outputs nil' do
+ subject.send("#{source_field}=", nil)
+
+ expect(subject.send(virtual_field)).to be_nil
+ end
+ end
+end
+
+shared_examples 'ChronicDurationAttribute writer' do
+  before do
+    subject.send("#{virtual_field}=", '10m')
+  end
+
+  it 'contains dynamically created writer method' do
+    expect(subject.class).to be_public_method_defined("#{virtual_field}=")
+  end
+
+ it 'parses chronic duration input' do
+ expect(subject.send(source_field)).to eq(600)
+ end
+
+ it 'passes validation' do
+ expect(subject.valid?).to be_truthy
+ end
+
+ context 'when negative input is used' do
+ before do
+ subject.send("#{source_field}=", 3600)
+ end
+
+ it "doesn't raise exception" do
+ expect { subject.send("#{virtual_field}=", '-10m') }.not_to raise_error(ChronicDuration::DurationParseError)
+ end
+
+ it "doesn't change value" do
+ expect { subject.send("#{virtual_field}=", '-10m') }.not_to change { subject.send(source_field) }
+ end
+
+ it "doesn't pass validation" do
+ subject.send("#{virtual_field}=", '-10m')
+
+ expect(subject.valid?).to be_falsey
+ expect(subject.errors&.messages).to include(virtual_field => ['is not a correct duration'])
+ end
+ end
+
+ context 'when empty input is used' do
+ before do
+ subject.send("#{virtual_field}=", '')
+ end
+
+ it 'writes nil' do
+ expect(subject.send(source_field)).to be_nil
+ end
+
+ it 'passes validation' do
+ expect(subject.valid?).to be_truthy
+ end
+ end
+
+ context 'when nil input is used' do
+ before do
+ subject.send("#{virtual_field}=", nil)
+ end
+
+ it 'writes nil' do
+ expect(subject.send(source_field)).to be_nil
+ end
+
+ it 'passes validation' do
+ expect(subject.valid?).to be_truthy
+ end
+
+ it "doesn't raise exception" do
+ expect { subject.send("#{virtual_field}=", nil) }.not_to raise_error(NoMethodError)
+ end
+ end
+end
+
+describe 'ChronicDurationAttribute' do
+  let(:source_field) { :maximum_timeout }
+  let(:virtual_field) { :maximum_timeout_human_readable }
+
+ subject { Ci::Runner.new }
+
+ it_behaves_like 'ChronicDurationAttribute reader'
+ it_behaves_like 'ChronicDurationAttribute writer'
+end
+
+describe 'ChronicDurationAttribute - reader' do
+  let(:source_field) { :timeout }
+  let(:virtual_field) { :timeout_human_readable }
+
+  subject { Ci::BuildMetadata.new }
+
+ it "doesn't contain dynamically created writer method" do
+ expect(subject.class).not_to be_public_method_defined("#{virtual_field}=")
+ end
+
+ it_behaves_like 'ChronicDurationAttribute reader'
+end
diff --git a/spec/models/deploy_key_spec.rb b/spec/models/deploy_key_spec.rb
index 3d7283e2164..41440c6d288 100644
--- a/spec/models/deploy_key_spec.rb
+++ b/spec/models/deploy_key_spec.rb
@@ -17,4 +17,25 @@ describe DeployKey, :mailer do
should_not_email(user)
end
end
+
+ describe '#user' do
+ let(:deploy_key) { create(:deploy_key) }
+ let(:user) { create(:user) }
+
+ context 'when user is set' do
+ before do
+ deploy_key.user = user
+ end
+
+ it 'returns the user' do
+ expect(deploy_key.user).to be(user)
+ end
+ end
+
+ context 'when user is not set' do
+ it 'returns the ghost user' do
+ expect(deploy_key.user).to eq(User.ghost)
+ end
+ end
+ end
end
diff --git a/spec/models/lfs_object_spec.rb b/spec/models/lfs_object_spec.rb
new file mode 100644
index 00000000000..a182116d637
--- /dev/null
+++ b/spec/models/lfs_object_spec.rb
@@ -0,0 +1,85 @@
+require 'spec_helper'
+
+describe LfsObject do
+ describe '#local_store?' do
+ it 'returns true when file_store is nil' do
+ subject.file_store = nil
+
+ expect(subject.local_store?).to eq true
+ end
+
+ it 'returns true when file_store is equal to LfsObjectUploader::Store::LOCAL' do
+ subject.file_store = LfsObjectUploader::Store::LOCAL
+
+ expect(subject.local_store?).to eq true
+ end
+
+    it 'returns false when file_store is equal to LfsObjectUploader::Store::REMOTE' do
+ subject.file_store = LfsObjectUploader::Store::REMOTE
+
+ expect(subject.local_store?).to eq false
+ end
+ end
+
+ describe '#schedule_background_upload' do
+ before do
+ stub_lfs_setting(enabled: true)
+ end
+
+ subject { create(:lfs_object, :with_file) }
+
+ context 'when object storage is disabled' do
+ before do
+ stub_lfs_object_storage(enabled: false)
+ end
+
+ it 'does not schedule the migration' do
+ expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
+
+ subject
+ end
+ end
+
+ context 'when object storage is enabled' do
+ context 'when background upload is enabled' do
+      context 'when it is licensed' do
+ before do
+ stub_lfs_object_storage(background_upload: true)
+ end
+
+ it 'schedules the model for migration' do
+ expect(ObjectStorage::BackgroundMoveWorker)
+ .to receive(:perform_async)
+ .with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric))
+ .once
+
+ subject
+ end
+
+ it 'schedules the model for migration once' do
+ expect(ObjectStorage::BackgroundMoveWorker)
+ .to receive(:perform_async)
+ .with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric))
+ .once
+
+ lfs_object = create(:lfs_object)
+          lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png")
+ lfs_object.save!
+ end
+ end
+ end
+
+ context 'when background upload is disabled' do
+ before do
+ stub_lfs_object_storage(background_upload: false)
+ end
+
+      it 'does not schedule the migration' do
+ expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
+
+ subject
+ end
+ end
+ end
+ end
+end
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index ff5a6f63010..f73f44ca0ad 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -1961,6 +1961,17 @@ describe MergeRequest do
expect(subject.merge_request_diff_for(merge_request_diff3.head_commit_sha)).to eq(merge_request_diff3)
end
end
+
+ it 'runs a single query on the initial call, and none afterwards' do
+ expect { subject.merge_request_diff_for(merge_request_diff1.diff_refs) }
+ .not_to exceed_query_limit(1)
+
+ expect { subject.merge_request_diff_for(merge_request_diff2.diff_refs) }
+ .not_to exceed_query_limit(0)
+
+ expect { subject.merge_request_diff_for(merge_request_diff3.head_commit_sha) }
+ .not_to exceed_query_limit(0)
+ end
end
describe '#version_params_for' do
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index e506c932d58..60ab52565cb 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -501,28 +501,6 @@ describe Repository do
end
end
- describe '#create_hooks' do
- let(:hook_path) { File.join(repository.path_to_repo, 'hooks') }
-
- it 'symlinks the global hooks directory' do
- repository.create_hooks
-
- expect(File.symlink?(hook_path)).to be true
- expect(File.readlink(hook_path)).to eq(Gitlab.config.gitlab_shell.hooks_path)
- end
-
- it 'replaces existing symlink with the right directory' do
- FileUtils.mkdir_p(hook_path)
-
- expect(File.symlink?(hook_path)).to be false
-
- repository.create_hooks
-
- expect(File.symlink?(hook_path)).to be true
- expect(File.readlink(hook_path)).to eq(Gitlab.config.gitlab_shell.hooks_path)
- end
- end
-
describe "#create_dir" do
it "commits a change that creates a new directory" do
expect do
diff --git a/spec/models/route_spec.rb b/spec/models/route_spec.rb
index dfac82b327a..01238a89a81 100644
--- a/spec/models/route_spec.rb
+++ b/spec/models/route_spec.rb
@@ -16,66 +16,6 @@ describe Route do
it { is_expected.to validate_presence_of(:source) }
it { is_expected.to validate_presence_of(:path) }
it { is_expected.to validate_uniqueness_of(:path).case_insensitive }
-
- describe '#ensure_permanent_paths' do
- context 'when the route is not yet persisted' do
- let(:new_route) { described_class.new(path: 'foo', source: build(:group)) }
-
- context 'when permanent conflicting redirects exist' do
- it 'is invalid' do
- redirect = build(:redirect_route, :permanent, path: 'foo/bar/baz')
- redirect.save!(validate: false)
-
- expect(new_route.valid?).to be_falsey
- expect(new_route.errors.first[1]).to eq('has been taken before')
- end
- end
-
- context 'when no permanent conflicting redirects exist' do
- it 'is valid' do
- expect(new_route.valid?).to be_truthy
- end
- end
- end
-
- context 'when path has changed' do
- before do
- route.path = 'foo'
- end
-
- context 'when permanent conflicting redirects exist' do
- it 'is invalid' do
- redirect = build(:redirect_route, :permanent, path: 'foo/bar/baz')
- redirect.save!(validate: false)
-
- expect(route.valid?).to be_falsey
- expect(route.errors.first[1]).to eq('has been taken before')
- end
- end
-
- context 'when no permanent conflicting redirects exist' do
- it 'is valid' do
- expect(route.valid?).to be_truthy
- end
- end
- end
-
- context 'when path has not changed' do
- context 'when permanent conflicting redirects exist' do
- it 'is valid' do
- redirect = build(:redirect_route, :permanent, path: 'git_lab/foo/bar')
- redirect.save!(validate: false)
-
- expect(route.valid?).to be_truthy
- end
- end
- context 'when no permanent conflicting redirects exist' do
- it 'is valid' do
- expect(route.valid?).to be_truthy
- end
- end
- end
- end
end
describe 'callbacks' do
@@ -211,43 +151,31 @@ describe Route do
end
context 'when the source is a Project' do
- it 'creates a temporal RedirectRoute' do
+ it 'creates a RedirectRoute' do
project = create(:project)
route = project.route
redirect_route = route.create_redirect('foo')
- expect(redirect_route.permanent?).to be_falsy
+ expect(redirect_route).not_to be_nil
end
end
context 'when the source is not a project' do
- it 'creates a permanent RedirectRoute' do
- redirect_route = route.create_redirect('foo', permanent: true)
- expect(redirect_route.permanent?).to be_truthy
+ it 'creates a RedirectRoute' do
+ redirect_route = route.create_redirect('foo')
+ expect(redirect_route).not_to be_nil
end
end
end
describe '#delete_conflicting_redirects' do
- context 'with permanent redirect' do
- it 'does not delete the redirect' do
- route.create_redirect("#{route.path}/foo", permanent: true)
-
- expect do
- route.delete_conflicting_redirects
- end.not_to change { RedirectRoute.count }
- end
- end
-
- context 'with temporal redirect' do
- let(:route) { create(:project).route }
+ let(:route) { create(:project).route }
- it 'deletes the redirect' do
- route.create_redirect("#{route.path}/foo")
+ it 'deletes the redirect' do
+ route.create_redirect("#{route.path}/foo")
- expect do
- route.delete_conflicting_redirects
- end.to change { RedirectRoute.count }.by(-1)
- end
+ expect do
+ route.delete_conflicting_redirects
+ end.to change { RedirectRoute.count }.by(-1)
end
context 'when a redirect route with the same path exists' do
@@ -289,31 +217,18 @@ describe Route do
end
describe '#conflicting_redirects' do
+ let(:route) { create(:project).route }
+
it 'returns an ActiveRecord::Relation' do
expect(route.conflicting_redirects).to be_an(ActiveRecord::Relation)
end
- context 'with permanent redirects' do
- it 'does not return anything' do
- route.create_redirect("#{route.path}/foo", permanent: true)
- route.create_redirect("#{route.path}/foo/bar", permanent: true)
- route.create_redirect("#{route.path}/baz/quz", permanent: true)
+ it 'returns the redirect routes' do
+ redirect1 = route.create_redirect("#{route.path}/foo")
+ redirect2 = route.create_redirect("#{route.path}/foo/bar")
+ redirect3 = route.create_redirect("#{route.path}/baz/quz")
- expect(route.conflicting_redirects).to be_empty
- end
- end
-
- context 'with temporal redirects' do
- let(:route) { create(:project).route }
-
- it 'returns the redirect routes' do
- route = create(:project).route
- redirect1 = route.create_redirect("#{route.path}/foo")
- redirect2 = route.create_redirect("#{route.path}/foo/bar")
- redirect3 = route.create_redirect("#{route.path}/baz/quz")
-
- expect(route.conflicting_redirects).to match_array([redirect1, redirect2, redirect3])
- end
+ expect(route.conflicting_redirects).to match_array([redirect1, redirect2, redirect3])
end
context 'when a redirect route with the same path exists' do
@@ -348,44 +263,6 @@ describe Route do
end
end
- describe "#conflicting_redirect_exists?" do
- context 'when a conflicting redirect exists' do
- let(:group1) { create(:group, path: 'foo') }
- let(:group2) { create(:group, path: 'baz') }
-
- it 'should not be saved' do
- group1.path = 'bar'
- group1.save
-
- group2.path = 'foo'
-
- expect(group2.save).to be_falsy
- end
-
- it 'should return an error on path' do
- group1.path = 'bar'
- group1.save
-
- group2.path = 'foo'
- group2.valid?
- expect(group2.errors[:path]).to eq(['has been taken before'])
- end
- end
-
- context 'when a conflicting redirect does not exist' do
- let(:project1) { create(:project, path: 'foo') }
- let(:project2) { create(:project, path: 'baz') }
-
- it 'should be saved' do
- project1.path = 'bar'
- project1.save
-
- project2.path = 'foo'
- expect(project2.save).to be_truthy
- end
- end
- end
-
describe '#delete_conflicting_orphaned_routes' do
context 'when there is a conflicting route' do
let!(:conflicting_group) { create(:group, path: 'foo') }
diff --git a/spec/models/service_spec.rb b/spec/models/service_spec.rb
index 79f25dc4360..83ed3b203e6 100644
--- a/spec/models/service_spec.rb
+++ b/spec/models/service_spec.rb
@@ -58,6 +58,21 @@ describe Service do
end
describe "Template" do
+ describe '.build_from_template' do
+ context 'when template is invalid' do
+ it 'sets service template to inactive when template is invalid' do
+ project = create(:project)
+ template = JiraService.new(template: true, active: true)
+ template.save(validate: false)
+
+ service = described_class.build_from_template(project.id, template)
+
+ expect(service).to be_valid
+ expect(service.active).to be false
+ end
+ end
+ end
+
describe "for pushover service" do
let!(:service_template) do
PushoverService.create(
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index c61674fff13..100418da804 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -25,7 +25,7 @@ describe User do
it { is_expected.to have_many(:group_members) }
it { is_expected.to have_many(:groups) }
it { is_expected.to have_many(:keys).dependent(:destroy) }
- it { is_expected.to have_many(:deploy_keys).dependent(:destroy) }
+ it { is_expected.to have_many(:deploy_keys).dependent(:nullify) }
it { is_expected.to have_many(:events).dependent(:destroy) }
it { is_expected.to have_many(:issues).dependent(:destroy) }
it { is_expected.to have_many(:notes).dependent(:destroy) }
@@ -126,23 +126,6 @@ describe User do
end
end
- context 'when the username was used by another user before' do
- let(:username) { 'foo' }
- let!(:other_user) { create(:user, username: username) }
-
- before do
- other_user.username = 'bar'
- other_user.save!
- end
-
- it 'is invalid' do
- user = build(:user, username: username)
-
- expect(user).not_to be_valid
- expect(user.errors.full_messages).to eq(['Username has been taken before'])
- end
- end
-
context 'when the username is in use by another user' do
let(:username) { 'foo' }
let!(:other_user) { create(:user, username: username) }
@@ -2699,27 +2682,19 @@ describe User do
end
end
- describe "#username_previously_taken?" do
- let(:user1) { create(:user, username: 'foo') }
+ context 'changing a username' do
+ let(:user) { create(:user, username: 'foo') }
- context 'when the username has been taken before' do
- before do
- user1.username = 'bar'
- user1.save!
- end
-
- it 'should raise an ActiveRecord::RecordInvalid exception' do
- user2 = build(:user, username: 'foo')
- expect { user2.save! }.to raise_error(ActiveRecord::RecordInvalid, /Username has been taken before/)
- end
+ it 'creates a redirect route' do
+ expect { user.update!(username: 'bar') }
+ .to change { RedirectRoute.where(path: 'foo').count }.by(1)
end
- context 'when the username has not been taken before' do
- it 'should be valid' do
- expect(RedirectRoute.count).to eq(0)
- user2 = build(:user, username: 'baz')
- expect(user2).to be_valid
- end
+ it 'deletes the redirect when a user is created with the old username' do
+ user.update!(username: 'bar')
+
+ expect { create(:user, username: 'foo') }
+ .to change { RedirectRoute.where(path: 'foo').count }.by(-1)
end
end
end
diff --git a/spec/policies/protected_branch_policy_spec.rb b/spec/policies/protected_branch_policy_spec.rb
new file mode 100644
index 00000000000..b39de42d721
--- /dev/null
+++ b/spec/policies/protected_branch_policy_spec.rb
@@ -0,0 +1,22 @@
+require 'spec_helper'
+
+describe ProtectedBranchPolicy do
+ let(:user) { create(:user) }
+ let(:name) { 'feature' }
+ let(:protected_branch) { create(:protected_branch, name: name) }
+ let(:project) { protected_branch.project }
+
+ subject { described_class.new(user, protected_branch) }
+
+ it 'branches can be updated by project masters' do
+ project.add_master(user)
+
+ is_expected.to be_allowed(:update_protected_branch)
+ end
+
+ it "branches can't be updated by guests" do
+ project.add_guest(user)
+
+ is_expected.to be_disallowed(:update_protected_branch)
+ end
+end
diff --git a/spec/presenters/project_presenter_spec.rb b/spec/presenters/project_presenter_spec.rb
index f8c93d91ec5..55962f345d4 100644
--- a/spec/presenters/project_presenter_spec.rb
+++ b/spec/presenters/project_presenter_spec.rb
@@ -339,7 +339,7 @@ describe ProjectPresenter do
it 'returns link to clusters page if more than one exists' do
project.add_master(user)
- create(:cluster, projects: [project])
+ create(:cluster, :production_environment, projects: [project])
create(:cluster, projects: [project])
expect(presenter.kubernetes_cluster_anchor_data).to eq(OpenStruct.new(enabled: true,
diff --git a/spec/requests/api/commits_spec.rb b/spec/requests/api/commits_spec.rb
index 852f67db958..8ad19e3f0f5 100644
--- a/spec/requests/api/commits_spec.rb
+++ b/spec/requests/api/commits_spec.rb
@@ -1141,4 +1141,33 @@ describe API::Commits do
end
end
end
+
+ describe 'GET /projects/:id/repository/commits/:sha/merge_requests' do
+ let!(:project) { create(:project, :repository, :private) }
+ let!(:merged_mr) { create(:merge_request, source_project: project, source_branch: 'master', target_branch: 'feature') }
+ let(:commit) { merged_mr.merge_request_diff.commits.last }
+
+ it 'returns the correct merge request' do
+ get api("/projects/#{project.id}/repository/commits/#{commit.id}/merge_requests", user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response.length).to eq(1)
+ expect(json_response[0]['id']).to eq(merged_mr.id)
+ end
+
+ it 'returns 403 for an unauthorized user' do
+ project.add_guest(user)
+
+ get api("/projects/#{project.id}/repository/commits/#{commit.id}/merge_requests", user)
+
+ expect(response).to have_gitlab_http_status(403)
+ end
+
+ it 'responds 404 when the commit does not exist' do
+ get api("/projects/#{project.id}/repository/commits/a7d26f00c35b/merge_requests", user)
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
end
diff --git a/spec/requests/api/deploy_keys_spec.rb b/spec/requests/api/deploy_keys_spec.rb
index 0772b3f2e64..ae9c0e9c304 100644
--- a/spec/requests/api/deploy_keys_spec.rb
+++ b/spec/requests/api/deploy_keys_spec.rb
@@ -91,6 +91,10 @@ describe API::DeployKeys do
expect do
post api("/projects/#{project.id}/deploy_keys", admin), key_attrs
end.to change { project.deploy_keys.count }.by(1)
+
+ new_key = project.deploy_keys.last
+ expect(new_key.key).to eq(key_attrs[:key])
+ expect(new_key.user).to eq(admin)
end
it 'returns an existing ssh key when attempting to add a duplicate' do
diff --git a/spec/requests/api/jobs_spec.rb b/spec/requests/api/jobs_spec.rb
index 6192bbd4abb..3ffdfdc0e9a 100644
--- a/spec/requests/api/jobs_spec.rb
+++ b/spec/requests/api/jobs_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
describe API::Jobs do
+ include HttpIOHelpers
+
set(:project) do
create(:project, :repository, public_builds: false)
end
@@ -112,6 +114,7 @@ describe API::Jobs do
let(:query) { Hash.new }
before do
+ job
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), query
end
@@ -335,10 +338,55 @@ describe API::Jobs do
end
end
+ context 'when artifacts are stored remotely' do
+ let(:proxy_download) { false }
+
+ before do
+ stub_artifacts_object_storage(proxy_download: proxy_download)
+ end
+
+ let(:job) { create(:ci_build, pipeline: pipeline) }
+ let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
+
+ before do
+ job.reload
+
+ get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
+ end
+
+ context 'when proxy download is enabled' do
+ let(:proxy_download) { true }
+
+ it 'responds with the workhorse send-url' do
+ expect(response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("send-url:")
+ end
+ end
+
+ context 'when proxy download is disabled' do
+ it 'returns location redirect' do
+ expect(response).to have_gitlab_http_status(302)
+ end
+ end
+
+ context 'authorized user' do
+ it 'returns the file remote URL' do
+ expect(response).to redirect_to(artifact.file.url)
+ end
+ end
+
+ context 'unauthorized user' do
+ let(:api_user) { nil }
+
+ it 'does not return specific job artifacts' do
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+
it 'does not return job artifacts if not uploaded' do
get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -349,6 +397,7 @@ describe API::Jobs do
let(:job) { create(:ci_build, :artifacts, pipeline: pipeline, user: api_user) }
before do
+ stub_artifacts_object_storage
job.success
end
@@ -412,9 +461,24 @@ describe API::Jobs do
"attachment; filename=#{job.artifacts_file.filename}" }
end
- it { expect(response).to have_gitlab_http_status(200) }
+ it { expect(response).to have_http_status(:ok) }
it { expect(response.headers).to include(download_headers) }
end
+
+ context 'when artifacts are stored remotely' do
+ let(:job) { create(:ci_build, pipeline: pipeline, user: api_user) }
+ let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
+
+ before do
+ job.reload
+
+ get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
+ end
+
+ it 'returns location redirect' do
+ expect(response).to have_http_status(:found)
+ end
+ end
end
context 'with regular branch' do
@@ -451,6 +515,22 @@ describe API::Jobs do
end
context 'authorized user' do
+ context 'when trace is in ObjectStorage' do
+ let!(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
+
+ before do
+ stub_remote_trace_206
+ allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
+ allow_any_instance_of(JobArtifactUploader).to receive(:url) { remote_trace_url }
+ allow_any_instance_of(JobArtifactUploader).to receive(:size) { remote_trace_size }
+ end
+
+ it 'returns specific job trace' do
+ expect(response).to have_gitlab_http_status(200)
+ expect(response.body).to eq(job.trace.raw)
+ end
+ end
+
context 'when trace is artifact' do
let(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
diff --git a/spec/requests/api/protected_branches_spec.rb b/spec/requests/api/protected_branches_spec.rb
index 1d23e023bb6..576fde46615 100644
--- a/spec/requests/api/protected_branches_spec.rb
+++ b/spec/requests/api/protected_branches_spec.rb
@@ -193,6 +193,19 @@ describe API::ProtectedBranches do
expect(json_response['merge_access_levels'][0]['access_level']).to eq(Gitlab::Access::MASTER)
end
end
+
+ context 'when a policy restricts rule creation' do
+ before do
+ policy = instance_double(ProtectedBranchPolicy, can?: false)
+ expect(ProtectedBranchPolicy).to receive(:new).and_return(policy)
+ end
+
+ it "prevents deletion of the protected branch rule" do
+ post post_endpoint, name: branch_name
+
+ expect(response).to have_gitlab_http_status(403)
+ end
+ end
end
context 'when authenticated as a guest' do
@@ -209,18 +222,20 @@ describe API::ProtectedBranches do
end
describe "DELETE /projects/:id/protected_branches/unprotect/:branch" do
+ let(:delete_endpoint) { api("/projects/#{project.id}/protected_branches/#{branch_name}", user) }
+
before do
project.add_master(user)
end
it "unprotects a single branch" do
- delete api("/projects/#{project.id}/protected_branches/#{branch_name}", user)
+ delete delete_endpoint
expect(response).to have_gitlab_http_status(204)
end
it_behaves_like '412 response' do
- let(:request) { api("/projects/#{project.id}/protected_branches/#{branch_name}", user) }
+ let(:request) { delete_endpoint }
end
it "returns 404 if branch does not exist" do
@@ -229,11 +244,24 @@ describe API::ProtectedBranches do
expect(response).to have_gitlab_http_status(404)
end
+ context 'when a policy restricts rule deletion' do
+ before do
+ policy = instance_double(ProtectedBranchPolicy, can?: false)
+ expect(ProtectedBranchPolicy).to receive(:new).and_return(policy)
+ end
+
+ it "prevents deletion of the protected branch rule" do
+ delete delete_endpoint
+
+ expect(response).to have_gitlab_http_status(403)
+ end
+ end
+
context 'when branch has a wildcard in its name' do
let(:protected_name) { 'feature*' }
it "unprotects a wildcard branch" do
- delete api("/projects/#{project.id}/protected_branches/#{branch_name}", user)
+ delete delete_endpoint
expect(response).to have_gitlab_http_status(204)
end
diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb
index 95c23726a79..5084b36c761 100644
--- a/spec/requests/api/runner_spec.rb
+++ b/spec/requests/api/runner_spec.rb
@@ -109,6 +109,26 @@ describe API::Runner do
end
end
+ context 'when maximum job timeout is specified' do
+ it 'creates runner' do
+ post api('/runners'), token: registration_token,
+ maximum_timeout: 9000
+
+ expect(response).to have_gitlab_http_status 201
+ expect(Ci::Runner.first.maximum_timeout).to eq(9000)
+ end
+
+ context 'when maximum job timeout is empty' do
+ it 'creates runner' do
+ post api('/runners'), token: registration_token,
+ maximum_timeout: ''
+
+ expect(response).to have_gitlab_http_status 201
+ expect(Ci::Runner.first.maximum_timeout).to be_nil
+ end
+ end
+ end
+
%w(name version revision platform architecture).each do |param|
context "when info parameter '#{param}' info is present" do
let(:value) { "#{param}_value" }
@@ -200,7 +220,7 @@ describe API::Runner do
let(:project) { create(:project, shared_runners_enabled: false) }
let(:pipeline) { create(:ci_pipeline_without_jobs, project: project, ref: 'master') }
let(:runner) { create(:ci_runner) }
- let!(:job) do
+ let(:job) do
create(:ci_build, :artifacts, :extended_options,
pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0, commands: "ls\ndate")
end
@@ -215,6 +235,7 @@ describe API::Runner do
let(:user_agent) { 'gitlab-runner 9.0.0 (9-0-stable; go1.7.4; linux/amd64)' }
before do
+ job
stub_container_registry_config(enabled: false)
end
@@ -339,12 +360,12 @@ describe API::Runner do
let(:expected_steps) do
[{ 'name' => 'script',
'script' => %w(ls date),
- 'timeout' => job.timeout,
+ 'timeout' => job.metadata_timeout,
'when' => 'on_success',
'allow_failure' => false },
{ 'name' => 'after_script',
'script' => %w(ls date),
- 'timeout' => job.timeout,
+ 'timeout' => job.metadata_timeout,
'when' => 'always',
'allow_failure' => true }]
end
@@ -647,6 +668,41 @@ describe API::Runner do
end
end
end
+
+ describe 'timeout support' do
+ context 'when project specifies job timeout' do
+ let(:project) { create(:project, shared_runners_enabled: false, build_timeout: 1234) }
+
+ it 'contains info about timeout taken from project' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(201)
+ expect(json_response['runner_info']).to include({ 'timeout' => 1234 })
+ end
+
+ context 'when runner specifies lower timeout' do
+ let(:runner) { create(:ci_runner, maximum_timeout: 1000) }
+
+ it 'contains info about timeout overridden by runner' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(201)
+ expect(json_response['runner_info']).to include({ 'timeout' => 1000 })
+ end
+ end
+
+ context 'when runner specifies bigger timeout' do
+ let(:runner) { create(:ci_runner, maximum_timeout: 2000) }
+
+ it 'contains info about timeout not overridden by runner' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(201)
+ expect(json_response['runner_info']).to include({ 'timeout' => 1234 })
+ end
+ end
+ end
+ end
end
def request_job(token = runner.token, **params)
@@ -888,6 +944,7 @@ describe API::Runner do
let(:file_upload2) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/gif') }
before do
+ stub_artifacts_object_storage
job.run!
end
@@ -1179,27 +1236,67 @@ describe API::Runner do
describe 'GET /api/v4/jobs/:id/artifacts' do
let(:token) { job.token }
- before do
- download_artifact
- end
-
context 'when job has artifacts' do
- let(:job) { create(:ci_build, :artifacts) }
- let(:download_headers) do
- { 'Content-Transfer-Encoding' => 'binary',
- 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
+ let(:job) { create(:ci_build) }
+ let(:store) { JobArtifactUploader::Store::LOCAL }
+
+ before do
+ create(:ci_job_artifact, :archive, file_store: store, job: job)
end
context 'when using job token' do
- it 'download artifacts' do
- expect(response).to have_gitlab_http_status(200)
- expect(response.headers).to include download_headers
+ context 'when artifacts are stored locally' do
+ let(:download_headers) do
+ { 'Content-Transfer-Encoding' => 'binary',
+ 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
+ end
+
+ before do
+ download_artifact
+ end
+
+ it 'downloads artifacts' do
+ expect(response).to have_http_status(200)
+ expect(response.headers).to include download_headers
+ end
+ end
+
+ context 'when artifacts are stored remotely' do
+ let(:store) { JobArtifactUploader::Store::REMOTE }
+ let!(:job) { create(:ci_build) }
+
+ context 'when proxy download is being used' do
+ before do
+ download_artifact(direct_download: false)
+ end
+
+ it 'uses workhorse send-url' do
+ expect(response).to have_gitlab_http_status(200)
+ expect(response.headers).to include(
+ 'Gitlab-Workhorse-Send-Data' => /send-url:/)
+ end
+ end
+
+ context 'when direct download is being used' do
+ before do
+ download_artifact(direct_download: true)
+ end
+
+ it 'receives a redirect for downloading artifacts' do
+ expect(response).to have_gitlab_http_status(302)
+ expect(response.headers).to include('Location')
+ end
+ end
end
end
context 'when using runners token' do
let(:token) { job.project.runners_token }
+ before do
+ download_artifact
+ end
+
it 'responds with forbidden' do
expect(response).to have_gitlab_http_status(403)
end
@@ -1208,12 +1305,16 @@ describe API::Runner do
context 'when job does not have artifacts' do
it 'responds with not found' do
+ download_artifact
+
expect(response).to have_gitlab_http_status(404)
end
end
def download_artifact(params = {}, request_headers = headers)
params = params.merge(token: token)
+ job.reload
+
get api("/jobs/#{job.id}/artifacts"), params, request_headers
end
end
diff --git a/spec/requests/api/runners_spec.rb b/spec/requests/api/runners_spec.rb
index ec5cad4f4fd..d30f0cf36e2 100644
--- a/spec/requests/api/runners_spec.rb
+++ b/spec/requests/api/runners_spec.rb
@@ -123,6 +123,7 @@ describe API::Runners do
expect(response).to have_gitlab_http_status(200)
expect(json_response['description']).to eq(shared_runner.description)
+ expect(json_response['maximum_timeout']).to be_nil
end
end
@@ -192,7 +193,8 @@ describe API::Runners do
tag_list: ['ruby2.1', 'pgsql', 'mysql'],
run_untagged: 'false',
locked: 'true',
- access_level: 'ref_protected')
+ access_level: 'ref_protected',
+ maximum_timeout: 1234)
shared_runner.reload
expect(response).to have_gitlab_http_status(200)
@@ -204,6 +206,7 @@ describe API::Runners do
expect(shared_runner.ref_protected?).to be_truthy
expect(shared_runner.ensure_runner_queue_value)
.not_to eq(runner_queue_value)
+ expect(shared_runner.maximum_timeout).to eq(1234)
end
end
diff --git a/spec/requests/api/v3/builds_spec.rb b/spec/requests/api/v3/builds_spec.rb
index 79041c6a792..00f067889a0 100644
--- a/spec/requests/api/v3/builds_spec.rb
+++ b/spec/requests/api/v3/builds_spec.rb
@@ -216,6 +216,7 @@ describe API::V3::Builds do
describe 'GET /projects/:id/builds/:build_id/artifacts' do
before do
+ stub_artifacts_object_storage
get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user)
end
@@ -230,13 +231,24 @@ describe API::V3::Builds do
end
it 'returns specific job artifacts' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_http_status(200)
expect(response.headers).to include(download_headers)
expect(response.body).to match_file(build.artifacts_file.file.file)
end
end
end
+ context 'when artifacts are stored remotely' do
+ let(:build) { create(:ci_build, pipeline: pipeline) }
+ let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: build) }
+
+ it 'returns location redirect' do
+ get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user)
+
+ expect(response).to have_gitlab_http_status(302)
+ end
+ end
+
context 'unauthorized user' do
let(:api_user) { nil }
@@ -256,6 +268,7 @@ describe API::V3::Builds do
let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
before do
+ stub_artifacts_object_storage
build.success
end
@@ -318,9 +331,24 @@ describe API::V3::Builds do
"attachment; filename=#{build.artifacts_file.filename}" }
end
- it { expect(response).to have_gitlab_http_status(200) }
+ it { expect(response).to have_http_status(200) }
it { expect(response.headers).to include(download_headers) }
end
+
+ context 'when artifacts are stored remotely' do
+ let(:build) { create(:ci_build, pipeline: pipeline) }
+ let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: build) }
+
+ before do
+ build.reload
+
+ get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user)
+ end
+
+ it 'returns location redirect' do
+ expect(response).to have_http_status(302)
+ end
+ end
end
context 'with regular branch' do
diff --git a/spec/requests/git_http_spec.rb b/spec/requests/git_http_spec.rb
index 6dbbb1ad7bb..494db30e8e0 100644
--- a/spec/requests/git_http_spec.rb
+++ b/spec/requests/git_http_spec.rb
@@ -163,7 +163,7 @@ describe 'Git HTTP requests' do
download(path) do |response|
json_body = ActiveSupport::JSON.decode(response.body)
- expect(json_body['RepoPath']).to include(wiki.repository.disk_path)
+ expect(json_body['Repository']['relative_path']).to eq(wiki.repository.relative_path)
end
end
end
@@ -344,20 +344,11 @@ describe 'Git HTTP requests' do
context 'and the user requests a redirected path' do
let!(:redirect) { project.route.create_redirect('foo/bar') }
let(:path) { "#{redirect.path}.git" }
- let(:project_moved_message) do
- <<-MSG.strip_heredoc
- Project '#{redirect.path}' was moved to '#{project.full_path}'.
- Please update your Git remote:
-
- git remote set-url origin #{project.http_url_to_repo} and try again.
- MSG
- end
-
- it 'downloads get status 404 with "project was moved" message' do
+ it 'downloads get status 200 for redirects' do
clone_get(path, {})
- expect(response).to have_gitlab_http_status(:not_found)
- expect(response.body).to match(project_moved_message)
+
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -559,20 +550,19 @@ describe 'Git HTTP requests' do
Please update your Git remote:
- git remote set-url origin #{project.http_url_to_repo} and try again.
+ git remote set-url origin #{project.http_url_to_repo}.
MSG
end
- it 'downloads get status 404 with "project was moved" message' do
+ it 'downloads get status 200' do
clone_get(path, env)
- expect(response).to have_gitlab_http_status(:not_found)
- expect(response.body).to match(project_moved_message)
+
+ expect(response).to have_gitlab_http_status(:ok)
end
it 'uploads get status 200' do
upload(path, env) do |response|
- expect(response).to have_gitlab_http_status(:not_found)
- expect(response.body).to match(project_moved_message)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb
index 971b45c411d..1e6bd993c08 100644
--- a/spec/requests/lfs_http_spec.rb
+++ b/spec/requests/lfs_http_spec.rb
@@ -191,10 +191,12 @@ describe 'Git LFS API and storage' do
describe 'when fetching lfs object' do
let(:project) { create(:project) }
let(:update_permissions) { }
+ let(:before_get) { }
before do
enable_lfs
update_permissions
+ before_get
get "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}", nil, headers
end
@@ -239,6 +241,38 @@ describe 'Git LFS API and storage' do
end
it_behaves_like 'responds with a file'
+
+ context 'when LFS uses object storage' do
+ context 'when proxy download is enabled' do
+ let(:before_get) do
+ stub_lfs_object_storage(proxy_download: true)
+ lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
+ end
+
+ it 'responds with status 200' do
+ expect(response).to have_gitlab_http_status(200)
+ end
+
+ it 'responds with the workhorse send-url' do
+ expect(response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("send-url:")
+ end
+ end
+
+ context 'when proxy download is disabled' do
+ let(:before_get) do
+ stub_lfs_object_storage(proxy_download: false)
+ lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
+ end
+
+ it 'responds with redirect' do
+ expect(response).to have_gitlab_http_status(302)
+ end
+
+ it 'responds with the file location' do
+ expect(response.location).to include(lfs_object.reload.file.path)
+ end
+ end
+ end
end
end
@@ -945,22 +979,61 @@ describe 'Git LFS API and storage' do
end
context 'and request is sent by gitlab-workhorse to authorize the request' do
- before do
- put_authorize
+ shared_examples 'a valid response' do
+ before do
+ put_authorize
+ end
+
+ it 'responds with status 200' do
+ expect(response).to have_gitlab_http_status(200)
+ end
+
+ it 'uses the gitlab-workhorse content type' do
+ expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+ end
end
- it 'responds with status 200' do
- expect(response).to have_gitlab_http_status(200)
+ shared_examples 'a local file' do
+ it_behaves_like 'a valid response' do
+ it 'responds with status 200, location of lfs store and object details' do
+ expect(json_response['TempPath']).to eq(LfsObjectUploader.workhorse_local_upload_path)
+ expect(json_response['RemoteObject']).to be_nil
+ expect(json_response['LfsOid']).to eq(sample_oid)
+ expect(json_response['LfsSize']).to eq(sample_size)
+ end
+ end
end
- it 'uses the gitlab-workhorse content type' do
- expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+ context 'when using local storage' do
+ it_behaves_like 'a local file'
end
- it 'responds with status 200, location of lfs store and object details' do
- expect(json_response['StoreLFSPath']).to eq(LfsObjectUploader.workhorse_upload_path)
- expect(json_response['LfsOid']).to eq(sample_oid)
- expect(json_response['LfsSize']).to eq(sample_size)
+ context 'when using remote storage' do
+ context 'when direct upload is enabled' do
+ before do
+ stub_lfs_object_storage(enabled: true, direct_upload: true)
+ end
+
+ it_behaves_like 'a valid response' do
+ it 'responds with status 200, location of lfs remote store and object details' do
+ expect(json_response['TempPath']).to be_nil
+ expect(json_response['RemoteObject']).to have_key('ID')
+ expect(json_response['RemoteObject']).to have_key('GetURL')
+ expect(json_response['RemoteObject']).to have_key('StoreURL')
+ expect(json_response['RemoteObject']).to have_key('DeleteURL')
+ expect(json_response['LfsOid']).to eq(sample_oid)
+ expect(json_response['LfsSize']).to eq(sample_size)
+ end
+ end
+ end
+
+ context 'when direct upload is disabled' do
+ before do
+ stub_lfs_object_storage(enabled: true, direct_upload: false)
+ end
+
+ it_behaves_like 'a local file'
+ end
end
end
@@ -978,14 +1051,95 @@ describe 'Git LFS API and storage' do
end
end
+ context 'and workhorse requests upload finalize for a new lfs object' do
+ before do
+ lfs_object.destroy
+ end
+
+ context 'with object storage disabled' do
+ it "doesn't attempt to migrate file to object storage" do
+ expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
+
+ put_finalize(with_tempfile: true)
+ end
+ end
+
+ context 'with object storage enabled' do
+ context 'and direct upload enabled' do
+ let!(:fog_connection) do
+ stub_lfs_object_storage(direct_upload: true)
+ end
+
+ ['123123', '../../123123'].each do |remote_id|
+ context "with invalid remote_id: #{remote_id}" do
+ subject do
+ put_finalize_with_args('file.remote_id' => remote_id)
+ end
+
+ it 'responds with status 403' do
+ subject
+
+ expect(response).to have_gitlab_http_status(403)
+ end
+ end
+ end
+
+ context 'with valid remote_id' do
+ before do
+ fog_connection.directories.get('lfs-objects').files.create(
+ key: 'tmp/upload/12312300',
+ body: 'content'
+ )
+ end
+
+ subject do
+ put_finalize_with_args(
+ 'file.remote_id' => '12312300',
+ 'file.name' => 'name')
+ end
+
+ it 'responds with status 200' do
+ subject
+
+ expect(response).to have_gitlab_http_status(200)
+ end
+
+ it 'links the LFS object to the project' do
+ subject
+
+ expect(LfsObject.last.projects).to include(project)
+ end
+
+ it 'has a valid file' do
+ subject
+
+ expect(LfsObject.last.file_store).to eq(ObjectStorage::Store::REMOTE)
+ expect(LfsObject.last.file).to be_exists
+ end
+ end
+ end
+
+ context 'and background upload enabled' do
+ before do
+ stub_lfs_object_storage(background_upload: true)
+ end
+
+ it 'schedules migration of file to object storage' do
+ expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with('LfsObjectUploader', 'LfsObject', :file, kind_of(Numeric))
+
+ put_finalize(with_tempfile: true)
+ end
+ end
+ end
+ end
+
context 'invalid tempfiles' do
- it 'rejects slashes in the tempfile name (path traversal' do
- put_finalize('foo/bar')
- expect(response).to have_gitlab_http_status(403)
+ before do
+ lfs_object.destroy
end
- it 'rejects tempfile names that do not start with the oid' do
- put_finalize("foo#{sample_oid}")
+ it 'rejects slashes in the tempfile name (path traversal)' do
+ put_finalize('../bar', with_tempfile: true)
expect(response).to have_gitlab_http_status(403)
end
end
@@ -1075,7 +1229,7 @@ describe 'Git LFS API and storage' do
end
it 'with location of lfs store and object details' do
- expect(json_response['StoreLFSPath']).to eq(LfsObjectUploader.workhorse_upload_path)
+ expect(json_response['TempPath']).to eq(LfsObjectUploader.workhorse_local_upload_path)
expect(json_response['LfsOid']).to eq(sample_oid)
expect(json_response['LfsSize']).to eq(sample_size)
end
@@ -1177,9 +1331,25 @@ describe 'Git LFS API and storage' do
put "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}/authorize", nil, authorize_headers
end
- def put_finalize(lfs_tmp = lfs_tmp_file)
- put "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}", nil,
- headers.merge('X-Gitlab-Lfs-Tmp' => lfs_tmp).compact
+ def put_finalize(lfs_tmp = lfs_tmp_file, with_tempfile: false)
+ upload_path = LfsObjectUploader.workhorse_local_upload_path
+ file_path = upload_path + '/' + lfs_tmp if lfs_tmp
+
+ if with_tempfile
+ FileUtils.mkdir_p(upload_path)
+ FileUtils.touch(file_path)
+ end
+
+ args = {
+ 'file.path' => file_path,
+ 'file.name' => File.basename(file_path)
+ }.compact
+
+ put_finalize_with_args(args)
+ end
+
+ def put_finalize_with_args(args)
+ put "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}", args, headers
end
def lfs_tmp_file
diff --git a/spec/serializers/pipeline_serializer_spec.rb b/spec/serializers/pipeline_serializer_spec.rb
index c38795ad1a1..f51c11b141f 100644
--- a/spec/serializers/pipeline_serializer_spec.rb
+++ b/spec/serializers/pipeline_serializer_spec.rb
@@ -117,6 +117,7 @@ describe PipelineSerializer do
shared_examples 'no N+1 queries' do
it 'verifies number of queries', :request_store do
recorded = ActiveRecord::QueryRecorder.new { subject }
+
expect(recorded.count).to be_within(1).of(36)
expect(recorded.cached_count).to eq(0)
end
diff --git a/spec/serializers/status_entity_spec.rb b/spec/serializers/status_entity_spec.rb
index 16431ed4188..70402bac2e2 100644
--- a/spec/serializers/status_entity_spec.rb
+++ b/spec/serializers/status_entity_spec.rb
@@ -25,5 +25,10 @@ describe StatusEntity do
allow(Rails.env).to receive(:development?) { true }
expect(entity.as_json[:favicon]).to match_asset_path('/assets/ci_favicons/dev/favicon_status_success.ico')
end
+
+ it 'contains a canary namespaced favicon if canary env' do
+ stub_env('CANARY', 'true')
+ expect(entity.as_json[:favicon]).to match_asset_path('/assets/ci_favicons/canary/favicon_status_success.ico')
+ end
end
end
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index db9c216d3f4..8de0bdf92e2 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -28,7 +28,9 @@ describe Ci::RetryBuildService do
%i[type lock_version target_url base_tags trace_sections
commit_id deployments erased_by_id last_deployment project_id
runner_id tag_taggings taggings tags trigger_request_id
- user_id auto_canceled_by_id retried failure_reason].freeze
+ user_id auto_canceled_by_id retried failure_reason
+ artifacts_file_store artifacts_metadata_store
+ metadata].freeze
shared_examples 'build duplication' do
let(:another_pipeline) { create(:ci_empty_pipeline, project: project) }
diff --git a/spec/services/clusters/create_service_spec.rb b/spec/services/clusters/create_service_spec.rb
index e2e64659dfa..1c2f9c5cf43 100644
--- a/spec/services/clusters/create_service_spec.rb
+++ b/spec/services/clusters/create_service_spec.rb
@@ -82,7 +82,7 @@ describe Clusters::CreateService do
context 'when project has a cluster' do
include_context 'valid params'
- let!(:cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
+ let!(:cluster) { create(:cluster, :provided_by_gcp, :production_environment, projects: [project]) }
it 'does not create a cluster' do
expect(ClusterProvisionWorker).not_to receive(:perform_async)
diff --git a/spec/services/groups/transfer_service_spec.rb b/spec/services/groups/transfer_service_spec.rb
index e1c873f8c1e..999677cfaaa 100644
--- a/spec/services/groups/transfer_service_spec.rb
+++ b/spec/services/groups/transfer_service_spec.rb
@@ -222,8 +222,8 @@ describe Groups::TransferService, :postgresql do
expect(new_parent_group.children.first).to eq(group)
end
- it 'should create a permanent redirect for the group' do
- expect(group.redirect_routes.permanent.count).to eq(1)
+ it 'should create a redirect for the group' do
+ expect(group.redirect_routes.count).to eq(1)
end
end
@@ -243,10 +243,10 @@ describe Groups::TransferService, :postgresql do
end
end
- it 'should create permanent redirects for the subgroups' do
- expect(group.redirect_routes.permanent.count).to eq(1)
- expect(subgroup1.redirect_routes.permanent.count).to eq(1)
- expect(subgroup2.redirect_routes.permanent.count).to eq(1)
+ it 'should create redirects for the subgroups' do
+ expect(group.redirect_routes.count).to eq(1)
+ expect(subgroup1.redirect_routes.count).to eq(1)
+ expect(subgroup2.redirect_routes.count).to eq(1)
end
context 'when the new parent has a higher visibility than the children' do
@@ -287,9 +287,9 @@ describe Groups::TransferService, :postgresql do
end
it 'should create redirects for the projects' do
- expect(group.redirect_routes.permanent.count).to eq(1)
- expect(project1.redirect_routes.permanent.count).to eq(1)
- expect(project2.redirect_routes.permanent.count).to eq(1)
+ expect(group.redirect_routes.count).to eq(1)
+ expect(project1.redirect_routes.count).to eq(1)
+ expect(project2.redirect_routes.count).to eq(1)
end
context 'when the new parent has a higher visibility than the projects' do
@@ -338,12 +338,12 @@ describe Groups::TransferService, :postgresql do
end
end
- it 'should create permanent redirect for the subgroups and projects' do
- expect(group.redirect_routes.permanent.count).to eq(1)
- expect(subgroup1.redirect_routes.permanent.count).to eq(1)
- expect(subgroup2.redirect_routes.permanent.count).to eq(1)
- expect(project1.redirect_routes.permanent.count).to eq(1)
- expect(project2.redirect_routes.permanent.count).to eq(1)
+ it 'should create redirects for the subgroups and projects' do
+ expect(group.redirect_routes.count).to eq(1)
+ expect(subgroup1.redirect_routes.count).to eq(1)
+ expect(subgroup2.redirect_routes.count).to eq(1)
+ expect(project1.redirect_routes.count).to eq(1)
+ expect(project2.redirect_routes.count).to eq(1)
end
end
@@ -380,12 +380,12 @@ describe Groups::TransferService, :postgresql do
end
end
- it 'should create permanent redirect for the subgroups and projects' do
- expect(group.redirect_routes.permanent.count).to eq(1)
- expect(project1.redirect_routes.permanent.count).to eq(1)
- expect(subgroup1.redirect_routes.permanent.count).to eq(1)
- expect(nested_subgroup.redirect_routes.permanent.count).to eq(1)
- expect(nested_project.redirect_routes.permanent.count).to eq(1)
+ it 'should create redirects for the subgroups and projects' do
+ expect(group.redirect_routes.count).to eq(1)
+ expect(project1.redirect_routes.count).to eq(1)
+ expect(subgroup1.redirect_routes.count).to eq(1)
+ expect(nested_subgroup.redirect_routes.count).to eq(1)
+ expect(nested_project.redirect_routes.count).to eq(1)
end
end
diff --git a/spec/services/issues/move_service_spec.rb b/spec/services/issues/move_service_spec.rb
index c148a98569b..a9aee9e100f 100644
--- a/spec/services/issues/move_service_spec.rb
+++ b/spec/services/issues/move_service_spec.rb
@@ -6,7 +6,7 @@ describe Issues::MoveService do
let(:title) { 'Some issue' }
let(:description) { 'Some issue description' }
let(:old_project) { create(:project) }
- let(:new_project) { create(:project, group: create(:group)) }
+ let(:new_project) { create(:project) }
let(:milestone1) { create(:milestone, project_id: old_project.id, title: 'v9.0') }
let(:old_issue) do
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index 4413c6ef83e..2cacb97a293 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -70,6 +70,16 @@ describe Projects::CreateService, '#execute' do
opts[:default_branch] = 'master'
expect(create_project(user, opts)).to eq(nil)
end
+
+ it 'sets invalid service as inactive' do
+ create(:service, type: 'JiraService', project: nil, template: true, active: true)
+
+ project = create_project(user, opts)
+ service = project.services.first
+
+ expect(project).to be_persisted
+ expect(service.active).to be false
+ end
end
context 'wiki_enabled creates repository directory' do
@@ -232,14 +242,15 @@ describe Projects::CreateService, '#execute' do
end
context 'when a bad service template is created' do
- it 'reports an error in the imported project' do
+ it 'sets service to be inactive' do
opts[:import_url] = 'http://www.gitlab.com/gitlab-org/gitlab-ce'
create(:service, type: 'DroneCiService', project: nil, template: true, active: true)
project = create_project(user, opts)
+ service = project.services.first
- expect(project.errors.full_messages_for(:base).first).to match(/Unable to save project. Error: Unable to save DroneCiService/)
- expect(project.services.count).to eq 0
+ expect(project).to be_persisted
+ expect(service.active).to be false
end
end
diff --git a/spec/services/protected_branches/create_service_spec.rb b/spec/services/protected_branches/create_service_spec.rb
index 53b3e5e365d..786493c3577 100644
--- a/spec/services/protected_branches/create_service_spec.rb
+++ b/spec/services/protected_branches/create_service_spec.rb
@@ -35,5 +35,18 @@ describe ProtectedBranches::CreateService do
expect { service.execute }.to raise_error(Gitlab::Access::AccessDeniedError)
end
end
+
+ context 'when a policy restricts rule creation' do
+ before do
+ policy = instance_double(ProtectedBranchPolicy, can?: false)
+ expect(ProtectedBranchPolicy).to receive(:new).and_return(policy)
+ end
+
+ it "prevents creation of the protected branch rule" do
+ expect do
+ service.execute
+ end.to raise_error(Gitlab::Access::AccessDeniedError)
+ end
+ end
end
end
diff --git a/spec/services/protected_branches/destroy_service_spec.rb b/spec/services/protected_branches/destroy_service_spec.rb
new file mode 100644
index 00000000000..4a391b6c25c
--- /dev/null
+++ b/spec/services/protected_branches/destroy_service_spec.rb
@@ -0,0 +1,30 @@
+require 'spec_helper'
+
+describe ProtectedBranches::DestroyService do
+ let(:protected_branch) { create(:protected_branch) }
+ let(:project) { protected_branch.project }
+ let(:user) { project.owner }
+
+ describe '#execute' do
+ subject(:service) { described_class.new(project, user) }
+
+ it 'destroys a protected branch' do
+ service.execute(protected_branch)
+
+ expect(protected_branch).to be_destroyed
+ end
+
+ context 'when a policy restricts rule deletion' do
+ before do
+ policy = instance_double(ProtectedBranchPolicy, can?: false)
+ expect(ProtectedBranchPolicy).to receive(:new).and_return(policy)
+ end
+
+ it "prevents deletion of the protected branch rule" do
+ expect do
+ service.execute(protected_branch)
+ end.to raise_error(Gitlab::Access::AccessDeniedError)
+ end
+ end
+ end
+end
diff --git a/spec/services/protected_branches/update_service_spec.rb b/spec/services/protected_branches/update_service_spec.rb
index 9fa5983db66..3f6f8e09565 100644
--- a/spec/services/protected_branches/update_service_spec.rb
+++ b/spec/services/protected_branches/update_service_spec.rb
@@ -22,5 +22,16 @@ describe ProtectedBranches::UpdateService do
expect { service.execute(protected_branch) }.to raise_error(Gitlab::Access::AccessDeniedError)
end
end
+
+ context 'when a policy restricts rule update' do
+ before do
+ policy = instance_double(ProtectedBranchPolicy, can?: false)
+ expect(ProtectedBranchPolicy).to receive(:new).and_return(policy)
+ end
+
+ it "prevents creation of the protected branch rule" do
+ expect { service.execute(protected_branch) }.to raise_error(Gitlab::Access::AccessDeniedError)
+ end
+ end
end
end
diff --git a/spec/services/protected_tags/destroy_service_spec.rb b/spec/services/protected_tags/destroy_service_spec.rb
new file mode 100644
index 00000000000..e12f53a2221
--- /dev/null
+++ b/spec/services/protected_tags/destroy_service_spec.rb
@@ -0,0 +1,17 @@
+require 'spec_helper'
+
+describe ProtectedTags::DestroyService do
+ let(:protected_tag) { create(:protected_tag) }
+ let(:project) { protected_tag.project }
+ let(:user) { project.owner }
+
+ describe '#execute' do
+ subject(:service) { described_class.new(project, user) }
+
+ it 'destroys a protected tag' do
+ service.execute(protected_tag)
+
+ expect(protected_tag).to be_destroyed
+ end
+ end
+end
diff --git a/spec/services/verify_pages_domain_service_spec.rb b/spec/services/verify_pages_domain_service_spec.rb
index 576db1dde2d..d974cc0226f 100644
--- a/spec/services/verify_pages_domain_service_spec.rb
+++ b/spec/services/verify_pages_domain_service_spec.rb
@@ -93,6 +93,25 @@ describe VerifyPagesDomainService do
expect(domain).not_to be_enabled
end
end
+
+ context 'invalid domain' do
+ let(:domain) { build(:pages_domain, :expired, :with_missing_chain) }
+
+ before do
+ domain.save(validate: false)
+ end
+
+ it 'can be disabled' do
+ error_status[:message] += '. It is now disabled.'
+
+ stub_resolver
+
+ expect(service.execute).to eq(error_status)
+
+ expect(domain).not_to be_verified
+ expect(domain).not_to be_enabled
+ end
+ end
end
context 'timeout behaviour' do
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index 5051cd34564..e8cecf361ff 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -97,6 +97,10 @@ RSpec.configure do |config|
TestEnv.init
end
+ config.after(:all) do
+ TestEnv.clean_test_path
+ end
+
config.before(:example) do
# Skip pre-receive hook check so we can use the web editor and merge.
allow_any_instance_of(Gitlab::Git::Hook).to receive(:trigger).and_return([true, nil])
diff --git a/spec/support/gitaly.rb b/spec/support/gitaly.rb
index c7e8a39a617..9cf541372b5 100644
--- a/spec/support/gitaly.rb
+++ b/spec/support/gitaly.rb
@@ -1,11 +1,13 @@
RSpec.configure do |config|
config.before(:each) do |example|
if example.metadata[:disable_gitaly]
- allow(Gitlab::GitalyClient).to receive(:feature_enabled?).and_return(false)
+ # Use 'and_wrap_original' to make sure the arguments are valid
+ allow(Gitlab::GitalyClient).to receive(:feature_enabled?).and_wrap_original { |m, *args| m.call(*args) && false }
else
next if example.metadata[:skip_gitaly_mock]
- allow(Gitlab::GitalyClient).to receive(:feature_enabled?).and_return(true)
+ # Use 'and_wrap_original' to make sure the arguments are valid
+ allow(Gitlab::GitalyClient).to receive(:feature_enabled?).and_wrap_original { |m, *args| m.call(*args) || true }
end
end
end
diff --git a/spec/support/http_io/http_io_helpers.rb b/spec/support/http_io/http_io_helpers.rb
new file mode 100644
index 00000000000..31e07e720cd
--- /dev/null
+++ b/spec/support/http_io/http_io_helpers.rb
@@ -0,0 +1,64 @@
+module HttpIOHelpers
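+ # Stubs WebMock responses (200, ranged 206, and 500) for a fake remote trace
+ # URL and provides buffer-size helpers used by Gitlab::Ci::Trace::HttpIO specs.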
+ def stub_remote_trace_206
+ WebMock.stub_request(:get, remote_trace_url)
+ .to_return { |request| remote_trace_response(request, 206) }
+ end
+
+ def stub_remote_trace_200
+ WebMock.stub_request(:get, remote_trace_url)
+ .to_return { |request| remote_trace_response(request, 200) }
+ end
+
+ def stub_remote_trace_500
+ WebMock.stub_request(:get, remote_trace_url)
+ .to_return(status: [500, "Internal Server Error"])
+ end
+
+ def remote_trace_url
+ "http://trace.com/trace"
+ end
+
+ def remote_trace_response(request, response_status)
+ range = request.headers['Range'].match(/bytes=(\d+)-(\d+)/)
+
+ {
+ status: response_status,
+ headers: remote_trace_response_headers(response_status, range[1].to_i, range[2].to_i),
+ body: range_trace_body(range[1].to_i, range[2].to_i)
+ }
+ end
+
+ def remote_trace_response_headers(response_status, from, to)
+ headers = { 'Content-Type' => 'text/plain' }
+
+ if response_status == 206
+ headers.merge!('Content-Range' => "bytes #{from}-#{to}/#{remote_trace_size}")
+ end
+
+ headers
+ end
+
+ def range_trace_body(from, to)
+ remote_trace_body[from..to]
+ end
+
+ def remote_trace_body
+ @remote_trace_body ||= File.read(expand_fixture_path('trace/sample_trace'))
+ end
+
+ def remote_trace_size
+ remote_trace_body.length
+ end
+
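+ # Adjust the HttpIO buffer size relative to the trace size so specs can
+ # exercise both multi-chunk and single-chunk reads.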
+ def set_smaller_buffer_size_than(file_size)
+ blocks = (file_size / 128)
+ new_size = (blocks / 2) * 128
+ stub_const("Gitlab::Ci::Trace::HttpIO::BUFFER_SIZE", new_size)
+ end
+
+ def set_larger_buffer_size_than(file_size)
+ blocks = (file_size / 128)
+ new_size = (blocks * 2) * 128
+ stub_const("Gitlab::Ci::Trace::HttpIO::BUFFER_SIZE", new_size)
+ end
+end
diff --git a/spec/support/migrations_helpers.rb b/spec/support/migrations_helpers.rb
index 6bf976a2cf9..5d6f662e8fe 100644
--- a/spec/support/migrations_helpers.rb
+++ b/spec/support/migrations_helpers.rb
@@ -1,6 +1,9 @@
module MigrationsHelpers
def table(name)
- Class.new(ActiveRecord::Base) { self.table_name = name }
+ Class.new(ActiveRecord::Base) do
+ self.table_name = name
+ self.inheritance_column = :_type_disabled
+ end
end
def migrations_paths
diff --git a/spec/support/shared_examples/uploaders/object_storage_shared_examples.rb b/spec/support/shared_examples/uploaders/object_storage_shared_examples.rb
new file mode 100644
index 00000000000..6352f1527cd
--- /dev/null
+++ b/spec/support/shared_examples/uploaders/object_storage_shared_examples.rb
@@ -0,0 +1,138 @@
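+# Switches the uploader under test to the given object store before each example.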
+shared_context 'with storage' do |store, **stub_params|
+ before do
+ subject.object_store = store
+ end
+end
+
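+# Verifies that the uploader can migrate a file between stores: the checksum is
+# preserved, the original file is removed, and failed stores or concurrent
+# migrations guarded by an exclusive lease leave the object_store unchanged.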
+shared_examples "migrates" do |to_store:, from_store: nil|
+ let(:to) { to_store }
+ let(:from) { from_store || subject.object_store }
+
+ def migrate(to)
+ subject.migrate!(to)
+ end
+
+ def checksum
+ Digest::SHA256.hexdigest(subject.read)
+ end
+
+ before do
+ migrate(from)
+ end
+
+ it 'returns corresponding file type' do
+ expect(subject).to be_an(CarrierWave::Uploader::Base)
+ expect(subject).to be_a(ObjectStorage::Concern)
+
+ if from == described_class::Store::REMOTE
+ expect(subject.file).to be_a(CarrierWave::Storage::Fog::File)
+ elsif from == described_class::Store::LOCAL
+ expect(subject.file).to be_a(CarrierWave::SanitizedFile)
+ else
+ raise 'Unexpected file type'
+ end
+ end
+
+ it 'does nothing when migrating to the current store' do
+ expect { migrate(from) }.not_to change { subject.object_store }.from(from)
+ end
+
+ it 'migrates to the specified store' do
+ from_checksum = checksum
+
+ expect { migrate(to) }.to change { subject.object_store }.from(from).to(to)
+ expect(checksum).to eq(from_checksum)
+ end
+
+ it 'removes the original file after the migration' do
+ original_file = subject.file.path
+ migrate(to)
+
+ expect(File.exist?(original_file)).to be_falsey
+ end
+
+ it 'can access the original file during migration' do
+ file = subject.file
+
+ allow(subject).to receive(:delete_migrated_file) { } # Remove as a callback of :migrate
+ allow(subject).to receive(:record_upload) { } # Remove as a callback of :store (:record_upload)
+
+ expect(file.exists?).to be_truthy
+ expect { migrate(to) }.not_to change { file.exists? }
+ end
+
+ context 'when migrate! is not occupied by another process' do
+ it 'executes migrate!' do
+ expect(subject).to receive(:object_store=).at_least(1)
+
+ migrate(to)
+ end
+
+ it 'executes use_file' do
+ expect(subject).to receive(:unsafe_use_file).once
+
+ subject.use_file
+ end
+ end
+
+ context 'when migrate! is occupied by another process' do
+ let(:exclusive_lease_key) { "object_storage_migrate:#{subject.model.class}:#{subject.model.id}" }
+
+ before do
+ @uuid = Gitlab::ExclusiveLease.new(exclusive_lease_key, timeout: 1.hour.to_i).try_obtain
+ end
+
+ it 'does not execute migrate!' do
+ expect(subject).not_to receive(:unsafe_migrate!)
+
+ expect { migrate(to) }.to raise_error('exclusive lease already taken')
+ end
+
+ it 'does not execute use_file' do
+ expect(subject).not_to receive(:unsafe_use_file)
+
+ expect { subject.use_file }.to raise_error('exclusive lease already taken')
+ end
+
+ after do
+ Gitlab::ExclusiveLease.cancel(exclusive_lease_key, @uuid)
+ end
+ end
+
+ context 'migration is unsuccessful' do
+ shared_examples "handles gracefully" do |error:|
+ it 'does not update the object_store' do
+ expect { migrate(to) }.to raise_error(error)
+ expect(subject.object_store).to eq(from)
+ end
+
+ it 'does not delete the original file' do
+ expect { migrate(to) }.to raise_error(error)
+ expect(subject.exists?).to be_truthy
+ end
+ end
+
+ context 'when the store is not supported' do
+ let(:to) { -1 } # not a valid store
+
+ include_examples "handles gracefully", error: ObjectStorage::UnknownStoreError
+ end
+
+ context 'upon a fog failure' do
+ before do
+ storage_class = subject.send(:storage_for, to).class
+ expect_any_instance_of(storage_class).to receive(:store!).and_raise("Store failure.")
+ end
+
+ include_examples "handles gracefully", error: "Store failure."
+ end
+
+ context 'upon a database failure' do
+ before do
+ expect(uploader).to receive(:persist_object_store!).and_raise("ActiveRecord failure.")
+ end
+
+ include_examples "handles gracefully", error: "ActiveRecord failure."
+ end
+ end
+end
diff --git a/spec/support/stub_object_storage.rb b/spec/support/stub_object_storage.rb
new file mode 100644
index 00000000000..6e88641da42
--- /dev/null
+++ b/spec/support/stub_object_storage.rb
@@ -0,0 +1,48 @@
+module StubConfiguration
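+ # Stubs an uploader's object storage settings and, when enabled, backs the
+ # remote directory with a mocked Fog connection so no real requests are made.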
+ def stub_object_storage_uploader(
+ config:,
+ uploader:,
+ remote_directory:,
+ enabled: true,
+ proxy_download: false,
+ background_upload: false,
+ direct_upload: false
+ )
+ allow(config).to receive(:enabled) { enabled }
+ allow(config).to receive(:proxy_download) { proxy_download }
+ allow(config).to receive(:background_upload) { background_upload }
+ allow(config).to receive(:direct_upload) { direct_upload }
+
+ return unless enabled
+
+ Fog.mock!
+
+ ::Fog::Storage.new(uploader.object_store_credentials).tap do |connection|
+ begin
+ connection.directories.create(key: remote_directory)
+ rescue Excon::Error::Conflict
+ end
+ end
+ end
+
+ def stub_artifacts_object_storage(**params)
+ stub_object_storage_uploader(config: Gitlab.config.artifacts.object_store,
+ uploader: JobArtifactUploader,
+ remote_directory: 'artifacts',
+ **params)
+ end
+
+ def stub_lfs_object_storage(**params)
+ stub_object_storage_uploader(config: Gitlab.config.lfs.object_store,
+ uploader: LfsObjectUploader,
+ remote_directory: 'lfs-objects',
+ **params)
+ end
+
+ def stub_uploads_object_storage(uploader = described_class, **params)
+ stub_object_storage_uploader(config: Gitlab.config.uploads.object_store,
+ uploader: uploader,
+ remote_directory: 'uploads',
+ **params)
+ end
+end
diff --git a/spec/tasks/gitlab/artifacts/migrate_rake_spec.rb b/spec/tasks/gitlab/artifacts/migrate_rake_spec.rb
new file mode 100644
index 00000000000..8544fb62b5a
--- /dev/null
+++ b/spec/tasks/gitlab/artifacts/migrate_rake_spec.rb
@@ -0,0 +1,118 @@
+require 'rake_helper'
+
+describe 'gitlab:artifacts namespace rake task' do
+ before(:context) do
+ Rake.application.rake_require 'tasks/gitlab/artifacts/migrate'
+ end
+
+ let(:object_storage_enabled) { false }
+
+ before do
+ stub_artifacts_object_storage(enabled: object_storage_enabled)
+ end
+
+ subject { run_rake_task('gitlab:artifacts:migrate') }
+
+ context 'legacy artifacts' do
+ describe 'migrate' do
+ let!(:build) { create(:ci_build, :legacy_artifacts, artifacts_file_store: store, artifacts_metadata_store: store) }
+
+ context 'when local storage is used' do
+ let(:store) { ObjectStorage::Store::LOCAL }
+
+ context 'and job does not have file store defined' do
+ let(:object_storage_enabled) { true }
+ let(:store) { nil }
+
+ it "migrates file to remote storage" do
+ subject
+
+ expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::REMOTE)
+ expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::REMOTE)
+ end
+ end
+
+ context 'and remote storage is defined' do
+ let(:object_storage_enabled) { true }
+
+ it "migrates file to remote storage" do
+ subject
+
+ expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::REMOTE)
+ expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::REMOTE)
+ end
+ end
+
+ context 'and remote storage is not defined' do
+ it "fails to migrate to remote storage" do
+ subject
+
+ expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::LOCAL)
+ expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::LOCAL)
+ end
+ end
+ end
+
+ context 'when remote storage is used' do
+ let(:object_storage_enabled) { true }
+
+ let(:store) { ObjectStorage::Store::REMOTE }
+
+ it "file stays on remote storage" do
+ subject
+
+ expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::REMOTE)
+ expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::REMOTE)
+ end
+ end
+ end
+ end
+
+ context 'job artifacts' do
+ let!(:artifact) { create(:ci_job_artifact, :archive, file_store: store) }
+
+ context 'when local storage is used' do
+ let(:store) { ObjectStorage::Store::LOCAL }
+
+ context 'and job does not have file store defined' do
+ let(:object_storage_enabled) { true }
+ let(:store) { nil }
+
+ it "migrates file to remote storage" do
+ subject
+
+ expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
+ end
+ end
+
+ context 'and remote storage is defined' do
+ let(:object_storage_enabled) { true }
+
+ it "migrates file to remote storage" do
+ subject
+
+ expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
+ end
+ end
+
+ context 'and remote storage is not defined' do
+ it "fails to migrate to remote storage" do
+ subject
+
+ expect(artifact.reload.file_store).to eq(ObjectStorage::Store::LOCAL)
+ end
+ end
+ end
+
+ context 'when remote storage is used' do
+ let(:object_storage_enabled) { true }
+ let(:store) { ObjectStorage::Store::REMOTE }
+
+ it "file stays on remote storage" do
+ subject
+
+ expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
+ end
+ end
+ end
+end
diff --git a/spec/tasks/gitlab/lfs/migrate_rake_spec.rb b/spec/tasks/gitlab/lfs/migrate_rake_spec.rb
new file mode 100644
index 00000000000..66d1a192a96
--- /dev/null
+++ b/spec/tasks/gitlab/lfs/migrate_rake_spec.rb
@@ -0,0 +1,37 @@
+require 'rake_helper'
+
+describe 'gitlab:lfs namespace rake task' do
+ before :all do
+ Rake.application.rake_require 'tasks/gitlab/lfs/migrate'
+ end
+
+ describe 'migrate' do
+ let(:local) { ObjectStorage::Store::LOCAL }
+ let(:remote) { ObjectStorage::Store::REMOTE }
+ let!(:lfs_object) { create(:lfs_object, :with_file, file_store: local) }
+
+ def lfs_migrate
+ run_rake_task('gitlab:lfs:migrate')
+ end
+
+ context 'object storage disabled' do
+ before do
+ stub_lfs_object_storage(enabled: false)
+ end
+
+ it "doesn't migrate files" do
+ expect { lfs_migrate }.not_to change { lfs_object.reload.file_store }
+ end
+ end
+
+ context 'object storage enabled' do
+ before do
+ stub_lfs_object_storage
+ end
+
+ it 'migrates local file to object storage' do
+ expect { lfs_migrate }.to change { lfs_object.reload.file_store }.from(local).to(remote)
+ end
+ end
+ end
+end
diff --git a/spec/tasks/gitlab/uploads/migrate_rake_spec.rb b/spec/tasks/gitlab/uploads/migrate_rake_spec.rb
new file mode 100644
index 00000000000..b778d26060d
--- /dev/null
+++ b/spec/tasks/gitlab/uploads/migrate_rake_spec.rb
@@ -0,0 +1,28 @@
+require 'rake_helper'
+
+describe 'gitlab:uploads:migrate rake tasks' do
+ let!(:projects) { create_list(:project, 10, :with_avatar) }
+ let(:model_class) { Project }
+ let(:uploader_class) { AvatarUploader }
+ let(:mounted_as) { :avatar }
+ let(:batch_size) { 3 }
+
+ before do
+ stub_env('BATCH', batch_size.to_s)
+ stub_uploads_object_storage(uploader_class)
+ Rake.application.rake_require 'tasks/gitlab/uploads/migrate'
+
+ allow(ObjectStorage::MigrateUploadsWorker).to receive(:perform_async)
+ end
+
+ def run
+ args = [uploader_class.to_s, model_class.to_s, mounted_as].compact
+ run_rake_task("gitlab:uploads:migrate", *args)
+ end
+
+ it 'enqueues jobs in batches' do
+ expect(ObjectStorage::MigrateUploadsWorker).to receive(:enqueue!).exactly(4).times
+
+ run
+ end
+end
diff --git a/spec/uploaders/attachment_uploader_spec.rb b/spec/uploaders/attachment_uploader_spec.rb
index 091ba824fc6..d302c14efb9 100644
--- a/spec/uploaders/attachment_uploader_spec.rb
+++ b/spec/uploaders/attachment_uploader_spec.rb
@@ -11,4 +11,26 @@ describe AttachmentUploader do
store_dir: %r[uploads/-/system/note/attachment/],
upload_path: %r[uploads/-/system/note/attachment/],
absolute_path: %r[#{CarrierWave.root}/uploads/-/system/note/attachment/]
+
+ context "object_store is REMOTE" do
+ before do
+ stub_uploads_object_storage
+ end
+
+ include_context 'with storage', described_class::Store::REMOTE
+
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[note/attachment/],
+ upload_path: %r[note/attachment/]
+ end
+
+ describe "#migrate!" do
+ before do
+ uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/doc_sample.txt')))
+ stub_uploads_object_storage
+ end
+
+ it_behaves_like "migrates", to_store: described_class::Store::REMOTE
+ it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
+ end
end
diff --git a/spec/uploaders/avatar_uploader_spec.rb b/spec/uploaders/avatar_uploader_spec.rb
index bf9028c9260..b0468bc35ff 100644
--- a/spec/uploaders/avatar_uploader_spec.rb
+++ b/spec/uploaders/avatar_uploader_spec.rb
@@ -1,7 +1,7 @@
require 'spec_helper'
describe AvatarUploader do
- let(:model) { create(:user, :with_avatar) }
+ let(:model) { build_stubbed(:user) }
let(:uploader) { described_class.new(model, :avatar) }
let(:upload) { create(:upload, model: model) }
@@ -12,15 +12,28 @@ describe AvatarUploader do
upload_path: %r[uploads/-/system/user/avatar/],
absolute_path: %r[#{CarrierWave.root}/uploads/-/system/user/avatar/]
- describe '#move_to_cache' do
- it 'is false' do
- expect(uploader.move_to_cache).to eq(false)
+ context "object_store is REMOTE" do
+ before do
+ stub_uploads_object_storage
end
+
+ include_context 'with storage', described_class::Store::REMOTE
+
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[user/avatar/],
+ upload_path: %r[user/avatar/]
end
- describe '#move_to_store' do
- it 'is false' do
- expect(uploader.move_to_store).to eq(false)
+ context "with a file" do
+ let(:project) { create(:project, :with_avatar) }
+ let(:uploader) { project.avatar }
+ let(:upload) { uploader.upload }
+
+ before do
+ stub_uploads_object_storage
end
+
+ it_behaves_like "migrates", to_store: described_class::Store::REMOTE
+ it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
end
end
diff --git a/spec/uploaders/file_mover_spec.rb b/spec/uploaders/file_mover_spec.rb
index bc024cd307c..68b7e24776d 100644
--- a/spec/uploaders/file_mover_spec.rb
+++ b/spec/uploaders/file_mover_spec.rb
@@ -36,6 +36,12 @@ describe FileMover do
it 'creates a new update record' do
expect { subject }.to change { Upload.count }.by(1)
end
+
+ it 'schedules a background migration' do
+ expect_any_instance_of(PersonalFileUploader).to receive(:schedule_background_upload).once
+
+ subject
+ end
end
context 'when update_markdown fails' do
diff --git a/spec/uploaders/file_uploader_spec.rb b/spec/uploaders/file_uploader_spec.rb
index b42ce982b27..db2810bbe1d 100644
--- a/spec/uploaders/file_uploader_spec.rb
+++ b/spec/uploaders/file_uploader_spec.rb
@@ -11,32 +11,41 @@ describe FileUploader do
shared_examples 'builds correct legacy storage paths' do
include_examples 'builds correct paths',
store_dir: %r{awesome/project/\h+},
+ upload_path: %r{\h+/<filename>},
absolute_path: %r{#{described_class.root}/awesome/project/secret/foo.jpg}
end
- shared_examples 'uses hashed storage' do
- context 'when rolled out attachments' do
- let(:project) { build_stubbed(:project, namespace: group, name: 'project') }
+ context 'legacy storage' do
+ it_behaves_like 'builds correct legacy storage paths'
- before do
- allow(project).to receive(:disk_path).and_return('ca/fe/fe/ed')
- end
+ context 'uses hashed storage' do
+ context 'when rolled out attachments' do
+ let(:project) { build_stubbed(:project, namespace: group, name: 'project') }
- it_behaves_like 'builds correct paths',
- store_dir: %r{ca/fe/fe/ed/\h+},
- absolute_path: %r{#{described_class.root}/ca/fe/fe/ed/secret/foo.jpg}
- end
+ include_examples 'builds correct paths',
+ store_dir: %r{@hashed/\h{2}/\h{2}/\h+},
+ upload_path: %r{\h+/<filename>}
+ end
- context 'when only repositories are rolled out' do
- let(:project) { build_stubbed(:project, namespace: group, name: 'project', storage_version: Project::HASHED_STORAGE_FEATURES[:repository]) }
+ context 'when only repositories are rolled out' do
+ let(:project) { build_stubbed(:project, namespace: group, name: 'project', storage_version: Project::HASHED_STORAGE_FEATURES[:repository]) }
- it_behaves_like 'builds correct legacy storage paths'
+ it_behaves_like 'builds correct legacy storage paths'
+ end
end
end
- context 'legacy storage' do
- it_behaves_like 'builds correct legacy storage paths'
- include_examples 'uses hashed storage'
+ context 'object store is remote' do
+ before do
+ stub_uploads_object_storage
+ end
+
+ include_context 'with storage', described_class::Store::REMOTE
+
+ # always use hashed storage path for remote uploads
+ it_behaves_like 'builds correct paths',
+ store_dir: %r{@hashed/\h{2}/\h{2}/\h+},
+ upload_path: %r{@hashed/\h{2}/\h{2}/\h+/\h+/<filename>}
end
describe 'initialize' do
@@ -78,6 +87,16 @@ describe FileUploader do
end
end
+ describe "#migrate!" do
+ before do
+ uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/dk.png')))
+ stub_uploads_object_storage
+ end
+
+ it_behaves_like "migrates", to_store: described_class::Store::REMOTE
+ it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
+ end
+
describe '#upload=' do
let(:secret) { SecureRandom.hex }
let(:upload) { create(:upload, :issuable_upload, secret: secret, filename: 'file.txt') }
@@ -93,15 +112,5 @@ describe FileUploader do
uploader.upload = upload
end
-
- context 'uploader_context is empty' do
- it 'fallbacks to regex based extraction' do
- expect(upload).to receive(:uploader_context).and_return({})
-
- uploader.upload = upload
- expect(uploader.secret).to eq(secret)
- expect(uploader.instance_variable_get(:@identifier)).to eq('file.txt')
- end
- end
end
end
diff --git a/spec/uploaders/gitlab_uploader_spec.rb b/spec/uploaders/gitlab_uploader_spec.rb
index 60e35dcf235..4fba122cce1 100644
--- a/spec/uploaders/gitlab_uploader_spec.rb
+++ b/spec/uploaders/gitlab_uploader_spec.rb
@@ -27,7 +27,7 @@ describe GitlabUploader do
describe '#file_cache_storage?' do
context 'when file storage is used' do
before do
- uploader_class.cache_storage(:file)
+ expect(uploader_class).to receive(:cache_storage) { CarrierWave::Storage::File }
end
it { is_expected.to be_file_cache_storage }
@@ -35,7 +35,7 @@ describe GitlabUploader do
context 'when is remote storage' do
before do
- uploader_class.cache_storage(:fog)
+ expect(uploader_class).to receive(:cache_storage) { CarrierWave::Storage::Fog }
end
it { is_expected.not_to be_file_cache_storage }
diff --git a/spec/uploaders/job_artifact_uploader_spec.rb b/spec/uploaders/job_artifact_uploader_spec.rb
index 5612ec7e661..42036d67f3d 100644
--- a/spec/uploaders/job_artifact_uploader_spec.rb
+++ b/spec/uploaders/job_artifact_uploader_spec.rb
@@ -1,7 +1,8 @@
require 'spec_helper'
describe JobArtifactUploader do
- let(:job_artifact) { create(:ci_job_artifact) }
+ let(:store) { described_class::Store::LOCAL }
+ let(:job_artifact) { create(:ci_job_artifact, file_store: store) }
let(:uploader) { described_class.new(job_artifact, :file) }
subject { uploader }
@@ -11,6 +12,17 @@ describe JobArtifactUploader do
cache_dir: %r[artifacts/tmp/cache],
work_dir: %r[artifacts/tmp/work]
+ context "object store is REMOTE" do
+ before do
+ stub_artifacts_object_storage
+ end
+
+ include_context 'with storage', described_class::Store::REMOTE
+
+ it_behaves_like "builds correct paths",
+ store_dir: %r[\h{2}/\h{2}/\h{64}/\d{4}_\d{1,2}_\d{1,2}/\d+/\d+\z]
+ end
+
describe '#open' do
subject { uploader.open }
@@ -36,6 +48,17 @@ describe JobArtifactUploader do
end
end
end
+
+ context 'when trace is stored in Object storage' do
+ before do
+ allow(uploader).to receive(:file_storage?) { false }
+ allow(uploader).to receive(:url) { 'http://object_storage.com/trace' }
+ end
+
+ it 'returns http io stream' do
+ is_expected.to be_a(Gitlab::Ci::Trace::HttpIO)
+ end
+ end
end
context 'file is stored in valid local_path' do
@@ -55,4 +78,14 @@ describe JobArtifactUploader do
it { is_expected.to include("/#{job_artifact.job_id}/#{job_artifact.id}/") }
it { is_expected.to end_with("ci_build_artifacts.zip") }
end
+
+ describe "#migrate!" do
+ before do
+ uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/trace/sample_trace')))
+ stub_artifacts_object_storage
+ end
+
+ it_behaves_like "migrates", to_store: described_class::Store::REMOTE
+ it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
+ end
end
diff --git a/spec/uploaders/legacy_artifact_uploader_spec.rb b/spec/uploaders/legacy_artifact_uploader_spec.rb
index 54c6a8b869b..eeb6fd90c9d 100644
--- a/spec/uploaders/legacy_artifact_uploader_spec.rb
+++ b/spec/uploaders/legacy_artifact_uploader_spec.rb
@@ -1,7 +1,8 @@
require 'rails_helper'
describe LegacyArtifactUploader do
- let(:job) { create(:ci_build) }
+ let(:store) { described_class::Store::LOCAL }
+ let(:job) { create(:ci_build, artifacts_file_store: store) }
let(:uploader) { described_class.new(job, :legacy_artifacts_file) }
let(:local_path) { described_class.root }
@@ -20,6 +21,17 @@ describe LegacyArtifactUploader do
cache_dir: %r[artifacts/tmp/cache],
work_dir: %r[artifacts/tmp/work]
+ context 'object store is remote' do
+ before do
+ stub_artifacts_object_storage
+ end
+
+ include_context 'with storage', described_class::Store::REMOTE
+
+ it_behaves_like "builds correct paths",
+ store_dir: %r[\d{4}_\d{1,2}/\d+/\d+\z]
+ end
+
describe '#filename' do
# we need to use the uploader here, as this forces the mounter to be used,
# which initialises the uploader.file object
diff --git a/spec/uploaders/lfs_object_uploader_spec.rb b/spec/uploaders/lfs_object_uploader_spec.rb
index 6ebc885daa8..a2fb3886610 100644
--- a/spec/uploaders/lfs_object_uploader_spec.rb
+++ b/spec/uploaders/lfs_object_uploader_spec.rb
@@ -11,4 +11,62 @@ describe LfsObjectUploader do
store_dir: %r[\h{2}/\h{2}],
cache_dir: %r[/lfs-objects/tmp/cache],
work_dir: %r[/lfs-objects/tmp/work]
+
+ context "object store is REMOTE" do
+ before do
+ stub_lfs_object_storage
+ end
+
+ include_context 'with storage', described_class::Store::REMOTE
+
+ it_behaves_like "builds correct paths",
+ store_dir: %r[\h{2}/\h{2}]
+ end
+
+ describe 'migration to object storage' do
+ context 'with object storage disabled' do
+ it "is skipped" do
+ expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
+
+ lfs_object
+ end
+ end
+
+ context 'with object storage enabled' do
+ before do
+ stub_lfs_object_storage(background_upload: true)
+ end
+
+ it 'is scheduled to run after creation' do
+ expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with(described_class.name, 'LfsObject', :file, kind_of(Numeric))
+
+ lfs_object
+ end
+ end
+ end
+
+ describe 'remote file' do
+ let(:remote) { described_class::Store::REMOTE }
+ let(:lfs_object) { create(:lfs_object, file_store: remote) }
+
+ context 'with object storage enabled' do
+ before do
+ stub_lfs_object_storage
+ end
+
+ it 'can store file remotely' do
+ allow(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async)
+
+ store_file(lfs_object)
+
+ expect(lfs_object.file_store).to eq remote
+ expect(lfs_object.file.path).not_to be_blank
+ end
+ end
+ end
+
+ def store_file(lfs_object)
+ lfs_object.file = fixture_file_upload(Rails.root.join("spec/fixtures/dk.png"), "image/png")
+ lfs_object.save!
+ end
end
diff --git a/spec/uploaders/namespace_file_uploader_spec.rb b/spec/uploaders/namespace_file_uploader_spec.rb
index 24a2fc0f72e..a8ba01d70b8 100644
--- a/spec/uploaders/namespace_file_uploader_spec.rb
+++ b/spec/uploaders/namespace_file_uploader_spec.rb
@@ -13,4 +13,26 @@ describe NamespaceFileUploader do
store_dir: %r[uploads/-/system/namespace/\d+],
upload_path: IDENTIFIER,
absolute_path: %r[#{CarrierWave.root}/uploads/-/system/namespace/\d+/#{IDENTIFIER}]
+
+ context "object_store is REMOTE" do
+ before do
+ stub_uploads_object_storage
+ end
+
+ include_context 'with storage', described_class::Store::REMOTE
+
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[namespace/\d+/\h+],
+ upload_path: IDENTIFIER
+ end
+
+ describe "#migrate!" do
+ before do
+ uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/doc_sample.txt')))
+ stub_uploads_object_storage
+ end
+
+ it_behaves_like "migrates", to_store: described_class::Store::REMOTE
+ it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
+ end
end
diff --git a/spec/uploaders/object_storage_spec.rb b/spec/uploaders/object_storage_spec.rb
new file mode 100644
index 00000000000..59e02fecbce
--- /dev/null
+++ b/spec/uploaders/object_storage_spec.rb
@@ -0,0 +1,654 @@
+require 'rails_helper'
+require 'carrierwave/storage/fog'
+
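+# Minimal uploader defined only for this spec: it combines ObjectStorage::Concern
+# with RecordsUploads so both the Upload-backed and model-backed store
+# persistence paths can be exercised below.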
+class Implementation < GitlabUploader
+ include ObjectStorage::Concern
+ include ::RecordsUploads::Concern
+ prepend ::ObjectStorage::Extension::RecordsUploads
+
+ storage_options Gitlab.config.uploads
+
+ private
+
+ # user/:id
+ def dynamic_segment
+ File.join(model.class.to_s.underscore, model.id.to_s)
+ end
+end
+
+describe ObjectStorage do
+ let(:uploader_class) { Implementation }
+ let(:object) { build_stubbed(:user) }
+ let(:uploader) { uploader_class.new(object, :file) }
+
+ describe '#object_store=' do
+ before do
+ allow(uploader_class).to receive(:object_store_enabled?).and_return(true)
+ end
+
+ it "reloads the local storage" do
+ uploader.object_store = described_class::Store::LOCAL
+ expect(uploader.file_storage?).to be_truthy
+ end
+
+ it "reloads the REMOTE storage" do
+ uploader.object_store = described_class::Store::REMOTE
+ expect(uploader.file_storage?).to be_falsey
+ end
+
+ context 'object_store is Store::LOCAL' do
+ before do
+ uploader.object_store = described_class::Store::LOCAL
+ end
+
+ describe '#store_dir' do
+ it 'is the composition of (base_dir, dynamic_segment)' do
+ expect(uploader.store_dir).to start_with("uploads/-/system/user/")
+ end
+ end
+ end
+
+ context 'object_store is Store::REMOTE' do
+ before do
+ uploader.object_store = described_class::Store::REMOTE
+ end
+
+ describe '#store_dir' do
+ it 'is the composition of (dynamic_segment)' do
+ expect(uploader.store_dir).to start_with("user/")
+ end
+ end
+ end
+ end
+
+ describe '#object_store' do
+ it "delegates to <mount>_store on model" do
+ expect(object).to receive(:file_store)
+
+ uploader.object_store
+ end
+
+ context 'when store is null' do
+ before do
+ expect(object).to receive(:file_store).and_return(nil)
+ end
+
+ it "returns Store::LOCAL" do
+ expect(uploader.object_store).to eq(described_class::Store::LOCAL)
+ end
+ end
+
+ context 'when value is set' do
+ before do
+ expect(object).to receive(:file_store).and_return(described_class::Store::REMOTE)
+ end
+
+ it "returns the given value" do
+ expect(uploader.object_store).to eq(described_class::Store::REMOTE)
+ end
+ end
+ end
+
+ describe '#file_cache_storage?' do
+ context 'when file storage is used' do
+ before do
+ expect(uploader_class).to receive(:cache_storage) { CarrierWave::Storage::File }
+ end
+
+ it { expect(uploader).to be_file_cache_storage }
+ end
+
+ context 'when is remote storage' do
+ before do
+ expect(uploader_class).to receive(:cache_storage) { CarrierWave::Storage::Fog }
+ end
+
+ it { expect(uploader).not_to be_file_cache_storage }
+ end
+ end
+
+ # this means the model must
+ #   include RecordsUploads::Concern
+ #   prepend ObjectStorage::Extension::RecordsUploads
+ # so that object_store persistence is delegated to the `Upload` model.
+ #
+ context 'when persist_object_store? is false' do
+ let(:object) { create(:project, :with_avatar) }
+ let(:uploader) { object.avatar }
+
+ it { expect(object).to be_a(Avatarable) }
+ it { expect(uploader.persist_object_store?).to be_falsey }
+
+ describe 'delegates the object_store logic to the `Upload` model' do
+ it 'sets @upload to the found `upload`' do
+ expect(uploader.upload).to eq(uploader.upload)
+ end
+
+ it 'sets @object_store to the `Upload` value' do
+ expect(uploader.object_store).to eq(uploader.upload.store)
+ end
+ end
+
+ describe '#migrate!' do
+ let(:new_store) { ObjectStorage::Store::REMOTE }
+
+ before do
+ stub_uploads_object_storage(uploader: AvatarUploader)
+ end
+
+ subject { uploader.migrate!(new_store) }
+
+ it 'persists @object_store to the recorded upload' do
+ subject
+
+ expect(uploader.upload.store).to eq(new_store)
+ end
+
+ context 'when saving the upload record fails' do
+ it 'is handled gracefully' do
+ store = uploader.object_store
+ expect_any_instance_of(Upload).to receive(:save!).and_raise("An error")
+
+ expect { subject }.to raise_error("An error")
+ expect(uploader.exists?).to be_truthy
+ expect(uploader.upload.store).to eq(store)
+ end
+ end
+ end
+ end
+
+ # this means the model holds an <mounted_as>_store attribute directly
+ # and does not delegate the object_store persistence to the `Upload` model.
+ #
+ context 'persist_object_store? is true' do
+ context 'when using JobArtifactsUploader' do
+ let(:store) { described_class::Store::LOCAL }
+ let(:object) { create(:ci_job_artifact, :archive, file_store: store) }
+ let(:uploader) { object.file }
+
+ context 'checking described_class' do
+ it "uploader includes described_class::Concern" do
+ expect(uploader).to be_a(described_class::Concern)
+ end
+ end
+
+ describe '#use_file' do
+ context 'when file is stored locally' do
+ it "yields a regular path" do
+ expect { |b| uploader.use_file(&b) }.not_to yield_with_args(%r[tmp/cache])
+ end
+ end
+
+ context 'when file is stored remotely' do
+ let(:store) { described_class::Store::REMOTE }
+
+ before do
+ stub_artifacts_object_storage
+ end
+
+ it "yields a cache path" do
+ expect { |b| uploader.use_file(&b) }.to yield_with_args(%r[tmp/cache])
+ end
+ end
+ end
+
+ describe '#migrate!' do
+ subject { uploader.migrate!(new_store) }
+
+ shared_examples "updates the underlying <mounted>_store" do
+ it do
+ subject
+
+ expect(object.file_store).to eq(new_store)
+ end
+ end
+
+ context 'when using the same storage' do
+ let(:new_store) { store }
+
+ it "does not migrate the storage" do
+ expect(uploader).not_to receive(:store!)
+ subject
+
+ expect(uploader.object_store).to eq(store)
+ end
+ end
+
+ context 'when migrating to local storage' do
+ let(:store) { described_class::Store::REMOTE }
+ let(:new_store) { described_class::Store::LOCAL }
+
+ before do
+ stub_artifacts_object_storage
+ end
+
+ include_examples "updates the underlying <mounted>_store"
+
+ it "local file does not exist" do
+ expect(File.exist?(uploader.path)).to eq(false)
+ end
+
+ it "remote file exists" do
+ expect(uploader.file.exists?).to be_truthy
+ end
+
+ it "migrates the file" do
+ subject
+
+ expect(uploader.object_store).to eq(new_store)
+ expect(File.exist?(uploader.path)).to eq(true)
+ end
+ end
+
+ context 'when migrating to remote storage' do
+ let(:new_store) { described_class::Store::REMOTE }
+ let!(:current_path) { uploader.path }
+
+ it "file exists locally" do
+ expect(File.exist?(current_path)).to eq(true)
+ end
+
+ context 'when storage is disabled' do
+ before do
+ stub_artifacts_object_storage(enabled: false)
+ end
+
+ it "raises an error" do
+ expect { subject }.to raise_error(/Object Storage is not enabled/)
+ end
+ end
+
+ context 'when credentials are set' do
+ before do
+ stub_artifacts_object_storage
+ end
+
+ include_examples "updates the underlying <mounted>_store"
+
+ it "migrates the file" do
+ subject
+
+ expect(uploader.object_store).to eq(new_store)
+ end
+
+ it "deletes the original file" do
+ subject
+
+ expect(File.exist?(current_path)).to eq(false)
+ end
+
+ context 'when persisting the object store fails' do
+ before do
+ expect(uploader).to receive(:persist_object_store!).and_raise(RuntimeError, "exception")
+ end
+
+ it "does not remove the original file" do
+ expect { subject }.to raise_error(/exception/)
+
+ expect(File.exist?(current_path)).to eq(true)
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+
+ describe '#fog_directory' do
+ let(:remote_directory) { 'directory' }
+
+ before do
+ allow(uploader_class).to receive(:options) do
+ double(object_store: double(remote_directory: remote_directory))
+ end
+ end
+
+ subject { uploader.fog_directory }
+
+ it { is_expected.to eq(remote_directory) }
+ end
+
+ context 'when file is in use' do
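+ # use_file is expected to hold an exclusive lease for the duration of the
+ # block, so the nested migrate!/use_file calls below should raise
+ # 'exclusive lease already taken'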
+ def when_file_is_in_use
+ uploader.use_file do
+ yield
+ end
+ end
+
+ it 'cannot migrate' do
+ when_file_is_in_use do
+ expect(uploader).not_to receive(:unsafe_migrate!)
+
+ expect { uploader.migrate!(described_class::Store::REMOTE) }.to raise_error('exclusive lease already taken')
+ end
+ end
+
+ it 'cannot use_file' do
+ when_file_is_in_use do
+ expect(uploader).not_to receive(:unsafe_use_file)
+
+ expect { uploader.use_file }.to raise_error('exclusive lease already taken')
+ end
+ end
+ end
+
+ describe '#fog_credentials' do
+ let(:connection) { Settingslogic.new("provider" => "AWS") }
+
+ before do
+ allow(uploader_class).to receive(:options) do
+ double(object_store: double(connection: connection))
+ end
+ end
+
+ subject { uploader.fog_credentials }
+
+ it { is_expected.to eq(provider: 'AWS') }
+ end
+
+ describe '#fog_public' do
+ subject { uploader.fog_public }
+
+ it { is_expected.to eq(false) }
+ end
+
+ describe '.workhorse_authorize' do
+ subject { uploader_class.workhorse_authorize }
+
+ before do
+ # ensure that we use the regular Fog libraries;
+ # other tests might call `Fog.mock!`,
+ # which would make these tests fail
+ Fog.unmock!
+ end
+
+ shared_examples 'uses local storage' do
+ it "returns temporary path" do
+ is_expected.to have_key(:TempPath)
+
+ expect(subject[:TempPath]).to start_with(uploader_class.root)
+ expect(subject[:TempPath]).to include(described_class::TMP_UPLOAD_PATH)
+ end
+
+ it "does not return a remote store" do
+ is_expected.not_to have_key(:RemoteObject)
+ end
+ end
+
+ shared_examples 'uses remote storage' do
+ it "returns remote store" do
+ is_expected.to have_key(:RemoteObject)
+
+ expect(subject[:RemoteObject]).to have_key(:ID)
+ expect(subject[:RemoteObject]).to have_key(:GetURL)
+ expect(subject[:RemoteObject]).to have_key(:DeleteURL)
+ expect(subject[:RemoteObject]).to have_key(:StoreURL)
+ expect(subject[:RemoteObject][:GetURL]).to include(described_class::TMP_UPLOAD_PATH)
+ expect(subject[:RemoteObject][:DeleteURL]).to include(described_class::TMP_UPLOAD_PATH)
+ expect(subject[:RemoteObject][:StoreURL]).to include(described_class::TMP_UPLOAD_PATH)
+ end
+
+ it "does not return a local store" do
+ is_expected.not_to have_key(:TempPath)
+ end
+ end
+
+ context 'when object storage is disabled' do
+ before do
+ allow(Gitlab.config.uploads.object_store).to receive(:enabled) { false }
+ end
+
+ it_behaves_like 'uses local storage'
+ end
+
+ context 'when object storage is enabled' do
+ before do
+ allow(Gitlab.config.uploads.object_store).to receive(:enabled) { true }
+ end
+
+ context 'when direct upload is enabled' do
+ before do
+ allow(Gitlab.config.uploads.object_store).to receive(:direct_upload) { true }
+ end
+
+ context 'uses AWS' do
+ before do
+ expect(uploader_class).to receive(:object_store_credentials) do
+ { provider: "AWS",
+ aws_access_key_id: "AWS_ACCESS_KEY_ID",
+ aws_secret_access_key: "AWS_SECRET_ACCESS_KEY",
+ region: "eu-central-1" }
+ end
+ end
+
+ it_behaves_like 'uses remote storage' do
+ let(:storage_url) { "https://uploads.s3-eu-central-1.amazonaws.com/" }
+
+ it 'returns links for S3' do
+ expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
+ expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
+ expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
+ end
+ end
+ end
+
+ context 'uses Google' do
+ before do
+ expect(uploader_class).to receive(:object_store_credentials) do
+ { provider: "Google",
+ google_storage_access_key_id: 'ACCESS_KEY_ID',
+ google_storage_secret_access_key: 'SECRET_ACCESS_KEY' }
+ end
+ end
+
+ it_behaves_like 'uses remote storage' do
+ let(:storage_url) { "https://storage.googleapis.com/uploads/" }
+
+ it 'returns links for Google Cloud' do
+ expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
+ expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
+ expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
+ end
+ end
+ end
+
+ context 'uses GDK/minio' do
+ before do
+ expect(uploader_class).to receive(:object_store_credentials) do
+ { provider: "AWS",
+ aws_access_key_id: "AWS_ACCESS_KEY_ID",
+ aws_secret_access_key: "AWS_SECRET_ACCESS_KEY",
+ endpoint: 'http://127.0.0.1:9000',
+ path_style: true,
+ region: "gdk" }
+ end
+ end
+
+ it_behaves_like 'uses remote storage' do
+ let(:storage_url) { "http://127.0.0.1:9000/uploads/" }
+
+ it 'returns links for S3' do
+ expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
+ expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
+ expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
+ end
+ end
+ end
+ end
+
+ context 'when direct upload is disabled' do
+ before do
+ allow(Gitlab.config.uploads.object_store).to receive(:direct_upload) { false }
+ end
+
+ it_behaves_like 'uses local storage'
+ end
+ end
+ end
+
+ describe '#store_workhorse_file!' do
+ subject do
+ uploader.store_workhorse_file!(params, :file)
+ end
+
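+ # the params hashes below mimic what Workhorse hands over: a local
+ # "file.path" or an object storage "file.remote_id", plus a "file.name"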
+ context 'when local file is used' do
+ context 'when valid file is used' do
+ let(:target_path) do
+ File.join(uploader_class.root, uploader_class::TMP_UPLOAD_PATH)
+ end
+
+ before do
+ FileUtils.mkdir_p(target_path)
+ end
+
+ context 'when no filename is specified' do
+ let(:params) do
+ { "file.path" => "test/file" }
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Missing filename/)
+ end
+ end
+
+ context 'when invalid file is specified' do
+ let(:file_path) do
+ File.join(target_path, "..", "test.file")
+ end
+
+ before do
+ FileUtils.touch(file_path)
+ end
+
+ let(:params) do
+ { "file.path" => file_path,
+ "file.name" => "my_file.txt" }
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Bad file path/)
+ end
+ end
+
+ context 'when filename is specified' do
+ let(:params) do
+ { "file.path" => tmp_file,
+ "file.name" => "my_file.txt" }
+ end
+
+ let(:tmp_file) { Tempfile.new('filename', target_path) }
+
+ before do
+ FileUtils.touch(tmp_file)
+ end
+
+ after do
+ FileUtils.rm_f(tmp_file)
+ end
+
+ it 'succeeds' do
+ expect { subject }.not_to raise_error
+
+ expect(uploader).to be_exists
+ end
+
+ it 'uses the proper path' do
+ subject
+
+ expect(uploader.path).to start_with(uploader_class.root)
+ expect(uploader.path).to end_with("my_file.txt")
+ end
+
+ it 'removes the source file' do
+ subject
+
+ expect(File.exist?(tmp_file.path)).to be_falsey
+ end
+ end
+ end
+ end
+
+ context 'when remote file is used' do
+ let!(:fog_connection) do
+ stub_uploads_object_storage(uploader_class)
+ end
+
+ context 'when valid file is used' do
+ context 'when no filename is specified' do
+ let(:params) do
+ { "file.remote_id" => "test/123123" }
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Missing filename/)
+ end
+ end
+
+ context 'when invalid file is specified' do
+ let(:params) do
+ { "file.remote_id" => "../test/123123",
+ "file.name" => "my_file.txt" }
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Bad file path/)
+ end
+ end
+
+ context 'when a non-existent file is specified' do
+ let(:params) do
+ { "file.remote_id" => "test/12312300",
+ "file.name" => "my_file.txt" }
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Missing file/)
+ end
+ end
+
+ context 'when filename is specified' do
+ let(:params) do
+ { "file.remote_id" => "test/123123",
+ "file.name" => "my_file.txt" }
+ end
+
+ let!(:fog_file) do
+ fog_connection.directories.get('uploads').files.create(
+ key: 'tmp/upload/test/123123',
+ body: 'content'
+ )
+ end
+
+ it 'succeeds' do
+ expect { subject }.not_to raise_error
+
+ expect(uploader).to be_exists
+ end
+
+ it 'does not use a temporary path' do
+ subject
+
+ expect(uploader.path).not_to be_nil
+ expect(uploader.path).not_to include('tmp/upload')
+ expect(uploader.url).to include('/my_file.txt')
+ end
+
+ it 'sets the url' do
+ subject
+
+ expect(uploader.url).not_to be_nil
+ expect(uploader.url).to include('/my_file.txt')
+ end
+ end
+ end
+ end
+
+ context 'when no file is used' do
+ let(:params) { {} }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Bad file/)
+ end
+ end
+ end
+end
diff --git a/spec/uploaders/personal_file_uploader_spec.rb b/spec/uploaders/personal_file_uploader_spec.rb
index ed1fba6edda..c70521d90dc 100644
--- a/spec/uploaders/personal_file_uploader_spec.rb
+++ b/spec/uploaders/personal_file_uploader_spec.rb
@@ -14,6 +14,18 @@ describe PersonalFileUploader do
upload_path: IDENTIFIER,
absolute_path: %r[#{CarrierWave.root}/uploads/-/system/personal_snippet/\d+/#{IDENTIFIER}]
+ context "object_store is REMOTE" do
+ before do
+ stub_uploads_object_storage
+ end
+
+ include_context 'with storage', described_class::Store::REMOTE
+
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[\d+/\h+],
+ upload_path: IDENTIFIER
+ end
+
describe '#to_h' do
before do
subject.instance_variable_set(:@secret, 'secret')
@@ -30,4 +42,14 @@ describe PersonalFileUploader do
)
end
end
+
+ describe "#migrate!" do
+ before do
+ uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/doc_sample.txt')))
+ stub_uploads_object_storage
+ end
+
+ it_behaves_like "migrates", to_store: described_class::Store::REMOTE
+ it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
+ end
end
diff --git a/spec/views/ci/lints/show.html.haml_spec.rb b/spec/views/projects/ci/lints/show.html.haml_spec.rb
index ded320793ea..2f0cd38c14a 100644
--- a/spec/views/ci/lints/show.html.haml_spec.rb
+++ b/spec/views/projects/ci/lints/show.html.haml_spec.rb
@@ -1,12 +1,13 @@
require 'spec_helper'
-describe 'ci/lints/show' do
+describe 'projects/ci/lints/show' do
include Devise::Test::ControllerHelpers
+ let(:project) { create(:project, :repository) }
+ let(:config_processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(content)) }
describe 'XSS protection' do
- let(:config_processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(content)) }
-
before do
+ assign(:project, project)
assign(:status, true)
assign(:builds, config_processor.builds)
assign(:stages, config_processor.stages)
@@ -48,22 +49,21 @@ describe 'ci/lints/show' do
end
end
- let(:content) do
- {
- build_template: {
- script: './build.sh',
- tags: ['dotnet'],
- only: ['test@dude/repo'],
- except: ['deploy'],
- environment: 'testing'
+ context 'when the content is valid' do
+ let(:content) do
+ {
+ build_template: {
+ script: './build.sh',
+ tags: ['dotnet'],
+ only: ['test@dude/repo'],
+ except: ['deploy'],
+ environment: 'testing'
+ }
}
- }
- end
-
- let(:config_processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(content)) }
+ end
- context 'when the content is valid' do
before do
+ assign(:project, project)
assign(:status, true)
assign(:builds, config_processor.builds)
assign(:stages, config_processor.stages)
@@ -83,6 +83,7 @@ describe 'ci/lints/show' do
context 'when the content is invalid' do
before do
+ assign(:project, project)
assign(:status, false)
assign(:error, 'Undefined error')
end
diff --git a/spec/workers/object_storage_upload_worker_spec.rb b/spec/workers/object_storage_upload_worker_spec.rb
new file mode 100644
index 00000000000..32ddcbe9757
--- /dev/null
+++ b/spec/workers/object_storage_upload_worker_spec.rb
@@ -0,0 +1,108 @@
+require 'spec_helper'
+
+describe ObjectStorageUploadWorker do
+ let(:local) { ObjectStorage::Store::LOCAL }
+ let(:remote) { ObjectStorage::Store::REMOTE }
+
+ def perform
+ described_class.perform_async(uploader_class.name, subject_class, file_field, subject_id)
+ end
+
+ context 'for LFS' do
+ let!(:lfs_object) { create(:lfs_object, :with_file, file_store: local) }
+ let(:uploader_class) { LfsObjectUploader }
+ let(:subject_class) { LfsObject }
+ let(:file_field) { :file }
+ let(:subject_id) { lfs_object.id }
+
+ context 'when object storage is enabled' do
+ before do
+ stub_lfs_object_storage(background_upload: true)
+ end
+
+ it 'uploads object to storage' do
+ expect { perform }.to change { lfs_object.reload.file_store }.from(local).to(remote)
+ end
+
+ context 'when background upload is disabled' do
+ before do
+ allow(Gitlab.config.lfs.object_store).to receive(:background_upload) { false }
+ end
+
+ it 'is skipped' do
+ expect { perform }.not_to change { lfs_object.reload.file_store }
+ end
+ end
+ end
+
+ context 'when object storage is disabled' do
+ before do
+ stub_lfs_object_storage(enabled: false)
+ end
+
+ it "doesn't migrate files" do
+ perform
+
+ expect(lfs_object.reload.file_store).to eq(local)
+ end
+ end
+ end
+
+ context 'for legacy artifacts' do
+ let(:build) { create(:ci_build, :legacy_artifacts) }
+ let(:uploader_class) { LegacyArtifactUploader }
+ let(:subject_class) { Ci::Build }
+ let(:file_field) { :artifacts_file }
+ let(:subject_id) { build.id }
+
+ context 'when local storage is used' do
+ let(:store) { local }
+
+ context 'and remote storage is defined' do
+ before do
+ stub_artifacts_object_storage(background_upload: true)
+ end
+
+ it "migrates file to remote storage" do
+ perform
+
+ expect(build.reload.artifacts_file_store).to eq(remote)
+ end
+
+ context 'for artifacts_metadata' do
+ let(:file_field) { :artifacts_metadata }
+
+ it 'migrates metadata to remote storage' do
+ perform
+
+ expect(build.reload.artifacts_metadata_store).to eq(remote)
+ end
+ end
+ end
+ end
+ end
+
+ context 'for job artifacts' do
+ let(:artifact) { create(:ci_job_artifact, :archive) }
+ let(:uploader_class) { JobArtifactUploader }
+ let(:subject_class) { Ci::JobArtifact }
+ let(:file_field) { :file }
+ let(:subject_id) { artifact.id }
+
+ context 'when local storage is used' do
+ let(:store) { local }
+
+ context 'and remote storage is defined' do
+ before do
+ stub_artifacts_object_storage(background_upload: true)
+ end
+
+ it "migrates file to remote storage" do
+ perform
+
+ expect(artifact.reload.file_store).to eq(remote)
+ end
+ end
+ end
+ end
+end
diff --git a/yarn.lock b/yarn.lock
index af7bda5d562..584951b5da0 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -3605,7 +3605,7 @@ fs.realpath@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f"
-fsevents@^1.0.0:
+fsevents@^1.0.0, fsevents@^1.1.3:
version "1.1.3"
resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.1.3.tgz#11f82318f5fe7bb2cd22965a108e9306208216d8"
dependencies: