commit    a09983ae35713f5a2bbb100981116d31ce99826e
tree      2ee2af7bd104d57086db360a7e6d8c9d5d43667a /lib
parent    18c5ab32b738c0b6ecb4d0df3994000482f34bd8
author    GitLab Bot <gitlab-bot@gitlab.com>  2020-07-20 12:26:25 +0000
committer GitLab Bot <gitlab-bot@gitlab.com>  2020-07-20 12:26:25 +0000
Add latest changes from gitlab-org/gitlab@13-2-stable-ee
Diffstat (limited to 'lib')
-rw-r--r--lib/api/access_requests.rb2
-rw-r--r--lib/api/admin/ci/variables.rb2
-rw-r--r--lib/api/admin/instance_clusters.rb134
-rw-r--r--lib/api/admin/sidekiq.rb2
-rw-r--r--lib/api/api.rb27
-rw-r--r--lib/api/api_guard.rb12
-rw-r--r--lib/api/appearance.rb2
-rw-r--r--lib/api/applications.rb2
-rw-r--r--lib/api/avatar.rb2
-rw-r--r--lib/api/award_emoji.rb2
-rw-r--r--lib/api/badges.rb2
-rw-r--r--lib/api/boards.rb2
-rw-r--r--lib/api/branches.rb13
-rw-r--r--lib/api/broadcast_messages.rb2
-rw-r--r--lib/api/ci/pipeline_schedules.rb217
-rw-r--r--lib/api/ci/pipelines.rb189
-rw-r--r--lib/api/ci/runner.rb318
-rw-r--r--lib/api/ci/runners.rb289
-rw-r--r--lib/api/commit_statuses.rb4
-rw-r--r--lib/api/commits.rb2
-rw-r--r--lib/api/composer_packages.rb156
-rw-r--r--lib/api/conan_packages.rb309
-rw-r--r--lib/api/container_registry_event.rb2
-rw-r--r--lib/api/deploy_keys.rb2
-rw-r--r--lib/api/deploy_tokens.rb6
-rw-r--r--lib/api/deployments.rb2
-rw-r--r--lib/api/discussions.rb18
-rw-r--r--lib/api/entities/approvals.rb9
-rw-r--r--lib/api/entities/basic_project_details.rb3
-rw-r--r--lib/api/entities/conan_package/conan_package_manifest.rb11
-rw-r--r--lib/api/entities/conan_package/conan_package_snapshot.rb11
-rw-r--r--lib/api/entities/conan_package/conan_recipe_manifest.rb11
-rw-r--r--lib/api/entities/conan_package/conan_recipe_snapshot.rb11
-rw-r--r--lib/api/entities/conan_package/conan_upload_urls.rb11
-rw-r--r--lib/api/entities/entity_helpers.rb19
-rw-r--r--lib/api/entities/go_module_version.rb10
-rw-r--r--lib/api/entities/group.rb1
-rw-r--r--lib/api/entities/group_detail.rb1
-rw-r--r--lib/api/entities/issuable_entity.rb36
-rw-r--r--lib/api/entities/issue_basic.rb8
-rw-r--r--lib/api/entities/merge_request_approvals.rb24
-rw-r--r--lib/api/entities/merge_request_basic.rb21
-rw-r--r--lib/api/entities/npm_package.rb11
-rw-r--r--lib/api/entities/npm_package_tag.rb9
-rw-r--r--lib/api/entities/nuget/dependency.rb14
-rw-r--r--lib/api/entities/nuget/dependency_group.rb14
-rw-r--r--lib/api/entities/nuget/metadatum.rb13
-rw-r--r--lib/api/entities/nuget/package_metadata.rb13
-rw-r--r--lib/api/entities/nuget/package_metadata_catalog_entry.rb19
-rw-r--r--lib/api/entities/nuget/packages_metadata.rb12
-rw-r--r--lib/api/entities/nuget/packages_metadata_item.rb15
-rw-r--r--lib/api/entities/nuget/packages_versions.rb11
-rw-r--r--lib/api/entities/nuget/search_result.rb21
-rw-r--r--lib/api/entities/nuget/search_result_version.rb13
-rw-r--r--lib/api/entities/nuget/search_results.rb12
-rw-r--r--lib/api/entities/nuget/service_index.rb12
-rw-r--r--lib/api/entities/package.rb42
-rw-r--r--lib/api/entities/package/pipeline.rb11
-rw-r--r--lib/api/entities/package_file.rb11
-rw-r--r--lib/api/entities/package_version.rb14
-rw-r--r--lib/api/entities/project.rb2
-rw-r--r--lib/api/entities/project_statistics.rb1
-rw-r--r--lib/api/entities/release.rb9
-rw-r--r--lib/api/entities/resource_state_event.rb18
-rw-r--r--lib/api/entities/snippet.rb12
-rw-r--r--lib/api/entities/user.rb2
-rw-r--r--lib/api/environments.rb2
-rw-r--r--lib/api/error_tracking.rb2
-rw-r--r--lib/api/events.rb2
-rw-r--r--lib/api/features.rb2
-rw-r--r--lib/api/files.rb4
-rw-r--r--lib/api/freeze_periods.rb2
-rwxr-xr-xlib/api/go_proxy.rb135
-rw-r--r--lib/api/group_boards.rb2
-rw-r--r--lib/api/group_clusters.rb18
-rw-r--r--lib/api/group_container_repositories.rb2
-rw-r--r--lib/api/group_export.rb2
-rw-r--r--lib/api/group_import.rb2
-rw-r--r--lib/api/group_labels.rb2
-rw-r--r--lib/api/group_milestones.rb6
-rw-r--r--lib/api/group_packages.rb44
-rw-r--r--lib/api/group_variables.rb6
-rw-r--r--lib/api/groups.rb11
-rw-r--r--lib/api/helpers.rb33
-rw-r--r--lib/api/helpers/common_helpers.rb20
-rw-r--r--lib/api/helpers/internal_helpers.rb4
-rw-r--r--lib/api/helpers/merge_requests_helpers.rb40
-rw-r--r--lib/api/helpers/packages/basic_auth_helpers.rb57
-rw-r--r--lib/api/helpers/packages/conan/api_helpers.rb225
-rw-r--r--lib/api/helpers/packages/dependency_proxy_helpers.rb36
-rw-r--r--lib/api/helpers/packages_helpers.rb52
-rw-r--r--lib/api/helpers/packages_manager_clients_helpers.rb63
-rw-r--r--lib/api/helpers/projects_helpers.rb6
-rw-r--r--lib/api/helpers/runner.rb7
-rw-r--r--lib/api/helpers/services_helpers.rb33
-rw-r--r--lib/api/helpers/snippets_helpers.rb26
-rw-r--r--lib/api/helpers/users_helpers.rb7
-rw-r--r--lib/api/helpers/wikis_helpers.rb35
-rw-r--r--lib/api/import_bitbucket_server.rb44
-rw-r--r--lib/api/import_github.rb2
-rw-r--r--lib/api/internal/base.rb8
-rw-r--r--lib/api/internal/pages.rb2
-rw-r--r--lib/api/issues.rb41
-rw-r--r--lib/api/job_artifacts.rb2
-rw-r--r--lib/api/jobs.rb4
-rw-r--r--lib/api/keys.rb2
-rw-r--r--lib/api/labels.rb2
-rw-r--r--lib/api/lint.rb2
-rw-r--r--lib/api/markdown.rb2
-rw-r--r--lib/api/maven_packages.rb251
-rw-r--r--lib/api/members.rb12
-rw-r--r--lib/api/merge_request_approvals.rb78
-rw-r--r--lib/api/merge_request_diffs.rb2
-rw-r--r--lib/api/merge_requests.rb31
-rw-r--r--lib/api/metrics/dashboard/annotations.rb2
-rw-r--r--lib/api/metrics/user_starred_dashboards.rb2
-rw-r--r--lib/api/milestone_responses.rb2
-rw-r--r--lib/api/namespaces.rb2
-rw-r--r--lib/api/notes.rb4
-rw-r--r--lib/api/notification_settings.rb2
-rw-r--r--lib/api/npm_packages.rb173
-rw-r--r--lib/api/nuget_packages.rb221
-rw-r--r--lib/api/package_files.rb33
-rw-r--r--lib/api/pages.rb2
-rw-r--r--lib/api/pages_domains.rb2
-rw-r--r--lib/api/pagination_params.rb2
-rw-r--r--lib/api/pipeline_schedules.rb215
-rw-r--r--lib/api/pipelines.rb187
-rw-r--r--lib/api/project_clusters.rb18
-rw-r--r--lib/api/project_container_repositories.rb2
-rw-r--r--lib/api/project_events.rb2
-rw-r--r--lib/api/project_export.rb2
-rw-r--r--lib/api/project_hooks.rb2
-rw-r--r--lib/api/project_import.rb2
-rw-r--r--lib/api/project_milestones.rb6
-rw-r--r--lib/api/project_packages.rb71
-rw-r--r--lib/api/project_repository_storage_moves.rb2
-rw-r--r--lib/api/project_snapshots.rb2
-rw-r--r--lib/api/project_snippets.rb23
-rw-r--r--lib/api/project_statistics.rb2
-rw-r--r--lib/api/project_templates.rb2
-rw-r--r--lib/api/projects.rb32
-rw-r--r--lib/api/projects_relation_builder.rb9
-rw-r--r--lib/api/protected_branches.rb2
-rw-r--r--lib/api/protected_tags.rb2
-rw-r--r--lib/api/pypi_packages.rb148
-rw-r--r--lib/api/release/links.rb2
-rw-r--r--lib/api/releases.rb4
-rw-r--r--lib/api/remote_mirrors.rb2
-rw-r--r--lib/api/repositories.rb4
-rw-r--r--lib/api/resource_label_events.rb2
-rw-r--r--lib/api/resource_milestone_events.rb5
-rw-r--r--lib/api/resource_state_events.rb50
-rw-r--r--lib/api/runner.rb297
-rw-r--r--lib/api/runners.rb287
-rw-r--r--lib/api/search.rb5
-rw-r--r--lib/api/services.rb2
-rw-r--r--lib/api/settings.rb11
-rw-r--r--lib/api/sidekiq_metrics.rb2
-rw-r--r--lib/api/snippets.rb28
-rw-r--r--lib/api/statistics.rb2
-rw-r--r--lib/api/submodules.rb2
-rw-r--r--lib/api/subscriptions.rb2
-rw-r--r--lib/api/suggestions.rb4
-rw-r--r--lib/api/system_hooks.rb2
-rw-r--r--lib/api/tags.rb2
-rw-r--r--lib/api/templates.rb2
-rw-r--r--lib/api/terraform/state.rb12
-rw-r--r--lib/api/todos.rb2
-rw-r--r--lib/api/triggers.rb4
-rw-r--r--lib/api/user_counts.rb2
-rw-r--r--lib/api/users.rb35
-rw-r--r--lib/api/validations/types/comma_separated_to_array.rb2
-rw-r--r--lib/api/validations/types/comma_separated_to_integer_array.rb15
-rw-r--r--lib/api/validations/types/labels_list.rb24
-rw-r--r--lib/api/validations/types/safe_file.rb15
-rw-r--r--lib/api/validations/types/workhorse_file.rb13
-rw-r--r--lib/api/variables.rb32
-rw-r--r--lib/api/version.rb2
-rw-r--r--lib/api/wikis.rb206
-rw-r--r--lib/backup/database.rb10
-rw-r--r--lib/banzai/filter/abstract_reference_filter.rb10
-rw-r--r--lib/banzai/filter/commit_trailers_filter.rb5
-rw-r--r--lib/banzai/filter/external_issue_reference_filter.rb6
-rw-r--r--lib/banzai/filter/inline_cluster_metrics_filter.rb40
-rw-r--r--lib/banzai/filter/inline_metrics_redactor_filter.rb4
-rw-r--r--lib/banzai/filter/jira_import/adf_to_commonmark_filter.rb24
-rw-r--r--lib/banzai/filter/project_reference_filter.rb6
-rw-r--r--lib/banzai/filter/reference_filter.rb87
-rw-r--r--lib/banzai/filter/table_of_contents_filter.rb11
-rw-r--r--lib/banzai/filter/user_reference_filter.rb6
-rw-r--r--lib/banzai/pipeline/gfm_pipeline.rb3
-rw-r--r--lib/banzai/pipeline/jira_import/adf_commonmark_pipeline.rb15
-rw-r--r--lib/container_registry/tag.rb7
-rw-r--r--lib/declarative_policy/base.rb25
-rw-r--r--lib/event_filter.rb21
-rw-r--r--lib/feature.rb41
-rw-r--r--lib/feature/definition.rb137
-rw-r--r--lib/feature/shared.rb33
-rw-r--r--lib/gitlab/action_cable/config.rb17
-rw-r--r--lib/gitlab/alert_management/alert_params.rb6
-rw-r--r--lib/gitlab/alert_management/fingerprint.rb16
-rw-r--r--lib/gitlab/alerting/notification_payload_parser.rb11
-rw-r--r--lib/gitlab/analytics/cycle_analytics/records_fetcher.rb4
-rw-r--r--lib/gitlab/analytics/cycle_analytics/stage_events.rb2
-rw-r--r--lib/gitlab/analytics/cycle_analytics/stage_events/code_stage_start.rb5
-rw-r--r--lib/gitlab/analytics/cycle_analytics/stage_events/issue_stage_end.rb9
-rw-r--r--lib/gitlab/analytics/cycle_analytics/stage_events/metrics_based_stage_event.rb5
-rw-r--r--lib/gitlab/analytics/cycle_analytics/stage_events/plan_stage_start.rb9
-rw-r--r--lib/gitlab/analytics/cycle_analytics/stage_events/production_stage_end.rb5
-rw-r--r--lib/gitlab/analytics/cycle_analytics/stage_events/stage_event.rb8
-rw-r--r--lib/gitlab/analytics/cycle_analytics/stage_query_helpers.rb23
-rw-r--r--lib/gitlab/analytics/unique_visits.rb60
-rw-r--r--lib/gitlab/application_rate_limiter.rb24
-rw-r--r--lib/gitlab/auth/auth_finders.rb26
-rw-r--r--lib/gitlab/background_migration.rb1
-rw-r--r--lib/gitlab/background_migration/backfill_namespace_settings.rb18
-rw-r--r--lib/gitlab/background_migration/cleanup_concurrent_schema_change.rb8
-rw-r--r--lib/gitlab/background_migration/digest_column.rb25
-rw-r--r--lib/gitlab/background_migration/encrypt_columns.rb104
-rw-r--r--lib/gitlab/background_migration/encrypt_runners_tokens.rb32
-rw-r--r--lib/gitlab/background_migration/fix_pages_access_level.rb2
-rw-r--r--lib/gitlab/background_migration/mailers/unconfirm_mailer.rb24
-rw-r--r--lib/gitlab/background_migration/mailers/views/unconfirm_mailer/unconfirm_notification_email.html.haml19
-rw-r--r--lib/gitlab/background_migration/mailers/views/unconfirm_mailer/unconfirm_notification_email.text.erb14
-rw-r--r--lib/gitlab/background_migration/models/encrypt_columns/namespace.rb28
-rw-r--r--lib/gitlab/background_migration/models/encrypt_columns/project.rb28
-rw-r--r--lib/gitlab/background_migration/models/encrypt_columns/runner.rb28
-rw-r--r--lib/gitlab/background_migration/models/encrypt_columns/settings.rb37
-rw-r--r--lib/gitlab/background_migration/models/encrypt_columns/web_hook.rb28
-rw-r--r--lib/gitlab/background_migration/populate_project_snippet_statistics.rb61
-rw-r--r--lib/gitlab/background_migration/update_vulnerabilities_from_dismissal_feedback.rb13
-rw-r--r--lib/gitlab/background_migration/wrongfully_confirmed_email_unconfirmer.rb97
-rw-r--r--lib/gitlab/bitbucket_import/importer.rb11
-rw-r--r--lib/gitlab/bitbucket_import/metrics.rb41
-rw-r--r--lib/gitlab/bitbucket_server_import/importer.rb11
-rw-r--r--lib/gitlab/cache/ci/project_pipeline_status.rb11
-rw-r--r--lib/gitlab/ci/build/releaser.rb17
-rw-r--r--lib/gitlab/ci/config.rb6
-rw-r--r--lib/gitlab/ci/config/entry/environment.rb2
-rw-r--r--lib/gitlab/ci/config/entry/job.rb4
-rw-r--r--lib/gitlab/ci/config/entry/processable.rb4
-rw-r--r--lib/gitlab/ci/config/entry/release.rb24
-rw-r--r--lib/gitlab/ci/config/entry/reports.rb9
-rw-r--r--lib/gitlab/ci/features.rb54
-rw-r--r--lib/gitlab/ci/parsers/terraform/tfplan.rb34
-rw-r--r--lib/gitlab/ci/pipeline/chain/build.rb6
-rw-r--r--lib/gitlab/ci/pipeline/chain/command.rb8
-rw-r--r--lib/gitlab/ci/pipeline/chain/config/content.rb1
-rw-r--r--lib/gitlab/ci/pipeline/chain/config/content/parameter.rb30
-rw-r--r--lib/gitlab/ci/pipeline/chain/config/content/source.rb2
-rw-r--r--lib/gitlab/ci/pipeline/chain/config/process.rb12
-rw-r--r--lib/gitlab/ci/pipeline/chain/create.rb4
-rw-r--r--lib/gitlab/ci/pipeline/chain/helpers.rb9
-rw-r--r--lib/gitlab/ci/pipeline/chain/metrics.rb35
-rw-r--r--lib/gitlab/ci/pipeline/chain/validate/abilities.rb2
-rw-r--r--lib/gitlab/ci/pipeline/chain/validate/repository.rb2
-rw-r--r--lib/gitlab/ci/pipeline/metrics.rb42
-rw-r--r--lib/gitlab/ci/pipeline/preloader.rb18
-rw-r--r--lib/gitlab/ci/reports/test_report_summary.rb49
-rw-r--r--lib/gitlab/ci/reports/test_suite.rb14
-rw-r--r--lib/gitlab/ci/reports/test_suite_summary.rb49
-rw-r--r--lib/gitlab/ci/status/composite.rb4
-rw-r--r--lib/gitlab/ci/status/factory.rb2
-rw-r--r--lib/gitlab/ci/status/stage/play_manual.rb2
-rw-r--r--lib/gitlab/ci/templates/Android-Fastlane.gitlab-ci.yml2
-rw-r--r--lib/gitlab/ci/templates/Android.gitlab-ci.yml63
-rw-r--r--lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml3
-rw-r--r--lib/gitlab/ci/templates/Composer.gitlab-ci.yml19
-rw-r--r--lib/gitlab/ci/templates/Dart.gitlab-ci.yml22
-rw-r--r--lib/gitlab/ci/templates/Jobs/Browser-Performance-Testing.gitlab-ci.yml20
-rw-r--r--lib/gitlab/ci/templates/Jobs/Build.gitlab-ci.yml4
-rw-r--r--lib/gitlab/ci/templates/Jobs/Code-Quality.gitlab-ci.yml7
-rw-r--r--lib/gitlab/ci/templates/Jobs/DAST-Default-Branch-Deploy.gitlab-ci.yml3
-rw-r--r--lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml13
-rw-r--r--lib/gitlab/ci/templates/Jobs/Deploy/ECS.gitlab-ci.yml42
-rw-r--r--lib/gitlab/ci/templates/Jobs/Load-Performance-Testing.gitlab-ci.yml29
-rw-r--r--lib/gitlab/ci/templates/Managed-Cluster-Applications.gitlab-ci.yml2
-rw-r--r--lib/gitlab/ci/templates/Security/Coverage-Fuzzing.gitlab-ci.yml34
-rw-r--r--lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml7
-rw-r--r--lib/gitlab/ci/templates/Security/Dependency-Scanning.gitlab-ci.yml16
-rw-r--r--lib/gitlab/ci/templates/Security/License-Scanning.gitlab-ci.yml4
-rw-r--r--lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml48
-rw-r--r--lib/gitlab/ci/templates/Security/Secret-Detection.gitlab-ci.yml28
-rw-r--r--lib/gitlab/ci/templates/Security/Secure-Binaries.gitlab-ci.yml11
-rw-r--r--lib/gitlab/ci/templates/Verify/Browser-Performance.gitlab-ci.yml12
-rw-r--r--lib/gitlab/ci/templates/Verify/Load-Performance-Testing.gitlab-ci.yml23
-rw-r--r--lib/gitlab/ci/templates/index.md3
-rw-r--r--lib/gitlab/ci/templates/npm.gitlab-ci.yml59
-rw-r--r--lib/gitlab/ci/yaml_processor.rb65
-rw-r--r--lib/gitlab/class_attributes.rb30
-rw-r--r--lib/gitlab/code_navigation_path.rb12
-rw-r--r--lib/gitlab/conan_token.rb64
-rw-r--r--lib/gitlab/config/entry/configurable.rb4
-rw-r--r--lib/gitlab/config/entry/node.rb11
-rw-r--r--lib/gitlab/config/loader/yaml.rb3
-rw-r--r--lib/gitlab/config_checker/external_database_checker.rb49
-rw-r--r--lib/gitlab/danger/changelog.rb14
-rw-r--r--lib/gitlab/danger/commit_linter.rb9
-rw-r--r--lib/gitlab/danger/helper.rb67
-rw-r--r--lib/gitlab/danger/roulette.rb45
-rw-r--r--lib/gitlab/danger/sidekiq_queues.rb37
-rw-r--r--lib/gitlab/danger/teammate.rb80
-rw-r--r--lib/gitlab/database.rb63
-rw-r--r--lib/gitlab/database/background_migration_job.rb38
-rw-r--r--lib/gitlab/database/dynamic_model_helpers.rb16
-rw-r--r--lib/gitlab/database/migration_helpers.rb158
-rw-r--r--lib/gitlab/database/migrations/background_migration_helpers.rb157
-rw-r--r--lib/gitlab/database/partitioning/monthly_strategy.rb96
-rw-r--r--lib/gitlab/database/partitioning/partition_creator.rb87
-rw-r--r--lib/gitlab/database/partitioning/time_partition.rb84
-rw-r--r--lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table.rb105
-rw-r--r--lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers.rb2
-rw-r--r--lib/gitlab/database/partitioning_migration_helpers/table_management_helpers.rb165
-rw-r--r--lib/gitlab/database/schema_helpers.rb25
-rw-r--r--lib/gitlab/diff/file.rb8
-rw-r--r--lib/gitlab/diff/file_collection/base.rb27
-rw-r--r--lib/gitlab/diff/file_collection/merge_request_diff_base.rb33
-rw-r--r--lib/gitlab/diff/file_collection/wiki_page.rb24
-rw-r--r--lib/gitlab/diff/position_tracer.rb4
-rw-r--r--lib/gitlab/diff/position_tracer/base_strategy.rb1
-rw-r--r--lib/gitlab/diff/position_tracer/image_strategy.rb10
-rw-r--r--lib/gitlab/diff/position_tracer/line_strategy.rb30
-rw-r--r--lib/gitlab/diff/stats_cache.rb54
-rw-r--r--lib/gitlab/discussions_diff/file_collection.rb4
-rw-r--r--lib/gitlab/discussions_diff/highlight_cache.rb10
-rw-r--r--lib/gitlab/email/handler.rb5
-rw-r--r--lib/gitlab/email/handler/reply_processing.rb15
-rw-r--r--lib/gitlab/email/handler/service_desk_handler.rb152
-rw-r--r--lib/gitlab/email/service_desk_receiver.rb23
-rw-r--r--lib/gitlab/emoji.rb4
-rw-r--r--lib/gitlab/error_tracking.rb15
-rw-r--r--lib/gitlab/error_tracking/detailed_error.rb1
-rw-r--r--lib/gitlab/file_finder.rb2
-rw-r--r--lib/gitlab/git/commit.rb5
-rw-r--r--lib/gitlab/git/diff.rb24
-rw-r--r--lib/gitlab/git/repository.rb18
-rw-r--r--lib/gitlab/git/wiki.rb4
-rw-r--r--lib/gitlab/git_ref_validator.rb4
-rw-r--r--lib/gitlab/gitaly_client.rb32
-rw-r--r--lib/gitlab/gitaly_client/blob_service.rb52
-rw-r--r--lib/gitlab/gitaly_client/call.rb72
-rw-r--r--lib/gitlab/gitaly_client/cleanup_service.rb5
-rw-r--r--lib/gitlab/gitaly_client/commit_service.rb68
-rw-r--r--lib/gitlab/gitaly_client/conflicts_service.rb1
-rw-r--r--lib/gitlab/gitaly_client/operation_service.rb4
-rw-r--r--lib/gitlab/gitaly_client/ref_service.rb31
-rw-r--r--lib/gitlab/gitaly_client/remote_service.rb6
-rw-r--r--lib/gitlab/gitaly_client/repository_service.rb8
-rw-r--r--lib/gitlab/gl_repository.rb6
-rw-r--r--lib/gitlab/gl_repository/identifier.rb94
-rw-r--r--lib/gitlab/global_id.rb17
-rw-r--r--lib/gitlab/graphql/authorize/authorize_resource.rb3
-rw-r--r--lib/gitlab/graphql/lazy.rb19
-rw-r--r--lib/gitlab/graphql/loaders/issuable_loader.rb82
-rw-r--r--lib/gitlab/graphql/mount_mutation.rb8
-rw-r--r--lib/gitlab/graphql/query_analyzers/logger_analyzer.rb3
-rw-r--r--lib/gitlab/health_checks/probes/collection.rb6
-rw-r--r--lib/gitlab/import/metrics.rb87
-rw-r--r--lib/gitlab/import_export/json/streaming_serializer.rb14
-rw-r--r--lib/gitlab/import_export/project/import_export.yml10
-rw-r--r--lib/gitlab/import_export/project/relation_factory.rb11
-rw-r--r--lib/gitlab/import_export/snippet_repo_restorer.rb2
-rw-r--r--lib/gitlab/incident_management/pager_duty/incident_issue_description.rb64
-rw-r--r--lib/gitlab/instrumentation/elasticsearch_transport.rb16
-rw-r--r--lib/gitlab/instrumentation/redis.rb4
-rw-r--r--lib/gitlab/instrumentation/redis_base.rb37
-rw-r--r--lib/gitlab/instrumentation/redis_cluster_validator.rb106
-rw-r--r--lib/gitlab/instrumentation/redis_interceptor.rb23
-rw-r--r--lib/gitlab/issuable_metadata.rb14
-rw-r--r--lib/gitlab/jira_import/issue_serializer.rb4
-rw-r--r--lib/gitlab/jira_import/user_mapper.rb53
-rw-r--r--lib/gitlab/json.rb199
-rw-r--r--lib/gitlab/json_logger.rb2
-rw-r--r--lib/gitlab/kubernetes/helm.rb2
-rw-r--r--lib/gitlab/kubernetes/node.rb78
-rw-r--r--lib/gitlab/lograge/custom_options.rb12
-rw-r--r--lib/gitlab/marginalia/comment.rb4
-rw-r--r--lib/gitlab/markdown_cache/redis/extension.rb22
-rw-r--r--lib/gitlab/markdown_cache/redis/store.rb20
-rw-r--r--lib/gitlab/metrics/background_transaction.rb2
-rw-r--r--lib/gitlab/metrics/dashboard/errors.rb6
-rw-r--r--lib/gitlab/metrics/dashboard/finder.rb45
-rw-r--r--lib/gitlab/metrics/dashboard/service_selector.rb4
-rw-r--r--lib/gitlab/metrics/dashboard/stages/base_stage.rb8
-rw-r--r--lib/gitlab/metrics/dashboard/stages/cluster_endpoint_inserter.rb83
-rw-r--r--lib/gitlab/metrics/dashboard/stages/metric_endpoint_inserter.rb (renamed from lib/gitlab/metrics/dashboard/stages/endpoint_inserter.rb)10
-rw-r--r--lib/gitlab/metrics/dashboard/stages/sorter.rb4
-rw-r--r--lib/gitlab/metrics/dashboard/stages/url_validator.rb43
-rw-r--r--lib/gitlab/metrics/dashboard/stages/variable_endpoint_inserter.rb34
-rw-r--r--lib/gitlab/metrics/dashboard/url.rb16
-rw-r--r--lib/gitlab/metrics/methods.rb2
-rw-r--r--lib/gitlab/metrics/sidekiq_middleware.rb4
-rw-r--r--lib/gitlab/metrics/subscribers/active_record.rb22
-rw-r--r--lib/gitlab/metrics/transaction.rb8
-rw-r--r--lib/gitlab/metrics/web_transaction.rb11
-rw-r--r--lib/gitlab/middleware/go.rb4
-rw-r--r--lib/gitlab/middleware/multipart.rb19
-rw-r--r--lib/gitlab/project_template.rb1
-rw-r--r--lib/gitlab/prometheus_client.rb20
-rw-r--r--lib/gitlab/regex.rb12
-rw-r--r--lib/gitlab/runtime.rb26
-rw-r--r--lib/gitlab/search_results.rb3
-rw-r--r--lib/gitlab/seeder.rb3
-rw-r--r--lib/gitlab/service_desk.rb16
-rw-r--r--lib/gitlab/service_desk_email.rb22
-rw-r--r--lib/gitlab/set_cache.rb5
-rw-r--r--lib/gitlab/sidekiq_logging/deduplication_logger.rb7
-rw-r--r--lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing.rb3
-rw-r--r--lib/gitlab/static_site_editor/config.rb8
-rw-r--r--lib/gitlab/suggestions/file_suggestion.rb53
-rw-r--r--lib/gitlab/suggestions/suggestion_set.rb22
-rw-r--r--lib/gitlab/template/service_desk_template.rb21
-rw-r--r--lib/gitlab/tracking/incident_management.rb3
-rw-r--r--lib/gitlab/tree_summary.rb2
-rw-r--r--lib/gitlab/updated_notes_paginator.rb74
-rw-r--r--lib/gitlab/url_builder.rb14
-rw-r--r--lib/gitlab/usage_data.rb363
-rw-r--r--lib/gitlab/usage_data/topology.rb258
-rw-r--r--lib/gitlab/usage_data_concerns/topology.rb137
-rw-r--r--lib/gitlab/usage_data_counters/track_unique_actions.rb86
-rw-r--r--lib/gitlab/user_access.rb6
-rw-r--r--lib/gitlab/utils.rb12
-rw-r--r--lib/gitlab/utils/markdown.rb19
-rw-r--r--lib/gitlab/utils/usage_data.rb14
-rw-r--r--lib/gitlab/workhorse.rb4
-rw-r--r--lib/gitlab_danger.rb1
-rw-r--r--lib/google_api/auth.rb2
-rw-r--r--lib/kramdown/converter/commonmark.rb109
-rw-r--r--lib/kramdown/parser/atlassian_document_format.rb381
-rw-r--r--lib/learn_gitlab.rb35
-rw-r--r--lib/object_storage/direct_upload.rb25
-rw-r--r--lib/pager_duty/webhook_payload_parser.rb66
-rw-r--r--lib/peek/views/elasticsearch.rb2
-rw-r--r--lib/product_analytics/collector_app.rb40
-rw-r--r--lib/product_analytics/event_params.rb51
-rw-r--r--lib/quality/helm3_client.rb109
-rw-r--r--lib/quality/kubernetes_client.rb85
-rw-r--r--lib/quality/seeders/issues.rb1
-rw-r--r--lib/quality/test_level.rb8
-rw-r--r--lib/rspec_flaky/flaky_examples_collection.rb2
-rw-r--r--lib/sentry/client/issue.rb3
-rw-r--r--lib/support/logrotate/gitlab2
-rw-r--r--lib/system_check/incoming_email/imap_authentication_check.rb11
-rw-r--r--lib/tasks/cache.rake4
-rw-r--r--lib/tasks/gitlab/container_registry.rake16
-rw-r--r--lib/tasks/gitlab/db.rake37
-rw-r--r--lib/tasks/gitlab/external_diffs.rake35
-rw-r--r--lib/tasks/gitlab/packages/migrate.rake23
449 files changed, 10560 insertions(+), 3422 deletions(-)
diff --git a/lib/api/access_requests.rb b/lib/api/access_requests.rb
index ee8dc822098..5305b25538f 100644
--- a/lib/api/access_requests.rb
+++ b/lib/api/access_requests.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class AccessRequests < Grape::API
+ class AccessRequests < Grape::API::Instance
include PaginationParams
before { authenticate! }
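
The recurring change in this commit is that every mounted API class now subclasses Grape::API::Instance instead of Grape::API. A minimal sketch of the resulting class shape, using a hypothetical ExampleResource endpoint that is not part of the patch:

    module API
      class ExampleResource < Grape::API::Instance  # hypothetical class, for illustration only
        include PaginationParams

        before { authenticate! }

        resource :examples do
          desc 'List examples'
          get do
            # present paginate(...), with: an Entities class
          end
        end
      end
    end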
diff --git a/lib/api/admin/ci/variables.rb b/lib/api/admin/ci/variables.rb
index df731148bac..6b0ff5e9395 100644
--- a/lib/api/admin/ci/variables.rb
+++ b/lib/api/admin/ci/variables.rb
@@ -3,7 +3,7 @@
module API
module Admin
module Ci
- class Variables < Grape::API
+ class Variables < Grape::API::Instance
include PaginationParams
before { authenticated_as_admin! }
diff --git a/lib/api/admin/instance_clusters.rb b/lib/api/admin/instance_clusters.rb
new file mode 100644
index 00000000000..8208d10c089
--- /dev/null
+++ b/lib/api/admin/instance_clusters.rb
@@ -0,0 +1,134 @@
+# frozen_string_literal: true
+
+module API
+ module Admin
+ class InstanceClusters < Grape::API::Instance
+ include PaginationParams
+
+ before do
+ authenticated_as_admin!
+ end
+
+ namespace 'admin' do
+ desc "Get list of all instance clusters" do
+ detail "This feature was introduced in GitLab 13.2."
+ end
+ get '/clusters' do
+ authorize! :read_cluster, clusterable_instance
+ present paginate(clusters_for_current_user), with: Entities::Cluster
+ end
+
+ desc "Get a single instance cluster" do
+ detail "This feature was introduced in GitLab 13.2."
+ end
+ params do
+ requires :cluster_id, type: Integer, desc: "The cluster ID"
+ end
+ get '/clusters/:cluster_id' do
+ authorize! :read_cluster, cluster
+
+ present cluster, with: Entities::Cluster
+ end
+
+ desc "Add an instance cluster" do
+ detail "This feature was introduced in GitLab 13.2."
+ end
+ params do
+ requires :name, type: String, desc: 'Cluster name'
+ optional :enabled, type: Boolean, default: true, desc: 'Determines if cluster is active or not, defaults to true'
+ optional :environment_scope, default: '*', type: String, desc: 'The associated environment to the cluster'
+ optional :domain, type: String, desc: 'Cluster base domain'
+ optional :management_project_id, type: Integer, desc: 'The ID of the management project'
+ optional :managed, type: Boolean, default: true, desc: 'Determines if GitLab will manage namespaces and service accounts for this cluster, defaults to true'
+ requires :platform_kubernetes_attributes, type: Hash, desc: %q(Platform Kubernetes data) do
+ requires :api_url, type: String, allow_blank: false, desc: 'URL to access the Kubernetes API'
+ requires :token, type: String, desc: 'Token to authenticate against Kubernetes'
+ optional :ca_cert, type: String, desc: 'TLS certificate (needed if API is using a self-signed TLS certificate)'
+ optional :namespace, type: String, desc: 'Unique namespace related to Project'
+ optional :authorization_type, type: String, values: ::Clusters::Platforms::Kubernetes.authorization_types.keys, default: 'rbac', desc: 'Cluster authorization type, defaults to RBAC'
+ end
+ end
+ post '/clusters/add' do
+ authorize! :add_cluster, clusterable_instance
+
+ user_cluster = ::Clusters::CreateService
+ .new(current_user, create_cluster_user_params)
+ .execute
+
+ if user_cluster.persisted?
+ present user_cluster, with: Entities::Cluster
+ else
+ render_validation_error!(user_cluster)
+ end
+ end
+
+ desc "Update an instance cluster" do
+ detail "This feature was introduced in GitLab 13.2."
+ end
+ params do
+ requires :cluster_id, type: Integer, desc: 'The cluster ID'
+ optional :name, type: String, desc: 'Cluster name'
+ optional :enabled, type: Boolean, desc: 'Enable or disable GitLab\'s connection to your Kubernetes cluster'
+ optional :environment_scope, type: String, desc: 'The associated environment to the cluster'
+ optional :domain, type: String, desc: 'Cluster base domain'
+ optional :management_project_id, type: Integer, desc: 'The ID of the management project'
+ optional :platform_kubernetes_attributes, type: Hash, desc: %q(Platform Kubernetes data) do
+ optional :api_url, type: String, desc: 'URL to access the Kubernetes API'
+ optional :token, type: String, desc: 'Token to authenticate against Kubernetes'
+ optional :ca_cert, type: String, desc: 'TLS certificate (needed if API is using a self-signed TLS certificate)'
+ optional :namespace, type: String, desc: 'Unique namespace related to Project'
+ end
+ end
+ put '/clusters/:cluster_id' do
+ authorize! :update_cluster, cluster
+
+ update_service = ::Clusters::UpdateService.new(current_user, update_cluster_params)
+
+ if update_service.execute(cluster)
+ present cluster, with: Entities::ClusterProject
+ else
+ render_validation_error!(cluster)
+ end
+ end
+
+ desc "Remove a cluster" do
+ detail "This feature was introduced in GitLab 13.2."
+ end
+ params do
+ requires :cluster_id, type: Integer, desc: "The cluster ID"
+ end
+ delete '/clusters/:cluster_id' do
+ authorize! :admin_cluster, cluster
+
+ destroy_conditionally!(cluster)
+ end
+ end
+
+ helpers do
+ def clusterable_instance
+ Clusters::Instance.new
+ end
+
+ def clusters_for_current_user
+ @clusters_for_current_user ||= ClustersFinder.new(clusterable_instance, current_user, :all).execute
+ end
+
+ def cluster
+ @cluster ||= clusters_for_current_user.find(params[:cluster_id])
+ end
+
+ def create_cluster_user_params
+ declared_params.merge({
+ provider_type: :user,
+ platform_type: :kubernetes,
+ clusterable: clusterable_instance
+ })
+ end
+
+ def update_cluster_params
+ declared_params(include_missing: false).without(:cluster_id)
+ end
+ end
+ end
+ end
+end
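
The POST /clusters/add endpoint above nests the Kubernetes connection details under platform_kubernetes_attributes. An illustrative parameter hash (the keys come from the params block above; all values are assumptions):

    cluster_params = {
      name: 'production-cluster',
      managed: true,                            # optional, defaults to true
      platform_kubernetes_attributes: {
        api_url: 'https://kube.example.com',    # required, must not be blank
        token: 'kube-admin-token',              # required
        ca_cert: File.read('ca.pem'),           # optional, for self-signed API certificates
        authorization_type: 'rbac'              # optional, defaults to 'rbac'
      }
    }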
diff --git a/lib/api/admin/sidekiq.rb b/lib/api/admin/sidekiq.rb
index a700bea0fd7..f4c84f2eee8 100644
--- a/lib/api/admin/sidekiq.rb
+++ b/lib/api/admin/sidekiq.rb
@@ -2,7 +2,7 @@
module API
module Admin
- class Sidekiq < Grape::API
+ class Sidekiq < Grape::API::Instance
before { authenticated_as_admin! }
namespace 'admin' do
diff --git a/lib/api/api.rb b/lib/api/api.rb
index fb67258f331..a89dc0fa6fa 100644
--- a/lib/api/api.rb
+++ b/lib/api/api.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class API < Grape::API
+ class API < Grape::API::Instance
include APIGuard
LOG_FILENAME = Rails.root.join("log", "api_json.log")
@@ -46,6 +46,8 @@ module API
end
before do
+ coerce_nil_params_to_array!
+
Gitlab::ApplicationContext.push(
user: -> { @current_user },
project: -> { @project },
@@ -108,6 +110,7 @@ module API
end
format :json
+ formatter :json, Gitlab::Json::GrapeFormatter
content_type :txt, "text/plain"
# Ensure the namespace is right, otherwise we might load Grape::API::Helpers
@@ -122,6 +125,7 @@ module API
# Keep in alphabetical order
mount ::API::AccessRequests
mount ::API::Admin::Ci::Variables
+ mount ::API::Admin::InstanceClusters
mount ::API::Admin::Sidekiq
mount ::API::Appearance
mount ::API::Applications
@@ -131,6 +135,10 @@ module API
mount ::API::Boards
mount ::API::Branches
mount ::API::BroadcastMessages
+ mount ::API::Ci::Pipelines
+ mount ::API::Ci::PipelineSchedules
+ mount ::API::Ci::Runner
+ mount ::API::Ci::Runners
mount ::API::Commits
mount ::API::CommitStatuses
mount ::API::ContainerRegistryEvent
@@ -152,6 +160,7 @@ module API
mount ::API::Groups
mount ::API::GroupContainerRepositories
mount ::API::GroupVariables
+ mount ::API::ImportBitbucketServer
mount ::API::ImportGithub
mount ::API::Issues
mount ::API::JobArtifacts
@@ -163,6 +172,7 @@ module API
mount ::API::Members
mount ::API::MergeRequestDiffs
mount ::API::MergeRequests
+ mount ::API::MergeRequestApprovals
mount ::API::Metrics::Dashboard::Annotations
mount ::API::Metrics::UserStarredDashboards
mount ::API::Namespaces
@@ -170,11 +180,20 @@ module API
mount ::API::Discussions
mount ::API::ResourceLabelEvents
mount ::API::ResourceMilestoneEvents
+ mount ::API::ResourceStateEvents
mount ::API::NotificationSettings
+ mount ::API::ProjectPackages
+ mount ::API::GroupPackages
+ mount ::API::PackageFiles
+ mount ::API::NugetPackages
+ mount ::API::PypiPackages
+ mount ::API::ComposerPackages
+ mount ::API::ConanPackages
+ mount ::API::MavenPackages
+ mount ::API::NpmPackages
+ mount ::API::GoProxy
mount ::API::Pages
mount ::API::PagesDomains
- mount ::API::Pipelines
- mount ::API::PipelineSchedules
mount ::API::ProjectClusters
mount ::API::ProjectContainerRepositories
mount ::API::ProjectEvents
@@ -195,8 +214,6 @@ module API
mount ::API::Release::Links
mount ::API::RemoteMirrors
mount ::API::Repositories
- mount ::API::Runner
- mount ::API::Runners
mount ::API::Search
mount ::API::Services
mount ::API::Settings
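
For context on the formatter :json, Gitlab::Json::GrapeFormatter line above: a Grape formatter is simply an object that responds to call(object, env) and returns the response body as a String. A minimal sketch of that interface with a hypothetical stand-in (the real Gitlab::Json::GrapeFormatter introduced in this commit does more than this):

    require 'json'

    module ExampleJsonFormatter  # hypothetical stand-in, not the formatter shipped here
      def self.call(object, _env)
        # Grape passes the presented object and the Rack env; the returned
        # String becomes the HTTP response body.
        object.is_a?(String) ? object : object.to_json
      end
    end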
diff --git a/lib/api/api_guard.rb b/lib/api/api_guard.rb
index c6557fce541..4b87861a3de 100644
--- a/lib/api/api_guard.rb
+++ b/lib/api/api_guard.rb
@@ -43,7 +43,6 @@ module API
# Helper Methods for Grape Endpoint
module HelperMethods
- prepend_if_ee('EE::API::APIGuard::HelperMethods') # rubocop: disable Cop/InjectEnterpriseEditionModule
include Gitlab::Auth::AuthFinders
def access_token
@@ -66,7 +65,7 @@ module API
def find_user_from_sources
deploy_token_from_request ||
- find_user_from_access_token ||
+ find_user_from_bearer_token ||
find_user_from_job_token ||
find_user_from_warden
end
@@ -153,7 +152,14 @@ module API
{ scope: e.scopes })
end
- response.finish
+ status, headers, body = response.finish
+
+ # Grape expects a Rack::Response
+ # (https://github.com/ruby-grape/grape/commit/c117bff7d22971675f4b34367d3a98bc31c8fc02),
+ # so we need to recreate the response again even though
+ # response.finish already does this.
+ # (https://github.com/nov/rack-oauth2/blob/40c9a99fd80486ccb8de0e4869ae384547c0d703/lib/rack/oauth2/server/abstract/error.rb#L26).
+ Rack::Response.new(body, status, headers)
end
end
end
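
As a small illustration of the re-wrapping above (an assumption-level sketch, not from the patch): Rack::Response#finish returns a [status, headers, body] triple, and the hunk rebuilds a Rack::Response from that triple because Grape now expects a response object rather than the raw triple.

    require 'rack'

    original = Rack::Response.new(['{"error":"insufficient_scope"}'], 403,
                                  { 'Content-Type' => 'application/json' })
    status, headers, body = original.finish

    # Recreate the object Grape expects, mirroring the hunk above.
    rack_response = Rack::Response.new(body, status, headers)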
diff --git a/lib/api/appearance.rb b/lib/api/appearance.rb
index 71a35bb4493..f98004af480 100644
--- a/lib/api/appearance.rb
+++ b/lib/api/appearance.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class Appearance < Grape::API
+ class Appearance < Grape::API::Instance
before { authenticated_as_admin! }
helpers do
diff --git a/lib/api/applications.rb b/lib/api/applications.rb
index 70e6b8395d7..4e8d68c8d09 100644
--- a/lib/api/applications.rb
+++ b/lib/api/applications.rb
@@ -2,7 +2,7 @@
module API
# External applications API
- class Applications < Grape::API
+ class Applications < Grape::API::Instance
before { authenticated_as_admin! }
resource :applications do
diff --git a/lib/api/avatar.rb b/lib/api/avatar.rb
index 0f14d003065..9501e777fff 100644
--- a/lib/api/avatar.rb
+++ b/lib/api/avatar.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class Avatar < Grape::API
+ class Avatar < Grape::API::Instance
resource :avatar do
desc 'Return avatar url for a user' do
success Entities::Avatar
diff --git a/lib/api/award_emoji.rb b/lib/api/award_emoji.rb
index 8e3b3ff8ce5..0a3df3ed96e 100644
--- a/lib/api/award_emoji.rb
+++ b/lib/api/award_emoji.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class AwardEmoji < Grape::API
+ class AwardEmoji < Grape::API::Instance
include PaginationParams
before { authenticate! }
diff --git a/lib/api/badges.rb b/lib/api/badges.rb
index d2152fad07b..f6cd3f83ff3 100644
--- a/lib/api/badges.rb
+++ b/lib/api/badges.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class Badges < Grape::API
+ class Badges < Grape::API::Instance
include PaginationParams
before { authenticate_non_get! }
diff --git a/lib/api/boards.rb b/lib/api/boards.rb
index 87818903705..1f5086127a8 100644
--- a/lib/api/boards.rb
+++ b/lib/api/boards.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class Boards < Grape::API
+ class Boards < Grape::API::Instance
include BoardsResponses
include PaginationParams
diff --git a/lib/api/branches.rb b/lib/api/branches.rb
index 081e8ffe4f0..5e9c2caf8f5 100644
--- a/lib/api/branches.rb
+++ b/lib/api/branches.rb
@@ -3,7 +3,7 @@
require 'mime/types'
module API
- class Branches < Grape::API
+ class Branches < Grape::API::Instance
include PaginationParams
BRANCH_ENDPOINT_REQUIREMENTS = API::NAMESPACE_OR_PROJECT_REQUIREMENTS.merge(branch: API::NO_SLASH_URL_PART_REGEX)
@@ -32,14 +32,21 @@ module API
params do
use :pagination
use :filter_params
+
+ optional :page_token, type: String, desc: 'Name of branch to start the pagination from'
end
get ':id/repository/branches' do
user_project.preload_protected_branches
repository = user_project.repository
- branches = BranchesFinder.new(repository, declared_params(include_missing: false)).execute
- branches = paginate(::Kaminari.paginate_array(branches))
+ if Feature.enabled?(:branch_list_keyset_pagination, user_project)
+ branches = BranchesFinder.new(repository, declared_params(include_missing: false)).execute(gitaly_pagination: true)
+ else
+ branches = BranchesFinder.new(repository, declared_params(include_missing: false)).execute
+ branches = paginate(::Kaminari.paginate_array(branches))
+ end
+
merged_branch_names = repository.merged_branch_names(branches.map(&:name))
present(
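
The keyset-pagination branch above is gated by the branch_list_keyset_pagination feature flag, checked per project. As an illustrative sketch only (the flag name comes from the hunk; the project path is an assumption), it could be enabled from a Rails console:

    project = Project.find_by_full_path('group/project')  # hypothetical project path
    Feature.enable(:branch_list_keyset_pagination, project)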
diff --git a/lib/api/broadcast_messages.rb b/lib/api/broadcast_messages.rb
index 42e7dc751f0..dcf950d7a03 100644
--- a/lib/api/broadcast_messages.rb
+++ b/lib/api/broadcast_messages.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class BroadcastMessages < Grape::API
+ class BroadcastMessages < Grape::API::Instance
include PaginationParams
resource :broadcast_messages do
diff --git a/lib/api/ci/pipeline_schedules.rb b/lib/api/ci/pipeline_schedules.rb
new file mode 100644
index 00000000000..80ad8aa04dd
--- /dev/null
+++ b/lib/api/ci/pipeline_schedules.rb
@@ -0,0 +1,217 @@
+# frozen_string_literal: true
+
+module API
+ module Ci
+ class PipelineSchedules < Grape::API::Instance
+ include PaginationParams
+
+ before { authenticate! }
+
+ params do
+ requires :id, type: String, desc: 'The ID of a project'
+ end
+ resource :projects, requirements: ::API::API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
+ desc 'Get all pipeline schedules' do
+ success Entities::PipelineSchedule
+ end
+ params do
+ use :pagination
+ optional :scope, type: String, values: %w[active inactive],
+ desc: 'The scope of pipeline schedules'
+ end
+ # rubocop: disable CodeReuse/ActiveRecord
+ get ':id/pipeline_schedules' do
+ authorize! :read_pipeline_schedule, user_project
+
+ schedules = ::Ci::PipelineSchedulesFinder.new(user_project).execute(scope: params[:scope])
+ .preload([:owner, :last_pipeline])
+ present paginate(schedules), with: Entities::PipelineSchedule
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ desc 'Get a single pipeline schedule' do
+ success Entities::PipelineScheduleDetails
+ end
+ params do
+ requires :pipeline_schedule_id, type: Integer, desc: 'The pipeline schedule id'
+ end
+ get ':id/pipeline_schedules/:pipeline_schedule_id' do
+ present pipeline_schedule, with: Entities::PipelineScheduleDetails
+ end
+
+ desc 'Create a new pipeline schedule' do
+ success Entities::PipelineScheduleDetails
+ end
+ params do
+ requires :description, type: String, desc: 'The description of pipeline schedule'
+ requires :ref, type: String, desc: 'The branch/tag name will be triggered', allow_blank: false
+ requires :cron, type: String, desc: 'The cron'
+ optional :cron_timezone, type: String, default: 'UTC', desc: 'The timezone'
+ optional :active, type: Boolean, default: true, desc: 'The activation of pipeline schedule'
+ end
+ post ':id/pipeline_schedules' do
+ authorize! :create_pipeline_schedule, user_project
+
+ pipeline_schedule = ::Ci::CreatePipelineScheduleService
+ .new(user_project, current_user, declared_params(include_missing: false))
+ .execute
+
+ if pipeline_schedule.persisted?
+ present pipeline_schedule, with: Entities::PipelineScheduleDetails
+ else
+ render_validation_error!(pipeline_schedule)
+ end
+ end
+
+ desc 'Edit a pipeline schedule' do
+ success Entities::PipelineScheduleDetails
+ end
+ params do
+ requires :pipeline_schedule_id, type: Integer, desc: 'The pipeline schedule id'
+ optional :description, type: String, desc: 'The description of pipeline schedule'
+ optional :ref, type: String, desc: 'The branch/tag name will be triggered'
+ optional :cron, type: String, desc: 'The cron'
+ optional :cron_timezone, type: String, desc: 'The timezone'
+ optional :active, type: Boolean, desc: 'The activation of pipeline schedule'
+ end
+ put ':id/pipeline_schedules/:pipeline_schedule_id' do
+ authorize! :update_pipeline_schedule, pipeline_schedule
+
+ if pipeline_schedule.update(declared_params(include_missing: false))
+ present pipeline_schedule, with: Entities::PipelineScheduleDetails
+ else
+ render_validation_error!(pipeline_schedule)
+ end
+ end
+
+ desc 'Take ownership of a pipeline schedule' do
+ success Entities::PipelineScheduleDetails
+ end
+ params do
+ requires :pipeline_schedule_id, type: Integer, desc: 'The pipeline schedule id'
+ end
+ post ':id/pipeline_schedules/:pipeline_schedule_id/take_ownership' do
+ authorize! :update_pipeline_schedule, pipeline_schedule
+
+ if pipeline_schedule.own!(current_user)
+ present pipeline_schedule, with: Entities::PipelineScheduleDetails
+ else
+ render_validation_error!(pipeline_schedule)
+ end
+ end
+
+ desc 'Delete a pipeline schedule' do
+ success Entities::PipelineScheduleDetails
+ end
+ params do
+ requires :pipeline_schedule_id, type: Integer, desc: 'The pipeline schedule id'
+ end
+ delete ':id/pipeline_schedules/:pipeline_schedule_id' do
+ authorize! :admin_pipeline_schedule, pipeline_schedule
+
+ destroy_conditionally!(pipeline_schedule)
+ end
+
+ desc 'Play a scheduled pipeline immediately' do
+ detail 'This feature was added in GitLab 12.8'
+ end
+ params do
+ requires :pipeline_schedule_id, type: Integer, desc: 'The pipeline schedule id'
+ end
+ post ':id/pipeline_schedules/:pipeline_schedule_id/play' do
+ authorize! :play_pipeline_schedule, pipeline_schedule
+
+ job_id = RunPipelineScheduleWorker # rubocop:disable CodeReuse/Worker
+ .perform_async(pipeline_schedule.id, current_user.id)
+
+ if job_id
+ created!
+ else
+ render_api_error!('Unable to schedule pipeline run immediately', 500)
+ end
+ end
+
+ desc 'Create a new pipeline schedule variable' do
+ success Entities::Variable
+ end
+ params do
+ requires :pipeline_schedule_id, type: Integer, desc: 'The pipeline schedule id'
+ requires :key, type: String, desc: 'The key of the variable'
+ requires :value, type: String, desc: 'The value of the variable'
+ optional :variable_type, type: String, values: ::Ci::PipelineScheduleVariable.variable_types.keys, desc: 'The type of variable, must be one of env_var or file. Defaults to env_var'
+ end
+ post ':id/pipeline_schedules/:pipeline_schedule_id/variables' do
+ authorize! :update_pipeline_schedule, pipeline_schedule
+
+ variable_params = declared_params(include_missing: false)
+ variable = pipeline_schedule.variables.create(variable_params)
+ if variable.persisted?
+ present variable, with: Entities::Variable
+ else
+ render_validation_error!(variable)
+ end
+ end
+
+ desc 'Edit a pipeline schedule variable' do
+ success Entities::Variable
+ end
+ params do
+ requires :pipeline_schedule_id, type: Integer, desc: 'The pipeline schedule id'
+ requires :key, type: String, desc: 'The key of the variable'
+ optional :value, type: String, desc: 'The value of the variable'
+ optional :variable_type, type: String, values: ::Ci::PipelineScheduleVariable.variable_types.keys, desc: 'The type of variable, must be one of env_var or file'
+ end
+ put ':id/pipeline_schedules/:pipeline_schedule_id/variables/:key' do
+ authorize! :update_pipeline_schedule, pipeline_schedule
+
+ if pipeline_schedule_variable.update(declared_params(include_missing: false))
+ present pipeline_schedule_variable, with: Entities::Variable
+ else
+ render_validation_error!(pipeline_schedule_variable)
+ end
+ end
+
+ desc 'Delete a pipeline schedule variable' do
+ success Entities::Variable
+ end
+ params do
+ requires :pipeline_schedule_id, type: Integer, desc: 'The pipeline schedule id'
+ requires :key, type: String, desc: 'The key of the variable'
+ end
+ delete ':id/pipeline_schedules/:pipeline_schedule_id/variables/:key' do
+ authorize! :admin_pipeline_schedule, pipeline_schedule
+
+ status :accepted
+ present pipeline_schedule_variable.destroy, with: Entities::Variable
+ end
+ end
+
+ helpers do
+ # rubocop: disable CodeReuse/ActiveRecord
+ def pipeline_schedule
+ @pipeline_schedule ||=
+ user_project
+ .pipeline_schedules
+ .preload(:owner, :last_pipeline)
+ .find_by(id: params.delete(:pipeline_schedule_id)).tap do |pipeline_schedule|
+ unless can?(current_user, :read_pipeline_schedule, pipeline_schedule)
+ not_found!('Pipeline Schedule')
+ end
+ end
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def pipeline_schedule_variable
+ @pipeline_schedule_variable ||=
+ pipeline_schedule.variables.find_by(key: params[:key]).tap do |pipeline_schedule_variable|
+ unless pipeline_schedule_variable
+ not_found!('Pipeline Schedule Variable')
+ end
+ end
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+ end
+ end
+ end
+end
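
As a usage sketch only (not part of the patch), creating a schedule through the endpoint above could look like the following; the /api/v4 prefix, host, project ID, and token variable are assumptions:

    require 'net/http'
    require 'json'
    require 'uri'

    project_id = 42  # hypothetical project ID
    uri = URI("https://gitlab.example.com/api/v4/projects/#{project_id}/pipeline_schedules")
    req = Net::HTTP::Post.new(uri, 'Content-Type' => 'application/json',
                                   'PRIVATE-TOKEN' => ENV['GITLAB_TOKEN'].to_s)
    req.body = {
      description: 'Nightly build',  # required by the params block above
      ref: 'master',
      cron: '0 2 * * *',
      cron_timezone: 'UTC'
    }.to_json

    res = Net::HTTP.start(uri.host, uri.port, use_ssl: true) { |http| http.request(req) }
    puts "#{res.code}: #{res.body}"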
diff --git a/lib/api/ci/pipelines.rb b/lib/api/ci/pipelines.rb
new file mode 100644
index 00000000000..33bb8b38d92
--- /dev/null
+++ b/lib/api/ci/pipelines.rb
@@ -0,0 +1,189 @@
+# frozen_string_literal: true
+
+module API
+ module Ci
+ class Pipelines < Grape::API::Instance
+ include PaginationParams
+
+ before { authenticate_non_get! }
+
+ params do
+ requires :id, type: String, desc: 'The project ID'
+ end
+ resource :projects, requirements: ::API::API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
+ desc 'Get all Pipelines of the project' do
+ detail 'This feature was introduced in GitLab 8.11.'
+ success Entities::PipelineBasic
+ end
+ params do
+ use :pagination
+ optional :scope, type: String, values: %w[running pending finished branches tags],
+ desc: 'The scope of pipelines'
+ optional :status, type: String, values: ::Ci::HasStatus::AVAILABLE_STATUSES,
+ desc: 'The status of pipelines'
+ optional :ref, type: String, desc: 'The ref of pipelines'
+ optional :sha, type: String, desc: 'The sha of pipelines'
+ optional :yaml_errors, type: Boolean, desc: 'Returns pipelines with invalid configurations'
+ optional :name, type: String, desc: 'The name of the user who triggered pipelines'
+ optional :username, type: String, desc: 'The username of the user who triggered pipelines'
+ optional :updated_before, type: DateTime, desc: 'Return pipelines updated before the specified datetime. Format: ISO 8601 YYYY-MM-DDTHH:MM:SSZ'
+ optional :updated_after, type: DateTime, desc: 'Return pipelines updated after the specified datetime. Format: ISO 8601 YYYY-MM-DDTHH:MM:SSZ'
+ optional :order_by, type: String, values: ::Ci::PipelinesFinder::ALLOWED_INDEXED_COLUMNS, default: 'id',
+ desc: 'Order pipelines'
+ optional :sort, type: String, values: %w[asc desc], default: 'desc',
+ desc: 'Sort pipelines'
+ end
+ get ':id/pipelines' do
+ authorize! :read_pipeline, user_project
+ authorize! :read_build, user_project
+
+ pipelines = ::Ci::PipelinesFinder.new(user_project, current_user, params).execute
+ present paginate(pipelines), with: Entities::PipelineBasic
+ end
+
+ desc 'Create a new pipeline' do
+ detail 'This feature was introduced in GitLab 8.14'
+ success Entities::Pipeline
+ end
+ params do
+ requires :ref, type: String, desc: 'Reference'
+ optional :variables, Array, desc: 'Array of variables available in the pipeline'
+ end
+ post ':id/pipeline' do
+ Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-foss/issues/42124')
+
+ authorize! :create_pipeline, user_project
+
+ pipeline_params = declared_params(include_missing: false)
+ .merge(variables_attributes: params[:variables])
+ .except(:variables)
+
+ new_pipeline = ::Ci::CreatePipelineService.new(user_project,
+ current_user,
+ pipeline_params)
+ .execute(:api, ignore_skip_ci: true, save_on_errors: false)
+
+ if new_pipeline.persisted?
+ present new_pipeline, with: Entities::Pipeline
+ else
+ render_validation_error!(new_pipeline)
+ end
+ end
+
+ desc 'Gets the latest pipeline for the project branch' do
+ detail 'This feature was introduced in GitLab 12.3'
+ success Entities::Pipeline
+ end
+ params do
+ optional :ref, type: String, desc: 'branch ref of pipeline'
+ end
+ get ':id/pipelines/latest' do
+ authorize! :read_pipeline, latest_pipeline
+
+ present latest_pipeline, with: Entities::Pipeline
+ end
+
+ desc 'Gets a specific pipeline for the project' do
+ detail 'This feature was introduced in GitLab 8.11'
+ success Entities::Pipeline
+ end
+ params do
+ requires :pipeline_id, type: Integer, desc: 'The pipeline ID'
+ end
+ get ':id/pipelines/:pipeline_id' do
+ authorize! :read_pipeline, pipeline
+
+ present pipeline, with: Entities::Pipeline
+ end
+
+ desc 'Gets the variables for a given pipeline' do
+ detail 'This feature was introduced in GitLab 11.11'
+ success Entities::Variable
+ end
+ params do
+ requires :pipeline_id, type: Integer, desc: 'The pipeline ID'
+ end
+ get ':id/pipelines/:pipeline_id/variables' do
+ authorize! :read_pipeline_variable, pipeline
+
+ present pipeline.variables, with: Entities::Variable
+ end
+
+ desc 'Gets the test report for a given pipeline' do
+ detail 'This feature was introduced in GitLab 13.0. Disabled by default behind feature flag `junit_pipeline_view`'
+ success TestReportEntity
+ end
+ params do
+ requires :pipeline_id, type: Integer, desc: 'The pipeline ID'
+ end
+ get ':id/pipelines/:pipeline_id/test_report' do
+ not_found! unless Feature.enabled?(:junit_pipeline_view, user_project)
+
+ authorize! :read_build, pipeline
+
+ present pipeline.test_reports, with: TestReportEntity, details: true
+ end
+
+ desc 'Deletes a pipeline' do
+ detail 'This feature was introduced in GitLab 11.6'
+ http_codes [[204, 'Pipeline was deleted'], [403, 'Forbidden']]
+ end
+ params do
+ requires :pipeline_id, type: Integer, desc: 'The pipeline ID'
+ end
+ delete ':id/pipelines/:pipeline_id' do
+ authorize! :destroy_pipeline, pipeline
+
+ destroy_conditionally!(pipeline) do
+ ::Ci::DestroyPipelineService.new(user_project, current_user).execute(pipeline)
+ end
+ end
+
+ desc 'Retry builds in the pipeline' do
+ detail 'This feature was introduced in GitLab 8.11.'
+ success Entities::Pipeline
+ end
+ params do
+ requires :pipeline_id, type: Integer, desc: 'The pipeline ID'
+ end
+ post ':id/pipelines/:pipeline_id/retry' do
+ authorize! :update_pipeline, pipeline
+
+ pipeline.retry_failed(current_user)
+
+ present pipeline, with: Entities::Pipeline
+ end
+
+ desc 'Cancel all builds in the pipeline' do
+ detail 'This feature was introduced in GitLab 8.11.'
+ success Entities::Pipeline
+ end
+ params do
+ requires :pipeline_id, type: Integer, desc: 'The pipeline ID'
+ end
+ post ':id/pipelines/:pipeline_id/cancel' do
+ authorize! :update_pipeline, pipeline
+
+ pipeline.cancel_running
+
+ status 200
+ present pipeline.reset, with: Entities::Pipeline
+ end
+ end
+
+ helpers do
+ def pipeline
+ strong_memoize(:pipeline) do
+ user_project.ci_pipelines.find(params[:pipeline_id])
+ end
+ end
+
+ def latest_pipeline
+ strong_memoize(:latest_pipeline) do
+ user_project.latest_pipeline_for_ref(params[:ref])
+ end
+ end
+ end
+ end
+ end
+end
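
One note on the POST :id/pipeline endpoint above: the variables parameter is an Array that gets merged into variables_attributes. An illustrative payload shape (values are assumptions):

    pipeline_params = {
      ref: 'master',
      variables: [
        { 'key' => 'DEPLOY_ENV', 'value' => 'staging' },
        { 'key' => 'DRY_RUN',    'value' => 'true' }
      ]
    }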
diff --git a/lib/api/ci/runner.rb b/lib/api/ci/runner.rb
new file mode 100644
index 00000000000..31be1bb7e3e
--- /dev/null
+++ b/lib/api/ci/runner.rb
@@ -0,0 +1,318 @@
+# frozen_string_literal: true
+
+module API
+ module Ci
+ class Runner < Grape::API::Instance
+ helpers ::API::Helpers::Runner
+
+ resource :runners do
+ desc 'Registers a new Runner' do
+ success Entities::RunnerRegistrationDetails
+ http_codes [[201, 'Runner was created'], [403, 'Forbidden']]
+ end
+ params do
+ requires :token, type: String, desc: 'Registration token'
+ optional :description, type: String, desc: %q(Runner's description)
+ optional :info, type: Hash, desc: %q(Runner's metadata)
+ optional :active, type: Boolean, desc: 'Should Runner be active'
+ optional :locked, type: Boolean, desc: 'Should Runner be locked for current project'
+ optional :access_level, type: String, values: ::Ci::Runner.access_levels.keys,
+ desc: 'The access_level of the runner'
+ optional :run_untagged, type: Boolean, desc: 'Should Runner handle untagged jobs'
+ optional :tag_list, type: Array[String], coerce_with: ::API::Validations::Types::CommaSeparatedToArray.coerce, desc: %q(List of Runner's tags)
+ optional :maximum_timeout, type: Integer, desc: 'Maximum timeout set when this Runner will handle the job'
+ end
+ post '/' do
+ attributes = attributes_for_keys([:description, :active, :locked, :run_untagged, :tag_list, :access_level, :maximum_timeout])
+ .merge(get_runner_details_from_request)
+
+ attributes =
+ if runner_registration_token_valid?
+ # Create shared runner. Requires admin access
+ attributes.merge(runner_type: :instance_type)
+ elsif project = Project.find_by_runners_token(params[:token])
+ # Create a specific runner for the project
+ attributes.merge(runner_type: :project_type, projects: [project])
+ elsif group = Group.find_by_runners_token(params[:token])
+ # Create a specific runner for the group
+ attributes.merge(runner_type: :group_type, groups: [group])
+ else
+ forbidden!
+ end
+
+ runner = ::Ci::Runner.create(attributes)
+
+ if runner.persisted?
+ present runner, with: Entities::RunnerRegistrationDetails
+ else
+ render_validation_error!(runner)
+ end
+ end
+
+ desc 'Deletes a registered Runner' do
+ http_codes [[204, 'Runner was deleted'], [403, 'Forbidden']]
+ end
+ params do
+ requires :token, type: String, desc: %q(Runner's authentication token)
+ end
+ delete '/' do
+ authenticate_runner!
+
+ runner = ::Ci::Runner.find_by_token(params[:token])
+
+ destroy_conditionally!(runner)
+ end
+
+ desc 'Validates authentication credentials' do
+ http_codes [[200, 'Credentials are valid'], [403, 'Forbidden']]
+ end
+ params do
+ requires :token, type: String, desc: %q(Runner's authentication token)
+ end
+ post '/verify' do
+ authenticate_runner!
+ status 200
+ end
+ end
+
+ resource :jobs do
+ before do
+ Gitlab::ApplicationContext.push(
+ user: -> { current_job&.user },
+ project: -> { current_job&.project }
+ )
+ end
+
+ desc 'Request a job' do
+ success Entities::JobRequest::Response
+ http_codes [[201, 'Job was scheduled'],
+ [204, 'No job for Runner'],
+ [403, 'Forbidden']]
+ end
+ params do
+ requires :token, type: String, desc: %q(Runner's authentication token)
+ optional :last_update, type: String, desc: %q(Runner's queue last_update token)
+ optional :info, type: Hash, desc: %q(Runner's metadata) do
+ optional :name, type: String, desc: %q(Runner's name)
+ optional :version, type: String, desc: %q(Runner's version)
+ optional :revision, type: String, desc: %q(Runner's revision)
+ optional :platform, type: String, desc: %q(Runner's platform)
+ optional :architecture, type: String, desc: %q(Runner's architecture)
+ optional :executor, type: String, desc: %q(Runner's executor)
+ optional :features, type: Hash, desc: %q(Runner's features)
+ end
+ optional :session, type: Hash, desc: %q(Runner's session data) do
+ optional :url, type: String, desc: %q(Session's url)
+ optional :certificate, type: String, desc: %q(Session's certificate)
+ optional :authorization, type: String, desc: %q(Session's authorization)
+ end
+ optional :job_age, type: Integer, desc: %q(Job should be older than the passed age in seconds to be run on the runner)
+ end
+
+ # Since we serialize the build output ourselves to ensure Gitaly
+ # gRPC calls succeed, we need a custom Grape format to handle
+ # this:
+ # 1. Grape will ordinarily call `JSON.dump` when Content-Type is set
+ # to application/json. To avoid this, we need to define a custom type in
+ # `content_type` and a custom formatter to go with it.
+ # 2. Grape will parse the request input with the parser defined for
+ # `content_type`. If no such parser exists, it will be treated as text. We
+ # reuse the existing JSON parser to preserve the previous behavior.
+ content_type :build_json, 'application/json'
+ formatter :build_json, ->(object, _) { object }
+ parser :build_json, ::Grape::Parser::Json
+
+ post '/request' do
+ authenticate_runner!
+
+ unless current_runner.active?
+ header 'X-GitLab-Last-Update', current_runner.ensure_runner_queue_value
+ break no_content!
+ end
+
+ runner_params = declared_params(include_missing: false)
+
+ if current_runner.runner_queue_value_latest?(runner_params[:last_update])
+ header 'X-GitLab-Last-Update', runner_params[:last_update]
+ Gitlab::Metrics.add_event(:build_not_found_cached)
+ break no_content!
+ end
+
+ new_update = current_runner.ensure_runner_queue_value
+ result = ::Ci::RegisterJobService.new(current_runner).execute(runner_params)
+
+ if result.valid?
+ if result.build_json
+ Gitlab::Metrics.add_event(:build_found)
+ env['api.format'] = :build_json
+ body result.build_json
+ else
+ Gitlab::Metrics.add_event(:build_not_found)
+ header 'X-GitLab-Last-Update', new_update
+ no_content!
+ end
+ else
+ # We received build that is invalid due to concurrency conflict
+ Gitlab::Metrics.add_event(:build_invalid)
+ conflict!
+ end
+ end
+
+ desc 'Updates a job' do
+ http_codes [[200, 'Job was updated'], [403, 'Forbidden']]
+ end
+ params do
+ requires :token, type: String, desc: %q(Runner's authentication token)
+ requires :id, type: Integer, desc: %q(Job's ID)
+ optional :trace, type: String, desc: %q(Job's full trace)
+ optional :state, type: String, desc: %q(Job's status: success, failed)
+ optional :failure_reason, type: String, desc: %q(Job's failure_reason)
+ end
+ put '/:id' do
+ job = authenticate_job!
+
+ job.trace.set(params[:trace]) if params[:trace]
+
+ Gitlab::Metrics.add_event(:update_build)
+
+ case params[:state].to_s
+ when 'running'
+ job.touch if job.needs_touch?
+ when 'success'
+ job.success!
+ when 'failed'
+ job.drop!(params[:failure_reason] || :unknown_failure)
+ end
+ end
+
+ desc 'Appends a patch to the job trace' do
+ http_codes [[202, 'Trace was patched'],
+ [400, 'Missing Content-Range header'],
+ [403, 'Forbidden'],
+ [416, 'Range not satisfiable']]
+ end
+ params do
+ requires :id, type: Integer, desc: %q(Job's ID)
+ optional :token, type: String, desc: %q(Job's authentication token)
+ end
+ patch '/:id/trace' do
+ job = authenticate_job!
+
+ error!('400 Missing header Content-Range', 400) unless request.headers.key?('Content-Range')
+ content_range = request.headers['Content-Range']
+ content_range = content_range.split('-')
+
+ # TODO:
+ # It seems that `Content-Range` as formatted by the runner is wrong:
+ # `byte_end` should point to the final byte, but it points to byte+1.
+ # That means we have to calculate the end of the body ourselves,
+ # as we cannot use `content_range[1]`.
+ # Issue: https://gitlab.com/gitlab-org/gitlab-runner/issues/3275
+
+ body_data = request.body.read
+ body_start = content_range[0].to_i
+ body_end = body_start + body_data.bytesize
+
+ stream_size = job.trace.append(body_data, body_start)
+ unless stream_size == body_end
+ break error!('416 Range Not Satisfiable', 416, { 'Range' => "0-#{stream_size}" })
+ end
+
+ status 202
+ header 'Job-Status', job.status
+ header 'Range', "0-#{stream_size}"
+ header 'X-GitLab-Trace-Update-Interval', job.trace.update_interval.to_s
+ end
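
A small worked example of the range arithmetic above, with invented values: the server ignores the second half of Content-Range and derives the expected trace size from the start offset plus the bytes it actually received.

body_data     = 'partial trace chunk' # 19 bytes
content_range = '128-147'             # as sent by the runner

body_start = content_range.split('-').first.to_i
body_end   = body_start + body_data.bytesize # 147: expected trace size after appending

# The append succeeds only if the trace really is body_end bytes long afterwards;
# otherwise the endpoint answers 416 with the current size in the Range header.
puts "expecting trace size #{body_end} after appending at offset #{body_start}"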
+
+ desc 'Authorize artifacts uploading for job' do
+ http_codes [[200, 'Upload allowed'],
+ [403, 'Forbidden'],
+ [405, 'Artifacts support not enabled'],
+ [413, 'File too large']]
+ end
+ params do
+ requires :id, type: Integer, desc: %q(Job's ID)
+ optional :token, type: String, desc: %q(Job's authentication token)
+
+ # NOTE:
+ # In the current runner, the filesize parameter will be empty here because the archive is
+ # streamed by the runner, so its size is not known ahead of time. Streaming avoids the extra
+ # I/O of first saving the archive on the runner and then sending it over the network.
+ optional :filesize, type: Integer, desc: %q(Artifacts filesize)
+
+ optional :artifact_type, type: String, desc: %q(The type of artifact),
+ default: 'archive', values: ::Ci::JobArtifact.file_types.keys
+ end
+ post '/:id/artifacts/authorize' do
+ not_allowed! unless Gitlab.config.artifacts.enabled
+ require_gitlab_workhorse!
+
+ job = authenticate_job!
+
+ result = ::Ci::CreateJobArtifactsService.new(job).authorize(artifact_type: params[:artifact_type], filesize: params[:filesize])
+
+ if result[:status] == :success
+ content_type Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE
+ status :ok
+ result[:headers]
+ else
+ render_api_error!(result[:message], result[:http_status])
+ end
+ end
+
+ desc 'Upload artifacts for job' do
+ success Entities::JobRequest::Response
+ http_codes [[201, 'Artifact uploaded'],
+ [400, 'Bad request'],
+ [403, 'Forbidden'],
+ [405, 'Artifacts support not enabled'],
+ [413, 'File too large']]
+ end
+ params do
+ requires :id, type: Integer, desc: %q(Job's ID)
+ requires :file, type: ::API::Validations::Types::WorkhorseFile, desc: %(The artifact file to store (generated by Multipart middleware))
+ optional :token, type: String, desc: %q(Job's authentication token)
+ optional :expire_in, type: String, desc: %q(Specify when artifacts should expire)
+ optional :artifact_type, type: String, desc: %q(The type of artifact),
+ default: 'archive', values: ::Ci::JobArtifact.file_types.keys
+ optional :artifact_format, type: String, desc: %q(The format of artifact),
+ default: 'zip', values: ::Ci::JobArtifact.file_formats.keys
+ optional :metadata, type: ::API::Validations::Types::WorkhorseFile, desc: %(The artifact metadata to store (generated by Multipart middleware))
+ end
+ post '/:id/artifacts' do
+ not_allowed! unless Gitlab.config.artifacts.enabled
+ require_gitlab_workhorse!
+
+ job = authenticate_job!
+
+ artifacts = params[:file]
+ metadata = params[:metadata]
+
+ result = ::Ci::CreateJobArtifactsService.new(job).execute(artifacts, params, metadata_file: metadata)
+
+ if result[:status] == :success
+ status :created
+ else
+ render_api_error!(result[:message], result[:http_status])
+ end
+ end
+
+ desc 'Download the artifacts file for job' do
+ http_codes [[200, 'Download allowed'],
+ [403, 'Forbidden'],
+ [404, 'Artifact not found']]
+ end
+ params do
+ requires :id, type: Integer, desc: %q(Job's ID)
+ optional :token, type: String, desc: %q(Job's authentication token)
+ optional :direct_download, default: false, type: Boolean, desc: %q(Perform direct download from remote storage instead of proxying artifacts)
+ end
+ get '/:id/artifacts' do
+ job = authenticate_job!(require_running: false)
+
+ present_carrierwave_file!(job.artifacts_file, supports_direct_download: params[:direct_download])
+ end
+ end
+ end
+ end
+end
diff --git a/lib/api/ci/runners.rb b/lib/api/ci/runners.rb
new file mode 100644
index 00000000000..2c156a71160
--- /dev/null
+++ b/lib/api/ci/runners.rb
@@ -0,0 +1,289 @@
+# frozen_string_literal: true
+
+module API
+ module Ci
+ class Runners < Grape::API::Instance
+ include PaginationParams
+
+ before { authenticate! }
+
+ resource :runners do
+ desc 'Get runners available for user' do
+ success Entities::Runner
+ end
+ params do
+ optional :scope, type: String, values: ::Ci::Runner::AVAILABLE_STATUSES,
+ desc: 'The scope of specific runners to show'
+ optional :type, type: String, values: ::Ci::Runner::AVAILABLE_TYPES,
+ desc: 'The type of the runners to show'
+ optional :status, type: String, values: ::Ci::Runner::AVAILABLE_STATUSES,
+ desc: 'The status of the runners to show'
+ optional :tag_list, type: Array[String], coerce_with: ::API::Validations::Types::CommaSeparatedToArray.coerce, desc: 'The tags of the runners to show'
+ use :pagination
+ end
+ get do
+ runners = current_user.ci_owned_runners
+ runners = filter_runners(runners, params[:scope], allowed_scopes: ::Ci::Runner::AVAILABLE_STATUSES)
+ runners = filter_runners(runners, params[:type], allowed_scopes: ::Ci::Runner::AVAILABLE_TYPES)
+ runners = filter_runners(runners, params[:status], allowed_scopes: ::Ci::Runner::AVAILABLE_STATUSES)
+ runners = runners.tagged_with(params[:tag_list]) if params[:tag_list]
+
+ present paginate(runners), with: Entities::Runner
+ end
+
+ desc 'Get all runners - shared and specific' do
+ success Entities::Runner
+ end
+ params do
+ optional :scope, type: String, values: ::Ci::Runner::AVAILABLE_SCOPES,
+ desc: 'The scope of specific runners to show'
+ optional :type, type: String, values: ::Ci::Runner::AVAILABLE_TYPES,
+ desc: 'The type of the runners to show'
+ optional :status, type: String, values: ::Ci::Runner::AVAILABLE_STATUSES,
+ desc: 'The status of the runners to show'
+ optional :tag_list, type: Array[String], coerce_with: ::API::Validations::Types::CommaSeparatedToArray.coerce, desc: 'The tags of the runners to show'
+ use :pagination
+ end
+ get 'all' do
+ authenticated_as_admin!
+
+ runners = ::Ci::Runner.all
+ runners = filter_runners(runners, params[:scope])
+ runners = filter_runners(runners, params[:type], allowed_scopes: ::Ci::Runner::AVAILABLE_TYPES)
+ runners = filter_runners(runners, params[:status], allowed_scopes: ::Ci::Runner::AVAILABLE_STATUSES)
+ runners = runners.tagged_with(params[:tag_list]) if params[:tag_list]
+
+ present paginate(runners), with: Entities::Runner
+ end
+
+ desc "Get runner's details" do
+ success Entities::RunnerDetails
+ end
+ params do
+ requires :id, type: Integer, desc: 'The ID of the runner'
+ end
+ get ':id' do
+ runner = get_runner(params[:id])
+ authenticate_show_runner!(runner)
+
+ present runner, with: Entities::RunnerDetails, current_user: current_user
+ end
+
+ desc "Update runner's details" do
+ success Entities::RunnerDetails
+ end
+ params do
+ requires :id, type: Integer, desc: 'The ID of the runner'
+ optional :description, type: String, desc: 'The description of the runner'
+ optional :active, type: Boolean, desc: 'The state of a runner'
+ optional :tag_list, type: Array[String], coerce_with: ::API::Validations::Types::CommaSeparatedToArray.coerce, desc: 'The list of tags for a runner'
+ optional :run_untagged, type: Boolean, desc: 'Flag indicating the runner can execute untagged jobs'
+ optional :locked, type: Boolean, desc: 'Flag indicating the runner is locked'
+ optional :access_level, type: String, values: ::Ci::Runner.access_levels.keys,
+ desc: 'The access_level of the runner'
+ optional :maximum_timeout, type: Integer, desc: 'Maximum timeout set when this runner handles the job'
+ at_least_one_of :description, :active, :tag_list, :run_untagged, :locked, :access_level, :maximum_timeout
+ end
+ put ':id' do
+ runner = get_runner(params.delete(:id))
+ authenticate_update_runner!(runner)
+ update_service = ::Ci::UpdateRunnerService.new(runner)
+
+ if update_service.update(declared_params(include_missing: false))
+ present runner, with: Entities::RunnerDetails, current_user: current_user
+ else
+ render_validation_error!(runner)
+ end
+ end
+
+ desc 'Remove a runner' do
+ success Entities::Runner
+ end
+ params do
+ requires :id, type: Integer, desc: 'The ID of the runner'
+ end
+ delete ':id' do
+ runner = get_runner(params[:id])
+
+ authenticate_delete_runner!(runner)
+
+ destroy_conditionally!(runner)
+ end
+
+ desc 'List jobs running on a runner' do
+ success Entities::JobBasicWithProject
+ end
+ params do
+ requires :id, type: Integer, desc: 'The ID of the runner'
+ optional :status, type: String, desc: 'Status of the job', values: ::Ci::Build::AVAILABLE_STATUSES
+ optional :order_by, type: String, desc: 'Order jobs by `id`', values: ::Ci::RunnerJobsFinder::ALLOWED_INDEXED_COLUMNS
+ optional :sort, type: String, values: %w[asc desc], default: 'desc', desc: 'Sort by asc (ascending) or desc (descending)'
+ use :pagination
+ end
+ get ':id/jobs' do
+ runner = get_runner(params[:id])
+ authenticate_list_runners_jobs!(runner)
+
+ jobs = ::Ci::RunnerJobsFinder.new(runner, params).execute
+
+ present paginate(jobs), with: Entities::JobBasicWithProject
+ end
+ end
+
+ params do
+ requires :id, type: String, desc: 'The ID of a project'
+ end
+ resource :projects, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
+ before { authorize_admin_project }
+
+ desc 'Get runners available for project' do
+ success Entities::Runner
+ end
+ params do
+ optional :scope, type: String, values: ::Ci::Runner::AVAILABLE_SCOPES,
+ desc: 'The scope of specific runners to show'
+ optional :type, type: String, values: ::Ci::Runner::AVAILABLE_TYPES,
+ desc: 'The type of the runners to show'
+ optional :status, type: String, values: ::Ci::Runner::AVAILABLE_STATUSES,
+ desc: 'The status of the runners to show'
+ optional :tag_list, type: Array[String], coerce_with: ::API::Validations::Types::CommaSeparatedToArray.coerce, desc: 'The tags of the runners to show'
+ use :pagination
+ end
+ get ':id/runners' do
+ runners = ::Ci::Runner.owned_or_instance_wide(user_project.id)
+ # `scope` is deprecated for project runners, but the API documentation still supports it.
+ # It is not included in the `apply_filter` helper because it is not supported for group runners.
+ runners = filter_runners(runners, params[:scope])
+ runners = apply_filter(runners, params)
+
+ present paginate(runners), with: Entities::Runner
+ end
+
+ desc 'Enable a runner for a project' do
+ success Entities::Runner
+ end
+ params do
+ requires :runner_id, type: Integer, desc: 'The ID of the runner'
+ end
+ post ':id/runners' do
+ runner = get_runner(params[:runner_id])
+ authenticate_enable_runner!(runner)
+
+ if runner.assign_to(user_project)
+ present runner, with: Entities::Runner
+ else
+ render_validation_error!(runner)
+ end
+ end
+
+ desc "Disable project's runner" do
+ success Entities::Runner
+ end
+ params do
+ requires :runner_id, type: Integer, desc: 'The ID of the runner'
+ end
+ # rubocop: disable CodeReuse/ActiveRecord
+ delete ':id/runners/:runner_id' do
+ runner_project = user_project.runner_projects.find_by(runner_id: params[:runner_id])
+ not_found!('Runner') unless runner_project
+
+ runner = runner_project.runner
+ forbidden!("Only one project associated with the runner. Please remove the runner instead") if runner.projects.count == 1
+
+ destroy_conditionally!(runner_project)
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+ end
+
+ params do
+ requires :id, type: String, desc: 'The ID of a group'
+ end
+ resource :groups, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
+ before { authorize_admin_group }
+
+ desc 'Get runners available for group' do
+ success Entities::Runner
+ end
+ params do
+ optional :type, type: String, values: ::Ci::Runner::AVAILABLE_TYPES,
+ desc: 'The type of the runners to show'
+ optional :status, type: String, values: ::Ci::Runner::AVAILABLE_STATUSES,
+ desc: 'The status of the runners to show'
+ optional :tag_list, type: Array[String], coerce_with: ::API::Validations::Types::CommaSeparatedToArray.coerce, desc: 'The tags of the runners to show'
+ use :pagination
+ end
+ get ':id/runners' do
+ runners = ::Ci::Runner.belonging_to_group(user_group.id, include_ancestors: true)
+ runners = apply_filter(runners, params)
+
+ present paginate(runners), with: Entities::Runner
+ end
+ end
+
+ helpers do
+ def filter_runners(runners, scope, allowed_scopes: ::Ci::Runner::AVAILABLE_SCOPES)
+ return runners unless scope.present?
+
+ unless allowed_scopes.include?(scope)
+ render_api_error!('Scope contains invalid value', 400)
+ end
+
+ # Support deprecated scopes
+ if runners.respond_to?("deprecated_#{scope}")
+ scope = "deprecated_#{scope}"
+ end
+
+ runners.public_send(scope) # rubocop:disable GitlabSecurity/PublicSend
+ end
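
A standalone illustration of the scope dispatch in filter_runners; the class below is a toy stand-in for an ActiveRecord relation, not GitLab code. A deprecated public scope name is transparently rewritten to its deprecated_* equivalent before being dispatched.

class ToyRunnerRelation
  def active;            'only active runners';        end
  def deprecated_shared; 'only instance-type runners'; end
end

relation = ToyRunnerRelation.new
scope    = 'shared' # deprecated API value
scope    = "deprecated_#{scope}" if relation.respond_to?("deprecated_#{scope}")

puts relation.public_send(scope) # => "only instance-type runners"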
+
+ def apply_filter(runners, params)
+ runners = filter_runners(runners, params[:type], allowed_scopes: ::Ci::Runner::AVAILABLE_TYPES)
+ runners = filter_runners(runners, params[:status], allowed_scopes: ::Ci::Runner::AVAILABLE_STATUSES)
+ runners = runners.tagged_with(params[:tag_list]) if params[:tag_list]
+
+ runners
+ end
+
+ def get_runner(id)
+ runner = ::Ci::Runner.find(id)
+ not_found!('Runner') unless runner
+ runner
+ end
+
+ def authenticate_show_runner!(runner)
+ return if runner.instance_type? || current_user.admin?
+
+ forbidden!("No access granted") unless can?(current_user, :read_runner, runner)
+ end
+
+ def authenticate_update_runner!(runner)
+ return if current_user.admin?
+
+ forbidden!("No access granted") unless can?(current_user, :update_runner, runner)
+ end
+
+ def authenticate_delete_runner!(runner)
+ return if current_user.admin?
+
+ forbidden!("Runner associated with more than one project") if runner.projects.count > 1
+ forbidden!("No access granted") unless can?(current_user, :delete_runner, runner)
+ end
+
+ def authenticate_enable_runner!(runner)
+ forbidden!("Runner is a group runner") if runner.group_type?
+
+ return if current_user.admin?
+
+ forbidden!("Runner is locked") if runner.locked?
+ forbidden!("No access granted") unless can?(current_user, :assign_runner, runner)
+ end
+
+ def authenticate_list_runners_jobs!(runner)
+ return if current_user.admin?
+
+ forbidden!("No access granted") unless can?(current_user, :read_runner, runner)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/api/commit_statuses.rb b/lib/api/commit_statuses.rb
index b4c5d7869a2..140351c9e5c 100644
--- a/lib/api/commit_statuses.rb
+++ b/lib/api/commit_statuses.rb
@@ -3,7 +3,7 @@
require 'mime/types'
module API
- class CommitStatuses < Grape::API
+ class CommitStatuses < Grape::API::Instance
params do
requires :id, type: String, desc: 'The ID of a project'
end
@@ -60,7 +60,7 @@ module API
not_found! 'Commit' unless commit
- # Since the CommitStatus is attached to Ci::Pipeline (in the future Pipeline)
+ # Since the CommitStatus is attached to ::Ci::Pipeline (in the future Pipeline)
# We need to always have the pipeline object
# To have a valid pipeline object that can be attached to specific MR
# Other CI service needs to send `ref`
diff --git a/lib/api/commits.rb b/lib/api/commits.rb
index 086a1b7c402..1a0fe393753 100644
--- a/lib/api/commits.rb
+++ b/lib/api/commits.rb
@@ -3,7 +3,7 @@
require 'mime/types'
module API
- class Commits < Grape::API
+ class Commits < Grape::API::Instance
include PaginationParams
before do
diff --git a/lib/api/composer_packages.rb b/lib/api/composer_packages.rb
new file mode 100644
index 00000000000..726dc89271a
--- /dev/null
+++ b/lib/api/composer_packages.rb
@@ -0,0 +1,156 @@
+# frozen_string_literal: true
+
+# PHP composer support (https://getcomposer.org/)
+module API
+ class ComposerPackages < Grape::API::Instance
+ helpers ::API::Helpers::PackagesManagerClientsHelpers
+ helpers ::API::Helpers::RelatedResourcesHelpers
+ helpers ::API::Helpers::Packages::BasicAuthHelpers
+ include ::API::Helpers::Packages::BasicAuthHelpers::Constants
+ include ::Gitlab::Utils::StrongMemoize
+
+ content_type :json, 'application/json'
+ default_format :json
+
+ COMPOSER_ENDPOINT_REQUIREMENTS = {
+ package_name: API::NO_SLASH_URL_PART_REGEX
+ }.freeze
+
+ rescue_from ArgumentError do |e|
+ render_api_error!(e.message, 400)
+ end
+
+ rescue_from ActiveRecord::RecordInvalid do |e|
+ render_api_error!(e.message, 400)
+ end
+
+ helpers do
+ def packages
+ strong_memoize(:packages) do
+ packages = ::Packages::Composer::PackagesFinder.new(current_user, user_group).execute
+
+ if params[:package_name].present?
+ packages = packages.with_name(params[:package_name])
+ end
+
+ packages
+ end
+ end
+
+ def presenter
+ @presenter ||= ::Packages::Composer::PackagesPresenter.new(user_group, packages)
+ end
+ end
+
+ before do
+ require_packages_enabled!
+ end
+
+ params do
+ requires :id, type: String, desc: 'The ID of a group'
+ end
+
+ resource :group, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
+ before do
+ user_group
+ end
+
+ desc 'Composer packages endpoint at group level'
+
+ route_setting :authentication, job_token_allowed: true
+
+ get ':id/-/packages/composer/packages' do
+ presenter.root
+ end
+
+ desc 'Composer packages endpoint at group level for packages list'
+
+ params do
+ requires :sha, type: String, desc: 'Shasum of current json'
+ end
+
+ route_setting :authentication, job_token_allowed: true
+
+ get ':id/-/packages/composer/p/:sha' do
+ presenter.provider
+ end
+
+ desc 'Composer packages endpoint at group level for package versions metadata'
+
+ params do
+ requires :package_name, type: String, file_path: true, desc: 'The Composer package name'
+ end
+
+ route_setting :authentication, job_token_allowed: true
+
+ get ':id/-/packages/composer/*package_name', requirements: COMPOSER_ENDPOINT_REQUIREMENTS, file_path: true do
+ not_found! if packages.empty?
+
+ presenter.package_versions
+ end
+ end
+
+ params do
+ requires :id, type: Integer, desc: 'The ID of a project'
+ end
+
+ resource :projects, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
+ before do
+ unauthorized_user_project!
+ end
+
+ desc 'Composer packages endpoint for registering packages'
+
+ namespace ':id/packages/composer' do
+ route_setting :authentication, job_token_allowed: true
+
+ params do
+ optional :branch, type: String, desc: 'The name of the branch'
+ optional :tag, type: String, desc: 'The name of the tag'
+ exactly_one_of :tag, :branch
+ end
+
+ post do
+ authorize_create_package!(authorized_user_project)
+
+ if params[:branch].present?
+ params[:branch] = find_branch!(params[:branch])
+ elsif params[:tag].present?
+ params[:tag] = find_tag!(params[:tag])
+ else
+ bad_request!
+ end
+
+ track_event('register_package')
+
+ ::Packages::Composer::CreatePackageService
+ .new(authorized_user_project, current_user, declared_params)
+ .execute
+
+ created!
+ end
+
+ params do
+ requires :sha, type: String, desc: 'Shasum of current json'
+ requires :package_name, type: String, file_path: true, desc: 'The Composer package name'
+ end
+
+ get 'archives/*package_name' do
+ metadata = unauthorized_user_project
+ .packages
+ .composer
+ .with_name(params[:package_name])
+ .with_composer_target(params[:sha])
+ .first
+ &.composer_metadatum
+
+ not_found! unless metadata
+
+ send_git_archive unauthorized_user_project.repository, ref: metadata.target_sha, format: 'zip', append_sha: true
+ end
+ end
+ end
+ end
+end
diff --git a/lib/api/conan_packages.rb b/lib/api/conan_packages.rb
new file mode 100644
index 00000000000..1d941e422a7
--- /dev/null
+++ b/lib/api/conan_packages.rb
@@ -0,0 +1,309 @@
+# frozen_string_literal: true
+
+# Conan Package Manager Client API
+#
+# These API endpoints are not consumed directly by users, so there is no documentation for the
+# individual endpoints. They are called by the Conan package manager client when users run commands
+# like `conan install` or `conan upload`. The usage of the GitLab Conan repository is documented here:
+# https://docs.gitlab.com/ee/user/packages/conan_repository/#installing-a-package
+#
+# Technical debt: https://gitlab.com/gitlab-org/gitlab/issues/35798
+module API
+ class ConanPackages < Grape::API::Instance
+ helpers ::API::Helpers::PackagesManagerClientsHelpers
+
+ PACKAGE_REQUIREMENTS = {
+ package_name: API::NO_SLASH_URL_PART_REGEX,
+ package_version: API::NO_SLASH_URL_PART_REGEX,
+ package_username: API::NO_SLASH_URL_PART_REGEX,
+ package_channel: API::NO_SLASH_URL_PART_REGEX
+ }.freeze
+
+ FILE_NAME_REQUIREMENTS = {
+ file_name: API::NO_SLASH_URL_PART_REGEX
+ }.freeze
+
+ PACKAGE_COMPONENT_REGEX = Gitlab::Regex.conan_recipe_component_regex
+ CONAN_REVISION_REGEX = Gitlab::Regex.conan_revision_regex
+
+ before do
+ require_packages_enabled!
+
+ # Personal access token will be extracted from Bearer or Basic authorization
+ # in the overridden find_personal_access_token or find_user_from_job_token helpers
+ authenticate!
+ end
+
+ namespace 'packages/conan/v1' do
+ desc 'Ping the Conan API' do
+ detail 'This feature was introduced in GitLab 12.2'
+ end
+ route_setting :authentication, job_token_allowed: true
+ get 'ping' do
+ header 'X-Conan-Server-Capabilities', [].join(',')
+ end
+
+ desc 'Search for packages' do
+ detail 'This feature was introduced in GitLab 12.4'
+ end
+ params do
+ requires :q, type: String, desc: 'Search query'
+ end
+ route_setting :authentication, job_token_allowed: true
+ get 'conans/search' do
+ service = ::Packages::Conan::SearchService.new(current_user, query: params[:q]).execute
+ service.payload
+ end
+
+ namespace 'users' do
+ format :txt
+
+ desc 'Authenticate user against conan CLI' do
+ detail 'This feature was introduced in GitLab 12.2'
+ end
+ route_setting :authentication, job_token_allowed: true
+ get 'authenticate' do
+ unauthorized! unless token
+
+ token.to_jwt
+ end
+
+ desc 'Check for valid user credentials per conan CLI' do
+ detail 'This feature was introduced in GitLab 12.4'
+ end
+ route_setting :authentication, job_token_allowed: true
+ get 'check_credentials' do
+ authenticate!
+ :ok
+ end
+ end
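
A hedged sketch of the credential exchange these two endpoints imply, with placeholder host and tokens: a personal access token sent via Basic auth is traded for a JWT, which later requests present as a Bearer token.

require 'net/http'
require 'uri'

base = URI('https://gitlab.example.com/api/v4/packages/conan/v1')

Net::HTTP.start(base.host, base.port, use_ssl: true) do |http|
  authenticate = Net::HTTP::Get.new("#{base.path}/users/authenticate")
  authenticate.basic_auth('username', 'PERSONAL_ACCESS_TOKEN')
  jwt = http.request(authenticate).body # plain-text JWT from the endpoint above

  check = Net::HTTP::Get.new("#{base.path}/users/check_credentials")
  check['Authorization'] = "Bearer #{jwt}"
  puts http.request(check).code # => "200" when the credentials are valid
end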
+
+ params do
+ requires :package_name, type: String, regexp: PACKAGE_COMPONENT_REGEX, desc: 'Package name'
+ requires :package_version, type: String, regexp: PACKAGE_COMPONENT_REGEX, desc: 'Package version'
+ requires :package_username, type: String, regexp: PACKAGE_COMPONENT_REGEX, desc: 'Package username'
+ requires :package_channel, type: String, regexp: PACKAGE_COMPONENT_REGEX, desc: 'Package channel'
+ end
+ namespace 'conans/:package_name/:package_version/:package_username/:package_channel', requirements: PACKAGE_REQUIREMENTS do
+ # Get the snapshot
+ #
+ # The snapshot is a hash of { filename: md5 }, where md5 is the hash of that file.
+ # The client uses it to diff its local files and determine which of them still need
+ # to be uploaded. If no recipe exists, the snapshot is empty.
+ desc 'Package Snapshot' do
+ detail 'This feature was introduced in GitLab 12.5'
+ end
+ params do
+ requires :conan_package_reference, type: String, desc: 'Conan package ID'
+ end
+ route_setting :authentication, job_token_allowed: true
+ get 'packages/:conan_package_reference' do
+ authorize!(:read_package, project)
+
+ presenter = ::Packages::Conan::PackagePresenter.new(
+ recipe,
+ current_user,
+ project,
+ conan_package_reference: params[:conan_package_reference]
+ )
+
+ present presenter, with: ::API::Entities::ConanPackage::ConanPackageSnapshot
+ end
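
A hedged sketch of the response shape described in the snapshot comment above; the file names and contents are invented. Each file name maps to the MD5 of its contents, and a missing recipe yields an empty hash.

require 'digest'

package_files = {
  'conaninfo.txt'     => "[settings]\nos=Linux\n",
  'conan_package.tgz' => 'binary archive bytes'
}

snapshot = package_files.transform_values { |contents| Digest::MD5.hexdigest(contents) }
puts snapshot.inspect # => {"conaninfo.txt"=>"...", "conan_package.tgz"=>"..."}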
+
+ desc 'Recipe Snapshot' do
+ detail 'This feature was introduced in GitLab 12.5'
+ end
+ route_setting :authentication, job_token_allowed: true
+ get do
+ authorize!(:read_package, project)
+
+ presenter = ::Packages::Conan::PackagePresenter.new(recipe, current_user, project)
+
+ present presenter, with: ::API::Entities::ConanPackage::ConanRecipeSnapshot
+ end
+
+ # Get the manifest
+ # returns the download urls for the existing recipe in the registry
+ #
+ # the manifest is a hash of { filename: url }
+ # where the url is the download url for the file
+ desc 'Package Digest' do
+ detail 'This feature was introduced in GitLab 12.5'
+ end
+ params do
+ requires :conan_package_reference, type: String, desc: 'Conan package ID'
+ end
+ route_setting :authentication, job_token_allowed: true
+ get 'packages/:conan_package_reference/digest' do
+ present_package_download_urls
+ end
+
+ desc 'Recipe Digest' do
+ detail 'This feature was introduced in GitLab 12.5'
+ end
+ route_setting :authentication, job_token_allowed: true
+ get 'digest' do
+ present_recipe_download_urls
+ end
+
+ # Get the download urls
+ #
+ # returns the download urls for the existing recipe or package in the registry
+ #
+ # the manifest is a hash of { filename: url }
+ # where the url is the download url for the file
+ desc 'Package Download Urls' do
+ detail 'This feature was introduced in GitLab 12.5'
+ end
+ params do
+ requires :conan_package_reference, type: String, desc: 'Conan package ID'
+ end
+ route_setting :authentication, job_token_allowed: true
+ get 'packages/:conan_package_reference/download_urls' do
+ present_package_download_urls
+ end
+
+ desc 'Recipe Download Urls' do
+ detail 'This feature was introduced in GitLab 12.5'
+ end
+ route_setting :authentication, job_token_allowed: true
+ get 'download_urls' do
+ present_recipe_download_urls
+ end
+
+ # Get the upload urls
+ #
+ # request body contains { filename: filesize } where the filename is the
+ # name of the file the conan client is requesting to upload
+ #
+ # returns { filename: url }
+ # where the url is the upload url for the file that the conan client will use
+ desc 'Package Upload Urls' do
+ detail 'This feature was introduced in GitLab 12.4'
+ end
+ params do
+ requires :conan_package_reference, type: String, desc: 'Conan package ID'
+ end
+ route_setting :authentication, job_token_allowed: true
+ post 'packages/:conan_package_reference/upload_urls' do
+ authorize!(:read_package, project)
+
+ status 200
+ upload_urls = package_upload_urls(::Packages::Conan::FileMetadatum::PACKAGE_FILES)
+
+ present upload_urls, with: ::API::Entities::ConanPackage::ConanUploadUrls
+ end
+
+ desc 'Recipe Upload Urls' do
+ detail 'This feature was introduced in GitLab 12.4'
+ end
+ route_setting :authentication, job_token_allowed: true
+ post 'upload_urls' do
+ authorize!(:read_package, project)
+
+ status 200
+ upload_urls = recipe_upload_urls(::Packages::Conan::FileMetadatum::RECIPE_FILES)
+
+ present upload_urls, with: ::API::Entities::ConanPackage::ConanUploadUrls
+ end
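
A hedged sketch of the upload-URL exchange described above; the host and recipe path are hypothetical. The client posts { filename => filesize } and receives { filename => upload_url } for the files it still has to send.

requested_files = { 'conanfile.py' => 410, 'conanmanifest.txt' => 130 }

base = 'https://gitlab.example.com/api/v4/packages/conan/v1/files/hello/1.0.0/user/stable/0'

upload_urls = requested_files.keys.to_h do |file_name|
  [file_name, "#{base}/export/#{file_name}"]
end

puts upload_urls.inspect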
+
+ desc 'Delete Package' do
+ detail 'This feature was introduced in GitLab 12.5'
+ end
+ route_setting :authentication, job_token_allowed: true
+ delete do
+ authorize!(:destroy_package, project)
+
+ track_event('delete_package')
+
+ package.destroy
+ end
+ end
+
+ params do
+ requires :package_name, type: String, regexp: PACKAGE_COMPONENT_REGEX, desc: 'Package name'
+ requires :package_version, type: String, regexp: PACKAGE_COMPONENT_REGEX, desc: 'Package version'
+ requires :package_username, type: String, regexp: PACKAGE_COMPONENT_REGEX, desc: 'Package username'
+ requires :package_channel, type: String, regexp: PACKAGE_COMPONENT_REGEX, desc: 'Package channel'
+ requires :recipe_revision, type: String, regexp: CONAN_REVISION_REGEX, desc: 'Conan Recipe Revision'
+ end
+ namespace 'files/:package_name/:package_version/:package_username/:package_channel/:recipe_revision', requirements: PACKAGE_REQUIREMENTS do
+ before do
+ authenticate_non_get!
+ end
+
+ params do
+ requires :file_name, type: String, desc: 'Package file name', regexp: Gitlab::Regex.conan_file_name_regex
+ end
+ namespace 'export/:file_name', requirements: FILE_NAME_REQUIREMENTS do
+ desc 'Download recipe files' do
+ detail 'This feature was introduced in GitLab 12.6'
+ end
+ route_setting :authentication, job_token_allowed: true
+ get do
+ download_package_file(:recipe_file)
+ end
+
+ desc 'Upload recipe package files' do
+ detail 'This feature was introduced in GitLab 12.6'
+ end
+ params do
+ use :workhorse_upload_params
+ end
+ route_setting :authentication, job_token_allowed: true
+ put do
+ upload_package_file(:recipe_file)
+ end
+
+ desc 'Workhorse authorize the conan recipe file' do
+ detail 'This feature was introduced in GitLab 12.6'
+ end
+ route_setting :authentication, job_token_allowed: true
+ put 'authorize' do
+ authorize_workhorse!(subject: project)
+ end
+ end
+
+ params do
+ requires :conan_package_reference, type: String, desc: 'Conan Package ID'
+ requires :package_revision, type: String, desc: 'Conan Package Revision'
+ requires :file_name, type: String, desc: 'Package file name', regexp: Gitlab::Regex.conan_file_name_regex
+ end
+ namespace 'package/:conan_package_reference/:package_revision/:file_name', requirements: FILE_NAME_REQUIREMENTS do
+ desc 'Download package files' do
+ detail 'This feature was introduced in GitLab 12.5'
+ end
+ route_setting :authentication, job_token_allowed: true
+ get do
+ download_package_file(:package_file)
+ end
+
+ desc 'Workhorse authorize the conan package file' do
+ detail 'This feature was introduced in GitLab 12.6'
+ end
+ route_setting :authentication, job_token_allowed: true
+ put 'authorize' do
+ authorize_workhorse!(subject: project)
+ end
+
+ desc 'Upload package files' do
+ detail 'This feature was introduced in GitLab 12.6'
+ end
+ params do
+ use :workhorse_upload_params
+ end
+ route_setting :authentication, job_token_allowed: true
+ put do
+ upload_package_file(:package_file)
+ end
+ end
+ end
+ end
+
+ helpers do
+ include Gitlab::Utils::StrongMemoize
+ include ::API::Helpers::RelatedResourcesHelpers
+ include ::API::Helpers::Packages::Conan::ApiHelpers
+ end
+ end
+end
diff --git a/lib/api/container_registry_event.rb b/lib/api/container_registry_event.rb
index 6d93cc65336..0b7c35cadbd 100644
--- a/lib/api/container_registry_event.rb
+++ b/lib/api/container_registry_event.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class ContainerRegistryEvent < Grape::API
+ class ContainerRegistryEvent < Grape::API::Instance
DOCKER_DISTRIBUTION_EVENTS_V1_JSON = 'application/vnd.docker.distribution.events.v1+json'
before { authenticate_registry_notification! }
diff --git a/lib/api/deploy_keys.rb b/lib/api/deploy_keys.rb
index 3259b615369..ad37b7578ad 100644
--- a/lib/api/deploy_keys.rb
+++ b/lib/api/deploy_keys.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class DeployKeys < Grape::API
+ class DeployKeys < Grape::API::Instance
include PaginationParams
before { authenticate! }
diff --git a/lib/api/deploy_tokens.rb b/lib/api/deploy_tokens.rb
index 0fbbd96cf02..96aa2445f56 100644
--- a/lib/api/deploy_tokens.rb
+++ b/lib/api/deploy_tokens.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class DeployTokens < Grape::API
+ class DeployTokens < Grape::API::Instance
include PaginationParams
helpers do
@@ -56,7 +56,7 @@ module API
params do
requires :name, type: String, desc: "New deploy token's name"
- requires :scopes, type: Array[String], values: ::DeployToken::AVAILABLE_SCOPES.map(&:to_s),
+ requires :scopes, type: Array[String], coerce_with: ::API::Validations::Types::CommaSeparatedToArray.coerce, values: ::DeployToken::AVAILABLE_SCOPES.map(&:to_s),
desc: 'Indicates the deploy token scopes. Must be at least one of "read_repository", "read_registry", "write_registry", "read_package_registry", or "write_package_registry".'
optional :expires_at, type: DateTime, desc: 'Expiration date for the deploy token. Does not expire if no value is provided.'
optional :username, type: String, desc: 'Username for deploy token. Default is `gitlab+deploy-token-{n}`'
@@ -119,7 +119,7 @@ module API
params do
requires :name, type: String, desc: 'The name of the deploy token'
- requires :scopes, type: Array[String], values: ::DeployToken::AVAILABLE_SCOPES.map(&:to_s),
+ requires :scopes, type: Array[String], coerce_with: ::API::Validations::Types::CommaSeparatedToArray.coerce, values: ::DeployToken::AVAILABLE_SCOPES.map(&:to_s),
desc: 'Indicates the deploy token scopes. Must be at least one of "read_repository", "read_registry", "write_registry", "read_package_registry", or "write_package_registry".'
optional :expires_at, type: DateTime, desc: 'Expiration date for the deploy token. Does not expire if no value is provided.'
optional :username, type: String, desc: 'Username for deploy token. Default is `gitlab+deploy-token-{n}`'
diff --git a/lib/api/deployments.rb b/lib/api/deployments.rb
index cb1dca11e87..87144fd31cc 100644
--- a/lib/api/deployments.rb
+++ b/lib/api/deployments.rb
@@ -2,7 +2,7 @@
module API
# Deployments RESTful API endpoints
- class Deployments < Grape::API
+ class Deployments < Grape::API::Instance
include PaginationParams
before { authenticate! }
diff --git a/lib/api/discussions.rb b/lib/api/discussions.rb
index 7b453ada41c..c431ec8e1e4 100644
--- a/lib/api/discussions.rb
+++ b/lib/api/discussions.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class Discussions < Grape::API
+ class Discussions < Grape::API::Instance
include PaginationParams
helpers ::API::Helpers::NotesHelpers
helpers ::RendersNotes
@@ -76,10 +76,18 @@ module API
optional :y, type: Integer, desc: 'Y coordinate in the image'
optional :line_range, type: Hash, desc: 'Multi-line start and end' do
- requires :start_line_code, type: String, desc: 'Start line code for multi-line note'
- requires :end_line_code, type: String, desc: 'End line code for multi-line note'
- requires :start_line_type, type: String, desc: 'Start line type for multi-line note'
- requires :end_line_type, type: String, desc: 'End line type for multi-line note'
+ optional :start, type: Hash do
+ optional :line_code, type: String, desc: 'Start line code for multi-line note'
+ optional :type, type: String, desc: 'Start line type for multi-line note'
+ optional :old_line, type: String, desc: 'Start old_line line number'
+ optional :new_line, type: String, desc: 'Start new_line line number'
+ end
+ optional :end, type: Hash do
+ optional :line_code, type: String, desc: 'End line code for multi-line note'
+ optional :type, type: String, desc: 'End line type for multi-line note'
+ optional :old_line, type: String, desc: 'End old_line line number'
+ optional :new_line, type: String, desc: 'End new_line line number'
+ end
end
end
end
diff --git a/lib/api/entities/approvals.rb b/lib/api/entities/approvals.rb
new file mode 100644
index 00000000000..74973772831
--- /dev/null
+++ b/lib/api/entities/approvals.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ class Approvals < Grape::Entity
+ expose :user, using: ::API::Entities::UserBasic
+ end
+ end
+end
diff --git a/lib/api/entities/basic_project_details.rb b/lib/api/entities/basic_project_details.rb
index 13bc19456b3..cf0b32bed26 100644
--- a/lib/api/entities/basic_project_details.rb
+++ b/lib/api/entities/basic_project_details.rb
@@ -33,7 +33,8 @@ module API
project.avatar_url(only_path: false)
end
- expose :star_count, :forks_count
+ expose :forks_count
+ expose :star_count
expose :last_activity_at
expose :namespace, using: 'API::Entities::NamespaceBasic'
expose :custom_attributes, using: 'API::Entities::CustomAttribute', if: :with_custom_attributes
diff --git a/lib/api/entities/conan_package/conan_package_manifest.rb b/lib/api/entities/conan_package/conan_package_manifest.rb
new file mode 100644
index 00000000000..e6acfe1912f
--- /dev/null
+++ b/lib/api/entities/conan_package/conan_package_manifest.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ module ConanPackage
+ class ConanPackageManifest < Grape::Entity
+ expose :package_urls, merge: true
+ end
+ end
+ end
+end
diff --git a/lib/api/entities/conan_package/conan_package_snapshot.rb b/lib/api/entities/conan_package/conan_package_snapshot.rb
new file mode 100644
index 00000000000..d7fdda09b5a
--- /dev/null
+++ b/lib/api/entities/conan_package/conan_package_snapshot.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ module ConanPackage
+ class ConanPackageSnapshot < Grape::Entity
+ expose :package_snapshot, merge: true
+ end
+ end
+ end
+end
diff --git a/lib/api/entities/conan_package/conan_recipe_manifest.rb b/lib/api/entities/conan_package/conan_recipe_manifest.rb
new file mode 100644
index 00000000000..ecaa142cef9
--- /dev/null
+++ b/lib/api/entities/conan_package/conan_recipe_manifest.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ module ConanPackage
+ class ConanRecipeManifest < Grape::Entity
+ expose :recipe_urls, merge: true
+ end
+ end
+ end
+end
diff --git a/lib/api/entities/conan_package/conan_recipe_snapshot.rb b/lib/api/entities/conan_package/conan_recipe_snapshot.rb
new file mode 100644
index 00000000000..09a60d23727
--- /dev/null
+++ b/lib/api/entities/conan_package/conan_recipe_snapshot.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ module ConanPackage
+ class ConanRecipeSnapshot < Grape::Entity
+ expose :recipe_snapshot, merge: true
+ end
+ end
+ end
+end
diff --git a/lib/api/entities/conan_package/conan_upload_urls.rb b/lib/api/entities/conan_package/conan_upload_urls.rb
new file mode 100644
index 00000000000..c14963c87f5
--- /dev/null
+++ b/lib/api/entities/conan_package/conan_upload_urls.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ module ConanPackage
+ class ConanUploadUrls < Grape::Entity
+ expose :upload_urls, merge: true
+ end
+ end
+ end
+end
diff --git a/lib/api/entities/entity_helpers.rb b/lib/api/entities/entity_helpers.rb
new file mode 100644
index 00000000000..3a68044ad35
--- /dev/null
+++ b/lib/api/entities/entity_helpers.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ module EntityHelpers
+ def can_read(attr, &block)
+ ->(obj, opts) { Ability.allowed?(opts[:user], "read_#{attr}".to_sym, yield(obj)) }
+ end
+
+ def can_destroy(attr, &block)
+ ->(obj, opts) { Ability.allowed?(opts[:user], "destroy_#{attr}".to_sym, yield(obj)) }
+ end
+
+ def expose_restricted(attr, &block)
+ expose attr, if: can_read(attr, &block)
+ end
+ end
+ end
+end
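
A hedged usage sketch of these helpers; the entity, attribute, and ability names below are invented. The exposed attribute doubles as the ability suffix, and the block picks the record the read_* check runs against.

module API
  module Entities
    class ExampleProjectEntity < Grape::Entity
      extend ::API::Entities::EntityHelpers

      # Exposes object.statistics only when
      # Ability.allowed?(opts[:user], :read_statistics, object).
      expose_restricted :statistics, &:itself
    end
  end
end

Rendering would look like ExampleProjectEntity.represent(project, user: current_user), since the generated condition reads opts[:user].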
diff --git a/lib/api/entities/go_module_version.rb b/lib/api/entities/go_module_version.rb
new file mode 100644
index 00000000000..643e25df9e0
--- /dev/null
+++ b/lib/api/entities/go_module_version.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ class GoModuleVersion < Grape::Entity
+ expose :name, as: 'Version'
+ expose :time, as: 'Time'
+ end
+ end
+end
diff --git a/lib/api/entities/group.rb b/lib/api/entities/group.rb
index 8a6a5b7057c..e430eba4880 100644
--- a/lib/api/entities/group.rb
+++ b/lib/api/entities/group.rb
@@ -31,6 +31,7 @@ module API
expose :wiki_size
expose :lfs_objects_size
expose :build_artifacts_size, as: :job_artifacts_size
+ expose :snippets_size
end
end
end
diff --git a/lib/api/entities/group_detail.rb b/lib/api/entities/group_detail.rb
index 93dc41da81d..2d9d4ca7992 100644
--- a/lib/api/entities/group_detail.rb
+++ b/lib/api/entities/group_detail.rb
@@ -7,6 +7,7 @@ module API
SharedGroupWithGroup.represent(group.shared_with_group_links.public_or_visible_to_user(group, options[:current_user]))
end
expose :runners_token, if: lambda { |group, options| options[:user_can_admin_group] }
+
expose :projects, using: Entities::Project do |group, options|
projects = GroupProjectsFinder.new(
group: group,
diff --git a/lib/api/entities/issuable_entity.rb b/lib/api/entities/issuable_entity.rb
index 5bee59de539..e2c674c0b8b 100644
--- a/lib/api/entities/issuable_entity.rb
+++ b/lib/api/entities/issuable_entity.rb
@@ -8,10 +8,38 @@ module API
expose :title, :description
expose :state, :created_at, :updated_at
- # Avoids an N+1 query when metadata is included
- def issuable_metadata(subject, options, method, args = nil)
- cached_subject = options.dig(:issuable_metadata, subject.id)
- (cached_subject || subject).public_send(method, *args) # rubocop: disable GitlabSecurity/PublicSend
+ def presented
+ lazy_issuable_metadata
+
+ super
+ end
+
+ def issuable_metadata
+ options.dig(:issuable_metadata, object.id) || lazy_issuable_metadata
+ end
+
+ protected
+
+ # This method will preload the `issuable_metadata` for the current
+ # entity according to the current top-level entity options, such
+ # as the current_user.
+ def lazy_issuable_metadata
+ BatchLoader.for(object).batch(key: [current_user, :issuable_metadata]) do |models, loader, args|
+ current_user = args[:key].first
+
+ issuable_metadata = Gitlab::IssuableMetadata.new(current_user, models)
+ metadata_by_id = issuable_metadata.data
+
+ models.each do |issuable|
+ loader.call(issuable, metadata_by_id[issuable.id])
+ end
+ end
+ end
+
+ private
+
+ def current_user
+ options[:current_user]
end
end
end
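
A hedged, generic sketch of the BatchLoader pattern the entity above relies on; the loader block is a toy stand-in, since Gitlab::IssuableMetadata performs the real aggregation.

require 'batch_loader' # batch-loader gem

def lazy_note_count(issue, current_user)
  BatchLoader.for(issue).batch(key: [current_user, :toy_metadata]) do |issues, loader, args|
    _user = args[:key].first # the batch key carries the current user, as above

    # One pass over every issue collected for this key instead of N queries.
    issues.each { |i| loader.call(i, i.notes.count) }
  end
end

The returned value is a lazy proxy; the batch block only runs when the value is first used, for example during entity serialization.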
diff --git a/lib/api/entities/issue_basic.rb b/lib/api/entities/issue_basic.rb
index af92f4124f1..cf96c6556ec 100644
--- a/lib/api/entities/issue_basic.rb
+++ b/lib/api/entities/issue_basic.rb
@@ -21,10 +21,10 @@ module API
issue.assignees.first
end
- expose(:user_notes_count) { |issue, options| issuable_metadata(issue, options, :user_notes_count) }
- expose(:merge_requests_count) { |issue, options| issuable_metadata(issue, options, :merge_requests_count, options[:current_user]) }
- expose(:upvotes) { |issue, options| issuable_metadata(issue, options, :upvotes) }
- expose(:downvotes) { |issue, options| issuable_metadata(issue, options, :downvotes) }
+ expose(:user_notes_count) { |issue, options| issuable_metadata.user_notes_count }
+ expose(:merge_requests_count) { |issue, options| issuable_metadata.merge_requests_count }
+ expose(:upvotes) { |issue, options| issuable_metadata.upvotes }
+ expose(:downvotes) { |issue, options| issuable_metadata.downvotes }
expose :due_date
expose :confidential
expose :discussion_locked
diff --git a/lib/api/entities/merge_request_approvals.rb b/lib/api/entities/merge_request_approvals.rb
new file mode 100644
index 00000000000..e3d58d687c4
--- /dev/null
+++ b/lib/api/entities/merge_request_approvals.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ class MergeRequestApprovals < Grape::Entity
+ expose :user_has_approved do |merge_request, options|
+ merge_request.approved_by?(options[:current_user])
+ end
+
+ expose :user_can_approve do |merge_request, options|
+ !merge_request.approved_by?(options[:current_user]) &&
+ options[:current_user].can?(:approve_merge_request, merge_request)
+ end
+
+ expose :approved do |merge_request|
+ merge_request.approvals.present?
+ end
+
+ expose :approved_by, using: ::API::Entities::Approvals do |merge_request|
+ merge_request.approvals
+ end
+ end
+ end
+end
diff --git a/lib/api/entities/merge_request_basic.rb b/lib/api/entities/merge_request_basic.rb
index 1643f267938..69523e3637b 100644
--- a/lib/api/entities/merge_request_basic.rb
+++ b/lib/api/entities/merge_request_basic.rb
@@ -22,13 +22,11 @@ module API
MarkupHelper.markdown_field(entity, :description)
end
expose :target_branch, :source_branch
- expose(:user_notes_count) { |merge_request, options| issuable_metadata(merge_request, options, :user_notes_count) }
- expose(:upvotes) { |merge_request, options| issuable_metadata(merge_request, options, :upvotes) }
- expose(:downvotes) { |merge_request, options| issuable_metadata(merge_request, options, :downvotes) }
- expose :assignee, using: ::API::Entities::UserBasic do |merge_request|
- merge_request.assignee
- end
- expose :author, :assignees, using: Entities::UserBasic
+ expose(:user_notes_count) { |merge_request, options| issuable_metadata.user_notes_count }
+ expose(:upvotes) { |merge_request, options| issuable_metadata.upvotes }
+ expose(:downvotes) { |merge_request, options| issuable_metadata.downvotes }
+
+ expose :author, :assignees, :assignee, using: Entities::UserBasic
expose :source_project_id, :target_project_id
expose :labels do |merge_request, options|
if options[:with_labels_details]
@@ -57,9 +55,12 @@ module API
expose :discussion_locked
expose :should_remove_source_branch?, as: :should_remove_source_branch
expose :force_remove_source_branch?, as: :force_remove_source_branch
- expose :allow_collaboration, if: -> (merge_request, _) { merge_request.for_fork? }
- # Deprecated
- expose :allow_collaboration, as: :allow_maintainer_to_push, if: -> (merge_request, _) { merge_request.for_fork? }
+
+ with_options if: -> (merge_request, _) { merge_request.for_fork? } do
+ expose :allow_collaboration
+ # Deprecated
+ expose :allow_collaboration, as: :allow_maintainer_to_push
+ end
# reference is deprecated in favour of references
# Introduced [Gitlab 12.6](https://gitlab.com/gitlab-org/gitlab/merge_requests/20354)
diff --git a/lib/api/entities/npm_package.rb b/lib/api/entities/npm_package.rb
new file mode 100644
index 00000000000..b094f3acdb6
--- /dev/null
+++ b/lib/api/entities/npm_package.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ class NpmPackage < Grape::Entity
+ expose :name
+ expose :versions
+ expose :dist_tags, as: 'dist-tags'
+ end
+ end
+end
diff --git a/lib/api/entities/npm_package_tag.rb b/lib/api/entities/npm_package_tag.rb
new file mode 100644
index 00000000000..7f458fa037f
--- /dev/null
+++ b/lib/api/entities/npm_package_tag.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ class NpmPackageTag < Grape::Entity
+ expose :dist_tags, merge: true
+ end
+ end
+end
diff --git a/lib/api/entities/nuget/dependency.rb b/lib/api/entities/nuget/dependency.rb
new file mode 100644
index 00000000000..b61c37f5882
--- /dev/null
+++ b/lib/api/entities/nuget/dependency.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ module Nuget
+ class Dependency < Grape::Entity
+ expose :id, as: :@id
+ expose :type, as: :@type
+ expose :name, as: :id
+ expose :range
+ end
+ end
+ end
+end
diff --git a/lib/api/entities/nuget/dependency_group.rb b/lib/api/entities/nuget/dependency_group.rb
new file mode 100644
index 00000000000..dcab9359fcf
--- /dev/null
+++ b/lib/api/entities/nuget/dependency_group.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ module Nuget
+ class DependencyGroup < Grape::Entity
+ expose :id, as: :@id
+ expose :type, as: :@type
+ expose :target_framework, as: :targetFramework, expose_nil: false
+ expose :dependencies, using: ::API::Entities::Nuget::Dependency
+ end
+ end
+ end
+end
diff --git a/lib/api/entities/nuget/metadatum.rb b/lib/api/entities/nuget/metadatum.rb
new file mode 100644
index 00000000000..87caef41a85
--- /dev/null
+++ b/lib/api/entities/nuget/metadatum.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ module Nuget
+ class Metadatum < Grape::Entity
+ expose :project_url, as: :projectUrl, expose_nil: false
+ expose :license_url, as: :licenseUrl, expose_nil: false
+ expose :icon_url, as: :iconUrl, expose_nil: false
+ end
+ end
+ end
+end
diff --git a/lib/api/entities/nuget/package_metadata.rb b/lib/api/entities/nuget/package_metadata.rb
new file mode 100644
index 00000000000..e1c2a1ae161
--- /dev/null
+++ b/lib/api/entities/nuget/package_metadata.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ module Nuget
+ class PackageMetadata < Grape::Entity
+ expose :json_url, as: :@id
+ expose :archive_url, as: :packageContent
+ expose :catalog_entry, as: :catalogEntry, using: ::API::Entities::Nuget::PackageMetadataCatalogEntry
+ end
+ end
+ end
+end
diff --git a/lib/api/entities/nuget/package_metadata_catalog_entry.rb b/lib/api/entities/nuget/package_metadata_catalog_entry.rb
new file mode 100644
index 00000000000..5533f857596
--- /dev/null
+++ b/lib/api/entities/nuget/package_metadata_catalog_entry.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ module Nuget
+ class PackageMetadataCatalogEntry < Grape::Entity
+ expose :json_url, as: :@id
+ expose :authors
+ expose :dependency_groups, as: :dependencyGroups, using: ::API::Entities::Nuget::DependencyGroup
+ expose :package_name, as: :id
+ expose :package_version, as: :version
+ expose :tags
+ expose :archive_url, as: :packageContent
+ expose :summary
+ expose :metadatum, using: ::API::Entities::Nuget::Metadatum, merge: true
+ end
+ end
+ end
+end
diff --git a/lib/api/entities/nuget/packages_metadata.rb b/lib/api/entities/nuget/packages_metadata.rb
new file mode 100644
index 00000000000..1cdf2491725
--- /dev/null
+++ b/lib/api/entities/nuget/packages_metadata.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ module Nuget
+ class PackagesMetadata < Grape::Entity
+ expose :count
+ expose :items, using: ::API::Entities::Nuget::PackagesMetadataItem
+ end
+ end
+ end
+end
diff --git a/lib/api/entities/nuget/packages_metadata_item.rb b/lib/api/entities/nuget/packages_metadata_item.rb
new file mode 100644
index 00000000000..84cc79166f3
--- /dev/null
+++ b/lib/api/entities/nuget/packages_metadata_item.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ module Nuget
+ class PackagesMetadataItem < Grape::Entity
+ expose :json_url, as: :@id
+ expose :lower_version, as: :lower
+ expose :upper_version, as: :upper
+ expose :packages_count, as: :count
+ expose :packages, as: :items, using: ::API::Entities::Nuget::PackageMetadata
+ end
+ end
+ end
+end
diff --git a/lib/api/entities/nuget/packages_versions.rb b/lib/api/entities/nuget/packages_versions.rb
new file mode 100644
index 00000000000..498c6970d5c
--- /dev/null
+++ b/lib/api/entities/nuget/packages_versions.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ module Nuget
+ class PackagesVersions < Grape::Entity
+ expose :versions
+ end
+ end
+ end
+end
diff --git a/lib/api/entities/nuget/search_result.rb b/lib/api/entities/nuget/search_result.rb
new file mode 100644
index 00000000000..8e028cbad95
--- /dev/null
+++ b/lib/api/entities/nuget/search_result.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ module Nuget
+ class SearchResult < Grape::Entity
+ expose :type, as: :@type
+ expose :authors
+ expose :name, as: :id
+ expose :name, as: :title
+ expose :summary
+ expose :total_downloads, as: :totalDownloads
+ expose :verified
+ expose :version
+ expose :versions, using: ::API::Entities::Nuget::SearchResultVersion
+ expose :tags
+ expose :metadatum, using: ::API::Entities::Nuget::Metadatum, merge: true
+ end
+ end
+ end
+end
diff --git a/lib/api/entities/nuget/search_result_version.rb b/lib/api/entities/nuget/search_result_version.rb
new file mode 100644
index 00000000000..9032c964c44
--- /dev/null
+++ b/lib/api/entities/nuget/search_result_version.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ module Nuget
+ class SearchResultVersion < Grape::Entity
+ expose :json_url, as: :@id
+ expose :version
+ expose :downloads
+ end
+ end
+ end
+end
diff --git a/lib/api/entities/nuget/search_results.rb b/lib/api/entities/nuget/search_results.rb
new file mode 100644
index 00000000000..22a77dc7b6c
--- /dev/null
+++ b/lib/api/entities/nuget/search_results.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ module Nuget
+ class SearchResults < Grape::Entity
+ expose :total_count, as: :totalHits
+ expose :data, using: ::API::Entities::Nuget::SearchResult
+ end
+ end
+ end
+end
diff --git a/lib/api/entities/nuget/service_index.rb b/lib/api/entities/nuget/service_index.rb
new file mode 100644
index 00000000000..e57bd04adb9
--- /dev/null
+++ b/lib/api/entities/nuget/service_index.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ module Nuget
+ class ServiceIndex < Grape::Entity
+ expose :version
+ expose :resources
+ end
+ end
+ end
+end
diff --git a/lib/api/entities/package.rb b/lib/api/entities/package.rb
new file mode 100644
index 00000000000..73473f16da9
--- /dev/null
+++ b/lib/api/entities/package.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ class Package < Grape::Entity
+ include ::API::Helpers::RelatedResourcesHelpers
+ extend ::API::Entities::EntityHelpers
+
+ expose :id
+ expose :name
+ expose :version
+ expose :package_type
+
+ expose :_links do
+ expose :web_path do |package|
+ if ::Gitlab.ee?
+ ::Gitlab::Routing.url_helpers.project_package_path(package.project, package)
+ end
+ end
+
+ expose :delete_api_path, if: can_destroy(:package, &:project) do |package|
+ expose_url api_v4_projects_packages_path(package_id: package.id, id: package.project_id)
+ end
+ end
+
+ expose :created_at
+ expose :project_id, if: ->(_, opts) { opts[:group] }
+ expose :project_path, if: ->(obj, opts) { opts[:group] && Ability.allowed?(opts[:user], :read_project, obj.project) }
+ expose :tags
+
+ expose :pipeline, if: ->(package) { package.build_info }, using: Package::Pipeline
+
+ expose :versions, using: ::API::Entities::PackageVersion
+
+ private
+
+ def project_path
+ object.project.full_path
+ end
+ end
+ end
+end
diff --git a/lib/api/entities/package/pipeline.rb b/lib/api/entities/package/pipeline.rb
new file mode 100644
index 00000000000..e91a12e47fa
--- /dev/null
+++ b/lib/api/entities/package/pipeline.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ class Package < Grape::Entity
+ class Pipeline < ::API::Entities::PipelineBasic
+ expose :user, using: ::API::Entities::UserBasic
+ end
+ end
+ end
+end
diff --git a/lib/api/entities/package_file.rb b/lib/api/entities/package_file.rb
new file mode 100644
index 00000000000..8be4e5a4316
--- /dev/null
+++ b/lib/api/entities/package_file.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ class PackageFile < Grape::Entity
+ expose :id, :package_id, :created_at
+ expose :file_name, :size
+ expose :file_md5, :file_sha1
+ end
+ end
+end
diff --git a/lib/api/entities/package_version.rb b/lib/api/entities/package_version.rb
new file mode 100644
index 00000000000..5f3e86c3229
--- /dev/null
+++ b/lib/api/entities/package_version.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ class PackageVersion < Grape::Entity
+ expose :id
+ expose :version
+ expose :created_at
+ expose :tags
+
+ expose :pipeline, if: ->(package) { package.build_info }, using: Package::Pipeline
+ end
+ end
+end
diff --git a/lib/api/entities/project.rb b/lib/api/entities/project.rb
index 55a57501858..e3c5177cd0b 100644
--- a/lib/api/entities/project.rb
+++ b/lib/api/entities/project.rb
@@ -51,6 +51,8 @@ module API
expose(:wiki_enabled) { |project, options| project.feature_available?(:wiki, options[:current_user]) }
expose(:jobs_enabled) { |project, options| project.feature_available?(:builds, options[:current_user]) }
expose(:snippets_enabled) { |project, options| project.feature_available?(:snippets, options[:current_user]) }
+ expose :service_desk_enabled
+ expose :service_desk_address
expose(:can_create_merge_request_in) do |project, options|
Ability.allowed?(options[:current_user], :create_merge_request_in, project)
diff --git a/lib/api/entities/project_statistics.rb b/lib/api/entities/project_statistics.rb
index e5f6165da31..32201e88eaf 100644
--- a/lib/api/entities/project_statistics.rb
+++ b/lib/api/entities/project_statistics.rb
@@ -9,6 +9,7 @@ module API
expose :wiki_size
expose :lfs_objects_size
expose :build_artifacts_size, as: :job_artifacts_size
+ expose :snippets_size
end
end
end
diff --git a/lib/api/entities/release.rb b/lib/api/entities/release.rb
index 99fa496d368..afe14cf33cf 100644
--- a/lib/api/entities/release.rb
+++ b/lib/api/entities/release.rb
@@ -5,9 +5,7 @@ module API
class Release < Grape::Entity
include ::API::Helpers::Presentable
- expose :name do |release, _|
- can_download_code? ? release.name : "Release-#{release.id}"
- end
+ expose :name
expose :tag, as: :tag_name, if: ->(_, _) { can_download_code? }
expose :description
expose :description_html do |entity|
@@ -23,10 +21,7 @@ module API
expose :tag_path, expose_nil: false
expose :assets do
- expose :assets_count, as: :count do |release, _|
- assets_to_exclude = can_download_code? ? [] : [:sources]
- release.assets_count(except: assets_to_exclude)
- end
+ expose :assets_count, as: :count
expose :sources, using: Entities::Releases::Source, if: ->(_, _) { can_download_code? }
expose :links, using: Entities::Releases::Link do |release, options|
release.links.sorted
diff --git a/lib/api/entities/resource_state_event.rb b/lib/api/entities/resource_state_event.rb
new file mode 100644
index 00000000000..f71a38e4115
--- /dev/null
+++ b/lib/api/entities/resource_state_event.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ class ResourceStateEvent < Grape::Entity
+ expose :id
+ expose :user, using: Entities::UserBasic
+ expose :created_at
+ expose :resource_type do |event, _options|
+ event.issuable.class.name
+ end
+ expose :resource_id do |event, _options|
+ event.issuable.id
+ end
+ expose :state
+ end
+ end
+end
diff --git a/lib/api/entities/snippet.rb b/lib/api/entities/snippet.rb
index 19c89603cbc..40488eb882d 100644
--- a/lib/api/entities/snippet.rb
+++ b/lib/api/entities/snippet.rb
@@ -17,6 +17,18 @@ module API
expose :file_name do |snippet|
snippet.file_name_on_repo || snippet.file_name
end
+ expose :files, if: ->(snippet, options) { snippet_multiple_files?(snippet, options[:current_user]) } do |snippet, options|
+ snippet.list_files.map do |file|
+ {
+ path: file,
+ raw_url: Gitlab::UrlBuilder.build(snippet, file: file, ref: snippet.repository.root_ref)
+ }
+ end
+ end
+
+ def snippet_multiple_files?(snippet, current_user)
+ ::Feature.enabled?(:snippet_multiple_files, current_user) && snippet.repository_exists?
+ end
end
end
end
diff --git a/lib/api/entities/user.rb b/lib/api/entities/user.rb
index adf954ab02d..4aa5c9b7236 100644
--- a/lib/api/entities/user.rb
+++ b/lib/api/entities/user.rb
@@ -5,7 +5,7 @@ module API
class User < UserBasic
include UsersHelper
expose :created_at, if: ->(user, opts) { Ability.allowed?(opts[:current_user], :read_user_profile, user) }
- expose :bio, :location, :public_email, :skype, :linkedin, :twitter, :website_url, :organization, :job_title
+ expose :bio, :bio_html, :location, :public_email, :skype, :linkedin, :twitter, :website_url, :organization, :job_title
expose :work_information do |user|
work_information(user)
end
diff --git a/lib/api/environments.rb b/lib/api/environments.rb
index 28019ce7796..b825904e2c5 100644
--- a/lib/api/environments.rb
+++ b/lib/api/environments.rb
@@ -2,7 +2,7 @@
module API
# Environments RESTful API endpoints
- class Environments < Grape::API
+ class Environments < Grape::API::Instance
include PaginationParams
before { authenticate! }
diff --git a/lib/api/error_tracking.rb b/lib/api/error_tracking.rb
index 14888037f53..64ec6f0a57a 100644
--- a/lib/api/error_tracking.rb
+++ b/lib/api/error_tracking.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class ErrorTracking < Grape::API
+ class ErrorTracking < Grape::API::Instance
before { authenticate! }
params do
diff --git a/lib/api/events.rb b/lib/api/events.rb
index e4c017fab42..0b79431a76d 100644
--- a/lib/api/events.rb
+++ b/lib/api/events.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class Events < Grape::API
+ class Events < Grape::API::Instance
include PaginationParams
include APIGuard
helpers ::API::Helpers::EventsHelpers
diff --git a/lib/api/features.rb b/lib/api/features.rb
index 3fb3fc92e42..9d011d658f6 100644
--- a/lib/api/features.rb
+++ b/lib/api/features.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class Features < Grape::API
+ class Features < Grape::API::Instance
before { authenticated_as_admin! }
helpers do
diff --git a/lib/api/files.rb b/lib/api/files.rb
index 76ab9a2190b..748bdfa894d 100644
--- a/lib/api/files.rb
+++ b/lib/api/files.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class Files < Grape::API
+ class Files < Grape::API::Instance
include APIGuard
FILE_ENDPOINT_REQUIREMENTS = API::NAMESPACE_OR_PROJECT_REQUIREMENTS.merge(file_path: API::NO_SLASH_URL_PART_REGEX)
@@ -56,7 +56,7 @@ module API
ref: params[:ref],
blob_id: @blob.id,
commit_id: @commit.id,
- last_commit_id: @repo.last_commit_id_for_path(@commit.sha, params[:file_path])
+ last_commit_id: @repo.last_commit_id_for_path(@commit.sha, params[:file_path], literal_pathspec: true)
}
end
diff --git a/lib/api/freeze_periods.rb b/lib/api/freeze_periods.rb
index 9c7e5a5832d..b8254ee9ab4 100644
--- a/lib/api/freeze_periods.rb
+++ b/lib/api/freeze_periods.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class FreezePeriods < Grape::API
+ class FreezePeriods < Grape::API::Instance
include PaginationParams
before { authenticate! }
diff --git a/lib/api/go_proxy.rb b/lib/api/go_proxy.rb
new file mode 100755
index 00000000000..c0207f9169c
--- /dev/null
+++ b/lib/api/go_proxy.rb
@@ -0,0 +1,135 @@
+# frozen_string_literal: true
+module API
+ class GoProxy < Grape::API::Instance
+ helpers Gitlab::Golang
+ helpers ::API::Helpers::PackagesHelpers
+
+ # basic semver, except case encoded (A => !a)
+ MODULE_VERSION_REGEX = /v(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-([-.!a-z0-9]+))?(?:\+([-.!a-z0-9]+))?/.freeze
+
+ MODULE_VERSION_REQUIREMENTS = { module_version: MODULE_VERSION_REGEX }.freeze
+
+ before { require_packages_enabled! }
+
+ helpers do
+ def case_decode(str)
+ # Converts "github.com/!azure" to "github.com/Azure"
+ #
+ # From `go help goproxy`:
+ #
+ # > To avoid problems when serving from case-sensitive file systems,
+ # > the <module> and <version> elements are case-encoded, replacing
+ # > every uppercase letter with an exclamation mark followed by the
+ # > corresponding lower-case letter: github.com/Azure encodes as
+ # > github.com/!azure.
+
+ str.gsub(/![[:alpha:]]/) { |s| s[1..].upcase }
+ end
+
+ def find_project!(id)
+ # based on API::Helpers::Packages::BasicAuthHelpers#authorized_project_find!
+
+ project = find_project(id)
+
+ return project if project && can?(current_user, :read_project, project)
+
+ if current_user
+ not_found!('Project')
+ else
+ unauthorized!
+ end
+ end
+
+ def find_module
+ not_found! unless Feature.enabled?(:go_proxy, user_project)
+
+ module_name = case_decode params[:module_name]
+ bad_request!('Module Name') if module_name.blank?
+
+ mod = ::Packages::Go::ModuleFinder.new(user_project, module_name).execute
+
+ not_found! unless mod
+
+ mod
+ end
+
+ def find_version
+ module_version = case_decode params[:module_version]
+ ver = ::Packages::Go::VersionFinder.new(find_module).find(module_version)
+
+ not_found! unless ver&.valid?
+
+ ver
+
+ rescue ArgumentError
+ not_found!
+ end
+ end
+
+ params do
+ requires :id, type: String, desc: 'The ID of a project'
+ requires :module_name, type: String, desc: 'Module name', coerce_with: ->(val) { CGI.unescape(val) }
+ end
+ route_setting :authentication, job_token_allowed: true, basic_auth_personal_access_token: true
+ resource :projects, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
+ before do
+ authorize_read_package!
+ end
+
+ namespace ':id/packages/go/*module_name/@v' do
+ desc 'Get all tagged versions for a given Go module' do
+ detail 'See `go help goproxy`, GET $GOPROXY/<module>/@v/list. This feature was introduced in GitLab 13.1.'
+ end
+ get 'list' do
+ mod = find_module
+
+ content_type 'text/plain'
+ mod.versions.map { |t| t.name }.join("\n")
+ end
+
+ desc 'Get information about the given module version' do
+ detail 'See `go help goproxy`, GET $GOPROXY/<module>/@v/<version>.info. This feature was introduced in GitLab 13.1.'
+ success ::API::Entities::GoModuleVersion
+ end
+ params do
+ requires :module_version, type: String, desc: 'Module version'
+ end
+ get ':module_version.info', requirements: MODULE_VERSION_REQUIREMENTS do
+ ver = find_version
+
+ present ::Packages::Go::ModuleVersionPresenter.new(ver), with: ::API::Entities::GoModuleVersion
+ end
+
+ desc 'Get the module file of the given module version' do
+ detail 'See `go help goproxy`, GET $GOPROXY/<module>/@v/<version>.mod. This feature was introduced in GitLab 13.1.'
+ end
+ params do
+ requires :module_version, type: String, desc: 'Module version'
+ end
+ get ':module_version.mod', requirements: MODULE_VERSION_REQUIREMENTS do
+ ver = find_version
+
+ content_type 'text/plain'
+ ver.gomod
+ end
+
+ desc 'Get a zip of the source of the given module version' do
+ detail 'See `go help goproxy`, GET $GOPROXY/<module>/@v/<version>.zip. This feature was introduced in GitLab 13.1.'
+ end
+ params do
+ requires :module_version, type: String, desc: 'Module version'
+ end
+ get ':module_version.zip', requirements: MODULE_VERSION_REQUIREMENTS do
+ ver = find_version
+
+ content_type 'application/zip'
+ env['api.format'] = :binary
+ header['Content-Disposition'] = ActionDispatch::Http::ContentDisposition.format(disposition: 'attachment', filename: ver.name + '.zip')
+ header['Content-Transfer-Encoding'] = 'binary'
+ status :ok
+ body ver.archive.string
+ end
+ end
+ end
+ end
+end
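For reference, a minimal standalone Ruby sketch, not part of this patch, of the `!a` case encoding that case_decode above reverses; case_encode is a hypothetical inverse shown only for illustration, since the proxy endpoints only ever decode:

    # Round-trips the case encoding described in `go help goproxy`:
    # github.com/Azure <=> github.com/!azure.
    def case_decode(str)
      str.gsub(/![[:alpha:]]/) { |s| s[1..].upcase }
    end

    # Hypothetical inverse, for illustration only.
    def case_encode(str)
      str.gsub(/[[:upper:]]/) { |s| "!#{s.downcase}" }
    end

    puts case_decode('github.com/!azure')  # => github.com/Azure
    puts case_encode('github.com/Azure')   # => github.com/!azure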
diff --git a/lib/api/group_boards.rb b/lib/api/group_boards.rb
index 88d04e70e11..7efc12121d2 100644
--- a/lib/api/group_boards.rb
+++ b/lib/api/group_boards.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class GroupBoards < Grape::API
+ class GroupBoards < Grape::API::Instance
include BoardsResponses
include PaginationParams
diff --git a/lib/api/group_clusters.rb b/lib/api/group_clusters.rb
index 2c12c6387fb..ae41d9f13b8 100644
--- a/lib/api/group_clusters.rb
+++ b/lib/api/group_clusters.rb
@@ -1,23 +1,11 @@
# frozen_string_literal: true
module API
- class GroupClusters < Grape::API
+ class GroupClusters < Grape::API::Instance
include PaginationParams
before { authenticate! }
- # EE::API::GroupClusters will
- # override these methods
- helpers do
- params :create_params_ee do
- end
-
- params :update_params_ee do
- end
- end
-
- prepend_if_ee('EE::API::GroupClusters') # rubocop: disable Cop/InjectEnterpriseEditionModule
-
params do
requires :id, type: String, desc: 'The ID of the group'
end
@@ -52,6 +40,7 @@ module API
params do
requires :name, type: String, desc: 'Cluster name'
optional :enabled, type: Boolean, default: true, desc: 'Determines if cluster is active or not, defaults to true'
+ optional :environment_scope, default: '*', type: String, desc: 'The associated environment to the cluster'
optional :domain, type: String, desc: 'Cluster base domain'
optional :management_project_id, type: Integer, desc: 'The ID of the management project'
optional :managed, type: Boolean, default: true, desc: 'Determines if GitLab will manage namespaces and service accounts for this cluster, defaults to true'
@@ -62,7 +51,6 @@ module API
optional :namespace, type: String, desc: 'Unique namespace related to Group'
optional :authorization_type, type: String, values: ::Clusters::Platforms::Kubernetes.authorization_types.keys, default: 'rbac', desc: 'Cluster authorization type, defaults to RBAC'
end
- use :create_params_ee
end
post ':id/clusters/user' do
authorize! :add_cluster, user_group
@@ -85,6 +73,7 @@ module API
requires :cluster_id, type: Integer, desc: 'The cluster ID'
optional :name, type: String, desc: 'Cluster name'
optional :domain, type: String, desc: 'Cluster base domain'
+ optional :environment_scope, type: String, desc: 'The associated environment to the cluster'
optional :management_project_id, type: Integer, desc: 'The ID of the management project'
optional :platform_kubernetes_attributes, type: Hash, desc: %q(Platform Kubernetes data) do
optional :api_url, type: String, desc: 'URL to access the Kubernetes API'
@@ -92,7 +81,6 @@ module API
optional :ca_cert, type: String, desc: 'TLS certificate (needed if API is using a self-signed TLS certificate)'
optional :namespace, type: String, desc: 'Unique namespace related to Group'
end
- use :update_params_ee
end
put ':id/clusters/:cluster_id' do
authorize! :update_cluster, cluster
diff --git a/lib/api/group_container_repositories.rb b/lib/api/group_container_repositories.rb
index d34317b5271..25b3059f63b 100644
--- a/lib/api/group_container_repositories.rb
+++ b/lib/api/group_container_repositories.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class GroupContainerRepositories < Grape::API
+ class GroupContainerRepositories < Grape::API::Instance
include PaginationParams
before { authorize_read_group_container_images! }
diff --git a/lib/api/group_export.rb b/lib/api/group_export.rb
index d3010b6d147..dc14813eefc 100644
--- a/lib/api/group_export.rb
+++ b/lib/api/group_export.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class GroupExport < Grape::API
+ class GroupExport < Grape::API::Instance
helpers Helpers::RateLimiter
before do
diff --git a/lib/api/group_import.rb b/lib/api/group_import.rb
index afcbc24d3ce..b82d9fc519a 100644
--- a/lib/api/group_import.rb
+++ b/lib/api/group_import.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class GroupImport < Grape::API
+ class GroupImport < Grape::API::Instance
helpers Helpers::FileUploadHelpers
helpers do
diff --git a/lib/api/group_labels.rb b/lib/api/group_labels.rb
index 7585293031f..56f2b769464 100644
--- a/lib/api/group_labels.rb
+++ b/lib/api/group_labels.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class GroupLabels < Grape::API
+ class GroupLabels < Grape::API::Instance
include PaginationParams
helpers ::API::Helpers::LabelHelpers
diff --git a/lib/api/group_milestones.rb b/lib/api/group_milestones.rb
index 9e9f5101285..82f5df79356 100644
--- a/lib/api/group_milestones.rb
+++ b/lib/api/group_milestones.rb
@@ -1,13 +1,11 @@
# frozen_string_literal: true
module API
- class GroupMilestones < Grape::API
+ class GroupMilestones < Grape::API::Instance
include MilestoneResponses
include PaginationParams
- before do
- authenticate!
- end
+ before { authenticate! }
params do
requires :id, type: String, desc: 'The ID of a group'
diff --git a/lib/api/group_packages.rb b/lib/api/group_packages.rb
new file mode 100644
index 00000000000..aa047e260f5
--- /dev/null
+++ b/lib/api/group_packages.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+module API
+ class GroupPackages < Grape::API::Instance
+ include PaginationParams
+
+ before do
+ authorize_packages_access!(user_group)
+ end
+
+ helpers ::API::Helpers::PackagesHelpers
+
+ params do
+ requires :id, type: String, desc: "Group's ID or path"
+ optional :exclude_subgroups, type: Boolean, default: false, desc: 'Determines if subgroups should be excluded'
+ end
+ resource :groups, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
+ desc 'Get all project packages within a group' do
+ detail 'This feature was introduced in GitLab 12.5'
+ success ::API::Entities::Package
+ end
+ params do
+ use :pagination
+ optional :order_by, type: String, values: %w[created_at name version type project_path], default: 'created_at',
+ desc: 'Return packages ordered by `created_at`, `name`, `version` or `type` fields.'
+ optional :sort, type: String, values: %w[asc desc], default: 'asc',
+ desc: 'Return packages sorted in `asc` or `desc` order.'
+ optional :package_type, type: String, values: Packages::Package.package_types.keys,
+ desc: 'Return packages of a certain type'
+ optional :package_name, type: String,
+ desc: 'Return packages with this name'
+ end
+ get ':id/packages' do
+ packages = Packages::GroupPackagesFinder.new(
+ current_user,
+ user_group,
+ declared(params).slice(:exclude_subgroups, :order_by, :sort, :package_type, :package_name)
+ ).execute
+
+ present paginate(packages), with: ::API::Entities::Package, user: current_user, group: true
+ end
+ end
+ end
+end
diff --git a/lib/api/group_variables.rb b/lib/api/group_variables.rb
index 916f89649a5..d3ca1c79e73 100644
--- a/lib/api/group_variables.rb
+++ b/lib/api/group_variables.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class GroupVariables < Grape::API
+ class GroupVariables < Grape::API::Instance
include PaginationParams
before { authenticate! }
@@ -48,7 +48,7 @@ module API
requires :value, type: String, desc: 'The value of the variable'
optional :protected, type: String, desc: 'Whether the variable is protected'
optional :masked, type: String, desc: 'Whether the variable is masked'
- optional :variable_type, type: String, values: Ci::GroupVariable.variable_types.keys, desc: 'The type of variable, must be one of env_var or file. Defaults to env_var'
+ optional :variable_type, type: String, values: ::Ci::GroupVariable.variable_types.keys, desc: 'The type of variable, must be one of env_var or file. Defaults to env_var'
end
post ':id/variables' do
variable_params = declared_params(include_missing: false)
@@ -70,7 +70,7 @@ module API
optional :value, type: String, desc: 'The value of the variable'
optional :protected, type: String, desc: 'Whether the variable is protected'
optional :masked, type: String, desc: 'Whether the variable is masked'
- optional :variable_type, type: String, values: Ci::GroupVariable.variable_types.keys, desc: 'The type of variable, must be one of env_var or file'
+ optional :variable_type, type: String, values: ::Ci::GroupVariable.variable_types.keys, desc: 'The type of variable, must be one of env_var or file'
end
# rubocop: disable CodeReuse/ActiveRecord
put ':id/variables/:key' do
diff --git a/lib/api/groups.rb b/lib/api/groups.rb
index 6e07bb46721..9ac3ac818fc 100644
--- a/lib/api/groups.rb
+++ b/lib/api/groups.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class Groups < Grape::API
+ class Groups < Grape::API::Instance
include PaginationParams
include Helpers::CustomAttributes
@@ -16,7 +16,7 @@ module API
params :group_list_params do
use :statistics_params
- optional :skip_groups, type: Array[Integer], desc: 'Array of group ids to exclude from list'
+ optional :skip_groups, type: Array[Integer], coerce_with: ::API::Validations::Types::CommaSeparatedToIntegerArray.coerce, desc: 'Array of group ids to exclude from list'
optional :all_available, type: Boolean, desc: 'Show all group that you have access to'
optional :search, type: String, desc: 'Search for a specific group'
optional :owned, type: Boolean, default: false, desc: 'Limit by owned by authenticated user'
@@ -76,9 +76,6 @@ module API
params: project_finder_params,
options: finder_options
).execute
- projects = projects.with_issues_available_for_user(current_user) if params[:with_issues_enabled]
- projects = projects.with_merge_requests_enabled if params[:with_merge_requests_enabled]
- projects = projects.visible_to_user_and_access_level(current_user, params[:min_access_level]) if params[:min_access_level]
projects = reorder_projects(projects)
paginate(projects)
end
@@ -221,7 +218,7 @@ module API
success Entities::Project
end
params do
- optional :archived, type: Boolean, default: false, desc: 'Limit by archived status'
+ optional :archived, type: Boolean, desc: 'Limit by archived status'
optional :visibility, type: String, values: Gitlab::VisibilityLevel.string_values,
desc: 'Limit by visibility'
optional :search, type: String, desc: 'Return list of authorized projects matching the search criteria'
@@ -258,7 +255,7 @@ module API
success Entities::Project
end
params do
- optional :archived, type: Boolean, default: false, desc: 'Limit by archived status'
+ optional :archived, type: Boolean, desc: 'Limit by archived status'
optional :visibility, type: String, values: Gitlab::VisibilityLevel.string_values,
desc: 'Limit by visibility'
optional :search, type: String, desc: 'Return list of authorized projects matching the search criteria'
diff --git a/lib/api/helpers.rb b/lib/api/helpers.rb
index bbdb45da3b1..01b89959c14 100644
--- a/lib/api/helpers.rb
+++ b/lib/api/helpers.rb
@@ -41,6 +41,16 @@ module API
end
end
+ def job_token_authentication?
+ initial_current_user && @current_authenticated_job.present? # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ end
+
+ # Returns the job associated with the token provided for
+ # authentication, if any
+ def current_authenticated_job
+ @current_authenticated_job
+ end
+
# rubocop:disable Gitlab/ModuleWithInstanceVariables
# We can't rewrite this with StrongMemoize because `sudo!` would
# actually write to `@current_user`, and `sudo?` would immediately
@@ -79,12 +89,6 @@ module API
@project ||= find_project!(params[:id])
end
- def wiki_page
- page = ProjectWiki.new(user_project, current_user).find_page(params[:slug])
-
- page || not_found!('Wiki Page')
- end
-
def available_labels_for(label_parent, include_ancestor_groups: true)
search_params = { include_ancestor_groups: include_ancestor_groups }
@@ -374,6 +378,12 @@ module API
render_api_error!(message.join(' '), 404)
end
+ def check_sha_param!(params, merge_request)
+ if params[:sha] && merge_request.diff_head_sha != params[:sha]
+ render_api_error!("SHA does not match HEAD of source branch: #{merge_request.diff_head_sha}", 409)
+ end
+ end
+
def unauthorized!
render_api_error!('401 Unauthorized', 401)
end
@@ -416,10 +426,14 @@ module API
def render_validation_error!(model)
if model.errors.any?
- render_api_error!(model.errors.messages || '400 Bad Request', 400)
+ render_api_error!(model_error_messages(model) || '400 Bad Request', 400)
end
end
+ def model_error_messages(model)
+ model.errors.messages
+ end
+
def render_spam_error!
render_api_error!({ error: 'Spam detected' }, 400)
end
@@ -490,7 +504,7 @@ module API
header['X-Sendfile'] = path
body
else
- file path
+ sendfile path
end
end
@@ -534,6 +548,8 @@ module API
def project_finder_params_ce
finder_params = project_finder_params_visibility_ce
+ finder_params[:with_issues_enabled] = true if params[:with_issues_enabled].present?
+ finder_params[:with_merge_requests_enabled] = true if params[:with_merge_requests_enabled].present?
finder_params[:without_deleted] = true
finder_params[:search] = params[:search] if params[:search]
finder_params[:search_namespaces] = true if params[:search_namespaces].present?
@@ -543,6 +559,7 @@ module API
finder_params[:id_before] = params[:id_before] if params[:id_before]
finder_params[:last_activity_after] = params[:last_activity_after] if params[:last_activity_after]
finder_params[:last_activity_before] = params[:last_activity_before] if params[:last_activity_before]
+ finder_params[:repository_storage] = params[:repository_storage] if params[:repository_storage]
finder_params
end
diff --git a/lib/api/helpers/common_helpers.rb b/lib/api/helpers/common_helpers.rb
index 32a15381f27..a44fd4b0a5b 100644
--- a/lib/api/helpers/common_helpers.rb
+++ b/lib/api/helpers/common_helpers.rb
@@ -12,6 +12,26 @@ module API
end
end
end
+
+ # Grape v1.3.3 no longer automatically coerces an Array
+ # type to an empty array if the value is nil.
+ def coerce_nil_params_to_array!
+ keys_to_coerce = params_with_array_types
+
+ params.each do |key, val|
+ params[key] = Array(val) if val.nil? && keys_to_coerce.include?(key)
+ end
+ end
+
+ def params_with_array_types
+ options[:route_options][:params].map do |key, val|
+ param_type = val[:type]
+ # Search for parameters with Array types (e.g. "[String]", "[Integer]", etc.)
+ if param_type =~ %r(\[\w*\])
+ key
+ end
+ end.compact.to_set
+ end
end
end
end
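A minimal self-contained sketch, not part of the patch, of the nil-to-array behaviour that coerce_nil_params_to_array! above restores for Array-typed params after the Grape v1.3.3 change; the params hash and the key set below are made up:

    require 'set'

    params = { 'iids' => nil, 'labels' => 'bug,confirmed', 'search' => nil }
    array_typed_keys = Set.new(%w[iids labels]) # keys whose declared type is an Array

    params.each do |key, val|
      params[key] = Array(val) if val.nil? && array_typed_keys.include?(key)
    end

    p params  # => {"iids"=>[], "labels"=>"bug,confirmed", "search"=>nil}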
diff --git a/lib/api/helpers/internal_helpers.rb b/lib/api/helpers/internal_helpers.rb
index b05e82a541d..b69930b447c 100644
--- a/lib/api/helpers/internal_helpers.rb
+++ b/lib/api/helpers/internal_helpers.rb
@@ -118,8 +118,8 @@ module API
{
repository: repository.gitaly_repository,
- address: Gitlab::GitalyClient.address(container.repository_storage),
- token: Gitlab::GitalyClient.token(container.repository_storage),
+ address: Gitlab::GitalyClient.address(repository.shard),
+ token: Gitlab::GitalyClient.token(repository.shard),
features: Feature::Gitaly.server_feature_flags
}
end
diff --git a/lib/api/helpers/merge_requests_helpers.rb b/lib/api/helpers/merge_requests_helpers.rb
index 9dab2a88f0b..4d5350498a7 100644
--- a/lib/api/helpers/merge_requests_helpers.rb
+++ b/lib/api/helpers/merge_requests_helpers.rb
@@ -5,7 +5,30 @@ module API
module MergeRequestsHelpers
extend Grape::API::Helpers
+ params :merge_requests_negatable_params do
+ optional :author_id, type: Integer, desc: 'Return merge requests which are authored by the user with the given ID'
+ optional :author_username, type: String, desc: 'Return merge requests which are authored by the user with the given username'
+ mutually_exclusive :author_id, :author_username
+
+ optional :assignee_id,
+ types: [Integer, String],
+ integer_none_any: true,
+ desc: 'Return merge requests which are assigned to the user with the given ID'
+ optional :assignee_username, type: Array[String], check_assignees_count: true,
+ coerce_with: Validations::Validators::CheckAssigneesCount.coerce,
+ desc: 'Return merge requests which are assigned to the user with the given username'
+ mutually_exclusive :assignee_id, :assignee_username
+
+ optional :labels,
+ type: Array[String],
+ coerce_with: Validations::Types::CommaSeparatedToArray.coerce,
+ desc: 'Comma-separated list of label names'
+ optional :milestone, type: String, desc: 'Return merge requests for a specific milestone'
+ optional :my_reaction_emoji, type: String, desc: 'Return issues reacted by the authenticated user by the given emoji'
+ end
+
params :merge_requests_base_params do
+ use :merge_requests_negatable_params
optional :state,
type: String,
values: %w[opened closed locked merged all],
@@ -21,11 +44,6 @@ module API
values: %w[asc desc],
default: 'desc',
desc: 'Return merge requests sorted in `asc` or `desc` order.'
- optional :milestone, type: String, desc: 'Return merge requests for a specific milestone'
- optional :labels,
- type: Array[String],
- coerce_with: Validations::Types::LabelsList.coerce,
- desc: 'Comma-separated list of label names'
optional :with_labels_details, type: Boolean, desc: 'Return titles of labels and other details', default: false
optional :with_merge_status_recheck, type: Boolean, desc: 'Request that stale merge statuses be rechecked asynchronously', default: false
optional :created_after, type: DateTime, desc: 'Return merge requests created after the specified time'
@@ -37,19 +55,10 @@ module API
values: %w[simple],
desc: 'If simple, returns the `iid`, URL, title, description, and basic state of merge request'
- optional :author_id, type: Integer, desc: 'Return merge requests which are authored by the user with the given ID'
- optional :author_username, type: String, desc: 'Return merge requests which are authored by the user with the given username'
- mutually_exclusive :author_id, :author_username
-
- optional :assignee_id,
- types: [Integer, String],
- integer_none_any: true,
- desc: 'Return merge requests which are assigned to the user with the given ID'
optional :scope,
type: String,
values: %w[created-by-me assigned-to-me created_by_me assigned_to_me all],
desc: 'Return merge requests for the given scope: `created_by_me`, `assigned_to_me` or `all`'
- optional :my_reaction_emoji, type: String, desc: 'Return issues reacted by the authenticated user by the given emoji'
optional :source_branch, type: String, desc: 'Return merge requests with the given source branch'
optional :source_project_id, type: Integer, desc: 'Return merge requests with the given source project id'
optional :target_branch, type: String, desc: 'Return merge requests with the given target branch'
@@ -58,6 +67,9 @@ module API
desc: 'Search merge requests for text present in the title, description, or any combination of these'
optional :in, type: String, desc: '`title`, `description`, or a string joining them with comma'
optional :wip, type: String, values: %w[yes no], desc: 'Search merge requests for WIP in the title'
+ optional :not, type: Hash, desc: 'Parameters to negate' do
+ use :merge_requests_negatable_params
+ end
end
params :optional_scope_param do
diff --git a/lib/api/helpers/packages/basic_auth_helpers.rb b/lib/api/helpers/packages/basic_auth_helpers.rb
new file mode 100644
index 00000000000..835b5f4614c
--- /dev/null
+++ b/lib/api/helpers/packages/basic_auth_helpers.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+module API
+ module Helpers
+ module Packages
+ module BasicAuthHelpers
+ module Constants
+ AUTHENTICATE_REALM_HEADER = 'Www-Authenticate: Basic realm'
+ AUTHENTICATE_REALM_NAME = 'GitLab Packages Registry'
+ end
+
+ include Constants
+
+ def find_personal_access_token
+ find_personal_access_token_from_http_basic_auth
+ end
+
+ def unauthorized_user_project
+ @unauthorized_user_project ||= find_project(params[:id])
+ end
+
+ def unauthorized_user_project!
+ unauthorized_user_project || not_found!
+ end
+
+ def authorized_user_project
+ @authorized_user_project ||= authorized_project_find!
+ end
+
+ def authorized_project_find!
+ project = unauthorized_user_project
+
+ unless project && can?(current_user, :read_project, project)
+ return unauthorized_or! { not_found! }
+ end
+
+ project
+ end
+
+ def authorize!(action, subject = :global, reason = nil)
+ return if can?(current_user, action, subject)
+
+ unauthorized_or! { forbidden!(reason) }
+ end
+
+ def unauthorized_or!
+ current_user ? yield : unauthorized_with_header!
+ end
+
+ def unauthorized_with_header!
+ header(AUTHENTICATE_REALM_HEADER, AUTHENTICATE_REALM_NAME)
+ unauthorized!
+ end
+ end
+ end
+ end
+end
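As a sketch only, with stand-in return values rather than the real Grape helpers: unauthorized_or! above answers anonymous clients with 401 plus a Basic realm challenge so package managers prompt for credentials, while signed-in users keep the 404/403 from the block, which avoids leaking whether a private project exists:

    def unauthorized_or!(current_user)
      if current_user
        yield
      else
        # stand-in for unauthorized_with_header!: 401 plus the Basic realm challenge
        [401, { 'Www-Authenticate' => 'Basic realm="GitLab Packages Registry"' }]
      end
    end

    anonymous = unauthorized_or!(nil)   { [404, {}] }
    signed_in = unauthorized_or!(:user) { [404, {}] }

    p anonymous  # => [401, {"Www-Authenticate"=>"Basic realm=\"GitLab Packages Registry\""}]
    p signed_in  # => [404, {}]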
diff --git a/lib/api/helpers/packages/conan/api_helpers.rb b/lib/api/helpers/packages/conan/api_helpers.rb
new file mode 100644
index 00000000000..30e690a5a1d
--- /dev/null
+++ b/lib/api/helpers/packages/conan/api_helpers.rb
@@ -0,0 +1,225 @@
+# frozen_string_literal: true
+
+module API
+ module Helpers
+ module Packages
+ module Conan
+ module ApiHelpers
+ def present_download_urls(entity)
+ authorize!(:read_package, project)
+
+ presenter = ::Packages::Conan::PackagePresenter.new(
+ recipe,
+ current_user,
+ project,
+ conan_package_reference: params[:conan_package_reference]
+ )
+
+ render_api_error!("No recipe manifest found", 404) if yield(presenter).empty?
+
+ present presenter, with: entity
+ end
+
+ def present_package_download_urls
+ present_download_urls(::API::Entities::ConanPackage::ConanPackageManifest, &:package_urls)
+ end
+
+ def present_recipe_download_urls
+ present_download_urls(::API::Entities::ConanPackage::ConanRecipeManifest, &:recipe_urls)
+ end
+
+ def recipe_upload_urls(file_names)
+ { upload_urls: Hash[
+ file_names.collect do |file_name|
+ [file_name, recipe_file_upload_url(file_name)]
+ end
+ ] }
+ end
+
+ def package_upload_urls(file_names)
+ { upload_urls: Hash[
+ file_names.collect do |file_name|
+ [file_name, package_file_upload_url(file_name)]
+ end
+ ] }
+ end
+
+ def package_file_upload_url(file_name)
+ expose_url(
+ api_v4_packages_conan_v1_files_package_path(
+ package_name: params[:package_name],
+ package_version: params[:package_version],
+ package_username: params[:package_username],
+ package_channel: params[:package_channel],
+ recipe_revision: '0',
+ conan_package_reference: params[:conan_package_reference],
+ package_revision: '0',
+ file_name: file_name
+ )
+ )
+ end
+
+ def recipe_file_upload_url(file_name)
+ expose_url(
+ api_v4_packages_conan_v1_files_export_path(
+ package_name: params[:package_name],
+ package_version: params[:package_version],
+ package_username: params[:package_username],
+ package_channel: params[:package_channel],
+ recipe_revision: '0',
+ file_name: file_name
+ )
+ )
+ end
+
+ def recipe
+ "%{package_name}/%{package_version}@%{package_username}/%{package_channel}" % params.symbolize_keys
+ end
+
+ def project
+ strong_memoize(:project) do
+ full_path = ::Packages::Conan::Metadatum.full_path_from(package_username: params[:package_username])
+ Project.find_by_full_path(full_path)
+ end
+ end
+
+ def package
+ strong_memoize(:package) do
+ project.packages
+ .with_name(params[:package_name])
+ .with_version(params[:package_version])
+ .with_conan_channel(params[:package_channel])
+ .order_created
+ .last
+ end
+ end
+
+ def token
+ strong_memoize(:token) do
+ token = nil
+ token = ::Gitlab::ConanToken.from_personal_access_token(access_token) if access_token
+ token = ::Gitlab::ConanToken.from_deploy_token(deploy_token_from_request) if deploy_token_from_request
+ token = ::Gitlab::ConanToken.from_job(find_job_from_token) if find_job_from_token
+ token
+ end
+ end
+
+ def download_package_file(file_type)
+ authorize!(:read_package, project)
+
+ package_file = ::Packages::Conan::PackageFileFinder
+ .new(
+ package,
+ params[:file_name].to_s,
+ conan_file_type: file_type,
+ conan_package_reference: params[:conan_package_reference]
+ ).execute!
+
+ track_event('pull_package') if params[:file_name] == ::Packages::Conan::FileMetadatum::PACKAGE_BINARY
+
+ present_carrierwave_file!(package_file.file)
+ end
+
+ def find_or_create_package
+ package || ::Packages::Conan::CreatePackageService.new(project, current_user, params).execute
+ end
+
+ def track_push_package_event
+ if params[:file_name] == ::Packages::Conan::FileMetadatum::PACKAGE_BINARY && params['file.size'] > 0
+ track_event('push_package')
+ end
+ end
+
+ def create_package_file_with_type(file_type, current_package)
+ unless params['file.size'] == 0
+ # Conan sends two upload requests; the first has no file, so we skip record creation when file.size == 0
+ ::Packages::Conan::CreatePackageFileService.new(current_package, uploaded_package_file, params.merge(conan_file_type: file_type)).execute
+ end
+ end
+
+ def upload_package_file(file_type)
+ authorize_upload!(project)
+
+ current_package = find_or_create_package
+
+ track_push_package_event
+
+ create_package_file_with_type(file_type, current_package)
+ rescue ObjectStorage::RemoteStoreError => e
+ Gitlab::ErrorTracking.track_exception(e, file_name: params[:file_name], project_id: project.id)
+
+ forbidden!
+ end
+
+ def find_personal_access_token
+ personal_access_token = find_personal_access_token_from_conan_jwt ||
+ find_personal_access_token_from_http_basic_auth
+
+ personal_access_token
+ end
+
+ def find_user_from_job_token
+ return unless route_authentication_setting[:job_token_allowed]
+
+ job = find_job_from_token || raise(::Gitlab::Auth::UnauthorizedError)
+
+ job.user
+ end
+
+ def deploy_token_from_request
+ find_deploy_token_from_conan_jwt || find_deploy_token_from_http_basic_auth
+ end
+
+ def find_job_from_token
+ find_job_from_conan_jwt || find_job_from_http_basic_auth
+ end
+
+ # We need to override this one because it
+ # looks into Bearer authorization header
+ def find_oauth_access_token
+ end
+
+ def find_personal_access_token_from_conan_jwt
+ token = decode_oauth_token_from_jwt
+
+ return unless token
+
+ PersonalAccessToken.find_by_id_and_user_id(token.access_token_id, token.user_id)
+ end
+
+ def find_deploy_token_from_conan_jwt
+ token = decode_oauth_token_from_jwt
+
+ return unless token
+
+ deploy_token = DeployToken.active.find_by_token(token.access_token_id.to_s)
+ # note: user_id is not a user record id, but is the attribute set on ConanToken
+ return if token.user_id != deploy_token&.username
+
+ deploy_token
+ end
+
+ def find_job_from_conan_jwt
+ token = decode_oauth_token_from_jwt
+
+ return unless token
+
+ ::Ci::Build.find_by_token(token.access_token_id.to_s)
+ end
+
+ def decode_oauth_token_from_jwt
+ jwt = Doorkeeper::OAuth::Token.from_bearer_authorization(current_request)
+
+ return unless jwt
+
+ token = ::Gitlab::ConanToken.decode(jwt)
+
+ return unless token && token.access_token_id && token.user_id
+
+ token
+ end
+ end
+ end
+ end
+ end
+end
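A small standalone sketch, not part of the patch, of how the recipe helper above assembles the Conan recipe reference from the route params; the values are invented and transform_keys stands in for Rails' symbolize_keys:

    params = {
      'package_name'     => 'Hello',
      'package_version'  => '0.1',
      'package_username' => 'foo+conan_test',
      'package_channel'  => 'stable'
    }

    recipe = "%{package_name}/%{package_version}@%{package_username}/%{package_channel}" %
      params.transform_keys(&:to_sym)

    puts recipe  # => Hello/0.1@foo+conan_test/stable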
diff --git a/lib/api/helpers/packages/dependency_proxy_helpers.rb b/lib/api/helpers/packages/dependency_proxy_helpers.rb
new file mode 100644
index 00000000000..254af7690a2
--- /dev/null
+++ b/lib/api/helpers/packages/dependency_proxy_helpers.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+module API
+ module Helpers
+ module Packages
+ module DependencyProxyHelpers
+ REGISTRY_BASE_URLS = {
+ npm: 'https://registry.npmjs.org/'
+ }.freeze
+
+ def redirect_registry_request(forward_to_registry, package_type, options)
+ if forward_to_registry && redirect_registry_request_available?
+ redirect(registry_url(package_type, options))
+ else
+ yield
+ end
+ end
+
+ def registry_url(package_type, options)
+ base_url = REGISTRY_BASE_URLS[package_type]
+
+ raise ArgumentError, "Can't build registry_url for package_type #{package_type}" unless base_url
+
+ case package_type
+ when :npm
+ "#{base_url}#{options[:package_name]}"
+ end
+ end
+
+ def redirect_registry_request_available?
+ ::Gitlab::CurrentSettings.current_application_settings.npm_package_requests_forwarding
+ end
+ end
+ end
+ end
+end
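A minimal sketch of the decision redirect_registry_request above makes, with a local constant standing in for the npm_package_requests_forwarding application setting and a string standing in for the actual redirect: forward opted-in npm requests to the upstream registry, otherwise fall through to the local handler:

    REGISTRY_BASE_URLS = { npm: 'https://registry.npmjs.org/' }.freeze
    FORWARDING_ENABLED = true # stand-in for the application setting

    def redirect_registry_request(forward_to_registry, package_type, options)
      if forward_to_registry && FORWARDING_ENABLED
        "redirect to #{REGISTRY_BASE_URLS.fetch(package_type)}#{options[:package_name]}"
      else
        yield
      end
    end

    puts redirect_registry_request(true, :npm, { package_name: 'left-pad' }) { 'serve from GitLab' }
    # => redirect to https://registry.npmjs.org/left-pad
    puts redirect_registry_request(false, :npm, { package_name: 'left-pad' }) { 'serve from GitLab' }
    # => serve from GitLab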
diff --git a/lib/api/helpers/packages_helpers.rb b/lib/api/helpers/packages_helpers.rb
new file mode 100644
index 00000000000..c6037d52de9
--- /dev/null
+++ b/lib/api/helpers/packages_helpers.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+module API
+ module Helpers
+ module PackagesHelpers
+ MAX_PACKAGE_FILE_SIZE = 50.megabytes.freeze
+
+ def require_packages_enabled!
+ not_found! unless ::Gitlab.config.packages.enabled
+ end
+
+ def require_dependency_proxy_enabled!
+ not_found! unless ::Gitlab.config.dependency_proxy.enabled
+ end
+
+ def authorize_read_package!(subject = user_project)
+ authorize!(:read_package, subject)
+ end
+
+ def authorize_create_package!(subject = user_project)
+ authorize!(:create_package, subject)
+ end
+
+ def authorize_destroy_package!(subject = user_project)
+ authorize!(:destroy_package, subject)
+ end
+
+ def authorize_packages_access!(subject = user_project)
+ require_packages_enabled!
+ authorize_read_package!(subject)
+ end
+
+ def authorize_workhorse!(subject: user_project, has_length: true, maximum_size: MAX_PACKAGE_FILE_SIZE)
+ authorize_upload!(subject)
+
+ Gitlab::Workhorse.verify_api_request!(headers)
+
+ status 200
+ content_type Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE
+
+ params = { has_length: has_length }
+ params[:maximum_size] = maximum_size unless has_length
+ ::Packages::PackageFileUploader.workhorse_authorize(params)
+ end
+
+ def authorize_upload!(subject = user_project)
+ authorize_create_package!(subject)
+ require_gitlab_workhorse!
+ end
+ end
+ end
+end
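For illustration only, with a plain integer in place of ActiveSupport's 50.megabytes: the parameter hash that authorize_workhorse! above hands to the package file uploader only carries the size cap when the request length is unknown:

    MAX_PACKAGE_FILE_SIZE = 50 * 1024 * 1024 # 50.megabytes

    def workhorse_authorize_params(has_length:, maximum_size: MAX_PACKAGE_FILE_SIZE)
      params = { has_length: has_length }
      params[:maximum_size] = maximum_size unless has_length
      params
    end

    p workhorse_authorize_params(has_length: true)
    # => {:has_length=>true}
    p workhorse_authorize_params(has_length: false)
    # => {:has_length=>false, :maximum_size=>52428800}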
diff --git a/lib/api/helpers/packages_manager_clients_helpers.rb b/lib/api/helpers/packages_manager_clients_helpers.rb
new file mode 100644
index 00000000000..7b5d0dd708d
--- /dev/null
+++ b/lib/api/helpers/packages_manager_clients_helpers.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+module API
+ module Helpers
+ module PackagesManagerClientsHelpers
+ extend Grape::API::Helpers
+ include ::API::Helpers::PackagesHelpers
+
+ params :workhorse_upload_params do
+ optional 'file.path', type: String, desc: 'Path to locally stored body (generated by Workhorse)'
+ optional 'file.name', type: String, desc: 'Real filename as sent in Content-Disposition (generated by Workhorse)'
+ optional 'file.type', type: String, desc: 'Real content type as sent in Content-Type (generated by Workhorse)'
+ optional 'file.size', type: Integer, desc: 'Real size of file (generated by Workhorse)'
+ optional 'file.md5', type: String, desc: 'MD5 checksum of the file (generated by Workhorse)'
+ optional 'file.sha1', type: String, desc: 'SHA1 checksum of the file (generated by Workhorse)'
+ optional 'file.sha256', type: String, desc: 'SHA256 checksum of the file (generated by Workhorse)'
+ end
+
+ def find_personal_access_token_from_http_basic_auth
+ return unless headers
+
+ token = decode_token
+
+ return unless token
+
+ PersonalAccessToken.find_by_token(token)
+ end
+
+ def find_job_from_http_basic_auth
+ return unless headers
+
+ token = decode_token
+
+ return unless token
+
+ ::Ci::Build.find_by_token(token)
+ end
+
+ def find_deploy_token_from_http_basic_auth
+ return unless headers
+
+ token = decode_token
+
+ return unless token
+
+ DeployToken.active.find_by_token(token)
+ end
+
+ def uploaded_package_file(param_name = :file)
+ uploaded_file = UploadedFile.from_params(params, param_name, ::Packages::PackageFileUploader.workhorse_local_upload_path)
+ bad_request!('Missing package file!') unless uploaded_file
+ uploaded_file
+ end
+
+ private
+
+ def decode_token
+ encoded_credentials = headers['Authorization'].to_s.split('Basic ', 2).second
+ Base64.decode64(encoded_credentials || '').split(':', 2).second
+ end
+ end
+ end
+end
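A self-contained sketch, not part of the patch, of how decode_token above extracts the token, that is the Basic-auth password, from the Authorization header; [1] stands in for ActiveSupport's .second and the credentials are made up:

    require 'base64'

    authorization = 'Basic ' + Base64.strict_encode64('gitlab-ci-token:SECRET-TOKEN')

    encoded = authorization.split('Basic ', 2)[1]
    token   = Base64.decode64(encoded.to_s).split(':', 2)[1]

    puts token  # => SECRET-TOKEN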
diff --git a/lib/api/helpers/projects_helpers.rb b/lib/api/helpers/projects_helpers.rb
index 8a115d42929..76e5bb95c4d 100644
--- a/lib/api/helpers/projects_helpers.rb
+++ b/lib/api/helpers/projects_helpers.rb
@@ -6,6 +6,8 @@ module API
extend ActiveSupport::Concern
extend Grape::API::Helpers
+ STATISTICS_SORT_PARAMS = %w[storage_size repository_size wiki_size].freeze
+
params :optional_project_params_ce do
optional :description, type: String, desc: 'The description of the project'
optional :build_git_strategy, type: String, values: %w(fetch clone), desc: 'The Git strategy. Defaults to `fetch`'
@@ -13,6 +15,7 @@ module API
optional :auto_cancel_pending_pipelines, type: String, values: %w(disabled enabled), desc: 'Auto-cancel pending pipelines'
optional :build_coverage_regex, type: String, desc: 'Test coverage parsing'
optional :ci_config_path, type: String, desc: 'The path to CI config file. Defaults to `.gitlab-ci.yml`'
+ optional :service_desk_enabled, type: Boolean, desc: 'Disable or enable the service desk'
# TODO: remove in API v5, replaced by *_access_level
optional :issues_enabled, type: Boolean, desc: 'Flag indication if the issue tracker is enabled'
@@ -46,7 +49,7 @@ module API
optional :only_allow_merge_if_pipeline_succeeds, type: Boolean, desc: 'Only allow to merge if builds succeed'
optional :allow_merge_on_skipped_pipeline, type: Boolean, desc: 'Allow to merge if pipeline is skipped'
optional :only_allow_merge_if_all_discussions_are_resolved, type: Boolean, desc: 'Only allow to merge if all discussions are resolved'
- optional :tag_list, type: Array[String], desc: 'The list of tags for a project'
+ optional :tag_list, type: Array[String], coerce_with: ::API::Validations::Types::CommaSeparatedToArray.coerce, desc: 'The list of tags for a project'
# TODO: remove rubocop disable - https://gitlab.com/gitlab-org/gitlab/issues/14960
optional :avatar, type: File, desc: 'Avatar image for project' # rubocop:disable Scalability/FileUploads
optional :printing_merge_request_link_enabled, type: Boolean, desc: 'Show link to create/view merge request when pushing from the command line'
@@ -134,6 +137,7 @@ module API
:suggestion_commit_message,
:repository_storage,
:compliance_framework_setting,
+ :service_desk_enabled,
# TODO: remove in API v5, replaced by *_access_level
:issues_enabled,
diff --git a/lib/api/helpers/runner.rb b/lib/api/helpers/runner.rb
index 293d7ed9a6a..34a2fb09875 100644
--- a/lib/api/helpers/runner.rb
+++ b/lib/api/helpers/runner.rb
@@ -60,7 +60,7 @@ module API
def current_job
strong_memoize(:current_job) do
- Ci::Build.find_by_id(params[:id])
+ ::Ci::Build.find_by_id(params[:id])
end
end
@@ -69,11 +69,6 @@ module API
token && job.valid_token?(token)
end
- def max_artifacts_size(job)
- max_size = job.project.closest_setting(:max_artifacts_size)
- max_size.megabytes.to_i
- end
-
def job_forbidden!(job, reason)
header 'Job-Status', job.status
forbidden!(reason)
diff --git a/lib/api/helpers/services_helpers.rb b/lib/api/helpers/services_helpers.rb
index 3d6039cacaa..d4870b96575 100644
--- a/lib/api/helpers/services_helpers.rb
+++ b/lib/api/helpers/services_helpers.rb
@@ -234,18 +234,6 @@ module API
name: :project_url,
type: String,
desc: 'Project URL'
- },
- {
- required: false,
- name: :description,
- type: String,
- desc: 'Description'
- },
- {
- required: false,
- name: :title,
- type: String,
- desc: 'Title'
}
],
'buildkite' => [
@@ -288,6 +276,14 @@ module API
desc: 'Campfire room'
}
],
+ 'confluence' => [
+ {
+ required: true,
+ name: :confluence_url,
+ type: String,
+ desc: 'The URL of the Confluence Cloud Workspace hosted on atlassian.net'
+ }
+ ],
'custom-issue-tracker' => [
{
required: true,
@@ -306,18 +302,6 @@ module API
name: :project_url,
type: String,
desc: 'Project URL'
- },
- {
- required: false,
- name: :description,
- type: String,
- desc: 'Description'
- },
- {
- required: false,
- name: :title,
- type: String,
- desc: 'Title'
}
],
'discord' => [
@@ -757,6 +741,7 @@ module API
::BambooService,
::BugzillaService,
::BuildkiteService,
+ ::ConfluenceService,
::CampfireService,
::CustomIssueTrackerService,
::DiscordService,
diff --git a/lib/api/helpers/snippets_helpers.rb b/lib/api/helpers/snippets_helpers.rb
index 20aeca6a9d3..f95d066bd7c 100644
--- a/lib/api/helpers/snippets_helpers.rb
+++ b/lib/api/helpers/snippets_helpers.rb
@@ -3,15 +3,37 @@
module API
module Helpers
module SnippetsHelpers
+ extend Grape::API::Helpers
+
+ params :raw_file_params do
+ requires :file_path, type: String, file_path: true, desc: 'The URL-encoded path to the file, e.g. lib%2Fclass%2Erb'
+ requires :ref, type: String, desc: 'The name of branch, tag or commit'
+ end
+
def content_for(snippet)
if snippet.empty_repo?
+ env['api.format'] = :txt
+ content_type 'text/plain'
+ header['Content-Disposition'] = 'attachment'
+
snippet.content
else
blob = snippet.blobs.first
- blob.load_all_data!
- blob.data
+
+ send_git_blob(blob.repository, blob)
end
end
+
+ def file_content_for(snippet)
+ repo = snippet.repository
+ commit = repo.commit(params[:ref])
+ not_found!('Reference') unless commit
+
+ blob = repo.blob_at(commit.sha, params[:file_path])
+ not_found!('File') unless blob
+
+ send_git_blob(repo, blob)
+ end
end
end
end
diff --git a/lib/api/helpers/users_helpers.rb b/lib/api/helpers/users_helpers.rb
index 99eefc1cbb9..2d7b22e66b3 100644
--- a/lib/api/helpers/users_helpers.rb
+++ b/lib/api/helpers/users_helpers.rb
@@ -11,6 +11,13 @@ module API
params :optional_index_params_ee do
end
+
+ def model_error_messages(model)
+ super.tap do |error_messages|
+ # Remapping errors from nested associations.
+ error_messages[:bio] = error_messages.delete(:"user_detail.bio") if error_messages.has_key?(:"user_detail.bio")
+ end
+ end
end
end
end
diff --git a/lib/api/helpers/wikis_helpers.rb b/lib/api/helpers/wikis_helpers.rb
new file mode 100644
index 00000000000..49da1e317ab
--- /dev/null
+++ b/lib/api/helpers/wikis_helpers.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+module API
+ module Helpers
+ module WikisHelpers
+ def self.wiki_resource_kinds
+ [:projects]
+ end
+
+ def find_container(kind)
+ return user_project if kind == :projects
+
+ raise "Unknown wiki container #{kind}"
+ end
+
+ def wiki_page
+ Wiki.for_container(container, current_user).find_page(params[:slug]) || not_found!('Wiki Page')
+ end
+
+ def commit_params(attrs)
+ base_params = { branch_name: attrs[:branch] }
+ file_details = case attrs[:file]
+ when Hash # legacy format: TODO remove when we drop support for non-accelerated uploads
+ { file_name: attrs[:file][:filename], file_content: attrs[:file][:tempfile].read }
+ else
+ { file_name: attrs[:file].original_filename, file_content: attrs[:file].read }
+ end
+
+ base_params.merge(file_details)
+ end
+ end
+ end
+end
+
+API::Helpers::WikisHelpers.prepend_if_ee('EE::API::Helpers::WikisHelpers')
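A short sketch, not part of the patch, of the two attachment formats commit_params above accepts, the legacy multipart Hash and a Workhorse-accelerated upload object; OpenStruct and StringIO stand in for the real upload classes:

    require 'ostruct'
    require 'stringio'

    def commit_params(attrs)
      base_params = { branch_name: attrs[:branch] }
      file_details =
        case attrs[:file]
        when Hash # legacy multipart format
          { file_name: attrs[:file][:filename], file_content: attrs[:file][:tempfile].read }
        else      # accelerated upload: responds to original_filename and read
          { file_name: attrs[:file].original_filename, file_content: attrs[:file].read }
        end
      base_params.merge(file_details)
    end

    legacy      = { filename: 'diagram.png', tempfile: StringIO.new('PNG bytes') }
    accelerated = OpenStruct.new(original_filename: 'diagram.png', read: 'PNG bytes')

    p commit_params(branch: 'master', file: legacy)
    p commit_params(branch: 'master', file: accelerated)
    # both => {:branch_name=>"master", :file_name=>"diagram.png", :file_content=>"PNG bytes"}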
diff --git a/lib/api/import_bitbucket_server.rb b/lib/api/import_bitbucket_server.rb
new file mode 100644
index 00000000000..df3235420e9
--- /dev/null
+++ b/lib/api/import_bitbucket_server.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+module API
+ class ImportBitbucketServer < Grape::API::Instance
+ helpers do
+ def client
+ @client ||= BitbucketServer::Client.new(credentials)
+ end
+
+ def credentials
+ @credentials ||= {
+ base_uri: params[:bitbucket_server_url],
+ user: params[:bitbucket_server_username],
+ password: params[:personal_access_token]
+ }
+ end
+ end
+
+ desc 'Import a Bitbucket Server repository' do
+ detail 'This feature was introduced in GitLab 13.2.'
+ success ::ProjectEntity
+ end
+
+ params do
+ requires :bitbucket_server_url, type: String, desc: 'Bitbucket Server URL'
+ requires :bitbucket_server_username, type: String, desc: 'Bitbucket Server Username'
+ requires :personal_access_token, type: String, desc: 'Bitbucket Server personal access token/password'
+ requires :bitbucket_server_project, type: String, desc: 'Bitbucket Server Project Key'
+ requires :bitbucket_server_repo, type: String, desc: 'Bitbucket Server Repository Name'
+ optional :new_name, type: String, desc: 'New repo name'
+ optional :new_namespace, type: String, desc: 'Namespace to import repo into'
+ end
+
+ post 'import/bitbucket_server' do
+ result = Import::BitbucketServerService.new(client, current_user, params).execute(credentials)
+
+ if result[:status] == :success
+ present ProjectSerializer.new.represent(result[:project], serializer: :import)
+ else
+ render_api_error!({ error: result[:message] }, result[:http_status])
+ end
+ end
+ end
+end
diff --git a/lib/api/import_github.rb b/lib/api/import_github.rb
index f31cc15dc62..1e839816006 100644
--- a/lib/api/import_github.rb
+++ b/lib/api/import_github.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class ImportGithub < Grape::API
+ class ImportGithub < Grape::API::Instance
rescue_from Octokit::Unauthorized, with: :provider_unauthorized
before do
diff --git a/lib/api/internal/base.rb b/lib/api/internal/base.rb
index 79c407b9581..6d4a4fc9c8b 100644
--- a/lib/api/internal/base.rb
+++ b/lib/api/internal/base.rb
@@ -3,7 +3,7 @@
module API
# Internal access API
module Internal
- class Base < Grape::API
+ class Base < Grape::API::Instance
before { authenticate_by_gitlab_shell_token! }
before do
@@ -63,15 +63,13 @@ module API
gl_project_path: gl_repository_path,
gl_id: Gitlab::GlId.gl_id(actor.user),
gl_username: actor.username,
- git_config_options: [],
+ git_config_options: ["uploadpack.allowFilter=true",
+ "uploadpack.allowAnySHA1InWant=true"],
gitaly: gitaly_payload(params[:action]),
gl_console_messages: check_result.console_messages
}
# Custom option for git-receive-pack command
- if Feature.enabled?(:gitaly_upload_pack_filter, project, default_enabled: true)
- payload[:git_config_options] << "uploadpack.allowFilter=true" << "uploadpack.allowAnySHA1InWant=true"
- end
receive_max_input_size = Gitlab::CurrentSettings.receive_max_input_size.to_i
diff --git a/lib/api/internal/pages.rb b/lib/api/internal/pages.rb
index 6c8da414e4d..5f8d23f15fa 100644
--- a/lib/api/internal/pages.rb
+++ b/lib/api/internal/pages.rb
@@ -3,7 +3,7 @@
module API
# Pages Internal API
module Internal
- class Pages < Grape::API
+ class Pages < Grape::API::Instance
before do
authenticate_gitlab_pages_request!
end
diff --git a/lib/api/issues.rb b/lib/api/issues.rb
index 2374ac11f4a..455511caabb 100644
--- a/lib/api/issues.rb
+++ b/lib/api/issues.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class Issues < Grape::API
+ class Issues < Grape::API::Instance
include PaginationParams
helpers Helpers::IssuesHelpers
helpers Helpers::RateLimiter
@@ -10,9 +10,9 @@ module API
helpers do
params :negatable_issue_filter_params do
- optional :labels, type: Array[String], coerce_with: Validations::Types::LabelsList.coerce, desc: 'Comma-separated list of label names'
+ optional :labels, type: Array[String], coerce_with: ::API::Validations::Types::CommaSeparatedToArray.coerce, desc: 'Comma-separated list of label names'
optional :milestone, type: String, desc: 'Milestone title'
- optional :iids, type: Array[Integer], desc: 'The IID array of issues'
+ optional :iids, type: Array[Integer], coerce_with: ::API::Validations::Types::CommaSeparatedToIntegerArray.coerce, desc: 'The IID array of issues'
optional :search, type: String, desc: 'Search issues for text present in the title, description, or any combination of these'
optional :in, type: String, desc: '`title`, `description`, or a string joining them with comma'
@@ -62,12 +62,12 @@ module API
params :issue_params do
optional :description, type: String, desc: 'The description of an issue'
- optional :assignee_ids, type: Array[Integer], desc: 'The array of user IDs to assign issue'
+ optional :assignee_ids, type: Array[Integer], coerce_with: ::API::Validations::Types::CommaSeparatedToIntegerArray.coerce, desc: 'The array of user IDs to assign issue'
optional :assignee_id, type: Integer, desc: '[Deprecated] The ID of a user to assign issue'
optional :milestone_id, type: Integer, desc: 'The ID of a milestone to assign issue'
- optional :labels, type: Array[String], coerce_with: Validations::Types::LabelsList.coerce, desc: 'Comma-separated list of label names'
- optional :add_labels, type: Array[String], coerce_with: Validations::Types::LabelsList.coerce, desc: 'Comma-separated list of label names'
- optional :remove_labels, type: Array[String], coerce_with: Validations::Types::LabelsList.coerce, desc: 'Comma-separated list of label names'
+ optional :labels, type: Array[String], coerce_with: ::API::Validations::Types::CommaSeparatedToArray.coerce, desc: 'Comma-separated list of label names'
+ optional :add_labels, type: Array[String], coerce_with: ::API::Validations::Types::CommaSeparatedToArray.coerce, desc: 'Comma-separated list of label names'
+ optional :remove_labels, type: Array[String], coerce_with: ::API::Validations::Types::CommaSeparatedToArray.coerce, desc: 'Comma-separated list of label names'
optional :due_date, type: String, desc: 'Date string in the format YEAR-MONTH-DAY'
optional :confidential, type: Boolean, desc: 'Boolean parameter if the issue should be confidential'
optional :discussion_locked, type: Boolean, desc: " Boolean parameter indicating if the issue's discussion is locked"
@@ -107,7 +107,6 @@ module API
with: Entities::Issue,
with_labels_details: declared_params[:with_labels_details],
current_user: current_user,
- issuable_metadata: Gitlab::IssuableMetadata.new(current_user, issues).data,
include_subscribed: false
}
@@ -133,7 +132,6 @@ module API
with: Entities::Issue,
with_labels_details: declared_params[:with_labels_details],
current_user: current_user,
- issuable_metadata: Gitlab::IssuableMetadata.new(current_user, issues).data,
include_subscribed: false,
group: user_group
}
@@ -170,7 +168,6 @@ module API
with_labels_details: declared_params[:with_labels_details],
current_user: current_user,
project: user_project,
- issuable_metadata: Gitlab::IssuableMetadata.new(current_user, issues).data,
include_subscribed: false
}
@@ -289,6 +286,30 @@ module API
end
# rubocop: enable CodeReuse/ActiveRecord
+ desc 'Reorder an existing issue' do
+ success Entities::Issue
+ end
+ params do
+ requires :issue_iid, type: Integer, desc: 'The internal ID of a project issue'
+ optional :move_after_id, type: Integer, desc: 'The ID of the issue we want to be after'
+ optional :move_before_id, type: Integer, desc: 'The ID of the issue we want to be before'
+ at_least_one_of :move_after_id, :move_before_id
+ end
+ # rubocop: disable CodeReuse/ActiveRecord
+ put ':id/issues/:issue_iid/reorder' do
+ issue = user_project.issues.find_by(iid: params[:issue_iid])
+ not_found!('Issue') unless issue
+
+ authorize! :update_issue, issue
+
+ if ::Issues::ReorderService.new(user_project, current_user, params).execute(issue)
+ present issue, with: Entities::Issue, current_user: current_user, project: user_project
+ else
+ render_api_error!({ error: 'Unprocessable Entity' }, 422)
+ end
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
desc 'Move an existing issue' do
success Entities::Issue
end
diff --git a/lib/api/job_artifacts.rb b/lib/api/job_artifacts.rb
index 6a82256cc96..61c279a76e9 100644
--- a/lib/api/job_artifacts.rb
+++ b/lib/api/job_artifacts.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class JobArtifacts < Grape::API
+ class JobArtifacts < Grape::API::Instance
before { authenticate_non_get! }
# EE::API::JobArtifacts would override the following helpers
diff --git a/lib/api/jobs.rb b/lib/api/jobs.rb
index 61a7fc107ef..bcc00429dd6 100644
--- a/lib/api/jobs.rb
+++ b/lib/api/jobs.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class Jobs < Grape::API
+ class Jobs < Grape::API::Instance
include PaginationParams
before { authenticate! }
@@ -160,7 +160,7 @@ module API
authorize!(:update_build, build)
break forbidden!('Job is not retryable') unless build.retryable?
- build = Ci::Build.retry(build, current_user)
+ build = ::Ci::Build.retry(build, current_user)
present build, with: Entities::Job
end
diff --git a/lib/api/keys.rb b/lib/api/keys.rb
index b730e027063..c014641ca04 100644
--- a/lib/api/keys.rb
+++ b/lib/api/keys.rb
@@ -2,7 +2,7 @@
module API
# Keys API
- class Keys < Grape::API
+ class Keys < Grape::API::Instance
before { authenticate! }
resource :keys do
diff --git a/lib/api/labels.rb b/lib/api/labels.rb
index 2b283d82e4a..edf4a8ca14e 100644
--- a/lib/api/labels.rb
+++ b/lib/api/labels.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class Labels < Grape::API
+ class Labels < Grape::API::Instance
include PaginationParams
helpers ::API::Helpers::LabelHelpers
diff --git a/lib/api/lint.rb b/lib/api/lint.rb
index a7672021db0..f7796b1e969 100644
--- a/lib/api/lint.rb
+++ b/lib/api/lint.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class Lint < Grape::API
+ class Lint < Grape::API::Instance
namespace :ci do
desc 'Validation of .gitlab-ci.yml content'
params do
diff --git a/lib/api/markdown.rb b/lib/api/markdown.rb
index de77bef43ce..a0822271cca 100644
--- a/lib/api/markdown.rb
+++ b/lib/api/markdown.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class Markdown < Grape::API
+ class Markdown < Grape::API::Instance
params do
requires :text, type: String, desc: "The markdown text to render"
optional :gfm, type: Boolean, desc: "Render text using GitLab Flavored Markdown"
diff --git a/lib/api/maven_packages.rb b/lib/api/maven_packages.rb
new file mode 100644
index 00000000000..32a45c59cfa
--- /dev/null
+++ b/lib/api/maven_packages.rb
@@ -0,0 +1,251 @@
+# frozen_string_literal: true
+module API
+ class MavenPackages < Grape::API::Instance
+ MAVEN_ENDPOINT_REQUIREMENTS = {
+ file_name: API::NO_SLASH_URL_PART_REGEX
+ }.freeze
+
+ content_type :md5, 'text/plain'
+ content_type :sha1, 'text/plain'
+ content_type :binary, 'application/octet-stream'
+
+ rescue_from ActiveRecord::RecordInvalid do |e|
+ render_api_error!(e.message, 400)
+ end
+
+ before do
+ require_packages_enabled!
+ authenticate_non_get!
+ end
+
+ helpers ::API::Helpers::PackagesHelpers
+
+ helpers do
+ def extract_format(file_name)
+ name, _, format = file_name.rpartition('.')
+
+ if %w(md5 sha1).include?(format)
+ [name, format]
+ else
+ [file_name, format]
+ end
+ end
+
+ def verify_package_file(package_file, uploaded_file)
+ stored_sha1 = Digest::SHA256.hexdigest(package_file.file_sha1)
+ expected_sha1 = uploaded_file.sha256
+
+ if stored_sha1 == expected_sha1
+ no_content!
+ else
+ conflict!
+ end
+ end
+
+ def find_project_by_path(path)
+ project_path = path.rpartition('/').first
+ Project.find_by_full_path(project_path)
+ end
+
+ def jar_file?(format)
+ format == 'jar'
+ end
+
+ def present_carrierwave_file_with_head_support!(file, supports_direct_download: true)
+ if head_request_on_aws_file?(file, supports_direct_download) && !file.file_storage?
+ return redirect(signed_head_url(file))
+ end
+
+ present_carrierwave_file!(file, supports_direct_download: supports_direct_download)
+ end
+
+ def signed_head_url(file)
+ fog_storage = ::Fog::Storage.new(file.fog_credentials)
+ fog_dir = fog_storage.directories.new(key: file.fog_directory)
+ fog_file = fog_dir.files.new(key: file.path)
+ expire_at = ::Fog::Time.now + file.fog_authenticated_url_expiration
+
+ fog_file.collection.head_url(fog_file.key, expire_at)
+ end
+
+ def head_request_on_aws_file?(file, supports_direct_download)
+ Gitlab.config.packages.object_store.enabled &&
+ supports_direct_download &&
+ file.class.direct_download_enabled? &&
+ request.head? &&
+ file.fog_credentials[:provider] == 'AWS'
+ end
+ end
+
+ desc 'Download the maven package file at instance level' do
+ detail 'This feature was introduced in GitLab 11.6'
+ end
+ params do
+ requires :path, type: String, desc: 'Package path'
+ requires :file_name, type: String, desc: 'Package file name'
+ end
+ route_setting :authentication, job_token_allowed: true, deploy_token_allowed: true
+ get 'packages/maven/*path/:file_name', requirements: MAVEN_ENDPOINT_REQUIREMENTS do
+ file_name, format = extract_format(params[:file_name])
+
+ # To avoid name collisions, we require the project path and the package name to be the same.
+ # For packages whose name differs from the project path, use
+ # the endpoint that includes the project id.
+ project = find_project_by_path(params[:path])
+
+ authorize_read_package!(project)
+
+ package = ::Packages::Maven::PackageFinder
+ .new(params[:path], current_user, project: project).execute!
+
+ package_file = ::Packages::PackageFileFinder
+ .new(package, file_name).execute!
+
+ case format
+ when 'md5'
+ package_file.file_md5
+ when 'sha1'
+ package_file.file_sha1
+ else
+ track_event('pull_package') if jar_file?(format)
+ present_carrierwave_file_with_head_support!(package_file.file)
+ end
+ end
+
+ desc 'Download the maven package file at a group level' do
+ detail 'This feature was introduced in GitLab 11.7'
+ end
+ params do
+ requires :id, type: String, desc: 'The ID of a group'
+ end
+ resource :groups, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
+ params do
+ requires :path, type: String, desc: 'Package path'
+ requires :file_name, type: String, desc: 'Package file name'
+ end
+ route_setting :authentication, job_token_allowed: true, deploy_token_allowed: true
+ get ':id/-/packages/maven/*path/:file_name', requirements: MAVEN_ENDPOINT_REQUIREMENTS do
+ file_name, format = extract_format(params[:file_name])
+
+ group = find_group(params[:id])
+
+ not_found!('Group') unless can?(current_user, :read_group, group)
+
+ package = ::Packages::Maven::PackageFinder
+ .new(params[:path], current_user, group: group).execute!
+
+ authorize_read_package!(package.project)
+
+ package_file = ::Packages::PackageFileFinder
+ .new(package, file_name).execute!
+
+ case format
+ when 'md5'
+ package_file.file_md5
+ when 'sha1'
+ package_file.file_sha1
+ else
+ track_event('pull_package') if jar_file?(format)
+
+ present_carrierwave_file_with_head_support!(package_file.file)
+ end
+ end
+ end
+
+ params do
+ requires :id, type: String, desc: 'The ID of a project'
+ end
+ resource :projects, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
+ desc 'Download the maven package file' do
+ detail 'This feature was introduced in GitLab 11.3'
+ end
+ params do
+ requires :path, type: String, desc: 'Package path'
+ requires :file_name, type: String, desc: 'Package file name'
+ end
+ route_setting :authentication, job_token_allowed: true, deploy_token_allowed: true
+ get ':id/packages/maven/*path/:file_name', requirements: MAVEN_ENDPOINT_REQUIREMENTS do
+ authorize_read_package!(user_project)
+
+ file_name, format = extract_format(params[:file_name])
+
+ package = ::Packages::Maven::PackageFinder
+ .new(params[:path], current_user, project: user_project).execute!
+
+ package_file = ::Packages::PackageFileFinder
+ .new(package, file_name).execute!
+
+ case format
+ when 'md5'
+ package_file.file_md5
+ when 'sha1'
+ package_file.file_sha1
+ else
+ track_event('pull_package') if jar_file?(format)
+
+ present_carrierwave_file_with_head_support!(package_file.file)
+ end
+ end
+
+ desc 'Workhorse authorize the maven package file upload' do
+ detail 'This feature was introduced in GitLab 11.3'
+ end
+ params do
+ requires :path, type: String, desc: 'Package path'
+ requires :file_name, type: String, desc: 'Package file name', regexp: Gitlab::Regex.maven_file_name_regex
+ end
+ route_setting :authentication, job_token_allowed: true, deploy_token_allowed: true
+ put ':id/packages/maven/*path/:file_name/authorize', requirements: MAVEN_ENDPOINT_REQUIREMENTS do
+ authorize_upload!
+
+ status 200
+ content_type Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE
+ ::Packages::PackageFileUploader.workhorse_authorize(has_length: true)
+ end
+
+ desc 'Upload the maven package file' do
+ detail 'This feature was introduced in GitLab 11.3'
+ end
+ params do
+ requires :path, type: String, desc: 'Package path'
+ requires :file_name, type: String, desc: 'Package file name', regexp: Gitlab::Regex.maven_file_name_regex
+ requires :file, type: ::API::Validations::Types::WorkhorseFile, desc: 'The package file to be published (generated by Multipart middleware)'
+ end
+ route_setting :authentication, job_token_allowed: true, deploy_token_allowed: true
+ put ':id/packages/maven/*path/:file_name', requirements: MAVEN_ENDPOINT_REQUIREMENTS do
+ authorize_upload!
+
+ file_name, format = extract_format(params[:file_name])
+
+ package = ::Packages::Maven::FindOrCreatePackageService
+ .new(user_project, current_user, params.merge(build: current_authenticated_job)).execute
+
+ case format
+ when 'sha1'
+ # After uploading a file, Maven tries to upload a sha1 and md5 version of it.
+ # Since we store the md5/sha1 in the database, we simply need to validate our hash
+ # against the one uploaded by Maven. We do this for the `sha1` format.
+ package_file = ::Packages::PackageFileFinder
+ .new(package, file_name).execute!
+
+ verify_package_file(package_file, params[:file])
+ when 'md5'
+ nil
+ else
+ track_event('push_package') if jar_file?(format)
+
+ file_params = {
+ file: params[:file],
+ size: params['file.size'],
+ file_name: file_name,
+ file_type: params['file.type'],
+ file_sha1: params['file.sha1'],
+ file_md5: params['file.md5']
+ }
+
+ ::Packages::CreatePackageFileService.new(package, file_params).execute
+ end
+ end
+ end
+ end
+end
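
For orientation, a rough client-side sketch of the project-level download route defined above; the project ID, artifact path, and token are placeholders. Appending .sha1 (or .md5) to the file name returns the stored checksum as plain text, mirroring the extract_format helper.

# Sketch: fetch a Maven artifact and its sha1 checksum from the project-level endpoint.
require 'net/http'
require 'uri'

base = 'https://gitlab.example.com/api/v4/projects/42/packages/maven'
path = 'com/example/app/1.0.0' # hypothetical groupId/artifactId/version path

def gitlab_get(url)
  uri = URI(url)
  request = Net::HTTP::Get.new(uri)
  request['PRIVATE-TOKEN'] = ENV.fetch('GITLAB_TOKEN') # assumed token
  Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) { |http| http.request(request) }
end

jar = gitlab_get("#{base}/#{path}/app-1.0.0.jar")      # served as application/octet-stream
sha = gitlab_get("#{base}/#{path}/app-1.0.0.jar.sha1") # served as text/plain
puts sha.body
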
diff --git a/lib/api/members.rb b/lib/api/members.rb
index 37d4ca29b68..4edf94c6350 100644
--- a/lib/api/members.rb
+++ b/lib/api/members.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class Members < Grape::API
+ class Members < Grape::API::Instance
include PaginationParams
before { authenticate! }
@@ -18,7 +18,7 @@ module API
end
params do
optional :query, type: String, desc: 'A query string to search for members'
- optional :user_ids, type: Array[Integer], desc: 'Array of user ids to look up for membership'
+ optional :user_ids, type: Array[Integer], coerce_with: ::API::Validations::Types::CommaSeparatedToIntegerArray.coerce, desc: 'Array of user ids to look up for membership'
optional :show_seat_info, type: Boolean, desc: 'Show seat information for members'
use :optional_filter_params_ee
use :pagination
@@ -37,7 +37,7 @@ module API
end
params do
optional :query, type: String, desc: 'A query string to search for members'
- optional :user_ids, type: Array[Integer], desc: 'Array of user ids to look up for membership'
+ optional :user_ids, type: Array[Integer], coerce_with: ::API::Validations::Types::CommaSeparatedToIntegerArray.coerce, desc: 'Array of user ids to look up for membership'
optional :show_seat_info, type: Boolean, desc: 'Show seat information for members'
use :pagination
end
@@ -107,7 +107,7 @@ module API
if !member
not_allowed! # This currently can only be reached in EE
- elsif member.persisted? && member.valid?
+ elsif member.valid? && member.persisted?
present_members(member)
else
render_validation_error!(member)
@@ -145,6 +145,8 @@ module API
desc 'Removes a user from a group or project.'
params do
requires :user_id, type: Integer, desc: 'The user ID of the member'
+ optional :unassign_issuables, type: Boolean, default: false,
+ desc: 'Flag indicating if the removed member should be unassigned from any issues or merge requests within the given group or project'
end
# rubocop: disable CodeReuse/ActiveRecord
delete ":id/members/:user_id" do
@@ -152,7 +154,7 @@ module API
member = source.members.find_by!(user_id: params[:user_id])
destroy_conditionally!(member) do
- ::Members::DestroyService.new(current_user).execute(member)
+ ::Members::DestroyService.new(current_user).execute(member, unassign_issuables: params[:unassign_issuables])
end
end
# rubocop: enable CodeReuse/ActiveRecord
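
A hedged sketch of the two parameter changes above: user_ids now accepts a comma-separated string, and the delete endpoint takes the new unassign_issuables flag. Host, IDs, and token are placeholders.

# Sketch: filter members by user_ids, then remove one member and unassign their issuables.
require 'net/http'
require 'uri'

token = ENV.fetch('GITLAB_TOKEN') # assumed token

list_uri = URI('https://gitlab.example.com/api/v4/projects/42/members')
list_uri.query = URI.encode_www_form('user_ids' => '10,11,12') # coerced by CommaSeparatedToIntegerArray
list_req = Net::HTTP::Get.new(list_uri)
list_req['PRIVATE-TOKEN'] = token

delete_uri = URI('https://gitlab.example.com/api/v4/projects/42/members/11?unassign_issuables=true')
delete_req = Net::HTTP::Delete.new(delete_uri)
delete_req['PRIVATE-TOKEN'] = token

[list_req, delete_req].each do |req|
  res = Net::HTTP.start(req.uri.hostname, req.uri.port, use_ssl: true) { |http| http.request(req) }
  puts "#{req.method} #{req.uri.request_uri} -> #{res.code}"
end
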
diff --git a/lib/api/merge_request_approvals.rb b/lib/api/merge_request_approvals.rb
new file mode 100644
index 00000000000..035ed9f0e04
--- /dev/null
+++ b/lib/api/merge_request_approvals.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+module API
+ class MergeRequestApprovals < ::Grape::API::Instance
+ before { authenticate_non_get! }
+
+ helpers do
+ params :ee_approval_params do
+ end
+
+ def present_approval(merge_request)
+ present merge_request, with: ::API::Entities::MergeRequestApprovals, current_user: current_user
+ end
+ end
+
+ resource :projects, requirements: ::API::API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
+ segment ':id/merge_requests/:merge_request_iid' do
+ # Get the status of the merge request's approvals
+ #
+ # Parameters:
+ # id (required) - The ID of a project
+ # merge_request_iid (required) - IID of MR
+ # Examples:
+ # GET /projects/:id/merge_requests/:merge_request_iid/approvals
+ desc 'List approvals for merge request'
+ get 'approvals' do
+ merge_request = find_merge_request_with_access(params[:merge_request_iid])
+
+ present_approval(merge_request)
+ end
+
+ # Approve a merge request
+ #
+ # Parameters:
+ # id (required) - The ID of a project
+ # merge_request_iid (required) - IID of MR
+ # Examples:
+ # POST /projects/:id/merge_requests/:merge_request_iid/approve
+ #
+ desc 'Approve a merge request'
+ params do
+ optional :sha, type: String, desc: 'When present, must have the HEAD SHA of the source branch'
+
+ use :ee_approval_params
+ end
+ post 'approve' do
+ merge_request = find_merge_request_with_access(params[:merge_request_iid], :approve_merge_request)
+
+ check_sha_param!(params, merge_request)
+
+ success =
+ ::MergeRequests::ApprovalService
+ .new(user_project, current_user, params)
+ .execute(merge_request)
+
+ unauthorized! unless success
+
+ present_approval(merge_request)
+ end
+
+ desc 'Remove an approval from a merge request'
+ post 'unapprove' do
+ merge_request = find_merge_request_with_access(params[:merge_request_iid], :approve_merge_request)
+
+ success = ::MergeRequests::RemoveApprovalService
+ .new(user_project, current_user)
+ .execute(merge_request)
+
+ not_found! unless success
+
+ present_approval(merge_request)
+ end
+ end
+ end
+ end
+end
+
+API::MergeRequestApprovals.prepend_if_ee('EE::API::MergeRequestApprovals')
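
A minimal sketch of the new approve endpoint, assuming a hypothetical project, merge request IID, and token; passing sha pins the approval to the expected head commit and is rejected with a conflict when it no longer matches.

# Sketch: approve a merge request, optionally pinning the approval to a head SHA.
require 'net/http'
require 'uri'

uri = URI('https://gitlab.example.com/api/v4/projects/42/merge_requests/5/approve')
request = Net::HTTP::Post.new(uri)
request['PRIVATE-TOKEN'] = ENV.fetch('GITLAB_TOKEN') # assumed token
request.set_form_data('sha' => 'f1a2b3c4') # placeholder; must equal the MR's diff head SHA

response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) { |http| http.request(request) }
puts response.code # 201 with the approvals entity, 401 when ApprovalService declines
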
diff --git a/lib/api/merge_request_diffs.rb b/lib/api/merge_request_diffs.rb
index 6ad30aa56e0..3e43fe8b257 100644
--- a/lib/api/merge_request_diffs.rb
+++ b/lib/api/merge_request_diffs.rb
@@ -2,7 +2,7 @@
module API
# MergeRequestDiff API
- class MergeRequestDiffs < Grape::API
+ class MergeRequestDiffs < Grape::API::Instance
include PaginationParams
before { authenticate! }
diff --git a/lib/api/merge_requests.rb b/lib/api/merge_requests.rb
index 773a451d3a8..2e6ac40a593 100644
--- a/lib/api/merge_requests.rb
+++ b/lib/api/merge_requests.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class MergeRequests < Grape::API
+ class MergeRequests < Grape::API::Instance
include PaginationParams
CONTEXT_COMMITS_POST_LIMIT = 20
@@ -44,7 +44,9 @@ module API
def find_merge_requests(args = {})
args = declared_params.merge(args)
args[:milestone_title] = args.delete(:milestone)
+ args[:not][:milestone_title] = args[:not]&.delete(:milestone)
args[:label_name] = args.delete(:labels)
+ args[:not][:label_name] = args[:not]&.delete(:labels)
args[:scope] = args[:scope].underscore if args[:scope]
merge_requests = MergeRequestsFinder.new(current_user, args).execute
@@ -60,16 +62,8 @@ module API
# rubocop: enable CodeReuse/ActiveRecord
def merge_request_pipelines_with_access
- authorize! :read_pipeline, user_project
-
mr = find_merge_request_with_access(params[:merge_request_iid])
- mr.all_pipelines
- end
-
- def check_sha_param!(params, merge_request)
- if params[:sha] && merge_request.diff_head_sha != params[:sha]
- render_api_error!("SHA does not match HEAD of source branch: #{merge_request.diff_head_sha}", 409)
- end
+ ::Ci::PipelinesForMergeRequestFinder.new(mr, current_user).execute
end
def automatically_mergeable?(merge_when_pipeline_succeeds, merge_request)
@@ -91,7 +85,6 @@ module API
if params[:view] == 'simple'
options[:with] = Entities::MergeRequestSimple
else
- options[:issuable_metadata] = Gitlab::IssuableMetadata.new(current_user, merge_requests).data
options[:skip_merge_status_recheck] = !declared_params[:with_merge_status_recheck]
end
@@ -179,11 +172,11 @@ module API
params :optional_params do
optional :description, type: String, desc: 'The description of the merge request'
optional :assignee_id, type: Integer, desc: 'The ID of a user to assign the merge request'
- optional :assignee_ids, type: Array[Integer], desc: 'The array of user IDs to assign issue'
+ optional :assignee_ids, type: Array[Integer], coerce_with: ::API::Validations::Types::CommaSeparatedToIntegerArray.coerce, desc: 'The array of user IDs to assign issue'
optional :milestone_id, type: Integer, desc: 'The ID of a milestone to assign the merge request'
- optional :labels, type: Array[String], coerce_with: Validations::Types::LabelsList.coerce, desc: 'Comma-separated list of label names'
- optional :add_labels, type: Array[String], coerce_with: Validations::Types::LabelsList.coerce, desc: 'Comma-separated list of label names'
- optional :remove_labels, type: Array[String], coerce_with: Validations::Types::LabelsList.coerce, desc: 'Comma-separated list of label names'
+ optional :labels, type: Array[String], coerce_with: Validations::Types::CommaSeparatedToArray.coerce, desc: 'Comma-separated list of label names'
+ optional :add_labels, type: Array[String], coerce_with: Validations::Types::CommaSeparatedToArray.coerce, desc: 'Comma-separated list of label names'
+ optional :remove_labels, type: Array[String], coerce_with: Validations::Types::CommaSeparatedToArray.coerce, desc: 'Comma-separated list of label names'
optional :remove_source_branch, type: Boolean, desc: 'Remove source branch when merging'
optional :allow_collaboration, type: Boolean, desc: 'Allow commits from members who can merge to the target branch'
optional :allow_maintainer_to_push, type: Boolean, as: :allow_collaboration, desc: '[deprecated] See allow_collaboration'
@@ -198,7 +191,7 @@ module API
end
params do
use :merge_requests_params
- optional :iids, type: Array[Integer], desc: 'The IID array of merge requests'
+ optional :iids, type: Array[Integer], coerce_with: ::API::Validations::Types::CommaSeparatedToIntegerArray.coerce, desc: 'The IID array of merge requests'
end
get ":id/merge_requests" do
authorize! :read_merge_request, user_project
@@ -315,7 +308,7 @@ module API
end
params do
- requires :commits, type: Array, allow_blank: false, desc: 'List of context commits sha'
+ requires :commits, type: Array[String], coerce_with: ::API::Validations::Types::CommaSeparatedToArray.coerce, allow_blank: false, desc: 'List of context commits sha'
end
desc 'create context commits of merge request' do
success Entities::Commit
@@ -345,7 +338,7 @@ module API
end
params do
- requires :commits, type: Array, allow_blank: false, desc: 'List of context commits sha'
+ requires :commits, type: Array[String], coerce_with: ::API::Validations::Types::CommaSeparatedToArray.coerce, allow_blank: false, desc: 'List of context commits sha'
end
desc 'remove context commits of merge request'
delete ':id/merge_requests/:merge_request_iid/context_commits' do
@@ -389,8 +382,6 @@ module API
success Entities::Pipeline
end
post ':id/merge_requests/:merge_request_iid/pipelines' do
- authorize! :create_pipeline, user_project
-
pipeline = ::MergeRequests::CreatePipelineService
.new(user_project, current_user, allow_duplicate: true)
.execute(find_merge_request_with_access(params[:merge_request_iid]))
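
To illustrate the coercion change on the list endpoint above, iids can now be sent as a single comma-separated value (host, project ID, and token are placeholders):

# Sketch: list specific merge requests by IID using the comma-separated form.
require 'net/http'
require 'json'
require 'uri'

uri = URI('https://gitlab.example.com/api/v4/projects/42/merge_requests')
uri.query = URI.encode_www_form('iids' => '3,4,5') # coerced by CommaSeparatedToIntegerArray
request = Net::HTTP::Get.new(uri)
request['PRIVATE-TOKEN'] = ENV.fetch('GITLAB_TOKEN') # assumed token

response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) { |http| http.request(request) }
JSON.parse(response.body).each { |mr| puts "!#{mr['iid']} #{mr['title']}" }
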
diff --git a/lib/api/metrics/dashboard/annotations.rb b/lib/api/metrics/dashboard/annotations.rb
index c8ec4d29657..e07762ac6d3 100644
--- a/lib/api/metrics/dashboard/annotations.rb
+++ b/lib/api/metrics/dashboard/annotations.rb
@@ -3,7 +3,7 @@
module API
module Metrics
module Dashboard
- class Annotations < Grape::API
+ class Annotations < Grape::API::Instance
desc 'Create a new monitoring dashboard annotation' do
success Entities::Metrics::Dashboard::Annotation
end
diff --git a/lib/api/metrics/user_starred_dashboards.rb b/lib/api/metrics/user_starred_dashboards.rb
index 85fc0f33ed8..263d2394276 100644
--- a/lib/api/metrics/user_starred_dashboards.rb
+++ b/lib/api/metrics/user_starred_dashboards.rb
@@ -2,7 +2,7 @@
module API
module Metrics
- class UserStarredDashboards < Grape::API
+ class UserStarredDashboards < Grape::API::Instance
resource :projects do
desc 'Marks selected metrics dashboard as starred' do
success Entities::Metrics::UserStarredDashboard
diff --git a/lib/api/milestone_responses.rb b/lib/api/milestone_responses.rb
index 62e159ab003..8ff885983bc 100644
--- a/lib/api/milestone_responses.rb
+++ b/lib/api/milestone_responses.rb
@@ -15,7 +15,7 @@ module API
params :list_params do
optional :state, type: String, values: %w[active closed all], default: 'all',
desc: 'Return "active", "closed", or "all" milestones'
- optional :iids, type: Array[Integer], desc: 'The IIDs of the milestones'
+ optional :iids, type: Array[Integer], coerce_with: ::API::Validations::Types::CommaSeparatedToIntegerArray.coerce, desc: 'The IIDs of the milestones'
optional :title, type: String, desc: 'The title of the milestones'
optional :search, type: String, desc: 'The search criteria for the title or description of the milestone'
use :pagination
diff --git a/lib/api/namespaces.rb b/lib/api/namespaces.rb
index e40a5dde7ce..e1f279df045 100644
--- a/lib/api/namespaces.rb
+++ b/lib/api/namespaces.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class Namespaces < Grape::API
+ class Namespaces < Grape::API::Instance
include PaginationParams
before { authenticate! }
diff --git a/lib/api/notes.rb b/lib/api/notes.rb
index 3eafc1ead77..bfd09dcd496 100644
--- a/lib/api/notes.rb
+++ b/lib/api/notes.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class Notes < Grape::API
+ class Notes < Grape::API::Instance
include PaginationParams
helpers ::API::Helpers::NotesHelpers
@@ -68,6 +68,7 @@ module API
params do
requires :noteable_id, type: Integer, desc: 'The ID of the noteable'
requires :body, type: String, desc: 'The content of a note'
+ optional :confidential, type: Boolean, desc: 'Confidentiality note flag, default is false'
optional :created_at, type: String, desc: 'The creation date of the note'
end
post ":id/#{noteables_str}/:noteable_id/notes" do
@@ -77,6 +78,7 @@ module API
note: params[:body],
noteable_type: noteables_str.classify,
noteable_id: noteable.id,
+ confidential: params[:confidential],
created_at: params[:created_at]
}
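
A sketch of the new confidential flag on note creation, using the issues variant of the generated noteable routes; the project ID, issue IID, and token are placeholders.

# Sketch: create a note and mark it confidential via the new optional parameter.
require 'net/http'
require 'uri'

uri = URI('https://gitlab.example.com/api/v4/projects/42/issues/7/notes')
request = Net::HTTP::Post.new(uri)
request['PRIVATE-TOKEN'] = ENV.fetch('GITLAB_TOKEN') # assumed token
request.set_form_data('body' => 'Internal triage note', 'confidential' => 'true')

response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) { |http| http.request(request) }
puts response.code
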
diff --git a/lib/api/notification_settings.rb b/lib/api/notification_settings.rb
index 8cb46bd3ad6..f8b621c1c38 100644
--- a/lib/api/notification_settings.rb
+++ b/lib/api/notification_settings.rb
@@ -2,7 +2,7 @@
module API
# notification_settings API
- class NotificationSettings < Grape::API
+ class NotificationSettings < Grape::API::Instance
before { authenticate! }
helpers ::API::Helpers::MembersHelpers
diff --git a/lib/api/npm_packages.rb b/lib/api/npm_packages.rb
new file mode 100644
index 00000000000..21ca57b7985
--- /dev/null
+++ b/lib/api/npm_packages.rb
@@ -0,0 +1,173 @@
+# frozen_string_literal: true
+module API
+ class NpmPackages < Grape::API::Instance
+ helpers ::API::Helpers::PackagesHelpers
+ helpers ::API::Helpers::Packages::DependencyProxyHelpers
+
+ NPM_ENDPOINT_REQUIREMENTS = {
+ package_name: API::NO_SLASH_URL_PART_REGEX
+ }.freeze
+
+ rescue_from ActiveRecord::RecordInvalid do |e|
+ render_api_error!(e.message, 400)
+ end
+
+ before do
+ require_packages_enabled!
+ authenticate_non_get!
+ end
+
+ helpers do
+ def project_by_package_name
+ strong_memoize(:project_by_package_name) do
+ ::Packages::Package.npm.with_name(params[:package_name]).first&.project
+ end
+ end
+ end
+
+ desc 'Get all tags for a given NPM package' do
+ detail 'This feature was introduced in GitLab 12.7'
+ success ::API::Entities::NpmPackageTag
+ end
+ params do
+ requires :package_name, type: String, desc: 'Package name'
+ end
+ get 'packages/npm/-/package/*package_name/dist-tags', format: false, requirements: NPM_ENDPOINT_REQUIREMENTS do
+ package_name = params[:package_name]
+
+ bad_request!('Package Name') if package_name.blank?
+
+ authorize_read_package!(project_by_package_name)
+
+ packages = ::Packages::Npm::PackageFinder.new(project_by_package_name, package_name)
+ .execute
+
+ present ::Packages::Npm::PackagePresenter.new(package_name, packages),
+ with: ::API::Entities::NpmPackageTag
+ end
+
+ params do
+ requires :package_name, type: String, desc: 'Package name'
+ requires :tag, type: String, desc: "Package dist-tag"
+ end
+ namespace 'packages/npm/-/package/*package_name/dist-tags/:tag', requirements: NPM_ENDPOINT_REQUIREMENTS do
+ desc 'Create or Update the given tag for the given NPM package and version' do
+ detail 'This feature was introduced in GitLab 12.7'
+ end
+ put format: false do
+ package_name = params[:package_name]
+ version = env['api.request.body']
+ tag = params[:tag]
+
+ bad_request!('Package Name') if package_name.blank?
+ bad_request!('Version') if version.blank?
+ bad_request!('Tag') if tag.blank?
+
+ authorize_create_package!(project_by_package_name)
+
+ package = ::Packages::Npm::PackageFinder
+ .new(project_by_package_name, package_name)
+ .find_by_version(version)
+ not_found!('Package') unless package
+
+ ::Packages::Npm::CreateTagService.new(package, tag).execute
+
+ no_content!
+ end
+
+ desc 'Deletes the given tag' do
+ detail 'This feature was introduced in GitLab 12.7'
+ end
+ delete format: false do
+ package_name = params[:package_name]
+ tag = params[:tag]
+
+ bad_request!('Package Name') if package_name.blank?
+ bad_request!('Tag') if tag.blank?
+
+ authorize_destroy_package!(project_by_package_name)
+
+ package_tag = ::Packages::TagsFinder
+ .new(project_by_package_name, package_name, package_type: :npm)
+ .find_by_name(tag)
+
+ not_found!('Package tag') unless package_tag
+
+ ::Packages::RemoveTagService.new(package_tag).execute
+
+ no_content!
+ end
+ end
+
+ desc 'NPM registry endpoint at instance level' do
+ detail 'This feature was introduced in GitLab 11.8'
+ end
+ params do
+ requires :package_name, type: String, desc: 'Package name'
+ end
+ route_setting :authentication, job_token_allowed: true, deploy_token_allowed: true
+ get 'packages/npm/*package_name', format: false, requirements: NPM_ENDPOINT_REQUIREMENTS do
+ package_name = params[:package_name]
+
+ redirect_registry_request(project_by_package_name.blank?, :npm, package_name: package_name) do
+ authorize_read_package!(project_by_package_name)
+
+ packages = ::Packages::Npm::PackageFinder
+ .new(project_by_package_name, package_name).execute
+
+ present ::Packages::Npm::PackagePresenter.new(package_name, packages),
+ with: ::API::Entities::NpmPackage
+ end
+ end
+
+ params do
+ requires :id, type: String, desc: 'The ID of a project'
+ end
+ resource :projects, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
+ desc 'Download the NPM tarball' do
+ detail 'This feature was introduced in GitLab 11.8'
+ end
+ params do
+ requires :package_name, type: String, desc: 'Package name'
+ requires :file_name, type: String, desc: 'Package file name'
+ end
+ route_setting :authentication, job_token_allowed: true, deploy_token_allowed: true
+ get ':id/packages/npm/*package_name/-/*file_name', format: false do
+ authorize_read_package!(user_project)
+
+ package = user_project.packages.npm
+ .by_name_and_file_name(params[:package_name], params[:file_name])
+
+ package_file = ::Packages::PackageFileFinder
+ .new(package, params[:file_name]).execute!
+
+ track_event('pull_package')
+
+ present_carrierwave_file!(package_file.file)
+ end
+
+ desc 'Create NPM package' do
+ detail 'This feature was introduced in GitLab 11.8'
+ end
+ params do
+ requires :package_name, type: String, desc: 'Package name'
+ requires :versions, type: Hash, desc: 'Package version info'
+ end
+ route_setting :authentication, job_token_allowed: true, deploy_token_allowed: true
+ put ':id/packages/npm/:package_name', requirements: NPM_ENDPOINT_REQUIREMENTS do
+ authorize_create_package!(user_project)
+
+ track_event('push_package')
+
+ created_package = ::Packages::Npm::CreatePackageService
+ .new(user_project, current_user, params.merge(build: current_authenticated_job)).execute
+
+ if created_package[:status] == :error
+ render_api_error!(created_package[:message], created_package[:http_status])
+ else
+ created_package
+ end
+ end
+ end
+ end
+end
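
A rough sketch of the instance-level dist-tags endpoint above; the package name is a placeholder and is URL-encoded the way the npm client sends it, and the token is assumed.

# Sketch: read dist-tags for an npm package through the instance-level endpoint.
require 'net/http'
require 'json'
require 'uri'

package = URI.encode_www_form_component('@myscope/mypkg') # hypothetical scoped package name
uri = URI("https://gitlab.example.com/api/v4/packages/npm/-/package/#{package}/dist-tags")
request = Net::HTTP::Get.new(uri)
request['PRIVATE-TOKEN'] = ENV.fetch('GITLAB_TOKEN') # assumed token

response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) { |http| http.request(request) }
puts JSON.parse(response.body) # e.g. a tag-to-version map such as {"latest"=>"1.2.3"}
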
diff --git a/lib/api/nuget_packages.rb b/lib/api/nuget_packages.rb
new file mode 100644
index 00000000000..eb7d320a0f5
--- /dev/null
+++ b/lib/api/nuget_packages.rb
@@ -0,0 +1,221 @@
+# frozen_string_literal: true
+
+# NuGet Package Manager Client API
+#
+# These API endpoints are not meant to be consumed directly by users. They are
+# called by the NuGet package manager client when users run commands
+# like `nuget install` or `nuget push`.
+module API
+ class NugetPackages < Grape::API::Instance
+ helpers ::API::Helpers::PackagesManagerClientsHelpers
+ helpers ::API::Helpers::Packages::BasicAuthHelpers
+
+ POSITIVE_INTEGER_REGEX = %r{\A[1-9]\d*\z}.freeze
+ NON_NEGATIVE_INTEGER_REGEX = %r{\A0|[1-9]\d*\z}.freeze
+
+ PACKAGE_FILENAME = 'package.nupkg'
+
+ default_format :json
+
+ rescue_from ArgumentError do |e|
+ render_api_error!(e.message, 400)
+ end
+
+ helpers do
+ def find_packages
+ packages = package_finder.execute
+
+ not_found!('Packages') unless packages.exists?
+
+ packages
+ end
+
+ def find_package
+ package = package_finder(package_version: params[:package_version]).execute
+ .first
+
+ not_found!('Package') unless package
+
+ package
+ end
+
+ def package_finder(finder_params = {})
+ ::Packages::Nuget::PackageFinder.new(
+ authorized_user_project,
+ finder_params.merge(package_name: params[:package_name])
+ )
+ end
+ end
+
+ before do
+ require_packages_enabled!
+ end
+
+ params do
+ requires :id, type: String, desc: 'The ID of a project', regexp: POSITIVE_INTEGER_REGEX
+ end
+ route_setting :authentication, deploy_token_allowed: true
+ resource :projects, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
+ before do
+ authorized_user_project
+ end
+
+ namespace ':id/packages/nuget' do
+ # https://docs.microsoft.com/en-us/nuget/api/service-index
+ desc 'The NuGet Service Index' do
+ detail 'This feature was introduced in GitLab 12.6'
+ end
+ route_setting :authentication, deploy_token_allowed: true
+ get 'index', format: :json do
+ authorize_read_package!(authorized_user_project)
+
+ track_event('nuget_service_index')
+
+ present ::Packages::Nuget::ServiceIndexPresenter.new(authorized_user_project),
+ with: ::API::Entities::Nuget::ServiceIndex
+ end
+
+ # https://docs.microsoft.com/en-us/nuget/api/package-publish-resource
+ desc 'The NuGet Package Publish endpoint' do
+ detail 'This feature was introduced in GitLab 12.6'
+ end
+ params do
+ requires :package, type: ::API::Validations::Types::WorkhorseFile, desc: 'The package file to be published (generated by Multipart middleware)'
+ end
+ route_setting :authentication, deploy_token_allowed: true
+ put do
+ authorize_upload!(authorized_user_project)
+
+ file_params = params.merge(
+ file: params[:package],
+ file_name: PACKAGE_FILENAME
+ )
+
+ package = ::Packages::Nuget::CreatePackageService.new(authorized_user_project, current_user)
+ .execute
+
+ package_file = ::Packages::CreatePackageFileService.new(package, file_params)
+ .execute
+
+ track_event('push_package')
+
+ ::Packages::Nuget::ExtractionWorker.perform_async(package_file.id) # rubocop:disable CodeReuse/Worker
+
+ created!
+ rescue ObjectStorage::RemoteStoreError => e
+ Gitlab::ErrorTracking.track_exception(e, extra: { file_name: params[:file_name], project_id: authorized_user_project.id })
+
+ forbidden!
+ end
+ route_setting :authentication, deploy_token_allowed: true
+ put 'authorize' do
+ authorize_workhorse!(subject: authorized_user_project, has_length: false)
+ end
+
+ params do
+ requires :package_name, type: String, desc: 'The NuGet package name', regexp: API::NO_SLASH_URL_PART_REGEX
+ end
+ namespace '/metadata/*package_name' do
+ before do
+ authorize_read_package!(authorized_user_project)
+ end
+
+ # https://docs.microsoft.com/en-us/nuget/api/registration-base-url-resource
+ desc 'The NuGet Metadata Service - Package name level' do
+ detail 'This feature was introduced in GitLab 12.8'
+ end
+ route_setting :authentication, deploy_token_allowed: true
+ get 'index', format: :json do
+ present ::Packages::Nuget::PackagesMetadataPresenter.new(find_packages),
+ with: ::API::Entities::Nuget::PackagesMetadata
+ end
+
+ desc 'The NuGet Metadata Service - Package name and version level' do
+ detail 'This feature was introduced in GitLab 12.8'
+ end
+ params do
+ requires :package_version, type: String, desc: 'The NuGet package version', regexp: API::NO_SLASH_URL_PART_REGEX
+ end
+ route_setting :authentication, deploy_token_allowed: true
+ get '*package_version', format: :json do
+ present ::Packages::Nuget::PackageMetadataPresenter.new(find_package),
+ with: ::API::Entities::Nuget::PackageMetadata
+ end
+ end
+
+ # https://docs.microsoft.com/en-us/nuget/api/package-base-address-resource
+ params do
+ requires :package_name, type: String, desc: 'The NuGet package name', regexp: API::NO_SLASH_URL_PART_REGEX
+ end
+ namespace '/download/*package_name' do
+ before do
+ authorize_read_package!(authorized_user_project)
+ end
+
+ desc 'The NuGet Content Service - index request' do
+ detail 'This feature was introduced in GitLab 12.8'
+ end
+ route_setting :authentication, deploy_token_allowed: true
+ get 'index', format: :json do
+ present ::Packages::Nuget::PackagesVersionsPresenter.new(find_packages),
+ with: ::API::Entities::Nuget::PackagesVersions
+ end
+
+ desc 'The NuGet Content Service - content request' do
+ detail 'This feature was introduced in GitLab 12.8'
+ end
+ params do
+ requires :package_version, type: String, desc: 'The NuGet package version', regexp: API::NO_SLASH_URL_PART_REGEX
+ requires :package_filename, type: String, desc: 'The NuGet package filename', regexp: API::NO_SLASH_URL_PART_REGEX
+ end
+ route_setting :authentication, deploy_token_allowed: true
+ get '*package_version/*package_filename', format: :nupkg do
+ filename = "#{params[:package_filename]}.#{params[:format]}"
+ package_file = ::Packages::PackageFileFinder.new(find_package, filename, with_file_name_like: true)
+ .execute
+
+ not_found!('Package') unless package_file
+
+ track_event('pull_package')
+
+ # nuget and dotnet don't support 302 Moved status codes, so supports_direct_download has to be set to false
+ present_carrierwave_file!(package_file.file, supports_direct_download: false)
+ end
+ end
+
+ params do
+ requires :q, type: String, desc: 'The search term'
+ optional :skip, type: Integer, desc: 'The number of results to skip', default: 0, regexp: NON_NEGATIVE_INTEGER_REGEX
+ optional :take, type: Integer, desc: 'The number of results to return', default: Kaminari.config.default_per_page, regexp: POSITIVE_INTEGER_REGEX
+ optional :prerelease, type: Boolean, desc: 'Include prerelease versions', default: true
+ end
+ namespace '/query' do
+ before do
+ authorize_read_package!(authorized_user_project)
+ end
+
+ # https://docs.microsoft.com/en-us/nuget/api/search-query-service-resource
+ desc 'The NuGet Search Service' do
+ detail 'This feature was introduced in GitLab 12.8'
+ end
+ route_setting :authentication, deploy_token_allowed: true
+ get format: :json do
+ search_options = {
+ include_prerelease_versions: params[:prerelease],
+ per_page: params[:take],
+ padding: params[:skip]
+ }
+ search = Packages::Nuget::SearchService
+ .new(authorized_user_project, params[:q], search_options)
+ .execute
+
+ track_event('search_package')
+
+ present ::Packages::Nuget::SearchResultsPresenter.new(search),
+ with: ::API::Entities::Nuget::SearchResults
+ end
+ end
+ end
+ end
+ end
+end
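
A hedged sketch of two of the NuGet routes above: the service index (the index.json URL used when registering a project feed) and the search service. Project ID, query term, and token are placeholders.

# Sketch: fetch the NuGet service index and run a search against a project feed.
require 'net/http'
require 'json'
require 'uri'

token = ENV.fetch('GITLAB_TOKEN') # assumed token
base  = 'https://gitlab.example.com/api/v4/projects/42/packages/nuget'

index_uri = URI("#{base}/index.json")
index_req = Net::HTTP::Get.new(index_uri)
index_req['PRIVATE-TOKEN'] = token

query_uri = URI("#{base}/query")
query_uri.query = URI.encode_www_form('q' => 'logger', 'take' => 5, 'prerelease' => 'false')
query_req = Net::HTTP::Get.new(query_uri)
query_req['PRIVATE-TOKEN'] = token

[index_req, query_req].each do |req|
  res = Net::HTTP.start(req.uri.hostname, req.uri.port, use_ssl: true) { |http| http.request(req) }
  puts "#{req.uri.request_uri} -> #{res.code}"
  puts JSON.parse(res.body).keys
end
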
diff --git a/lib/api/package_files.rb b/lib/api/package_files.rb
new file mode 100644
index 00000000000..17b92df629c
--- /dev/null
+++ b/lib/api/package_files.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+module API
+ class PackageFiles < Grape::API::Instance
+ include PaginationParams
+
+ before do
+ authorize_packages_access!(user_project)
+ end
+
+ helpers ::API::Helpers::PackagesHelpers
+
+ params do
+ requires :id, type: String, desc: 'The ID of a project'
+ requires :package_id, type: Integer, desc: 'The ID of a package'
+ end
+ resource :projects, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
+ desc 'Get all package files' do
+ detail 'This feature was introduced in GitLab 11.8'
+ success ::API::Entities::PackageFile
+ end
+ params do
+ use :pagination
+ end
+ get ':id/packages/:package_id/package_files' do
+ package = ::Packages::PackageFinder
+ .new(user_project, params[:package_id]).execute
+
+ present paginate(package.package_files), with: ::API::Entities::PackageFile
+ end
+ end
+ end
+end
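
As a short usage sketch of the listing above, the endpoint honours the standard page/per_page parameters pulled in by use :pagination; IDs and token are placeholders.

# Sketch: page through the files attached to a package.
require 'net/http'
require 'json'
require 'uri'

uri = URI('https://gitlab.example.com/api/v4/projects/42/packages/3/package_files')
uri.query = URI.encode_www_form('per_page' => 20, 'page' => 1)
request = Net::HTTP::Get.new(uri)
request['PRIVATE-TOKEN'] = ENV.fetch('GITLAB_TOKEN') # assumed token

response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) { |http| http.request(request) }
JSON.parse(response.body).each { |file| puts file } # each entry is a PackageFile entity
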
diff --git a/lib/api/pages.rb b/lib/api/pages.rb
index ee7fe669519..79a6b527581 100644
--- a/lib/api/pages.rb
+++ b/lib/api/pages.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class Pages < Grape::API
+ class Pages < Grape::API::Instance
before do
require_pages_config_enabled!
authenticated_with_can_read_all_resources!
diff --git a/lib/api/pages_domains.rb b/lib/api/pages_domains.rb
index 4c3d2d131ac..7d27b575efa 100644
--- a/lib/api/pages_domains.rb
+++ b/lib/api/pages_domains.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class PagesDomains < Grape::API
+ class PagesDomains < Grape::API::Instance
include PaginationParams
PAGES_DOMAINS_ENDPOINT_REQUIREMENTS = API::NAMESPACE_OR_PROJECT_REQUIREMENTS.merge(domain: API::NO_SLASH_URL_PART_REGEX)
diff --git a/lib/api/pagination_params.rb b/lib/api/pagination_params.rb
index ae03595eb25..a232b58d3f7 100644
--- a/lib/api/pagination_params.rb
+++ b/lib/api/pagination_params.rb
@@ -4,7 +4,7 @@ module API
# Concern for declaring pagination params.
#
# @example
- # class CustomApiResource < Grape::API
+ # class CustomApiResource < Grape::API::Instance
# include PaginationParams
#
# params do
diff --git a/lib/api/pipeline_schedules.rb b/lib/api/pipeline_schedules.rb
deleted file mode 100644
index edc99590cdb..00000000000
--- a/lib/api/pipeline_schedules.rb
+++ /dev/null
@@ -1,215 +0,0 @@
-# frozen_string_literal: true
-
-module API
- class PipelineSchedules < Grape::API
- include PaginationParams
-
- before { authenticate! }
-
- params do
- requires :id, type: String, desc: 'The ID of a project'
- end
- resource :projects, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
- desc 'Get all pipeline schedules' do
- success Entities::PipelineSchedule
- end
- params do
- use :pagination
- optional :scope, type: String, values: %w[active inactive],
- desc: 'The scope of pipeline schedules'
- end
- # rubocop: disable CodeReuse/ActiveRecord
- get ':id/pipeline_schedules' do
- authorize! :read_pipeline_schedule, user_project
-
- schedules = Ci::PipelineSchedulesFinder.new(user_project).execute(scope: params[:scope])
- .preload([:owner, :last_pipeline])
- present paginate(schedules), with: Entities::PipelineSchedule
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- desc 'Get a single pipeline schedule' do
- success Entities::PipelineScheduleDetails
- end
- params do
- requires :pipeline_schedule_id, type: Integer, desc: 'The pipeline schedule id'
- end
- get ':id/pipeline_schedules/:pipeline_schedule_id' do
- present pipeline_schedule, with: Entities::PipelineScheduleDetails
- end
-
- desc 'Create a new pipeline schedule' do
- success Entities::PipelineScheduleDetails
- end
- params do
- requires :description, type: String, desc: 'The description of pipeline schedule'
- requires :ref, type: String, desc: 'The branch/tag name will be triggered', allow_blank: false
- requires :cron, type: String, desc: 'The cron'
- optional :cron_timezone, type: String, default: 'UTC', desc: 'The timezone'
- optional :active, type: Boolean, default: true, desc: 'The activation of pipeline schedule'
- end
- post ':id/pipeline_schedules' do
- authorize! :create_pipeline_schedule, user_project
-
- pipeline_schedule = Ci::CreatePipelineScheduleService
- .new(user_project, current_user, declared_params(include_missing: false))
- .execute
-
- if pipeline_schedule.persisted?
- present pipeline_schedule, with: Entities::PipelineScheduleDetails
- else
- render_validation_error!(pipeline_schedule)
- end
- end
-
- desc 'Edit a pipeline schedule' do
- success Entities::PipelineScheduleDetails
- end
- params do
- requires :pipeline_schedule_id, type: Integer, desc: 'The pipeline schedule id'
- optional :description, type: String, desc: 'The description of pipeline schedule'
- optional :ref, type: String, desc: 'The branch/tag name will be triggered'
- optional :cron, type: String, desc: 'The cron'
- optional :cron_timezone, type: String, desc: 'The timezone'
- optional :active, type: Boolean, desc: 'The activation of pipeline schedule'
- end
- put ':id/pipeline_schedules/:pipeline_schedule_id' do
- authorize! :update_pipeline_schedule, pipeline_schedule
-
- if pipeline_schedule.update(declared_params(include_missing: false))
- present pipeline_schedule, with: Entities::PipelineScheduleDetails
- else
- render_validation_error!(pipeline_schedule)
- end
- end
-
- desc 'Take ownership of a pipeline schedule' do
- success Entities::PipelineScheduleDetails
- end
- params do
- requires :pipeline_schedule_id, type: Integer, desc: 'The pipeline schedule id'
- end
- post ':id/pipeline_schedules/:pipeline_schedule_id/take_ownership' do
- authorize! :update_pipeline_schedule, pipeline_schedule
-
- if pipeline_schedule.own!(current_user)
- present pipeline_schedule, with: Entities::PipelineScheduleDetails
- else
- render_validation_error!(pipeline_schedule)
- end
- end
-
- desc 'Delete a pipeline schedule' do
- success Entities::PipelineScheduleDetails
- end
- params do
- requires :pipeline_schedule_id, type: Integer, desc: 'The pipeline schedule id'
- end
- delete ':id/pipeline_schedules/:pipeline_schedule_id' do
- authorize! :admin_pipeline_schedule, pipeline_schedule
-
- destroy_conditionally!(pipeline_schedule)
- end
-
- desc 'Play a scheduled pipeline immediately' do
- detail 'This feature was added in GitLab 12.8'
- end
- params do
- requires :pipeline_schedule_id, type: Integer, desc: 'The pipeline schedule id'
- end
- post ':id/pipeline_schedules/:pipeline_schedule_id/play' do
- authorize! :play_pipeline_schedule, pipeline_schedule
-
- job_id = RunPipelineScheduleWorker # rubocop:disable CodeReuse/Worker
- .perform_async(pipeline_schedule.id, current_user.id)
-
- if job_id
- created!
- else
- render_api_error!('Unable to schedule pipeline run immediately', 500)
- end
- end
-
- desc 'Create a new pipeline schedule variable' do
- success Entities::Variable
- end
- params do
- requires :pipeline_schedule_id, type: Integer, desc: 'The pipeline schedule id'
- requires :key, type: String, desc: 'The key of the variable'
- requires :value, type: String, desc: 'The value of the variable'
- optional :variable_type, type: String, values: Ci::PipelineScheduleVariable.variable_types.keys, desc: 'The type of variable, must be one of env_var or file. Defaults to env_var'
- end
- post ':id/pipeline_schedules/:pipeline_schedule_id/variables' do
- authorize! :update_pipeline_schedule, pipeline_schedule
-
- variable_params = declared_params(include_missing: false)
- variable = pipeline_schedule.variables.create(variable_params)
- if variable.persisted?
- present variable, with: Entities::Variable
- else
- render_validation_error!(variable)
- end
- end
-
- desc 'Edit a pipeline schedule variable' do
- success Entities::Variable
- end
- params do
- requires :pipeline_schedule_id, type: Integer, desc: 'The pipeline schedule id'
- requires :key, type: String, desc: 'The key of the variable'
- optional :value, type: String, desc: 'The value of the variable'
- optional :variable_type, type: String, values: Ci::PipelineScheduleVariable.variable_types.keys, desc: 'The type of variable, must be one of env_var or file'
- end
- put ':id/pipeline_schedules/:pipeline_schedule_id/variables/:key' do
- authorize! :update_pipeline_schedule, pipeline_schedule
-
- if pipeline_schedule_variable.update(declared_params(include_missing: false))
- present pipeline_schedule_variable, with: Entities::Variable
- else
- render_validation_error!(pipeline_schedule_variable)
- end
- end
-
- desc 'Delete a pipeline schedule variable' do
- success Entities::Variable
- end
- params do
- requires :pipeline_schedule_id, type: Integer, desc: 'The pipeline schedule id'
- requires :key, type: String, desc: 'The key of the variable'
- end
- delete ':id/pipeline_schedules/:pipeline_schedule_id/variables/:key' do
- authorize! :admin_pipeline_schedule, pipeline_schedule
-
- status :accepted
- present pipeline_schedule_variable.destroy, with: Entities::Variable
- end
- end
-
- helpers do
- # rubocop: disable CodeReuse/ActiveRecord
- def pipeline_schedule
- @pipeline_schedule ||=
- user_project
- .pipeline_schedules
- .preload(:owner, :last_pipeline)
- .find_by(id: params.delete(:pipeline_schedule_id)).tap do |pipeline_schedule|
- unless can?(current_user, :read_pipeline_schedule, pipeline_schedule)
- not_found!('Pipeline Schedule')
- end
- end
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- # rubocop: disable CodeReuse/ActiveRecord
- def pipeline_schedule_variable
- @pipeline_schedule_variable ||=
- pipeline_schedule.variables.find_by(key: params[:key]).tap do |pipeline_schedule_variable|
- unless pipeline_schedule_variable
- not_found!('Pipeline Schedule Variable')
- end
- end
- end
- # rubocop: enable CodeReuse/ActiveRecord
- end
- end
-end
diff --git a/lib/api/pipelines.rb b/lib/api/pipelines.rb
deleted file mode 100644
index c09bca26a41..00000000000
--- a/lib/api/pipelines.rb
+++ /dev/null
@@ -1,187 +0,0 @@
-# frozen_string_literal: true
-
-module API
- class Pipelines < Grape::API
- include PaginationParams
-
- before { authenticate_non_get! }
-
- params do
- requires :id, type: String, desc: 'The project ID'
- end
- resource :projects, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
- desc 'Get all Pipelines of the project' do
- detail 'This feature was introduced in GitLab 8.11.'
- success Entities::PipelineBasic
- end
- params do
- use :pagination
- optional :scope, type: String, values: %w[running pending finished branches tags],
- desc: 'The scope of pipelines'
- optional :status, type: String, values: HasStatus::AVAILABLE_STATUSES,
- desc: 'The status of pipelines'
- optional :ref, type: String, desc: 'The ref of pipelines'
- optional :sha, type: String, desc: 'The sha of pipelines'
- optional :yaml_errors, type: Boolean, desc: 'Returns pipelines with invalid configurations'
- optional :name, type: String, desc: 'The name of the user who triggered pipelines'
- optional :username, type: String, desc: 'The username of the user who triggered pipelines'
- optional :updated_before, type: DateTime, desc: 'Return pipelines updated before the specified datetime. Format: ISO 8601 YYYY-MM-DDTHH:MM:SSZ'
- optional :updated_after, type: DateTime, desc: 'Return pipelines updated after the specified datetime. Format: ISO 8601 YYYY-MM-DDTHH:MM:SSZ'
- optional :order_by, type: String, values: Ci::PipelinesFinder::ALLOWED_INDEXED_COLUMNS, default: 'id',
- desc: 'Order pipelines'
- optional :sort, type: String, values: %w[asc desc], default: 'desc',
- desc: 'Sort pipelines'
- end
- get ':id/pipelines' do
- authorize! :read_pipeline, user_project
- authorize! :read_build, user_project
-
- pipelines = Ci::PipelinesFinder.new(user_project, current_user, params).execute
- present paginate(pipelines), with: Entities::PipelineBasic
- end
-
- desc 'Create a new pipeline' do
- detail 'This feature was introduced in GitLab 8.14'
- success Entities::Pipeline
- end
- params do
- requires :ref, type: String, desc: 'Reference'
- optional :variables, Array, desc: 'Array of variables available in the pipeline'
- end
- post ':id/pipeline' do
- Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-foss/issues/42124')
-
- authorize! :create_pipeline, user_project
-
- pipeline_params = declared_params(include_missing: false)
- .merge(variables_attributes: params[:variables])
- .except(:variables)
-
- new_pipeline = Ci::CreatePipelineService.new(user_project,
- current_user,
- pipeline_params)
- .execute(:api, ignore_skip_ci: true, save_on_errors: false)
-
- if new_pipeline.persisted?
- present new_pipeline, with: Entities::Pipeline
- else
- render_validation_error!(new_pipeline)
- end
- end
-
- desc 'Gets a the latest pipeline for the project branch' do
- detail 'This feature was introduced in GitLab 12.3'
- success Entities::Pipeline
- end
- params do
- optional :ref, type: String, desc: 'branch ref of pipeline'
- end
- get ':id/pipelines/latest' do
- authorize! :read_pipeline, latest_pipeline
-
- present latest_pipeline, with: Entities::Pipeline
- end
-
- desc 'Gets a specific pipeline for the project' do
- detail 'This feature was introduced in GitLab 8.11'
- success Entities::Pipeline
- end
- params do
- requires :pipeline_id, type: Integer, desc: 'The pipeline ID'
- end
- get ':id/pipelines/:pipeline_id' do
- authorize! :read_pipeline, pipeline
-
- present pipeline, with: Entities::Pipeline
- end
-
- desc 'Gets the variables for a given pipeline' do
- detail 'This feature was introduced in GitLab 11.11'
- success Entities::Variable
- end
- params do
- requires :pipeline_id, type: Integer, desc: 'The pipeline ID'
- end
- get ':id/pipelines/:pipeline_id/variables' do
- authorize! :read_pipeline_variable, pipeline
-
- present pipeline.variables, with: Entities::Variable
- end
-
- desc 'Gets the test report for a given pipeline' do
- detail 'This feature was introduced in GitLab 13.0. Disabled by default behind feature flag `junit_pipeline_view`'
- success TestReportEntity
- end
- params do
- requires :pipeline_id, type: Integer, desc: 'The pipeline ID'
- end
- get ':id/pipelines/:pipeline_id/test_report' do
- not_found! unless Feature.enabled?(:junit_pipeline_view, user_project)
-
- authorize! :read_build, pipeline
-
- present pipeline.test_reports, with: TestReportEntity
- end
-
- desc 'Deletes a pipeline' do
- detail 'This feature was introduced in GitLab 11.6'
- http_codes [[204, 'Pipeline was deleted'], [403, 'Forbidden']]
- end
- params do
- requires :pipeline_id, type: Integer, desc: 'The pipeline ID'
- end
- delete ':id/pipelines/:pipeline_id' do
- authorize! :destroy_pipeline, pipeline
-
- destroy_conditionally!(pipeline) do
- ::Ci::DestroyPipelineService.new(user_project, current_user).execute(pipeline)
- end
- end
-
- desc 'Retry builds in the pipeline' do
- detail 'This feature was introduced in GitLab 8.11.'
- success Entities::Pipeline
- end
- params do
- requires :pipeline_id, type: Integer, desc: 'The pipeline ID'
- end
- post ':id/pipelines/:pipeline_id/retry' do
- authorize! :update_pipeline, pipeline
-
- pipeline.retry_failed(current_user)
-
- present pipeline, with: Entities::Pipeline
- end
-
- desc 'Cancel all builds in the pipeline' do
- detail 'This feature was introduced in GitLab 8.11.'
- success Entities::Pipeline
- end
- params do
- requires :pipeline_id, type: Integer, desc: 'The pipeline ID'
- end
- post ':id/pipelines/:pipeline_id/cancel' do
- authorize! :update_pipeline, pipeline
-
- pipeline.cancel_running
-
- status 200
- present pipeline.reset, with: Entities::Pipeline
- end
- end
-
- helpers do
- def pipeline
- strong_memoize(:pipeline) do
- user_project.ci_pipelines.find(params[:pipeline_id])
- end
- end
-
- def latest_pipeline
- strong_memoize(:latest_pipeline) do
- user_project.latest_pipeline_for_ref(params[:ref])
- end
- end
- end
- end
-end
diff --git a/lib/api/project_clusters.rb b/lib/api/project_clusters.rb
index 299301aabc4..0e5605984e6 100644
--- a/lib/api/project_clusters.rb
+++ b/lib/api/project_clusters.rb
@@ -1,23 +1,11 @@
# frozen_string_literal: true
module API
- class ProjectClusters < Grape::API
+ class ProjectClusters < Grape::API::Instance
include PaginationParams
before { authenticate! }
- # EE::API::ProjectClusters will
- # override these methods
- helpers do
- params :create_params_ee do
- end
-
- params :update_params_ee do
- end
- end
-
- prepend_if_ee('EE::API::ProjectClusters') # rubocop: disable Cop/InjectEnterpriseEditionModule
-
params do
requires :id, type: String, desc: 'The ID of the project'
end
@@ -56,6 +44,7 @@ module API
requires :name, type: String, desc: 'Cluster name'
optional :enabled, type: Boolean, default: true, desc: 'Determines if cluster is active or not, defaults to true'
optional :domain, type: String, desc: 'Cluster base domain'
+ optional :environment_scope, default: '*', type: String, desc: 'The associated environment to the cluster'
optional :management_project_id, type: Integer, desc: 'The ID of the management project'
optional :managed, type: Boolean, default: true, desc: 'Determines if GitLab will manage namespaces and service accounts for this cluster, defaults to true'
requires :platform_kubernetes_attributes, type: Hash, desc: %q(Platform Kubernetes data) do
@@ -65,7 +54,6 @@ module API
optional :namespace, type: String, desc: 'Unique namespace related to Project'
optional :authorization_type, type: String, values: ::Clusters::Platforms::Kubernetes.authorization_types.keys, default: 'rbac', desc: 'Cluster authorization type, defaults to RBAC'
end
- use :create_params_ee
end
post ':id/clusters/user' do
authorize! :add_cluster, user_project
@@ -89,6 +77,7 @@ module API
requires :cluster_id, type: Integer, desc: 'The cluster ID'
optional :name, type: String, desc: 'Cluster name'
optional :domain, type: String, desc: 'Cluster base domain'
+ optional :environment_scope, type: String, desc: 'The associated environment to the cluster'
optional :management_project_id, type: Integer, desc: 'The ID of the management project'
optional :platform_kubernetes_attributes, type: Hash, desc: %q(Platform Kubernetes data) do
optional :api_url, type: String, desc: 'URL to access the Kubernetes API'
@@ -96,7 +85,6 @@ module API
optional :ca_cert, type: String, desc: 'TLS certificate (needed if API is using a self-signed TLS certificate)'
optional :namespace, type: String, desc: 'Unique namespace related to Project'
end
- use :update_params_ee
end
put ':id/clusters/:cluster_id' do
authorize! :update_cluster, cluster
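
A minimal sketch of the new environment_scope parameter on the cluster update endpoint above; the project ID, cluster ID, scope value, and token are placeholders.

# Sketch: restrict an existing cluster to review environments via environment_scope.
require 'net/http'
require 'uri'

uri = URI('https://gitlab.example.com/api/v4/projects/42/clusters/9')
request = Net::HTTP::Put.new(uri)
request['PRIVATE-TOKEN'] = ENV.fetch('GITLAB_TOKEN') # assumed token
request.set_form_data('environment_scope' => 'review/*', 'domain' => 'apps.example.com')

response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) { |http| http.request(request) }
puts response.code
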
diff --git a/lib/api/project_container_repositories.rb b/lib/api/project_container_repositories.rb
index 2a0099018d9..8f2a62bc5a4 100644
--- a/lib/api/project_container_repositories.rb
+++ b/lib/api/project_container_repositories.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class ProjectContainerRepositories < Grape::API
+ class ProjectContainerRepositories < Grape::API::Instance
include PaginationParams
REPOSITORY_ENDPOINT_REQUIREMENTS = API::NAMESPACE_OR_PROJECT_REQUIREMENTS.merge(
diff --git a/lib/api/project_events.rb b/lib/api/project_events.rb
index 734311e1142..726e693826e 100644
--- a/lib/api/project_events.rb
+++ b/lib/api/project_events.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class ProjectEvents < Grape::API
+ class ProjectEvents < Grape::API::Instance
include PaginationParams
include APIGuard
helpers ::API::Helpers::EventsHelpers
diff --git a/lib/api/project_export.rb b/lib/api/project_export.rb
index 4b35f245b8c..d11c47f8d78 100644
--- a/lib/api/project_export.rb
+++ b/lib/api/project_export.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class ProjectExport < Grape::API
+ class ProjectExport < Grape::API::Instance
helpers Helpers::RateLimiter
before do
diff --git a/lib/api/project_hooks.rb b/lib/api/project_hooks.rb
index 0e7576c9243..7cea44e6304 100644
--- a/lib/api/project_hooks.rb
+++ b/lib/api/project_hooks.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class ProjectHooks < Grape::API
+ class ProjectHooks < Grape::API::Instance
include PaginationParams
before { authenticate! }
diff --git a/lib/api/project_import.rb b/lib/api/project_import.rb
index 17d08d14a20..9f43c3c7993 100644
--- a/lib/api/project_import.rb
+++ b/lib/api/project_import.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class ProjectImport < Grape::API
+ class ProjectImport < Grape::API::Instance
include PaginationParams
MAXIMUM_FILE_SIZE = 50.megabytes
diff --git a/lib/api/project_milestones.rb b/lib/api/project_milestones.rb
index 8643854a655..2f8dd1085dc 100644
--- a/lib/api/project_milestones.rb
+++ b/lib/api/project_milestones.rb
@@ -1,13 +1,11 @@
# frozen_string_literal: true
module API
- class ProjectMilestones < Grape::API
+ class ProjectMilestones < Grape::API::Instance
include PaginationParams
include MilestoneResponses
- before do
- authenticate!
- end
+ before { authenticate! }
params do
requires :id, type: String, desc: 'The ID of a project'
diff --git a/lib/api/project_packages.rb b/lib/api/project_packages.rb
new file mode 100644
index 00000000000..359514f1f78
--- /dev/null
+++ b/lib/api/project_packages.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+module API
+ class ProjectPackages < Grape::API::Instance
+ include PaginationParams
+
+ before do
+ authorize_packages_access!(user_project)
+ end
+
+ helpers ::API::Helpers::PackagesHelpers
+
+ params do
+ requires :id, type: String, desc: 'The ID of a project'
+ end
+ resource :projects, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
+ desc 'Get all project packages' do
+ detail 'This feature was introduced in GitLab 11.8'
+ success ::API::Entities::Package
+ end
+ params do
+ use :pagination
+ optional :order_by, type: String, values: %w[created_at name version type], default: 'created_at',
+ desc: 'Return packages ordered by `created_at`, `name`, `version` or `type` fields.'
+ optional :sort, type: String, values: %w[asc desc], default: 'asc',
+ desc: 'Return packages sorted in `asc` or `desc` order.'
+ optional :package_type, type: String, values: Packages::Package.package_types.keys,
+ desc: 'Return packages of a certain type'
+ optional :package_name, type: String,
+ desc: 'Return packages with this name'
+ end
+ get ':id/packages' do
+ packages = ::Packages::PackagesFinder.new(
+ user_project,
+ declared_params.slice(:order_by, :sort, :package_type, :package_name)
+ ).execute
+
+ present paginate(packages), with: ::API::Entities::Package, user: current_user
+ end
+
+ desc 'Get a single project package' do
+ detail 'This feature was introduced in GitLab 11.9'
+ success ::API::Entities::Package
+ end
+ params do
+ requires :package_id, type: Integer, desc: 'The ID of a package'
+ end
+ get ':id/packages/:package_id' do
+ package = ::Packages::PackageFinder
+ .new(user_project, params[:package_id]).execute
+
+ present package, with: ::API::Entities::Package, user: current_user
+ end
+
+ desc 'Remove a package' do
+ detail 'This feature was introduced in GitLab 11.9'
+ end
+ params do
+ requires :package_id, type: Integer, desc: 'The ID of a package'
+ end
+ delete ':id/packages/:package_id' do
+ authorize_destroy_package!(user_project)
+
+ package = ::Packages::PackageFinder
+ .new(user_project, params[:package_id]).execute
+
+ destroy_conditionally!(package)
+ end
+ end
+ end
+end
diff --git a/lib/api/project_repository_storage_moves.rb b/lib/api/project_repository_storage_moves.rb
index 5de623102fb..c318907542b 100644
--- a/lib/api/project_repository_storage_moves.rb
+++ b/lib/api/project_repository_storage_moves.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class ProjectRepositoryStorageMoves < Grape::API
+ class ProjectRepositoryStorageMoves < Grape::API::Instance
include PaginationParams
before { authenticated_as_admin! }
diff --git a/lib/api/project_snapshots.rb b/lib/api/project_snapshots.rb
index 175fbb2ce92..360000861fc 100644
--- a/lib/api/project_snapshots.rb
+++ b/lib/api/project_snapshots.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class ProjectSnapshots < Grape::API
+ class ProjectSnapshots < Grape::API::Instance
helpers ::API::Helpers::ProjectSnapshotsHelpers
before { authorize_read_git_snapshot! }
diff --git a/lib/api/project_snippets.rb b/lib/api/project_snippets.rb
index 68f4a0dcb65..09934502e85 100644
--- a/lib/api/project_snippets.rb
+++ b/lib/api/project_snippets.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class ProjectSnippets < Grape::API
+ class ProjectSnippets < Grape::API::Instance
include PaginationParams
before { authenticate! }
@@ -37,7 +37,7 @@ module API
use :pagination
end
get ":id/snippets" do
- present paginate(snippets_for_current_user), with: Entities::ProjectSnippet
+ present paginate(snippets_for_current_user), with: Entities::ProjectSnippet, current_user: current_user
end
desc 'Get a single project snippet' do
@@ -48,7 +48,7 @@ module API
end
get ":id/snippets/:snippet_id" do
snippet = snippets_for_current_user.find(params[:snippet_id])
- present snippet, with: Entities::ProjectSnippet
+ present snippet, with: Entities::ProjectSnippet, current_user: current_user
end
desc 'Create a new project snippet' do
@@ -71,7 +71,7 @@ module API
snippet = service_response.payload[:snippet]
if service_response.success?
- present snippet, with: Entities::ProjectSnippet
+ present snippet, with: Entities::ProjectSnippet, current_user: current_user
else
render_spam_error! if snippet.spam?
@@ -107,7 +107,7 @@ module API
snippet = service_response.payload[:snippet]
if service_response.success?
- present snippet, with: Entities::ProjectSnippet
+ present snippet, with: Entities::ProjectSnippet, current_user: current_user
else
render_spam_error! if snippet.spam?
@@ -147,10 +147,19 @@ module API
snippet = snippets_for_current_user.find_by(id: params[:snippet_id])
not_found!('Snippet') unless snippet
- env['api.format'] = :txt
- content_type 'text/plain'
present content_for(snippet)
end
+
+ desc 'Get raw project snippet file contents from the repository'
+ params do
+ use :raw_file_params
+ end
+ get ":id/snippets/:snippet_id/files/:ref/:file_path/raw", requirements: { file_path: API::NO_SLASH_URL_PART_REGEX } do
+ snippet = snippets_for_current_user.find_by(id: params[:snippet_id])
+ not_found!('Snippet') unless snippet&.repo_exists?
+
+ present file_content_for(snippet)
+ end
# rubocop: enable CodeReuse/ActiveRecord
desc 'Get the user agent details for a project snippet' do
diff --git a/lib/api/project_statistics.rb b/lib/api/project_statistics.rb
index 14ee0f75513..2196801096f 100644
--- a/lib/api/project_statistics.rb
+++ b/lib/api/project_statistics.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class ProjectStatistics < Grape::API
+ class ProjectStatistics < Grape::API::Instance
before do
authenticate!
authorize! :daily_statistics, user_project
diff --git a/lib/api/project_templates.rb b/lib/api/project_templates.rb
index cfcc7f5212d..f0fe4d85c8f 100644
--- a/lib/api/project_templates.rb
+++ b/lib/api/project_templates.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class ProjectTemplates < Grape::API
+ class ProjectTemplates < Grape::API::Instance
include PaginationParams
TEMPLATE_TYPES = %w[dockerfiles gitignores gitlab_ci_ymls licenses].freeze
diff --git a/lib/api/projects.rb b/lib/api/projects.rb
index e00fb61f478..d24dab63bd9 100644
--- a/lib/api/projects.rb
+++ b/lib/api/projects.rb
@@ -3,7 +3,7 @@
require_dependency 'declarative_policy'
module API
- class Projects < Grape::API
+ class Projects < Grape::API::Instance
include PaginationParams
include Helpers::CustomAttributes
@@ -17,6 +17,7 @@ module API
projects = projects.with_issues_available_for_user(current_user) if params[:with_issues_enabled]
projects = projects.with_merge_requests_enabled if params[:with_merge_requests_enabled]
projects = projects.with_statistics if params[:statistics]
+ projects = projects.joins(:statistics) if params[:order_by].include?('project_statistics') # rubocop: disable CodeReuse/ActiveRecord
lang = params[:with_programming_language]
projects = projects.with_programming_language(lang) if lang
@@ -28,6 +29,20 @@ module API
attrs.delete(:repository_storage) unless can?(current_user, :change_repository_storage, project)
end
+ def verify_project_filters!(attrs)
+ attrs.delete(:repository_storage) unless can?(current_user, :use_project_statistics_filters)
+ end
+
+ def verify_statistics_order_by_projects!
+ return unless Helpers::ProjectsHelpers::STATISTICS_SORT_PARAMS.include?(params[:order_by])
+
+ params[:order_by] = if can?(current_user, :use_project_statistics_filters)
+ "project_statistics.#{params[:order_by]}"
+ else
+ route.params['order_by'][:default]
+ end
+ end
+
def delete_project(user_project)
destroy_conditionally!(user_project) do
::Projects::DestroyService.new(user_project, current_user, {}).async_execute
@@ -52,8 +67,9 @@ module API
end
params :sort_params do
- optional :order_by, type: String, values: %w[id name path created_at updated_at last_activity_at],
- default: 'created_at', desc: 'Return projects ordered by field'
+ optional :order_by, type: String,
+ values: %w[id name path created_at updated_at last_activity_at] + Helpers::ProjectsHelpers::STATISTICS_SORT_PARAMS,
+ default: 'created_at', desc: "Return projects ordered by field. #{Helpers::ProjectsHelpers::STATISTICS_SORT_PARAMS.join(', ')} are only available to admins."
optional :sort, type: String, values: %w[asc desc], default: 'desc',
desc: 'Return projects sorted in ascending and descending order'
end
@@ -75,6 +91,7 @@ module API
optional :id_before, type: Integer, desc: 'Limit results to projects with IDs less than the specified ID'
optional :last_activity_after, type: DateTime, desc: 'Limit results to projects with last_activity after specified time. Format: ISO 8601 YYYY-MM-DDTHH:MM:SSZ'
optional :last_activity_before, type: DateTime, desc: 'Limit results to projects with last_activity before specified time. Format: ISO 8601 YYYY-MM-DDTHH:MM:SSZ'
+ optional :repository_storage, type: String, desc: 'Which storage shard the repository is on. Available only to admins'
use :optional_filter_params_ee
end
@@ -88,10 +105,15 @@ module API
end
def load_projects
- ProjectsFinder.new(current_user: current_user, params: project_finder_params).execute
+ params = project_finder_params
+ verify_project_filters!(params)
+
+ ProjectsFinder.new(current_user: current_user, params: params).execute
end
def present_projects(projects, options = {})
+ verify_statistics_order_by_projects!
+
projects = reorder_projects(projects)
projects = apply_filters(projects)
@@ -524,7 +546,7 @@ module API
end
params do
optional :search, type: String, desc: 'Return list of users matching the search criteria'
- optional :skip_users, type: Array[Integer], desc: 'Filter out users with the specified IDs'
+ optional :skip_users, type: Array[Integer], coerce_with: ::API::Validations::Types::CommaSeparatedToIntegerArray.coerce, desc: 'Filter out users with the specified IDs'
use :pagination
end
get ':id/users' do
diff --git a/lib/api/projects_relation_builder.rb b/lib/api/projects_relation_builder.rb
index 263468c9aa6..6dfd82d109f 100644
--- a/lib/api/projects_relation_builder.rb
+++ b/lib/api/projects_relation_builder.rb
@@ -8,6 +8,10 @@ module API
def prepare_relation(projects_relation, options = {})
projects_relation = preload_relation(projects_relation, options)
execute_batch_counting(projects_relation)
+ # Call the forks count method on every project so that BatchLoader loads them all at
+ # once when the entities are rendered
+ projects_relation.each(&:forks_count)
+
projects_relation
end
@@ -19,16 +23,11 @@ module API
projects_relation
end
- def batch_forks_counting(projects_relation)
- ::Projects::BatchForksCountService.new(forks_counting_projects(projects_relation)).refresh_cache
- end
-
def batch_open_issues_counting(projects_relation)
::Projects::BatchOpenIssuesCountService.new(projects_relation).refresh_cache
end
def execute_batch_counting(projects_relation)
- batch_forks_counting(projects_relation)
batch_open_issues_counting(projects_relation)
end
end
diff --git a/lib/api/protected_branches.rb b/lib/api/protected_branches.rb
index 1fd86d1e720..b0a7f898eec 100644
--- a/lib/api/protected_branches.rb
+++ b/lib/api/protected_branches.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class ProtectedBranches < Grape::API
+ class ProtectedBranches < Grape::API::Instance
include PaginationParams
BRANCH_ENDPOINT_REQUIREMENTS = API::NAMESPACE_OR_PROJECT_REQUIREMENTS.merge(name: API::NO_SLASH_URL_PART_REGEX)
diff --git a/lib/api/protected_tags.rb b/lib/api/protected_tags.rb
index ee13473c848..aaa31cb7cc6 100644
--- a/lib/api/protected_tags.rb
+++ b/lib/api/protected_tags.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class ProtectedTags < Grape::API
+ class ProtectedTags < Grape::API::Instance
include PaginationParams
TAG_ENDPOINT_REQUIREMENTS = API::NAMESPACE_OR_PROJECT_REQUIREMENTS.merge(name: API::NO_SLASH_URL_PART_REGEX)
diff --git a/lib/api/pypi_packages.rb b/lib/api/pypi_packages.rb
new file mode 100644
index 00000000000..a6caacd7df8
--- /dev/null
+++ b/lib/api/pypi_packages.rb
@@ -0,0 +1,148 @@
+# frozen_string_literal: true
+
+# PyPI Package Manager Client API
+#
+# These API endpoints are not meant to be consumed directly by users. They are
+# called by the PyPI package manager client when users run commands
+# like `pip install` or `twine upload`.
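+# Illustrative client usage (host, project ID and credentials are placeholders):
+#   pip install --index-url https://gitlab.example.com/api/v4/projects/<project_id>/packages/pypi/simple <package_name>
+#   twine upload --repository-url https://gitlab.example.com/api/v4/projects/<project_id>/packages/pypi dist/*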
+module API
+ class PypiPackages < Grape::API::Instance
+ helpers ::API::Helpers::PackagesManagerClientsHelpers
+ helpers ::API::Helpers::RelatedResourcesHelpers
+ helpers ::API::Helpers::Packages::BasicAuthHelpers
+ include ::API::Helpers::Packages::BasicAuthHelpers::Constants
+
+ default_format :json
+
+ rescue_from ArgumentError do |e|
+ render_api_error!(e.message, 400)
+ end
+
+ rescue_from ActiveRecord::RecordInvalid do |e|
+ render_api_error!(e.message, 400)
+ end
+
+ rescue_from ActiveRecord::RecordInvalid do |e|
+ render_api_error!(e.message, 400)
+ end
+
+ helpers do
+ def packages_finder(project = authorized_user_project)
+ project
+ .packages
+ .pypi
+ .has_version
+ .processed
+ end
+
+ def find_package_versions
+ packages = packages_finder
+ .with_name(params[:package_name])
+
+ not_found!('Package') if packages.empty?
+
+ packages
+ end
+ end
+
+ before do
+ require_packages_enabled!
+ end
+
+ params do
+ requires :id, type: Integer, desc: 'The ID of a project'
+ end
+
+ resource :projects, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
+ before do
+ unauthorized_user_project!
+ end
+
+ namespace ':id/packages/pypi' do
+ desc 'The PyPi package download endpoint' do
+ detail 'This feature was introduced in GitLab 12.10'
+ end
+
+ params do
+ requires :file_identifier, type: String, desc: 'The PyPi package file identifier', file_path: true
+ requires :sha256, type: String, desc: 'The PyPi package sha256 check sum'
+ end
+
+ route_setting :authentication, deploy_token_allowed: true
+ get 'files/:sha256/*file_identifier' do
+ project = unauthorized_user_project!
+
+ filename = "#{params[:file_identifier]}.#{params[:format]}"
+ package = packages_finder(project).by_file_name_and_sha256(filename, params[:sha256])
+ package_file = ::Packages::PackageFileFinder.new(package, filename, with_file_name_like: false).execute
+
+ track_event('pull_package')
+
+ present_carrierwave_file!(package_file.file, supports_direct_download: true)
+ end
+
+ desc 'The PyPi Simple Endpoint' do
+ detail 'This feature was introduced in GitLab 12.10'
+ end
+
+ params do
+ requires :package_name, type: String, file_path: true, desc: 'The PyPi package name'
+ end
+
+ # An API entry point, but it returns an HTML file instead of JSON:
+ # the PyPI Simple API serves the package descriptor as a simple HTML page.
+ route_setting :authentication, deploy_token_allowed: true
+ get 'simple/*package_name', format: :txt do
+ authorize_read_package!(authorized_user_project)
+
+ track_event('list_package')
+
+ packages = find_package_versions
+ presenter = ::Packages::Pypi::PackagePresenter.new(packages, authorized_user_project)
+
+ # Adjust the Grape output format
+ # so the response body is rendered as HTML
+ content_type "text/html; charset=utf-8"
+ env['api.format'] = :binary
+
+ body presenter.body
+ end
+
+ desc 'The PyPi Package upload endpoint' do
+ detail 'This feature was introduced in GitLab 12.10'
+ end
+
+ params do
+ requires :content, type: ::API::Validations::Types::WorkhorseFile, desc: 'The package file to be published (generated by Multipart middleware)'
+ requires :requires_python, type: String
+ requires :name, type: String
+ requires :version, type: String
+ optional :md5_digest, type: String
+ optional :sha256_digest, type: String
+ end
+
+ route_setting :authentication, deploy_token_allowed: true
+ post do
+ authorize_upload!(authorized_user_project)
+
+ track_event('push_package')
+
+ ::Packages::Pypi::CreatePackageService
+ .new(authorized_user_project, current_user, declared_params)
+ .execute
+
+ created!
+ rescue ObjectStorage::RemoteStoreError => e
+ Gitlab::ErrorTracking.track_exception(e, extra: { file_name: params[:name], project_id: authorized_user_project.id })
+
+ forbidden!
+ end
+
+ route_setting :authentication, deploy_token_allowed: true
+ post 'authorize' do
+ authorize_workhorse!(subject: authorized_user_project, has_length: false)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/api/release/links.rb b/lib/api/release/links.rb
index 07c27f39539..7e1815480a5 100644
--- a/lib/api/release/links.rb
+++ b/lib/api/release/links.rb
@@ -2,7 +2,7 @@
module API
module Release
- class Links < Grape::API
+ class Links < Grape::API::Instance
include PaginationParams
RELEASE_ENDPOINT_REQUIREMENTS = API::NAMESPACE_OR_PROJECT_REQUIREMENTS
diff --git a/lib/api/releases.rb b/lib/api/releases.rb
index a5bb1a44f1f..30c5e06053e 100644
--- a/lib/api/releases.rb
+++ b/lib/api/releases.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class Releases < Grape::API
+ class Releases < Grape::API::Instance
include PaginationParams
RELEASE_ENDPOINT_REQUIREMENTS = API::NAMESPACE_OR_PROJECT_REQUIREMENTS
@@ -54,7 +54,7 @@ module API
requires :url, type: String
end
end
- optional :milestones, type: Array, desc: 'The titles of the related milestones', default: []
+ optional :milestones, type: Array[String], coerce_with: ::API::Validations::Types::CommaSeparatedToArray.coerce, desc: 'The titles of the related milestones', default: []
optional :released_at, type: DateTime, desc: 'The date when the release will be/was ready. Defaults to the current time.'
end
route_setting :authentication, job_token_allowed: true
diff --git a/lib/api/remote_mirrors.rb b/lib/api/remote_mirrors.rb
index 0808541d3c7..d1def05808b 100644
--- a/lib/api/remote_mirrors.rb
+++ b/lib/api/remote_mirrors.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class RemoteMirrors < Grape::API
+ class RemoteMirrors < Grape::API::Instance
include PaginationParams
before do
diff --git a/lib/api/repositories.rb b/lib/api/repositories.rb
index bf4f08ce390..81702f8f02a 100644
--- a/lib/api/repositories.rb
+++ b/lib/api/repositories.rb
@@ -3,7 +3,7 @@
require 'mime/types'
module API
- class Repositories < Grape::API
+ class Repositories < Grape::API::Instance
include PaginationParams
helpers ::API::Helpers::HeadersHelpers
@@ -143,7 +143,7 @@ module API
success Entities::Commit
end
params do
- requires :refs, type: Array[String]
+ requires :refs, type: Array[String], coerce_with: ::API::Validations::Types::CommaSeparatedToArray.coerce
end
get ':id/repository/merge_base' do
refs = params[:refs]
diff --git a/lib/api/resource_label_events.rb b/lib/api/resource_label_events.rb
index 1fa6898b92c..a8d3419528c 100644
--- a/lib/api/resource_label_events.rb
+++ b/lib/api/resource_label_events.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class ResourceLabelEvents < Grape::API
+ class ResourceLabelEvents < Grape::API::Instance
include PaginationParams
helpers ::API::Helpers::NotesHelpers
diff --git a/lib/api/resource_milestone_events.rb b/lib/api/resource_milestone_events.rb
index 30ff5a9b4be..a8f221f8740 100644
--- a/lib/api/resource_milestone_events.rb
+++ b/lib/api/resource_milestone_events.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class ResourceMilestoneEvents < Grape::API
+ class ResourceMilestoneEvents < Grape::API::Instance
include PaginationParams
helpers ::API::Helpers::NotesHelpers
@@ -26,8 +26,7 @@ module API
get ":id/#{eventables_str}/:eventable_id/resource_milestone_events" do
eventable = find_noteable(eventable_type, params[:eventable_id])
- opts = { page: params[:page], per_page: params[:per_page] }
- events = ResourceMilestoneEventFinder.new(current_user, eventable, opts).execute
+ events = ResourceMilestoneEventFinder.new(current_user, eventable).execute
present paginate(events), with: Entities::ResourceMilestoneEvent
end
diff --git a/lib/api/resource_state_events.rb b/lib/api/resource_state_events.rb
new file mode 100644
index 00000000000..1c1a90c09a3
--- /dev/null
+++ b/lib/api/resource_state_events.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+module API
+ class ResourceStateEvents < Grape::API::Instance
+ include PaginationParams
+ helpers ::API::Helpers::NotesHelpers
+
+ before { authenticate! }
+
+ [Issue, MergeRequest].each do |eventable_class|
+ eventable_name = eventable_class.to_s.underscore
+
+ params do
+ requires :id, type: String, desc: "The ID of a project"
+ end
+ resource :projects, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
+ desc "Get a list of #{eventable_class.to_s.downcase} resource state events" do
+ success Entities::ResourceStateEvent
+ end
+ params do
+ requires :eventable_iid, types: Integer, desc: "The IID of the #{eventable_name}"
+ use :pagination
+ end
+
+ get ":id/#{eventable_name.pluralize}/:eventable_iid/resource_state_events" do
+ eventable = find_noteable(eventable_class, params[:eventable_iid])
+
+ events = ResourceStateEventFinder.new(current_user, eventable).execute
+
+ present paginate(events), with: Entities::ResourceStateEvent
+ end
+
+ desc "Get a single #{eventable_class.to_s.downcase} resource state event" do
+ success Entities::ResourceStateEvent
+ end
+ params do
+ requires :eventable_iid, types: Integer, desc: "The IID of the #{eventable_name}"
+ requires :event_id, type: Integer, desc: 'The ID of a resource state event'
+ end
+ get ":id/#{eventable_name.pluralize}/:eventable_iid/resource_state_events/:event_id" do
+ eventable = find_noteable(eventable_class, params[:eventable_iid])
+
+ event = ResourceStateEventFinder.new(current_user, eventable).find(params[:event_id])
+
+ present event, with: Entities::ResourceStateEvent
+ end
+ end
+ end
+ end
+end
diff --git a/lib/api/runner.rb b/lib/api/runner.rb
deleted file mode 100644
index 5f08ebe4a06..00000000000
--- a/lib/api/runner.rb
+++ /dev/null
@@ -1,297 +0,0 @@
-# frozen_string_literal: true
-
-module API
- class Runner < Grape::API
- helpers ::API::Helpers::Runner
-
- resource :runners do
- desc 'Registers a new Runner' do
- success Entities::RunnerRegistrationDetails
- http_codes [[201, 'Runner was created'], [403, 'Forbidden']]
- end
- params do
- requires :token, type: String, desc: 'Registration token'
- optional :description, type: String, desc: %q(Runner's description)
- optional :info, type: Hash, desc: %q(Runner's metadata)
- optional :active, type: Boolean, desc: 'Should Runner be active'
- optional :locked, type: Boolean, desc: 'Should Runner be locked for current project'
- optional :access_level, type: String, values: Ci::Runner.access_levels.keys,
- desc: 'The access_level of the runner'
- optional :run_untagged, type: Boolean, desc: 'Should Runner handle untagged jobs'
- optional :tag_list, type: Array[String], desc: %q(List of Runner's tags)
- optional :maximum_timeout, type: Integer, desc: 'Maximum timeout set when this Runner will handle the job'
- end
- post '/' do
- attributes = attributes_for_keys([:description, :active, :locked, :run_untagged, :tag_list, :access_level, :maximum_timeout])
- .merge(get_runner_details_from_request)
-
- attributes =
- if runner_registration_token_valid?
- # Create shared runner. Requires admin access
- attributes.merge(runner_type: :instance_type)
- elsif project = Project.find_by_runners_token(params[:token])
- # Create a specific runner for the project
- attributes.merge(runner_type: :project_type, projects: [project])
- elsif group = Group.find_by_runners_token(params[:token])
- # Create a specific runner for the group
- attributes.merge(runner_type: :group_type, groups: [group])
- else
- forbidden!
- end
-
- runner = Ci::Runner.create(attributes)
-
- if runner.persisted?
- present runner, with: Entities::RunnerRegistrationDetails
- else
- render_validation_error!(runner)
- end
- end
-
- desc 'Deletes a registered Runner' do
- http_codes [[204, 'Runner was deleted'], [403, 'Forbidden']]
- end
- params do
- requires :token, type: String, desc: %q(Runner's authentication token)
- end
- delete '/' do
- authenticate_runner!
-
- runner = Ci::Runner.find_by_token(params[:token])
-
- destroy_conditionally!(runner)
- end
-
- desc 'Validates authentication credentials' do
- http_codes [[200, 'Credentials are valid'], [403, 'Forbidden']]
- end
- params do
- requires :token, type: String, desc: %q(Runner's authentication token)
- end
- post '/verify' do
- authenticate_runner!
- status 200
- end
- end
-
- resource :jobs do
- before do
- Gitlab::ApplicationContext.push(
- user: -> { current_job&.user },
- project: -> { current_job&.project }
- )
- end
-
- desc 'Request a job' do
- success Entities::JobRequest::Response
- http_codes [[201, 'Job was scheduled'],
- [204, 'No job for Runner'],
- [403, 'Forbidden']]
- end
- params do
- requires :token, type: String, desc: %q(Runner's authentication token)
- optional :last_update, type: String, desc: %q(Runner's queue last_update token)
- optional :info, type: Hash, desc: %q(Runner's metadata) do
- optional :name, type: String, desc: %q(Runner's name)
- optional :version, type: String, desc: %q(Runner's version)
- optional :revision, type: String, desc: %q(Runner's revision)
- optional :platform, type: String, desc: %q(Runner's platform)
- optional :architecture, type: String, desc: %q(Runner's architecture)
- optional :executor, type: String, desc: %q(Runner's executor)
- optional :features, type: Hash, desc: %q(Runner's features)
- end
- optional :session, type: Hash, desc: %q(Runner's session data) do
- optional :url, type: String, desc: %q(Session's url)
- optional :certificate, type: String, desc: %q(Session's certificate)
- optional :authorization, type: String, desc: %q(Session's authorization)
- end
- optional :job_age, type: Integer, desc: %q(Job should be older than passed age in seconds to be ran on runner)
- end
- post '/request' do
- authenticate_runner!
-
- unless current_runner.active?
- header 'X-GitLab-Last-Update', current_runner.ensure_runner_queue_value
- break no_content!
- end
-
- runner_params = declared_params(include_missing: false)
-
- if current_runner.runner_queue_value_latest?(runner_params[:last_update])
- header 'X-GitLab-Last-Update', runner_params[:last_update]
- Gitlab::Metrics.add_event(:build_not_found_cached)
- break no_content!
- end
-
- new_update = current_runner.ensure_runner_queue_value
- result = ::Ci::RegisterJobService.new(current_runner).execute(runner_params)
-
- if result.valid?
- if result.build
- Gitlab::Metrics.add_event(:build_found)
- present Ci::BuildRunnerPresenter.new(result.build), with: Entities::JobRequest::Response
- else
- Gitlab::Metrics.add_event(:build_not_found)
- header 'X-GitLab-Last-Update', new_update
- no_content!
- end
- else
- # We received build that is invalid due to concurrency conflict
- Gitlab::Metrics.add_event(:build_invalid)
- conflict!
- end
- end
-
- desc 'Updates a job' do
- http_codes [[200, 'Job was updated'], [403, 'Forbidden']]
- end
- params do
- requires :token, type: String, desc: %q(Runners's authentication token)
- requires :id, type: Integer, desc: %q(Job's ID)
- optional :trace, type: String, desc: %q(Job's full trace)
- optional :state, type: String, desc: %q(Job's status: success, failed)
- optional :failure_reason, type: String, desc: %q(Job's failure_reason)
- end
- put '/:id' do
- job = authenticate_job!
-
- job.trace.set(params[:trace]) if params[:trace]
-
- Gitlab::Metrics.add_event(:update_build)
-
- case params[:state].to_s
- when 'running'
- job.touch if job.needs_touch?
- when 'success'
- job.success!
- when 'failed'
- job.drop!(params[:failure_reason] || :unknown_failure)
- end
- end
-
- desc 'Appends a patch to the job trace' do
- http_codes [[202, 'Trace was patched'],
- [400, 'Missing Content-Range header'],
- [403, 'Forbidden'],
- [416, 'Range not satisfiable']]
- end
- params do
- requires :id, type: Integer, desc: %q(Job's ID)
- optional :token, type: String, desc: %q(Job's authentication token)
- end
- patch '/:id/trace' do
- job = authenticate_job!
-
- error!('400 Missing header Content-Range', 400) unless request.headers.key?('Content-Range')
- content_range = request.headers['Content-Range']
- content_range = content_range.split('-')
-
- # TODO:
- # it seems that `Content-Range` as formatted by runner is wrong,
- # the `byte_end` should point to final byte, but it points byte+1
- # that means that we have to calculate end of body,
- # as we cannot use `content_length[1]`
- # Issue: https://gitlab.com/gitlab-org/gitlab-runner/issues/3275
-
- body_data = request.body.read
- body_start = content_range[0].to_i
- body_end = body_start + body_data.bytesize
-
- stream_size = job.trace.append(body_data, body_start)
- unless stream_size == body_end
- break error!('416 Range Not Satisfiable', 416, { 'Range' => "0-#{stream_size}" })
- end
-
- status 202
- header 'Job-Status', job.status
- header 'Range', "0-#{stream_size}"
- header 'X-GitLab-Trace-Update-Interval', job.trace.update_interval.to_s
- end
-
- desc 'Authorize artifacts uploading for job' do
- http_codes [[200, 'Upload allowed'],
- [403, 'Forbidden'],
- [405, 'Artifacts support not enabled'],
- [413, 'File too large']]
- end
- params do
- requires :id, type: Integer, desc: %q(Job's ID)
- optional :token, type: String, desc: %q(Job's authentication token)
- optional :filesize, type: Integer, desc: %q(Artifacts filesize)
- optional :artifact_type, type: String, desc: %q(The type of artifact),
- default: 'archive', values: Ci::JobArtifact.file_types.keys
- end
- post '/:id/artifacts/authorize' do
- not_allowed! unless Gitlab.config.artifacts.enabled
- require_gitlab_workhorse!
- Gitlab::Workhorse.verify_api_request!(headers)
-
- job = authenticate_job!
-
- service = Ci::AuthorizeJobArtifactService.new(job, params, max_size: max_artifacts_size(job))
-
- forbidden! if service.forbidden?
- file_too_large! if service.too_large?
-
- status 200
- content_type Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE
- service.headers
- end
-
- desc 'Upload artifacts for job' do
- success Entities::JobRequest::Response
- http_codes [[201, 'Artifact uploaded'],
- [400, 'Bad request'],
- [403, 'Forbidden'],
- [405, 'Artifacts support not enabled'],
- [413, 'File too large']]
- end
- params do
- requires :id, type: Integer, desc: %q(Job's ID)
- requires :file, type: ::API::Validations::Types::WorkhorseFile, desc: %(The artifact file to store (generated by Multipart middleware))
- optional :token, type: String, desc: %q(Job's authentication token)
- optional :expire_in, type: String, desc: %q(Specify when artifacts should expire)
- optional :artifact_type, type: String, desc: %q(The type of artifact),
- default: 'archive', values: Ci::JobArtifact.file_types.keys
- optional :artifact_format, type: String, desc: %q(The format of artifact),
- default: 'zip', values: Ci::JobArtifact.file_formats.keys
- optional :metadata, type: ::API::Validations::Types::WorkhorseFile, desc: %(The artifact metadata to store (generated by Multipart middleware))
- end
- post '/:id/artifacts' do
- not_allowed! unless Gitlab.config.artifacts.enabled
- require_gitlab_workhorse!
-
- job = authenticate_job!
-
- artifacts = params[:file]
- metadata = params[:metadata]
-
- file_too_large! unless artifacts.size < max_artifacts_size(job)
-
- result = Ci::CreateJobArtifactsService.new(job.project).execute(job, artifacts, params, metadata_file: metadata)
-
- if result[:status] == :success
- status :created
- else
- render_api_error!(result[:message], result[:http_status])
- end
- end
-
- desc 'Download the artifacts file for job' do
- http_codes [[200, 'Upload allowed'],
- [403, 'Forbidden'],
- [404, 'Artifact not found']]
- end
- params do
- requires :id, type: Integer, desc: %q(Job's ID)
- optional :token, type: String, desc: %q(Job's authentication token)
- optional :direct_download, default: false, type: Boolean, desc: %q(Perform direct download from remote storage instead of proxying artifacts)
- end
- get '/:id/artifacts' do
- job = authenticate_job!(require_running: false)
-
- present_carrierwave_file!(job.artifacts_file, supports_direct_download: params[:direct_download])
- end
- end
- end
-end
diff --git a/lib/api/runners.rb b/lib/api/runners.rb
deleted file mode 100644
index 43ee1dd1f71..00000000000
--- a/lib/api/runners.rb
+++ /dev/null
@@ -1,287 +0,0 @@
-# frozen_string_literal: true
-
-module API
- class Runners < Grape::API
- include PaginationParams
-
- before { authenticate! }
-
- resource :runners do
- desc 'Get runners available for user' do
- success Entities::Runner
- end
- params do
- optional :scope, type: String, values: Ci::Runner::AVAILABLE_STATUSES,
- desc: 'The scope of specific runners to show'
- optional :type, type: String, values: Ci::Runner::AVAILABLE_TYPES,
- desc: 'The type of the runners to show'
- optional :status, type: String, values: Ci::Runner::AVAILABLE_STATUSES,
- desc: 'The status of the runners to show'
- optional :tag_list, type: Array[String], desc: 'The tags of the runners to show'
- use :pagination
- end
- get do
- runners = current_user.ci_owned_runners
- runners = filter_runners(runners, params[:scope], allowed_scopes: Ci::Runner::AVAILABLE_STATUSES)
- runners = filter_runners(runners, params[:type], allowed_scopes: Ci::Runner::AVAILABLE_TYPES)
- runners = filter_runners(runners, params[:status], allowed_scopes: Ci::Runner::AVAILABLE_STATUSES)
- runners = runners.tagged_with(params[:tag_list]) if params[:tag_list]
-
- present paginate(runners), with: Entities::Runner
- end
-
- desc 'Get all runners - shared and specific' do
- success Entities::Runner
- end
- params do
- optional :scope, type: String, values: Ci::Runner::AVAILABLE_SCOPES,
- desc: 'The scope of specific runners to show'
- optional :type, type: String, values: Ci::Runner::AVAILABLE_TYPES,
- desc: 'The type of the runners to show'
- optional :status, type: String, values: Ci::Runner::AVAILABLE_STATUSES,
- desc: 'The status of the runners to show'
- optional :tag_list, type: Array[String], desc: 'The tags of the runners to show'
- use :pagination
- end
- get 'all' do
- authenticated_as_admin!
-
- runners = Ci::Runner.all
- runners = filter_runners(runners, params[:scope])
- runners = filter_runners(runners, params[:type], allowed_scopes: Ci::Runner::AVAILABLE_TYPES)
- runners = filter_runners(runners, params[:status], allowed_scopes: Ci::Runner::AVAILABLE_STATUSES)
- runners = runners.tagged_with(params[:tag_list]) if params[:tag_list]
-
- present paginate(runners), with: Entities::Runner
- end
-
- desc "Get runner's details" do
- success Entities::RunnerDetails
- end
- params do
- requires :id, type: Integer, desc: 'The ID of the runner'
- end
- get ':id' do
- runner = get_runner(params[:id])
- authenticate_show_runner!(runner)
-
- present runner, with: Entities::RunnerDetails, current_user: current_user
- end
-
- desc "Update runner's details" do
- success Entities::RunnerDetails
- end
- params do
- requires :id, type: Integer, desc: 'The ID of the runner'
- optional :description, type: String, desc: 'The description of the runner'
- optional :active, type: Boolean, desc: 'The state of a runner'
- optional :tag_list, type: Array[String], desc: 'The list of tags for a runner'
- optional :run_untagged, type: Boolean, desc: 'Flag indicating the runner can execute untagged jobs'
- optional :locked, type: Boolean, desc: 'Flag indicating the runner is locked'
- optional :access_level, type: String, values: Ci::Runner.access_levels.keys,
- desc: 'The access_level of the runner'
- optional :maximum_timeout, type: Integer, desc: 'Maximum timeout set when this Runner will handle the job'
- at_least_one_of :description, :active, :tag_list, :run_untagged, :locked, :access_level, :maximum_timeout
- end
- put ':id' do
- runner = get_runner(params.delete(:id))
- authenticate_update_runner!(runner)
- update_service = Ci::UpdateRunnerService.new(runner)
-
- if update_service.update(declared_params(include_missing: false))
- present runner, with: Entities::RunnerDetails, current_user: current_user
- else
- render_validation_error!(runner)
- end
- end
-
- desc 'Remove a runner' do
- success Entities::Runner
- end
- params do
- requires :id, type: Integer, desc: 'The ID of the runner'
- end
- delete ':id' do
- runner = get_runner(params[:id])
-
- authenticate_delete_runner!(runner)
-
- destroy_conditionally!(runner)
- end
-
- desc 'List jobs running on a runner' do
- success Entities::JobBasicWithProject
- end
- params do
- requires :id, type: Integer, desc: 'The ID of the runner'
- optional :status, type: String, desc: 'Status of the job', values: Ci::Build::AVAILABLE_STATUSES
- optional :order_by, type: String, desc: 'Order by `id` or not', values: Ci::RunnerJobsFinder::ALLOWED_INDEXED_COLUMNS
- optional :sort, type: String, values: %w[asc desc], default: 'desc', desc: 'Sort by asc (ascending) or desc (descending)'
- use :pagination
- end
- get ':id/jobs' do
- runner = get_runner(params[:id])
- authenticate_list_runners_jobs!(runner)
-
- jobs = Ci::RunnerJobsFinder.new(runner, params).execute
-
- present paginate(jobs), with: Entities::JobBasicWithProject
- end
- end
-
- params do
- requires :id, type: String, desc: 'The ID of a project'
- end
- resource :projects, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
- before { authorize_admin_project }
-
- desc 'Get runners available for project' do
- success Entities::Runner
- end
- params do
- optional :scope, type: String, values: Ci::Runner::AVAILABLE_SCOPES,
- desc: 'The scope of specific runners to show'
- optional :type, type: String, values: Ci::Runner::AVAILABLE_TYPES,
- desc: 'The type of the runners to show'
- optional :status, type: String, values: Ci::Runner::AVAILABLE_STATUSES,
- desc: 'The status of the runners to show'
- optional :tag_list, type: Array[String], desc: 'The tags of the runners to show'
- use :pagination
- end
- get ':id/runners' do
- runners = Ci::Runner.owned_or_instance_wide(user_project.id)
- # scope is deprecated (for project runners), however api documentation still supports it.
- # Not including them in `apply_filter` method as it's not supported for group runners
- runners = filter_runners(runners, params[:scope])
- runners = apply_filter(runners, params)
-
- present paginate(runners), with: Entities::Runner
- end
-
- desc 'Enable a runner for a project' do
- success Entities::Runner
- end
- params do
- requires :runner_id, type: Integer, desc: 'The ID of the runner'
- end
- post ':id/runners' do
- runner = get_runner(params[:runner_id])
- authenticate_enable_runner!(runner)
-
- if runner.assign_to(user_project)
- present runner, with: Entities::Runner
- else
- render_validation_error!(runner)
- end
- end
-
- desc "Disable project's runner" do
- success Entities::Runner
- end
- params do
- requires :runner_id, type: Integer, desc: 'The ID of the runner'
- end
- # rubocop: disable CodeReuse/ActiveRecord
- delete ':id/runners/:runner_id' do
- runner_project = user_project.runner_projects.find_by(runner_id: params[:runner_id])
- not_found!('Runner') unless runner_project
-
- runner = runner_project.runner
- forbidden!("Only one project associated with the runner. Please remove the runner instead") if runner.projects.count == 1
-
- destroy_conditionally!(runner_project)
- end
- # rubocop: enable CodeReuse/ActiveRecord
- end
-
- params do
- requires :id, type: String, desc: 'The ID of a group'
- end
- resource :groups, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
- before { authorize_admin_group }
-
- desc 'Get runners available for group' do
- success Entities::Runner
- end
- params do
- optional :type, type: String, values: Ci::Runner::AVAILABLE_TYPES,
- desc: 'The type of the runners to show'
- optional :status, type: String, values: Ci::Runner::AVAILABLE_STATUSES,
- desc: 'The status of the runners to show'
- optional :tag_list, type: Array[String], desc: 'The tags of the runners to show'
- use :pagination
- end
- get ':id/runners' do
- runners = Ci::Runner.belonging_to_group(user_group.id, include_ancestors: true)
- runners = apply_filter(runners, params)
-
- present paginate(runners), with: Entities::Runner
- end
- end
-
- helpers do
- def filter_runners(runners, scope, allowed_scopes: ::Ci::Runner::AVAILABLE_SCOPES)
- return runners unless scope.present?
-
- unless allowed_scopes.include?(scope)
- render_api_error!('Scope contains invalid value', 400)
- end
-
- # Support deprecated scopes
- if runners.respond_to?("deprecated_#{scope}")
- scope = "deprecated_#{scope}"
- end
-
- runners.public_send(scope) # rubocop:disable GitlabSecurity/PublicSend
- end
-
- def apply_filter(runners, params)
- runners = filter_runners(runners, params[:type], allowed_scopes: Ci::Runner::AVAILABLE_TYPES)
- runners = filter_runners(runners, params[:status], allowed_scopes: Ci::Runner::AVAILABLE_STATUSES)
- runners = runners.tagged_with(params[:tag_list]) if params[:tag_list]
-
- runners
- end
-
- def get_runner(id)
- runner = Ci::Runner.find(id)
- not_found!('Runner') unless runner
- runner
- end
-
- def authenticate_show_runner!(runner)
- return if runner.instance_type? || current_user.admin?
-
- forbidden!("No access granted") unless can?(current_user, :read_runner, runner)
- end
-
- def authenticate_update_runner!(runner)
- return if current_user.admin?
-
- forbidden!("No access granted") unless can?(current_user, :update_runner, runner)
- end
-
- def authenticate_delete_runner!(runner)
- return if current_user.admin?
-
- forbidden!("Runner associated with more than one project") if runner.projects.count > 1
- forbidden!("No access granted") unless can?(current_user, :delete_runner, runner)
- end
-
- def authenticate_enable_runner!(runner)
- forbidden!("Runner is a group runner") if runner.group_type?
-
- return if current_user.admin?
-
- forbidden!("Runner is locked") if runner.locked?
- forbidden!("No access granted") unless can?(current_user, :assign_runner, runner)
- end
-
- def authenticate_list_runners_jobs!(runner)
- return if current_user.admin?
-
- forbidden!("No access granted") unless can?(current_user, :read_runner, runner)
- end
- end
- end
-end
diff --git a/lib/api/search.rb b/lib/api/search.rb
index ac00d3682a0..53095e0b81a 100644
--- a/lib/api/search.rb
+++ b/lib/api/search.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class Search < Grape::API
+ class Search < Grape::API::Instance
include PaginationParams
before { authenticate! }
@@ -24,7 +24,8 @@ module API
merge_requests: :with_api_entity_associations,
projects: :with_api_entity_associations,
issues: :with_api_entity_associations,
- milestones: :with_api_entity_associations
+ milestones: :with_api_entity_associations,
+ commits: :with_api_commit_entity_associations
}.freeze
def search(additional_params = {})
diff --git a/lib/api/services.rb b/lib/api/services.rb
index 5fd5c6bd9b0..9ee1822339c 100644
--- a/lib/api/services.rb
+++ b/lib/api/services.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
module API
- class Services < Grape::API
+ class Services < Grape::API::Instance
services = Helpers::ServicesHelpers.services
service_classes = Helpers::ServicesHelpers.service_classes
diff --git a/lib/api/settings.rb b/lib/api/settings.rb
index 0bf5eed26b4..3463e29041b 100644
--- a/lib/api/settings.rb
+++ b/lib/api/settings.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class Settings < Grape::API
+ class Settings < Grape::API::Instance
before { authenticated_as_admin! }
helpers Helpers::SettingsHelpers
@@ -49,7 +49,7 @@ module API
optional :default_project_visibility, type: String, values: Gitlab::VisibilityLevel.string_values, desc: 'The default project visibility'
optional :default_projects_limit, type: Integer, desc: 'The maximum number of personal projects'
optional :default_snippet_visibility, type: String, values: Gitlab::VisibilityLevel.string_values, desc: 'The default snippet visibility'
- optional :disabled_oauth_sign_in_sources, type: Array[String], desc: 'Disable certain OAuth sign-in sources'
+ optional :disabled_oauth_sign_in_sources, type: Array[String], coerce_with: Validations::Types::CommaSeparatedToArray.coerce, desc: 'Disable certain OAuth sign-in sources'
optional :domain_blacklist_enabled, type: Boolean, desc: 'Enable domain blacklist for sign ups'
optional :domain_blacklist, type: Array[String], coerce_with: Validations::Types::CommaSeparatedToArray.coerce, desc: 'Users with e-mail addresses that match these domain(s) will NOT be able to sign-up. Wildcards allowed. Use separate lines for multiple entries. Ex: domain.com, *.domain.com'
optional :domain_whitelist, type: Array[String], coerce_with: Validations::Types::CommaSeparatedToArray.coerce, desc: 'ONLY users with e-mail addresses that match these domain(s) will be able to sign-up. Wildcards allowed. Use separate lines for multiple entries. Ex: domain.com, *.domain.com'
@@ -79,7 +79,8 @@ module API
requires :housekeeping_incremental_repack_period, type: Integer, desc: "Number of Git pushes after which an incremental 'git repack' is run."
end
optional :html_emails_enabled, type: Boolean, desc: 'By default GitLab sends emails in HTML and plain text formats so mail clients can choose what format to use. Disable this option if you only want to send emails in plain text format.'
- optional :import_sources, type: Array[String], values: %w[github bitbucket bitbucket_server gitlab google_code fogbugz git gitlab_project gitea manifest phabricator],
+ optional :import_sources, type: Array[String], coerce_with: Validations::Types::CommaSeparatedToArray.coerce,
+ values: %w[github bitbucket bitbucket_server gitlab google_code fogbugz git gitlab_project gitea manifest phabricator],
desc: 'Enabled sources for code import during project creation. OmniAuth must be configured for GitHub, Bitbucket, and GitLab.com'
optional :max_artifacts_size, type: Integer, desc: "Set the maximum file size for each job's artifacts"
optional :max_attachment_size, type: Integer, desc: 'Maximum attachment size in MB'
@@ -113,13 +114,13 @@ module API
requires :recaptcha_private_key, type: String, desc: 'Generate private key at http://www.google.com/recaptcha'
end
optional :repository_checks_enabled, type: Boolean, desc: "GitLab will periodically run 'git fsck' in all project and wiki repositories to look for silent disk corruption issues."
- optional :repository_storages, type: Array[String], desc: 'Storage paths for new projects'
+ optional :repository_storages, type: Array[String], coerce_with: Validations::Types::CommaSeparatedToArray.coerce, desc: 'Storage paths for new projects'
optional :repository_storages_weighted, type: Hash, desc: 'Storage paths for new projects with a weighted value between 0 and 100'
optional :require_two_factor_authentication, type: Boolean, desc: 'Require all users to set up Two-factor authentication'
given require_two_factor_authentication: ->(val) { val } do
requires :two_factor_grace_period, type: Integer, desc: 'Amount of time (in hours) that users are allowed to skip forced configuration of two-factor authentication'
end
- optional :restricted_visibility_levels, type: Array[String], desc: 'Selected levels cannot be used by non-admin users for groups, projects or snippets. If the public level is restricted, user profiles are only visible to logged in users.'
+ optional :restricted_visibility_levels, type: Array[String], coerce_with: Validations::Types::CommaSeparatedToArray.coerce, desc: 'Selected levels cannot be used by non-admin users for groups, projects or snippets. If the public level is restricted, user profiles are only visible to logged in users.'
optional :send_user_confirmation_email, type: Boolean, desc: 'Send confirmation email on sign-up'
optional :session_expire_delay, type: Integer, desc: 'Session duration in minutes. GitLab restart is required to apply changes.'
optional :shared_runners_enabled, type: Boolean, desc: 'Enable shared runners for new projects'
diff --git a/lib/api/sidekiq_metrics.rb b/lib/api/sidekiq_metrics.rb
index 693c20cb73a..de1373144e3 100644
--- a/lib/api/sidekiq_metrics.rb
+++ b/lib/api/sidekiq_metrics.rb
@@ -3,7 +3,7 @@
require 'sidekiq/api'
module API
- class SidekiqMetrics < Grape::API
+ class SidekiqMetrics < Grape::API::Instance
before { authenticated_as_admin! }
helpers do
diff --git a/lib/api/snippets.rb b/lib/api/snippets.rb
index be58b832f97..118045e3af2 100644
--- a/lib/api/snippets.rb
+++ b/lib/api/snippets.rb
@@ -2,7 +2,7 @@
module API
# Snippets API
- class Snippets < Grape::API
+ class Snippets < Grape::API::Instance
include PaginationParams
before { authenticate! }
@@ -31,7 +31,7 @@ module API
use :pagination
end
get do
- present paginate(snippets_for_current_user), with: Entities::Snippet
+ present paginate(snippets_for_current_user), with: Entities::Snippet, current_user: current_user
end
desc 'List all public personal snippets current_user has access to' do
@@ -42,7 +42,7 @@ module API
use :pagination
end
get 'public' do
- present paginate(public_snippets), with: Entities::PersonalSnippet
+ present paginate(public_snippets), with: Entities::PersonalSnippet, current_user: current_user
end
desc 'Get a single snippet' do
@@ -57,7 +57,7 @@ module API
break not_found!('Snippet') unless snippet
- present snippet, with: Entities::PersonalSnippet
+ present snippet, with: Entities::PersonalSnippet, current_user: current_user
end
desc 'Create new snippet' do
@@ -82,7 +82,7 @@ module API
snippet = service_response.payload[:snippet]
if service_response.success?
- present snippet, with: Entities::PersonalSnippet
+ present snippet, with: Entities::PersonalSnippet, current_user: current_user
else
render_spam_error! if snippet.spam?
@@ -116,7 +116,7 @@ module API
snippet = service_response.payload[:snippet]
if service_response.success?
- present snippet, with: Entities::PersonalSnippet
+ present snippet, with: Entities::PersonalSnippet, current_user: current_user
else
render_spam_error! if snippet.spam?
@@ -155,14 +155,22 @@ module API
end
get ":id/raw" do
snippet = snippets.find_by_id(params.delete(:id))
- break not_found!('Snippet') unless snippet
+ not_found!('Snippet') unless snippet
- env['api.format'] = :txt
- content_type 'text/plain'
- header['Content-Disposition'] = 'attachment'
present content_for(snippet)
end
+ desc 'Get raw snippet file contents from the repository'
+ params do
+ use :raw_file_params
+ end
+ get ":id/files/:ref/:file_path/raw", requirements: { file_path: API::NO_SLASH_URL_PART_REGEX } do
+ snippet = snippets.find_by_id(params.delete(:id))
+ not_found!('Snippet') unless snippet&.repo_exists?
+
+ present file_content_for(snippet)
+ end
+
desc 'Get the user agent details for a snippet' do
success Entities::UserAgentDetail
end
diff --git a/lib/api/statistics.rb b/lib/api/statistics.rb
index d2dce34dfa5..3869fd3ac76 100644
--- a/lib/api/statistics.rb
+++ b/lib/api/statistics.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class Statistics < Grape::API
+ class Statistics < Grape::API::Instance
before { authenticated_as_admin! }
COUNTED_ITEMS = [Project, User, Group, ForkNetworkMember, ForkNetwork, Issue,
diff --git a/lib/api/submodules.rb b/lib/api/submodules.rb
index 72d7d994102..34d21d3d7d8 100644
--- a/lib/api/submodules.rb
+++ b/lib/api/submodules.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class Submodules < Grape::API
+ class Submodules < Grape::API::Instance
before { authenticate! }
helpers do
diff --git a/lib/api/subscriptions.rb b/lib/api/subscriptions.rb
index dfb54446ddf..533663fb087 100644
--- a/lib/api/subscriptions.rb
+++ b/lib/api/subscriptions.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class Subscriptions < Grape::API
+ class Subscriptions < Grape::API::Instance
helpers ::API::Helpers::LabelHelpers
before { authenticate! }
diff --git a/lib/api/suggestions.rb b/lib/api/suggestions.rb
index 05aaa8a6f41..38e96c080f2 100644
--- a/lib/api/suggestions.rb
+++ b/lib/api/suggestions.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class Suggestions < Grape::API
+ class Suggestions < Grape::API::Instance
before { authenticate! }
resource :suggestions do
@@ -25,7 +25,7 @@ module API
success Entities::Suggestion
end
params do
- requires :ids, type: Array[String], desc: "An array of suggestion ID's"
+ requires :ids, type: Array[Integer], coerce_with: ::API::Validations::Types::CommaSeparatedToIntegerArray.coerce, desc: "An array of suggestion ID's"
end
put 'batch_apply' do
ids = params[:ids]
diff --git a/lib/api/system_hooks.rb b/lib/api/system_hooks.rb
index 51fae0e54aa..d8e0a425625 100644
--- a/lib/api/system_hooks.rb
+++ b/lib/api/system_hooks.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class SystemHooks < Grape::API
+ class SystemHooks < Grape::API::Instance
include PaginationParams
before do
diff --git a/lib/api/tags.rb b/lib/api/tags.rb
index 796b1450602..c1fbd3ca7c6 100644
--- a/lib/api/tags.rb
+++ b/lib/api/tags.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class Tags < Grape::API
+ class Tags < Grape::API::Instance
include PaginationParams
TAG_ENDPOINT_REQUIREMENTS = API::NAMESPACE_OR_PROJECT_REQUIREMENTS.merge(tag_name: API::NO_SLASH_URL_PART_REGEX)
diff --git a/lib/api/templates.rb b/lib/api/templates.rb
index 51f357d9477..80a97aae429 100644
--- a/lib/api/templates.rb
+++ b/lib/api/templates.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class Templates < Grape::API
+ class Templates < Grape::API::Instance
include PaginationParams
GLOBAL_TEMPLATE_TYPES = {
diff --git a/lib/api/terraform/state.rb b/lib/api/terraform/state.rb
index e7c9627c753..f6e966defce 100644
--- a/lib/api/terraform/state.rb
+++ b/lib/api/terraform/state.rb
@@ -4,14 +4,14 @@ require_dependency 'api/validations/validators/limit'
module API
module Terraform
- class State < Grape::API
+ class State < Grape::API::Instance
include ::Gitlab::Utils::StrongMemoize
default_format :json
before do
authenticate!
- authorize! :admin_terraform_state, user_project
+ authorize! :read_terraform_state, user_project
end
params do
@@ -46,6 +46,8 @@ module API
desc 'Add a new terraform state or update an existing one'
route_setting :authentication, basic_auth_personal_access_token: true, job_token_allowed: :basic_auth
post do
+ authorize! :admin_terraform_state, user_project
+
data = request.body.read
no_content! if data.empty?
@@ -59,6 +61,8 @@ module API
desc 'Delete a terraform state of a certain name'
route_setting :authentication, basic_auth_personal_access_token: true, job_token_allowed: :basic_auth
delete do
+ authorize! :admin_terraform_state, user_project
+
remote_state_handler.handle_with_lock do |state|
state.destroy!
status :ok
@@ -77,6 +81,8 @@ module API
requires :Path, type: String, desc: 'Terraform path'
end
post '/lock' do
+ authorize! :admin_terraform_state, user_project
+
status_code = :ok
lock_info = {
'Operation' => params[:Operation],
@@ -108,6 +114,8 @@ module API
optional :ID, type: String, limit: 255, desc: 'Terraform state lock ID'
end
delete '/lock' do
+ authorize! :admin_terraform_state, user_project
+
remote_state_handler.unlock!
status :ok
rescue ::Terraform::RemoteStateHandler::StateLockedError
diff --git a/lib/api/todos.rb b/lib/api/todos.rb
index e36ddf21277..4a73e3e0e94 100644
--- a/lib/api/todos.rb
+++ b/lib/api/todos.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class Todos < Grape::API
+ class Todos < Grape::API::Instance
include PaginationParams
before { authenticate! }
diff --git a/lib/api/triggers.rb b/lib/api/triggers.rb
index e1829403941..de67a149274 100644
--- a/lib/api/triggers.rb
+++ b/lib/api/triggers.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class Triggers < Grape::API
+ class Triggers < Grape::API::Instance
include PaginationParams
HTTP_GITLAB_EVENT_HEADER = "HTTP_#{WebHookService::GITLAB_EVENT_HEADER}".underscore.upcase
@@ -32,7 +32,7 @@ module API
project = find_project(params[:id])
not_found! unless project
- result = Ci::PipelineTriggerService.new(project, nil, params).execute
+ result = ::Ci::PipelineTriggerService.new(project, nil, params).execute
not_found! unless result
if result[:http_status]
diff --git a/lib/api/user_counts.rb b/lib/api/user_counts.rb
index 8df4b381bbf..90127ecbc73 100644
--- a/lib/api/user_counts.rb
+++ b/lib/api/user_counts.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class UserCounts < Grape::API
+ class UserCounts < Grape::API::Instance
resource :user_counts do
desc 'Return the user specific counts' do
detail 'Open MR Count'
diff --git a/lib/api/users.rb b/lib/api/users.rb
index 3d8ae09edf1..7942777287b 100644
--- a/lib/api/users.rb
+++ b/lib/api/users.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class Users < Grape::API
+ class Users < Grape::API::Instance
include PaginationParams
include APIGuard
include Helpers::CustomAttributes
@@ -117,6 +117,8 @@ module API
users = users.preload(:identities, :u2f_registrations) if entity == Entities::UserWithAdmin
users, options = with_custom_attributes(users, { with: entity, current_user: current_user })
+ users = users.preload(:user_detail)
+
present paginate(users), options
end
# rubocop: enable CodeReuse/ActiveRecord
@@ -328,9 +330,9 @@ module API
user = User.find_by(id: params.delete(:id))
not_found!('User') unless user
- key = user.gpg_keys.new(declared_params(include_missing: false))
+ key = ::GpgKeys::CreateService.new(user, declared_params(include_missing: false)).execute
- if key.save
+ if key.persisted?
present key, with: Entities::GpgKey
else
render_validation_error!(key)
@@ -374,9 +376,10 @@ module API
key = user.gpg_keys.find_by(id: params[:key_id])
not_found!('GPG Key') unless key
- key.destroy
-
- no_content!
+ destroy_conditionally!(key) do |key|
+ destroy_service = ::GpgKeys::DestroyService.new(current_user)
+ destroy_service.execute(key)
+ end
end
# rubocop: enable CodeReuse/ActiveRecord
@@ -730,9 +733,9 @@ module API
optional :expires_at, type: DateTime, desc: 'The expiration date of the SSH key in ISO 8601 format (YYYY-MM-DDTHH:MM:SSZ)'
end
post "keys" do
- key = current_user.keys.new(declared_params)
+ key = ::Keys::CreateService.new(current_user, declared_params(include_missing: false)).execute
- if key.save
+ if key.persisted?
present key, with: Entities::SSHKey
else
render_validation_error!(key)
@@ -750,7 +753,10 @@ module API
key = current_user.keys.find_by(id: params[:key_id])
not_found!('Key') unless key
- destroy_conditionally!(key)
+ destroy_conditionally!(key) do |key|
+ destroy_service = ::Keys::DestroyService.new(current_user)
+ destroy_service.execute(key)
+ end
end
# rubocop: enable CodeReuse/ActiveRecord
@@ -789,9 +795,9 @@ module API
requires :key, type: String, desc: 'The new GPG key'
end
post 'gpg_keys' do
- key = current_user.gpg_keys.new(declared_params)
+ key = ::GpgKeys::CreateService.new(current_user, declared_params(include_missing: false)).execute
- if key.save
+ if key.persisted?
present key, with: Entities::GpgKey
else
render_validation_error!(key)
@@ -825,9 +831,10 @@ module API
key = current_user.gpg_keys.find_by(id: params[:key_id])
not_found!('GPG Key') unless key
- key.destroy
-
- no_content!
+ destroy_conditionally!(key) do |key|
+ destroy_service = ::GpgKeys::DestroyService.new(current_user)
+ destroy_service.execute(key)
+ end
end
# rubocop: enable CodeReuse/ActiveRecord
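The users endpoints above stop building SSH and GPG keys on the association directly and instead go through service objects, checking persisted? on the result. A minimal sketch of that pattern, assuming a service shaped like ::Keys::CreateService (the real classes, which also handle notifications, are not shown in this diff):

    # Illustrative only; not the actual service implementation.
    module Keys
      class CreateService
        def initialize(user, params = {})
          @user = user
          @params = params
        end

        def execute
          # Returns the key either way; the endpoint checks `persisted?`.
          @user.keys.create(@params)
        end
      end
    end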
diff --git a/lib/api/validations/types/comma_separated_to_array.rb b/lib/api/validations/types/comma_separated_to_array.rb
index b551878abd1..409eb67a3d3 100644
--- a/lib/api/validations/types/comma_separated_to_array.rb
+++ b/lib/api/validations/types/comma_separated_to_array.rb
@@ -10,7 +10,7 @@ module API
when String
value.split(',').map(&:strip)
when Array
- value.map { |v| v.to_s.split(',').map(&:strip) }.flatten
+ value.flat_map { |v| v.to_s.split(',').map(&:strip) }
else
[]
end
diff --git a/lib/api/validations/types/comma_separated_to_integer_array.rb b/lib/api/validations/types/comma_separated_to_integer_array.rb
new file mode 100644
index 00000000000..b8ab08b3fd4
--- /dev/null
+++ b/lib/api/validations/types/comma_separated_to_integer_array.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module API
+ module Validations
+ module Types
+ class CommaSeparatedToIntegerArray < CommaSeparatedToArray
+ def self.coerce
+ lambda do |value|
+ super.call(value).map(&:to_i)
+ end
+ end
+ end
+ end
+ end
+end
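Because `super` inside the lambda resolves to the parent coercion, the new type inherits the String/Array handling and only adds the integer cast. Illustrative results, assuming the classes above are loaded:

    to_ints = API::Validations::Types::CommaSeparatedToIntegerArray.coerce
    to_ints.call('1, 2,3')   # => [1, 2, 3]
    to_ints.call(%w[4,5 6])  # => [4, 5, 6]
    to_ints.call(nil)        # => []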
diff --git a/lib/api/validations/types/labels_list.rb b/lib/api/validations/types/labels_list.rb
deleted file mode 100644
index 60277b99106..00000000000
--- a/lib/api/validations/types/labels_list.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-# frozen_string_literal: true
-
-module API
- module Validations
- module Types
- class LabelsList
- def self.coerce
- lambda do |value|
- case value
- when String
- value.split(',').map(&:strip)
- when Array
- value.flat_map { |v| v.to_s.split(',').map(&:strip) }
- when LabelsList
- value
- else
- []
- end
- end
- end
- end
- end
- end
-end
diff --git a/lib/api/validations/types/safe_file.rb b/lib/api/validations/types/safe_file.rb
deleted file mode 100644
index 53b5790bfa2..00000000000
--- a/lib/api/validations/types/safe_file.rb
+++ /dev/null
@@ -1,15 +0,0 @@
-# frozen_string_literal: true
-
-# This module overrides the Grape type validator defined in
-# https://github.com/ruby-grape/grape/blob/master/lib/grape/validations/types/file.rb
-module API
- module Validations
- module Types
- class SafeFile < ::Grape::Validations::Types::File
- def value_coerced?(value)
- super && value[:tempfile].is_a?(Tempfile)
- end
- end
- end
- end
-end
diff --git a/lib/api/validations/types/workhorse_file.rb b/lib/api/validations/types/workhorse_file.rb
index 18d111f6556..e65e94fc8db 100644
--- a/lib/api/validations/types/workhorse_file.rb
+++ b/lib/api/validations/types/workhorse_file.rb
@@ -3,15 +3,14 @@
module API
module Validations
module Types
- class WorkhorseFile < Virtus::Attribute
- def coerce(input)
- # Processing of multipart file objects
- # is already taken care of by Gitlab::Middleware::Multipart.
- # Nothing to do here.
- input
+ class WorkhorseFile
+ def self.parse(value)
+ raise "#{value.class} is not an UploadedFile type" unless parsed?(value)
+
+ value
end
- def value_coerced?(value)
+ def self.parsed?(value)
value.is_a?(::UploadedFile)
end
end
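With the Grape upgrade, custom types no longer subclass Virtus::Attribute; they expose parse/parsed? instead. A small illustration of the new contract (values are hypothetical):

    type = API::Validations::Types::WorkhorseFile
    type.parsed?(Object.new)  # => false
    type.parse('not-a-file')  # raises "String is not an UploadedFile type"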
diff --git a/lib/api/variables.rb b/lib/api/variables.rb
index 192b06b8a1b..50d137ec7c1 100644
--- a/lib/api/variables.rb
+++ b/lib/api/variables.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class Variables < Grape::API
+ class Variables < Grape::API::Instance
include PaginationParams
before { authenticate! }
@@ -13,6 +13,15 @@ module API
# parameters, without having to modify the source code directly.
params
end
+
+ def find_variable(params)
+ variables = ::Ci::VariablesFinder.new(user_project, params).execute.to_a
+
+ return variables.first unless ::Gitlab::Ci::Features.variables_api_filter_environment_scope?
+ return variables.first unless variables.many? # rubocop: disable CodeReuse/ActiveRecord
+
+ conflict!("There are multiple variables with provided parameters. Please use 'filter[environment_scope]'")
+ end
end
params do
@@ -39,10 +48,8 @@ module API
end
# rubocop: disable CodeReuse/ActiveRecord
get ':id/variables/:key' do
- key = params[:key]
- variable = user_project.variables.find_by(key: key)
-
- break not_found!('Variable') unless variable
+ variable = find_variable(params)
+ not_found!('Variable') unless variable
present variable, with: Entities::Variable
end
@@ -56,7 +63,7 @@ module API
requires :value, type: String, desc: 'The value of the variable'
optional :protected, type: Boolean, desc: 'Whether the variable is protected'
optional :masked, type: Boolean, desc: 'Whether the variable is masked'
- optional :variable_type, type: String, values: Ci::Variable.variable_types.keys, desc: 'The type of variable, must be one of env_var or file. Defaults to env_var'
+ optional :variable_type, type: String, values: ::Ci::Variable.variable_types.keys, desc: 'The type of variable, must be one of env_var or file. Defaults to env_var'
optional :environment_scope, type: String, desc: 'The environment_scope of the variable'
end
post ':id/variables' do
@@ -80,16 +87,16 @@ module API
optional :value, type: String, desc: 'The value of the variable'
optional :protected, type: Boolean, desc: 'Whether the variable is protected'
optional :masked, type: Boolean, desc: 'Whether the variable is masked'
- optional :variable_type, type: String, values: Ci::Variable.variable_types.keys, desc: 'The type of variable, must be one of env_var or file'
+ optional :variable_type, type: String, values: ::Ci::Variable.variable_types.keys, desc: 'The type of variable, must be one of env_var or file'
optional :environment_scope, type: String, desc: 'The environment_scope of the variable'
+ optional :filter, type: Hash, desc: 'Available filters: [environment_scope]. Example: filter[environment_scope]=production'
end
# rubocop: disable CodeReuse/ActiveRecord
put ':id/variables/:key' do
- variable = user_project.variables.find_by(key: params[:key])
-
- break not_found!('Variable') unless variable
+ variable = find_variable(params)
+ not_found!('Variable') unless variable
- variable_params = declared_params(include_missing: false).except(:key)
+ variable_params = declared_params(include_missing: false).except(:key, :filter)
variable_params = filter_variable_parameters(variable_params)
if variable.update(variable_params)
@@ -105,10 +112,11 @@ module API
end
params do
requires :key, type: String, desc: 'The key of the variable'
+ optional :filter, type: Hash, desc: 'Available filters: [environment_scope]. Example: filter[environment_scope]=production'
end
# rubocop: disable CodeReuse/ActiveRecord
delete ':id/variables/:key' do
- variable = user_project.variables.find_by(key: params[:key])
+ variable = find_variable(params)
not_found!('Variable') unless variable
# Variables don't have a timestamp. Therefore, destroy unconditionally.
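The find_variable helper above changes how GET, PUT and DELETE resolve a key: when the feature flag is enabled and the key exists in more than one environment scope, the request must disambiguate via filter[environment_scope] or it is rejected. A hedged walk-through, assuming two variables share the key DB_URL in scopes '*' and 'production':

    find_variable(key: 'DB_URL')
    # => conflict!("There are multiple variables with provided parameters. Please use 'filter[environment_scope]'")

    find_variable(key: 'DB_URL', filter: { environment_scope: 'production' })
    # => the single variable scoped to 'production'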
diff --git a/lib/api/version.rb b/lib/api/version.rb
index 2d8c90260fa..6a480fc2bd9 100644
--- a/lib/api/version.rb
+++ b/lib/api/version.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module API
- class Version < Grape::API
+ class Version < Grape::API::Instance
helpers ::API::Helpers::GraphqlHelpers
include APIGuard
diff --git a/lib/api/wikis.rb b/lib/api/wikis.rb
index c1bf3a64923..713136e0887 100644
--- a/lib/api/wikis.rb
+++ b/lib/api/wikis.rb
@@ -1,25 +1,11 @@
# frozen_string_literal: true
module API
- class Wikis < Grape::API
+ class Wikis < Grape::API::Instance
+ helpers ::API::Helpers::WikisHelpers
+
helpers do
- def commit_params(attrs)
- # In order to avoid service disruption this can work with an old workhorse without the acceleration
- # the first branch of this if must be removed when we drop support for non accelerated uploads
- if attrs[:file].is_a?(Hash)
- {
- file_name: attrs[:file][:filename],
- file_content: attrs[:file][:tempfile].read,
- branch_name: attrs[:branch]
- }
- else
- {
- file_name: attrs[:file].original_filename,
- file_content: attrs[:file].read,
- branch_name: attrs[:branch]
- }
- end
- end
+ attr_reader :container
params :common_wiki_page_params do
optional :format,
@@ -32,108 +18,118 @@ module API
WIKI_ENDPOINT_REQUIREMENTS = API::NAMESPACE_OR_PROJECT_REQUIREMENTS.merge(slug: API::NO_SLASH_URL_PART_REGEX)
- resource :projects, requirements: WIKI_ENDPOINT_REQUIREMENTS do
- desc 'Get a list of wiki pages' do
- success Entities::WikiPageBasic
- end
- params do
- optional :with_content, type: Boolean, default: false, desc: "Include pages' content"
- end
- get ':id/wikis' do
- authorize! :read_wiki, user_project
-
- entity = params[:with_content] ? Entities::WikiPage : Entities::WikiPageBasic
+ ::API::Helpers::WikisHelpers.wiki_resource_kinds.each do |container_resource|
+ resource container_resource, requirements: WIKI_ENDPOINT_REQUIREMENTS do
+ after_validation do
+ @container = Gitlab::Lazy.new { find_container(container_resource) }
+ end
- present user_project.wiki.list_pages(load_content: params[:with_content]), with: entity
- end
+ desc 'Get a list of wiki pages' do
+ success Entities::WikiPageBasic
+ end
+ params do
+ optional :with_content, type: Boolean, default: false, desc: "Include pages' content"
+ end
+ get ':id/wikis' do
+ authorize! :read_wiki, container
- desc 'Get a wiki page' do
- success Entities::WikiPage
- end
- params do
- requires :slug, type: String, desc: 'The slug of a wiki page'
- end
- get ':id/wikis/:slug' do
- authorize! :read_wiki, user_project
+ entity = params[:with_content] ? Entities::WikiPage : Entities::WikiPageBasic
- present wiki_page, with: Entities::WikiPage
- end
+ present container.wiki.list_pages(load_content: params[:with_content]), with: entity
+ end
- desc 'Create a wiki page' do
- success Entities::WikiPage
- end
- params do
- requires :title, type: String, desc: 'Title of a wiki page'
- requires :content, type: String, desc: 'Content of a wiki page'
- use :common_wiki_page_params
- end
- post ':id/wikis' do
- authorize! :create_wiki, user_project
+ desc 'Get a wiki page' do
+ success Entities::WikiPage
+ end
+ params do
+ requires :slug, type: String, desc: 'The slug of a wiki page'
+ end
+ get ':id/wikis/:slug' do
+ authorize! :read_wiki, container
- page = WikiPages::CreateService.new(container: user_project, current_user: current_user, params: params).execute
+ present wiki_page, with: Entities::WikiPage
+ end
- if page.valid?
- present page, with: Entities::WikiPage
- else
- render_validation_error!(page)
+ desc 'Create a wiki page' do
+ success Entities::WikiPage
end
- end
+ params do
+ requires :title, type: String, desc: 'Title of a wiki page'
+ requires :content, type: String, desc: 'Content of a wiki page'
+ use :common_wiki_page_params
+ end
+ post ':id/wikis' do
+ authorize! :create_wiki, container
- desc 'Update a wiki page' do
- success Entities::WikiPage
- end
- params do
- optional :title, type: String, desc: 'Title of a wiki page'
- optional :content, type: String, desc: 'Content of a wiki page'
- use :common_wiki_page_params
- at_least_one_of :content, :title, :format
- end
- put ':id/wikis/:slug' do
- authorize! :create_wiki, user_project
+ page = WikiPages::CreateService.new(container: container, current_user: current_user, params: params).execute
- page = WikiPages::UpdateService.new(container: user_project, current_user: current_user, params: params).execute(wiki_page)
+ if page.valid?
+ present page, with: Entities::WikiPage
+ else
+ render_validation_error!(page)
+ end
+ end
- if page.valid?
- present page, with: Entities::WikiPage
- else
- render_validation_error!(page)
+ desc 'Update a wiki page' do
+ success Entities::WikiPage
+ end
+ params do
+ optional :title, type: String, desc: 'Title of a wiki page'
+ optional :content, type: String, desc: 'Content of a wiki page'
+ use :common_wiki_page_params
+ at_least_one_of :content, :title, :format
+ end
+ put ':id/wikis/:slug' do
+ authorize! :create_wiki, container
+
+ page = WikiPages::UpdateService
+ .new(container: container, current_user: current_user, params: params)
+ .execute(wiki_page)
+
+ if page.valid?
+ present page, with: Entities::WikiPage
+ else
+ render_validation_error!(page)
+ end
end
- end
- desc 'Delete a wiki page'
- params do
- requires :slug, type: String, desc: 'The slug of a wiki page'
- end
- delete ':id/wikis/:slug' do
- authorize! :admin_wiki, user_project
+ desc 'Delete a wiki page'
+ params do
+ requires :slug, type: String, desc: 'The slug of a wiki page'
+ end
+ delete ':id/wikis/:slug' do
+ authorize! :admin_wiki, container
- WikiPages::DestroyService.new(container: user_project, current_user: current_user).execute(wiki_page)
+ WikiPages::DestroyService
+ .new(container: container, current_user: current_user)
+ .execute(wiki_page)
- no_content!
- end
+ no_content!
+ end
- desc 'Upload an attachment to the wiki repository' do
- detail 'This feature was introduced in GitLab 11.3.'
- success Entities::WikiAttachment
- end
- params do
- requires :file, types: [::API::Validations::Types::SafeFile, ::API::Validations::Types::WorkhorseFile], desc: 'The attachment file to be uploaded'
- optional :branch, type: String, desc: 'The name of the branch'
- end
- post ":id/wikis/attachments" do
- authorize! :create_wiki, user_project
-
- result = ::Wikis::CreateAttachmentService.new(
- container: user_project,
- current_user: current_user,
- params: commit_params(declared_params(include_missing: false))
- ).execute
-
- if result[:status] == :success
- status(201)
- present OpenStruct.new(result[:result]), with: Entities::WikiAttachment
- else
- render_api_error!(result[:message], 400)
+ desc 'Upload an attachment to the wiki repository' do
+ detail 'This feature was introduced in GitLab 11.3.'
+ success Entities::WikiAttachment
+ end
+ params do
+ requires :file, types: [Rack::Multipart::UploadedFile, ::API::Validations::Types::WorkhorseFile], desc: 'The attachment file to be uploaded'
+ optional :branch, type: String, desc: 'The name of the branch'
+ end
+ post ":id/wikis/attachments" do
+ authorize! :create_wiki, container
+
+ result = ::Wikis::CreateAttachmentService.new(
+ container: container,
+ current_user: current_user,
+ params: commit_params(declared_params(include_missing: false))
+ ).execute
+
+ if result[:status] == :success
+ status(201)
+ present OpenStruct.new(result[:result]), with: Entities::WikiAttachment
+ else
+ render_api_error!(result[:message], 400)
+ end
end
end
end
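The wiki endpoints are now generated once per container kind returned by WikisHelpers.wiki_resource_kinds (projects on CE; presumably groups as well on EE), and the container is resolved lazily after parameter validation. A minimal sketch of the lazy lookup, assuming Gitlab::Lazy evaluates its block on first use:

    container = Gitlab::Lazy.new { find_container(:projects) } # nothing resolved yet
    authorize! :read_wiki, container                           # block runs here, on first use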
diff --git a/lib/backup/database.rb b/lib/backup/database.rb
index 7e457c4982d..d4c1ce260e4 100644
--- a/lib/backup/database.rb
+++ b/lib/backup/database.rb
@@ -27,12 +27,18 @@ module Backup
progress.print "Dumping PostgreSQL database #{config['database']} ... "
pg_env
pgsql_args = ["--clean"] # Pass '--clean' to include 'DROP TABLE' statements in the DB dump.
+
if Gitlab.config.backup.pg_schema
- pgsql_args << "-n"
+ pgsql_args << '-n'
pgsql_args << Gitlab.config.backup.pg_schema
+
+ Gitlab::Database::EXTRA_SCHEMAS.each do |schema|
+ pgsql_args << '-n'
+ pgsql_args << schema.to_s
+ end
end
- spawn('pg_dump', *pgsql_args, config['database'], out: compress_wr)
+ Process.spawn('pg_dump', *pgsql_args, config['database'], out: compress_wr)
end
compress_wr.close
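With this change the dump restricts itself to the configured schema plus every schema in Gitlab::Database::EXTRA_SCHEMAS, so partitioned tables are included. An illustrative final invocation, assuming pg_schema is 'public', EXTRA_SCHEMAS contains the two partition schemas, and the database is named gitlabhq_production:

    Process.spawn(
      'pg_dump', '--clean',
      '-n', 'public',
      '-n', 'gitlab_partitions_dynamic',
      '-n', 'gitlab_partitions_static',
      'gitlabhq_production',
      out: compress_wr
    )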
diff --git a/lib/banzai/filter/abstract_reference_filter.rb b/lib/banzai/filter/abstract_reference_filter.rb
index f142333d797..38105e2237c 100644
--- a/lib/banzai/filter/abstract_reference_filter.rb
+++ b/lib/banzai/filter/abstract_reference_filter.rb
@@ -146,16 +146,16 @@ module Banzai
link_pattern_start = /\A#{link_pattern}/
link_pattern_anchor = /\A#{link_pattern}\z/
- nodes.each do |node|
+ nodes.each_with_index do |node, index|
if text_node?(node) && ref_pattern
- replace_text_when_pattern_matches(node, ref_pattern) do |content|
+ replace_text_when_pattern_matches(node, index, ref_pattern) do |content|
object_link_filter(content, ref_pattern)
end
elsif element_node?(node)
yield_valid_link(node) do |link, inner_html|
if ref_pattern && link =~ ref_pattern_anchor
- replace_link_node_with_href(node, link) do
+ replace_link_node_with_href(node, index, link) do
object_link_filter(link, ref_pattern, link_content: inner_html)
end
@@ -165,7 +165,7 @@ module Banzai
next unless link_pattern
if link == inner_html && inner_html =~ link_pattern_start
- replace_link_node_with_text(node, link) do
+ replace_link_node_with_text(node, index) do
object_link_filter(inner_html, link_pattern, link_reference: true)
end
@@ -173,7 +173,7 @@ module Banzai
end
if link =~ link_pattern_anchor
- replace_link_node_with_href(node, link) do
+ replace_link_node_with_href(node, index, link) do
object_link_filter(link, link_pattern, link_content: inner_html, link_reference: true)
end
diff --git a/lib/banzai/filter/commit_trailers_filter.rb b/lib/banzai/filter/commit_trailers_filter.rb
index 02a47556151..5288db3b0cb 100644
--- a/lib/banzai/filter/commit_trailers_filter.rb
+++ b/lib/banzai/filter/commit_trailers_filter.rb
@@ -144,10 +144,7 @@ module Banzai
end
def data_attributes_from_hash(data = {})
- data.reject! {|_, value| value.nil?}
- data.map do |key, value|
- [%(data-#{key.to_s.dasherize}), value]
- end.to_h
+ data.compact.transform_keys { |key| %(data-#{key.to_s.dasherize}) }
end
end
end
diff --git a/lib/banzai/filter/external_issue_reference_filter.rb b/lib/banzai/filter/external_issue_reference_filter.rb
index 74bc102320c..fcf4863ab4f 100644
--- a/lib/banzai/filter/external_issue_reference_filter.rb
+++ b/lib/banzai/filter/external_issue_reference_filter.rb
@@ -34,16 +34,16 @@ module Banzai
ref_pattern = issue_reference_pattern
ref_start_pattern = /\A#{ref_pattern}\z/
- each_node do |node|
+ nodes.each_with_index do |node, index|
if text_node?(node)
- replace_text_when_pattern_matches(node, ref_pattern) do |content|
+ replace_text_when_pattern_matches(node, index, ref_pattern) do |content|
issue_link_filter(content)
end
elsif element_node?(node)
yield_valid_link(node) do |link, inner_html|
if link =~ ref_start_pattern
- replace_link_node_with_href(node, link) do
+ replace_link_node_with_href(node, index, link) do
issue_link_filter(link, link_content: inner_html)
end
end
diff --git a/lib/banzai/filter/inline_cluster_metrics_filter.rb b/lib/banzai/filter/inline_cluster_metrics_filter.rb
new file mode 100644
index 00000000000..5ef68388ea9
--- /dev/null
+++ b/lib/banzai/filter/inline_cluster_metrics_filter.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+module Banzai
+ module Filter
+ class InlineClusterMetricsFilter < ::Banzai::Filter::InlineEmbedsFilter
+ def embed_params(node)
+ url = node['href']
+ @query_params = query_params(url)
+ return unless [:group, :title, :y_label].all? do |param|
+ @query_params.include?(param)
+ end
+
+ link_pattern.match(url) { |m| m.named_captures }.symbolize_keys
+ end
+
+ def xpath_search
+ "descendant-or-self::a[contains(@href,'clusters') and \
+ starts-with(@href, '#{::Gitlab.config.gitlab.url}')]"
+ end
+
+ def link_pattern
+ ::Gitlab::Metrics::Dashboard::Url.clusters_regex
+ end
+
+ def metrics_dashboard_url(params)
+ ::Gitlab::Routing.url_helpers.metrics_dashboard_namespace_project_cluster_url(
+ params[:namespace],
+ params[:project],
+ params[:cluster_id],
+ # Only Project clusters are supported for now
+ # admin and group cluster types may be supported in the future
+ cluster_type: :project,
+ embedded: true,
+ format: :json,
+ **@query_params
+ )
+ end
+ end
+ end
+end
diff --git a/lib/banzai/filter/inline_metrics_redactor_filter.rb b/lib/banzai/filter/inline_metrics_redactor_filter.rb
index 75bd3325bd4..7f98a52d421 100644
--- a/lib/banzai/filter/inline_metrics_redactor_filter.rb
+++ b/lib/banzai/filter/inline_metrics_redactor_filter.rb
@@ -77,6 +77,10 @@ module Banzai
Route.new(
::Gitlab::Metrics::Dashboard::Url.grafana_regex,
:read_project
+ ),
+ Route.new(
+ ::Gitlab::Metrics::Dashboard::Url.clusters_regex,
+ :read_cluster
)
]
end
diff --git a/lib/banzai/filter/jira_import/adf_to_commonmark_filter.rb b/lib/banzai/filter/jira_import/adf_to_commonmark_filter.rb
new file mode 100644
index 00000000000..3db2244d641
--- /dev/null
+++ b/lib/banzai/filter/jira_import/adf_to_commonmark_filter.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+module Banzai
+ module Filter
+ module JiraImport
+ # Uses Kramdown to convert from the Atlassian Document Format (json)
+ # into CommonMark
+ # @see https://developer.atlassian.com/cloud/jira/platform/apis/document/structure/
+ class AdfToCommonmarkFilter < HTML::Pipeline::TextFilter
+ def initialize(text, context = nil, result = nil)
+ super(text, context, result)
+ end
+
+ def call
+ Kramdown::Document.new(@text, input: 'AtlassianDocumentFormat', html_tables: true).to_commonmark
+ rescue ::Kramdown::Error => e
+ # If we get an error, then just return the original text so at
+ # least the user knows something went wrong
+ "#{e.message}\n\n#{@text}"
+ end
+ end
+ end
+ end
+end
diff --git a/lib/banzai/filter/project_reference_filter.rb b/lib/banzai/filter/project_reference_filter.rb
index 292d4b1d56c..50e23460cb8 100644
--- a/lib/banzai/filter/project_reference_filter.rb
+++ b/lib/banzai/filter/project_reference_filter.rb
@@ -27,15 +27,15 @@ module Banzai
ref_pattern = Project.markdown_reference_pattern
ref_pattern_start = /\A#{ref_pattern}\z/
- nodes.each do |node|
+ nodes.each_with_index do |node, index|
if text_node?(node)
- replace_text_when_pattern_matches(node, ref_pattern) do |content|
+ replace_text_when_pattern_matches(node, index, ref_pattern) do |content|
project_link_filter(content)
end
elsif element_node?(node)
yield_valid_link(node) do |link, inner_html|
if link =~ ref_pattern_start
- replace_link_node_with_href(node, link) do
+ replace_link_node_with_href(node, index, link) do
project_link_filter(link, link_content: inner_html)
end
end
diff --git a/lib/banzai/filter/reference_filter.rb b/lib/banzai/filter/reference_filter.rb
index 9e932ccf9f8..9032ca6ddc6 100644
--- a/lib/banzai/filter/reference_filter.rb
+++ b/lib/banzai/filter/reference_filter.rb
@@ -16,6 +16,23 @@ module Banzai
class << self
attr_accessor :reference_type
+
+ def call(doc, context = nil, result = nil)
+ new(doc, context, result).call_and_update_nodes
+ end
+ end
+
+ def initialize(doc, context = nil, result = nil)
+ super
+
+ if update_nodes_enabled?
+ @new_nodes = {}
+ @nodes = self.result[:reference_filter_nodes]
+ end
+ end
+
+ def call_and_update_nodes
+ update_nodes_enabled? ? with_update_nodes { call } : call
end
# Returns a data attribute String to attach to a reference link
@@ -89,11 +106,6 @@ module Banzai
def each_node
return to_enum(__method__) unless block_given?
- query = %Q{descendant-or-self::text()[not(#{ignore_ancestor_query})]
- | descendant-or-self::a[
- not(contains(concat(" ", @class, " "), " gfm ")) and not(@href = "")
- ]}
-
doc.xpath(query).each do |node|
yield node
end
@@ -114,25 +126,25 @@ module Banzai
yield link, inner_html
end
- def replace_text_when_pattern_matches(node, pattern)
+ def replace_text_when_pattern_matches(node, index, pattern)
return unless node.text =~ pattern
content = node.to_html
html = yield content
- node.replace(html) unless content == html
+ replace_text_with_html(node, index, html) unless html == content
end
- def replace_link_node_with_text(node, link)
+ def replace_link_node_with_text(node, index)
html = yield
- node.replace(html) unless html == node.text
+ replace_text_with_html(node, index, html) unless html == node.text
end
- def replace_link_node_with_href(node, link)
+ def replace_link_node_with_href(node, index, link)
html = yield
- node.replace(html) unless html == link
+ replace_text_with_html(node, index, html) unless html == link
end
def text_node?(node)
@@ -145,9 +157,62 @@ module Banzai
private
+ def query
+ @query ||= %Q{descendant-or-self::text()[not(#{ignore_ancestor_query})]
+ | descendant-or-self::a[
+ not(contains(concat(" ", @class, " "), " gfm ")) and not(@href = "")
+ ]}
+ end
+
+ def replace_text_with_html(node, index, html)
+ if update_nodes_enabled?
+ replace_and_update_new_nodes(node, index, html)
+ else
+ node.replace(html)
+ end
+ end
+
+ def replace_and_update_new_nodes(node, index, html)
+ previous_node = node.previous
+ next_node = node.next
+ parent_node = node.parent
+ # Unfortunately node.replace(html) returns re-parented nodes, not the actual replaced nodes in the doc
+ # We need to find the actual nodes in the doc that were replaced
+ node.replace(html)
+ @new_nodes[index] = []
+
+ # We replaced the node with new nodes, so we find the first new node. If previous_node is nil, we take the parent's first child
+ new_node = previous_node ? previous_node.next : parent_node&.children&.first
+
+ # We iterate from the first to the last replaced node and store them in @new_nodes
+ while new_node && new_node != next_node
+ @new_nodes[index] << new_node.xpath(query)
+ new_node = new_node.next
+ end
+
+ @new_nodes[index].flatten!
+ end
+
def only_path?
context[:only_path]
end
+
+ def with_update_nodes
+ @new_nodes = {}
+ yield.tap { update_nodes! }
+ end
+
+ # Once the filter finishes replacing nodes, we update nodes with @new_nodes
+ def update_nodes!
+ @new_nodes.sort_by { |index, _new_nodes| -index }.each do |index, new_nodes|
+ nodes[index, 1] = new_nodes
+ end
+ result[:reference_filter_nodes] = nodes
+ end
+
+ def update_nodes_enabled?
+ Feature.enabled?(:update_nodes_for_banzai_reference_filter, project)
+ end
end
end
end
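The node bookkeeping above remembers, per original index, which nodes replaced the old one, and update_nodes! splices them back into the nodes array from the highest index down so earlier indexes stay valid. A toy illustration of that splice:

    nodes     = [:a, :b, :c]
    new_nodes = { 1 => [:b1, :b2] }

    new_nodes.sort_by { |index, _| -index }.each do |index, replacement|
      nodes[index, 1] = replacement
    end

    nodes # => [:a, :b1, :b2, :c]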
diff --git a/lib/banzai/filter/table_of_contents_filter.rb b/lib/banzai/filter/table_of_contents_filter.rb
index a2c8e92e560..b362607aed2 100644
--- a/lib/banzai/filter/table_of_contents_filter.rb
+++ b/lib/banzai/filter/table_of_contents_filter.rb
@@ -17,7 +17,7 @@ module Banzai
# :toc - String containing Table of Contents data as a `ul` element with
# `li` child elements.
class TableOfContentsFilter < HTML::Pipeline::Filter
- PUNCTUATION_REGEXP = /[^\p{Word}\- ]/u.freeze
+ include Gitlab::Utils::Markdown
def call
return doc if context[:no_header_anchors]
@@ -29,14 +29,7 @@ module Banzai
doc.css('h1, h2, h3, h4, h5, h6').each do |node|
if header_content = node.children.first
- id = node
- .text
- .strip
- .downcase
- .gsub(PUNCTUATION_REGEXP, '') # remove punctuation
- .tr(' ', '-') # replace spaces with dash
- .squeeze('-') # replace multiple dashes with one
- .gsub(/\A(\d+)\z/, 'anchor-\1') # digits-only hrefs conflict with issue refs
+ id = string_to_anchor(node.text)
uniq = headers[id] > 0 ? "-#{headers[id]}" : ''
headers[id] += 1
diff --git a/lib/banzai/filter/user_reference_filter.rb b/lib/banzai/filter/user_reference_filter.rb
index 9268ff1a827..262385524f4 100644
--- a/lib/banzai/filter/user_reference_filter.rb
+++ b/lib/banzai/filter/user_reference_filter.rb
@@ -31,15 +31,15 @@ module Banzai
ref_pattern = User.reference_pattern
ref_pattern_start = /\A#{ref_pattern}\z/
- nodes.each do |node|
+ nodes.each_with_index do |node, index|
if text_node?(node)
- replace_text_when_pattern_matches(node, ref_pattern) do |content|
+ replace_text_when_pattern_matches(node, index, ref_pattern) do |content|
user_link_filter(content)
end
elsif element_node?(node)
yield_valid_link(node) do |link, inner_html|
if link =~ ref_pattern_start
- replace_link_node_with_href(node, link) do
+ replace_link_node_with_href(node, index, link) do
user_link_filter(link, link_content: inner_html)
end
end
diff --git a/lib/banzai/pipeline/gfm_pipeline.rb b/lib/banzai/pipeline/gfm_pipeline.rb
index 2ea5fd3388a..10ac813ea15 100644
--- a/lib/banzai/pipeline/gfm_pipeline.rb
+++ b/lib/banzai/pipeline/gfm_pipeline.rb
@@ -48,7 +48,8 @@ module Banzai
def self.metrics_filters
[
Filter::InlineMetricsFilter,
- Filter::InlineGrafanaMetricsFilter
+ Filter::InlineGrafanaMetricsFilter,
+ Filter::InlineClusterMetricsFilter
]
end
diff --git a/lib/banzai/pipeline/jira_import/adf_commonmark_pipeline.rb b/lib/banzai/pipeline/jira_import/adf_commonmark_pipeline.rb
new file mode 100644
index 00000000000..8af0279673c
--- /dev/null
+++ b/lib/banzai/pipeline/jira_import/adf_commonmark_pipeline.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module Banzai
+ module Pipeline
+ module JiraImport
+ class AdfCommonmarkPipeline < BasePipeline
+ def self.filters
+ FilterArray[
+ Filter::JiraImport::AdfToCommonmarkFilter
+ ]
+ end
+ end
+ end
+ end
+end
diff --git a/lib/container_registry/tag.rb b/lib/container_registry/tag.rb
index e1a2891e43a..09c0aa66a0d 100644
--- a/lib/container_registry/tag.rb
+++ b/lib/container_registry/tag.rb
@@ -6,9 +6,6 @@ module ContainerRegistry
attr_reader :repository, :name
- # https://github.com/docker/distribution/commit/3150937b9f2b1b5b096b2634d0e7c44d4a0f89fb
- TAG_NAME_REGEX = /^[\w][\w.-]{0,127}$/.freeze
-
delegate :registry, :client, to: :repository
delegate :revision, :short_revision, to: :config_blob, allow_nil: true
@@ -16,10 +13,6 @@ module ContainerRegistry
@repository, @name = repository, name
end
- def valid_name?
- !name.match(TAG_NAME_REGEX).nil?
- end
-
def valid?
manifest.present?
end
diff --git a/lib/declarative_policy/base.rb b/lib/declarative_policy/base.rb
index cd6e1606f22..4af0251b990 100644
--- a/lib/declarative_policy/base.rb
+++ b/lib/declarative_policy/base.rb
@@ -117,6 +117,23 @@ module DeclarativePolicy
own_delegations[name] = delegation_block
end
+ # Declare that the given abilities should not be read from delegates.
+ #
+ # This is useful if you have an ability that you want to define
+ # differently in a policy than in a delegated policy, but still want to
+ # delegate all other abilities.
+ #
+ # example:
+ #
+ # delegate { @subject.parent }
+ #
+ # overrides :drive_car, :watch_tv
+ #
+ def overrides(*names)
+ @overrides ||= [].to_set
+ @overrides.merge(names)
+ end
+
# Declares a rule, constructed using RuleDsl, and returns
# a PolicyDsl which is used for registering the rule with
# this class. PolicyDsl will call back into Base.enable_when,
@@ -265,9 +282,13 @@ module DeclarativePolicy
@runners ||= {}
@runners[ability] ||=
begin
- delegated_runners = delegated_policies.values.compact.map { |p| p.runner(ability) }
own_runner = Runner.new(own_steps(ability))
- delegated_runners.inject(own_runner, &:merge_runner)
+ if self.class.overrides.include?(ability)
+ own_runner
+ else
+ delegated_runners = delegated_policies.values.compact.map { |p| p.runner(ability) }
+ delegated_runners.inject(own_runner, &:merge_runner)
+ end
end
end
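A hedged example of how a policy might use the new overrides declaration (the class, condition, and ability names are illustrative, not taken from this diff):

    class ExamplePolicy < DeclarativePolicy::Base
      delegate { @subject.parent }

      # :read_thing is resolved only from this policy's own rules; every other
      # ability still falls through to the delegated parent policy.
      overrides :read_thing

      condition(:visible) { @subject.visible_to?(@user) }

      rule { visible }.enable :read_thing
    end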
diff --git a/lib/event_filter.rb b/lib/event_filter.rb
index 538727dc422..0b5833b91ed 100644
--- a/lib/event_filter.rb
+++ b/lib/event_filter.rb
@@ -1,6 +1,8 @@
# frozen_string_literal: true
class EventFilter
+ include Gitlab::Utils::StrongMemoize
+
attr_accessor :filter
ALL = 'all'
@@ -10,6 +12,7 @@ class EventFilter
COMMENTS = 'comments'
TEAM = 'team'
WIKI = 'wiki'
+ DESIGNS = 'designs'
def initialize(filter)
# Split using comma to maintain backward compatibility Ex/ "filter1,filter2"
@@ -23,8 +26,6 @@ class EventFilter
# rubocop: disable CodeReuse/ActiveRecord
def apply_filter(events)
- events = apply_feature_flags(events)
-
case filter
when PUSH
events.pushed_action
@@ -38,6 +39,8 @@ class EventFilter
events.where(action: [:created, :updated, :closed, :reopened], target_type: 'Issue')
when WIKI
wiki_events(events)
+ when DESIGNS
+ design_events(events)
else
events
end
@@ -46,20 +49,16 @@ class EventFilter
private
- def apply_feature_flags(events)
- return events.not_wiki_page unless Feature.enabled?(:wiki_events)
-
- events
- end
-
def wiki_events(events)
- return events unless Feature.enabled?(:wiki_events)
-
events.for_wiki_page
end
+ def design_events(events)
+ events.for_design
+ end
+
def filters
- [ALL, PUSH, MERGED, ISSUE, COMMENTS, TEAM, WIKI]
+ [ALL, PUSH, MERGED, ISSUE, COMMENTS, TEAM, WIKI, DESIGNS]
end
end
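Illustrative use of the new filter value, matching the design_events branch above (events is any Event relation):

    EventFilter.new(EventFilter::DESIGNS).apply_filter(events) # => events.for_design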
diff --git a/lib/feature.rb b/lib/feature.rb
index d995e0a988f..7cf40b63fdf 100644
--- a/lib/feature.rb
+++ b/lib/feature.rb
@@ -34,26 +34,13 @@ class Feature
def persisted_names
return [] unless Gitlab::Database.exists?
- if Gitlab::Utils.to_boolean(ENV['FF_LEGACY_PERSISTED_NAMES'])
- # To be removed:
- # This uses a legacy persisted names that are know to work (always)
- Gitlab::SafeRequestStore[:flipper_persisted_names] ||=
- begin
- # We saw on GitLab.com, this database request was called 2300
- # times/s. Let's cache it for a minute to avoid that load.
- Gitlab::ProcessMemoryCache.cache_backend.fetch('flipper:persisted_names', expires_in: 1.minute) do
- FlipperFeature.feature_names
- end.to_set
- end
- else
- # This loads names of all stored feature flags
- # and returns a stable Set in the following order:
- # - Memoized: using Gitlab::SafeRequestStore or @flipper
- # - L1: using Process cache
- # - L2: using Redis cache
- # - DB: using a single SQL query
- flipper.adapter.features
- end
+ # This loads names of all stored feature flags
+ # and returns a stable Set in the following order:
+ # - Memoized: using Gitlab::SafeRequestStore or @flipper
+ # - L1: using Process cache
+ # - L2: using Redis cache
+ # - DB: using a single SQL query
+ flipper.adapter.features
end
def persisted_name?(feature_name)
@@ -67,12 +54,14 @@ class Feature
# unless set explicitly. The default is `disabled`
# TODO: remove the `default_enabled:` and read it from the `definition_yaml`
# check: https://gitlab.com/gitlab-org/gitlab/-/issues/30228
- def enabled?(key, thing = nil, default_enabled: false)
+ def enabled?(key, thing = nil, type: :development, default_enabled: false)
if check_feature_flags_definition?
if thing && !thing.respond_to?(:flipper_id)
raise InvalidFeatureFlagError,
"The thing '#{thing.class.name}' for feature flag '#{key}' needs to include `FeatureGate` or implement `flipper_id`"
end
+
+ Feature::Definition.valid_usage!(key, type: type, default_enabled: default_enabled)
end
# During setup the database does not exist yet. So we haven't stored a value
@@ -88,9 +77,9 @@ class Feature
!default_enabled || Feature.persisted_name?(feature.name) ? feature.enabled?(thing) : true
end
- def disabled?(key, thing = nil, default_enabled: false)
+ def disabled?(key, thing = nil, type: :development, default_enabled: false)
# we need to make different method calls to make it easy to mock / define expectations in test mode
- thing.nil? ? !enabled?(key, default_enabled: default_enabled) : !enabled?(key, thing, default_enabled: default_enabled)
+ thing.nil? ? !enabled?(key, type: type, default_enabled: default_enabled) : !enabled?(key, thing, type: type, default_enabled: default_enabled)
end
def enable(key, thing = true)
@@ -142,6 +131,12 @@ class Feature
def register_feature_groups
end
+ def register_definitions
+ return unless check_feature_flags_definition?
+
+ Feature::Definition.load_all!
+ end
+
private
def flipper
diff --git a/lib/feature/definition.rb b/lib/feature/definition.rb
new file mode 100644
index 00000000000..b0ea55c5805
--- /dev/null
+++ b/lib/feature/definition.rb
@@ -0,0 +1,137 @@
+# frozen_string_literal: true
+
+class Feature
+ class Definition
+ include ::Feature::Shared
+
+ attr_reader :path
+ attr_reader :attributes
+
+ PARAMS.each do |param|
+ define_method(param) do
+ attributes[param]
+ end
+ end
+
+ def initialize(path, opts = {})
+ @path = path
+ @attributes = {}
+
+ # assign nil for all unknown opts
+ PARAMS.each do |param|
+ @attributes[param] = opts[param]
+ end
+ end
+
+ def key
+ name.to_sym
+ end
+
+ def validate!
+ unless name.present?
+ raise Feature::InvalidFeatureFlagError, "Feature flag is missing name"
+ end
+
+ unless path.present?
+ raise Feature::InvalidFeatureFlagError, "Feature flag '#{name}' is missing path"
+ end
+
+ unless type.present?
+ raise Feature::InvalidFeatureFlagError, "Feature flag '#{name}' is missing type. Ensure to update #{path}"
+ end
+
+ unless Definition::TYPES.include?(type.to_sym)
+ raise Feature::InvalidFeatureFlagError, "Feature flag '#{name}' type '#{type}' is invalid. Ensure to update #{path}"
+ end
+
+ unless File.basename(path, ".yml") == name
+ raise Feature::InvalidFeatureFlagError, "Feature flag '#{name}' has an invalid path: '#{path}'. Ensure to update #{path}"
+ end
+
+ unless File.basename(File.dirname(path)) == type
+ raise Feature::InvalidFeatureFlagError, "Feature flag '#{name}' has an invalid type: '#{path}'. Ensure to update #{path}"
+ end
+
+ if default_enabled.nil?
+ raise Feature::InvalidFeatureFlagError, "Feature flag '#{name}' is missing default_enabled. Ensure to update #{path}"
+ end
+ end
+
+ def valid_usage!(type_in_code:, default_enabled_in_code:)
+ unless Array(type).include?(type_in_code.to_s)
+ # Raise exception in test and dev
+ raise Feature::InvalidFeatureFlagError, "The `type:` of `#{key}` is not equal to config: " \
+ "#{type_in_code} vs #{type}. Ensure to use valid type in #{path} or ensure that you use " \
+ "a valid syntax: #{TYPES.dig(type, :example)}"
+ end
+
+ # We accept an array of defaults as some features are undefined
+ # and have `default_enabled: true/false`
+ unless Array(default_enabled).include?(default_enabled_in_code)
+ # Raise exception in test and dev
+ raise Feature::InvalidFeatureFlagError, "The `default_enabled:` of `#{key}` is not equal to config: " \
+ "#{default_enabled_in_code} vs #{default_enabled}. Ensure to update #{path}"
+ end
+ end
+
+ def to_h
+ attributes
+ end
+
+ class << self
+ def paths
+ @paths ||= [Rails.root.join('config', 'feature_flags', '**', '*.yml')]
+ end
+
+ def definitions
+ @definitions ||= {}
+ end
+
+ def load_all!
+ definitions.clear
+
+ paths.each do |glob_path|
+ load_all_from_path!(glob_path)
+ end
+
+ definitions
+ end
+
+ def valid_usage!(key, type:, default_enabled:)
+ if definition = definitions[key.to_sym]
+ definition.valid_usage!(type_in_code: type, default_enabled_in_code: default_enabled)
+ elsif type_definition = self::TYPES[type]
+ raise InvalidFeatureFlagError, "Missing feature definition for `#{key}`" unless type_definition[:optional]
+ else
+ raise InvalidFeatureFlagError, "Unknown feature flag type used: `#{type}`"
+ end
+ end
+
+ private
+
+ def load_from_file(path)
+ definition = File.read(path)
+ definition = YAML.safe_load(definition)
+ definition.deep_symbolize_keys!
+
+ self.new(path, definition).tap(&:validate!)
+ rescue => e
+ raise Feature::InvalidFeatureFlagError, "Invalid definition for `#{path}`: #{e.message}"
+ end
+
+ def load_all_from_path!(glob_path)
+ Dir.glob(glob_path).each do |path|
+ definition = load_from_file(path)
+
+ if previous = definitions[definition.key]
+ raise InvalidFeatureFlagError, "Feature flag '#{definition.key}' is already defined in '#{previous.path}'"
+ end
+
+ definitions[definition.key] = definition
+ end
+ end
+ end
+ end
+end
+
+Feature::Definition.prepend_if_ee('EE::Feature::Definition')
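Each flag is described by a YAML file whose basename must match the flag name and whose parent directory must match its type; valid_usage! then cross-checks every Feature.enabled? call against that definition. A hedged example with a hypothetical my_flag definition:

    defn = Feature::Definition.new(
      'config/feature_flags/development/my_flag.yml',
      name: 'my_flag', type: 'development', default_enabled: false
    )

    defn.validate!                                                                 # passes
    defn.valid_usage!(type_in_code: :development, default_enabled_in_code: false)  # passes
    defn.valid_usage!(type_in_code: :ops, default_enabled_in_code: false)          # raises InvalidFeatureFlagError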
diff --git a/lib/feature/shared.rb b/lib/feature/shared.rb
new file mode 100644
index 00000000000..14efbb07100
--- /dev/null
+++ b/lib/feature/shared.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+# This file can contain only simple constructs as it is shared between:
+# 1. `Pure Ruby`: `bin/feature-flag`
+# 2. `GitLab Rails`: `lib/feature/definition.rb`
+
+class Feature
+ module Shared
+ # optional: defines if an on-disk definition is required for this feature flag type
+ # rollout_issue: defines if `bin/feature-flag` asks for rollout issue
+ # example: usage being shown when exception is raised
+ TYPES = {
+ development: {
+ description: 'Short lived, used to enable unfinished code to be deployed',
+ optional: true,
+ rollout_issue: true,
+ example: <<-EOS
+ Feature.enabled?(:my_feature_flag)
+ Feature.enabled?(:my_feature_flag, type: :development)
+ EOS
+ }
+ }.freeze
+
+ PARAMS = %i[
+ name
+ default_enabled
+ type
+ introduced_by_url
+ rollout_issue_url
+ group
+ ].freeze
+ end
+end
diff --git a/lib/gitlab/action_cable/config.rb b/lib/gitlab/action_cable/config.rb
new file mode 100644
index 00000000000..38e870353eb
--- /dev/null
+++ b/lib/gitlab/action_cable/config.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ActionCable
+ class Config
+ class << self
+ def in_app?
+ Gitlab::Utils.to_boolean(ENV.fetch('ACTION_CABLE_IN_APP', false))
+ end
+
+ def worker_pool_size
+ ENV.fetch('ACTION_CABLE_WORKER_POOL_SIZE', 4).to_i
+ end
+ end
+ end
+ end
+end
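Both settings are read straight from the environment, so behaviour depends entirely on what is exported at boot. For example:

    ENV['ACTION_CABLE_IN_APP'] = 'true'
    ENV['ACTION_CABLE_WORKER_POOL_SIZE'] = '8'

    Gitlab::ActionCable::Config.in_app?           # => true
    Gitlab::ActionCable::Config.worker_pool_size  # => 8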
diff --git a/lib/gitlab/alert_management/alert_params.rb b/lib/gitlab/alert_management/alert_params.rb
index 789a4fe246a..84a75e62ecf 100644
--- a/lib/gitlab/alert_management/alert_params.rb
+++ b/lib/gitlab/alert_management/alert_params.rb
@@ -8,7 +8,7 @@ module Gitlab
}.freeze
def self.from_generic_alert(project:, payload:)
- parsed_payload = Gitlab::Alerting::NotificationPayloadParser.call(payload).with_indifferent_access
+ parsed_payload = Gitlab::Alerting::NotificationPayloadParser.call(payload, project).with_indifferent_access
annotations = parsed_payload[:annotations]
{
@@ -34,7 +34,9 @@ module Gitlab
payload: parsed_alert.payload,
started_at: parsed_alert.starts_at,
ended_at: parsed_alert.ends_at,
- fingerprint: parsed_alert.gitlab_fingerprint
+ fingerprint: parsed_alert.gitlab_fingerprint,
+ environment: parsed_alert.environment,
+ prometheus_alert: parsed_alert.gitlab_alert
}
end
end
diff --git a/lib/gitlab/alert_management/fingerprint.rb b/lib/gitlab/alert_management/fingerprint.rb
index 6ab47c88ca1..d7842d3b37d 100644
--- a/lib/gitlab/alert_management/fingerprint.rb
+++ b/lib/gitlab/alert_management/fingerprint.rb
@@ -10,11 +10,14 @@ module Gitlab
def generate(data)
return unless data.present?
- if data.is_a?(Array)
- data = flatten_array(data)
- end
+ string = case data
+ when Array then flatten_array(data)
+ when Hash then flatten_hash(data)
+ else
+ data.to_s
+ end
- Digest::SHA1.hexdigest(data.to_s)
+ Digest::SHA1.hexdigest(string)
end
private
@@ -22,6 +25,11 @@ module Gitlab
def flatten_array(array)
array.flatten.map!(&:to_s).join
end
+
+ def flatten_hash(hash)
+ # Sort the hash so the generated SHA is the same regardless of key order
+ Gitlab::Utils::SafeInlineHash.merge_keys!(hash).sort.to_s
+ end
end
end
end
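Hashes are now inlined and sorted before hashing, so two payloads with the same content but different key order produce the same fingerprint. A minimal sketch (assuming the class-level generate entry point delegates to the instance method shown above):

    Gitlab::AlertManagement::Fingerprint.generate(tool: 'alertmanager', hosts: ['web-01']) ==
      Gitlab::AlertManagement::Fingerprint.generate(hosts: ['web-01'], tool: 'alertmanager')
    # => true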
diff --git a/lib/gitlab/alerting/notification_payload_parser.rb b/lib/gitlab/alerting/notification_payload_parser.rb
index d98b9296347..f285dcf507f 100644
--- a/lib/gitlab/alerting/notification_payload_parser.rb
+++ b/lib/gitlab/alerting/notification_payload_parser.rb
@@ -8,12 +8,13 @@ module Gitlab
DEFAULT_TITLE = 'New: Incident'
DEFAULT_SEVERITY = 'critical'
- def initialize(payload)
+ def initialize(payload, project)
@payload = payload.to_h.with_indifferent_access
+ @project = project
end
- def self.call(payload)
- new(payload).call
+ def self.call(payload, project)
+ new(payload, project).call
end
def call
@@ -25,7 +26,7 @@ module Gitlab
private
- attr_reader :payload
+ attr_reader :payload, :project
def title
payload[:title].presence || DEFAULT_TITLE
@@ -84,3 +85,5 @@ module Gitlab
end
end
end
+
+Gitlab::Alerting::NotificationPayloadParser.prepend_if_ee('EE::Gitlab::Alerting::NotificationPayloadParser')
diff --git a/lib/gitlab/analytics/cycle_analytics/records_fetcher.rb b/lib/gitlab/analytics/cycle_analytics/records_fetcher.rb
index e7352a23b99..4d47a17545a 100644
--- a/lib/gitlab/analytics/cycle_analytics/records_fetcher.rb
+++ b/lib/gitlab/analytics/cycle_analytics/records_fetcher.rb
@@ -90,9 +90,7 @@ module Gitlab
end
def ordered_and_limited_query
- query
- .reorder(stage.end_event.timestamp_projection.desc)
- .limit(MAX_RECORDS)
+ order_by_end_event(query).limit(MAX_RECORDS)
end
def records
diff --git a/lib/gitlab/analytics/cycle_analytics/stage_events.rb b/lib/gitlab/analytics/cycle_analytics/stage_events.rb
index 5146f92f521..39dc706dff5 100644
--- a/lib/gitlab/analytics/cycle_analytics/stage_events.rb
+++ b/lib/gitlab/analytics/cycle_analytics/stage_events.rb
@@ -60,7 +60,7 @@ module Gitlab
# hash for defining ActiveRecord enum: identifier => number
def self.to_enum
- enum_mapping.each_with_object({}) { |(k, v), hash| hash[k.identifier] = v }
+ enum_mapping.transform_keys { |k| k.identifier }
end
def self.pairing_rules
diff --git a/lib/gitlab/analytics/cycle_analytics/stage_events/code_stage_start.rb b/lib/gitlab/analytics/cycle_analytics/stage_events/code_stage_start.rb
index 9f0ca80ba50..c5f843d5f1a 100644
--- a/lib/gitlab/analytics/cycle_analytics/stage_events/code_stage_start.rb
+++ b/lib/gitlab/analytics/cycle_analytics/stage_events/code_stage_start.rb
@@ -21,6 +21,11 @@ module Gitlab
issue_metrics_table[:first_mentioned_in_commit_at]
end
+ override :column_list
+ def column_list
+ [timestamp_projection]
+ end
+
# rubocop: disable CodeReuse/ActiveRecord
def apply_query_customization(query)
issue_metrics_join = mr_closing_issues_table
diff --git a/lib/gitlab/analytics/cycle_analytics/stage_events/issue_stage_end.rb b/lib/gitlab/analytics/cycle_analytics/stage_events/issue_stage_end.rb
index 0ea98e82ecc..7c1f4436c93 100644
--- a/lib/gitlab/analytics/cycle_analytics/stage_events/issue_stage_end.rb
+++ b/lib/gitlab/analytics/cycle_analytics/stage_events/issue_stage_end.rb
@@ -18,10 +18,15 @@ module Gitlab
end
def timestamp_projection
- Arel::Nodes::NamedFunction.new('COALESCE', [
+ Arel::Nodes::NamedFunction.new('COALESCE', column_list)
+ end
+
+ override :column_list
+ def column_list
+ [
issue_metrics_table[:first_associated_with_milestone_at],
issue_metrics_table[:first_added_to_board_at]
- ])
+ ]
end
# rubocop: disable CodeReuse/ActiveRecord
diff --git a/lib/gitlab/analytics/cycle_analytics/stage_events/metrics_based_stage_event.rb b/lib/gitlab/analytics/cycle_analytics/stage_events/metrics_based_stage_event.rb
index 4ca8745abe4..fe477490648 100644
--- a/lib/gitlab/analytics/cycle_analytics/stage_events/metrics_based_stage_event.rb
+++ b/lib/gitlab/analytics/cycle_analytics/stage_events/metrics_based_stage_event.rb
@@ -10,6 +10,11 @@ module Gitlab
query.joins(:metrics)
end
# rubocop: enable CodeReuse/ActiveRecord
+
+ override :column_list
+ def column_list
+ [timestamp_projection]
+ end
end
end
end
diff --git a/lib/gitlab/analytics/cycle_analytics/stage_events/plan_stage_start.rb b/lib/gitlab/analytics/cycle_analytics/stage_events/plan_stage_start.rb
index 37168a1fb0f..bddc326de71 100644
--- a/lib/gitlab/analytics/cycle_analytics/stage_events/plan_stage_start.rb
+++ b/lib/gitlab/analytics/cycle_analytics/stage_events/plan_stage_start.rb
@@ -18,10 +18,15 @@ module Gitlab
end
def timestamp_projection
- Arel::Nodes::NamedFunction.new('COALESCE', [
+ Arel::Nodes::NamedFunction.new('COALESCE', column_list)
+ end
+
+ override :column_list
+ def column_list
+ [
issue_metrics_table[:first_associated_with_milestone_at],
issue_metrics_table[:first_added_to_board_at]
- ])
+ ]
end
# rubocop: disable CodeReuse/ActiveRecord
diff --git a/lib/gitlab/analytics/cycle_analytics/stage_events/production_stage_end.rb b/lib/gitlab/analytics/cycle_analytics/stage_events/production_stage_end.rb
index 619b45664fa..cf05ebeb706 100644
--- a/lib/gitlab/analytics/cycle_analytics/stage_events/production_stage_end.rb
+++ b/lib/gitlab/analytics/cycle_analytics/stage_events/production_stage_end.rb
@@ -21,6 +21,11 @@ module Gitlab
mr_metrics_table[:first_deployed_to_production_at]
end
+ override :column_list
+ def column_list
+ [timestamp_projection]
+ end
+
# rubocop: disable CodeReuse/ActiveRecord
def apply_query_customization(query)
query.joins(merge_requests_closing_issues: { merge_request: [:metrics] }).where(mr_metrics_table[:first_deployed_to_production_at].gteq(mr_table[:created_at]))
diff --git a/lib/gitlab/analytics/cycle_analytics/stage_events/stage_event.rb b/lib/gitlab/analytics/cycle_analytics/stage_events/stage_event.rb
index 0c75a141c3c..79738747e71 100644
--- a/lib/gitlab/analytics/cycle_analytics/stage_events/stage_event.rb
+++ b/lib/gitlab/analytics/cycle_analytics/stage_events/stage_event.rb
@@ -7,6 +7,7 @@ module Gitlab
# Base class for expressing an event that can be used for a stage.
class StageEvent
include Gitlab::CycleAnalytics::MetricsTables
+ extend Gitlab::Utils::Override
delegate :label_based?, to: :class
@@ -32,6 +33,13 @@ module Gitlab
raise NotImplementedError
end
+ # List of columns that are referenced in the `timestamp_projection` expression
+ # Example timestamp projection: COALESCE(issue_metrics.created_at, issue_metrics.updated_at)
+ # Expected column list: issue_metrics.created_at, issue_metrics.updated_at
+ def column_list
+ []
+ end
+
# Optionally a StageEvent may apply additional filtering or join other tables on the base query.
def apply_query_customization(query)
query
diff --git a/lib/gitlab/analytics/cycle_analytics/stage_query_helpers.rb b/lib/gitlab/analytics/cycle_analytics/stage_query_helpers.rb
index 29a2d55df1a..c9a75b39959 100644
--- a/lib/gitlab/analytics/cycle_analytics/stage_query_helpers.rb
+++ b/lib/gitlab/analytics/cycle_analytics/stage_query_helpers.rb
@@ -22,6 +22,29 @@ module Gitlab
stage.start_event.timestamp_projection
)
end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def order_by_end_event(query)
+ ordered_query = query.reorder(stage.end_event.timestamp_projection.desc)
+
+ # When filtering for more than one label, postgres requires the columns in ORDER BY to be present in the GROUP BY clause
+ if requires_grouping?
+ column_list = [
+ ordered_query.arel_table[:id],
+ *stage.end_event.column_list,
+ *stage.start_event.column_list
+ ]
+
+ ordered_query = ordered_query.group(column_list)
+ end
+
+ ordered_query
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def requires_grouping?
+ Array(params[:label_name]).size > 1
+ end
end
end
end
diff --git a/lib/gitlab/analytics/unique_visits.rb b/lib/gitlab/analytics/unique_visits.rb
new file mode 100644
index 00000000000..9dd7d048eec
--- /dev/null
+++ b/lib/gitlab/analytics/unique_visits.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Analytics
+ class UniqueVisits
+ TARGET_IDS = Set[
+ 'g_analytics_contribution',
+ 'g_analytics_insights',
+ 'g_analytics_issues',
+ 'g_analytics_productivity',
+ 'g_analytics_valuestream',
+ 'p_analytics_pipelines',
+ 'p_analytics_code_reviews',
+ 'p_analytics_valuestream',
+ 'p_analytics_insights',
+ 'p_analytics_issues',
+ 'p_analytics_repo',
+ 'u_analytics_todos',
+ 'i_analytics_cohorts',
+ 'i_analytics_dev_ops_score'
+ ].freeze
+
+ KEY_EXPIRY_LENGTH = 28.days
+
+ def track_visit(visitor_id, target_id, time = Time.zone.now)
+ target_key = key(target_id, time)
+
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.multi do |multi|
+ multi.pfadd(target_key, visitor_id)
+ multi.expire(target_key, KEY_EXPIRY_LENGTH)
+ end
+ end
+ end
+
+ def weekly_unique_visits_for_target(target_id, week_of: 7.days.ago)
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.pfcount(key(target_id, week_of))
+ end
+ end
+
+ def weekly_unique_visits_for_any_target(week_of: 7.days.ago)
+ keys = TARGET_IDS.map { |target_id| key(target_id, week_of) }
+
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.pfcount(*keys)
+ end
+ end
+
+ private
+
+ def key(target_id, time)
+ raise "Invalid target id #{target_id}" unless TARGET_IDS.include?(target_id.to_s)
+
+ year_week = time.strftime('%G-%V')
+ "#{target_id}-{#{year_week}}"
+ end
+ end
+ end
+end
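Visits are counted in Redis HyperLogLogs, one key per target and ISO week (for example 'g_analytics_contribution-{2020-29}'), expiring after 28 days. Illustrative usage, pinning both calls to the same week:

    visits = Gitlab::Analytics::UniqueVisits.new
    now = Time.zone.now

    visits.track_visit('user-42', 'g_analytics_contribution', now)
    visits.weekly_unique_visits_for_target('g_analytics_contribution', week_of: now) # => 1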
diff --git a/lib/gitlab/application_rate_limiter.rb b/lib/gitlab/application_rate_limiter.rb
index 3277ddd9f49..ed963476524 100644
--- a/lib/gitlab/application_rate_limiter.rb
+++ b/lib/gitlab/application_rate_limiter.rb
@@ -19,17 +19,17 @@ module Gitlab
# and only do that when it's needed.
def rate_limits
{
- issues_create: { threshold: -> { Gitlab::CurrentSettings.current_application_settings.issues_create_limit }, interval: 1.minute },
- project_export: { threshold: 30, interval: 5.minutes },
- project_download_export: { threshold: 10, interval: 10.minutes },
+ issues_create: { threshold: -> { application_settings.issues_create_limit }, interval: 1.minute },
+ project_export: { threshold: -> { application_settings.project_export_limit }, interval: 1.minute },
+ project_download_export: { threshold: -> { application_settings.project_download_export_limit }, interval: 1.minute },
project_repositories_archive: { threshold: 5, interval: 1.minute },
- project_generate_new_export: { threshold: 30, interval: 5.minutes },
- project_import: { threshold: 30, interval: 5.minutes },
- play_pipeline_schedule: { threshold: 1, interval: 1.minute },
- show_raw_controller: { threshold: -> { Gitlab::CurrentSettings.current_application_settings.raw_blob_request_limit }, interval: 1.minute },
- group_export: { threshold: 30, interval: 5.minutes },
- group_download_export: { threshold: 10, interval: 10.minutes },
- group_import: { threshold: 30, interval: 5.minutes }
+ project_generate_new_export: { threshold: -> { application_settings.project_export_limit }, interval: 1.minute },
+ project_import: { threshold: -> { application_settings.project_import_limit }, interval: 1.minute },
+ play_pipeline_schedule: { threshold: 1, interval: 1.minute },
+ show_raw_controller: { threshold: -> { application_settings.raw_blob_request_limit }, interval: 1.minute },
+ group_export: { threshold: -> { application_settings.group_export_limit }, interval: 1.minute },
+ group_download_export: { threshold: -> { application_settings.group_download_export_limit }, interval: 1.minute },
+ group_import: { threshold: -> { application_settings.group_import_limit }, interval: 1.minute }
}.freeze
end
@@ -130,6 +130,10 @@ module Gitlab
"application_rate_limiter:#{serialized}"
end
+
+ def application_settings
+ Gitlab::CurrentSettings.current_application_settings
+ end
end
end
end
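The hard-coded thresholds become lambdas over the application settings, so admins can tune the limits without a deploy. A hedged sketch of how a lambda threshold would be resolved at check time (the resolution helper itself is outside this hunk):

    limit = rate_limits[:project_export][:threshold]
    limit = limit.call if limit.respond_to?(:call) # reads application_settings.project_export_limit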
diff --git a/lib/gitlab/auth/auth_finders.rb b/lib/gitlab/auth/auth_finders.rb
index 93342fbad51..bd5aed0d964 100644
--- a/lib/gitlab/auth/auth_finders.rb
+++ b/lib/gitlab/auth/auth_finders.rb
@@ -54,6 +54,11 @@ module Gitlab
User.find_by_feed_token(token) || raise(UnauthorizedError)
end
+ def find_user_from_bearer_token
+ find_user_from_job_bearer_token ||
+ find_user_from_access_token
+ end
+
def find_user_from_job_token
return unless route_authentication_setting[:job_token_allowed]
return find_user_from_basic_auth_job if route_authentication_setting[:job_token_allowed] == :basic_auth
@@ -92,6 +97,8 @@ module Gitlab
validate_access_token!(scopes: [:api])
+ ::PersonalAccessTokens::LastUsedService.new(access_token).execute
+
access_token.user || raise(UnauthorizedError)
end
@@ -100,6 +107,8 @@ module Gitlab
validate_access_token!
+ ::PersonalAccessTokens::LastUsedService.new(access_token).execute
+
access_token.user || raise(UnauthorizedError)
end
@@ -132,6 +141,9 @@ module Gitlab
end
def validate_access_token!(scopes: [])
+ # return early if we've already authenticated via a job token
+ return if @current_authenticated_job.present? # rubocop:disable Gitlab/ModuleWithInstanceVariables
+
# return early if we've already authenticated via a deploy token
return if @current_authenticated_deploy_token.present? # rubocop:disable Gitlab/ModuleWithInstanceVariables
@@ -151,6 +163,20 @@ module Gitlab
private
+ def find_user_from_job_bearer_token
+ return unless route_authentication_setting[:job_token_allowed]
+
+ token = parsed_oauth_token
+ return unless token
+
+ job = ::Ci::Build.find_by_token(token)
+ return unless job
+
+ @current_authenticated_job = job # rubocop:disable Gitlab/ModuleWithInstanceVariables
+
+ job.user
+ end
+
def route_authentication_setting
return {} unless respond_to?(:route_setting)
diff --git a/lib/gitlab/background_migration.rb b/lib/gitlab/background_migration.rb
index 6a16c37e880..ea0307e8bd6 100644
--- a/lib/gitlab/background_migration.rb
+++ b/lib/gitlab/background_migration.rb
@@ -33,6 +33,7 @@ module Gitlab
next unless job.queue == self.queue
next unless migration_class == steal_class
+ next if block_given? && !(yield migration_args)
begin
perform(migration_class, migration_args) if job.delete
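steal can now take a block that filters which queued background-migration jobs are stolen. A hedged usage example (migration name and predicate are illustrative):

    Gitlab::BackgroundMigration.steal('BackfillNamespaceSettings') do |args|
      args.first <= 10_000 # only steal jobs whose start_id is in range
    end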
diff --git a/lib/gitlab/background_migration/backfill_namespace_settings.rb b/lib/gitlab/background_migration/backfill_namespace_settings.rb
new file mode 100644
index 00000000000..a391d5f4ebe
--- /dev/null
+++ b/lib/gitlab/background_migration/backfill_namespace_settings.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ # Backfill namespace_settings for a range of namespaces
+ class BackfillNamespaceSettings
+ def perform(start_id, end_id)
+ ActiveRecord::Base.connection.execute <<~SQL
+ INSERT INTO namespace_settings (namespace_id, created_at, updated_at)
+ SELECT namespaces.id, now(), now()
+ FROM namespaces
+ WHERE namespaces.id BETWEEN #{start_id} AND #{end_id}
+ ON CONFLICT (namespace_id) DO NOTHING;
+ SQL
+ end
+ end
+ end
+end
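Because of `ON CONFLICT (namespace_id) DO NOTHING`, the job is idempotent and safe to enqueue over overlapping ID ranges. A sketch of how a post-deploy migration might schedule it in batches, assuming the usual background-migration helper (the interval and batch size are illustrative):

# Illustrative scheduling from a post-deployment migration.
queue_background_migration_jobs_by_range_at_intervals(
  Namespace,                   # batched over namespace ids
  'BackfillNamespaceSettings', # the job class defined above
  2.minutes,                   # delay between batches
  batch_size: 10_000
)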
diff --git a/lib/gitlab/background_migration/cleanup_concurrent_schema_change.rb b/lib/gitlab/background_migration/cleanup_concurrent_schema_change.rb
index 54f77f184d5..91b50c1a493 100644
--- a/lib/gitlab/background_migration/cleanup_concurrent_schema_change.rb
+++ b/lib/gitlab/background_migration/cleanup_concurrent_schema_change.rb
@@ -2,7 +2,7 @@
module Gitlab
module BackgroundMigration
- # Base class for cleaning up concurrent schema changes.
+ # Base class for background migrations that clean up after concurrent rename/type changes.
class CleanupConcurrentSchemaChange
include Database::MigrationHelpers
@@ -10,7 +10,7 @@ module Gitlab
# old_column - The name of the old (to drop) column.
# new_column - The name of the new column.
def perform(table, old_column, new_column)
- return unless column_exists?(table, new_column)
+ return unless column_exists?(table, new_column) && column_exists?(table, old_column)
rows_to_migrate = define_model_for(table)
.where(new_column => nil)
@@ -28,6 +28,10 @@ module Gitlab
end
end
+ def cleanup_concurrent_schema_change(_table, _old_column, _new_column)
+ raise NotImplementedError
+ end
+
# These methods are necessary so we can re-use the migration helpers in
# this class.
def connection
diff --git a/lib/gitlab/background_migration/digest_column.rb b/lib/gitlab/background_migration/digest_column.rb
deleted file mode 100644
index 22a3bb8f8f3..00000000000
--- a/lib/gitlab/background_migration/digest_column.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-# frozen_string_literal: true
-
-# rubocop:disable Style/Documentation
-module Gitlab
- module BackgroundMigration
- class DigestColumn
- class PersonalAccessToken < ActiveRecord::Base
- self.table_name = 'personal_access_tokens'
- end
-
- def perform(model, attribute_from, attribute_to, start_id, stop_id)
- model = model.constantize if model.is_a?(String)
-
- model.transaction do
- relation = model.where(id: start_id..stop_id).where.not(attribute_from => nil).lock
-
- relation.each do |instance|
- instance.update_columns(attribute_to => Gitlab::CryptoHelper.sha256(instance.read_attribute(attribute_from)),
- attribute_from => nil)
- end
- end
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/encrypt_columns.rb b/lib/gitlab/background_migration/encrypt_columns.rb
deleted file mode 100644
index 173543b7c25..00000000000
--- a/lib/gitlab/background_migration/encrypt_columns.rb
+++ /dev/null
@@ -1,104 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module BackgroundMigration
- # EncryptColumn migrates data from an unencrypted column - `foo`, say - to
- # an encrypted column - `encrypted_foo`, say.
- #
- # To avoid depending on a particular version of the model in app/, add a
- # model to `lib/gitlab/background_migration/models/encrypt_columns` and use
- # it in the migration that enqueues the jobs, so code can be shared.
- #
- # For this background migration to work, the table that is migrated _has_ to
- # have an `id` column as the primary key. Additionally, the encrypted column
- # should be managed by attr_encrypted, and map to an attribute with the same
- # name as the unencrypted column (i.e., the unencrypted column should be
- # shadowed), unless you want to define specific methods / accessors in the
- # temporary model in `/models/encrypt_columns/your_model.rb`.
- #
- class EncryptColumns
- def perform(model, attributes, from, to)
- model = model.constantize if model.is_a?(String)
-
- # If sidekiq hasn't undergone a restart, its idea of what columns are
- # present may be inaccurate, so ensure this is as fresh as possible
- model.reset_column_information
- model.define_attribute_methods
-
- attributes = expand_attributes(model, Array(attributes).map(&:to_sym))
-
- model.transaction do
- # Use SELECT ... FOR UPDATE to prevent the value being changed while
- # we are encrypting it
- relation = model.where(id: from..to).lock
-
- relation.each do |instance|
- encrypt!(instance, attributes)
- end
- end
- end
-
- def clear_migrated_values?
- true
- end
-
- private
-
- # Build a hash of { attribute => encrypted column name }
- def expand_attributes(klass, attributes)
- expanded = attributes.flat_map do |attribute|
- attr_config = klass.encrypted_attributes[attribute]
- crypt_column_name = attr_config&.fetch(:attribute)
-
- raise "Couldn't determine encrypted column for #{klass}##{attribute}" if
- crypt_column_name.nil?
-
- raise "#{klass} source column: #{attribute} is missing" unless
- klass.column_names.include?(attribute.to_s)
-
- # Running the migration without the destination column being present
- # leads to data loss
- raise "#{klass} destination column: #{crypt_column_name} is missing" unless
- klass.column_names.include?(crypt_column_name.to_s)
-
- [attribute, crypt_column_name]
- end
-
- Hash[*expanded]
- end
-
- # Generate ciphertext for each column and update the database
- def encrypt!(instance, attributes)
- to_clear = attributes
- .map { |plain, crypt| apply_attribute!(instance, plain, crypt) }
- .compact
- .flat_map { |plain| [plain, nil] }
-
- to_clear = Hash[*to_clear]
-
- if instance.changed?
- instance.save!
-
- if clear_migrated_values?
- instance.update_columns(to_clear)
- end
- end
- end
-
- def apply_attribute!(instance, plain_column, crypt_column)
- plaintext = instance[plain_column]
- ciphertext = instance[crypt_column]
-
- # No need to do anything if the plaintext is nil, or an encrypted
- # value already exists
- return unless plaintext.present?
- return if ciphertext.present?
-
- # attr_encrypted will calculate and set the expected value for us
- instance.public_send("#{plain_column}=", plaintext) # rubocop:disable GitlabSecurity/PublicSend
-
- plain_column
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/encrypt_runners_tokens.rb b/lib/gitlab/background_migration/encrypt_runners_tokens.rb
deleted file mode 100644
index ec64a73542e..00000000000
--- a/lib/gitlab/background_migration/encrypt_runners_tokens.rb
+++ /dev/null
@@ -1,32 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module BackgroundMigration
- # EncryptColumn migrates data from an unencrypted column - `foo`, say - to
- # an encrypted column - `encrypted_foo`, say.
- #
- # We only create a subclass here because we want to isolate this migration
- # (migrating unencrypted runner registration tokens to encrypted columns)
- # from other `EncryptColumns` migration. This class name is going to be
- # serialized and stored in Redis and later picked by Sidekiq, so we need to
- # create a separate class name in order to isolate these migration tasks.
- #
- # We can solve this differently, see tech debt issue:
- #
- # https://gitlab.com/gitlab-org/gitlab-foss/issues/54328
- #
- class EncryptRunnersTokens < EncryptColumns
- def perform(model, from, to)
- resource = "::Gitlab::BackgroundMigration::Models::EncryptColumns::#{model.to_s.capitalize}"
- model = resource.constantize
- attributes = model.encrypted_attributes.keys
-
- super(model, attributes, from, to)
- end
-
- def clear_migrated_values?
- false
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/fix_pages_access_level.rb b/lib/gitlab/background_migration/fix_pages_access_level.rb
index 0d49f3dd8c5..31d2e78b2d2 100644
--- a/lib/gitlab/background_migration/fix_pages_access_level.rb
+++ b/lib/gitlab/background_migration/fix_pages_access_level.rb
@@ -16,7 +16,7 @@ module Gitlab
end
# Namespace
- class Namespace < ApplicationRecord
+ class Namespace < ActiveRecord::Base
self.table_name = 'namespaces'
self.inheritance_column = :_type_disabled
diff --git a/lib/gitlab/background_migration/mailers/unconfirm_mailer.rb b/lib/gitlab/background_migration/mailers/unconfirm_mailer.rb
new file mode 100644
index 00000000000..c096dae0631
--- /dev/null
+++ b/lib/gitlab/background_migration/mailers/unconfirm_mailer.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+# rubocop:disable Style/Documentation
+
+module Gitlab
+ module BackgroundMigration
+ module Mailers
+ class UnconfirmMailer < ::Notify
+ prepend_view_path(File.join(__dir__, 'views'))
+
+ def unconfirm_notification_email(user)
+ @user = user
+ @verification_from_mail = Gitlab.config.gitlab.email_from
+
+ mail(
+ template_path: 'unconfirm_mailer',
+ template_name: 'unconfirm_notification_email',
+ to: @user.notification_email,
+ subject: subject('GitLab email verification request')
+ )
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/background_migration/mailers/views/unconfirm_mailer/unconfirm_notification_email.html.haml b/lib/gitlab/background_migration/mailers/views/unconfirm_mailer/unconfirm_notification_email.html.haml
new file mode 100644
index 00000000000..d8f7466a1ca
--- /dev/null
+++ b/lib/gitlab/background_migration/mailers/views/unconfirm_mailer/unconfirm_notification_email.html.haml
@@ -0,0 +1,19 @@
+-# haml-lint:disable NoPlainNodes
+%p
+ Dear GitLab user,
+
+%p
+ As part of our commitment to keeping GitLab secure, we have identified and addressed a vulnerability in GitLab that allowed some users to bypass the email verification process in a #{link_to("recent security release", "https://about.gitlab.com/releases/2020/05/27/security-release-13-0-1-released", target: '_blank')}.
+
+%p
+ As a precautionary measure, you will need to re-verify some of your account's email addresses before continuing to use GitLab. Sorry for the inconvenience!
+
+%p
+ We have already sent the re-verification email with a subject line of "Confirmation instructions" from #{@verification_from_mail}. Please feel free to contribute any questions or comments to #{link_to("this issue", "https://gitlab.com/gitlab-com/www-gitlab-com/-/issues/7942", target: '_blank')}.
+
+%p
+ If you are not "#{@user.username}", please #{link_to 'report this to our administrator', new_abuse_report_url(user_id: @user.id)}
+
+%p
+ Thank you for being a GitLab user!
+-# haml-lint:enable NoPlainNodes
diff --git a/lib/gitlab/background_migration/mailers/views/unconfirm_mailer/unconfirm_notification_email.text.erb b/lib/gitlab/background_migration/mailers/views/unconfirm_mailer/unconfirm_notification_email.text.erb
new file mode 100644
index 00000000000..d20af9b9803
--- /dev/null
+++ b/lib/gitlab/background_migration/mailers/views/unconfirm_mailer/unconfirm_notification_email.text.erb
@@ -0,0 +1,14 @@
+Dear GitLab user,
+
+As part of our commitment to keeping GitLab secure, we have identified and addressed a vulnerability in GitLab that allowed some users to bypass the email verification process in a recent security release.
+
+Security release: https://about.gitlab.com/releases/2020/05/27/security-release-13-0-1-released
+
+As a precautionary measure, you will need to re-verify some of your account's email addresses before continuing to use GitLab. Sorry for the inconvenience!
+
+We have already sent the re-verification email with a subject line of "Confirmation instructions" from <%= @verification_from_mail %>.
+Please feel free to contribute any questions or comments to this issue: https://gitlab.com/gitlab-com/www-gitlab-com/-/issues/7942
+
+If you are not "<%= @user.username %>", please report this to our administrator. Report link: <%= new_abuse_report_url(user_id: @user.id) %>
+
+Thank you for being a GitLab user!
diff --git a/lib/gitlab/background_migration/models/encrypt_columns/namespace.rb b/lib/gitlab/background_migration/models/encrypt_columns/namespace.rb
deleted file mode 100644
index 41f18979d76..00000000000
--- a/lib/gitlab/background_migration/models/encrypt_columns/namespace.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module BackgroundMigration
- module Models
- module EncryptColumns
- # This model is shared between synchronous and background migrations to
- # encrypt the `runners_token` column in `namespaces` table.
- #
- class Namespace < ActiveRecord::Base
- include ::EachBatch
-
- self.table_name = 'namespaces'
- self.inheritance_column = :_type_disabled
-
- def runners_token=(value)
- self.runners_token_encrypted =
- ::Gitlab::CryptoHelper.aes256_gcm_encrypt(value)
- end
-
- def self.encrypted_attributes
- { runners_token: { attribute: :runners_token_encrypted } }
- end
- end
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/models/encrypt_columns/project.rb b/lib/gitlab/background_migration/models/encrypt_columns/project.rb
deleted file mode 100644
index bfeae14584d..00000000000
--- a/lib/gitlab/background_migration/models/encrypt_columns/project.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module BackgroundMigration
- module Models
- module EncryptColumns
- # This model is shared between synchronous and background migrations to
- # encrypt the `runners_token` column in `projects` table.
- #
- class Project < ActiveRecord::Base
- include ::EachBatch
-
- self.table_name = 'projects'
- self.inheritance_column = :_type_disabled
-
- def runners_token=(value)
- self.runners_token_encrypted =
- ::Gitlab::CryptoHelper.aes256_gcm_encrypt(value)
- end
-
- def self.encrypted_attributes
- { runners_token: { attribute: :runners_token_encrypted } }
- end
- end
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/models/encrypt_columns/runner.rb b/lib/gitlab/background_migration/models/encrypt_columns/runner.rb
deleted file mode 100644
index 14ddce4b147..00000000000
--- a/lib/gitlab/background_migration/models/encrypt_columns/runner.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module BackgroundMigration
- module Models
- module EncryptColumns
- # This model is shared between synchronous and background migrations to
- # encrypt the `token` column in `ci_runners` table.
- #
- class Runner < ActiveRecord::Base
- include ::EachBatch
-
- self.table_name = 'ci_runners'
- self.inheritance_column = :_type_disabled
-
- def token=(value)
- self.token_encrypted =
- ::Gitlab::CryptoHelper.aes256_gcm_encrypt(value)
- end
-
- def self.encrypted_attributes
- { token: { attribute: :token_encrypted } }
- end
- end
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/models/encrypt_columns/settings.rb b/lib/gitlab/background_migration/models/encrypt_columns/settings.rb
deleted file mode 100644
index 08ae35c0671..00000000000
--- a/lib/gitlab/background_migration/models/encrypt_columns/settings.rb
+++ /dev/null
@@ -1,37 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module BackgroundMigration
- module Models
- module EncryptColumns
- # This model is shared between synchronous and background migrations to
- # encrypt the `runners_token` column in `application_settings` table.
- #
- class Settings < ActiveRecord::Base
- include ::EachBatch
- include ::CacheableAttributes
-
- self.table_name = 'application_settings'
- self.inheritance_column = :_type_disabled
-
- after_commit do
- ::ApplicationSetting.expire
- end
-
- def runners_registration_token=(value)
- self.runners_registration_token_encrypted =
- ::Gitlab::CryptoHelper.aes256_gcm_encrypt(value)
- end
-
- def self.encrypted_attributes
- {
- runners_registration_token: {
- attribute: :runners_registration_token_encrypted
- }
- }
- end
- end
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/models/encrypt_columns/web_hook.rb b/lib/gitlab/background_migration/models/encrypt_columns/web_hook.rb
deleted file mode 100644
index 34e72fd9f34..00000000000
--- a/lib/gitlab/background_migration/models/encrypt_columns/web_hook.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module BackgroundMigration
- module Models
- module EncryptColumns
- # This model is shared between synchronous and background migrations to
- # encrypt the `token` and `url` columns
- class WebHook < ActiveRecord::Base
- include ::EachBatch
-
- self.table_name = 'web_hooks'
- self.inheritance_column = :_type_disabled
-
- attr_encrypted :token,
- mode: :per_attribute_iv,
- algorithm: 'aes-256-gcm',
- key: ::Settings.attr_encrypted_db_key_base_32
-
- attr_encrypted :url,
- mode: :per_attribute_iv,
- algorithm: 'aes-256-gcm',
- key: ::Settings.attr_encrypted_db_key_base_32
- end
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/populate_project_snippet_statistics.rb b/lib/gitlab/background_migration/populate_project_snippet_statistics.rb
new file mode 100644
index 00000000000..7659b63271f
--- /dev/null
+++ b/lib/gitlab/background_migration/populate_project_snippet_statistics.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ # This class creates or updates project snippet statistics that have not
+ # been created or initialized yet. It also updates the related project
+ # statistics and the root storage namespace stats.
+ class PopulateProjectSnippetStatistics
+ def perform(snippet_ids)
+ project_snippets(snippet_ids).group_by(&:namespace_id).each do |namespace_id, namespace_snippets|
+ namespace_snippets.group_by(&:project).each do |project, snippets|
+ upsert_snippet_statistics(snippets)
+ update_project_statistics(project)
+ rescue
+ error_message("Error updating statistics for project #{project.id}")
+ end
+
+ update_namespace_statistics(namespace_snippets.first.project.root_namespace)
+ rescue => e
+ error_message("Error updating statistics for namespace #{namespace_id}: #{e.message}")
+ end
+ end
+
+ private
+
+ def project_snippets(snippet_ids)
+ ProjectSnippet
+ .select('snippets.*, projects.namespace_id')
+ .where(id: snippet_ids)
+ .joins(:project)
+ .includes(:statistics)
+ .includes(snippet_repository: :shard)
+ .includes(project: [:route, :statistics, :namespace])
+ end
+
+ def upsert_snippet_statistics(snippets)
+ snippets.each do |snippet|
+ response = Snippets::UpdateStatisticsService.new(snippet).execute
+
+ error_message("#{response.message} snippet: #{snippet.id}") if response.error?
+ end
+ end
+
+ def logger
+ @logger ||= Gitlab::BackgroundMigration::Logger.build
+ end
+
+ def error_message(message)
+ logger.error(message: "Snippet Statistics Migration: #{message}")
+ end
+
+ def update_project_statistics(project)
+ project.statistics&.refresh!(only: [:snippets_size])
+ end
+
+ def update_namespace_statistics(namespace)
+ Namespaces::StatisticsRefresherService.new.execute(namespace)
+ end
+ end
+ end
+end
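The job takes a plain array of snippet IDs, groups them by namespace and project, and refreshes statistics at each level. Invoking it directly looks like this (the IDs are placeholders):

Gitlab::BackgroundMigration::PopulateProjectSnippetStatistics.new.perform([1, 2, 3])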
diff --git a/lib/gitlab/background_migration/update_vulnerabilities_from_dismissal_feedback.rb b/lib/gitlab/background_migration/update_vulnerabilities_from_dismissal_feedback.rb
new file mode 100644
index 00000000000..bfe9f673b53
--- /dev/null
+++ b/lib/gitlab/background_migration/update_vulnerabilities_from_dismissal_feedback.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ # rubocop: disable Style/Documentation
+ class UpdateVulnerabilitiesFromDismissalFeedback
+ def perform(project_id)
+ end
+ end
+ end
+end
+
+Gitlab::BackgroundMigration::UpdateVulnerabilitiesFromDismissalFeedback.prepend_if_ee('EE::Gitlab::BackgroundMigration::UpdateVulnerabilitiesFromDismissalFeedback')
diff --git a/lib/gitlab/background_migration/wrongfully_confirmed_email_unconfirmer.rb b/lib/gitlab/background_migration/wrongfully_confirmed_email_unconfirmer.rb
new file mode 100644
index 00000000000..5f63cf5836e
--- /dev/null
+++ b/lib/gitlab/background_migration/wrongfully_confirmed_email_unconfirmer.rb
@@ -0,0 +1,97 @@
+# frozen_string_literal: true
+# rubocop:disable Style/Documentation
+
+module Gitlab
+ module BackgroundMigration
+ class WrongfullyConfirmedEmailUnconfirmer
+ class UserModel < ActiveRecord::Base
+ alias_method :reset, :reload
+
+ self.table_name = 'users'
+
+ scope :active, -> { where(state: 'active', user_type: nil) } # only humans, skip bots
+
+ devise :confirmable
+ end
+
+ class EmailModel < ActiveRecord::Base
+ alias_method :reset, :reload
+
+ self.table_name = 'emails'
+
+ belongs_to :user
+
+ devise :confirmable
+
+ def self.wrongfully_confirmed_emails(start_id, stop_id)
+ joins(:user)
+ .merge(UserModel.active)
+ .where(id: (start_id..stop_id))
+ .where('emails.confirmed_at IS NOT NULL')
+ .where('emails.confirmed_at = users.confirmed_at')
+ .where('emails.email <> users.email')
+ end
+ end
+
+ def perform(start_id, stop_id)
+ email_records = EmailModel
+ .wrongfully_confirmed_emails(start_id, stop_id)
+ .to_a
+
+ user_ids = email_records.map(&:user_id).uniq
+
+ ActiveRecord::Base.transaction do
+ update_email_records(start_id, stop_id)
+ update_user_records(user_ids)
+ end
+
+ # Refind the records with the "real" Email model so devise will notice that the user / email is unconfirmed
+ unconfirmed_email_records = ::Email.where(id: email_records.map(&:id))
+ ActiveRecord::Associations::Preloader.new.preload(unconfirmed_email_records, [:user])
+
+ send_emails(unconfirmed_email_records)
+ end
+
+ private
+
+ def update_email_records(start_id, stop_id)
+ EmailModel.connection.execute <<-SQL
+ WITH md5_strings as (
+ #{email_query_for_update(start_id, stop_id).to_sql}
+ )
+ UPDATE #{EmailModel.connection.quote_table_name(EmailModel.table_name)}
+ SET confirmed_at = NULL,
+ confirmation_token = md5_strings.md5_string,
+ confirmation_sent_at = NOW()
+ FROM md5_strings
+ WHERE id = md5_strings.email_id
+ SQL
+ end
+
+ def update_user_records(user_ids)
+ UserModel
+ .where(id: user_ids)
+ .update_all("confirmed_at = NULL, confirmation_sent_at = NOW(), unconfirmed_email = NULL, confirmation_token=md5(users.id::varchar || users.created_at || users.encrypted_password || '#{Integer(Time.now.to_i)}')")
+ end
+
+ def email_query_for_update(start_id, stop_id)
+ EmailModel
+ .wrongfully_confirmed_emails(start_id, stop_id)
+ .select('emails.id as email_id', "md5(emails.id::varchar || emails.created_at || users.encrypted_password || '#{Integer(Time.now.to_i)}') as md5_string")
+ end
+
+ def send_emails(email_records)
+ user_records = email_records.map(&:user).uniq
+
+ user_records.each do |user|
+ Gitlab::BackgroundMigration::Mailers::UnconfirmMailer.unconfirm_notification_email(user).deliver_later
+ DeviseMailer.confirmation_instructions(user, user.confirmation_token).deliver_later(wait: 1.minute)
+ end
+
+ email_records.each do |email|
+ DeviseMailer.confirmation_instructions(email, email.confirmation_token).deliver_later(wait: 1.minute)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/bitbucket_import/importer.rb b/lib/gitlab/bitbucket_import/importer.rb
index 5a9fad3be56..e59494c9d9c 100644
--- a/lib/gitlab/bitbucket_import/importer.rb
+++ b/lib/gitlab/bitbucket_import/importer.rb
@@ -3,8 +3,6 @@
module Gitlab
module BitbucketImport
class Importer
- include Gitlab::BitbucketImport::Metrics
-
LABELS = [{ title: 'bug', color: '#FF0000' },
{ title: 'enhancement', color: '#428BCA' },
{ title: 'proposal', color: '#69D100' },
@@ -26,6 +24,7 @@ module Gitlab
import_issues
import_pull_requests
handle_errors
+ metrics.track_finished_import
true
end
@@ -115,6 +114,8 @@ module Gitlab
updated_at: issue.updated_at
)
+ metrics.issues_counter.increment
+
gitlab_issue.labels << @labels[label_name]
import_issue_comments(issue, gitlab_issue) if gitlab_issue.persisted?
@@ -195,6 +196,8 @@ module Gitlab
updated_at: pull_request.updated_at
)
+ metrics.merge_requests_counter.increment
+
import_pull_request_comments(pull_request, merge_request) if merge_request.persisted?
rescue StandardError => e
store_pull_request_error(pull_request, e)
@@ -288,6 +291,10 @@ module Gitlab
project_path: project.full_path
}
end
+
+ def metrics
+ @metrics ||= Gitlab::Import::Metrics.new(:bitbucket_importer, @project)
+ end
end
end
end
diff --git a/lib/gitlab/bitbucket_import/metrics.rb b/lib/gitlab/bitbucket_import/metrics.rb
deleted file mode 100644
index 25e2d9b211e..00000000000
--- a/lib/gitlab/bitbucket_import/metrics.rb
+++ /dev/null
@@ -1,41 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module BitbucketImport
- module Metrics
- extend ActiveSupport::Concern
-
- IMPORTER = :bitbucket_importer
-
- included do
- prepend Gitlab::Import::Metrics
-
- Gitlab::Import::Metrics.measure(:execute, metrics: {
- "#{IMPORTER}_imported_projects": {
- type: :counter,
- description: 'The number of imported Bitbucket projects'
- },
- "#{IMPORTER}_total_duration_seconds": {
- type: :histogram,
- labels: { importer: IMPORTER },
- description: 'Total time spent importing Bitbucket projects, in seconds'
- }
- })
-
- Gitlab::Import::Metrics.measure(:import_issue, metrics: {
- "#{IMPORTER}_imported_issues": {
- type: :counter,
- description: 'The number of imported Bitbucket issues'
- }
- })
-
- Gitlab::Import::Metrics.measure(:import_pull_request, metrics: {
- "#{IMPORTER}_imported_pull_requests": {
- type: :counter,
- description: 'The number of imported Bitbucket pull requests'
- }
- })
- end
- end
- end
-end
diff --git a/lib/gitlab/bitbucket_server_import/importer.rb b/lib/gitlab/bitbucket_server_import/importer.rb
index 16fe5b46b1f..18a1b64729e 100644
--- a/lib/gitlab/bitbucket_server_import/importer.rb
+++ b/lib/gitlab/bitbucket_server_import/importer.rb
@@ -43,6 +43,7 @@ module Gitlab
import_pull_requests
delete_temp_branches
handle_errors
+ metrics.track_finished_import
log_info(stage: "complete")
@@ -219,7 +220,11 @@ module Gitlab
creator = Gitlab::Import::MergeRequestCreator.new(project)
merge_request = creator.execute(attributes)
- import_pull_request_comments(pull_request, merge_request) if merge_request.persisted?
+ if merge_request.persisted?
+ import_pull_request_comments(pull_request, merge_request)
+
+ metrics.merge_requests_counter.increment
+ end
log_info(stage: 'import_bitbucket_pull_requests', message: 'finished', iid: pull_request.iid)
end
@@ -388,6 +393,10 @@ module Gitlab
project_path: project.full_path
}
end
+
+ def metrics
+ @metrics ||= Gitlab::Import::Metrics.new(:bitbucket_server_importer, @project)
+ end
end
end
end
diff --git a/lib/gitlab/cache/ci/project_pipeline_status.rb b/lib/gitlab/cache/ci/project_pipeline_status.rb
index e7a7d23ef7e..d981f263c5e 100644
--- a/lib/gitlab/cache/ci/project_pipeline_status.rb
+++ b/lib/gitlab/cache/ci/project_pipeline_status.rb
@@ -49,7 +49,8 @@ module Gitlab
def load_status
return if loaded?
- return unless commit
+
+ return unless Gitlab::Ci::Features.pipeline_status_omit_commit_sha_in_cache_key?(project) || commit
if has_cache?
load_from_cache
@@ -66,6 +67,8 @@ module Gitlab
end
def load_from_project
+ return unless commit
+
self.sha, self.status, self.ref = commit.sha, commit.status, project.default_branch
end
@@ -114,7 +117,11 @@ module Gitlab
end
def cache_key
- "#{Gitlab::Redis::Cache::CACHE_NAMESPACE}:project:#{project.id}:pipeline_status:#{commit&.sha}"
+ if Gitlab::Ci::Features.pipeline_status_omit_commit_sha_in_cache_key?(project)
+ "#{Gitlab::Redis::Cache::CACHE_NAMESPACE}:project:#{project.id}:pipeline_status"
+ else
+ "#{Gitlab::Redis::Cache::CACHE_NAMESPACE}:project:#{project.id}:pipeline_status:#{commit&.sha}"
+ end
end
def commit
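With `ci_pipeline_status_omit_commit_sha_in_cache_key` enabled, the Redis entry is keyed per project instead of per commit, so it survives new pushes and only its stored sha/status fields change. For illustration (project id and SHA invented; the prefix is whatever `Gitlab::Redis::Cache::CACHE_NAMESPACE` resolves to):

# flag enabled:  "#{CACHE_NAMESPACE}:project:42:pipeline_status"
# flag disabled: "#{CACHE_NAMESPACE}:project:42:pipeline_status:4e5f0aa5..."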
diff --git a/lib/gitlab/ci/build/releaser.rb b/lib/gitlab/ci/build/releaser.rb
index ba6c7857e96..facb5f619bd 100644
--- a/lib/gitlab/ci/build/releaser.rb
+++ b/lib/gitlab/ci/build/releaser.rb
@@ -5,6 +5,8 @@ module Gitlab
module Build
class Releaser
BASE_COMMAND = 'release-cli create'
+ SINGLE_FLAGS = %i[name description tag_name ref released_at].freeze
+ ARRAY_FLAGS = %i[milestones].freeze
attr_reader :config
@@ -14,9 +16,20 @@ module Gitlab
def script
command = BASE_COMMAND.dup
- config.each { |k, v| command.concat(" --#{k.to_s.dasherize} \"#{v}\"") }
+ single_flags.each { |k, v| command.concat(" --#{k.to_s.dasherize} \"#{v}\"") }
+ array_commands.each { |k, v| v.each { |elem| command.concat(" --#{k.to_s.singularize.dasherize} \"#{elem}\"") } }
- command
+ [command]
+ end
+
+ private
+
+ def single_flags
+ config.slice(*SINGLE_FLAGS)
+ end
+
+ def array_commands
+ config.slice(*ARRAY_FLAGS)
end
end
end
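Scalar keys become one dasherized flag each, while array keys such as `milestones` expand into one singularized flag per element, and the result is now returned as a single-element array of script lines. A hedged example of the generated command (the config hash values and the constructor shape are assumptions for illustration):

releaser = Gitlab::Ci::Build::Releaser.new(config: {
  name: 'New release', tag_name: 'v0.2', milestones: %w[m1 m2]
})
releaser.script
# => ['release-cli create --name "New release" --tag-name "v0.2" --milestone "m1" --milestone "m2"']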
diff --git a/lib/gitlab/ci/config.rb b/lib/gitlab/ci/config.rb
index 10e0f4b8e4d..d81a3fef1f5 100644
--- a/lib/gitlab/ci/config.rb
+++ b/lib/gitlab/ci/config.rb
@@ -39,6 +39,10 @@ module Gitlab
@root.errors
end
+ def warnings
+ @root.warnings
+ end
+
def to_hash
@config
end
@@ -93,7 +97,7 @@ module Gitlab
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(error, @context.sentry_payload)
end
- # Overriden in EE
+ # Overridden in EE
def rescue_errors
RESCUE_ERRORS
end
diff --git a/lib/gitlab/ci/config/entry/environment.rb b/lib/gitlab/ci/config/entry/environment.rb
index fc62cca58ff..64e6d48133f 100644
--- a/lib/gitlab/ci/config/entry/environment.rb
+++ b/lib/gitlab/ci/config/entry/environment.rb
@@ -44,7 +44,7 @@ module Gitlab
validates :action,
type: String,
- inclusion: { in: %w[start stop], message: 'should be start or stop' },
+ inclusion: { in: %w[start stop prepare], message: 'should be start, stop or prepare' },
allow_nil: true
validates :on_stop, type: String, allow_nil: true
diff --git a/lib/gitlab/ci/config/entry/job.rb b/lib/gitlab/ci/config/entry/job.rb
index 66050a7bbe0..a615cab1a80 100644
--- a/lib/gitlab/ci/config/entry/job.rb
+++ b/lib/gitlab/ci/config/entry/job.rb
@@ -15,7 +15,7 @@ module Gitlab
allow_failure type when start_in artifacts cache
dependencies before_script needs after_script
environment coverage retry parallel interruptible timeout
- resource_group release].freeze
+ resource_group release secrets].freeze
REQUIRED_BY_NEEDS = %i[stage].freeze
@@ -191,3 +191,5 @@ module Gitlab
end
end
end
+
+::Gitlab::Ci::Config::Entry::Job.prepend_if_ee('::EE::Gitlab::Ci::Config::Entry::Job')
diff --git a/lib/gitlab/ci/config/entry/processable.rb b/lib/gitlab/ci/config/entry/processable.rb
index 81211acbec7..b4539475d88 100644
--- a/lib/gitlab/ci/config/entry/processable.rb
+++ b/lib/gitlab/ci/config/entry/processable.rb
@@ -82,6 +82,10 @@ module Gitlab
@entries.delete(:except) unless except_defined? # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
+ if has_rules? && !has_workflow_rules && Gitlab::Ci::Features.raise_job_rules_without_workflow_rules_warning?
+ add_warning('uses `rules` without defining `workflow:rules`')
+ end
+
# inherit root variables
@root_variables_value = deps&.variables_value # rubocop:disable Gitlab/ModuleWithInstanceVariables
diff --git a/lib/gitlab/ci/config/entry/release.rb b/lib/gitlab/ci/config/entry/release.rb
index b4e4c149730..7e504c24ade 100644
--- a/lib/gitlab/ci/config/entry/release.rb
+++ b/lib/gitlab/ci/config/entry/release.rb
@@ -12,8 +12,9 @@ module Gitlab
include ::Gitlab::Config::Entry::Validatable
include ::Gitlab::Config::Entry::Attributable
- ALLOWED_KEYS = %i[tag_name name description assets].freeze
- attributes %i[tag_name name assets].freeze
+ ALLOWED_KEYS = %i[tag_name name description ref released_at milestones assets].freeze
+ attributes %i[tag_name name ref milestones assets].freeze
+ attr_reader :released_at
# Attributable description conflicts with
# ::Gitlab::Config::Entry::Node.description
@@ -29,8 +30,25 @@ module Gitlab
validations do
validates :config, allowed_keys: ALLOWED_KEYS
- validates :tag_name, presence: true
+ validates :tag_name, type: String, presence: true
validates :description, type: String, presence: true
+ validates :milestones, array_of_strings_or_string: true, allow_blank: true
+ validate do
+ next unless config[:released_at]
+
+ begin
+ @released_at = DateTime.iso8601(config[:released_at])
+ rescue ArgumentError
+ errors.add(:released_at, "must be a valid datetime")
+ end
+ end
+ validate do
+ next unless config[:ref]
+ next if Commit.reference_valid?(config[:ref])
+ next if Gitlab::GitRefValidator.validate(config[:ref])
+
+ errors.add(:ref, "must be a valid ref")
+ end
end
def value
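`released_at` is accepted only when `DateTime.iso8601` can parse it, so values need to be full ISO 8601 timestamps. A quick illustration of what the validator above accepts and rejects:

DateTime.iso8601('2020-07-15T08:00:00Z') # parses, so the value is kept as @released_at
DateTime.iso8601('not-a-date')           # raises ArgumentError -> "must be a valid datetime"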
diff --git a/lib/gitlab/ci/config/entry/reports.rb b/lib/gitlab/ci/config/entry/reports.rb
index 74736b24d73..0ae65f43723 100644
--- a/lib/gitlab/ci/config/entry/reports.rb
+++ b/lib/gitlab/ci/config/entry/reports.rb
@@ -13,9 +13,9 @@ module Gitlab
ALLOWED_KEYS =
%i[junit codequality sast secret_detection dependency_scanning container_scanning
- dast performance license_management license_scanning metrics lsif
+ dast performance browser_performance load_performance license_management license_scanning metrics lsif
dotenv cobertura terraform accessibility cluster_applications
- requirements].freeze
+ requirements coverage_fuzzing].freeze
attributes ALLOWED_KEYS
@@ -25,13 +25,16 @@ module Gitlab
with_options allow_nil: true do
validates :junit, array_of_strings_or_string: true
- validates :codequality, array_of_strings_or_string: true
+ validates :coverage_fuzzing, array_of_strings_or_string: true
+ validates :sast, array_of_strings_or_string: true
validates :sast, array_of_strings_or_string: true
validates :secret_detection, array_of_strings_or_string: true
validates :dependency_scanning, array_of_strings_or_string: true
validates :container_scanning, array_of_strings_or_string: true
validates :dast, array_of_strings_or_string: true
validates :performance, array_of_strings_or_string: true
+ validates :browser_performance, array_of_strings_or_string: true
+ validates :load_performance, array_of_strings_or_string: true
validates :license_management, array_of_strings_or_string: true
validates :license_scanning, array_of_strings_or_string: true
validates :metrics, array_of_strings_or_string: true
diff --git a/lib/gitlab/ci/features.rb b/lib/gitlab/ci/features.rb
index a2eb31369c7..6130baeb9d5 100644
--- a/lib/gitlab/ci/features.rb
+++ b/lib/gitlab/ci/features.rb
@@ -10,20 +10,12 @@ module Gitlab
::Feature.enabled?(:ci_artifacts_exclude, default_enabled: true)
end
- def self.ensure_scheduling_type_enabled?
- ::Feature.enabled?(:ci_ensure_scheduling_type, default_enabled: true)
- end
-
def self.job_heartbeats_runner?(project)
::Feature.enabled?(:ci_job_heartbeats_runner, project, default_enabled: true)
end
- def self.instance_level_variables_limit_enabled?
- ::Feature.enabled?(:ci_instance_level_variables_limit, default_enabled: true)
- end
-
def self.pipeline_fixed_notifications?
- ::Feature.enabled?(:ci_pipeline_fixed_notifications)
+ ::Feature.enabled?(:ci_pipeline_fixed_notifications, default_enabled: true)
end
def self.instance_variables_ui_enabled?
@@ -38,9 +30,51 @@ module Gitlab
::Feature.enabled?(:ci_atomic_processing, project, default_enabled: true)
end
+ def self.pipeline_latest?
+ ::Feature.enabled?(:ci_pipeline_latest, default_enabled: true)
+ end
+
+ def self.pipeline_status_omit_commit_sha_in_cache_key?(project)
+ Feature.enabled?(:ci_pipeline_status_omit_commit_sha_in_cache_key, project)
+ end
+
def self.release_generation_enabled?
- ::Feature.enabled?(:ci_release_generation)
+ ::Feature.enabled?(:ci_release_generation, default_enabled: true)
+ end
+
+ # Remove in https://gitlab.com/gitlab-org/gitlab/-/issues/224199
+ def self.store_pipeline_messages?(project)
+ ::Feature.enabled?(:ci_store_pipeline_messages, project, default_enabled: true)
+ end
+
+ # Remove in https://gitlab.com/gitlab-org/gitlab/-/issues/227052
+ def self.variables_api_filter_environment_scope?
+ ::Feature.enabled?(:ci_variables_api_filter_environment_scope, default_enabled: false)
+ end
+
+ # This FF is only used for development purposes, to test that warnings can be
+ # raised and propagated to the UI.
+ def self.raise_job_rules_without_workflow_rules_warning?
+ ::Feature.enabled?(:ci_raise_job_rules_without_workflow_rules_warning)
+ end
+
+ def self.keep_latest_artifacts_for_ref_enabled?(project)
+ ::Feature.enabled?(:keep_latest_artifacts_for_ref, project, default_enabled: false)
+ end
+
+ def self.destroy_only_unlocked_expired_artifacts_enabled?
+ ::Feature.enabled?(:destroy_only_unlocked_expired_artifacts, default_enabled: false)
+ end
+
+ def self.bulk_insert_on_create?(project)
+ ::Feature.enabled?(:ci_bulk_insert_on_create, project, default_enabled: true)
+ end
+
+ def self.allow_to_create_merge_request_pipelines_in_target_project?(target_project)
+ ::Feature.enabled?(:ci_allow_to_create_merge_request_pipelines_in_target_project, target_project)
end
end
end
end
+
+::Gitlab::Ci::Features.prepend_if_ee('::EE::Gitlab::Ci::Features')
diff --git a/lib/gitlab/ci/parsers/terraform/tfplan.rb b/lib/gitlab/ci/parsers/terraform/tfplan.rb
index 19f724b79af..abfbe18e23f 100644
--- a/lib/gitlab/ci/parsers/terraform/tfplan.rb
+++ b/lib/gitlab/ci/parsers/terraform/tfplan.rb
@@ -8,15 +8,21 @@ module Gitlab
TfplanParserError = Class.new(Gitlab::Ci::Parsers::ParserError)
def parse!(json_data, terraform_reports, artifact:)
+ job_details = job_details(artifact.job)
+ job_id = job_details['job_id']
plan_data = Gitlab::Json.parse(json_data)
- raise TfplanParserError, 'Tfplan missing required key' unless has_required_keys?(plan_data)
-
- terraform_reports.add_plan(artifact.job.id.to_s, tfplan(plan_data, artifact.job))
+ if has_required_keys?(plan_data)
+ terraform_reports.add_plan(job_id, valid_tfplan(plan_data, job_details))
+ else
+ terraform_reports.add_plan(job_id, invalid_tfplan(:missing_json_keys, job_details))
+ end
rescue JSON::ParserError
- raise TfplanParserError, 'JSON parsing failed'
+ terraform_reports.add_plan(job_id, invalid_tfplan(:invalid_json_format, job_details))
rescue
- raise TfplanParserError, 'Tfplan parsing failed'
+ details = job_details || {}
+ plan_name = job_id || 'failed_tf_plan'
+ terraform_reports.add_plan(plan_name, invalid_tfplan(:unknown_error, details))
end
private
@@ -25,14 +31,24 @@ module Gitlab
(%w[create update delete] - plan_data.keys).empty?
end
- def tfplan(plan_data, artifact_job)
+ def job_details(job)
{
+ 'job_id' => job.id.to_s,
+ 'job_name' => job.options.dig(:artifacts, :name).to_s,
+ 'job_path' => Gitlab::Routing.url_helpers.project_job_path(job.project, job)
+ }
+ end
+
+ def invalid_tfplan(error_type, job_details)
+ job_details.merge('tf_report_error' => error_type)
+ end
+
+ def valid_tfplan(plan_data, job_details)
+ job_details.merge(
'create' => plan_data['create'].to_i,
'delete' => plan_data['delete'].to_i,
- 'job_name' => artifact_job.options.dig(:artifacts, :name).to_s,
- 'job_path' => Gitlab::Routing.url_helpers.project_job_path(artifact_job.project, artifact_job),
'update' => plan_data['update'].to_i
- }
+ )
end
end
end
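Rather than raising `TfplanParserError`, the parser now always records an entry per job and marks failures with a `tf_report_error` key. A sketch of the two hash shapes that end up in `terraform_reports` (ids and paths invented):

# parsable plan with the required keys
{ 'job_id' => '117', 'job_name' => 'plan', 'job_path' => '/group/project/-/jobs/117',
  'create' => 1, 'update' => 2, 'delete' => 0 }

# artifact that is not valid JSON
{ 'job_id' => '117', 'job_name' => 'plan', 'job_path' => '/group/project/-/jobs/117',
  'tf_report_error' => :invalid_json_format }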
diff --git a/lib/gitlab/ci/pipeline/chain/build.rb b/lib/gitlab/ci/pipeline/chain/build.rb
index 9662209f88e..4190c40eb66 100644
--- a/lib/gitlab/ci/pipeline/chain/build.rb
+++ b/lib/gitlab/ci/pipeline/chain/build.rb
@@ -20,7 +20,11 @@ module Gitlab
pipeline_schedule: @command.schedule,
merge_request: @command.merge_request,
external_pull_request: @command.external_pull_request,
- variables_attributes: Array(@command.variables_attributes)
+ variables_attributes: Array(@command.variables_attributes),
+ # This should be removed and set on the database column default
+ # level when the keep_latest_artifacts_for_ref feature flag is
+ # removed.
+ locked: ::Gitlab::Ci::Features.keep_latest_artifacts_for_ref_enabled?(@command.project) ? :artifacts_locked : :unlocked
)
end
diff --git a/lib/gitlab/ci/pipeline/chain/command.rb b/lib/gitlab/ci/pipeline/chain/command.rb
index 8118e7b2487..74b28b181bc 100644
--- a/lib/gitlab/ci/pipeline/chain/command.rb
+++ b/lib/gitlab/ci/pipeline/chain/command.rb
@@ -10,7 +10,7 @@ module Gitlab
:trigger_request, :schedule, :merge_request, :external_pull_request,
:ignore_skip_ci, :save_incompleted,
:seeds_block, :variables_attributes, :push_options,
- :chat_data, :allow_mirror_update, :bridge,
+ :chat_data, :allow_mirror_update, :bridge, :content,
# These attributes are set by Chains during processing:
:config_content, :config_processor, :stage_seeds
) do
@@ -78,7 +78,7 @@ module Gitlab
end
def metrics
- @metrics ||= Chain::Metrics.new
+ @metrics ||= ::Gitlab::Ci::Pipeline::Metrics.new
end
def observe_creation_duration(duration)
@@ -90,6 +90,10 @@ module Gitlab
metrics.pipeline_size_histogram
.observe({ source: pipeline.source.to_s }, pipeline.total_size)
end
+
+ def dangling_build?
+ %i[ondemand_dast_scan webide].include?(source)
+ end
end
end
end
diff --git a/lib/gitlab/ci/pipeline/chain/config/content.rb b/lib/gitlab/ci/pipeline/chain/config/content.rb
index 2008010b523..5314fd471c3 100644
--- a/lib/gitlab/ci/pipeline/chain/config/content.rb
+++ b/lib/gitlab/ci/pipeline/chain/config/content.rb
@@ -9,6 +9,7 @@ module Gitlab
include Chain::Helpers
SOURCES = [
+ Gitlab::Ci::Pipeline::Chain::Config::Content::Parameter,
Gitlab::Ci::Pipeline::Chain::Config::Content::Bridge,
Gitlab::Ci::Pipeline::Chain::Config::Content::Repository,
Gitlab::Ci::Pipeline::Chain::Config::Content::ExternalProject,
diff --git a/lib/gitlab/ci/pipeline/chain/config/content/parameter.rb b/lib/gitlab/ci/pipeline/chain/config/content/parameter.rb
new file mode 100644
index 00000000000..3dd216b33d1
--- /dev/null
+++ b/lib/gitlab/ci/pipeline/chain/config/content/parameter.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Ci
+ module Pipeline
+ module Chain
+ module Config
+ class Content
+ class Parameter < Source
+ UnsupportedSourceError = Class.new(StandardError)
+
+ def content
+ strong_memoize(:content) do
+ next unless command.content.present?
+ raise UnsupportedSourceError, "#{command.source} not a dangling build" unless command.dangling_build?
+
+ command.content
+ end
+ end
+
+ def source
+ :parameter_source
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
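The `Parameter` source lets dangling pipelines (currently `webide` and `ondemand_dast_scan`, per `Command#dangling_build?`) supply their CI config directly through the new `content` attribute instead of reading `.gitlab-ci.yml`. A heavily hedged sketch of what a caller might pass; only the `content:` attribute comes from the diff, the service-call shape is an assumption:

# Illustrative only: feed YAML straight into a dangling pipeline.
config = YAML.dump('terraform_plan' => { 'script' => ['terraform plan'] })
Ci::CreatePipelineService
  .new(project, current_user, ref: project.default_branch)
  .execute(:webide, content: config)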
diff --git a/lib/gitlab/ci/pipeline/chain/config/content/source.rb b/lib/gitlab/ci/pipeline/chain/config/content/source.rb
index 3389187473b..8bc172f93d3 100644
--- a/lib/gitlab/ci/pipeline/chain/config/content/source.rb
+++ b/lib/gitlab/ci/pipeline/chain/config/content/source.rb
@@ -11,6 +11,8 @@ module Gitlab
DEFAULT_YAML_FILE = '.gitlab-ci.yml'
+ attr_reader :command
+
def initialize(pipeline, command)
@pipeline = pipeline
@command = command
diff --git a/lib/gitlab/ci/pipeline/chain/config/process.rb b/lib/gitlab/ci/pipeline/chain/config/process.rb
index 1e47be21b93..2cfcb295407 100644
--- a/lib/gitlab/ci/pipeline/chain/config/process.rb
+++ b/lib/gitlab/ci/pipeline/chain/config/process.rb
@@ -19,7 +19,11 @@ module Gitlab
parent_pipeline: parent_pipeline
}
)
+
+ add_warnings_to_pipeline(@command.config_processor.warnings)
rescue Gitlab::Ci::YamlProcessor::ValidationError => ex
+ add_warnings_to_pipeline(ex.warnings)
+
error(ex.message, config_error: true)
rescue => ex
Gitlab::ErrorTracking.track_exception(ex,
@@ -34,6 +38,14 @@ module Gitlab
def break?
@pipeline.errors.any? || @pipeline.persisted?
end
+
+ private
+
+ def add_warnings_to_pipeline(warnings)
+ return unless warnings.present?
+
+ warnings.each { |message| warning(message) }
+ end
end
end
end
diff --git a/lib/gitlab/ci/pipeline/chain/create.rb b/lib/gitlab/ci/pipeline/chain/create.rb
index aa627bdb009..34649fe16f3 100644
--- a/lib/gitlab/ci/pipeline/chain/create.rb
+++ b/lib/gitlab/ci/pipeline/chain/create.rb
@@ -8,7 +8,9 @@ module Gitlab
include Chain::Helpers
def perform!
- pipeline.save!
+ BulkInsertableAssociations.with_bulk_insert(enabled: ::Gitlab::Ci::Features.bulk_insert_on_create?(project)) do
+ pipeline.save!
+ end
rescue ActiveRecord::RecordInvalid => e
error("Failed to persist the pipeline: #{e}")
end
diff --git a/lib/gitlab/ci/pipeline/chain/helpers.rb b/lib/gitlab/ci/pipeline/chain/helpers.rb
index 982ecc0ff51..aba7dab508d 100644
--- a/lib/gitlab/ci/pipeline/chain/helpers.rb
+++ b/lib/gitlab/ci/pipeline/chain/helpers.rb
@@ -11,9 +11,18 @@ module Gitlab
pipeline.yaml_errors = message
end
+ pipeline.add_error_message(message)
pipeline.drop!(drop_reason) if drop_reason
+
+ # TODO: consider not to rely on AR errors directly as they can be
+ # polluted with other unrelated errors (e.g. state machine)
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/220823
pipeline.errors.add(:base, message)
end
+
+ def warning(message)
+ pipeline.add_warning_message(message)
+ end
end
end
end
diff --git a/lib/gitlab/ci/pipeline/chain/metrics.rb b/lib/gitlab/ci/pipeline/chain/metrics.rb
deleted file mode 100644
index 980ab2de9b0..00000000000
--- a/lib/gitlab/ci/pipeline/chain/metrics.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module Ci
- module Pipeline
- module Chain
- class Metrics
- include Gitlab::Utils::StrongMemoize
-
- def pipeline_creation_duration_histogram
- strong_memoize(:pipeline_creation_duration_histogram) do
- name = :gitlab_ci_pipeline_creation_duration_seconds
- comment = 'Pipeline creation duration'
- labels = {}
- buckets = [0.01, 0.05, 0.1, 0.5, 1.0, 2.0, 5.0, 20.0, 50.0, 240.0]
-
- ::Gitlab::Metrics.histogram(name, comment, labels, buckets)
- end
- end
-
- def pipeline_size_histogram
- strong_memoize(:pipeline_size_histogram) do
- name = :gitlab_ci_pipeline_size_builds
- comment = 'Pipeline size'
- labels = { source: nil }
- buckets = [0, 1, 5, 10, 20, 50, 100, 200, 500, 1000]
-
- ::Gitlab::Metrics.histogram(name, comment, labels, buckets)
- end
- end
- end
- end
- end
- end
-end
diff --git a/lib/gitlab/ci/pipeline/chain/validate/abilities.rb b/lib/gitlab/ci/pipeline/chain/validate/abilities.rb
index a30b6c6ef0e..769d0dffd0b 100644
--- a/lib/gitlab/ci/pipeline/chain/validate/abilities.rb
+++ b/lib/gitlab/ci/pipeline/chain/validate/abilities.rb
@@ -19,7 +19,7 @@ module Gitlab
end
unless allowed_to_write_ref?
- return error("Insufficient permissions for protected ref '#{command.ref}'")
+ error("Insufficient permissions for protected ref '#{command.ref}'")
end
end
diff --git a/lib/gitlab/ci/pipeline/chain/validate/repository.rb b/lib/gitlab/ci/pipeline/chain/validate/repository.rb
index 8f5445850d7..7977ce90443 100644
--- a/lib/gitlab/ci/pipeline/chain/validate/repository.rb
+++ b/lib/gitlab/ci/pipeline/chain/validate/repository.rb
@@ -18,7 +18,7 @@ module Gitlab
end
if @command.ambiguous_ref?
- return error('Ref is ambiguous')
+ error('Ref is ambiguous')
end
end
diff --git a/lib/gitlab/ci/pipeline/metrics.rb b/lib/gitlab/ci/pipeline/metrics.rb
new file mode 100644
index 00000000000..649da745eea
--- /dev/null
+++ b/lib/gitlab/ci/pipeline/metrics.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Ci
+ module Pipeline
+ class Metrics
+ include Gitlab::Utils::StrongMemoize
+
+ def pipeline_creation_duration_histogram
+ strong_memoize(:pipeline_creation_duration_histogram) do
+ name = :gitlab_ci_pipeline_creation_duration_seconds
+ comment = 'Pipeline creation duration'
+ labels = {}
+ buckets = [0.01, 0.05, 0.1, 0.5, 1.0, 2.0, 5.0, 20.0, 50.0, 240.0]
+
+ ::Gitlab::Metrics.histogram(name, comment, labels, buckets)
+ end
+ end
+
+ def pipeline_size_histogram
+ strong_memoize(:pipeline_size_histogram) do
+ name = :gitlab_ci_pipeline_size_builds
+ comment = 'Pipeline size'
+ labels = { source: nil }
+ buckets = [0, 1, 5, 10, 20, 50, 100, 200, 500, 1000]
+
+ ::Gitlab::Metrics.histogram(name, comment, labels, buckets)
+ end
+ end
+
+ def pipeline_processing_events_counter
+ strong_memoize(:pipeline_processing_events_counter) do
+ name = :gitlab_ci_pipeline_processing_events_total
+ comment = 'Total amount of pipeline processing events'
+
+ Gitlab::Metrics.counter(name, comment)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/pipeline/preloader.rb b/lib/gitlab/ci/pipeline/preloader.rb
index db0a1ea4dab..7befc126ca9 100644
--- a/lib/gitlab/ci/pipeline/preloader.rb
+++ b/lib/gitlab/ci/pipeline/preloader.rb
@@ -17,6 +17,7 @@ module Gitlab
pipelines.each do |pipeline|
self.new(pipeline).tap do |preloader|
preloader.preload_commit_authors
+ preloader.preload_ref_commits
preloader.preload_pipeline_warnings
preloader.preload_stages_warnings
end
@@ -27,12 +28,19 @@ module Gitlab
@pipeline = pipeline
end
+ # This also preloads the author of every commit. We're using "lazy_author"
+ # here since "author" immediately loads the data on the first call.
def preload_commit_authors
- # This also preloads the author of every commit. We're using "lazy_author"
- # here since "author" immediately loads the data on the first call.
@pipeline.commit.try(:lazy_author)
end
+ # This preloads the latest commits for the given refs, which makes it
+ # much less expensive to check whether a pipeline is the latest one for
+ # a given branch.
+ def preload_ref_commits
+ @pipeline.lazy_ref_commit
+ end
+
def preload_pipeline_warnings
# This preloads the number of warnings for every pipeline, ensuring
# that Ci::Pipeline#has_warnings? doesn't execute any additional
@@ -40,10 +48,10 @@ module Gitlab
@pipeline.number_of_warnings
end
+ # This preloads the number of warnings for every stage, ensuring
+ # that Ci::Stage#has_warnings? doesn't execute any additional
+ # queries.
def preload_stages_warnings
- # This preloads the number of warnings for every stage, ensuring
- # that Ci::Stage#has_warnings? doesn't execute any additional
- # queries.
@pipeline.stages.each { |stage| stage.number_of_warnings }
end
end
diff --git a/lib/gitlab/ci/reports/test_report_summary.rb b/lib/gitlab/ci/reports/test_report_summary.rb
new file mode 100644
index 00000000000..85b83b790e7
--- /dev/null
+++ b/lib/gitlab/ci/reports/test_report_summary.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Ci
+ module Reports
+ class TestReportSummary
+ attr_reader :all_results
+
+ def initialize(all_results)
+ @all_results = all_results
+ end
+
+ def total
+ TestSuiteSummary.new(all_results)
+ end
+
+ def total_time
+ total.total_time
+ end
+
+ def total_count
+ total.total_count
+ end
+
+ def success_count
+ total.success_count
+ end
+
+ def failed_count
+ total.failed_count
+ end
+
+ def skipped_count
+ total.skipped_count
+ end
+
+ def error_count
+ total.error_count
+ end
+
+ def test_suites
+ all_results
+ .group_by(&:tests_name)
+ .transform_values { |results| TestSuiteSummary.new(results) }
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/reports/test_suite.rb b/lib/gitlab/ci/reports/test_suite.rb
index 8bbf2e0f6cf..28b81e7a471 100644
--- a/lib/gitlab/ci/reports/test_suite.rb
+++ b/lib/gitlab/ci/reports/test_suite.rb
@@ -4,9 +4,9 @@ module Gitlab
module Ci
module Reports
class TestSuite
- attr_reader :name
- attr_reader :test_cases
- attr_reader :total_time
+ attr_accessor :name
+ attr_accessor :test_cases
+ attr_accessor :total_time
attr_reader :suite_error
def initialize(name = nil)
@@ -70,6 +70,14 @@ module Gitlab
@suite_error = msg
end
+ def +(other)
+ self.class.new.tap do |test_suite|
+ test_suite.name = self.name
+ test_suite.test_cases = self.test_cases.deep_merge(other.test_cases)
+ test_suite.total_time = self.total_time + other.total_time
+ end
+ end
+
private
def existing_key?(test_case)
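The new `+` operator merges two suites of the same name by deep-merging their test cases and summing their total time, which is what allows suites collected from different builds to be rolled up into one. Usage is simply (suite construction elided):

combined = rspec_suite_from_build_a + rspec_suite_from_build_b
combined.total_time # sum of both runs
combined.test_cases # deep merge of both case hashes, keyed by status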
diff --git a/lib/gitlab/ci/reports/test_suite_summary.rb b/lib/gitlab/ci/reports/test_suite_summary.rb
new file mode 100644
index 00000000000..f9b0bedb712
--- /dev/null
+++ b/lib/gitlab/ci/reports/test_suite_summary.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Ci
+ module Reports
+ class TestSuiteSummary
+ attr_reader :results
+
+ def initialize(results)
+ @results = results
+ end
+
+ def name
+ @name ||= results.first.tests_name
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def build_ids
+ results.pluck(:build_id)
+ end
+
+ def total_time
+ @total_time ||= results.sum(&:tests_duration)
+ end
+
+ def success_count
+ @success_count ||= results.sum(&:tests_success)
+ end
+
+ def failed_count
+ @failed_count ||= results.sum(&:tests_failed)
+ end
+
+ def skipped_count
+ @skipped_count ||= results.sum(&:tests_skipped)
+ end
+
+ def error_count
+ @error_count ||= results.sum(&:tests_errored)
+ end
+
+ def total_count
+ @total_count ||= [success_count, failed_count, skipped_count, error_count].sum
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+ end
+ end
+ end
+end
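`TestReportSummary` and `TestSuiteSummary` work off denormalized per-build counters rather than re-parsing JUnit reports: each element of `results` is expected to respond to `tests_name`, `tests_duration`, `tests_success`, and so on. A hedged usage sketch (the variable holding the build report results is an assumption):

summary = Gitlab::Ci::Reports::TestReportSummary.new(build_report_results)
summary.total_count                        # success + failed + skipped + error
summary.test_suites['rspec'].failed_count  # per-suite rollup, grouped by tests_name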
diff --git a/lib/gitlab/ci/status/composite.rb b/lib/gitlab/ci/status/composite.rb
index 074651f1040..04a9fc29802 100644
--- a/lib/gitlab/ci/status/composite.rb
+++ b/lib/gitlab/ci/status/composite.rb
@@ -112,13 +112,13 @@ module Gitlab
def success_with_warnings?(status)
@allow_failure_key &&
status[@allow_failure_key] &&
- HasStatus::PASSED_WITH_WARNINGS_STATUSES.include?(status[@status_key])
+ ::Ci::HasStatus::PASSED_WITH_WARNINGS_STATUSES.include?(status[@status_key])
end
def ignored_status?(status)
@allow_failure_key &&
status[@allow_failure_key] &&
- HasStatus::EXCLUDE_IGNORED_STATUSES.include?(status[@status_key])
+ ::Ci::HasStatus::EXCLUDE_IGNORED_STATUSES.include?(status[@status_key])
end
end
end
diff --git a/lib/gitlab/ci/status/factory.rb b/lib/gitlab/ci/status/factory.rb
index 73c73a3b3fc..4a384531c57 100644
--- a/lib/gitlab/ci/status/factory.rb
+++ b/lib/gitlab/ci/status/factory.rb
@@ -7,7 +7,7 @@ module Gitlab
def initialize(subject, user)
@subject = subject
@user = user
- @status = subject.status || HasStatus::DEFAULT_STATUS
+ @status = subject.status || ::Ci::HasStatus::DEFAULT_STATUS
end
def fabricate!
diff --git a/lib/gitlab/ci/status/stage/play_manual.rb b/lib/gitlab/ci/status/stage/play_manual.rb
index ac3fc0912fa..58859a8f191 100644
--- a/lib/gitlab/ci/status/stage/play_manual.rb
+++ b/lib/gitlab/ci/status/stage/play_manual.rb
@@ -18,7 +18,7 @@ module Gitlab
def action_path
pipeline = subject.pipeline
- project_stage_play_manual_path(pipeline.project, pipeline, subject.name)
+ project_pipeline_stage_play_manual_path(pipeline.project, pipeline, subject.name)
end
def action_method
diff --git a/lib/gitlab/ci/templates/Android-Fastlane.gitlab-ci.yml b/lib/gitlab/ci/templates/Android-Fastlane.gitlab-ci.yml
index be584814271..5ebbbf15682 100644
--- a/lib/gitlab/ci/templates/Android-Fastlane.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Android-Fastlane.gitlab-ci.yml
@@ -20,7 +20,7 @@ stages:
- docker:dind
script:
- docker login -u gitlab-ci-token -p $CI_JOB_TOKEN $CI_REGISTRY
- - docker pull $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG || true
+ - docker pull --quiet $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG || true
- docker build --cache-from $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG -t $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG .
- docker push $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG
diff --git a/lib/gitlab/ci/templates/Android.gitlab-ci.yml b/lib/gitlab/ci/templates/Android.gitlab-ci.yml
index b7194110002..d20dabc0b00 100644
--- a/lib/gitlab/ci/templates/Android.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Android.gitlab-ci.yml
@@ -4,32 +4,65 @@
image: openjdk:8-jdk
variables:
- ANDROID_COMPILE_SDK: "28"
- ANDROID_BUILD_TOOLS: "28.0.2"
- ANDROID_SDK_TOOLS: "4333796"
+ # ANDROID_COMPILE_SDK is the version of Android you're compiling with.
+ # It should match compileSdkVersion.
+ ANDROID_COMPILE_SDK: "29"
+
+ # ANDROID_BUILD_TOOLS is the version of the Android build tools you are using.
+ # It should match buildToolsVersion.
+ ANDROID_BUILD_TOOLS: "29.0.3"
+
+ # ANDROID_SDK_TOOLS is the version of the command line tools that gets downloaded from the official site:
+ # https://developer.android.com/studio/index.html
+ # Under "Command line tools only", the SDK tools package has the format:
+ # commandlinetools-os_type-ANDROID_SDK_TOOLS_latest.zip
+ # The value below was the latest version when this script was last updated for the compileSdkVersion above.
+ ANDROID_SDK_TOOLS: "6514223"
+
+# Package installation before running the script
before_script:
- apt-get --quiet update --yes
- apt-get --quiet install --yes wget tar unzip lib32stdc++6 lib32z1
- - wget --quiet --output-document=android-sdk.zip https://dl.google.com/android/repository/sdk-tools-linux-${ANDROID_SDK_TOOLS}.zip
- - unzip -d android-sdk-linux android-sdk.zip
- - echo y | android-sdk-linux/tools/bin/sdkmanager "platforms;android-${ANDROID_COMPILE_SDK}" >/dev/null
- - echo y | android-sdk-linux/tools/bin/sdkmanager "platform-tools" >/dev/null
- - echo y | android-sdk-linux/tools/bin/sdkmanager "build-tools;${ANDROID_BUILD_TOOLS}" >/dev/null
- - export ANDROID_HOME=$PWD/android-sdk-linux
- - export PATH=$PATH:$PWD/android-sdk-linux/platform-tools/
+
+ # Set up ANDROID_HOME as the location the downloaded SDK will be installed into
+ - export ANDROID_HOME="${PWD}/android-home"
+ # Create a new directory at specified location
+ - install -d $ANDROID_HOME
+ # Download the Android SDK command line tools from the official source
+ # (the download URL must follow the commandlinetools URL pattern noted above),
+ # then unzip them and run a series of sdkmanager commands to install the
+ # Android SDK packages needed to build the app
+ - wget --output-document=$ANDROID_HOME/cmdline-tools.zip https://dl.google.com/android/repository/commandlinetools-linux-${ANDROID_SDK_TOOLS}_latest.zip
+ # move to the archive at ANDROID_HOME
+ - pushd $ANDROID_HOME
+ - unzip -d cmdline-tools cmdline-tools.zip
+ - popd
+ - export PATH=$PATH:${ANDROID_HOME}/cmdline-tools/tools/bin/
+
+ # Nothing fancy here, just checking the sdkmanager version
+ - sdkmanager --version
+
+ # use yes to accept all licenses
+ - yes | sdkmanager --sdk_root=${ANDROID_HOME} --licenses || true
+ - sdkmanager --sdk_root=${ANDROID_HOME} "platforms;android-${ANDROID_COMPILE_SDK}"
+ - sdkmanager --sdk_root=${ANDROID_HOME} "platform-tools"
+ - sdkmanager --sdk_root=${ANDROID_HOME} "build-tools;${ANDROID_BUILD_TOOLS}"
+
+ # Not strictly necessary, but this makes sure the Gradle wrapper is executable
- chmod +x ./gradlew
- # temporarily disable checking for EPIPE error and use yes to accept all licenses
- - set +o pipefail
- - yes | android-sdk-linux/tools/bin/sdkmanager --licenses
- - set -o pipefail
+# Basic Android and Gradle jobs
+# Run lint checks
lintDebug:
+ interruptible: true
stage: build
script:
- ./gradlew -Pci --console=plain :app:lintDebug -PbuildDir=lint
+# Make Project
assembleDebug:
+ interruptible: true
stage: build
script:
- ./gradlew assembleDebug
@@ -37,7 +70,9 @@ assembleDebug:
paths:
- app/build/outputs/
+# Run all tests; if any of them fail, the pipeline is interrupted and marked as failed
debugTests:
+ interruptible: true
stage: test
script:
- ./gradlew -Pci --console=plain :app:testDebug
diff --git a/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml b/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml
index e37cd14d1d1..c10d87a537b 100644
--- a/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml
@@ -12,6 +12,7 @@
# * code_quality: CODE_QUALITY_DISABLED
# * license_management: LICENSE_MANAGEMENT_DISABLED
# * performance: PERFORMANCE_DISABLED
+# * load_performance: LOAD_PERFORMANCE_DISABLED
# * sast: SAST_DISABLED
# * secret_detection: SECRET_DETECTION_DISABLED
# * dependency_scanning: DEPENDENCY_SCANNING_DISABLED
@@ -74,7 +75,7 @@ stages:
workflow:
rules:
- - if: '$BUILDPACK_URL || $AUTO_DEVOPS_EXPLICITLY_ENABLED == "1"'
+ - if: '$BUILDPACK_URL || $AUTO_DEVOPS_EXPLICITLY_ENABLED == "1" || $DOCKERFILE_PATH'
- exists:
- Dockerfile
diff --git a/lib/gitlab/ci/templates/Composer.gitlab-ci.yml b/lib/gitlab/ci/templates/Composer.gitlab-ci.yml
new file mode 100644
index 00000000000..5d9c68d3031
--- /dev/null
+++ b/lib/gitlab/ci/templates/Composer.gitlab-ci.yml
@@ -0,0 +1,19 @@
+# Publishes a tag or branch to the Composer package registry of the current project
+publish:
+ image: curlimages/curl:latest
+ stage: build
+ variables:
+ URL: "$CI_SERVER_PROTOCOL://$CI_SERVER_HOST:$CI_SERVER_PORT/api/v4/projects/$CI_PROJECT_ID/packages/composer?job_token=$CI_JOB_TOKEN"
+ script:
+ - version=$([[ -z "$CI_COMMIT_TAG" ]] && echo "branch=$CI_COMMIT_REF_NAME" || echo "tag=$CI_COMMIT_TAG")
+ - insecure=$([ "$CI_SERVER_PROTOCOL" = "http" ] && echo "--insecure" || echo "")
+ - response=$(curl -s -w "\n%{http_code}" $insecure --data $version $URL)
+ - code=$(echo "$response" | tail -n 1)
+ - body=$(echo "$response" | head -n 1)
+ # Output state information
+ - if [ $code -eq 201 ]; then
+ echo "Package created - Code $code - $body";
+ else
+ echo "Could not create package - Code $code - $body";
+ exit 1;
+ fi
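+
+# Illustrative usage (a sketch): a project could consume this template with
+#   include:
+#     - template: Composer.gitlab-ci.yml
+# and restrict publishing to tagged releases by adding a rule such as
+# `- if: $CI_COMMIT_TAG` to the publish job.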
diff --git a/lib/gitlab/ci/templates/Dart.gitlab-ci.yml b/lib/gitlab/ci/templates/Dart.gitlab-ci.yml
new file mode 100644
index 00000000000..cc383f89b0c
--- /dev/null
+++ b/lib/gitlab/ci/templates/Dart.gitlab-ci.yml
@@ -0,0 +1,22 @@
+# https://hub.docker.com/r/google/dart
+image: google/dart:2.8.4
+
+variables:
+ # To learn more about these options, run:
+ # pub run test --help
+ PUB_VARS: "--platform vm --timeout 30s --concurrency=6 --test-randomize-ordering-seed=random --reporter=expanded"
+
+# Cache downloaded dependencies and plugins between builds.
+# To keep the cache across branches, add 'key: "$CI_JOB_NAME"'
+cache:
+ paths:
+ - .pub-cache/global_packages
+
+before_script:
+ - export PATH="$PATH":"~/.pub-cache/bin"
+ - pub get --no-precompile
+
+test:
+ stage: test
+ script:
+ - pub run test $PUB_VARS
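+
+# With the defaults above, the test job is roughly equivalent to running the
+# following locally (assuming the Dart SDK is installed):
+#   pub get --no-precompile
+#   pub run test --platform vm --timeout 30s --concurrency=6 \
+#     --test-randomize-ordering-seed=random --reporter=expanded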
diff --git a/lib/gitlab/ci/templates/Jobs/Browser-Performance-Testing.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Browser-Performance-Testing.gitlab-ci.yml
index 9a34f8cb113..8553a940bd7 100644
--- a/lib/gitlab/ci/templates/Jobs/Browser-Performance-Testing.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Jobs/Browser-Performance-Testing.gitlab-ci.yml
@@ -1,11 +1,16 @@
+# Read more about the feature here: https://docs.gitlab.com/ee/user/project/merge_requests/browser_performance_testing.html
+
performance:
stage: performance
- image: docker:19.03.11
+ image: docker:19.03.12
allow_failure: true
variables:
DOCKER_TLS_CERTDIR: ""
+ SITESPEED_IMAGE: sitespeedio/sitespeed.io
+ SITESPEED_VERSION: 13.3.0
+ SITESPEED_OPTIONS: ''
services:
- - docker:19.03.11-dind
+ - docker:19.03.12-dind
script:
- |
if ! docker info &>/dev/null; then
@@ -15,21 +20,22 @@ performance:
fi
- export CI_ENVIRONMENT_URL=$(cat environment_url.txt)
- mkdir gitlab-exporter
- - wget -O gitlab-exporter/index.js https://gitlab.com/gitlab-org/gl-performance/raw/1.0.0/index.js
+ - wget -O gitlab-exporter/index.js https://gitlab.com/gitlab-org/gl-performance/raw/1.0.1/index.js
- mkdir sitespeed-results
- |
if [ -f .gitlab-urls.txt ]
then
sed -i -e 's@^@'"$CI_ENVIRONMENT_URL"'@' .gitlab-urls.txt
- docker run --shm-size=1g --rm -v "$(pwd)":/sitespeed.io sitespeedio/sitespeed.io:11.2.0 --plugins.add ./gitlab-exporter --outputFolder sitespeed-results .gitlab-urls.txt
+ docker run --shm-size=1g --rm -v "$(pwd)":/sitespeed.io $SITESPEED_IMAGE:$SITESPEED_VERSION --plugins.add ./gitlab-exporter --outputFolder sitespeed-results .gitlab-urls.txt $SITESPEED_OPTIONS
else
- docker run --shm-size=1g --rm -v "$(pwd)":/sitespeed.io sitespeedio/sitespeed.io:11.2.0 --plugins.add ./gitlab-exporter --outputFolder sitespeed-results "$CI_ENVIRONMENT_URL"
+ docker run --shm-size=1g --rm -v "$(pwd)":/sitespeed.io $SITESPEED_IMAGE:$SITESPEED_VERSION --plugins.add ./gitlab-exporter --outputFolder sitespeed-results "$CI_ENVIRONMENT_URL" $SITESPEED_OPTIONS
fi
- - mv sitespeed-results/data/performance.json performance.json
+ - mv sitespeed-results/data/performance.json browser-performance.json
artifacts:
paths:
- - performance.json
- sitespeed-results/
+ reports:
+ browser_performance: browser-performance.json
rules:
- if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
when: never
diff --git a/lib/gitlab/ci/templates/Jobs/Build.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Build.gitlab-ci.yml
index b5550461482..dbe870953ae 100644
--- a/lib/gitlab/ci/templates/Jobs/Build.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Jobs/Build.gitlab-ci.yml
@@ -1,10 +1,10 @@
build:
stage: build
- image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-build-image:v0.2.3"
+ image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-build-image:v0.3.1"
variables:
DOCKER_TLS_CERTDIR: ""
services:
- - docker:19.03.11-dind
+ - docker:19.03.12-dind
script:
- |
if [[ -z "$CI_COMMIT_TAG" ]]; then
diff --git a/lib/gitlab/ci/templates/Jobs/Code-Quality.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Code-Quality.gitlab-ci.yml
index bde6f185d3a..6b76d7e0c9b 100644
--- a/lib/gitlab/ci/templates/Jobs/Code-Quality.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Jobs/Code-Quality.gitlab-ci.yml
@@ -1,13 +1,14 @@
code_quality:
stage: test
- image: docker:19.03.11
+ image: docker:19.03.12
allow_failure: true
services:
- - docker:19.03.11-dind
+ - docker:19.03.12-dind
variables:
DOCKER_DRIVER: overlay2
DOCKER_TLS_CERTDIR: ""
- CODE_QUALITY_IMAGE: "registry.gitlab.com/gitlab-org/ci-cd/codequality:0.85.9"
+ CODE_QUALITY_IMAGE: "registry.gitlab.com/gitlab-org/ci-cd/codequality:0.85.10"
+ needs: []
script:
- |
if ! docker info &>/dev/null; then
diff --git a/lib/gitlab/ci/templates/Jobs/DAST-Default-Branch-Deploy.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/DAST-Default-Branch-Deploy.gitlab-ci.yml
index bab4fae67f0..d7d927ac8ee 100644
--- a/lib/gitlab/ci/templates/Jobs/DAST-Default-Branch-Deploy.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Jobs/DAST-Default-Branch-Deploy.gitlab-ci.yml
@@ -1,5 +1,5 @@
.dast-auto-deploy:
- image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-deploy-image:v0.17.0"
+ image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-deploy-image:v0.17.2"
dast_environment_deploy:
extends: .dast-auto-deploy
@@ -51,3 +51,4 @@ stop_dast_environment:
- if: $CI_COMMIT_BRANCH &&
$CI_KUBERNETES_ACTIVE &&
$GITLAB_FEATURES =~ /\bdast\b/
+ when: always
diff --git a/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml
index 97b5f3fd7f5..66c60e85892 100644
--- a/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml
@@ -1,5 +1,6 @@
.auto-deploy:
- image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-deploy-image:v0.17.0"
+ image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-deploy-image:v0.17.2"
+ dependencies: []
include:
- template: Jobs/Deploy/ECS.gitlab-ci.yml
@@ -20,7 +21,8 @@ review:
url: http://$CI_PROJECT_ID-$CI_ENVIRONMENT_SLUG.$KUBE_INGRESS_BASE_DOMAIN
on_stop: stop_review
artifacts:
- paths: [environment_url.txt]
+ paths: [environment_url.txt, tiller.log]
+ when: always
rules:
- if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
when: never
@@ -41,7 +43,6 @@ stop_review:
environment:
name: review/$CI_COMMIT_REF_NAME
action: stop
- dependencies: []
allow_failure: true
rules:
- if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
@@ -122,7 +123,8 @@ canary:
name: production
url: http://$CI_PROJECT_PATH_SLUG.$KUBE_INGRESS_BASE_DOMAIN
artifacts:
- paths: [environment_url.txt]
+ paths: [environment_url.txt, tiller.log]
+ when: always
production:
<<: *production_template
@@ -172,7 +174,8 @@ production_manual:
name: production
url: http://$CI_PROJECT_PATH_SLUG.$KUBE_INGRESS_BASE_DOMAIN
artifacts:
- paths: [environment_url.txt]
+ paths: [environment_url.txt, tiller.log]
+ when: always
.manual_rollout_template: &manual_rollout_template
<<: *rollout_template
diff --git a/lib/gitlab/ci/templates/Jobs/Deploy/ECS.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Deploy/ECS.gitlab-ci.yml
index bb3d5526f3a..da474f8ac88 100644
--- a/lib/gitlab/ci/templates/Jobs/Deploy/ECS.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Jobs/Deploy/ECS.gitlab-ci.yml
@@ -13,11 +13,20 @@
script:
- ecs update-task-definition
-review_ecs:
- extends: .deploy_to_ecs
+.review_ecs_base:
stage: review
+ extends: .deploy_to_ecs
environment:
name: review/$CI_COMMIT_REF_NAME
+
+.production_ecs_base:
+ stage: production
+ extends: .deploy_to_ecs
+ environment:
+ name: production
+
+review_ecs:
+ extends: .review_ecs_base
rules:
- if: '$AUTO_DEVOPS_PLATFORM_TARGET != "ECS"'
when: never
@@ -29,11 +38,21 @@ review_ecs:
when: never
- if: '$CI_COMMIT_TAG || $CI_COMMIT_BRANCH'
+review_fargate:
+ extends: .review_ecs_base
+ rules:
+ - if: '$AUTO_DEVOPS_PLATFORM_TARGET != "FARGATE"'
+ when: never
+ - if: '$CI_KUBERNETES_ACTIVE'
+ when: never
+ - if: '$REVIEW_DISABLED'
+ when: never
+ - if: '$CI_COMMIT_BRANCH == "master"'
+ when: never
+ - if: '$CI_COMMIT_TAG || $CI_COMMIT_BRANCH'
+
production_ecs:
- extends: .deploy_to_ecs
- stage: production
- environment:
- name: production
+ extends: .production_ecs_base
rules:
- if: '$AUTO_DEVOPS_PLATFORM_TARGET != "ECS"'
when: never
@@ -42,3 +61,14 @@ production_ecs:
- if: '$CI_COMMIT_BRANCH != "master"'
when: never
- if: '$CI_COMMIT_TAG || $CI_COMMIT_BRANCH'
+
+production_fargate:
+ extends: .production_ecs_base
+ rules:
+ - if: '$AUTO_DEVOPS_PLATFORM_TARGET != "FARGATE"'
+ when: never
+ - if: '$CI_KUBERNETES_ACTIVE'
+ when: never
+ - if: '$CI_COMMIT_BRANCH != "master"'
+ when: never
+ - if: '$CI_COMMIT_TAG || $CI_COMMIT_BRANCH'
diff --git a/lib/gitlab/ci/templates/Jobs/Load-Performance-Testing.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Load-Performance-Testing.gitlab-ci.yml
new file mode 100644
index 00000000000..b437ddbd734
--- /dev/null
+++ b/lib/gitlab/ci/templates/Jobs/Load-Performance-Testing.gitlab-ci.yml
@@ -0,0 +1,29 @@
+load_performance:
+ stage: performance
+ image: docker:19.03.11
+ allow_failure: true
+ variables:
+ DOCKER_TLS_CERTDIR: ""
+ K6_IMAGE: loadimpact/k6
+ K6_VERSION: 0.26.2
+ K6_TEST_FILE: github.com/loadimpact/k6/samples/http_get.js
+ K6_OPTIONS: ''
+ services:
+ - docker:19.03.11-dind
+ script:
+ - |
+ if ! docker info &>/dev/null; then
+ if [ -z "$DOCKER_HOST" -a "$KUBERNETES_PORT" ]; then
+ export DOCKER_HOST='tcp://localhost:2375'
+ fi
+ fi
+ - docker run --rm -v "$(pwd)":/k6 -w /k6 $K6_IMAGE:$K6_VERSION run $K6_TEST_FILE --summary-export=load-performance.json $K6_OPTIONS
+ artifacts:
+ reports:
+ load_performance: load-performance.json
+ rules:
+ - if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
+ when: never
+ - if: '$LOAD_PERFORMANCE_DISABLED'
+ when: never
+ - if: '$CI_COMMIT_TAG || $CI_COMMIT_BRANCH'
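+
+# Illustrative override (a sketch; the path is hypothetical): a project using
+# Auto DevOps could point the job at its own k6 script, for example:
+#   load_performance:
+#     variables:
+#       K6_TEST_FILE: tests/load/smoke.js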
diff --git a/lib/gitlab/ci/templates/Managed-Cluster-Applications.gitlab-ci.yml b/lib/gitlab/ci/templates/Managed-Cluster-Applications.gitlab-ci.yml
index 316647b5921..3d0bacda853 100644
--- a/lib/gitlab/ci/templates/Managed-Cluster-Applications.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Managed-Cluster-Applications.gitlab-ci.yml
@@ -1,6 +1,6 @@
apply:
stage: deploy
- image: "registry.gitlab.com/gitlab-org/cluster-integration/cluster-applications:v0.20.0"
+ image: "registry.gitlab.com/gitlab-org/cluster-integration/cluster-applications:v0.24.2"
environment:
name: production
variables:
diff --git a/lib/gitlab/ci/templates/Security/Coverage-Fuzzing.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/Coverage-Fuzzing.gitlab-ci.yml
new file mode 100644
index 00000000000..2fab8b95a3d
--- /dev/null
+++ b/lib/gitlab/ci/templates/Security/Coverage-Fuzzing.gitlab-ci.yml
@@ -0,0 +1,34 @@
+# Read more about this feature at https://docs.gitlab.com/ee/user/application_security/coverage_fuzzing
+
+variables:
+ # The branch on which full-fledged, long-running fuzzing jobs are run.
+ # All other branches run fuzzing regression tests instead.
+ COVERAGE_FUZZING_BRANCH: "$CI_DEFAULT_BRANCH"
+ # This uses semantic versioning and always downloads the latest v1 gitlab-cov-fuzz release
+ COVERAGE_FUZZING_VERSION: v1
+ # COVERAGE_FUZZING_URL_PREFIX is for users with an offline environment who mirror
+ # the gitlab-cov-fuzz release binaries to their own servers
+ COVERAGE_FUZZING_URL_PREFIX: "https://gitlab.com/gitlab-org/security-products/analyzers/gitlab-cov-fuzz/-/raw"
+
+.fuzz_base:
+ stage: fuzz
+ allow_failure: true
+ before_script:
+ - if [ -x "$(command -v apt-get)" ] ; then apt-get update && apt-get install -y wget; fi
+ - wget -O gitlab-cov-fuzz "${COVERAGE_FUZZING_URL_PREFIX}"/"${COVERAGE_FUZZING_VERSION}"/binaries/gitlab-cov-fuzz_Linux_x86_64
+ - chmod a+x gitlab-cov-fuzz
+ - export REGRESSION=true
+ - if [[ $CI_COMMIT_BRANCH = $COVERAGE_FUZZING_BRANCH ]]; then REGRESSION=false; fi;
+ artifacts:
+ paths:
+ - corpus
+ - crashes
+ - gl-coverage-fuzzing-report.json
+ reports:
+ coverage_fuzzing: gl-coverage-fuzzing-report.json
+ when: always
+ rules:
+ - if: $COVERAGE_FUZZING_DISABLED
+ when: never
+ - if: $GITLAB_FEATURES =~ /\bcoverage_fuzzing\b/
+ - if: $CI_RUNNER_EXECUTABLE_ARCH == "linux"
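+
+# Illustrative usage (a sketch; the job name and target path are hypothetical):
+# a concrete fuzz job is expected to extend .fuzz_base and invoke the downloaded
+# binary, along the lines of:
+#   my_fuzz_target:
+#     extends: .fuzz_base
+#     script:
+#       - ./gitlab-cov-fuzz run --regression=$REGRESSION -- ./my_fuzz_target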
diff --git a/lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml
index 07399216597..7abecfb7e49 100644
--- a/lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml
@@ -41,4 +41,11 @@ dast:
$DAST_API_SPECIFICATION == null
when: never
- if: $CI_COMMIT_BRANCH &&
+ $CI_KUBERNETES_ACTIVE &&
$GITLAB_FEATURES =~ /\bdast\b/
+ - if: $CI_COMMIT_BRANCH &&
+ $GITLAB_FEATURES =~ /\bdast\b/ &&
+ $DAST_WEBSITE
+ - if: $CI_COMMIT_BRANCH &&
+ $GITLAB_FEATURES =~ /\bdast\b/ &&
+ $DAST_API_SPECIFICATION
diff --git a/lib/gitlab/ci/templates/Security/Dependency-Scanning.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/Dependency-Scanning.gitlab-ci.yml
index fa8ccb7cf93..37f6cd216ca 100644
--- a/lib/gitlab/ci/templates/Security/Dependency-Scanning.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Security/Dependency-Scanning.gitlab-ci.yml
@@ -9,9 +9,6 @@ variables:
# (SAST, Dependency Scanning, ...)
SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/gitlab-org/security-products/analyzers"
- # Deprecated, use SECURE_ANALYZERS_PREFIX instead
- DS_ANALYZER_IMAGE_PREFIX: "$SECURE_ANALYZERS_PREFIX"
-
DS_DEFAULT_ANALYZERS: "bundler-audit, retire.js, gemnasium, gemnasium-maven, gemnasium-python"
DS_EXCLUDED_PATHS: "spec, test, tests, tmp"
DS_MAJOR_VERSION: 2
@@ -45,7 +42,7 @@ dependency_scanning:
docker run \
$(propagate_env_vars \
DS_ANALYZER_IMAGES \
- DS_ANALYZER_IMAGE_PREFIX \
+ SECURE_ANALYZERS_PREFIX \
DS_ANALYZER_IMAGE_TAG \
DS_DEFAULT_ANALYZERS \
DS_EXCLUDED_PATHS \
@@ -55,6 +52,7 @@ dependency_scanning:
DS_PYTHON_VERSION \
DS_PIP_VERSION \
DS_PIP_DEPENDENCY_PATH \
+ DS_JAVA_VERSION \
GEMNASIUM_DB_LOCAL_PATH \
GEMNASIUM_DB_REMOTE_URL \
GEMNASIUM_DB_REF_NAME \
@@ -98,7 +96,7 @@ dependency_scanning:
gemnasium-dependency_scanning:
extends: .ds-analyzer
image:
- name: "$DS_ANALYZER_IMAGE_PREFIX/gemnasium:$DS_MAJOR_VERSION"
+ name: "$SECURE_ANALYZERS_PREFIX/gemnasium:$DS_MAJOR_VERSION"
rules:
- if: $DEPENDENCY_SCANNING_DISABLED || $DS_DISABLE_DIND == 'false'
when: never
@@ -117,7 +115,7 @@ gemnasium-dependency_scanning:
gemnasium-maven-dependency_scanning:
extends: .ds-analyzer
image:
- name: "$DS_ANALYZER_IMAGE_PREFIX/gemnasium-maven:$DS_MAJOR_VERSION"
+ name: "$SECURE_ANALYZERS_PREFIX/gemnasium-maven:$DS_MAJOR_VERSION"
rules:
- if: $DEPENDENCY_SCANNING_DISABLED || $DS_DISABLE_DIND == 'false'
when: never
@@ -133,7 +131,7 @@ gemnasium-maven-dependency_scanning:
gemnasium-python-dependency_scanning:
extends: .ds-analyzer
image:
- name: "$DS_ANALYZER_IMAGE_PREFIX/gemnasium-python:$DS_MAJOR_VERSION"
+ name: "$SECURE_ANALYZERS_PREFIX/gemnasium-python:$DS_MAJOR_VERSION"
rules:
- if: $DEPENDENCY_SCANNING_DISABLED || $DS_DISABLE_DIND == 'false'
when: never
@@ -156,7 +154,7 @@ gemnasium-python-dependency_scanning:
bundler-audit-dependency_scanning:
extends: .ds-analyzer
image:
- name: "$DS_ANALYZER_IMAGE_PREFIX/bundler-audit:$DS_MAJOR_VERSION"
+ name: "$SECURE_ANALYZERS_PREFIX/bundler-audit:$DS_MAJOR_VERSION"
rules:
- if: $DEPENDENCY_SCANNING_DISABLED || $DS_DISABLE_DIND == 'false'
when: never
@@ -169,7 +167,7 @@ bundler-audit-dependency_scanning:
retire-js-dependency_scanning:
extends: .ds-analyzer
image:
- name: "$DS_ANALYZER_IMAGE_PREFIX/retire.js:$DS_MAJOR_VERSION"
+ name: "$SECURE_ANALYZERS_PREFIX/retire.js:$DS_MAJOR_VERSION"
rules:
- if: $DEPENDENCY_SCANNING_DISABLED || $DS_DISABLE_DIND == 'false'
when: never
diff --git a/lib/gitlab/ci/templates/Security/License-Scanning.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/License-Scanning.gitlab-ci.yml
index b0c75b0aab0..cc34d23decc 100644
--- a/lib/gitlab/ci/templates/Security/License-Scanning.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Security/License-Scanning.gitlab-ci.yml
@@ -18,15 +18,15 @@ license_scanning:
name: "$SECURE_ANALYZERS_PREFIX/license-finder:$LICENSE_MANAGEMENT_VERSION"
entrypoint: [""]
variables:
- LM_REPORT_FILE: gl-license-scanning-report.json
LM_REPORT_VERSION: '2.1'
SETUP_CMD: $LICENSE_MANAGEMENT_SETUP_CMD
allow_failure: true
+ needs: []
script:
- /run.sh analyze .
artifacts:
reports:
- license_scanning: $LM_REPORT_FILE
+ license_scanning: gl-license-scanning-report.json
dependencies: []
rules:
- if: $LICENSE_MANAGEMENT_DISABLED
diff --git a/lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml
index ec7b34d17b5..f0e2f48dd5c 100644
--- a/lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml
@@ -9,10 +9,7 @@ variables:
# (SAST, Dependency Scanning, ...)
SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/gitlab-org/security-products/analyzers"
- # Deprecated, use SECURE_ANALYZERS_PREFIX instead
- SAST_ANALYZER_IMAGE_PREFIX: "$SECURE_ANALYZERS_PREFIX"
-
- SAST_DEFAULT_ANALYZERS: "bandit, brakeman, gosec, spotbugs, flawfinder, phpcs-security-audit, security-code-scan, nodejs-scan, eslint, tslint, secrets, sobelow, pmd-apex, kubesec"
+ SAST_DEFAULT_ANALYZERS: "bandit, brakeman, gosec, spotbugs, flawfinder, phpcs-security-audit, security-code-scan, nodejs-scan, eslint, secrets, sobelow, pmd-apex, kubesec"
SAST_EXCLUDED_PATHS: "spec, test, tests, tmp"
SAST_ANALYZER_IMAGE_TAG: 2
SAST_DISABLE_DIND: "true"
@@ -63,7 +60,7 @@ sast:
bandit-sast:
extends: .sast-analyzer
image:
- name: "$SAST_ANALYZER_IMAGE_PREFIX/bandit:$SAST_ANALYZER_IMAGE_TAG"
+ name: "$SECURE_ANALYZERS_PREFIX/bandit:$SAST_ANALYZER_IMAGE_TAG"
rules:
- if: $SAST_DISABLED || $SAST_DISABLE_DIND == 'false'
when: never
@@ -76,7 +73,7 @@ bandit-sast:
brakeman-sast:
extends: .sast-analyzer
image:
- name: "$SAST_ANALYZER_IMAGE_PREFIX/brakeman:$SAST_ANALYZER_IMAGE_TAG"
+ name: "$SECURE_ANALYZERS_PREFIX/brakeman:$SAST_ANALYZER_IMAGE_TAG"
rules:
- if: $SAST_DISABLED || $SAST_DISABLE_DIND == 'false'
when: never
@@ -88,21 +85,23 @@ brakeman-sast:
eslint-sast:
extends: .sast-analyzer
image:
- name: "$SAST_ANALYZER_IMAGE_PREFIX/eslint:$SAST_ANALYZER_IMAGE_TAG"
+ name: "$SECURE_ANALYZERS_PREFIX/eslint:$SAST_ANALYZER_IMAGE_TAG"
rules:
- if: $SAST_DISABLED || $SAST_DISABLE_DIND == 'false'
when: never
- if: $CI_COMMIT_BRANCH &&
- $GITLAB_FEATURES =~ /\bsast\b/ &&
$SAST_DEFAULT_ANALYZERS =~ /eslint/
exists:
- '**/*.html'
- '**/*.js'
+ - '**/*.jsx'
+ - '**/*.ts'
+ - '**/*.tsx'
flawfinder-sast:
extends: .sast-analyzer
image:
- name: "$SAST_ANALYZER_IMAGE_PREFIX/flawfinder:$SAST_ANALYZER_IMAGE_TAG"
+ name: "$SECURE_ANALYZERS_PREFIX/flawfinder:$SAST_ANALYZER_IMAGE_TAG"
rules:
- if: $SAST_DISABLED || $SAST_DISABLE_DIND == 'false'
when: never
@@ -116,7 +115,7 @@ flawfinder-sast:
kubesec-sast:
extends: .sast-analyzer
image:
- name: "$SAST_ANALYZER_IMAGE_PREFIX/kubesec:$SAST_ANALYZER_IMAGE_TAG"
+ name: "$SECURE_ANALYZERS_PREFIX/kubesec:$SAST_ANALYZER_IMAGE_TAG"
rules:
- if: $SAST_DISABLED || $SAST_DISABLE_DIND == 'false'
when: never
@@ -128,7 +127,7 @@ kubesec-sast:
gosec-sast:
extends: .sast-analyzer
image:
- name: "$SAST_ANALYZER_IMAGE_PREFIX/gosec:$SAST_ANALYZER_IMAGE_TAG"
+ name: "$SECURE_ANALYZERS_PREFIX/gosec:$SAST_ANALYZER_IMAGE_TAG"
rules:
- if: $SAST_DISABLED || $SAST_DISABLE_DIND == 'false'
when: never
@@ -141,7 +140,7 @@ gosec-sast:
nodejs-scan-sast:
extends: .sast-analyzer
image:
- name: "$SAST_ANALYZER_IMAGE_PREFIX/nodejs-scan:$SAST_ANALYZER_IMAGE_TAG"
+ name: "$SECURE_ANALYZERS_PREFIX/nodejs-scan:$SAST_ANALYZER_IMAGE_TAG"
rules:
- if: $SAST_DISABLED || $SAST_DISABLE_DIND == 'false'
when: never
@@ -154,7 +153,7 @@ nodejs-scan-sast:
phpcs-security-audit-sast:
extends: .sast-analyzer
image:
- name: "$SAST_ANALYZER_IMAGE_PREFIX/phpcs-security-audit:$SAST_ANALYZER_IMAGE_TAG"
+ name: "$SECURE_ANALYZERS_PREFIX/phpcs-security-audit:$SAST_ANALYZER_IMAGE_TAG"
rules:
- if: $SAST_DISABLED || $SAST_DISABLE_DIND == 'false'
when: never
@@ -167,7 +166,7 @@ phpcs-security-audit-sast:
pmd-apex-sast:
extends: .sast-analyzer
image:
- name: "$SAST_ANALYZER_IMAGE_PREFIX/pmd-apex:$SAST_ANALYZER_IMAGE_TAG"
+ name: "$SECURE_ANALYZERS_PREFIX/pmd-apex:$SAST_ANALYZER_IMAGE_TAG"
rules:
- if: $SAST_DISABLED || $SAST_DISABLE_DIND == 'false'
when: never
@@ -180,7 +179,7 @@ pmd-apex-sast:
secrets-sast:
extends: .sast-analyzer
image:
- name: "$SAST_ANALYZER_IMAGE_PREFIX/secrets:$SAST_ANALYZER_IMAGE_TAG"
+ name: "$SECURE_ANALYZERS_PREFIX/secrets:$SAST_ANALYZER_IMAGE_TAG"
rules:
- if: $SAST_DISABLED || $SAST_DISABLE_DIND == 'false'
when: never
@@ -191,7 +190,7 @@ secrets-sast:
security-code-scan-sast:
extends: .sast-analyzer
image:
- name: "$SAST_ANALYZER_IMAGE_PREFIX/security-code-scan:$SAST_ANALYZER_IMAGE_TAG"
+ name: "$SECURE_ANALYZERS_PREFIX/security-code-scan:$SAST_ANALYZER_IMAGE_TAG"
rules:
- if: $SAST_DISABLED || $SAST_DISABLE_DIND == 'false'
when: never
@@ -205,7 +204,7 @@ security-code-scan-sast:
sobelow-sast:
extends: .sast-analyzer
image:
- name: "$SAST_ANALYZER_IMAGE_PREFIX/sobelow:$SAST_ANALYZER_IMAGE_TAG"
+ name: "$SECURE_ANALYZERS_PREFIX/sobelow:$SAST_ANALYZER_IMAGE_TAG"
rules:
- if: $SAST_DISABLED || $SAST_DISABLE_DIND == 'false'
when: never
@@ -218,7 +217,7 @@ sobelow-sast:
spotbugs-sast:
extends: .sast-analyzer
image:
- name: "$SAST_ANALYZER_IMAGE_PREFIX/spotbugs:$SAST_ANALYZER_IMAGE_TAG"
+ name: "$SECURE_ANALYZERS_PREFIX/spotbugs:$SAST_ANALYZER_IMAGE_TAG"
rules:
- if: $SAST_DISABLED || $SAST_DISABLE_DIND == 'false'
when: never
@@ -229,16 +228,3 @@ spotbugs-sast:
- '**/*.groovy'
- '**/*.java'
- '**/*.scala'
-
-tslint-sast:
- extends: .sast-analyzer
- image:
- name: "$SAST_ANALYZER_IMAGE_PREFIX/tslint:$SAST_ANALYZER_IMAGE_TAG"
- rules:
- - if: $SAST_DISABLED || $SAST_DISABLE_DIND == 'false'
- when: never
- - if: $CI_COMMIT_BRANCH &&
- $GITLAB_FEATURES =~ /\bsast\b/ &&
- $SAST_DEFAULT_ANALYZERS =~ /tslint/
- exists:
- - '**/*.ts'
diff --git a/lib/gitlab/ci/templates/Security/Secret-Detection.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/Secret-Detection.gitlab-ci.yml
index e18f89cadd7..441a57048e1 100644
--- a/lib/gitlab/ci/templates/Security/Secret-Detection.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Security/Secret-Detection.gitlab-ci.yml
@@ -8,17 +8,33 @@ variables:
SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/gitlab-org/security-products/analyzers"
SECRETS_ANALYZER_VERSION: "3"
-secret_detection:
+.secret-analyzer:
stage: test
image: "$SECURE_ANALYZERS_PREFIX/secrets:$SECRETS_ANALYZER_VERSION"
services: []
- rules:
- - if: $SECRET_DETECTION_DISABLED
- when: never
- - if: $CI_COMMIT_BRANCH && $GITLAB_FEATURES =~ /\bsecret_detection\b/
- when: on_success
artifacts:
reports:
secret_detection: gl-secret-detection-report.json
+
+secret_detection_default_branch:
+ extends: .secret-analyzer
+ rules:
+ - if: $SECRET_DETECTION_DISABLED
+ when: never
+ - if: $CI_DEFAULT_BRANCH == $CI_COMMIT_BRANCH &&
+ $GITLAB_FEATURES =~ /\bsecret_detection\b/
+ script:
+ - /analyzer run
+
+secret_detection:
+ extends: .secret-analyzer
+ rules:
+ - if: $SECRET_DETECTION_DISABLED
+ when: never
+ - if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH &&
+ $GITLAB_FEATURES =~ /\bsecret_detection\b/
script:
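+ # Compute a commit range so that only the commits unique to this branch
+ # (compared to the default branch) are scanned.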
+ - git fetch origin $CI_DEFAULT_BRANCH $CI_BUILD_REF_NAME
+ - export SECRET_DETECTION_COMMIT_TO=$(git log --left-right --cherry-pick --pretty=format:"%H" refs/remotes/origin/$CI_DEFAULT_BRANCH...refs/remotes/origin/$CI_BUILD_REF_NAME | tail -n 1)
+ - export SECRET_DETECTION_COMMIT_FROM=$CI_COMMIT_SHA
- /analyzer run
diff --git a/lib/gitlab/ci/templates/Security/Secure-Binaries.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/Secure-Binaries.gitlab-ci.yml
index b6c05c61db1..2d2e0859373 100644
--- a/lib/gitlab/ci/templates/Security/Secure-Binaries.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Security/Secure-Binaries.gitlab-ci.yml
@@ -13,7 +13,7 @@
variables:
SECURE_BINARIES_ANALYZERS: >-
- bandit, brakeman, gosec, spotbugs, flawfinder, phpcs-security-audit, security-code-scan, nodejs-scan, eslint, tslint, secrets, sobelow, pmd-apex, kubesec,
+ bandit, brakeman, gosec, spotbugs, flawfinder, phpcs-security-audit, security-code-scan, nodejs-scan, eslint, secrets, sobelow, pmd-apex, kubesec,
bundler-audit, retire.js, gemnasium, gemnasium-maven, gemnasium-python,
klar, clair-vulnerabilities-db,
license-finder,
@@ -40,7 +40,7 @@ variables:
- docker info
- env
- if [ -z "$SECURE_BINARIES_IMAGE" ]; then export SECURE_BINARIES_IMAGE=${SECURE_BINARIES_IMAGE:-"registry.gitlab.com/gitlab-org/security-products/analyzers/${CI_JOB_NAME}:${SECURE_BINARIES_ANALYZER_VERSION}"}; fi
- - docker pull ${SECURE_BINARIES_IMAGE}
+ - docker pull --quiet ${SECURE_BINARIES_IMAGE}
- mkdir -p output/$(dirname ${CI_JOB_NAME})
- |
if [ "$SECURE_BINARIES_SAVE_ARTIFACTS" = "true" ]; then
@@ -125,13 +125,6 @@ eslint:
- $SECURE_BINARIES_DOWNLOAD_IMAGES == "true" &&
$SECURE_BINARIES_ANALYZERS =~ /\beslint\b/
-tslint:
- extends: .download_images
- only:
- variables:
- - $SECURE_BINARIES_DOWNLOAD_IMAGES == "true" &&
- $SECURE_BINARIES_ANALYZERS =~ /\btslint\b/
-
secrets:
extends: .download_images
only:
diff --git a/lib/gitlab/ci/templates/Verify/Browser-Performance.gitlab-ci.yml b/lib/gitlab/ci/templates/Verify/Browser-Performance.gitlab-ci.yml
index e6097ae322e..9dbd9b679a8 100644
--- a/lib/gitlab/ci/templates/Verify/Browser-Performance.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Verify/Browser-Performance.gitlab-ci.yml
@@ -10,8 +10,9 @@ performance:
stage: performance
image: docker:git
variables:
- URL: https://example.com
- SITESPEED_VERSION: 11.2.0
+ URL: ''
+ SITESPEED_IMAGE: sitespeedio/sitespeed.io
+ SITESPEED_VERSION: 13.3.0
SITESPEED_OPTIONS: ''
services:
- docker:stable-dind
@@ -19,11 +20,10 @@ performance:
- mkdir gitlab-exporter
- wget -O ./gitlab-exporter/index.js https://gitlab.com/gitlab-org/gl-performance/raw/master/index.js
- mkdir sitespeed-results
- - docker run --shm-size=1g --rm -v "$(pwd)":/sitespeed.io sitespeedio/sitespeed.io:$SITESPEED_VERSION --plugins.add ./gitlab-exporter --outputFolder sitespeed-results $URL $SITESPEED_OPTIONS
- - mv sitespeed-results/data/performance.json performance.json
+ - docker run --shm-size=1g --rm -v "$(pwd)":/sitespeed.io $SITESPEED_IMAGE:$SITESPEED_VERSION --plugins.add ./gitlab-exporter --outputFolder sitespeed-results $URL $SITESPEED_OPTIONS
+ - mv sitespeed-results/data/performance.json browser-performance.json
artifacts:
paths:
- - performance.json
- sitespeed-results/
reports:
- performance: performance.json
+ browser_performance: browser-performance.json
diff --git a/lib/gitlab/ci/templates/Verify/Load-Performance-Testing.gitlab-ci.yml b/lib/gitlab/ci/templates/Verify/Load-Performance-Testing.gitlab-ci.yml
new file mode 100644
index 00000000000..d39bd234020
--- /dev/null
+++ b/lib/gitlab/ci/templates/Verify/Load-Performance-Testing.gitlab-ci.yml
@@ -0,0 +1,23 @@
+# Read more about the feature here: https://docs.gitlab.com/ee/user/project/merge_requests/load_performance_testing.html
+
+stages:
+ - build
+ - test
+ - deploy
+ - performance
+
+load_performance:
+ stage: performance
+ image: docker:git
+ variables:
+ K6_IMAGE: loadimpact/k6
+ K6_VERSION: 0.26.2
+ K6_TEST_FILE: github.com/loadimpact/k6/samples/http_get.js
+ K6_OPTIONS: ''
+ services:
+ - docker:stable-dind
+ script:
+ - docker run --rm -v "$(pwd)":/k6 -w /k6 $K6_IMAGE:$K6_VERSION run $K6_TEST_FILE --summary-export=load-performance.json $K6_OPTIONS
+ artifacts:
+ reports:
+ load_performance: load-performance.json
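+
+# Illustrative usage (a sketch): a project could run this standalone template with
+#   include:
+#     - template: Verify/Load-Performance-Testing.gitlab-ci.yml
+# and point K6_TEST_FILE at a k6 script stored in the repository.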
diff --git a/lib/gitlab/ci/templates/index.md b/lib/gitlab/ci/templates/index.md
new file mode 100644
index 00000000000..ff151dd4d1a
--- /dev/null
+++ b/lib/gitlab/ci/templates/index.md
@@ -0,0 +1,3 @@
+# Development guide for GitLab CI templates
+
+Please follow [the development guidelines](../../../../doc/development/cicd/templates.md).
diff --git a/lib/gitlab/ci/templates/npm.gitlab-ci.yml b/lib/gitlab/ci/templates/npm.gitlab-ci.yml
new file mode 100644
index 00000000000..035ba52da84
--- /dev/null
+++ b/lib/gitlab/ci/templates/npm.gitlab-ci.yml
@@ -0,0 +1,59 @@
+default:
+ image: node:latest
+
+ # Validate that the repository contains a package.json and extract a few values from it.
+ before_script:
+ - |
+ if [[ ! -f package.json ]]; then
+ echo "No package.json found! A package.json file is required to publish a package to GitLab's NPM registry."
+ echo 'For more information, see https://docs.gitlab.com/ee/user/packages/npm_registry/#creating-a-project'
+ exit 1
+ fi
+ - NPM_PACKAGE_NAME=$(node -p "require('./package.json').name")
+ - NPM_PACKAGE_VERSION=$(node -p "require('./package.json').version")
+
+# Validate that the package name is properly scoped to the project's root namespace.
+# For more information, see https://docs.gitlab.com/ee/user/packages/npm_registry/#package-naming-convention
+validate_package_scope:
+ stage: build
+ script:
+ - |
+ if [[ ! $NPM_PACKAGE_NAME =~ ^@$CI_PROJECT_ROOT_NAMESPACE/ ]]; then
+ echo "Invalid package scope! Packages must be scoped in the root namespace of the project, e.g. \"@${CI_PROJECT_ROOT_NAMESPACE}/${CI_PROJECT_NAME}\""
+ echo 'For more information, see https://docs.gitlab.com/ee/user/packages/npm_registry/#package-naming-convention'
+ exit 1
+ fi
+
+# If no .npmrc is included in the repo, generate a temporary one to use during the publish step
+# that is configured to publish to GitLab's NPM registry
+create_npmrc:
+ stage: build
+ script:
+ - |
+ if [[ ! -f .npmrc ]]; then
+ echo 'No .npmrc found! Creating one now. Please review the following link for more information: https://docs.gitlab.com/ee/user/packages/npm_registry/index.html#authenticating-with-a-ci-job-token'
+
+ {
+ echo '@${CI_PROJECT_ROOT_NAMESPACE}:registry=${CI_SERVER_PROTOCOL}://${CI_SERVER_HOST}:${CI_SERVER_PORT}/api/v4/projects/${CI_PROJECT_ID}/packages/npm/'
+ echo '//${CI_SERVER_HOST}:${CI_SERVER_PORT}/api/v4/packages/npm/:_authToken=${CI_JOB_TOKEN}'
+ echo '//${CI_SERVER_HOST}:${CI_SERVER_PORT}/api/v4/projects/${CI_PROJECT_ID}/packages/npm/:_authToken=${CI_JOB_TOKEN}'
+ } >> .npmrc
+
+ fi
+ artifacts:
+ paths:
+ - .npmrc
+
+# Publish the package. If the version in package.json has not yet been published, it will be
+# published to GitLab's NPM registry. If the version already exists, the publish command
+# will fail and the existing package will not be updated.
+publish_package:
+ stage: deploy
+ script:
+ - |
+ {
+ npm publish &&
+ echo "Successfully published version ${NPM_PACKAGE_VERSION} of ${NPM_PACKAGE_NAME} to GitLab's NPM registry: ${CI_PROJECT_URL}/-/packages"
+ } || {
+ echo "No new version of ${NPM_PACKAGE_NAME} published. This is most likely because version ${NPM_PACKAGE_VERSION} already exists in GitLab's NPM registry."
+ }
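+
+# Illustrative usage (a sketch; the scope and package name are hypothetical): a
+# project could consume this template with
+#   include:
+#     - template: npm.gitlab-ci.yml
+# provided the package in package.json is scoped to the project's root namespace,
+# for example "@my-group/my-package".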
diff --git a/lib/gitlab/ci/yaml_processor.rb b/lib/gitlab/ci/yaml_processor.rb
index 6a9b7b2fc85..8cf355bbfc1 100644
--- a/lib/gitlab/ci/yaml_processor.rb
+++ b/lib/gitlab/ci/yaml_processor.rb
@@ -3,15 +3,33 @@
module Gitlab
module Ci
class YamlProcessor
- ValidationError = Class.new(StandardError)
+ # ValidationError is treated like a result object in the form of an exception.
+ # Until we support returning multiple messages, any warnings raised during
+ # config validation are returned along with the error object.
+ class ValidationError < StandardError
+ attr_reader :warnings
+
+ def initialize(message, warnings: [])
+ @warnings = warnings
+ super(message)
+ end
+ end
include Gitlab::Config::Entry::LegacyValidationHelpers
attr_reader :stages, :jobs
- ResultWithErrors = Struct.new(:content, :errors) do
+ class Result
+ attr_reader :config, :errors, :warnings
+
+ def initialize(config: nil, errors: [], warnings: [])
+ @config = config
+ @errors = errors
+ @warnings = warnings
+ end
+
def valid?
- errors.empty?
+ config.present? && errors.empty?
end
end
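+ # Illustrative use of the result object (a sketch):
+ #
+ #   result = Gitlab::Ci::YamlProcessor.new_with_validation_errors(content)
+ #   result.valid?   # => true only when a config was built and no errors occurred
+ #   result.errors   # => e.g. ["Please provide content of .gitlab-ci.yml"]
+ #   result.warnings # => warnings gathered while validating the config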
@@ -20,24 +38,32 @@ module Gitlab
@config = @ci_config.to_hash
unless @ci_config.valid?
- raise ValidationError, @ci_config.errors.first
+ error!(@ci_config.errors.first)
end
initial_parsing
rescue Gitlab::Ci::Config::ConfigError => e
- raise ValidationError, e.message
+ error!(e.message)
end
def self.new_with_validation_errors(content, opts = {})
- return ResultWithErrors.new('', ['Please provide content of .gitlab-ci.yml']) if content.blank?
+ return Result.new(errors: ['Please provide content of .gitlab-ci.yml']) if content.blank?
config = Gitlab::Ci::Config.new(content, **opts)
- return ResultWithErrors.new("", config.errors) unless config.valid?
+ return Result.new(errors: config.errors, warnings: config.warnings) unless config.valid?
config = Gitlab::Ci::YamlProcessor.new(content, opts)
- ResultWithErrors.new(config, [])
- rescue ValidationError, Gitlab::Ci::Config::ConfigError => e
- ResultWithErrors.new('', [e.message])
+ Result.new(config: config, warnings: config.warnings)
+
+ rescue ValidationError => e
+ Result.new(errors: [e.message], warnings: e.warnings)
+
+ rescue Gitlab::Ci::Config::ConfigError => e
+ Result.new(errors: [e.message])
+ end
+
+ def warnings
+ @ci_config&.warnings || []
end
def builds
@@ -66,6 +92,7 @@ module Gitlab
cache: job[:cache],
resource_group_key: job[:resource_group],
scheduling_type: job[:scheduling_type],
+ secrets: job[:secrets],
options: {
image: job[:image],
services: job[:services],
@@ -157,10 +184,14 @@ module Gitlab
return unless job[:stage]
unless job[:stage].is_a?(String) && job[:stage].in?(@stages)
- raise ValidationError, "#{name} job: chosen stage does not exist; available stages are #{@stages.join(", ")}"
+ error!("#{name} job: chosen stage does not exist; available stages are #{@stages.join(", ")}")
end
end
+ def error!(message)
+ raise ValidationError.new(message, warnings: warnings)
+ end
+
def validate_job_dependencies!(name, job)
return unless job[:dependencies]
@@ -190,7 +221,7 @@ module Gitlab
def validate_job_dependency!(name, dependency, dependency_type = 'dependency')
unless @jobs[dependency.to_sym]
- raise ValidationError, "#{name} job: undefined #{dependency_type}: #{dependency}"
+ error!("#{name} job: undefined #{dependency_type}: #{dependency}")
end
job_stage_index = stage_index(name)
@@ -199,7 +230,7 @@ module Gitlab
# A dependency might be defined later in the configuration
# with a stage that does not exist
unless dependency_stage_index.present? && dependency_stage_index < job_stage_index
- raise ValidationError, "#{name} job: #{dependency_type} #{dependency} is not defined in prior stages"
+ error!("#{name} job: #{dependency_type} #{dependency} is not defined in prior stages")
end
end
@@ -221,19 +252,19 @@ module Gitlab
on_stop_job = @jobs[on_stop.to_sym]
unless on_stop_job
- raise ValidationError, "#{name} job: on_stop job #{on_stop} is not defined"
+ error!("#{name} job: on_stop job #{on_stop} is not defined")
end
unless on_stop_job[:environment]
- raise ValidationError, "#{name} job: on_stop job #{on_stop} does not have environment defined"
+ error!("#{name} job: on_stop job #{on_stop} does not have environment defined")
end
unless on_stop_job[:environment][:name] == environment[:name]
- raise ValidationError, "#{name} job: on_stop job #{on_stop} have different environment name"
+ error!("#{name} job: on_stop job #{on_stop} have different environment name")
end
unless on_stop_job[:environment][:action] == 'stop'
- raise ValidationError, "#{name} job: on_stop job #{on_stop} needs to have action stop defined"
+ error!("#{name} job: on_stop job #{on_stop} needs to have action stop defined")
end
end
end
diff --git a/lib/gitlab/class_attributes.rb b/lib/gitlab/class_attributes.rb
new file mode 100644
index 00000000000..6560c97b2e6
--- /dev/null
+++ b/lib/gitlab/class_attributes.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ClassAttributes
+ extend ActiveSupport::Concern
+
+ class_methods do
+ protected
+
+ # Returns an attribute declared on this class or its parent class.
+ # This approach allows declared attributes to be inherited by
+ # child classes.
+ def get_class_attribute(name)
+ class_attributes[name] || superclass_attributes(name)
+ end
+
+ private
+
+ def class_attributes
+ @class_attributes ||= {}
+ end
+
+ def superclass_attributes(name)
+ return unless superclass.include? Gitlab::ClassAttributes
+
+ superclass.get_class_attribute(name)
+ end
+ end
+ end
+end
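+
+# Illustrative use (a sketch; the attribute name is hypothetical):
+#
+#   class BaseWorker
+#     include Gitlab::ClassAttributes
+#
+#     def self.urgency(value = nil)
+#       class_attributes[:urgency] = value if value
+#       get_class_attribute(:urgency)
+#     end
+#   end
+#
+#   class ChildWorker < BaseWorker; end
+#
+#   BaseWorker.urgency(:high)
+#   ChildWorker.urgency # => :high, found via superclass_attributes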
diff --git a/lib/gitlab/code_navigation_path.rb b/lib/gitlab/code_navigation_path.rb
index faf623faccf..909d0536b5f 100644
--- a/lib/gitlab/code_navigation_path.rb
+++ b/lib/gitlab/code_navigation_path.rb
@@ -5,7 +5,7 @@ module Gitlab
include Gitlab::Utils::StrongMemoize
include Gitlab::Routing
- LATEST_COMMITS_LIMIT = 10
+ LATEST_COMMITS_LIMIT = 2
def initialize(project, commit_sha)
@project = project
@@ -28,11 +28,11 @@ module Gitlab
latest_commits_shas =
project.repository.commits(commit_sha, limit: LATEST_COMMITS_LIMIT).map(&:sha)
- artifact =
- ::Ci::JobArtifact
- .with_file_types(['lsif'])
- .for_sha(latest_commits_shas, project.id)
- .last
+ pipeline = @project.ci_pipelines.for_sha(latest_commits_shas).last
+
+ next unless pipeline
+
+ artifact = pipeline.job_artifacts.with_file_types(['lsif']).last
artifact&.job
end
diff --git a/lib/gitlab/conan_token.rb b/lib/gitlab/conan_token.rb
new file mode 100644
index 00000000000..7526c10b608
--- /dev/null
+++ b/lib/gitlab/conan_token.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+# The Conan client uses a JWT for authenticating with remotes.
+# This class encodes and decodes a user's personal access token or
+# CI_JOB_TOKEN into a JWT that is used by the Conan client to
+# authenticate with GitLab.
+
+module Gitlab
+ class ConanToken
+ HMAC_KEY = 'gitlab-conan-packages'.freeze
+
+ attr_reader :access_token_id, :user_id
+
+ class << self
+ def from_personal_access_token(access_token)
+ new(access_token_id: access_token.id, user_id: access_token.user_id)
+ end
+
+ def from_job(job)
+ new(access_token_id: job.token, user_id: job.user.id)
+ end
+
+ def from_deploy_token(deploy_token)
+ new(access_token_id: deploy_token.token, user_id: deploy_token.username)
+ end
+
+ def decode(jwt)
+ payload = JSONWebToken::HMACToken.decode(jwt, secret).first
+
+ new(access_token_id: payload['access_token'], user_id: payload['user_id'])
+ rescue JWT::DecodeError, JWT::ExpiredSignature, JWT::ImmatureSignature
+ # Return nil for expired and otherwise invalid tokens; the Conan client
+ # will request a new token automatically.
+ end
+
+ def secret
+ OpenSSL::HMAC.hexdigest(
+ OpenSSL::Digest::SHA256.new,
+ ::Settings.attr_encrypted_db_key_base,
+ HMAC_KEY
+ )
+ end
+ end
+
+ def initialize(access_token_id:, user_id:)
+ @access_token_id = access_token_id
+ @user_id = user_id
+ end
+
+ def to_jwt
+ hmac_token.encoded
+ end
+
+ private
+
+ def hmac_token
+ JSONWebToken::HMACToken.new(self.class.secret).tap do |token|
+ token['access_token'] = access_token_id
+ token['user_id'] = user_id
+ token.expire_time = token.issued_at + 1.hour
+ end
+ end
+ end
+end
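+
+# Illustrative round trip (a sketch, assuming `token` is a PersonalAccessToken):
+#
+#   jwt = Gitlab::ConanToken.from_personal_access_token(token).to_jwt
+#   Gitlab::ConanToken.decode(jwt).access_token_id # => token.id
+#
+# Expired or tampered tokens decode to nil, and the Conan client then requests
+# a new token.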
diff --git a/lib/gitlab/config/entry/configurable.rb b/lib/gitlab/config/entry/configurable.rb
index 571e7a5127e..6bf77ebaa5b 100644
--- a/lib/gitlab/config/entry/configurable.rb
+++ b/lib/gitlab/config/entry/configurable.rb
@@ -62,7 +62,9 @@ module Gitlab
class_methods do
def nodes
- Hash[(@nodes || {}).map { |key, factory| [key, factory.dup] }]
+ return {} unless @nodes
+
+ @nodes.transform_values(&:dup)
end
def reserved_node_names
diff --git a/lib/gitlab/config/entry/node.rb b/lib/gitlab/config/entry/node.rb
index 84d3409ed91..32912cb1046 100644
--- a/lib/gitlab/config/entry/node.rb
+++ b/lib/gitlab/config/entry/node.rb
@@ -16,6 +16,7 @@ module Gitlab
@config = config
@metadata = metadata
@entries = {}
+ @warnings = []
yield(self) if block_given?
@@ -60,6 +61,14 @@ module Gitlab
[]
end
+ def warnings
+ @warnings + descendants.flat_map(&:warnings)
+ end
+
+ def add_warning(message)
+ @warnings << "#{location} #{message}"
+ end
+
def value
if leaf?
@config
@@ -68,7 +77,7 @@ module Gitlab
value.specified? && value.relevant?
end
- Hash[meaningful.map { |key, entry| [key, entry.value] }]
+ meaningful.transform_values { |entry| entry.value }
end
end
diff --git a/lib/gitlab/config/loader/yaml.rb b/lib/gitlab/config/loader/yaml.rb
index e001742a7f8..cb3fc49944c 100644
--- a/lib/gitlab/config/loader/yaml.rb
+++ b/lib/gitlab/config/loader/yaml.rb
@@ -5,6 +5,7 @@ module Gitlab
module Loader
class Yaml
DataTooLargeError = Class.new(Loader::FormatError)
+ NotHashError = Class.new(Loader::FormatError)
include Gitlab::Utils::StrongMemoize
@@ -23,7 +24,7 @@ module Gitlab
def load_raw!
raise DataTooLargeError, 'The parsed YAML is too big' if too_big?
- raise Loader::FormatError, 'Invalid configuration format' unless hash?
+ raise NotHashError, 'Invalid configuration format' unless hash?
@config
end
diff --git a/lib/gitlab/config_checker/external_database_checker.rb b/lib/gitlab/config_checker/external_database_checker.rb
index 795082a10a0..c08dd0351f3 100644
--- a/lib/gitlab/config_checker/external_database_checker.rb
+++ b/lib/gitlab/config_checker/external_database_checker.rb
@@ -5,22 +5,43 @@ module Gitlab
module ExternalDatabaseChecker
extend self
- # DB is considered deprecated if it is below version 11
- def db_version_deprecated?
- Gitlab::Database.version.to_f < 11
- end
-
def check
- return [] unless db_version_deprecated?
+ notices = []
+
+ unless Gitlab::Database.postgresql_minimum_supported_version?
+ notices <<
+ {
+ type: 'warning',
+ message: _('You are using PostgreSQL %{pg_version_current}, but PostgreSQL ' \
+ '%{pg_version_minimum} is required for this version of GitLab. ' \
+ 'Please upgrade your environment to a supported PostgreSQL version, ' \
+ 'see %{pg_requirements_url} for details.') % {
+ pg_version_current: Gitlab::Database.version,
+ pg_version_minimum: Gitlab::Database::MINIMUM_POSTGRES_VERSION,
+ pg_requirements_url: '<a href="https://docs.gitlab.com/ee/install/requirements.html#database">database requirements</a>'
+ }
+ }
+ end
+
+ if Gitlab::Database.postgresql_upcoming_deprecation?
+ upcoming_deprecation = Gitlab::Database::UPCOMING_POSTGRES_VERSION_DETAILS
+
+ notices <<
+ {
+ type: 'warning',
+ message: _('Note that PostgreSQL %{pg_version_upcoming} will become the minimum required ' \
+ 'version in GitLab %{gl_version_upcoming} (%{gl_version_upcoming_date}). Please ' \
+ 'consider upgrading your environment to a supported PostgreSQL version soon, ' \
+ 'see <a href="%{pg_version_upcoming_url}">the related epic</a> for details.') % {
+ pg_version_upcoming: upcoming_deprecation[:pg_version_minimum],
+ gl_version_upcoming: upcoming_deprecation[:gl_version],
+ gl_version_upcoming_date: upcoming_deprecation[:gl_version_date],
+ pg_version_upcoming_url: upcoming_deprecation[:url]
+ }
+ }
+ end
- [
- {
- type: 'warning',
- message: _('Note that PostgreSQL 11 will become the minimum required PostgreSQL version in GitLab 13.0 (May 2020). '\
- 'PostgreSQL 9.6 and PostgreSQL 10 will no longer be supported in GitLab 13.0. '\
- 'Please consider upgrading your PostgreSQL version (%{db_version}) soon.') % { db_version: Gitlab::Database.version.to_s }
- }
- ]
+ notices
end
end
end
diff --git a/lib/gitlab/danger/changelog.rb b/lib/gitlab/danger/changelog.rb
index 85f386594be..4427c331b8e 100644
--- a/lib/gitlab/danger/changelog.rb
+++ b/lib/gitlab/danger/changelog.rb
@@ -3,11 +3,17 @@
module Gitlab
module Danger
module Changelog
- NO_CHANGELOG_LABELS = %w[backstage ci-build meta].freeze
+ NO_CHANGELOG_LABELS = [
+ 'tooling',
+ 'tooling::pipelines',
+ 'tooling::workflow',
+ 'ci-build',
+ 'meta'
+ ].freeze
NO_CHANGELOG_CATEGORIES = %i[docs none].freeze
def needed?
- categories_need_changelog? && (gitlab.mr_labels & NO_CHANGELOG_LABELS).empty?
+ categories_need_changelog? && without_no_changelog_label?
end
def found
@@ -27,6 +33,10 @@ module Gitlab
def categories_need_changelog?
(helper.changes_by_category.keys - NO_CHANGELOG_CATEGORIES).any?
end
+
+ def without_no_changelog_label?
+ (gitlab.mr_labels & NO_CHANGELOG_LABELS).empty?
+ end
end
end
end
diff --git a/lib/gitlab/danger/commit_linter.rb b/lib/gitlab/danger/commit_linter.rb
index 58db2b58560..954934518d7 100644
--- a/lib/gitlab/danger/commit_linter.rb
+++ b/lib/gitlab/danger/commit_linter.rb
@@ -8,8 +8,6 @@ module Gitlab
class CommitLinter
MIN_SUBJECT_WORDS_COUNT = 3
MAX_LINE_LENGTH = 72
- WARN_SUBJECT_LENGTH = 50
- URL_LIMIT_SUBJECT = "https://chris.beams.io/posts/git-commit/#limit-50"
MAX_CHANGED_FILES_IN_COMMIT = 3
MAX_CHANGED_LINES_IN_COMMIT = 30
SHORT_REFERENCE_REGEX = %r{([\w\-\/]+)?(#|!|&|%)\d+\b}.freeze
@@ -18,7 +16,6 @@ module Gitlab
PROBLEMS = {
subject_too_short: "The %s must contain at least #{MIN_SUBJECT_WORDS_COUNT} words",
subject_too_long: "The %s may not be longer than #{MAX_LINE_LENGTH} characters",
- subject_above_warning: "The %s length is acceptable, but please try to [reduce it to #{WARN_SUBJECT_LENGTH} characters](#{URL_LIMIT_SUBJECT})",
subject_starts_with_lowercase: "The %s must start with a capital letter",
subject_ends_with_a_period: "The %s must not end with a period",
separator_missing: "The commit subject and body must be separated by a blank line",
@@ -88,8 +85,6 @@ module Gitlab
if subject_too_long?
add_problem(:subject_too_long, subject_description)
- elsif subject_above_warning?
- add_problem(:subject_above_warning, subject_description)
end
if subject_starts_with_lowercase?
@@ -195,10 +190,6 @@ module Gitlab
line_too_long?(subject)
end
- def subject_above_warning?
- subject.length > WARN_SUBJECT_LENGTH
- end
-
def subject_starts_with_lowercase?
first_char = subject.sub(/\A\[.+\]\s/, '')[0]
first_char_downcased = first_char.downcase
diff --git a/lib/gitlab/danger/helper.rb b/lib/gitlab/danger/helper.rb
index 327418ad100..db799c094b2 100644
--- a/lib/gitlab/danger/helper.rb
+++ b/lib/gitlab/danger/helper.rb
@@ -6,6 +6,7 @@ module Gitlab
module Danger
module Helper
RELEASE_TOOLS_BOT = 'gitlab-release-tools-bot'
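+ # Matches the "WIP" and "Draft" markers used in MR titles, e.g. "WIP:", "[WIP]",
+ # "Draft:", "[Draft]", "(draft)" and similar variants.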
+ DRAFT_REGEX = /\A*#{Regexp.union(/(?i)(\[WIP\]\s*|WIP:\s*|WIP$)/, /(?i)(\[draft\]|\(draft\)|draft:|draft\s\-\s|draft$)/)}+\s*/i.freeze
# Returns a list of all files that have been added, modified or renamed.
# `git.modified_files` might contain paths that already have been renamed,
@@ -34,6 +35,18 @@ module Gitlab
.sort
end
+ # Returns an array of strings containing the changed (added or removed) lines
+ # of a file, as they appear in a git diff.
+ #
+ # For example, after changing a line in lib/gitlab/usage_data.rb it returns:
+ #
+ # [ "--- a/lib/gitlab/usage_data.rb",
+ # "+++ b/lib/gitlab/usage_data.rb",
+ # "+ # Test change",
+ # "- # Old change" ]
+ def changed_lines(changed_file)
+ git.diff_for_file(changed_file).patch.split("\n").select { |line| %r{^[+-]}.match?(line) }
+ end
+
def all_ee_changes
all_changed_files.grep(%r{\Aee/})
end
@@ -73,16 +86,25 @@ module Gitlab
# @return [Hash<String,Array<String>>]
def changes_by_category
all_changed_files.each_with_object(Hash.new { |h, k| h[k] = [] }) do |file, hash|
- hash[category_for_file(file)] << file
+ categories_for_file(file).each { |category| hash[category] << file }
end
end
- # Determines the category a file is in, e.g., `:frontend` or `:backend`
- # @return[Symbol]
- def category_for_file(file)
- _, category = CATEGORIES.find { |regexp, _| regexp.match?(file) }
+ # Determines the categories a file is in, e.g., `[:frontend]`, `[:backend]`, or `%i[frontend engineering_productivity]`
+ # using filename regex and specific change regex if given.
+ #
+ # @return Array<Symbol>
+ def categories_for_file(file)
+ _, categories = CATEGORIES.find do |key, _|
+ filename_regex, changes_regex = Array(key)
+
+ found = filename_regex.match?(file)
+ found &&= changed_lines(file).any? { |changed_line| changes_regex.match?(changed_line) } if changes_regex
- category || :unknown
+ found
+ end
+
+ Array(categories || :unknown)
end
# Returns the GFM for a category label, making its best guess if it's not
@@ -102,8 +124,10 @@ module Gitlab
}.freeze
# First-match win, so be sure to put more specific regex at the top...
CATEGORIES = {
- %r{\Adoc/} => :none, # To reinstate roulette for documentation, set to `:docs`.
- %r{\A(CONTRIBUTING|LICENSE|MAINTENANCE|PHILOSOPHY|PROCESS|README)(\.md)?\z} => :none, # To reinstate roulette for documentation, set to `:docs`.
+ [%r{usage_data}, %r{^(\+|-).*(count|distinct_count)\(.*\)(.*)$}] => [:database, :backend],
+
+ %r{\Adoc/.*(\.(md|png|gif|jpg))\z} => :docs,
+ %r{\A(CONTRIBUTING|LICENSE|MAINTENANCE|PHILOSOPHY|PROCESS|README)(\.md)?\z} => :docs,
%r{\A(ee/)?app/(assets|views)/} => :frontend,
%r{\A(ee/)?public/} => :frontend,
@@ -125,10 +149,13 @@ module Gitlab
jest\.config\.js |
package\.json |
yarn\.lock |
- config/.+\.js |
- \.gitlab/ci/frontend\.gitlab-ci\.yml
+ config/.+\.js
)\z}x => :frontend,
+ %r{(\A|/)(
+ \.gitlab/ci/frontend\.gitlab-ci\.yml
+ )\z}x => %i[frontend engineering_productivity],
+
%r{\A(ee/)?db/(?!fixtures)[^/]+} => :database,
%r{\A(ee/)?lib/gitlab/(database|background_migration|sql|github_import)(/|\.rb)} => :database,
%r{\A(app/models/project_authorization|app/services/users/refresh_authorized_projects_service)(/|\.rb)} => :database,
@@ -136,13 +163,13 @@ module Gitlab
%r{\Arubocop/cop/migration(/|\.rb)} => :database,
%r{\A(\.gitlab-ci\.yml\z|\.gitlab\/ci)} => :engineering_productivity,
+ %r{\A\.codeclimate\.yml\z} => :engineering_productivity,
%r{\A\.overcommit\.yml\.example\z} => :engineering_productivity,
- %r{\Atooling/overcommit/} => :engineering_productivity,
- %r{\A.editorconfig\z} => :engineering_productivity,
+ %r{\A\.editorconfig\z} => :engineering_productivity,
%r{Dangerfile\z} => :engineering_productivity,
%r{\A(ee/)?(danger/|lib/gitlab/danger/)} => :engineering_productivity,
%r{\A(ee/)?scripts/} => :engineering_productivity,
- %r{\A\.codeclimate\.yml\z} => :engineering_productivity,
+ %r{\Atooling/} => :engineering_productivity,
%r{\A(ee/)?app/(?!assets|views)[^/]+} => :backend,
%r{\A(ee/)?(bin|config|generator_templates|lib|rubocop)/} => :backend,
@@ -184,7 +211,7 @@ module Gitlab
end
def sanitize_mr_title(title)
- title.gsub(/^WIP: */, '').gsub(/`/, '\\\`')
+ title.gsub(DRAFT_REGEX, '').gsub(/`/, '\\\`')
end
def security_mr?
@@ -193,6 +220,18 @@ module Gitlab
gitlab_helper.mr_json['web_url'].include?('/gitlab-org/security/')
end
+ def cherry_pick_mr?
+ return false unless gitlab_helper
+
+ /cherry[\s-]*pick/i.match?(gitlab_helper.mr_json['title'])
+ end
+
+ def stable_branch?
+ return false unless gitlab_helper
+
+ /\A\d+-\d+-stable-ee/i.match?(gitlab_helper.mr_json['target_branch'])
+ end
+
def mr_has_labels?(*labels)
return false unless gitlab_helper
diff --git a/lib/gitlab/danger/roulette.rb b/lib/gitlab/danger/roulette.rb
index 9f7980dc20a..ed4af3f4a43 100644
--- a/lib/gitlab/danger/roulette.rb
+++ b/lib/gitlab/danger/roulette.rb
@@ -5,8 +5,12 @@ require_relative 'teammate'
module Gitlab
module Danger
module Roulette
- ROULETTE_DATA_URL = 'https://about.gitlab.com/roulette.json'
- OPTIONAL_CATEGORIES = [:qa, :test].freeze
+ ROULETTE_DATA_URL = 'https://gitlab-org.gitlab.io/gitlab-roulette/roulette.json'
+ HOURS_WHEN_PERSON_CAN_BE_PICKED = (6..14).freeze
+
+ INCLUDE_TIMEZONE_FOR_CATEGORY = {
+ database: false
+ }.freeze
Spin = Struct.new(:category, :reviewer, :maintainer, :optional_role)
@@ -14,7 +18,7 @@ module Gitlab
# for each change category that a Merge Request contains.
#
# @return [Array<Spin>]
- def spin(project, categories, branch_name)
+ def spin(project, categories, branch_name, timezone_experiment: false)
team =
begin
project_team(project)
@@ -26,7 +30,9 @@ module Gitlab
canonical_branch_name = canonical_branch_name(branch_name)
spin_per_category = categories.each_with_object({}) do |category, memo|
- memo[category] = spin_for_category(team, project, category, canonical_branch_name)
+ including_timezone = INCLUDE_TIMEZONE_FOR_CATEGORY.fetch(category, timezone_experiment)
+
+ memo[category] = spin_for_category(team, project, category, canonical_branch_name, timezone_experiment: including_timezone)
end
spin_per_category.map do |category, spin|
@@ -80,9 +86,14 @@ module Gitlab
# Known issue: If someone is rejected due to OOO, and then becomes not OOO, the
# selection will change on next spin
# @param [Array<Teammate>] people
- def spin_for_person(people, random:)
- people.shuffle(random: random)
- .find(&method(:valid_person?))
+ def spin_for_person(people, random:, timezone_experiment: false)
+ shuffled_people = people.shuffle(random: random)
+
+ if timezone_experiment
+ shuffled_people.find(&method(:valid_person_with_timezone?))
+ else
+ shuffled_people.find(&method(:valid_person?))
+ end
end
private
@@ -90,7 +101,13 @@ module Gitlab
# @param [Teammate] person
# @return [Boolean]
def valid_person?(person)
- !mr_author?(person) && person.available?
+ !mr_author?(person) && person.available
+ end
+
+ # @param [Teammate] person
+ # @return [Boolean]
+ def valid_person_with_timezone?(person)
+ valid_person?(person) && HOURS_WHEN_PERSON_CAN_BE_PICKED.cover?(person.local_hour)
end
# @param [Teammate] person
@@ -105,7 +122,7 @@ module Gitlab
end
end
- def spin_for_category(team, project, category, branch_name)
+ def spin_for_category(team, project, category, branch_name, timezone_experiment: false)
reviewers, traintainers, maintainers =
%i[reviewer traintainer maintainer].map do |role|
spin_role_for_category(team, role, project, category)
@@ -116,14 +133,10 @@ module Gitlab
# Make traintainers have triple the chance to be picked as a reviewer
random = new_random(branch_name)
- reviewer = spin_for_person(reviewers + traintainers + traintainers, random: random)
- maintainer = spin_for_person(maintainers, random: random)
+ reviewer = spin_for_person(reviewers + traintainers + traintainers, random: random, timezone_experiment: timezone_experiment)
+ maintainer = spin_for_person(maintainers, random: random, timezone_experiment: timezone_experiment)
- Spin.new(category, reviewer, maintainer).tap do |spin|
- if OPTIONAL_CATEGORIES.include?(category)
- spin.optional_role = :maintainer
- end
- end
+ Spin.new(category, reviewer, maintainer)
end
end
end
diff --git a/lib/gitlab/danger/sidekiq_queues.rb b/lib/gitlab/danger/sidekiq_queues.rb
new file mode 100644
index 00000000000..726b6134abf
--- /dev/null
+++ b/lib/gitlab/danger/sidekiq_queues.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Danger
+ module SidekiqQueues
+ def changed_queue_files
+ @changed_queue_files ||= git.modified_files.grep(%r{\A(ee/)?app/workers/all_queues\.yml})
+ end
+
+ def added_queue_names
+ @added_queue_names ||= new_queues.keys - old_queues.keys
+ end
+
+ def changed_queue_names
+ @changed_queue_names ||=
+ (new_queues.values_at(*old_queues.keys) - old_queues.values)
+ .compact.map { |queue| queue[:name] }
+ end
+
+ private
+
+ def old_queues
+ @old_queues ||= queues_for(gitlab.base_commit)
+ end
+
+ def new_queues
+ @new_queues ||= queues_for(gitlab.head_commit)
+ end
+
+ def queues_for(branch)
+ changed_queue_files
+ .flat_map { |file| YAML.safe_load(`git show #{branch}:#{file}`, permitted_classes: [Symbol]) }
+ .to_h { |queue| [queue[:name], queue] }
+ end
+ end
+ end
+end
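# Illustrative sketch, not part of the diff above: the queue diffing boils down to hash
# operations keyed by queue name. The sample hashes stand in for the parsed
# all_queues.yml on the base and head commits.
old_queues = {
  'post_receive' => { name: 'post_receive', urgency: 'high' },
  'mailers'      => { name: 'mailers',      urgency: 'low' }
}

new_queues = {
  'post_receive' => { name: 'post_receive', urgency: 'throttled' }, # changed
  'mailers'      => { name: 'mailers',      urgency: 'low' },       # unchanged
  'new_worker'   => { name: 'new_worker',   urgency: 'low' }        # added
}

added_queue_names = new_queues.keys - old_queues.keys

changed_queue_names =
  (new_queues.values_at(*old_queues.keys) - old_queues.values)
    .compact.map { |queue| queue[:name] }

puts added_queue_names.inspect   # => ["new_worker"]
puts changed_queue_names.inspect # => ["post_receive"]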
diff --git a/lib/gitlab/danger/teammate.rb b/lib/gitlab/danger/teammate.rb
index 651b002d2bf..f7da66e77cd 100644
--- a/lib/gitlab/danger/teammate.rb
+++ b/lib/gitlab/danger/teammate.rb
@@ -1,28 +1,19 @@
# frozen_string_literal: true
-require 'cgi'
-require 'set'
-
module Gitlab
module Danger
class Teammate
- attr_reader :name, :username, :role, :projects
-
- AT_CAPACITY_EMOJI = Set.new(%w[red_circle]).freeze
- OOO_EMOJI = Set.new(%w[
- palm_tree
- beach beach_umbrella beach_with_umbrella
- ]).freeze
+ attr_reader :username, :name, :role, :projects, :available, :tz_offset_hours
+ # The options data are produced by https://gitlab.com/gitlab-org/gitlab-roulette/-/blob/master/lib/team_member.rb
def initialize(options = {})
@username = options['username']
- @name = options['name'] || @username
+ @name = options['name']
+ @markdown_name = options['markdown_name']
@role = options['role']
@projects = options['projects']
- end
-
- def markdown_name
- "[#{name}](https://gitlab.com/#{username}) (`@#{username}`)"
+ @available = options['available']
+ @tz_offset_hours = options['tz_offset_hours']
end
def in_project?(name)
@@ -43,40 +34,47 @@ module Gitlab
has_capability?(project, category, :maintainer, labels)
end
- def status
- return @status if defined?(@status)
+ def markdown_name(timezone_experiment: false, author: nil)
+ return @markdown_name unless timezone_experiment
- @status ||=
- begin
- Gitlab::Danger::RequestHelper.http_get_json(status_api_endpoint)
- rescue Gitlab::Danger::RequestHelper::HTTPError, JSON::ParserError
- nil # better no status than a crashing Danger
- end
+ "#{@markdown_name} (#{utc_offset_text(author)})"
end
- # @return [Boolean]
- def available?
- !out_of_office? && has_capacity?
+ def local_hour
+ (Time.now.utc + tz_offset_hours * 3600).hour
end
- private
+ protected
- def status_api_endpoint
- "https://gitlab.com/api/v4/users/#{CGI.escape(username)}/status"
- end
+ def floored_offset_hours
+ floored_offset = tz_offset_hours.floor(0)
- def status_emoji
- status&.dig("emoji")
+ floored_offset == tz_offset_hours ? floored_offset : tz_offset_hours
end
- # @return [Boolean]
- def out_of_office?
- status&.dig("message")&.match?(/OOO/i) || OOO_EMOJI.include?(status_emoji)
+ private
+
+ def utc_offset_text(author = nil)
+ offset_text =
+ if floored_offset_hours >= 0
+ "UTC+#{floored_offset_hours}"
+ else
+ "UTC#{floored_offset_hours}"
+ end
+
+ return offset_text unless author
+
+ "#{offset_text}, #{offset_diff_compared_to_author(author)}"
end
- # @return [Boolean]
- def has_capacity?
- !AT_CAPACITY_EMOJI.include?(status_emoji)
+ def offset_diff_compared_to_author(author)
+ diff = floored_offset_hours - author.floored_offset_hours
+ return "same timezone as `@#{author.username}`" if diff.zero?
+
+ ahead_or_behind = diff < 0 ? 'behind' : 'ahead'
+ pluralized_hours = pluralize(diff.abs, 'hour', 'hours')
+
+ "#{pluralized_hours} #{ahead_or_behind} `@#{author.username}`"
end
def has_capability?(project, category, kind, labels)
@@ -98,6 +96,12 @@ module Gitlab
def capabilities(project)
Array(projects.fetch(project, []))
end
+
+ def pluralize(count, singular, plural)
+ word = count == 1 || count.to_s =~ /^1(\.0+)?$/ ? singular : plural
+
+ "#{count || 0} #{word}"
+ end
end
end
end
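# Illustrative sketch, not part of the diff above: the timezone annotation appended to
# markdown_name. Given the reviewer's and the MR author's floored UTC offsets it renders
# strings such as "UTC+10, 8 hours ahead `@author`". Plain integers stand in for
# Teammate#floored_offset_hours; the username is made up.
def pluralize(count, singular, plural)
  word = count == 1 || count.to_s =~ /^1(\.0+)?$/ ? singular : plural

  "#{count || 0} #{word}"
end

def utc_offset_text(reviewer_offset, author_offset, author_username)
  offset_text = reviewer_offset >= 0 ? "UTC+#{reviewer_offset}" : "UTC#{reviewer_offset}"
  diff = reviewer_offset - author_offset
  return "#{offset_text}, same timezone as `@#{author_username}`" if diff.zero?

  ahead_or_behind = diff < 0 ? 'behind' : 'ahead'

  "#{offset_text}, #{pluralize(diff.abs, 'hour', 'hours')} #{ahead_or_behind} `@#{author_username}`"
end

puts utc_offset_text(10, 2, 'author')  # => "UTC+10, 8 hours ahead `@author`"
puts utc_offset_text(-5, -5, 'author') # => "UTC-5, same timezone as `@author`"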
diff --git a/lib/gitlab/database.rb b/lib/gitlab/database.rb
index 02005be1f6a..2bfb6c32886 100644
--- a/lib/gitlab/database.rb
+++ b/lib/gitlab/database.rb
@@ -4,6 +4,20 @@ module Gitlab
module Database
include Gitlab::Metrics::Methods
+ # Minimum PostgreSQL version requirement per documentation:
+ # https://docs.gitlab.com/ee/install/requirements.html#postgresql-requirements
+ MINIMUM_POSTGRES_VERSION = 11
+
+ # Upcoming PostgreSQL version requirements
+ # Allows a soft warning about an upcoming minimum version requirement
+ # so administrators can prepare to upgrade
+ UPCOMING_POSTGRES_VERSION_DETAILS = {
+ gl_version: '13.6.0',
+ gl_version_date: 'November 2020',
+ pg_version_minimum: 12,
+ url: 'https://gitlab.com/groups/gitlab-org/-/epics/2374'
+ }.freeze
+
# https://www.postgresql.org/docs/9.2/static/datatype-numeric.html
MAX_INT_VALUE = 2147483647
@@ -22,6 +36,16 @@ module Gitlab
MIN_SCHEMA_VERSION = 20190506135400
MIN_SCHEMA_GITLAB_VERSION = '11.11.0'
+ # Schema we store dynamically managed partitions in (e.g. for time partitioning)
+ DYNAMIC_PARTITIONS_SCHEMA = :gitlab_partitions_dynamic
+
+ # Schema we store static partitions in (e.g. for hash partitioning)
+ STATIC_PARTITIONS_SCHEMA = :gitlab_partitions_static
+
+ # This is the list of extra Postgres schemas owned by GitLab.
+ # It does not include the default public schema.
+ EXTRA_SCHEMAS = [DYNAMIC_PARTITIONS_SCHEMA, STATIC_PARTITIONS_SCHEMA].freeze
+
define_histogram :gitlab_database_transaction_seconds do
docstring "Time spent in database transactions, in seconds"
end
@@ -87,16 +111,39 @@ module Gitlab
version.to_f < 10
end
- def self.replication_slots_supported?
- version.to_f >= 9.4
+ def self.postgresql_minimum_supported_version?
+ version.to_f >= MINIMUM_POSTGRES_VERSION
end
- def self.postgresql_minimum_supported_version?
- version.to_f >= 9.6
+ def self.postgresql_upcoming_deprecation?
+ version.to_f < UPCOMING_POSTGRES_VERSION_DETAILS[:pg_version_minimum]
end
- def self.upsert_supported?
- version.to_f >= 9.5
+ def self.check_postgres_version_and_print_warning
+ return if Gitlab::Database.postgresql_minimum_supported_version?
+ return if Gitlab::Runtime.rails_runner?
+
+ Kernel.warn ERB.new(Rainbow.new.wrap(<<~EOS).red).result
+
+ ██  ██  █████  ██████  ███  ██ ██ ███  ██  ██████ 
+ ██  ██ ██   ██ ██   ██ ████  ██ ██ ████  ██ ██      
+ ██  █  ██ ███████ ██████  ██ ██  ██ ██ ██ ██  ██ ██  ███ 
+ ██ ███ ██ ██   ██ ██   ██ ██  ██ ██ ██ ██  ██ ██ ██  ██ 
+  ███ ███  ██  ██ ██  ██ ██   ████ ██ ██   ████  ██████  
+
+ ******************************************************************************
+ You are using PostgreSQL <%= Gitlab::Database.version %>, but PostgreSQL >= <%= Gitlab::Database::MINIMUM_POSTGRES_VERSION %>
+ is required for this version of GitLab.
+ <% if Rails.env.development? || Rails.env.test? %>
+ If using gitlab-development-kit, please find the relevant steps here:
+ https://gitlab.com/gitlab-org/gitlab-development-kit/-/blob/master/doc/howto/postgresql.md#upgrade-postgresql
+ <% end %>
+ Please upgrade your environment to a supported PostgreSQL version, see
+ https://docs.gitlab.com/ee/install/requirements.html#database for details.
+ ******************************************************************************
+ EOS
+ rescue ActiveRecord::ActiveRecordError, PG::Error
+ # ignore - happens when Rake tasks have yet to create a database, e.g. for testing
end
# map some of the function names that changed between PostgreSQL 9 and 10
@@ -182,9 +229,7 @@ module Gitlab
VALUES #{tuples.map { |tuple| "(#{tuple.join(', ')})" }.join(', ')}
EOF
- if upsert_supported? && on_conflict == :do_nothing
- sql = "#{sql} ON CONFLICT DO NOTHING"
- end
+ sql = "#{sql} ON CONFLICT DO NOTHING" if on_conflict == :do_nothing
sql = "#{sql} RETURNING id" if return_ids
diff --git a/lib/gitlab/database/background_migration_job.rb b/lib/gitlab/database/background_migration_job.rb
new file mode 100644
index 00000000000..445735b232a
--- /dev/null
+++ b/lib/gitlab/database/background_migration_job.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Database
+ class BackgroundMigrationJob < ActiveRecord::Base # rubocop:disable Rails/ApplicationRecord
+ self.table_name = :background_migration_jobs
+
+ scope :for_migration_class, -> (class_name) { where(class_name: normalize_class_name(class_name)) }
+ scope :for_migration_execution, -> (class_name, arguments) do
+ for_migration_class(class_name).where('arguments = ?', arguments.to_json)
+ end
+
+ scope :for_partitioning_migration, -> (class_name, table_name) do
+ for_migration_class(class_name).where('arguments ->> 2 = ?', table_name)
+ end
+
+ enum status: {
+ pending: 0,
+ succeeded: 1
+ }
+
+ def self.mark_all_as_succeeded(class_name, arguments)
+ self.pending.for_migration_execution(class_name, arguments)
+ .update_all("status = #{statuses[:succeeded]}, updated_at = NOW()")
+ end
+
+ def self.normalize_class_name(class_name)
+ return class_name unless class_name.present? && class_name.start_with?('::')
+
+ class_name[2..]
+ end
+
+ def class_name=(value)
+ write_attribute(:class_name, self.class.normalize_class_name(value))
+ end
+ end
+ end
+end
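# Illustrative sketch, not part of the diff above: the intended lifecycle of a tracking
# record. The class name and argument values are examples; the calls mirror the scopes
# and helpers defined in BackgroundMigrationJob.
Gitlab::Database::BackgroundMigrationJob.create!(
  class_name: '::Gitlab::BackgroundMigration::BackfillPartitionedTable', # stored without the leading '::'
  arguments: [1, 10_000, 'audit_events', 'audit_events_part', 'id']
)

# Later, from inside the background job, flag every matching pending record as done:
Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded(
  'Gitlab::BackgroundMigration::BackfillPartitionedTable',
  [1, 10_000, 'audit_events', 'audit_events_part', 'id']
)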
diff --git a/lib/gitlab/database/dynamic_model_helpers.rb b/lib/gitlab/database/dynamic_model_helpers.rb
new file mode 100644
index 00000000000..892f8291780
--- /dev/null
+++ b/lib/gitlab/database/dynamic_model_helpers.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Database
+ module DynamicModelHelpers
+ def define_batchable_model(table_name)
+ Class.new(ActiveRecord::Base) do
+ include EachBatch
+
+ self.table_name = table_name
+ self.inheritance_column = :_type_disabled
+ end
+ end
+ end
+ end
+end
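# Illustrative sketch, not part of the diff above: define_batchable_model builds an
# anonymous, STI-free ActiveRecord model for an arbitrary table so migration code can use
# EachBatch without referencing an application model. The table name is an example.
include Gitlab::Database::DynamicModelHelpers

audit_events = define_batchable_model('audit_events')

audit_events.each_batch(of: 1_000) do |relation|
  start_id, end_id = relation.pluck(Arel.sql('MIN(id), MAX(id)')).first

  puts "batch covers ids #{start_id}..#{end_id}"
end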
diff --git a/lib/gitlab/database/migration_helpers.rb b/lib/gitlab/database/migration_helpers.rb
index fd09c31e994..006a24da8fe 100644
--- a/lib/gitlab/database/migration_helpers.rb
+++ b/lib/gitlab/database/migration_helpers.rb
@@ -3,10 +3,10 @@
module Gitlab
module Database
module MigrationHelpers
+ include Migrations::BackgroundMigrationHelpers
+
# https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS
MAX_IDENTIFIER_NAME_LENGTH = 63
- BACKGROUND_MIGRATION_BATCH_SIZE = 1000 # Number of rows to process per job
- BACKGROUND_MIGRATION_JOB_BUFFER_SIZE = 1000 # Number of jobs to bulk queue at a time
PERMITTED_TIMESTAMP_COLUMNS = %i[created_at updated_at deleted_at].to_set.freeze
DEFAULT_TIMESTAMP_COLUMNS = %i[created_at updated_at].freeze
@@ -136,6 +136,10 @@ module Gitlab
'in the body of your migration class'
end
+ index_name = index_name[:name] if index_name.is_a?(Hash)
+
+ raise 'remove_concurrent_index_by_name must get an index name as the second argument' if index_name.blank?
+
options = options.merge({ algorithm: :concurrently })
unless index_exists_by_name?(table_name, index_name)
@@ -477,7 +481,7 @@ module Gitlab
# type is used.
# batch_column_name - option is for tables without primary key, in this
# case another unique integer column can be used. Example: :user_id
- def rename_column_concurrently(table, old, new, type: nil, batch_column_name: :id)
+ def rename_column_concurrently(table, old, new, type: nil, type_cast_function: nil, batch_column_name: :id)
unless column_exists?(table, batch_column_name)
raise "Column #{batch_column_name} does not exist on #{table}"
end
@@ -488,7 +492,7 @@ module Gitlab
check_trigger_permissions!(table)
- create_column_from(table, old, new, type: type, batch_column_name: batch_column_name)
+ create_column_from(table, old, new, type: type, batch_column_name: batch_column_name, type_cast_function: type_cast_function)
install_rename_triggers(table, old, new)
end
@@ -536,10 +540,10 @@ module Gitlab
# table - The table containing the column.
# column - The name of the column to change.
# new_type - The new column type.
- def change_column_type_concurrently(table, column, new_type)
+ def change_column_type_concurrently(table, column, new_type, type_cast_function: nil)
temp_column = "#{column}_for_type_change"
- rename_column_concurrently(table, column, temp_column, type: new_type)
+ rename_column_concurrently(table, column, temp_column, type: new_type, type_cast_function: type_cast_function)
end
# Performs cleanup of a concurrent type change.
@@ -786,10 +790,6 @@ module Gitlab
end
end
- def perform_background_migration_inline?
- Rails.env.test? || Rails.env.development?
- end
-
# Performs a concurrent column rename when using PostgreSQL.
def install_rename_triggers_for_postgresql(trigger, table, old, new)
execute <<-EOF.strip_heredoc
@@ -973,106 +973,6 @@ into similar problems in the future (e.g. when new tables are created).
end
end
- # Bulk queues background migration jobs for an entire table, batched by ID range.
- # "Bulk" meaning many jobs will be pushed at a time for efficiency.
- # If you need a delay interval per job, then use `queue_background_migration_jobs_by_range_at_intervals`.
- #
- # model_class - The table being iterated over
- # job_class_name - The background migration job class as a string
- # batch_size - The maximum number of rows per job
- #
- # Example:
- #
- # class Route < ActiveRecord::Base
- # include EachBatch
- # self.table_name = 'routes'
- # end
- #
- # bulk_queue_background_migration_jobs_by_range(Route, 'ProcessRoutes')
- #
- # Where the model_class includes EachBatch, and the background migration exists:
- #
- # class Gitlab::BackgroundMigration::ProcessRoutes
- # def perform(start_id, end_id)
- # # do something
- # end
- # end
- def bulk_queue_background_migration_jobs_by_range(model_class, job_class_name, batch_size: BACKGROUND_MIGRATION_BATCH_SIZE)
- raise "#{model_class} does not have an ID to use for batch ranges" unless model_class.column_names.include?('id')
-
- jobs = []
- table_name = model_class.quoted_table_name
-
- model_class.each_batch(of: batch_size) do |relation|
- start_id, end_id = relation.pluck("MIN(#{table_name}.id)", "MAX(#{table_name}.id)").first
-
- if jobs.length >= BACKGROUND_MIGRATION_JOB_BUFFER_SIZE
- # Note: This code path generally only helps with many millions of rows
- # We push multiple jobs at a time to reduce the time spent in
- # Sidekiq/Redis operations. We're using this buffer based approach so we
- # don't need to run additional queries for every range.
- bulk_migrate_async(jobs)
- jobs.clear
- end
-
- jobs << [job_class_name, [start_id, end_id]]
- end
-
- bulk_migrate_async(jobs) unless jobs.empty?
- end
-
- # Queues background migration jobs for an entire table, batched by ID range.
- # Each job is scheduled with a `delay_interval` in between.
- # If you use a small interval, then some jobs may run at the same time.
- #
- # model_class - The table or relation being iterated over
- # job_class_name - The background migration job class as a string
- # delay_interval - The duration between each job's scheduled time (must respond to `to_f`)
- # batch_size - The maximum number of rows per job
- # other_arguments - Other arguments to send to the job
- #
- # *Returns the final migration delay*
- #
- # Example:
- #
- # class Route < ActiveRecord::Base
- # include EachBatch
- # self.table_name = 'routes'
- # end
- #
- # queue_background_migration_jobs_by_range_at_intervals(Route, 'ProcessRoutes', 1.minute)
- #
- # Where the model_class includes EachBatch, and the background migration exists:
- #
- # class Gitlab::BackgroundMigration::ProcessRoutes
- # def perform(start_id, end_id)
- # # do something
- # end
- # end
- def queue_background_migration_jobs_by_range_at_intervals(model_class, job_class_name, delay_interval, batch_size: BACKGROUND_MIGRATION_BATCH_SIZE, other_job_arguments: [], initial_delay: 0)
- raise "#{model_class} does not have an ID to use for batch ranges" unless model_class.column_names.include?('id')
-
- # To not overload the worker too much we enforce a minimum interval both
- # when scheduling and performing jobs.
- if delay_interval < BackgroundMigrationWorker.minimum_interval
- delay_interval = BackgroundMigrationWorker.minimum_interval
- end
-
- final_delay = 0
-
- model_class.each_batch(of: batch_size) do |relation, index|
- start_id, end_id = relation.pluck(Arel.sql('MIN(id), MAX(id)')).first
-
- # `BackgroundMigrationWorker.bulk_perform_in` schedules all jobs for
- # the same time, which is not helpful in most cases where we wish to
- # spread the work over time.
- final_delay = initial_delay + delay_interval * index
- migrate_in(final_delay, job_class_name, [start_id, end_id] + other_job_arguments)
- end
-
- final_delay
- end
-
# Fetches indexes on a column by name for postgres.
#
# This will include indexes using an expression on the column, for example:
@@ -1131,30 +1031,6 @@ into similar problems in the future (e.g. when new tables are created).
execute(sql)
end
- def migrate_async(*args)
- with_migration_context do
- BackgroundMigrationWorker.perform_async(*args)
- end
- end
-
- def migrate_in(*args)
- with_migration_context do
- BackgroundMigrationWorker.perform_in(*args)
- end
- end
-
- def bulk_migrate_in(*args)
- with_migration_context do
- BackgroundMigrationWorker.bulk_perform_in(*args)
- end
- end
-
- def bulk_migrate_async(*args)
- with_migration_context do
- BackgroundMigrationWorker.bulk_perform_async(*args)
- end
- end
-
# Returns the name for a check constraint
#
# type:
@@ -1396,7 +1272,7 @@ into similar problems in the future (e.g. when new tables are created).
"ON DELETE #{on_delete.upcase}"
end
- def create_column_from(table, old, new, type: nil, batch_column_name: :id)
+ def create_column_from(table, old, new, type: nil, batch_column_name: :id, type_cast_function: nil)
old_col = column_for(table, old)
new_type = type || old_col.type
@@ -1410,7 +1286,13 @@ into similar problems in the future (e.g. when new tables are created).
# necessary since we copy over old values further down.
change_column_default(table, new, old_col.default) unless old_col.default.nil?
- update_column_in_batches(table, new, Arel::Table.new(table)[old], batch_column_name: batch_column_name)
+ old_value = Arel::Table.new(table)[old]
+
+ if type_cast_function.present?
+ old_value = Arel::Nodes::NamedFunction.new(type_cast_function, [old_value])
+ end
+
+ update_column_in_batches(table, new, old_value, batch_column_name: batch_column_name)
add_not_null_constraint(table, new) unless old_col.null
@@ -1437,10 +1319,6 @@ into similar problems in the future (e.g. when new tables are created).
your migration class
ERROR
end
-
- def with_migration_context(&block)
- Gitlab::ApplicationContext.with_context(caller_id: self.class.to_s, &block)
- end
end
end
end
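# Illustrative sketch, not part of the diff above: what the new type_cast_function keyword
# enables. The migration below is hypothetical; it runs each copied value through the
# PostgreSQL to_jsonb() function while concurrently changing the column type.
class ExampleChangeWeightToJsonb < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  disable_ddl_transaction!

  def up
    # Copies `weight` into a temporary column via to_jsonb(weight) and keeps both
    # columns in sync through rename triggers until the cleanup migration runs.
    change_column_type_concurrently :example_issues, :weight, :jsonb,
      type_cast_function: 'to_jsonb'
  end

  def down
    cleanup_concurrent_column_type_change :example_issues, :weight
  end
end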
diff --git a/lib/gitlab/database/migrations/background_migration_helpers.rb b/lib/gitlab/database/migrations/background_migration_helpers.rb
new file mode 100644
index 00000000000..a6cc03aa9eb
--- /dev/null
+++ b/lib/gitlab/database/migrations/background_migration_helpers.rb
@@ -0,0 +1,157 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Database
+ module Migrations
+ module BackgroundMigrationHelpers
+ BACKGROUND_MIGRATION_BATCH_SIZE = 1_000 # Number of rows to process per job
+ BACKGROUND_MIGRATION_JOB_BUFFER_SIZE = 1_000 # Number of jobs to bulk queue at a time
+
+ # Bulk queues background migration jobs for an entire table, batched by ID range.
+ # "Bulk" meaning many jobs will be pushed at a time for efficiency.
+ # If you need a delay interval per job, then use `queue_background_migration_jobs_by_range_at_intervals`.
+ #
+ # model_class - The table being iterated over
+ # job_class_name - The background migration job class as a string
+ # batch_size - The maximum number of rows per job
+ #
+ # Example:
+ #
+ # class Route < ActiveRecord::Base
+ # include EachBatch
+ # self.table_name = 'routes'
+ # end
+ #
+ # bulk_queue_background_migration_jobs_by_range(Route, 'ProcessRoutes')
+ #
+ # Where the model_class includes EachBatch, and the background migration exists:
+ #
+ # class Gitlab::BackgroundMigration::ProcessRoutes
+ # def perform(start_id, end_id)
+ # # do something
+ # end
+ # end
+ def bulk_queue_background_migration_jobs_by_range(model_class, job_class_name, batch_size: BACKGROUND_MIGRATION_BATCH_SIZE)
+ raise "#{model_class} does not have an ID to use for batch ranges" unless model_class.column_names.include?('id')
+
+ jobs = []
+ table_name = model_class.quoted_table_name
+
+ model_class.each_batch(of: batch_size) do |relation|
+ start_id, end_id = relation.pluck("MIN(#{table_name}.id)", "MAX(#{table_name}.id)").first
+
+ if jobs.length >= BACKGROUND_MIGRATION_JOB_BUFFER_SIZE
+ # Note: This code path generally only helps with many millions of rows.
+ # We push multiple jobs at a time to reduce the time spent in
+ # Sidekiq/Redis operations. We're using this buffer-based approach so we
+ # don't need to run additional queries for every range.
+ bulk_migrate_async(jobs)
+ jobs.clear
+ end
+
+ jobs << [job_class_name, [start_id, end_id]]
+ end
+
+ bulk_migrate_async(jobs) unless jobs.empty?
+ end
+
+ # Queues background migration jobs for an entire table, batched by ID range.
+ # Each job is scheduled with a `delay_interval` in between.
+ # If you use a small interval, then some jobs may run at the same time.
+ #
+ # model_class - The table or relation being iterated over
+ # job_class_name - The background migration job class as a string
+ # delay_interval - The duration between each job's scheduled time (must respond to `to_f`)
+ # batch_size - The maximum number of rows per job
+ # other_arguments - Other arguments to send to the job
+ # track_jobs - When this flag is set, a record is created in the background_migration_jobs table for each job
+ # that is scheduled to be run. These records can be used to trace execution of the background jobs, but there is
+ # no built-in support to manage them automatically at this time. You should only set this flag if you are aware
+ # of how it works, and intend to manually clean up the database records in your background job.
+ #
+ # *Returns the final migration delay*
+ #
+ # Example:
+ #
+ # class Route < ActiveRecord::Base
+ # include EachBatch
+ # self.table_name = 'routes'
+ # end
+ #
+ # queue_background_migration_jobs_by_range_at_intervals(Route, 'ProcessRoutes', 1.minute)
+ #
+ # Where the model_class includes EachBatch, and the background migration exists:
+ #
+ # class Gitlab::BackgroundMigration::ProcessRoutes
+ # def perform(start_id, end_id)
+ # # do something
+ # end
+ # end
+ def queue_background_migration_jobs_by_range_at_intervals(model_class, job_class_name, delay_interval, batch_size: BACKGROUND_MIGRATION_BATCH_SIZE, other_job_arguments: [], initial_delay: 0, track_jobs: false)
+ raise "#{model_class} does not have an ID to use for batch ranges" unless model_class.column_names.include?('id')
+
+ # To not overload the worker too much we enforce a minimum interval both
+ # when scheduling and performing jobs.
+ if delay_interval < BackgroundMigrationWorker.minimum_interval
+ delay_interval = BackgroundMigrationWorker.minimum_interval
+ end
+
+ final_delay = 0
+
+ model_class.each_batch(of: batch_size) do |relation, index|
+ start_id, end_id = relation.pluck(Arel.sql('MIN(id), MAX(id)')).first
+
+ # `BackgroundMigrationWorker.bulk_perform_in` schedules all jobs for
+ # the same time, which is not helpful in most cases where we wish to
+ # spread the work over time.
+ final_delay = initial_delay + delay_interval * index
+ full_job_arguments = [start_id, end_id] + other_job_arguments
+
+ track_in_database(job_class_name, full_job_arguments) if track_jobs
+ migrate_in(final_delay, job_class_name, full_job_arguments)
+ end
+
+ final_delay
+ end
+
+ def perform_background_migration_inline?
+ Rails.env.test? || Rails.env.development?
+ end
+
+ def migrate_async(*args)
+ with_migration_context do
+ BackgroundMigrationWorker.perform_async(*args)
+ end
+ end
+
+ def migrate_in(*args)
+ with_migration_context do
+ BackgroundMigrationWorker.perform_in(*args)
+ end
+ end
+
+ def bulk_migrate_in(*args)
+ with_migration_context do
+ BackgroundMigrationWorker.bulk_perform_in(*args)
+ end
+ end
+
+ def bulk_migrate_async(*args)
+ with_migration_context do
+ BackgroundMigrationWorker.bulk_perform_async(*args)
+ end
+ end
+
+ def with_migration_context(&block)
+ Gitlab::ApplicationContext.with_context(caller_id: self.class.to_s, &block)
+ end
+
+ private
+
+ def track_in_database(class_name, arguments)
+ Gitlab::Database::BackgroundMigrationJob.create!(class_name: class_name, arguments: arguments)
+ end
+ end
+ end
+ end
+end
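# Illustrative sketch, not part of the diff above: scheduling tracked background migration
# jobs from a regular migration. With track_jobs: true each queued range also gets a row in
# background_migration_jobs which the job itself is expected to mark as succeeded. The
# migration, model and job class names are made up.
class ExampleScheduleRouteBackfill < ActiveRecord::Migration[6.0]
  include Gitlab::Database::Migrations::BackgroundMigrationHelpers

  class Route < ActiveRecord::Base
    include EachBatch

    self.table_name = 'routes'
  end

  def up
    queue_background_migration_jobs_by_range_at_intervals(
      Route,
      'ProcessRoutes',
      2.minutes,
      batch_size: 10_000,
      other_job_arguments: ['routes'],
      track_jobs: true
    )
  end

  def down
    # no-op: already queued jobs are not unscheduled automatically
  end
end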
diff --git a/lib/gitlab/database/partitioning/monthly_strategy.rb b/lib/gitlab/database/partitioning/monthly_strategy.rb
new file mode 100644
index 00000000000..ecc05d9654a
--- /dev/null
+++ b/lib/gitlab/database/partitioning/monthly_strategy.rb
@@ -0,0 +1,96 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Database
+ module Partitioning
+ class MonthlyStrategy
+ attr_reader :model, :partitioning_key
+
+ # We create this many partitions in the future
+ HEADROOM = 6.months
+
+ delegate :table_name, to: :model
+
+ def initialize(model, partitioning_key)
+ @model = model
+ @partitioning_key = partitioning_key
+ end
+
+ def current_partitions
+ result = connection.select_all(<<~SQL)
+ select
+ pg_class.relname,
+ parent_class.relname as base_table,
+ pg_get_expr(pg_class.relpartbound, inhrelid) as condition
+ from pg_class
+ inner join pg_inherits i on pg_class.oid = inhrelid
+ inner join pg_class parent_class on parent_class.oid = inhparent
+ inner join pg_namespace ON pg_namespace.oid = pg_class.relnamespace
+ where pg_namespace.nspname = #{connection.quote(Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA)}
+ and parent_class.relname = #{connection.quote(table_name)}
+ and pg_class.relispartition
+ order by pg_class.relname
+ SQL
+
+ result.map do |record|
+ TimePartition.from_sql(table_name, record['relname'], record['condition'])
+ end
+ end
+
+ # Check the currently existing partitions and determine which ones are missing
+ def missing_partitions
+ desired_partitions - current_partitions
+ end
+
+ private
+
+ def desired_partitions
+ [].tap do |parts|
+ min_date, max_date = relevant_range
+
+ parts << partition_for(upper_bound: min_date)
+
+ while min_date < max_date
+ next_date = min_date.next_month
+
+ parts << partition_for(lower_bound: min_date, upper_bound: next_date)
+
+ min_date = next_date
+ end
+ end
+ end
+
+ # This determines the relevant time range for which we expect to have data
+ # (and therefore need to create partitions).
+ #
+ # Note: We typically expect the first partition to be half-unbounded, i.e.
+ # to range from MINVALUE to a specific date `x`. The range returned here
+ # does not include that first, half-unbounded partition.
+ def relevant_range
+ if first_partition = current_partitions.min
+ # Case 1: The first partition starts at MINVALUE (from is nil) -> start from its upper bound, the first bounded partition
+ # Case 2: Rather unexpectedly, the first partition does not start at MINVALUE (from is not nil)
+ # -> in that case, use the first partition's lower bound as the start
+ min_date = first_partition.from || first_partition.to
+ end
+
+ # In case we don't have a partition yet
+ min_date ||= Date.today
+ min_date = min_date.beginning_of_month
+
+ max_date = Date.today.end_of_month + HEADROOM
+
+ [min_date, max_date]
+ end
+
+ def partition_for(lower_bound: nil, upper_bound:)
+ TimePartition.new(table_name, lower_bound, upper_bound)
+ end
+
+ def connection
+ ActiveRecord::Base.connection
+ end
+ end
+ end
+ end
+end
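# Illustrative sketch, not part of the diff above: missing_partitions is a plain array
# difference, which only works because TimePartition defines ==, eql? and hash by value.
# The table name and dates are examples.
desired = [
  Gitlab::Database::Partitioning::TimePartition.new('audit_events_part', nil, '2020-08-01'),
  Gitlab::Database::Partitioning::TimePartition.new('audit_events_part', '2020-08-01', '2020-09-01')
]

current = [
  Gitlab::Database::Partitioning::TimePartition.new('audit_events_part', nil, '2020-08-01')
]

missing = desired - current
puts missing.map(&:partition_name).inspect # => ["audit_events_part_202008"]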
diff --git a/lib/gitlab/database/partitioning/partition_creator.rb b/lib/gitlab/database/partitioning/partition_creator.rb
new file mode 100644
index 00000000000..348dd1ba660
--- /dev/null
+++ b/lib/gitlab/database/partitioning/partition_creator.rb
@@ -0,0 +1,87 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Database
+ module Partitioning
+ class PartitionCreator
+ def self.register(model)
+ raise ArgumentError, "Only models with a #partitioning_strategy can be registered." unless model.respond_to?(:partitioning_strategy)
+
+ models << model
+ end
+
+ def self.models
+ @models ||= Set.new
+ end
+
+ LEASE_TIMEOUT = 1.minute
+ LEASE_KEY = 'database_partition_creation_%s'
+
+ attr_reader :models
+
+ def initialize(models = self.class.models)
+ @models = models
+ end
+
+ def create_partitions
+ return unless Feature.enabled?(:postgres_dynamic_partition_creation, default_enabled: true)
+
+ models.each do |model|
+ # Double-check before obtaining the lease:
+ # in the common case there are no missing partitions, so we can skip the lease entirely
+ next if missing_partitions(model).empty?
+
+ only_with_exclusive_lease(model) do
+ partitions_to_create = missing_partitions(model)
+
+ next if partitions_to_create.empty?
+
+ create(model, partitions_to_create)
+ end
+ rescue => e
+ Gitlab::AppLogger.error("Failed to create partition(s) for #{model.table_name}: #{e.class}: #{e.message}")
+ end
+ end
+
+ private
+
+ def missing_partitions(model)
+ return [] unless connection.table_exists?(model.table_name)
+
+ model.partitioning_strategy.missing_partitions
+ end
+
+ def only_with_exclusive_lease(model)
+ lease = Gitlab::ExclusiveLease.new(LEASE_KEY % model.table_name, timeout: LEASE_TIMEOUT)
+
+ yield if lease.try_obtain
+ ensure
+ lease&.cancel
+ end
+
+ def create(model, partitions)
+ connection.transaction do
+ with_lock_retries do
+ partitions.each do |partition|
+ connection.execute partition.to_sql
+
+ Gitlab::AppLogger.info("Created partition #{partition.partition_name} for table #{partition.table}")
+ end
+ end
+ end
+ end
+
+ def with_lock_retries(&block)
+ Gitlab::Database::WithLockRetries.new({
+ klass: self.class,
+ logger: Gitlab::AppLogger
+ }).run(&block)
+ end
+
+ def connection
+ ActiveRecord::Base.connection
+ end
+ end
+ end
+ end
+end
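# Illustrative sketch, not part of the diff above: what a model needs to provide before
# PartitionCreator.register accepts it. The model below is hypothetical.
class ExamplePartitionedAuditEvent < ActiveRecord::Base
  self.table_name = 'audit_events_part'

  def self.partitioning_strategy
    Gitlab::Database::Partitioning::MonthlyStrategy.new(self, :created_at)
  end
end

Gitlab::Database::Partitioning::PartitionCreator.register(ExamplePartitionedAuditEvent)

# Typically invoked from a recurring worker: creates any missing monthly partitions under
# the gitlab_partitions_dynamic schema, guarded by an exclusive lease per table.
Gitlab::Database::Partitioning::PartitionCreator.new.create_partitions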
diff --git a/lib/gitlab/database/partitioning/time_partition.rb b/lib/gitlab/database/partitioning/time_partition.rb
new file mode 100644
index 00000000000..7dca60c0854
--- /dev/null
+++ b/lib/gitlab/database/partitioning/time_partition.rb
@@ -0,0 +1,84 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Database
+ module Partitioning
+ class TimePartition
+ include Comparable
+
+ def self.from_sql(table, partition_name, definition)
+ matches = definition.match(/FOR VALUES FROM \('?(?<from>.+)'?\) TO \('?(?<to>.+)'?\)/)
+
+ raise ArgumentError, "Unknown partition definition: #{definition}" unless matches
+
+ raise NotImplementedError, "Open-end time partitions with MAXVALUE are not supported yet" if matches[:to] == 'MAXVALUE'
+
+ from = matches[:from] == 'MINVALUE' ? nil : matches[:from]
+ to = matches[:to]
+
+ new(table, from, to, partition_name: partition_name)
+ end
+
+ attr_reader :table, :from, :to
+
+ def initialize(table, from, to, partition_name: nil)
+ @table = table.to_s
+ @from = date_or_nil(from)
+ @to = date_or_nil(to)
+ @partition_name = partition_name
+ end
+
+ def partition_name
+ return @partition_name if @partition_name
+
+ suffix = from&.strftime('%Y%m') || '000000'
+
+ "#{table}_#{suffix}"
+ end
+
+ def to_sql
+ from_sql = from ? conn.quote(from.strftime('%Y-%m-%d')) : 'MINVALUE'
+ to_sql = conn.quote(to.strftime('%Y-%m-%d'))
+
+ <<~SQL
+ CREATE TABLE IF NOT EXISTS #{fully_qualified_partition}
+ PARTITION OF #{conn.quote_table_name(table)}
+ FOR VALUES FROM (#{from_sql}) TO (#{to_sql})
+ SQL
+ end
+
+ def ==(other)
+ table == other.table && partition_name == other.partition_name && from == other.from && to == other.to
+ end
+ alias_method :eql?, :==
+
+ def hash
+ [table, partition_name, from, to].hash
+ end
+
+ def <=>(other)
+ return if table != other.table
+
+ partition_name <=> other.partition_name
+ end
+
+ private
+
+ def date_or_nil(obj)
+ return unless obj
+ return obj if obj.is_a?(Date)
+
+ Date.parse(obj)
+ end
+
+ def fully_qualified_partition
+ "%s.%s" % [conn.quote_table_name(Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA), conn.quote_table_name(partition_name)]
+ end
+
+ def conn
+ @conn ||= ActiveRecord::Base.connection
+ end
+ end
+ end
+ end
+end
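# Illustrative sketch, not part of the diff above: parsing a partition bound definition as
# returned by pg_get_expr back into a TimePartition. The table and partition names are
# examples.
definition = "FOR VALUES FROM ('2020-08-01') TO ('2020-09-01')"

partition = Gitlab::Database::Partitioning::TimePartition.from_sql(
  'audit_events_part', 'audit_events_part_202008', definition
)

puts partition.partition_name      # => "audit_events_part_202008"
puts partition.from < partition.to # => true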
diff --git a/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table.rb b/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table.rb
new file mode 100644
index 00000000000..f9ad1e60776
--- /dev/null
+++ b/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table.rb
@@ -0,0 +1,105 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Database
+ module PartitioningMigrationHelpers
+ # Class that will generically copy data from a given table into its corresponding partitioned table
+ class BackfillPartitionedTable
+ include ::Gitlab::Database::DynamicModelHelpers
+
+ SUB_BATCH_SIZE = 2_500
+ PAUSE_SECONDS = 0.25
+
+ def perform(start_id, stop_id, source_table, partitioned_table, source_column)
+ return unless Feature.enabled?(:backfill_partitioned_audit_events, default_enabled: true)
+
+ if transaction_open?
+ raise "Aborting job to backfill partitioned #{source_table} table! Do not run this job in a transaction block!"
+ end
+
+ unless table_exists?(partitioned_table)
+ logger.warn "exiting backfill migration because partitioned table #{partitioned_table} does not exist. " \
+ "This could be due to the migration being rolled back after migration jobs were enqueued in sidekiq"
+ return
+ end
+
+ bulk_copy = BulkCopy.new(source_table, partitioned_table, source_column)
+ parent_batch_relation = relation_scoped_to_range(source_table, source_column, start_id, stop_id)
+
+ parent_batch_relation.each_batch(of: SUB_BATCH_SIZE) do |sub_batch|
+ sub_start_id, sub_stop_id = sub_batch.pluck(Arel.sql("MIN(#{source_column}), MAX(#{source_column})")).first
+
+ bulk_copy.copy_between(sub_start_id, sub_stop_id)
+ sleep(PAUSE_SECONDS)
+ end
+
+ mark_jobs_as_succeeded(start_id, stop_id, source_table, partitioned_table, source_column)
+ end
+
+ private
+
+ def connection
+ ActiveRecord::Base.connection
+ end
+
+ def transaction_open?
+ connection.transaction_open?
+ end
+
+ def table_exists?(table)
+ connection.table_exists?(table)
+ end
+
+ def logger
+ @logger ||= ::Gitlab::BackgroundMigration::Logger.build
+ end
+
+ def relation_scoped_to_range(source_table, source_key_column, start_id, stop_id)
+ define_batchable_model(source_table).where(source_key_column => start_id..stop_id)
+ end
+
+ def mark_jobs_as_succeeded(*arguments)
+ BackgroundMigrationJob.mark_all_as_succeeded(self.class.name, arguments)
+ end
+
+ # Helper class to copy data between two tables via upserts
+ class BulkCopy
+ DELIMITER = ', '
+
+ attr_reader :source_table, :destination_table, :source_column
+
+ def initialize(source_table, destination_table, source_column)
+ @source_table = source_table
+ @destination_table = destination_table
+ @source_column = source_column
+ end
+
+ def copy_between(start_id, stop_id)
+ connection.execute(<<~SQL)
+ INSERT INTO #{destination_table} (#{column_listing})
+ SELECT #{column_listing}
+ FROM #{source_table}
+ WHERE #{source_column} BETWEEN #{start_id} AND #{stop_id}
+ FOR UPDATE
+ ON CONFLICT (#{conflict_targets}) DO NOTHING
+ SQL
+ end
+
+ private
+
+ def connection
+ @connection ||= ActiveRecord::Base.connection
+ end
+
+ def column_listing
+ @column_listing ||= connection.columns(source_table).map(&:name).join(DELIMITER)
+ end
+
+ def conflict_targets
+ connection.primary_key(destination_table).join(DELIMITER)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers.rb b/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers.rb
index 9e687009cd7..1fb9476b7d9 100644
--- a/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers.rb
+++ b/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers.rb
@@ -99,7 +99,7 @@ module Gitlab
drop_function(fn_name, if_exists: true)
else
create_or_replace_fk_function(fn_name, final_keys)
- create_trigger(trigger_name, fn_name, fires: "AFTER DELETE ON #{to_table}")
+ create_trigger(to_table, trigger_name, fn_name, fires: 'AFTER DELETE')
end
end
end
diff --git a/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers.rb b/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers.rb
index f77fbe98df1..b676767f41d 100644
--- a/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers.rb
+++ b/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers.rb
@@ -5,10 +5,16 @@ module Gitlab
module PartitioningMigrationHelpers
module TableManagementHelpers
include ::Gitlab::Database::SchemaHelpers
+ include ::Gitlab::Database::DynamicModelHelpers
+ include ::Gitlab::Database::Migrations::BackgroundMigrationHelpers
- WHITELISTED_TABLES = %w[audit_events].freeze
+ ALLOWED_TABLES = %w[audit_events].freeze
ERROR_SCOPE = 'table partitioning'
+ MIGRATION_CLASS_NAME = "::#{module_parent_name}::BackfillPartitionedTable"
+ BATCH_INTERVAL = 2.minutes.freeze
+ BATCH_SIZE = 50_000
+
# Creates a partitioned copy of an existing table, using a RANGE partitioning strategy on a timestamp column.
# One partition is created per month between the given `min_date` and `max_date`.
#
@@ -18,14 +24,25 @@ module Gitlab
#
# partition_table_by_date :audit_events, :created_at, min_date: Date.new(2020, 1), max_date: Date.new(2020, 6)
#
- # Required options are:
+ # Options are:
# :min_date - a date specifying the lower bounds of the partition range
- # :max_date - a date specifying the upper bounds of the partitioning range
+ # :max_date - a date specifying the upper bound of the partitioning range; defaults to today + 1 month
#
- def partition_table_by_date(table_name, column_name, min_date:, max_date:)
- assert_table_is_whitelisted(table_name)
+ # Unless min_date is specified explicitly, we default to
+ # 1. The minimum value for the partitioning column in the table
+ # 2. If no data is present yet, the current month
+ def partition_table_by_date(table_name, column_name, min_date: nil, max_date: nil)
+ assert_table_is_allowed(table_name)
+
assert_not_in_transaction_block(scope: ERROR_SCOPE)
+ max_date ||= Date.today + 1.month
+
+ min_date ||= connection.select_one(<<~SQL)['minimum'] || max_date - 1.month
+ SELECT date_trunc('MONTH', MIN(#{column_name})) AS minimum
+ FROM #{table_name}
+ SQL
+
raise "max_date #{max_date} must be greater than min_date #{min_date}" if min_date >= max_date
primary_key = connection.primary_key(table_name)
@@ -34,10 +51,12 @@ module Gitlab
partition_column = find_column_definition(table_name, column_name)
raise "partition column #{column_name} does not exist on #{table_name}" if partition_column.nil?
- new_table_name = partitioned_table_name(table_name)
- create_range_partitioned_copy(new_table_name, table_name, partition_column, primary_key)
- create_daterange_partitions(new_table_name, partition_column.name, min_date, max_date)
- create_sync_trigger(table_name, new_table_name, primary_key)
+ partitioned_table_name = make_partitioned_table_name(table_name)
+
+ create_range_partitioned_copy(table_name, partitioned_table_name, partition_column, primary_key)
+ create_daterange_partitions(partitioned_table_name, partition_column.name, min_date, max_date)
+ create_trigger_to_sync_tables(table_name, partitioned_table_name, primary_key)
+ enqueue_background_migration(table_name, partitioned_table_name, primary_key)
end
# Clean up a partitioned copy of an existing table. This deletes the partitioned table and all partitions.
@@ -47,39 +66,58 @@ module Gitlab
# drop_partitioned_table_for :audit_events
#
def drop_partitioned_table_for(table_name)
- assert_table_is_whitelisted(table_name)
+ assert_table_is_allowed(table_name)
assert_not_in_transaction_block(scope: ERROR_SCOPE)
+ cleanup_migration_jobs(table_name)
+
with_lock_retries do
- trigger_name = sync_trigger_name(table_name)
+ trigger_name = make_sync_trigger_name(table_name)
drop_trigger(table_name, trigger_name)
end
- function_name = sync_function_name(table_name)
+ function_name = make_sync_function_name(table_name)
drop_function(function_name)
- part_table_name = partitioned_table_name(table_name)
- drop_table(part_table_name)
+ partitioned_table_name = make_partitioned_table_name(table_name)
+ drop_table(partitioned_table_name)
+ end
+
+ def create_hash_partitions(table_name, number_of_partitions)
+ transaction do
+ (0..number_of_partitions - 1).each do |partition|
+ decimals = Math.log10(number_of_partitions).ceil
+ suffix = "%0#{decimals}d" % partition
+ partition_name = "#{table_name}_#{suffix}"
+ schema = Gitlab::Database::STATIC_PARTITIONS_SCHEMA
+
+ execute(<<~SQL)
+ CREATE TABLE #{schema}.#{partition_name}
+ PARTITION OF #{table_name}
+ FOR VALUES WITH (MODULUS #{number_of_partitions}, REMAINDER #{partition});
+ SQL
+ end
+ end
end
private
- def assert_table_is_whitelisted(table_name)
- return if WHITELISTED_TABLES.include?(table_name.to_s)
+ def assert_table_is_allowed(table_name)
+ return if ALLOWED_TABLES.include?(table_name.to_s)
- raise "partitioning helpers are in active development, and #{table_name} is not whitelisted for use, " \
+ raise "partitioning helpers are in active development, and #{table_name} is not allowed for use, " \
"for more information please contact the database team"
end
- def partitioned_table_name(table)
+ def make_partitioned_table_name(table)
tmp_table_name("#{table}_part")
end
- def sync_function_name(table)
+ def make_sync_function_name(table)
object_name(table, 'table_sync_function')
end
- def sync_trigger_name(table)
+ def make_sync_trigger_name(table)
object_name(table, 'table_sync_trigger')
end
@@ -87,11 +125,11 @@ module Gitlab
connection.columns(table).find { |c| c.name == column.to_s }
end
- def create_range_partitioned_copy(table_name, template_table_name, partition_column, primary_key)
- if table_exists?(table_name)
+ def create_range_partitioned_copy(source_table_name, partitioned_table_name, partition_column, primary_key)
+ if table_exists?(partitioned_table_name)
# rubocop:disable Gitlab/RailsLogger
Rails.logger.warn "Partitioned table not created because it already exists" \
- " (this may be due to an aborted migration or similar): table_name: #{table_name} "
+ " (this may be due to an aborted migration or similar): table_name: #{partitioned_table_name} "
# rubocop:enable Gitlab/RailsLogger
return
end
@@ -99,20 +137,20 @@ module Gitlab
tmp_column_name = object_name(partition_column.name, 'partition_key')
transaction do
execute(<<~SQL)
- CREATE TABLE #{table_name} (
- LIKE #{template_table_name} INCLUDING ALL EXCLUDING INDEXES,
+ CREATE TABLE #{partitioned_table_name} (
+ LIKE #{source_table_name} INCLUDING ALL EXCLUDING INDEXES,
#{tmp_column_name} #{partition_column.sql_type} NOT NULL,
PRIMARY KEY (#{[primary_key, tmp_column_name].join(", ")})
) PARTITION BY RANGE (#{tmp_column_name})
SQL
- remove_column(table_name, partition_column.name)
- rename_column(table_name, tmp_column_name, partition_column.name)
- change_column_default(table_name, primary_key, nil)
+ remove_column(partitioned_table_name, partition_column.name)
+ rename_column(partitioned_table_name, tmp_column_name, partition_column.name)
+ change_column_default(partitioned_table_name, primary_key, nil)
- if column_of_type?(table_name, primary_key, :integer)
+ if column_of_type?(partitioned_table_name, primary_key, :integer)
# Default to int8 primary keys to prevent overflow
- change_column(table_name, primary_key, :bigint)
+ change_column(partitioned_table_name, primary_key, :bigint)
end
end
end
@@ -125,7 +163,8 @@ module Gitlab
min_date = min_date.beginning_of_month.to_date
max_date = max_date.next_month.beginning_of_month.to_date
- create_range_partition_safely("#{table_name}_000000", table_name, 'MINVALUE', to_sql_date_literal(min_date))
+ upper_bound = to_sql_date_literal(min_date)
+ create_range_partition_safely("#{table_name}_000000", table_name, 'MINVALUE', upper_bound)
while min_date < max_date
partition_name = "#{table_name}_#{min_date.strftime('%Y%m')}"
@@ -143,7 +182,7 @@ module Gitlab
end
def create_range_partition_safely(partition_name, table_name, lower_bound, upper_bound)
- if table_exists?(partition_name)
+ if table_exists?(table_for_range_partition(partition_name))
# rubocop:disable Gitlab/RailsLogger
Rails.logger.warn "Partition not created because it already exists" \
" (this may be due to an aborted migration or similar): partition_name: #{partition_name}"
@@ -154,34 +193,42 @@ module Gitlab
create_range_partition(partition_name, table_name, lower_bound, upper_bound)
end
- def create_sync_trigger(source_table, target_table, unique_key)
- function_name = sync_function_name(source_table)
- trigger_name = sync_trigger_name(source_table)
+ def create_trigger_to_sync_tables(source_table_name, partitioned_table_name, unique_key)
+ function_name = make_sync_function_name(source_table_name)
+ trigger_name = make_sync_trigger_name(source_table_name)
with_lock_retries do
- create_sync_function(function_name, target_table, unique_key)
- create_comment('FUNCTION', function_name, "Partitioning migration: table sync for #{source_table} table")
+ create_sync_function(function_name, partitioned_table_name, unique_key)
+ create_comment('FUNCTION', function_name, "Partitioning migration: table sync for #{source_table_name} table")
- create_trigger(trigger_name, function_name, fires: "AFTER INSERT OR UPDATE OR DELETE ON #{source_table}")
+ create_sync_trigger(source_table_name, trigger_name, function_name)
end
end
- def create_sync_function(name, target_table, unique_key)
+ def create_sync_function(name, partitioned_table_name, unique_key)
+ if function_exists?(name)
+ # rubocop:disable Gitlab/RailsLogger
+ Rails.logger.warn "Partitioning sync function not created because it already exists" \
+ " (this may be due to an aborted migration or similar): function name: #{name}"
+ # rubocop:enable Gitlab/RailsLogger
+ return
+ end
+
delimiter = ",\n "
- column_names = connection.columns(target_table).map(&:name)
+ column_names = connection.columns(partitioned_table_name).map(&:name)
set_statements = build_set_statements(column_names, unique_key)
insert_values = column_names.map { |name| "NEW.#{name}" }
create_trigger_function(name, replace: false) do
<<~SQL
IF (TG_OP = 'DELETE') THEN
- DELETE FROM #{target_table} where #{unique_key} = OLD.#{unique_key};
+ DELETE FROM #{partitioned_table_name} where #{unique_key} = OLD.#{unique_key};
ELSIF (TG_OP = 'UPDATE') THEN
- UPDATE #{target_table}
+ UPDATE #{partitioned_table_name}
SET #{set_statements.join(delimiter)}
- WHERE #{target_table}.#{unique_key} = NEW.#{unique_key};
+ WHERE #{partitioned_table_name}.#{unique_key} = NEW.#{unique_key};
ELSIF (TG_OP = 'INSERT') THEN
- INSERT INTO #{target_table} (#{column_names.join(delimiter)})
+ INSERT INTO #{partitioned_table_name} (#{column_names.join(delimiter)})
VALUES (#{insert_values.join(delimiter)});
END IF;
RETURN NULL;
@@ -190,7 +237,35 @@ module Gitlab
end
def build_set_statements(column_names, unique_key)
- column_names.reject { |name| name == unique_key }.map { |column_name| "#{column_name} = NEW.#{column_name}" }
+ column_names.reject { |name| name == unique_key }.map { |name| "#{name} = NEW.#{name}" }
+ end
+
+ def create_sync_trigger(table_name, trigger_name, function_name)
+ if trigger_exists?(table_name, trigger_name)
+ # rubocop:disable Gitlab/RailsLogger
+ Rails.logger.warn "Partitioning sync trigger not created because it already exists" \
+ " (this may be due to an aborted migration or similar): trigger name: #{trigger_name}"
+ # rubocop:enable Gitlab/RailsLogger
+ return
+ end
+
+ create_trigger(table_name, trigger_name, function_name, fires: 'AFTER INSERT OR UPDATE OR DELETE')
+ end
+
+ def enqueue_background_migration(source_table_name, partitioned_table_name, source_key)
+ source_model = define_batchable_model(source_table_name)
+
+ queue_background_migration_jobs_by_range_at_intervals(
+ source_model,
+ MIGRATION_CLASS_NAME,
+ BATCH_INTERVAL,
+ batch_size: BATCH_SIZE,
+ other_job_arguments: [source_table_name.to_s, partitioned_table_name, source_key],
+ track_jobs: true)
+ end
+
+ def cleanup_migration_jobs(table_name)
+ ::Gitlab::Database::BackgroundMigrationJob.for_partitioning_migration(MIGRATION_CLASS_NAME, table_name).delete_all
end
end
end
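# Illustrative sketch, not part of the diff above: how create_hash_partitions derives its
# zero-padded partition names. With 64 partitions the suffixes run from _00 to _63 and the
# tables live in the gitlab_partitions_static schema. The table name is an example.
table_name = 'example_hash_partitioned_table'
number_of_partitions = 64

partition_names = (0..number_of_partitions - 1).map do |partition|
  decimals = Math.log10(number_of_partitions).ceil
  suffix = "%0#{decimals}d" % partition

  "gitlab_partitions_static.#{table_name}_#{suffix}"
end

puts partition_names.first # => "gitlab_partitions_static.example_hash_partitioned_table_00"
puts partition_names.last  # => "gitlab_partitions_static.example_hash_partitioned_table_63"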
diff --git a/lib/gitlab/database/schema_helpers.rb b/lib/gitlab/database/schema_helpers.rb
index 8e544307d81..34daafd06de 100644
--- a/lib/gitlab/database/schema_helpers.rb
+++ b/lib/gitlab/database/schema_helpers.rb
@@ -16,15 +16,30 @@ module Gitlab
SQL
end
- def create_trigger(name, function_name, fires: nil)
+ def function_exists?(name)
+ connection.select_value("SELECT 1 FROM pg_proc WHERE proname = '#{name}'")
+ end
+
+ def create_trigger(table_name, name, function_name, fires:)
execute(<<~SQL)
CREATE TRIGGER #{name}
- #{fires}
+ #{fires} ON #{table_name}
FOR EACH ROW
EXECUTE PROCEDURE #{function_name}()
SQL
end
+ def trigger_exists?(table_name, name)
+ connection.select_value(<<~SQL)
+ SELECT 1
+ FROM pg_trigger
+ INNER JOIN pg_class
+ ON pg_trigger.tgrelid = pg_class.oid
+ WHERE pg_class.relname = '#{table_name}'
+ AND pg_trigger.tgname = '#{name}'
+ SQL
+ end
+
def drop_function(name, if_exists: true)
exists_clause = optional_clause(if_exists, "IF EXISTS")
execute("DROP FUNCTION #{exists_clause} #{name}()")
@@ -69,9 +84,13 @@ module Gitlab
private
+ def table_for_range_partition(partition_name)
+ "#{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.#{partition_name}"
+ end
+
def create_range_partition(partition_name, table_name, lower_bound, upper_bound)
execute(<<~SQL)
- CREATE TABLE #{partition_name} PARTITION OF #{table_name}
+ CREATE TABLE #{table_for_range_partition(partition_name)} PARTITION OF #{table_name}
FOR VALUES FROM (#{lower_bound}) TO (#{upper_bound})
SQL
end
diff --git a/lib/gitlab/diff/file.rb b/lib/gitlab/diff/file.rb
index 72dcc4fde71..dcd4bbdabf5 100644
--- a/lib/gitlab/diff/file.rb
+++ b/lib/gitlab/diff/file.rb
@@ -230,11 +230,15 @@ module Gitlab
end
def added_lines
- @stats&.additions || diff_lines.count(&:added?)
+ strong_memoize(:added_lines) do
+ @stats&.additions || diff_lines.count(&:added?)
+ end
end
def removed_lines
- @stats&.deletions || diff_lines.count(&:removed?)
+ strong_memoize(:removed_lines) do
+ @stats&.deletions || diff_lines.count(&:removed?)
+ end
end
def file_identifier
diff --git a/lib/gitlab/diff/file_collection/base.rb b/lib/gitlab/diff/file_collection/base.rb
index 38b636e4e5a..cf0611e44da 100644
--- a/lib/gitlab/diff/file_collection/base.rb
+++ b/lib/gitlab/diff/file_collection/base.rb
@@ -60,12 +60,20 @@ module Gitlab
end
end
- def diff_file_with_old_path(old_path)
- diff_files.find { |diff_file| diff_file.old_path == old_path }
+ def diff_file_with_old_path(old_path, a_mode = nil)
+ if Feature.enabled?(:file_identifier_hash) && a_mode.present?
+ diff_files.find { |diff_file| diff_file.old_path == old_path && diff_file.a_mode == a_mode }
+ else
+ diff_files.find { |diff_file| diff_file.old_path == old_path }
+ end
end
- def diff_file_with_new_path(new_path)
- diff_files.find { |diff_file| diff_file.new_path == new_path }
+ def diff_file_with_new_path(new_path, b_mode = nil)
+ if Feature.enabled?(:file_identifier_hash) && b_mode.present?
+ diff_files.find { |diff_file| diff_file.new_path == new_path && diff_file.b_mode == b_mode }
+ else
+ diff_files.find { |diff_file| diff_file.new_path == new_path }
+ end
end
def clear_cache
@@ -80,15 +88,18 @@ module Gitlab
def diff_stats_collection
strong_memoize(:diff_stats) do
- # There are scenarios where we don't need to request Diff Stats,
- # when caching for instance.
- next unless @include_stats
- next unless diff_refs
+ next unless fetch_diff_stats?
@repository.diff_stats(diff_refs.base_sha, diff_refs.head_sha)
end
end
+ def fetch_diff_stats?
+ # There are scenarios where we don't need to request Diff Stats,
+ # when caching for instance.
+ @include_stats && diff_refs
+ end
+
def decorate_diff!(diff)
return diff if diff.is_a?(File)
diff --git a/lib/gitlab/diff/file_collection/merge_request_diff_base.rb b/lib/gitlab/diff/file_collection/merge_request_diff_base.rb
index d126fdb2be2..d54e1aad19a 100644
--- a/lib/gitlab/diff/file_collection/merge_request_diff_base.rb
+++ b/lib/gitlab/diff/file_collection/merge_request_diff_base.rb
@@ -20,7 +20,7 @@ module Gitlab
strong_memoize(:diff_files) do
diff_files = super
- diff_files.each { |diff_file| cache.decorate(diff_file) }
+ diff_files.each { |diff_file| highlight_cache.decorate(diff_file) }
diff_files
end
@@ -28,16 +28,14 @@ module Gitlab
override :write_cache
def write_cache
- cache.write_if_empty
+ highlight_cache.write_if_empty
+ diff_stats_cache.write_if_empty(diff_stats_collection)
end
override :clear_cache
def clear_cache
- cache.clear
- end
-
- def cache_key
- cache.key
+ highlight_cache.clear
+ diff_stats_cache.clear
end
def real_size
@@ -46,8 +44,25 @@ module Gitlab
private
- def cache
- @cache ||= Gitlab::Diff::HighlightCache.new(self)
+ def highlight_cache
+ strong_memoize(:highlight_cache) do
+ Gitlab::Diff::HighlightCache.new(self)
+ end
+ end
+
+ def diff_stats_cache
+ strong_memoize(:diff_stats_cache) do
+ Gitlab::Diff::StatsCache.new(cachable_key: @merge_request_diff.cache_key)
+ end
+ end
+
+ override :diff_stats_collection
+ def diff_stats_collection
+ strong_memoize(:diff_stats) do
+ next unless fetch_diff_stats?
+
+ diff_stats_cache.read || super
+ end
end
end
end
diff --git a/lib/gitlab/diff/file_collection/wiki_page.rb b/lib/gitlab/diff/file_collection/wiki_page.rb
new file mode 100644
index 00000000000..7873e85a0eb
--- /dev/null
+++ b/lib/gitlab/diff/file_collection/wiki_page.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Diff
+ module FileCollection
+ class WikiPage < Base
+ def initialize(page, diff_options:)
+ commit = page.wiki.commit(page.version.commit)
+ diff_options = diff_options.merge(
+ expanded: true,
+ paths: [page.path]
+ )
+
+ super(commit,
+ # TODO: Uncouple diffing from projects
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/217752
+ project: page.wiki,
+ diff_options: diff_options,
+ diff_refs: commit.diff_refs)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/diff/position_tracer.rb b/lib/gitlab/diff/position_tracer.rb
index a1c82ce9afc..1c21c35fa60 100644
--- a/lib/gitlab/diff/position_tracer.rb
+++ b/lib/gitlab/diff/position_tracer.rb
@@ -42,6 +42,10 @@ module Gitlab
@cd_diffs ||= compare(new_diff_refs.start_sha, new_diff_refs.head_sha)
end
+ def diff_file(position)
+ position.diff_file(project.repository)
+ end
+
private
def compare(start_sha, head_sha, straight: false)
diff --git a/lib/gitlab/diff/position_tracer/base_strategy.rb b/lib/gitlab/diff/position_tracer/base_strategy.rb
index 65049daabf4..61250bd2473 100644
--- a/lib/gitlab/diff/position_tracer/base_strategy.rb
+++ b/lib/gitlab/diff/position_tracer/base_strategy.rb
@@ -8,6 +8,7 @@ module Gitlab
delegate \
:project,
+ :diff_file,
:ac_diffs,
:bd_diffs,
:cd_diffs,
diff --git a/lib/gitlab/diff/position_tracer/image_strategy.rb b/lib/gitlab/diff/position_tracer/image_strategy.rb
index 79244a17951..046a6782dda 100644
--- a/lib/gitlab/diff/position_tracer/image_strategy.rb
+++ b/lib/gitlab/diff/position_tracer/image_strategy.rb
@@ -5,22 +5,26 @@ module Gitlab
class PositionTracer
class ImageStrategy < BaseStrategy
def trace(position)
+ a_path = position.old_path
b_path = position.new_path
+ diff_file = diff_file(position)
+ a_mode = diff_file&.a_mode
+ b_mode = diff_file&.b_mode
# If file exists in B->D (e.g. updated, renamed, removed), let the
# note become outdated.
- bd_diff = bd_diffs.diff_file_with_old_path(b_path)
+ bd_diff = bd_diffs.diff_file_with_old_path(b_path, b_mode)
return { position: new_position(position, bd_diff), outdated: true } if bd_diff
# If file still exists in the new diff, update the position.
- cd_diff = cd_diffs.diff_file_with_new_path(bd_diff&.new_path || b_path)
+ cd_diff = cd_diffs.diff_file_with_new_path(b_path, b_mode)
return { position: new_position(position, cd_diff), outdated: false } if cd_diff
# If file exists in A->C (e.g. rebased and same changes were present
# in target branch), let the note become outdated.
- ac_diff = ac_diffs.diff_file_with_old_path(position.old_path)
+ ac_diff = ac_diffs.diff_file_with_old_path(a_path, a_mode)
return { position: new_position(position, ac_diff), outdated: true } if ac_diff
diff --git a/lib/gitlab/diff/position_tracer/line_strategy.rb b/lib/gitlab/diff/position_tracer/line_strategy.rb
index 8db0fc6f963..e3c1e549b96 100644
--- a/lib/gitlab/diff/position_tracer/line_strategy.rb
+++ b/lib/gitlab/diff/position_tracer/line_strategy.rb
@@ -76,16 +76,20 @@ module Gitlab
def trace_added_line(position)
b_path = position.new_path
b_line = position.new_line
+ diff_file = diff_file(position)
+ b_mode = diff_file&.b_mode
- bd_diff = bd_diffs.diff_file_with_old_path(b_path)
+ bd_diff = bd_diffs.diff_file_with_old_path(b_path, b_mode)
d_path = bd_diff&.new_path || b_path
+ d_mode = bd_diff&.b_mode || b_mode
d_line = LineMapper.new(bd_diff).old_to_new(b_line)
if d_line
- cd_diff = cd_diffs.diff_file_with_new_path(d_path)
+ cd_diff = cd_diffs.diff_file_with_new_path(d_path, d_mode)
c_path = cd_diff&.old_path || d_path
+ c_mode = cd_diff&.a_mode || d_mode
c_line = LineMapper.new(cd_diff).new_to_old(d_line)
if c_line
@@ -98,7 +102,7 @@ module Gitlab
else
# If the line is no longer in the MR, we unfortunately cannot show
# the current state on the CD diff, so we treat it as outdated.
- ac_diff = ac_diffs.diff_file_with_new_path(c_path)
+ ac_diff = ac_diffs.diff_file_with_new_path(c_path, c_mode)
{ position: new_position(ac_diff, nil, c_line), outdated: true }
end
@@ -115,22 +119,26 @@ module Gitlab
def trace_removed_line(position)
a_path = position.old_path
a_line = position.old_line
+ diff_file = diff_file(position)
+ a_mode = diff_file&.a_mode
- ac_diff = ac_diffs.diff_file_with_old_path(a_path)
+ ac_diff = ac_diffs.diff_file_with_old_path(a_path, a_mode)
c_path = ac_diff&.new_path || a_path
+ c_mode = ac_diff&.b_mode || a_mode
c_line = LineMapper.new(ac_diff).old_to_new(a_line)
if c_line
- cd_diff = cd_diffs.diff_file_with_old_path(c_path)
+ cd_diff = cd_diffs.diff_file_with_old_path(c_path, c_mode)
d_path = cd_diff&.new_path || c_path
+ d_mode = cd_diff&.b_mode || c_mode
d_line = LineMapper.new(cd_diff).old_to_new(c_line)
if d_line
# If the line is still in C but also in D, it has turned from a
# removed line into an unchanged one.
- bd_diff = bd_diffs.diff_file_with_new_path(d_path)
+ bd_diff = bd_diffs.diff_file_with_new_path(d_path, d_mode)
{ position: new_position(bd_diff, nil, d_line), outdated: true }
else
@@ -148,17 +156,21 @@ module Gitlab
a_line = position.old_line
b_path = position.new_path
b_line = position.new_line
+ diff_file = diff_file(position)
+ a_mode = diff_file&.a_mode
+ b_mode = diff_file&.b_mode
- ac_diff = ac_diffs.diff_file_with_old_path(a_path)
+ ac_diff = ac_diffs.diff_file_with_old_path(a_path, a_mode)
c_path = ac_diff&.new_path || a_path
+ c_mode = ac_diff&.b_mode || a_mode
c_line = LineMapper.new(ac_diff).old_to_new(a_line)
- bd_diff = bd_diffs.diff_file_with_old_path(b_path)
+ bd_diff = bd_diffs.diff_file_with_old_path(b_path, b_mode)
d_line = LineMapper.new(bd_diff).old_to_new(b_line)
- cd_diff = cd_diffs.diff_file_with_old_path(c_path)
+ cd_diff = cd_diffs.diff_file_with_old_path(c_path, c_mode)
if c_line && d_line
# If the line is still in C and D, it is still unchanged.
diff --git a/lib/gitlab/diff/stats_cache.rb b/lib/gitlab/diff/stats_cache.rb
new file mode 100644
index 00000000000..f38fb21d497
--- /dev/null
+++ b/lib/gitlab/diff/stats_cache.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+#
+module Gitlab
+ module Diff
+ class StatsCache
+ include Gitlab::Metrics::Methods
+ include Gitlab::Utils::StrongMemoize
+
+ EXPIRATION = 1.week
+ VERSION = 1
+
+ def initialize(cachable_key:)
+ @cachable_key = cachable_key
+ end
+
+ def read
+ strong_memoize(:cached_values) do
+ content = cache.fetch(key)
+
+ next unless content
+
+ stats = content.map { |stat| Gitaly::DiffStats.new(stat) }
+
+ Gitlab::Git::DiffStatsCollection.new(stats)
+ end
+ end
+
+ def write_if_empty(stats)
+ return if cache.exist?(key)
+ return unless stats
+
+ cache.write(key, stats.as_json, expires_in: EXPIRATION)
+ end
+
+ def clear
+ cache.delete(key)
+ end
+
+ private
+
+ attr_reader :cachable_key
+
+ def cache
+ Rails.cache
+ end
+
+ def key
+ strong_memoize(:redis_key) do
+ ['diff_stats', cachable_key, VERSION].join(":")
+ end
+ end
+ end
+ end
+end
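
A minimal usage sketch of the new StatsCache (the caller, the cache key and the diff_stats fetch below are illustrative assumptions, not part of this patch): the cache is consulted first and populated only when empty.

  # `merge_request_diff` stands in for any object that can supply a cache key.
  stats_cache = Gitlab::Diff::StatsCache.new(cachable_key: merge_request_diff.cache_key)

  stats = stats_cache.read            # Gitlab::Git::DiffStatsCollection or nil
  if stats.nil?
    stats = repository.diff_stats(base_sha, head_sha)  # assumed Gitaly-backed fetch (see the diff_stats RPC below)
    stats_cache.write_if_empty(stats)                  # stored as JSON for EXPIRATION (1 week)
  end
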
diff --git a/lib/gitlab/discussions_diff/file_collection.rb b/lib/gitlab/discussions_diff/file_collection.rb
index 7a9d4c5c0c2..60b3a1738f1 100644
--- a/lib/gitlab/discussions_diff/file_collection.rb
+++ b/lib/gitlab/discussions_diff/file_collection.rb
@@ -70,8 +70,8 @@ module Gitlab
#
# Returns a Hash with { id => [Array of Gitlab::Diff::Line], ... }
def highlighted_lines_by_ids(ids)
- diff_files_indexed_by_id.slice(*ids).each_with_object({}) do |(id, file), hash|
- hash[id] = file.highlighted_diff_lines.map(&:to_hash)
+ diff_files_indexed_by_id.slice(*ids).transform_values do |file|
+ file.highlighted_diff_lines.map(&:to_hash)
end
end
end
diff --git a/lib/gitlab/discussions_diff/highlight_cache.rb b/lib/gitlab/discussions_diff/highlight_cache.rb
index 75d5a5df74b..4bec6467c1a 100644
--- a/lib/gitlab/discussions_diff/highlight_cache.rb
+++ b/lib/gitlab/discussions_diff/highlight_cache.rb
@@ -36,7 +36,9 @@ module Gitlab
content =
Redis::Cache.with do |redis|
- redis.mget(keys)
+ Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
+ redis.mget(keys)
+ end
end
content.map! do |lines|
@@ -58,7 +60,11 @@ module Gitlab
keys = raw_keys.map { |id| cache_key_for(id) }
- Redis::Cache.with { |redis| redis.del(keys) }
+ Redis::Cache.with do |redis|
+ Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
+ redis.del(keys)
+ end
+ end
end
def cache_key_for(raw_key)
diff --git a/lib/gitlab/email/handler.rb b/lib/gitlab/email/handler.rb
index 7f8dd815103..1b8421d34f3 100644
--- a/lib/gitlab/email/handler.rb
+++ b/lib/gitlab/email/handler.rb
@@ -12,7 +12,8 @@ module Gitlab
CreateNoteHandler,
CreateIssueHandler,
UnsubscribeHandler,
- CreateMergeRequestHandler
+ CreateMergeRequestHandler,
+ ServiceDeskHandler
]
end
@@ -25,5 +26,3 @@ module Gitlab
end
end
end
-
-Gitlab::Email::Handler.prepend_if_ee('::EE::Gitlab::Email::Handler')
diff --git a/lib/gitlab/email/handler/reply_processing.rb b/lib/gitlab/email/handler/reply_processing.rb
index 312a9fdfbae..1beea4f9054 100644
--- a/lib/gitlab/email/handler/reply_processing.rb
+++ b/lib/gitlab/email/handler/reply_processing.rb
@@ -37,7 +37,11 @@ module Gitlab
def process_message(**kwargs)
message = ReplyParser.new(mail, **kwargs).execute.strip
- add_attachments(message)
+ message_with_attachments = add_attachments(message)
+
+ # Support bot is specifically forbidden
+ # from using slash commands.
+ strip_quick_actions(message_with_attachments)
end
def add_attachments(reply)
@@ -82,6 +86,15 @@ module Gitlab
def valid_project_slug?(found_project)
project_slug == found_project.full_path_slug
end
+
+ def strip_quick_actions(content)
+ return content unless author.support_bot?
+
+ command_definitions = ::QuickActions::InterpretService.command_definitions
+ extractor = ::Gitlab::QuickActions::Extractor.new(command_definitions)
+
+ extractor.redact_commands(content)
+ end
end
end
end
diff --git a/lib/gitlab/email/handler/service_desk_handler.rb b/lib/gitlab/email/handler/service_desk_handler.rb
new file mode 100644
index 00000000000..bcd8b98a06f
--- /dev/null
+++ b/lib/gitlab/email/handler/service_desk_handler.rb
@@ -0,0 +1,152 @@
+# frozen_string_literal: true
+
+# handles service desk issue creation emails with these formats:
+# incoming+gitlab-org-gitlab-ce-20-issue-@incoming.gitlab.com
+# incoming+gitlab-org/gitlab-ce@incoming.gitlab.com (legacy)
+module Gitlab
+ module Email
+ module Handler
+ class ServiceDeskHandler < BaseHandler
+ include ReplyProcessing
+ include Gitlab::Utils::StrongMemoize
+
+ HANDLER_REGEX = /\A#{HANDLER_ACTION_BASE_REGEX}-issue-\z/.freeze
+ HANDLER_REGEX_LEGACY = /\A(?<project_path>[^\+]*)\z/.freeze
+ PROJECT_KEY_PATTERN = /\A(?<slug>.+)-(?<key>[a-z0-9_]+)\z/.freeze
+
+ def initialize(mail, mail_key, service_desk_key: nil)
+ super(mail, mail_key)
+
+ if service_desk_key.present?
+ @service_desk_key = service_desk_key
+ elsif !mail_key&.include?('/') && (matched = HANDLER_REGEX.match(mail_key.to_s))
+ @project_slug = matched[:project_slug]
+ @project_id = matched[:project_id]&.to_i
+ elsif matched = HANDLER_REGEX_LEGACY.match(mail_key.to_s)
+ @project_path = matched[:project_path]
+ end
+ end
+
+ def can_handle?
+ Gitlab::ServiceDesk.supported? && (project_id || can_handle_legacy_format? || service_desk_key)
+ end
+
+ def execute
+ raise ProjectNotFound if project.nil?
+
+ create_issue!
+ send_thank_you_email! if from_address
+ end
+
+ def metrics_params
+ super.merge(project: project&.full_path)
+ end
+
+ def metrics_event
+ :receive_email_service_desk
+ end
+
+ private
+
+ attr_reader :project_id, :project_path, :service_desk_key
+
+ def project
+ strong_memoize(:project) do
+ @project = service_desk_key ? project_from_key : super
+ @project = nil unless @project&.service_desk_enabled?
+ @project
+ end
+ end
+
+ def project_from_key
+ return unless match = service_desk_key.match(PROJECT_KEY_PATTERN)
+
+ project = Project.find_by_service_desk_project_key(match[:key])
+ return unless valid_project_key?(project, match[:slug])
+
+ project
+ end
+
+ def valid_project_key?(project, slug)
+ project.present? && slug == project.full_path_slug && Feature.enabled?(:service_desk_custom_address, project)
+ end
+
+ def create_issue!
+ @issue = Issues::CreateService.new(
+ project,
+ User.support_bot,
+ title: issue_title,
+ description: message_including_template,
+ confidential: true,
+ service_desk_reply_to: from_address
+ ).execute
+
+ raise InvalidIssueError unless @issue.persisted?
+
+ if service_desk_setting&.issue_template_missing?
+ create_template_not_found_note(@issue)
+ end
+ end
+
+ def send_thank_you_email!
+ Notify.service_desk_thank_you_email(@issue.id).deliver_later!
+ end
+
+ def message_including_template
+ description = message_including_reply
+ template_content = service_desk_setting&.issue_template_content
+
+ if template_content.present?
+ description += " \n" + template_content
+ end
+
+ description
+ end
+
+ def service_desk_setting
+ strong_memoize(:service_desk_setting) do
+ project.service_desk_setting
+ end
+ end
+
+ def create_template_not_found_note(issue)
+ issue_template_key = service_desk_setting&.issue_template_key
+
+ warning_note = <<-MD.strip_heredoc
+ WARNING: The template file #{issue_template_key}.md used for service desk issues is empty or could not be found.
+ Please check service desk settings and update the file to be used.
+ MD
+
+ note_params = {
+ noteable: issue,
+ note: warning_note
+ }
+
+ ::Notes::CreateService.new(
+ project,
+ User.support_bot,
+ note_params
+ ).execute
+ end
+
+ def from_address
+ (mail.reply_to || []).first || mail.from.first || mail.sender
+ end
+
+ def issue_title
+ from = "(from #{from_address})" if from_address
+
+ "Service Desk #{from}: #{mail.subject}"
+ end
+
+ def can_handle_legacy_format?
+ project_path && project_path.include?('/') && !mail_key.include?('+')
+ end
+
+ def author
+ User.support_bot
+ end
+ end
+ end
+ end
+end
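
As an illustration of how PROJECT_KEY_PATTERN splits a custom service desk key (the sample address key below is invented), the final hyphen separates the project slug from the configured key; project_from_key then checks the slug against project.full_path_slug and the :service_desk_custom_address feature flag.

  match = 'my-group-my-project-support'.match(
    Gitlab::Email::Handler::ServiceDeskHandler::PROJECT_KEY_PATTERN)
  match[:slug]  # => "my-group-my-project"
  match[:key]   # => "support"
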
diff --git a/lib/gitlab/email/service_desk_receiver.rb b/lib/gitlab/email/service_desk_receiver.rb
new file mode 100644
index 00000000000..1ee5c10097b
--- /dev/null
+++ b/lib/gitlab/email/service_desk_receiver.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Email
+ class ServiceDeskReceiver < Receiver
+ private
+
+ def find_handler(mail)
+ key = service_desk_key(mail)
+ return unless key
+
+ Gitlab::Email::Handler::ServiceDeskHandler.new(mail, nil, service_desk_key: key)
+ end
+
+ def service_desk_key(mail)
+ mail.to.find do |address|
+ key = ::Gitlab::ServiceDeskEmail.key_from_address(address)
+ break key if key
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/emoji.rb b/lib/gitlab/emoji.rb
index bcf92b35720..cab21d875ab 100644
--- a/lib/gitlab/emoji.rb
+++ b/lib/gitlab/emoji.rb
@@ -44,6 +44,10 @@ module Gitlab
"<img class='emoji' title=':#{name}:' alt=':#{name}:' src='#{src}' height='20' width='20' align='absmiddle' />"
end
+ def emoji_exists?(name)
+ emojis.has_key?(name)
+ end
+
# CSS sprite fallback takes precedence over image fallback
def gl_emoji_tag(name, options = {})
emoji_name = emojis_aliases[name] || name
diff --git a/lib/gitlab/error_tracking.rb b/lib/gitlab/error_tracking.rb
index a19ce22e53f..8d5611411c9 100644
--- a/lib/gitlab/error_tracking.rb
+++ b/lib/gitlab/error_tracking.rb
@@ -10,7 +10,6 @@ module Gitlab
Acme::Client::Error::Timeout
Acme::Client::Error::UnsupportedOperation
ActiveRecord::ConnectionTimeoutError
- ActiveRecord::QueryCanceled
Gitlab::RequestContext::RequestDeadlineExceeded
GRPC::DeadlineExceeded
JIRA::HTTPError
@@ -29,7 +28,7 @@ module Gitlab
config.processors << ::Gitlab::ErrorTracking::Processor::SidekiqProcessor
# Sanitize authentication headers
config.sanitize_http_headers = %w[Authorization Private-Token]
- config.tags = { program: Gitlab.process_name }
+ config.tags = extra_tags_from_env.merge(program: Gitlab.process_name)
config.before_send = method(:before_send)
yield config if block_given?
@@ -166,6 +165,15 @@ module Gitlab
}
end
+ # Static tags that are set on application start
+ def extra_tags_from_env
+ Gitlab::Json.parse(ENV.fetch('GITLAB_SENTRY_EXTRA_TAGS', '{}')).to_hash
+ rescue => e
+ Gitlab::AppLogger.debug("GITLAB_SENTRY_EXTRA_TAGS could not be parsed as JSON: #{e.class.name}: #{e.message}")
+
+ {}
+ end
+
# Debugging for https://gitlab.com/gitlab-org/gitlab-foss/issues/57727
def add_context_from_exception_type(event, hint)
if ActiveModel::MissingAttributeError === hint[:exception]
@@ -173,8 +181,7 @@ module Gitlab
.connection
.schema_cache
.instance_variable_get(:@columns_hash)
- .map { |k, v| [k, v.map(&:first)] }
- .to_h
+ .transform_values { |v| v.map(&:first) }
event.extra.merge!(columns_hash)
end
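
For reference, the shape of the new GITLAB_SENTRY_EXTRA_TAGS input (the tag names are invented examples, not prescribed values):

  ENV['GITLAB_SENTRY_EXTRA_TAGS'] = '{"stage":"main","deployment":"canary"}'  # example only
  # Once error tracking is configured at boot, every Sentry event carries these
  # tags plus program: Gitlab.process_name. Malformed JSON is logged at debug
  # level and falls back to an empty hash.
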
diff --git a/lib/gitlab/error_tracking/detailed_error.rb b/lib/gitlab/error_tracking/detailed_error.rb
index b49f2472e01..5d272efa64a 100644
--- a/lib/gitlab/error_tracking/detailed_error.rb
+++ b/lib/gitlab/error_tracking/detailed_error.rb
@@ -22,6 +22,7 @@ module Gitlab
:id,
:last_release_last_commit,
:last_release_short_version,
+ :last_release_version,
:last_seen,
:message,
:project_id,
diff --git a/lib/gitlab/file_finder.rb b/lib/gitlab/file_finder.rb
index d438b0415fa..6225955a930 100644
--- a/lib/gitlab/file_finder.rb
+++ b/lib/gitlab/file_finder.rb
@@ -42,7 +42,7 @@ module Gitlab
end
end
- # Overriden in Gitlab::WikiFileFinder
+ # Overridden in Gitlab::WikiFileFinder
def search_paths(query)
repository.search_files_by_name(query, ref)
end
diff --git a/lib/gitlab/git/commit.rb b/lib/gitlab/git/commit.rb
index 17d0a62ba8c..8db73ecc480 100644
--- a/lib/gitlab/git/commit.rb
+++ b/lib/gitlab/git/commit.rb
@@ -90,14 +90,15 @@ module Gitlab
#
# Commit.last_for_path(repo, 'master', 'Gemfile')
#
- def last_for_path(repo, ref, path = nil)
+ def last_for_path(repo, ref, path = nil, literal_pathspec: false)
# rubocop: disable Rails/FindBy
# This is not where..first from ActiveRecord
where(
repo: repo,
ref: ref,
path: path,
- limit: 1
+ limit: 1,
+ literal_pathspec: literal_pathspec
).first
# rubocop: enable Rails/FindBy
end
diff --git a/lib/gitlab/git/diff.rb b/lib/gitlab/git/diff.rb
index bb845f11181..09a49b6c1ca 100644
--- a/lib/gitlab/git/diff.rb
+++ b/lib/gitlab/git/diff.rb
@@ -224,18 +224,18 @@ module Gitlab
end
end
- def init_from_gitaly(diff)
- @diff = diff.respond_to?(:patch) ? encode!(diff.patch) : ''
- @new_path = encode!(diff.to_path.dup)
- @old_path = encode!(diff.from_path.dup)
- @a_mode = diff.old_mode.to_s(8)
- @b_mode = diff.new_mode.to_s(8)
- @new_file = diff.from_id == BLANK_SHA
- @renamed_file = diff.from_path != diff.to_path
- @deleted_file = diff.to_id == BLANK_SHA
- @too_large = diff.too_large if diff.respond_to?(:too_large)
-
- collapse! if diff.respond_to?(:collapsed) && diff.collapsed
+ def init_from_gitaly(gitaly_diff)
+ @diff = gitaly_diff.respond_to?(:patch) ? encode!(gitaly_diff.patch) : ''
+ @new_path = encode!(gitaly_diff.to_path.dup)
+ @old_path = encode!(gitaly_diff.from_path.dup)
+ @a_mode = gitaly_diff.old_mode.to_s(8)
+ @b_mode = gitaly_diff.new_mode.to_s(8)
+ @new_file = gitaly_diff.from_id == BLANK_SHA
+ @renamed_file = gitaly_diff.from_path != gitaly_diff.to_path
+ @deleted_file = gitaly_diff.to_id == BLANK_SHA
+ @too_large = gitaly_diff.too_large if gitaly_diff.respond_to?(:too_large)
+
+ collapse! if gitaly_diff.respond_to?(:collapsed) && gitaly_diff.collapsed
end
def prune_diff_if_eligible
diff --git a/lib/gitlab/git/repository.rb b/lib/gitlab/git/repository.rb
index ed746163748..ea7a6e84195 100644
--- a/lib/gitlab/git/repository.rb
+++ b/lib/gitlab/git/repository.rb
@@ -127,9 +127,9 @@ module Gitlab
end
end
- def local_branches(sort_by: nil)
+ def local_branches(sort_by: nil, pagination_params: nil)
wrapped_gitaly_errors do
- gitaly_ref_client.local_branches(sort_by: sort_by)
+ gitaly_ref_client.local_branches(sort_by: sort_by, pagination_params: pagination_params)
end
end
@@ -1002,15 +1002,21 @@ module Gitlab
end
end
- def list_last_commits_for_tree(sha, path, offset: 0, limit: 25)
+ def list_last_commits_for_tree(sha, path, offset: 0, limit: 25, literal_pathspec: false)
wrapped_gitaly_errors do
- gitaly_commit_client.list_last_commits_for_tree(sha, path, offset: offset, limit: limit)
+ gitaly_commit_client.list_last_commits_for_tree(sha, path, offset: offset, limit: limit, literal_pathspec: literal_pathspec)
end
end
- def last_commit_for_path(sha, path)
+ def list_commits_by_ref_name(refs)
wrapped_gitaly_errors do
- gitaly_commit_client.last_commit_for_path(sha, path)
+ gitaly_commit_client.list_commits_by_ref_name(refs)
+ end
+ end
+
+ def last_commit_for_path(sha, path, literal_pathspec: false)
+ wrapped_gitaly_errors do
+ gitaly_commit_client.last_commit_for_path(sha, path, literal_pathspec: literal_pathspec)
end
end
diff --git a/lib/gitlab/git/wiki.rb b/lib/gitlab/git/wiki.rb
index 3025fc6bfdb..76771f0417b 100644
--- a/lib/gitlab/git/wiki.rb
+++ b/lib/gitlab/git/wiki.rb
@@ -101,6 +101,10 @@ module Gitlab
wrapped_gitaly_errors do
gitaly_find_page(title: title, version: version, dir: dir)
end
+ rescue Gitlab::Git::CommandError
+ # Return nil for invalid versions.
+ # This can be removed with https://gitlab.com/gitlab-org/gitaly/-/merge_requests/2323 in place.
+ nil
end
def file(name, version)
diff --git a/lib/gitlab/git_ref_validator.rb b/lib/gitlab/git_ref_validator.rb
index dfff6823689..1330b06bf9c 100644
--- a/lib/gitlab/git_ref_validator.rb
+++ b/lib/gitlab/git_ref_validator.rb
@@ -19,7 +19,7 @@ module Gitlab
begin
Rugged::Reference.valid_name?("refs/heads/#{ref_name}")
rescue ArgumentError
- return false
+ false
end
end
@@ -35,7 +35,7 @@ module Gitlab
begin
Rugged::Reference.valid_name?(expanded_name)
rescue ArgumentError
- return false
+ false
end
end
end
diff --git a/lib/gitlab/gitaly_client.rb b/lib/gitlab/gitaly_client.rb
index bed99ef0ed4..b284aadc107 100644
--- a/lib/gitlab/gitaly_client.rb
+++ b/lib/gitlab/gitaly_client.rb
@@ -166,20 +166,7 @@ module Gitlab
# "gitaly-2 is at network address tcp://10.0.1.2:8075".
#
def self.call(storage, service, rpc, request, remote_storage: nil, timeout: default_timeout, &block)
- self.measure_timings(service, rpc, request) do
- self.execute(storage, service, rpc, request, remote_storage: remote_storage, timeout: timeout, &block)
- end
- end
-
- # This method is like GitalyClient.call but should be used with
- # Gitaly streaming RPCs. It measures how long the the RPC took to
- # produce the full response, not just the initial response.
- def self.streaming_call(storage, service, rpc, request, remote_storage: nil, timeout: default_timeout)
- self.measure_timings(service, rpc, request) do
- response = self.execute(storage, service, rpc, request, remote_storage: remote_storage, timeout: timeout)
-
- yield(response)
- end
+ Gitlab::GitalyClient::Call.new(storage, service, rpc, request, remote_storage, timeout).call(&block)
end
def self.execute(storage, service, rpc, request, remote_storage:, timeout:)
@@ -192,23 +179,6 @@ module Gitlab
stub(service, storage).__send__(rpc, request, kwargs) # rubocop:disable GitlabSecurity/PublicSend
end
- def self.measure_timings(service, rpc, request)
- start = Gitlab::Metrics::System.monotonic_time
-
- yield
- ensure
- duration = Gitlab::Metrics::System.monotonic_time - start
- request_hash = request.is_a?(Google::Protobuf::MessageExts) ? request.to_h : {}
-
- # Keep track, separately, for the performance bar
- self.add_query_time(duration)
-
- if Gitlab::PerformanceBar.enabled_for_request?
- add_call_details(feature: "#{service}##{rpc}", duration: duration, request: request_hash, rpc: rpc,
- backtrace: Gitlab::BacktraceCleaner.clean_backtrace(caller))
- end
- end
-
def self.query_time
query_time = Gitlab::SafeRequestStore[:gitaly_query_time] || 0
query_time.round(Gitlab::InstrumentationHelper::DURATION_PRECISION)
diff --git a/lib/gitlab/gitaly_client/blob_service.rb b/lib/gitlab/gitaly_client/blob_service.rb
index 8c704c2ceea..c66b3335d89 100644
--- a/lib/gitlab/gitaly_client/blob_service.rb
+++ b/lib/gitlab/gitaly_client/blob_service.rb
@@ -16,27 +16,7 @@ module Gitlab
limit: limit
)
response = GitalyClient.call(@gitaly_repo.storage_name, :blob_service, :get_blob, request, timeout: GitalyClient.fast_timeout)
-
- data = []
- blob = nil
- response.each do |msg|
- if blob.nil?
- blob = msg
- end
-
- data << msg.data
- end
-
- return if blob.oid.blank?
-
- data = data.join
-
- Gitlab::Git::Blob.new(
- id: blob.oid,
- size: blob.size,
- data: data,
- binary: Gitlab::Git::Blob.binary?(data)
- )
+ consume_blob_response(response)
end
def batch_lfs_pointers(blob_ids)
@@ -48,7 +28,6 @@ module Gitlab
)
response = GitalyClient.call(@gitaly_repo.storage_name, :blob_service, :get_lfs_pointers, request, timeout: GitalyClient.medium_timeout)
-
map_lfs_pointers(response)
end
@@ -70,8 +49,7 @@ module Gitlab
:blob_service,
:get_blobs,
request,
- timeout: GitalyClient.fast_timeout
- )
+ timeout: GitalyClient.fast_timeout)
GitalyClient::BlobsStitcher.new(response)
end
@@ -96,7 +74,6 @@ module Gitlab
request,
timeout: GitalyClient.fast_timeout
)
-
map_blob_types(response)
end
@@ -127,7 +104,6 @@ module Gitlab
request,
timeout: timeout
)
-
map_lfs_pointers(response)
end
@@ -137,12 +113,34 @@ module Gitlab
)
response = GitalyClient.call(@gitaly_repo.storage_name, :blob_service, :get_all_lfs_pointers, request, timeout: GitalyClient.medium_timeout)
-
map_lfs_pointers(response)
end
private
+ def consume_blob_response(response)
+ data = []
+ blob = nil
+ response.each do |msg|
+ if blob.nil?
+ blob = msg
+ end
+
+ data << msg.data
+ end
+
+ return if blob.oid.blank?
+
+ data = data.join
+
+ Gitlab::Git::Blob.new(
+ id: blob.oid,
+ size: blob.size,
+ data: data,
+ binary: Gitlab::Git::Blob.binary?(data)
+ )
+ end
+
def map_lfs_pointers(response)
response.flat_map do |message|
message.lfs_pointers.map do |lfs_pointer|
diff --git a/lib/gitlab/gitaly_client/call.rb b/lib/gitlab/gitaly_client/call.rb
new file mode 100644
index 00000000000..9d4d86997ad
--- /dev/null
+++ b/lib/gitlab/gitaly_client/call.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GitalyClient
+ class Call
+ def initialize(storage, service, rpc, request, remote_storage, timeout)
+ @storage = storage
+ @service = service
+ @rpc = rpc
+ @request = request
+ @remote_storage = remote_storage
+ @timeout = timeout
+ @duration = 0
+ end
+
+ def call(&block)
+ response = recording_request do
+ GitalyClient.execute(@storage, @service, @rpc, @request, remote_storage: @remote_storage, timeout: @timeout, &block)
+ end
+
+ if response.is_a?(Enumerator)
+ # When the given response is an enumerator (coming from streamed
+ # responses), we wrap it in order to properly measure the stream
+ # consumption as it happens.
+ #
+ # store_timings is not called in that scenario as it needs to be
+ # handled lazily in the custom Enumerator context.
+ instrument_stream(response)
+ else
+ store_timings
+ response
+ end
+ rescue => err
+ store_timings
+ raise err
+ end
+
+ private
+
+ def instrument_stream(response)
+ Enumerator.new do |yielder|
+ loop do
+ value = recording_request { response.next }
+
+ yielder.yield(value)
+ end
+ ensure
+ store_timings
+ end
+ end
+
+ def recording_request
+ start = Gitlab::Metrics::System.monotonic_time
+
+ yield
+ ensure
+ @duration += Gitlab::Metrics::System.monotonic_time - start
+ end
+
+ def store_timings
+ GitalyClient.add_query_time(@duration)
+
+ return unless Gitlab::PerformanceBar.enabled_for_request?
+
+ request_hash = @request.is_a?(Google::Protobuf::MessageExts) ? @request.to_h : {}
+
+ GitalyClient.add_call_details(feature: "#{@service}##{@rpc}", duration: @duration, request: request_hash, rpc: @rpc,
+ backtrace: Gitlab::BacktraceCleaner.clean_backtrace(caller))
+ end
+ end
+ end
+end
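
A note on the streaming path above: Enumerator#next raises StopIteration once the underlying Gitaly stream is exhausted, Kernel#loop rescues it, and the wrapper's ensure block runs store_timings with the accumulated duration. A condensed, standalone sketch of the same pattern (not GitLab code):

  upstream = [1, 2, 3].each                 # stands in for the streamed Gitaly response
  timed = Enumerator.new do |yielder|
    loop { yielder.yield(upstream.next) }   # StopIteration from #next terminates the loop
  ensure
    puts 'stream fully consumed'            # analogous to store_timings above
  end
  timed.to_a  # => [1, 2, 3], printing once the upstream is drained
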
diff --git a/lib/gitlab/gitaly_client/cleanup_service.rb b/lib/gitlab/gitaly_client/cleanup_service.rb
index e2293d3121a..649aaa46362 100644
--- a/lib/gitlab/gitaly_client/cleanup_service.rb
+++ b/lib/gitlab/gitaly_client/cleanup_service.rb
@@ -13,15 +13,14 @@ module Gitlab
end
def apply_bfg_object_map_stream(io, &blk)
- responses = GitalyClient.call(
+ response = GitalyClient.call(
storage,
:cleanup_service,
:apply_bfg_object_map_stream,
build_object_map_enum(io),
timeout: GitalyClient.long_timeout
)
-
- responses.each(&blk)
+ response.each(&blk)
end
private
diff --git a/lib/gitlab/gitaly_client/commit_service.rb b/lib/gitlab/gitaly_client/commit_service.rb
index aed132aaca0..464d2519b27 100644
--- a/lib/gitlab/gitaly_client/commit_service.rb
+++ b/lib/gitlab/gitaly_client/commit_service.rb
@@ -73,7 +73,6 @@ module Gitlab
def commit_deltas(commit)
request = Gitaly::CommitDeltaRequest.new(diff_from_parent_request_params(commit))
response = GitalyClient.call(@repository.storage, :diff_service, :commit_delta, request, timeout: GitalyClient.fast_timeout)
-
response.flat_map { |msg| msg.deltas }
end
@@ -162,13 +161,14 @@ module Gitlab
[response.left_count, response.right_count]
end
- def list_last_commits_for_tree(revision, path, offset: 0, limit: 25)
+ def list_last_commits_for_tree(revision, path, offset: 0, limit: 25, literal_pathspec: false)
request = Gitaly::ListLastCommitsForTreeRequest.new(
repository: @gitaly_repo,
revision: encode_binary(revision),
path: encode_binary(path.to_s),
offset: offset,
- limit: limit
+ limit: limit,
+ global_options: parse_global_options!(literal_pathspec: literal_pathspec)
)
response = GitalyClient.call(@repository.storage, :commit_service, :list_last_commits_for_tree, request, timeout: GitalyClient.medium_timeout)
@@ -180,11 +180,12 @@ module Gitlab
end
end
- def last_commit_for_path(revision, path)
+ def last_commit_for_path(revision, path, literal_pathspec: false)
request = Gitaly::LastCommitForPathRequest.new(
repository: @gitaly_repo,
revision: encode_binary(revision),
- path: encode_binary(path.to_s)
+ path: encode_binary(path.to_s),
+ global_options: parse_global_options!(literal_pathspec: literal_pathspec)
)
gitaly_commit = GitalyClient.call(@repository.storage, :commit_service, :last_commit_for_path, request, timeout: GitalyClient.fast_timeout).commit
@@ -200,9 +201,8 @@ module Gitlab
to: to
)
- GitalyClient.streaming_call(@repository.storage, :commit_service, :commits_between, request, timeout: GitalyClient.medium_timeout) do |response|
- consume_commits_response(response)
- end
+ response = GitalyClient.call(@repository.storage, :commit_service, :commits_between, request, timeout: GitalyClient.medium_timeout)
+ consume_commits_response(response)
end
def diff_stats(left_commit_sha, right_commit_sha)
@@ -212,9 +212,8 @@ module Gitlab
right_commit_id: right_commit_sha
)
- GitalyClient.streaming_call(@repository.storage, :diff_service, :diff_stats, request, timeout: GitalyClient.medium_timeout) do |response|
- response.flat_map(&:stats)
- end
+ response = GitalyClient.call(@repository.storage, :diff_service, :diff_stats, request, timeout: GitalyClient.medium_timeout)
+ response.flat_map(&:stats)
end
def find_all_commits(opts = {})
@@ -226,9 +225,8 @@ module Gitlab
)
request.order = opts[:order].upcase if opts[:order].present?
- GitalyClient.streaming_call(@repository.storage, :commit_service, :find_all_commits, request, timeout: GitalyClient.medium_timeout) do |response|
- consume_commits_response(response)
- end
+ response = GitalyClient.call(@repository.storage, :commit_service, :find_all_commits, request, timeout: GitalyClient.medium_timeout)
+ consume_commits_response(response)
end
def list_commits_by_oid(oids)
@@ -236,26 +234,25 @@ module Gitlab
request = Gitaly::ListCommitsByOidRequest.new(repository: @gitaly_repo, oid: oids)
- GitalyClient.streaming_call(@repository.storage, :commit_service, :list_commits_by_oid, request, timeout: GitalyClient.medium_timeout) do |response|
- consume_commits_response(response)
- end
+ response = GitalyClient.call(@repository.storage, :commit_service, :list_commits_by_oid, request, timeout: GitalyClient.medium_timeout)
+ consume_commits_response(response)
rescue GRPC::NotFound # If no repository is found, happens mainly during testing
[]
end
- def commits_by_message(query, revision: '', path: '', limit: 1000, offset: 0)
+ def commits_by_message(query, revision: '', path: '', limit: 1000, offset: 0, literal_pathspec: true)
request = Gitaly::CommitsByMessageRequest.new(
repository: @gitaly_repo,
query: query,
revision: encode_binary(revision),
path: encode_binary(path),
limit: limit.to_i,
- offset: offset.to_i
+ offset: offset.to_i,
+ global_options: parse_global_options!(literal_pathspec: literal_pathspec)
)
- GitalyClient.streaming_call(@repository.storage, :commit_service, :commits_by_message, request, timeout: GitalyClient.medium_timeout) do |response|
- consume_commits_response(response)
- end
+ response = GitalyClient.call(@repository.storage, :commit_service, :commits_by_message, request, timeout: GitalyClient.medium_timeout)
+ consume_commits_response(response)
end
def languages(ref = nil)
@@ -320,6 +317,7 @@ module Gitlab
skip_merges: options[:skip_merges],
all: !!options[:all],
first_parent: !!options[:first_parent],
+ global_options: parse_global_options!(options),
disable_walk: true # This option is deprecated. The 'walk' implementation is being removed.
)
request.after = GitalyClient.timestamp(options[:after]) if options[:after]
@@ -330,9 +328,8 @@ module Gitlab
request.paths = encode_repeated(Array(options[:path])) if options[:path].present?
- GitalyClient.streaming_call(@repository.storage, :commit_service, :find_commits, request, timeout: GitalyClient.medium_timeout) do |response|
- consume_commits_response(response)
- end
+ response = GitalyClient.call(@repository.storage, :commit_service, :find_commits, request, timeout: GitalyClient.medium_timeout)
+ consume_commits_response(response)
end
def filter_shas_with_signatures(shas)
@@ -349,7 +346,6 @@ module Gitlab
end
response = GitalyClient.call(@repository.storage, :commit_service, :filter_shas_with_signatures, enum, timeout: GitalyClient.fast_timeout)
-
response.flat_map do |msg|
msg.shas.map { |sha| EncodingHelper.encode!(sha) }
end
@@ -390,8 +386,28 @@ module Gitlab
messages
end
+ def list_commits_by_ref_name(refs)
+ request = Gitaly::ListCommitsByRefNameRequest
+ .new(repository: @gitaly_repo, ref_names: refs.map { |ref| encode_binary(ref) })
+
+ response = GitalyClient.call(@repository.storage, :commit_service, :list_commits_by_ref_name, request, timeout: GitalyClient.medium_timeout)
+
+ commit_refs = response.flat_map do |message|
+ message.commit_refs.map do |commit_ref|
+ [encode_utf8(commit_ref.ref_name), Gitlab::Git::Commit.new(@repository, commit_ref.commit)]
+ end
+ end
+
+ Hash[commit_refs]
+ end
+
private
+ def parse_global_options!(options)
+ literal_pathspec = options.delete(:literal_pathspec)
+ Gitaly::GlobalOptions.new(literal_pathspecs: literal_pathspec)
+ end
+
def call_commit_diff(request_params, options = {})
request_params[:ignore_whitespace_change] = options.fetch(:ignore_whitespace_change, false)
request_params[:enforce_limits] = options.fetch(:limits, true)
diff --git a/lib/gitlab/gitaly_client/conflicts_service.rb b/lib/gitlab/gitaly_client/conflicts_service.rb
index f7eb4b45197..6f08dcc69b6 100644
--- a/lib/gitlab/gitaly_client/conflicts_service.rb
+++ b/lib/gitlab/gitaly_client/conflicts_service.rb
@@ -21,7 +21,6 @@ module Gitlab
their_commit_oid: @their_commit_oid
)
response = GitalyClient.call(@repository.storage, :conflicts_service, :list_conflict_files, request, timeout: GitalyClient.long_timeout)
-
GitalyClient::ConflictFilesStitcher.new(response, @gitaly_repo)
end
diff --git a/lib/gitlab/gitaly_client/operation_service.rb b/lib/gitlab/gitaly_client/operation_service.rb
index 9ed4b2da09a..87505418ae9 100644
--- a/lib/gitlab/gitaly_client/operation_service.rb
+++ b/lib/gitlab/gitaly_client/operation_service.rb
@@ -178,6 +178,10 @@ module Gitlab
timeout: GitalyClient.long_timeout
)
+ if response.pre_receive_error.present?
+ raise Gitlab::Git::PreReceiveError.new("GL-HOOK-ERR: pre-receive hook failed.")
+ end
+
Gitlab::Git::OperationService::BranchUpdate.from_gitaly(response.branch_update)
rescue GRPC::FailedPrecondition => e
raise Gitlab::Git::CommitError, e
diff --git a/lib/gitlab/gitaly_client/ref_service.rb b/lib/gitlab/gitaly_client/ref_service.rb
index 63def4e29c9..97b6813c080 100644
--- a/lib/gitlab/gitaly_client/ref_service.rb
+++ b/lib/gitlab/gitaly_client/ref_service.rb
@@ -15,14 +15,12 @@ module Gitlab
def branches
request = Gitaly::FindAllBranchesRequest.new(repository: @gitaly_repo)
response = GitalyClient.call(@storage, :ref_service, :find_all_branches, request, timeout: GitalyClient.fast_timeout)
-
consume_find_all_branches_response(response)
end
def remote_branches(remote_name)
request = Gitaly::FindAllRemoteBranchesRequest.new(repository: @gitaly_repo, remote_name: remote_name)
- response = GitalyClient.call(@repository.storage, :ref_service, :find_all_remote_branches, request, timeout: GitalyClient.medium_timeout)
-
+ response = GitalyClient.call(@storage, :ref_service, :find_all_remote_branches, request, timeout: GitalyClient.medium_timeout)
consume_find_all_remote_branches_response(remote_name, response)
end
@@ -33,7 +31,6 @@ module Gitlab
merged_branches: branch_names.map { |s| encode_binary(s) }
)
response = GitalyClient.call(@storage, :ref_service, :find_all_branches, request, timeout: GitalyClient.fast_timeout)
-
consume_find_all_branches_response(response)
end
@@ -71,10 +68,9 @@ module Gitlab
commit_id: newrev
)
- response = GitalyClient
- .call(@storage, :ref_service, :list_new_commits, request, timeout: GitalyClient.medium_timeout)
-
commits = []
+
+ response = GitalyClient.call(@storage, :ref_service, :list_new_commits, request, timeout: GitalyClient.medium_timeout)
response.each do |msg|
msg.commits.each do |c|
commits << Gitlab::Git::Commit.new(@repository, c)
@@ -98,9 +94,7 @@ module Gitlab
GitalyClient.medium_timeout
end
- response = GitalyClient
- .call(@storage, :ref_service, :list_new_blobs, request, timeout: timeout)
-
+ response = GitalyClient.call(@storage, :ref_service, :list_new_blobs, request, timeout: timeout)
response.flat_map do |msg|
# Returns an Array of Gitaly::NewBlobObject objects
# Available methods are: #size, #oid and #path
@@ -116,8 +110,8 @@ module Gitlab
branch_names.count
end
- def local_branches(sort_by: nil)
- request = Gitaly::FindLocalBranchesRequest.new(repository: @gitaly_repo)
+ def local_branches(sort_by: nil, pagination_params: nil)
+ request = Gitaly::FindLocalBranchesRequest.new(repository: @gitaly_repo, pagination_params: pagination_params)
request.sort_by = sort_by_param(sort_by) if sort_by
response = GitalyClient.call(@storage, :ref_service, :find_local_branches, request, timeout: GitalyClient.fast_timeout)
consume_find_local_branches_response(response)
@@ -171,9 +165,8 @@ module Gitlab
limit: limit
)
- stream = GitalyClient.call(@repository.storage, :ref_service, :list_tag_names_containing_commit, request, timeout: GitalyClient.medium_timeout)
-
- consume_ref_contains_sha_response(stream, :tag_names)
+ response = GitalyClient.call(@storage, :ref_service, :list_tag_names_containing_commit, request, timeout: GitalyClient.medium_timeout)
+ consume_ref_contains_sha_response(response, :tag_names)
end
# Limit: 0 implies no limit, thus all tag names will be returned
@@ -184,18 +177,16 @@ module Gitlab
limit: limit
)
- stream = GitalyClient.call(@repository.storage, :ref_service, :list_branch_names_containing_commit, request, timeout: GitalyClient.medium_timeout)
-
- consume_ref_contains_sha_response(stream, :branch_names)
+ response = GitalyClient.call(@storage, :ref_service, :list_branch_names_containing_commit, request, timeout: GitalyClient.medium_timeout)
+ consume_ref_contains_sha_response(response, :branch_names)
end
def get_tag_messages(tag_ids)
request = Gitaly::GetTagMessagesRequest.new(repository: @gitaly_repo, tag_ids: tag_ids)
- response = GitalyClient.call(@repository.storage, :ref_service, :get_tag_messages, request, timeout: GitalyClient.fast_timeout)
-
messages = Hash.new { |h, k| h[k] = +''.b }
current_tag_id = nil
+ response = GitalyClient.call(@storage, :ref_service, :get_tag_messages, request, timeout: GitalyClient.fast_timeout)
response.each do |rpc_message|
current_tag_id = rpc_message.tag_id if rpc_message.tag_id.present?
diff --git a/lib/gitlab/gitaly_client/remote_service.rb b/lib/gitlab/gitaly_client/remote_service.rb
index 4566c59bbe0..06aaf460751 100644
--- a/lib/gitlab/gitaly_client/remote_service.rb
+++ b/lib/gitlab/gitaly_client/remote_service.rb
@@ -8,9 +8,11 @@ module Gitlab
MAX_MSG_SIZE = 128.kilobytes.freeze
def self.exists?(remote_url)
- request = Gitaly::FindRemoteRepositoryRequest.new(remote: remote_url)
+ storage = GitalyClient.random_storage
- response = GitalyClient.call(GitalyClient.random_storage,
+ request = Gitaly::FindRemoteRepositoryRequest.new(remote: remote_url, storage_name: storage)
+
+ response = GitalyClient.call(storage,
:remote_service,
:find_remote_repository, request,
timeout: GitalyClient.medium_timeout)
diff --git a/lib/gitlab/gitaly_client/repository_service.rb b/lib/gitlab/gitaly_client/repository_service.rb
index f74c9ea4192..20ad6d0184b 100644
--- a/lib/gitlab/gitaly_client/repository_service.rb
+++ b/lib/gitlab/gitaly_client/repository_service.rb
@@ -201,9 +201,9 @@ module Gitlab
response = GitalyClient.call(@storage, :repository_service, :fsck, request, timeout: GitalyClient.long_timeout)
if response.error.empty?
- return "", 0
+ ["", 0]
else
- return response.error.b, 1
+ [response.error.b, 1]
end
end
@@ -335,7 +335,6 @@ module Gitlab
def search_files_by_content(ref, query, options = {})
request = Gitaly::SearchFilesByContentRequest.new(repository: @gitaly_repo, ref: ref, query: query)
response = GitalyClient.call(@storage, :repository_service, :search_files_by_content, request, timeout: GitalyClient.default_timeout)
-
search_results_from_response(response, options)
end
@@ -410,7 +409,10 @@ module Gitlab
request,
timeout: timeout
)
+ write_stream_to_file(response, save_path)
+ end
+ def write_stream_to_file(response, save_path)
File.open(save_path, 'wb') do |f|
response.each do |message|
f.write(message.data)
diff --git a/lib/gitlab/gl_repository.rb b/lib/gitlab/gl_repository.rb
index abd4e847a50..7346de13626 100644
--- a/lib/gitlab/gl_repository.rb
+++ b/lib/gitlab/gl_repository.rb
@@ -43,10 +43,10 @@ module Gitlab
end
def self.parse(gl_repository)
- result = ::Gitlab::GlRepository::Identifier.new(gl_repository)
+ identifier = ::Gitlab::GlRepository::Identifier.parse(gl_repository)
- repo_type = result.repo_type
- container = result.fetch_container!
+ repo_type = identifier.repo_type
+ container = identifier.container
[container, repo_type.project_for(container), repo_type]
end
diff --git a/lib/gitlab/gl_repository/identifier.rb b/lib/gitlab/gl_repository/identifier.rb
index dc3e7931696..57350b1edb0 100644
--- a/lib/gitlab/gl_repository/identifier.rb
+++ b/lib/gitlab/gl_repository/identifier.rb
@@ -3,71 +3,83 @@
module Gitlab
class GlRepository
class Identifier
- attr_reader :gl_repository, :repo_type
+ include Gitlab::Utils::StrongMemoize
- def initialize(gl_repository)
- @gl_repository = gl_repository
- @segments = gl_repository.split('-')
+ InvalidIdentifier = Class.new(ArgumentError)
- raise_error if segments.size > 3
+ def self.parse(gl_repository)
+ segments = gl_repository&.split('-')
- @repo_type = find_repo_type
- @container_id = find_container_id
- @container_class = find_container_class
- end
+ # gl_repository can either have 2 or 3 segments:
+ #
+ # TODO: convert all 2-segment format to 3-segment:
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/219192
+ identifier = case segments&.size
+ when 2
+ TwoPartIdentifier.new(*segments)
+ when 3
+ ThreePartIdentifier.new(*segments)
+ end
+
+ return identifier if identifier&.valid?
- def fetch_container!
- container_class.find_by_id(container_id)
+ raise InvalidIdentifier, %Q(Invalid GL Repository "#{gl_repository}")
end
- private
+ # The older 2-segment format, where the container is implied.
+ # eg. project-1, wiki-1
+ class TwoPartIdentifier < Identifier
+ def initialize(repo_type_name, container_id_str)
+ @container_id_str = container_id_str
+ @repo_type_name = repo_type_name
+ end
- attr_reader :segments, :container_class, :container_id
+ private
- def find_repo_type
- type_name = three_segments_format? ? segments.last : segments.first
- type = Gitlab::GlRepository.types[type_name]
+ def container_class
+ repo_type.container_class
+ end
+ end
- raise_error unless type
+ # The newer 3-segment format, where the container is explicit
+ # eg. group-1-wiki, project-1-wiki
+ class ThreePartIdentifier < Identifier
+ def initialize(container_type, container_id_str, repo_type_name)
+ @container_id_str = container_id_str
+ @container_type = container_type
+ @repo_type_name = repo_type_name
+ end
- type
- end
+ private
- def find_container_class
- if three_segments_format?
- case segments[0]
+ def container_class
+ case @container_type
when 'project'
Project
when 'group'
Group
- else
- raise_error
end
- else
- repo_type.container_class
end
end
- def find_container_id
- id = Integer(segments[1], 10, exception: false)
-
- raise_error unless id
+ def repo_type
+ strong_memoize(:repo_type) { Gitlab::GlRepository.types[repo_type_name] }
+ end
- id
+ def container
+ strong_memoize(:container) { container_class.find_by_id(container_id) }
end
- # gl_repository can either have 2 or 3 segments:
- # "wiki-1" is the older 2-segment format, where container is implied.
- # "group-1-wiki" is the newer 3-segment format, including container information.
- #
- # TODO: convert all 2-segment format to 3-segment:
- # https://gitlab.com/gitlab-org/gitlab/-/issues/219192
- def three_segments_format?
- segments.size == 3
+ def valid?
+ repo_type.present? && container_class.present? && container_id&.positive?
end
- def raise_error
- raise ArgumentError, "Invalid GL Repository \"#{gl_repository}\""
+ private
+
+ attr_reader :container_id_str, :repo_type_name
+
+ def container_id
+ strong_memoize(:container_id) { Integer(container_id_str, 10, exception: false) }
end
end
end
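
Illustrative parses under the reworked Identifier API (the IDs are arbitrary): a two-part identifier infers the container from the repo type, a three-part one names it explicitly, and anything unrecognised raises InvalidIdentifier.

  id = Gitlab::GlRepository::Identifier.parse('project-7')
  id.container   # Project.find_by_id(7)

  id = Gitlab::GlRepository::Identifier.parse('group-1-wiki')
  id.container   # Group.find_by_id(1), with the wiki repo type

  Gitlab::GlRepository::Identifier.parse('not-a-repo')
  # raises Gitlab::GlRepository::Identifier::InvalidIdentifier
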
diff --git a/lib/gitlab/global_id.rb b/lib/gitlab/global_id.rb
index cc82b6c5897..e8a6006dce1 100644
--- a/lib/gitlab/global_id.rb
+++ b/lib/gitlab/global_id.rb
@@ -2,6 +2,8 @@
module Gitlab
module GlobalId
+ CoerceError = Class.new(ArgumentError)
+
def self.build(object = nil, model_name: nil, id: nil, params: nil)
if object
model_name ||= object.class.name
@@ -10,5 +12,20 @@ module Gitlab
::URI::GID.build(app: GlobalID.app, model_name: model_name, model_id: id, params: params)
end
+
+ def self.as_global_id(value, model_name: nil)
+ case value
+ when GlobalID
+ value
+ when URI::GID
+ GlobalID.new(value)
+ when Integer
+ raise CoerceError, 'Cannot coerce Integer' unless model_name.present?
+
+ GlobalID.new(::Gitlab::GlobalId.build(model_name: model_name, id: value))
+ else
+ raise CoerceError, "Invalid ID. Cannot coerce instances of #{value.class}"
+ end
+ end
end
end
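
A sketch of the coercion rules added above, assuming GitLab's default GlobalID app name ('gitlab'); the model name is an example:

  Gitlab::GlobalId.as_global_id(GlobalID.parse('gid://gitlab/User/1'))  # passed through unchanged
  Gitlab::GlobalId.as_global_id(42, model_name: 'User')                 # => GlobalID for gid://gitlab/User/42
  Gitlab::GlobalId.as_global_id(42)         # raises CoerceError: model_name is required for Integers
  Gitlab::GlobalId.as_global_id('User/42')  # raises CoerceError: strings are not coerced
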
diff --git a/lib/gitlab/graphql/authorize/authorize_resource.rb b/lib/gitlab/graphql/authorize/authorize_resource.rb
index 94871498cf8..27673e5c27a 100644
--- a/lib/gitlab/graphql/authorize/authorize_resource.rb
+++ b/lib/gitlab/graphql/authorize/authorize_resource.rb
@@ -30,8 +30,7 @@ module Gitlab
end
def authorized_find!(*args)
- object = find_object(*args)
- object = object.sync if object.respond_to?(:sync)
+ object = Graphql::Lazy.force(find_object(*args))
authorize!(object)
diff --git a/lib/gitlab/graphql/lazy.rb b/lib/gitlab/graphql/lazy.rb
new file mode 100644
index 00000000000..a7f7610a041
--- /dev/null
+++ b/lib/gitlab/graphql/lazy.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Graphql
+ class Lazy
+ # Force evaluation of a (possibly) lazy value
+ def self.force(value)
+ case value
+ when ::BatchLoader::GraphQL
+ value.sync
+ when ::Concurrent::Promise
+ value.execute.value
+ else
+ value
+ end
+ end
+ end
+ end
+end
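
Lazy.force simply unwraps the known lazy wrappers and returns everything else untouched, for example:

  Gitlab::Graphql::Lazy.force(5)                              # => 5
  Gitlab::Graphql::Lazy.force(Concurrent::Promise.new { 5 })  # => 5 (executed and awaited)
  # BatchLoader::GraphQL values are resolved via #sync in the same way.
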
diff --git a/lib/gitlab/graphql/loaders/issuable_loader.rb b/lib/gitlab/graphql/loaders/issuable_loader.rb
new file mode 100644
index 00000000000..1cc0fbe215f
--- /dev/null
+++ b/lib/gitlab/graphql/loaders/issuable_loader.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Graphql
+ module Loaders
+ class IssuableLoader
+ attr_reader :parent, :issuable_finder
+
+ BatchKey = Struct.new(:parent, :finder_class, :current_user)
+
+ def initialize(parent, issuable_finder)
+ @parent = parent
+ @issuable_finder = issuable_finder
+ end
+
+ def batching_find_all(&with_query)
+ if issuable_finder.params.keys == ['iids']
+ batch_load_issuables(issuable_finder.params[:iids], with_query)
+ else
+ post_process(find_all, with_query)
+ end
+ end
+
+ def find_all
+ issuable_finder.params[parent_param] = parent if parent
+
+ issuable_finder.execute
+ end
+
+ private
+
+ def parent_param
+ case parent
+ when Project
+ :project_id
+ when Group
+ :group_id
+ else
+ raise "Unexpected parent: #{parent.class}"
+ end
+ end
+
+ def post_process(query, with_query)
+ if with_query
+ with_query.call(query)
+ else
+ query
+ end
+ end
+
+ def batch_load_issuables(iids, with_query)
+ Array.wrap(iids).map { |iid| batch_load(iid, with_query) }
+ end
+
+ def batch_load(iid, with_query)
+ return if parent.nil?
+
+ BatchLoader::GraphQL
+ .for([parent_param, iid.to_s])
+ .batch(key: batch_key) do |params, loader, args|
+ batch_key = args[:key]
+ user = batch_key.current_user
+
+ params.group_by(&:first).each do |key, group|
+ iids = group.map(&:second).uniq
+ args = { key => batch_key.parent, iids: iids }
+ query = batch_key.finder_class.new(user, args).execute
+
+ post_process(query, with_query).each do |item|
+ loader.call([key, item.iid.to_s], item)
+ end
+ end
+ end
+ end
+
+ def batch_key
+ BatchKey.new(parent, issuable_finder.class, issuable_finder.current_user)
+ end
+ end
+ end
+ end
+end
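
How the loader is expected to be driven (the finder, `project` and `current_user` below are illustrative assumptions): when the finder's only parameter is 'iids', each iid becomes a BatchLoader item keyed by parent and finder class, so many GraphQL fields resolve with one finder call per parent; any other filter falls back to an eager find_all.

  finder = IssuesFinder.new(current_user, iids: ['17'])                    # assumed finder
  loader = Gitlab::Graphql::Loaders::IssuableLoader.new(project, finder)
  loader.batching_find_all  # lazy, batched per parent when only 'iids' is given
  loader.find_all           # eager: injects project_id into the params and calls finder.execute
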
diff --git a/lib/gitlab/graphql/mount_mutation.rb b/lib/gitlab/graphql/mount_mutation.rb
index b10e963170a..8e507ba4531 100644
--- a/lib/gitlab/graphql/mount_mutation.rb
+++ b/lib/gitlab/graphql/mount_mutation.rb
@@ -13,6 +13,14 @@ module Gitlab
mutation: mutation_class,
**custom_kwargs
end
+
+ def mount_aliased_mutation(alias_name, mutation_class, **custom_kwargs)
+ aliased_mutation_class = Class.new(mutation_class) do
+ graphql_name alias_name
+ end
+
+ mount_mutation(aliased_mutation_class, **custom_kwargs)
+ end
end
end
end
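
mount_aliased_mutation registers an existing mutation class under a second GraphQL name by subclassing it; a hypothetical use inside a type that includes Gitlab::Graphql::MountMutation (the names are examples, not from this patch):

  # e.g. in Types::MutationType
  mount_aliased_mutation 'DismissVulnerability', Mutations::Vulnerabilities::Dismiss
  # The alias gets its own graphql_name, so its input and payload types are
  # derived from 'DismissVulnerability' rather than the original class.
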
diff --git a/lib/gitlab/graphql/query_analyzers/logger_analyzer.rb b/lib/gitlab/graphql/query_analyzers/logger_analyzer.rb
index 6f705239fa3..6b6bb72eb31 100644
--- a/lib/gitlab/graphql/query_analyzers/logger_analyzer.rb
+++ b/lib/gitlab/graphql/query_analyzers/logger_analyzer.rb
@@ -52,8 +52,7 @@ module Gitlab
end
def duration(time_started)
- nanoseconds = Gitlab::Metrics::System.monotonic_time - time_started
- nanoseconds * 1000000
+ Gitlab::Metrics::System.monotonic_time - time_started
end
def default_initial_values(query)
diff --git a/lib/gitlab/health_checks/probes/collection.rb b/lib/gitlab/health_checks/probes/collection.rb
index db3ef4834c2..08b6d82291e 100644
--- a/lib/gitlab/health_checks/probes/collection.rb
+++ b/lib/gitlab/health_checks/probes/collection.rb
@@ -20,6 +20,12 @@ module Gitlab
success ? 200 : 503,
status(success).merge(payload(readiness))
)
+ rescue => e
+ exception_payload = { message: "#{e.class} : #{e.message}" }
+
+ Probes::Status.new(
+ 500,
+ status(false).merge(exception_payload))
end
private
diff --git a/lib/gitlab/import/metrics.rb b/lib/gitlab/import/metrics.rb
index 76638a8cf86..2692ab2fa12 100644
--- a/lib/gitlab/import/metrics.rb
+++ b/lib/gitlab/import/metrics.rb
@@ -1,59 +1,54 @@
# frozen_string_literal: true
-# Prepend `Gitlab::Import::Metrics` to a class in order
-# to measure and emit `Gitlab::Metrics` metrics of specified methods.
-#
-# @example
-# class Importer
-# prepend Gitlab::Import::Metrics
-#
-# Gitlab::ImportExport::Metrics.measure :execute, metrics: {
-# importer_counter: {
-# type: :counter,
-# description: 'counter'
-# },
-# importer_histogram: {
-# type: :histogram,
-# labels: { importer: 'importer' },
-# description: 'histogram'
-# }
-# }
-#
-# def execute
-# ...
-# end
-# end
-#
-# Each call to `#execute` increments `importer_counter` as well as
-# measures `#execute` duration and reports histogram `importer_histogram`
module Gitlab
module Import
- module Metrics
- def self.measure(method_name, metrics:)
- define_method "#{method_name}" do |*args|
- start_time = Time.zone.now
+ class Metrics
+ IMPORT_DURATION_BUCKETS = [0.5, 1, 3, 5, 10, 60, 120, 240, 360, 720, 1440].freeze
- result = super(*args)
+ attr_reader :importer
- end_time = Time.zone.now
+ def initialize(importer, project)
+ @importer = importer
+ @project = project
+ end
+
+ def track_finished_import
+ duration = Time.zone.now - @project.created_at
+
+ duration_histogram.observe({ importer: importer }, duration)
+ projects_counter.increment
+ end
- report_measurement_metrics(metrics, end_time - start_time)
+ def projects_counter
+ @projects_counter ||= Gitlab::Metrics.counter(
+ :"#{importer}_imported_projects_total",
+ 'The number of imported projects'
+ )
+ end
+
+ def issues_counter
+ @issues_counter ||= Gitlab::Metrics.counter(
+ :"#{importer}_imported_issues_total",
+ 'The number of imported issues'
+ )
+ end
- result
- end
+ def merge_requests_counter
+ @merge_requests_counter ||= Gitlab::Metrics.counter(
+ :"#{importer}_imported_merge_requests_total",
+ 'The number of imported merge (pull) requests'
+ )
end
- def report_measurement_metrics(metrics, duration)
- metrics.each do |metric_name, metric_value|
- case metric_value[:type]
- when :counter
- Gitlab::Metrics.counter(metric_name, metric_value[:description]).increment
- when :histogram
- Gitlab::Metrics.histogram(metric_name, metric_value[:description]).observe(metric_value[:labels], duration)
- else
- nil
- end
- end
+ private
+
+ def duration_histogram
+ @duration_histogram ||= Gitlab::Metrics.histogram(
+ :"#{importer}_total_duration_seconds",
+ 'Total time spent importing projects, in seconds',
+ {},
+ IMPORT_DURATION_BUCKETS
+ )
end
end
end
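
The rewritten Metrics class is instantiated per importer rather than prepended onto it; a usage sketch (the importer name is an example):

  metrics = Gitlab::Import::Metrics.new(:github_importer, project)
  metrics.track_finished_import        # observes github_importer_total_duration_seconds and
                                       # increments github_importer_imported_projects_total
  metrics.issues_counter.increment
  metrics.merge_requests_counter.increment
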
diff --git a/lib/gitlab/import_export/json/streaming_serializer.rb b/lib/gitlab/import_export/json/streaming_serializer.rb
index 20f9c668b9c..05b7679e0ff 100644
--- a/lib/gitlab/import_export/json/streaming_serializer.rb
+++ b/lib/gitlab/import_export/json/streaming_serializer.rb
@@ -7,7 +7,7 @@ module Gitlab
include Gitlab::ImportExport::CommandLineUtil
BATCH_SIZE = 100
- SMALLER_BATCH_SIZE = 20
+ SMALLER_BATCH_SIZE = 2
def self.batch_size(exportable)
if Feature.enabled?(:export_reduce_relation_batch_size, exportable)
@@ -69,8 +69,16 @@ module Gitlab
key_preloads = preloads&.dig(key)
records = records.preload(key_preloads) if key_preloads
- records.find_each(batch_size: batch_size) do |record|
- items << Raw.new(record.to_json(options))
+ records.in_batches(of: batch_size) do |batch| # rubocop:disable Cop/InBatches
+ # order each batch by its primary key to ensure
+ # consistent and predictable ordering of each exported relation
+ # as additional `WHERE` clauses can impact the order in which data is being
+ # returned by the database when no `ORDER` is specified
+ batch = batch.reorder(batch.klass.primary_key)
+
+ batch.each do |record|
+ items << Raw.new(record.to_json(options))
+ end
end
end
diff --git a/lib/gitlab/import_export/project/import_export.yml b/lib/gitlab/import_export/project/import_export.yml
index f0b733d7e95..aa961bd8d19 100644
--- a/lib/gitlab/import_export/project/import_export.yml
+++ b/lib/gitlab/import_export/project/import_export.yml
@@ -89,7 +89,6 @@ tree:
- :triggers
- :pipeline_schedules
- :container_expiration_policy
- - :services
- protected_branches:
- :merge_access_levels
- :push_access_levels
@@ -169,6 +168,7 @@ excluded_attributes:
- :marked_for_deletion_by_user_id
- :compliance_framework_setting
- :show_default_award_emojis
+ - :services
namespaces:
- :runners_token
- :runners_token_encrypted
@@ -261,10 +261,6 @@ excluded_attributes:
runners:
- :token
- :token_encrypted
- services:
- - :inherit_from_id
- - :instance
- - :template
error_tracking_setting:
- :encrypted_token
- :encrypted_token_iv
@@ -313,12 +309,14 @@ excluded_attributes:
- :merge_request_id
- :external_pull_request_id
- :ci_ref_id
+ - :locked
stages:
- :pipeline_id
merge_access_levels:
- :protected_branch_id
push_access_levels:
- :protected_branch_id
+ - :deploy_key_id
unprotect_access_levels:
- :protected_branch_id
create_access_levels:
@@ -353,8 +351,6 @@ methods:
- :type
statuses:
- :type
- services:
- - :type
merge_request_diff_files:
- :utf8_diff
merge_requests:
diff --git a/lib/gitlab/import_export/project/relation_factory.rb b/lib/gitlab/import_export/project/relation_factory.rb
index 3ab9f2c4bfa..ae92228276e 100644
--- a/lib/gitlab/import_export/project/relation_factory.rb
+++ b/lib/gitlab/import_export/project/relation_factory.rb
@@ -70,10 +70,8 @@ module Gitlab
private
def invalid_relation?
- # Do not create relation if it is:
- # - An unknown service
- # - A legacy trigger
- unknown_service? || legacy_trigger?
+ # Do not create relation if it is a legacy trigger
+ legacy_trigger?
end
def setup_models
@@ -137,11 +135,6 @@ module Gitlab
end
end
- def unknown_service?
- @relation_name == :services && parsed_relation_hash['type'] &&
- !Object.const_defined?(parsed_relation_hash['type'])
- end
-
def legacy_trigger?
@relation_name == :'Ci::Trigger' && @relation_hash['owner_id'].nil?
end
diff --git a/lib/gitlab/import_export/snippet_repo_restorer.rb b/lib/gitlab/import_export/snippet_repo_restorer.rb
index 31b1a37bbe1..2d0aa05fc3c 100644
--- a/lib/gitlab/import_export/snippet_repo_restorer.rb
+++ b/lib/gitlab/import_export/snippet_repo_restorer.rb
@@ -42,6 +42,8 @@ module Gitlab
snippet.repository.expire_exists_cache
raise SnippetRepositoryError, _("Invalid repository bundle for snippet with id %{snippet_id}") % { snippet_id: snippet.id }
+ else
+ Snippets::UpdateStatisticsService.new(snippet).execute
end
end
diff --git a/lib/gitlab/incident_management/pager_duty/incident_issue_description.rb b/lib/gitlab/incident_management/pager_duty/incident_issue_description.rb
new file mode 100644
index 00000000000..cd947b15154
--- /dev/null
+++ b/lib/gitlab/incident_management/pager_duty/incident_issue_description.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module IncidentManagement
+ module PagerDuty
+ class IncidentIssueDescription
+ def initialize(incident_payload)
+ @incident_payload = incident_payload
+ end
+
+ def to_s
+ markdown_line_break = " \n"
+
+ [
+ "**Incident:** #{markdown_incident}",
+ "**Incident number:** #{incident_payload['incident_number']}",
+ "**Urgency:** #{incident_payload['urgency']}",
+ "**Status:** #{incident_payload['status']}",
+ "**Incident key:** #{incident_payload['incident_key']}",
+ "**Created at:** #{markdown_incident_created_at}",
+ "**Assignees:** #{markdown_assignees.join(', ')}",
+ "**Impacted services:** #{markdown_impacted_services.join(', ')}"
+ ].join(markdown_line_break)
+ end
+
+ private
+
+ attr_reader :incident_payload
+
+ def markdown_incident
+ markdown_link(incident_payload['title'], incident_payload['url'])
+ end
+
+ def incident_created_at
+ Time.parse(incident_payload['created_at'])
+ rescue
+ Time.current.utc # PagerDuty provides time in UTC
+ end
+
+ def markdown_incident_created_at
+ incident_created_at.strftime('%d %B %Y, %-l:%M%p (%Z)')
+ end
+
+ def markdown_assignees
+ Array(incident_payload['assignees']).map do |assignee|
+ markdown_link(assignee['summary'], assignee['url'])
+ end
+ end
+
+ def markdown_impacted_services
+ Array(incident_payload['impacted_services']).map do |is|
+ markdown_link(is['summary'], is['url'])
+ end
+ end
+
+ def markdown_link(label, url)
+ return label if url.blank?
+
+ "[#{label}](#{url})"
+ end
+ end
+ end
+ end
+end
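
A sketch of the description this builder produces; the payload keys mirror the ones read above and the values are invented:

  payload = {
    'title' => 'Database is down',
    'url' => 'https://example.pagerduty.com/incidents/123',
    'incident_number' => 123,
    'urgency' => 'high',
    'status' => 'triggered',
    'incident_key' => 'db-outage',
    'created_at' => '2020-07-01T11:00:00Z',
    'assignees' => [{ 'summary' => 'Alice', 'url' => 'https://example.pagerduty.com/users/A1' }],
    'impacted_services' => [{ 'summary' => 'Production DB', 'url' => nil }]
  }
  Gitlab::IncidentManagement::PagerDuty::IncidentIssueDescription.new(payload).to_s
  # => one "**Label:** value" markdown line per field; entries with a blank URL
  #    (here 'Production DB') render as plain text instead of a link
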
diff --git a/lib/gitlab/instrumentation/elasticsearch_transport.rb b/lib/gitlab/instrumentation/elasticsearch_transport.rb
index deee0127c0c..56179eda22d 100644
--- a/lib/gitlab/instrumentation/elasticsearch_transport.rb
+++ b/lib/gitlab/instrumentation/elasticsearch_transport.rb
@@ -5,8 +5,10 @@ require 'elasticsearch-transport'
module Gitlab
module Instrumentation
module ElasticsearchTransportInterceptor
- def perform_request(*args)
+ def perform_request(method, path, params = {}, body = nil, headers = nil)
start = Time.now
+ headers = (headers || {})
+ .reverse_merge({ 'X-Opaque-Id': Labkit::Correlation::CorrelationId.current_or_new_id })
super
ensure
if ::Gitlab::SafeRequestStore.active?
@@ -14,7 +16,7 @@ module Gitlab
::Gitlab::Instrumentation::ElasticsearchTransport.increment_request_count
::Gitlab::Instrumentation::ElasticsearchTransport.add_duration(duration)
- ::Gitlab::Instrumentation::ElasticsearchTransport.add_call_details(duration, args)
+ ::Gitlab::Instrumentation::ElasticsearchTransport.add_call_details(duration, method, path, params, body)
end
end
end
@@ -47,14 +49,14 @@ module Gitlab
::Gitlab::SafeRequestStore[ELASTICSEARCH_CALL_DURATION] += duration
end
- def self.add_call_details(duration, args)
+ def self.add_call_details(duration, method, path, params, body)
return unless Gitlab::PerformanceBar.enabled_for_request?
detail_store << {
- method: args[0],
- path: args[1],
- params: args[2],
- body: args[3],
+ method: method,
+ path: path,
+ params: params,
+ body: body,
duration: duration,
backtrace: ::Gitlab::BacktraceCleaner.clean_backtrace(caller)
}
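The reverse_merge above injects a correlation ID only when the caller has not already supplied an X-Opaque-Id header; a standalone illustration of that merge direction:

    require 'active_support/core_ext/hash/reverse_merge'

    headers = { 'X-Opaque-Id': 'caller-supplied-id' }
    headers.reverse_merge('X-Opaque-Id': 'generated-correlation-id')
    # => { :"X-Opaque-Id" => "caller-supplied-id" }  (existing keys win over the defaults)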
diff --git a/lib/gitlab/instrumentation/redis.rb b/lib/gitlab/instrumentation/redis.rb
index 82b4701872f..4a85a313fd7 100644
--- a/lib/gitlab/instrumentation/redis.rb
+++ b/lib/gitlab/instrumentation/redis.rb
@@ -5,9 +5,9 @@ module Gitlab
# Aggregates Redis measurements from different request storage sources.
class Redis
ActionCable = Class.new(RedisBase)
- Cache = Class.new(RedisBase)
+ Cache = Class.new(RedisBase).enable_redis_cluster_validation
Queues = Class.new(RedisBase)
- SharedState = Class.new(RedisBase)
+ SharedState = Class.new(RedisBase).enable_redis_cluster_validation
STORAGES = [ActionCable, Cache, Queues, SharedState].freeze
diff --git a/lib/gitlab/instrumentation/redis_base.rb b/lib/gitlab/instrumentation/redis_base.rb
index 012543e1645..1df899747e0 100644
--- a/lib/gitlab/instrumentation/redis_base.rb
+++ b/lib/gitlab/instrumentation/redis_base.rb
@@ -25,9 +25,6 @@ module Gitlab
# redis-rb passes an array (e.g. [[:get, key]])
return unless args.length == 1
- # TODO: Add information about current Redis client
- # being instrumented.
- # https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/316.
detail_store << {
cmd: args.first,
duration: duration,
@@ -71,6 +68,40 @@ module Gitlab
query_time.round(::Gitlab::InstrumentationHelper::DURATION_PRECISION)
end
+ def redis_cluster_validate!(command)
+ ::Gitlab::Instrumentation::RedisClusterValidator.validate!(command) if @redis_cluster_validation
+ end
+
+ def enable_redis_cluster_validation
+ @redis_cluster_validation = true
+
+ self
+ end
+
+ def instance_count_request
+ @request_counter ||= Gitlab::Metrics.counter(:gitlab_redis_client_requests_total, 'Client side Redis request count, per Redis server')
+ @request_counter.increment({ storage: storage_key })
+ end
+
+ def instance_count_exception(ex)
+ # This metric is meant to give a client side view of how the Redis
+ # server is doing. Redis itself does not expose error counts. This
+ # metric can be used for Redis alerting and service health monitoring.
+ @exception_counter ||= Gitlab::Metrics.counter(:gitlab_redis_client_exceptions_total, 'Client side Redis exception count, per Redis server, per exception class')
+ @exception_counter.increment({ storage: storage_key, exception: ex.class.to_s })
+ end
+
+ def instance_observe_duration(duration)
+ @request_latency_histogram ||= Gitlab::Metrics.histogram(
+ :gitlab_redis_client_requests_duration_seconds,
+ 'Client side Redis request latency, per Redis server, excluding blocking commands',
+ {},
+ [0.005, 0.01, 0.1, 0.5]
+ )
+
+ @request_latency_histogram.observe({ storage: storage_key }, duration)
+ end
+
private
def request_count_key
diff --git a/lib/gitlab/instrumentation/redis_cluster_validator.rb b/lib/gitlab/instrumentation/redis_cluster_validator.rb
new file mode 100644
index 00000000000..6800e5667f6
--- /dev/null
+++ b/lib/gitlab/instrumentation/redis_cluster_validator.rb
@@ -0,0 +1,106 @@
+# frozen_string_literal: true
+
+require 'rails'
+require 'redis'
+
+module Gitlab
+ module Instrumentation
+ module RedisClusterValidator
+ # Generate with:
+ #
+ # Gitlab::Redis::Cache
+ # .with { |redis| redis.call('COMMAND') }
+ # .select { |command| command[3] != command[4] }
+ # .map { |command| [command[0].upcase, { first: command[3], last: command[4], step: command[5] }] }
+ # .sort_by(&:first)
+ # .to_h
+ #
+ MULTI_KEY_COMMANDS = {
+ "BITOP" => { first: 2, last: -1, step: 1 },
+ "BLPOP" => { first: 1, last: -2, step: 1 },
+ "BRPOP" => { first: 1, last: -2, step: 1 },
+ "BRPOPLPUSH" => { first: 1, last: 2, step: 1 },
+ "BZPOPMAX" => { first: 1, last: -2, step: 1 },
+ "BZPOPMIN" => { first: 1, last: -2, step: 1 },
+ "DEL" => { first: 1, last: -1, step: 1 },
+ "EXISTS" => { first: 1, last: -1, step: 1 },
+ "MGET" => { first: 1, last: -1, step: 1 },
+ "MSET" => { first: 1, last: -1, step: 2 },
+ "MSETNX" => { first: 1, last: -1, step: 2 },
+ "PFCOUNT" => { first: 1, last: -1, step: 1 },
+ "PFMERGE" => { first: 1, last: -1, step: 1 },
+ "RENAME" => { first: 1, last: 2, step: 1 },
+ "RENAMENX" => { first: 1, last: 2, step: 1 },
+ "RPOPLPUSH" => { first: 1, last: 2, step: 1 },
+ "SDIFF" => { first: 1, last: -1, step: 1 },
+ "SDIFFSTORE" => { first: 1, last: -1, step: 1 },
+ "SINTER" => { first: 1, last: -1, step: 1 },
+ "SINTERSTORE" => { first: 1, last: -1, step: 1 },
+ "SMOVE" => { first: 1, last: 2, step: 1 },
+ "SUNION" => { first: 1, last: -1, step: 1 },
+ "SUNIONSTORE" => { first: 1, last: -1, step: 1 },
+ "UNLINK" => { first: 1, last: -1, step: 1 },
+ "WATCH" => { first: 1, last: -1, step: 1 }
+ }.freeze
+
+ CrossSlotError = Class.new(StandardError)
+
+ class << self
+ def validate!(command)
+ return unless Rails.env.development? || Rails.env.test?
+ return if allow_cross_slot_commands?
+
+ command_name = command.first.to_s.upcase
+ argument_positions = MULTI_KEY_COMMANDS[command_name]
+
+ return unless argument_positions
+
+ arguments = command.flatten[argument_positions[:first]..argument_positions[:last]]
+
+ key_slots = arguments.each_slice(argument_positions[:step]).map do |args|
+ key_slot(args.first)
+ end
+
+ unless key_slots.uniq.length == 1
+ raise CrossSlotError.new("Redis command #{command_name} arguments hash to different slots. See https://docs.gitlab.com/ee/development/redis.html#multi-key-commands")
+ end
+ end
+
+ # Keep track of the call stack to allow nested calls to work.
+ def allow_cross_slot_commands
+ Thread.current[:allow_cross_slot_commands] ||= 0
+ Thread.current[:allow_cross_slot_commands] += 1
+
+ yield
+ ensure
+ Thread.current[:allow_cross_slot_commands] -= 1
+ end
+
+ private
+
+ def allow_cross_slot_commands?
+ Thread.current[:allow_cross_slot_commands].to_i > 0
+ end
+
+ def key_slot(key)
+ ::Redis::Cluster::KeySlotConverter.convert(extract_hash_tag(key))
+ end
+
+ # This is almost identical to Redis::Cluster::Command#extract_hash_tag,
+ # except that it returns the original string if no hash tag is found.
+ #
+ def extract_hash_tag(key)
+ s = key.index('{')
+
+ return key unless s
+
+ e = key.index('}', s + 1)
+
+ return key unless e
+
+ key[s + 1..e - 1]
+ end
+ end
+ end
+ end
+end
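A usage sketch for the validator, assuming a GitLab development or test environment (elsewhere validate! is a no-op):

    # MGET is a multi-key command; without a shared hash tag these keys will almost
    # certainly hash to different slots, so this raises RedisClusterValidator::CrossSlotError.
    Gitlab::Instrumentation::RedisClusterValidator.validate!([:mget, 'cache:a', 'cache:b'])

    # Keys sharing a hash tag ({user:1}) map to the same slot, so this passes.
    Gitlab::Instrumentation::RedisClusterValidator.validate!([:mget, '{user:1}:a', '{user:1}:b'])

    # Known, intentional cross-slot usage can be wrapped; the per-thread counter
    # above keeps nested blocks safe.
    Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
      Gitlab::Redis::Cache.with { |redis| redis.del('cache:a', 'cache:b') }
    end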
diff --git a/lib/gitlab/instrumentation/redis_interceptor.rb b/lib/gitlab/instrumentation/redis_interceptor.rb
index a36aade59c3..b5a5f8fd984 100644
--- a/lib/gitlab/instrumentation/redis_interceptor.rb
+++ b/lib/gitlab/instrumentation/redis_interceptor.rb
@@ -5,13 +5,26 @@ require 'redis'
module Gitlab
module Instrumentation
module RedisInterceptor
+ APDEX_EXCLUDE = %w[brpop blpop brpoplpush bzpopmin bzpopmax xread xreadgroup].freeze
+
def call(*args, &block)
- start = Time.now
+ start = Time.now # must come first so that 'start' is always defined
+ instrumentation_class.instance_count_request
+ instrumentation_class.redis_cluster_validate!(args.first)
+
super(*args, &block)
+ rescue ::Redis::BaseError => ex
+ instrumentation_class.instance_count_exception(ex)
+ raise ex
ensure
- duration = (Time.now - start)
+ duration = Time.now - start
+
+ unless APDEX_EXCLUDE.include?(command_from_args(args))
+ instrumentation_class.instance_observe_duration(duration)
+ end
if ::RequestStore.active?
+ # These metrics measure total Redis usage per Rails request / job.
instrumentation_class.increment_request_count
instrumentation_class.add_duration(duration)
instrumentation_class.add_call_details(duration, args)
@@ -77,6 +90,12 @@ module Gitlab
def instrumentation_class
@options[:instrumentation_class] # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
+
+ def command_from_args(args)
+ command = args[0]
+ command = command[0] if command.is_a?(Array)
+ command.to_s.downcase
+ end
end
end
end
diff --git a/lib/gitlab/issuable_metadata.rb b/lib/gitlab/issuable_metadata.rb
index e946fc00c4d..f96c937aec3 100644
--- a/lib/gitlab/issuable_metadata.rb
+++ b/lib/gitlab/issuable_metadata.rb
@@ -7,11 +7,13 @@ module Gitlab
# data structure to store issuable meta data like
# upvotes, downvotes, notes and closing merge requests counts for issues and merge requests
# this avoids N+1 queries when loading issuable collections on the frontend
- IssuableMeta = Struct.new(:upvotes, :downvotes, :user_notes_count, :mrs_count) do
- def merge_requests_count(user = nil)
- mrs_count
- end
- end
+ IssuableMeta = Struct.new(
+ :upvotes,
+ :downvotes,
+ :user_notes_count,
+ :merge_requests_count,
+ :blocking_issues_count # EE-ONLY
+ )
attr_reader :current_user, :issuable_collection
@@ -95,3 +97,5 @@ module Gitlab
end
end
end
+
+Gitlab::IssuableMetadata.prepend_if_ee('EE::Gitlab::IssuableMetadata')
diff --git a/lib/gitlab/jira_import/issue_serializer.rb b/lib/gitlab/jira_import/issue_serializer.rb
index df57680073e..43280606bb6 100644
--- a/lib/gitlab/jira_import/issue_serializer.rb
+++ b/lib/gitlab/jira_import/issue_serializer.rb
@@ -52,7 +52,9 @@ module Gitlab
end
def map_user_id(jira_user)
- Gitlab::JiraImport::UserMapper.new(project, jira_user).execute&.id
+ return unless jira_user&.dig('accountId')
+
+ Gitlab::JiraImport.get_user_mapping(project.id, jira_user['accountId'])
end
def reporter
diff --git a/lib/gitlab/jira_import/user_mapper.rb b/lib/gitlab/jira_import/user_mapper.rb
deleted file mode 100644
index 208ee49b724..00000000000
--- a/lib/gitlab/jira_import/user_mapper.rb
+++ /dev/null
@@ -1,53 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module JiraImport
- class UserMapper
- include ::Gitlab::Utils::StrongMemoize
-
- def initialize(project, jira_user)
- @project = project
- @jira_user = jira_user
- end
-
- def execute
- return unless jira_user
-
- email = jira_user['emailAddress']
-
- # We also include emails that are not yet confirmed
- users = User.by_any_email(email).to_a
-
- user = users.first
-
- # this event should never happen but we should log it in case we have invalid data
- log_user_mapping_message('Multiple users found for an email address', email) if users.count > 1
-
- unless project.project_member(user) || project.group&.group_member(user)
- log_user_mapping_message('Jira user not found', email)
-
- return
- end
-
- user
- end
-
- private
-
- attr_reader :project, :jira_user, :params
-
- def log_user_mapping_message(message, email)
- logger.info(
- project_id: project.id,
- project_path: project.full_path,
- user_email: email,
- message: message
- )
- end
-
- def logger
- @logger ||= Gitlab::Import::Logger.build
- end
- end
- end
-end
diff --git a/lib/gitlab/json.rb b/lib/gitlab/json.rb
index 5b6689dbefe..21f837c58bb 100644
--- a/lib/gitlab/json.rb
+++ b/lib/gitlab/json.rb
@@ -1,59 +1,224 @@
# frozen_string_literal: true
+# This is a GitLab-specific JSON interface. You should use this instead
+# of using `JSON` directly. This allows us to swap the adapter and handle
+# legacy issues.
+
module Gitlab
module Json
INVALID_LEGACY_TYPES = [String, TrueClass, FalseClass].freeze
class << self
- def parse(string, *args, **named_args)
- legacy_mode = legacy_mode_enabled?(named_args.delete(:legacy_mode))
- data = adapter.parse(string, *args, **named_args)
+ # Parse a string and convert it to a Ruby object
+ #
+ # @param string [String] the JSON string to convert to Ruby objects
+ # @param opts [Hash] an options hash in the standard JSON gem format
+ # @return [Boolean, String, Array, Hash]
+ # @raise [JSON::ParserError] raised if parsing fails
+ def parse(string, opts = {})
+ # First we should ensure this really is a string, not some other
+ # type which purports to be a string. This handles some legacy
+ # usage of the JSON class.
+ string = string.to_s unless string.is_a?(String)
+
+ legacy_mode = legacy_mode_enabled?(opts.delete(:legacy_mode))
+ data = adapter_load(string, opts)
handle_legacy_mode!(data) if legacy_mode
data
end
- def parse!(string, *args, **named_args)
- legacy_mode = legacy_mode_enabled?(named_args.delete(:legacy_mode))
- data = adapter.parse!(string, *args, **named_args)
+ alias_method :parse!, :parse
+
+ # Restricted method for converting a Ruby object to JSON. If you
+ # need to pass options to this, you should use `.generate` instead,
+ # as the underlying implementation of this varies wildly based on
+ # the adapter in use.
+ #
+ # @param object [Object] the object to convert to JSON
+ # @return [String]
+ def dump(object)
+ adapter_dump(object)
+ end
- handle_legacy_mode!(data) if legacy_mode
+ # Generates JSON for an object. In Oj this takes fewer options than .dump,
+ # in the JSON gem this is the only method which takes an options argument.
+ #
+ # @param object [Hash, Array, Object] must be hash, array, or an object that responds to .to_h or .to_json
+ # @param opts [Hash] an options hash with fewer supported settings than .dump
+ # @return [String]
+ def generate(object, opts = {})
+ adapter_generate(object, opts)
+ end
- data
+ # Generates JSON for an object and makes it look purdy
+ #
+ # The Oj variant in this looks seriously weird but these are the settings
+ # needed to emulate the style generated by the JSON gem.
+ #
+ # NOTE: This currently ignores Oj, because Oj doesn't generate identical
+ # formatting, issue: https://github.com/ohler55/oj/issues/608
+ #
+ # @param object [Hash, Array, Object] must be hash, array, or an object that responds to .to_h or .to_json
+ # @param opts [Hash] an options hash with fewer supported settings than .dump
+ # @return [String]
+ def pretty_generate(object, opts = {})
+ ::JSON.pretty_generate(object, opts)
end
- def dump(*args)
- adapter.dump(*args)
+ # Feature detection for using Oj instead of the `json` gem.
+ #
+ # @return [Boolean]
+ def enable_oj?
+ return false unless feature_table_exists?
+
+ Feature.enabled?(:oj_json, default_enabled: true)
end
- def generate(*args)
- adapter.generate(*args)
+ private
+
+ # Convert JSON string into Ruby through toggleable adapters.
+ #
+ # Must rescue adapter-specific errors and return `parser_error`, and
+ # must also standardize the options hash to support each adapter as
+ # they all take different options.
+ #
+ # @param string [String] the JSON string to convert to Ruby objects
+ # @param opts [Hash] an options hash in the standard JSON gem format
+ # @return [Boolean, String, Array, Hash]
+ # @raise [JSON::ParserError]
+ def adapter_load(string, *args, **opts)
+ opts = standardize_opts(opts)
+
+ if enable_oj?
+ Oj.load(string, opts)
+ else
+ ::JSON.parse(string, opts)
+ end
+ rescue Oj::ParseError, Encoding::UndefinedConversionError => ex
+ raise parser_error.new(ex)
end
- def pretty_generate(*args)
- adapter.pretty_generate(*args)
+ # Take a Ruby object and convert it to a string. This method varies
+ # based on the underlying JSON interpreter. Oj treats this like JSON
+ # treats `.generate`. JSON.dump takes no options.
+ #
+ # This supports these options to ensure this difference is recorded here,
+ # as it's very surprising. The public interface is more restrictive to
+ # prevent adapter-specific options being passed.
+ #
+ # @overload adapter_dump(object, opts)
+ # @param object [Object] the object to convert to JSON
+ # @param opts [Hash] options as named arguments, only supported by Oj
+ #
+ # @overload adapter_dump(object, anIO, limit)
+ # @param object [Object] the object, will have JSON.generate called on it
+ # @param anIO [Object] an IO-like object that responds to .write, default nil
+ # @param limit [Fixnum] the nested array/object limit, default nil
+ # @raise [ArgumentError] when depth limit exceeded
+ #
+ # @return [String]
+ def adapter_dump(object, *args, **opts)
+ if enable_oj?
+ Oj.dump(object, opts)
+ else
+ ::JSON.dump(object, *args)
+ end
end
- private
+ # Generates JSON for an object but with fewer options, using toggleable adapters.
+ #
+ # @param object [Hash, Array, Object] must be hash, array, or an object that responds to .to_h or .to_json
+ # @param opts [Hash] an options hash with fewer supported settings than .dump
+ # @return [String]
+ def adapter_generate(object, opts = {})
+ opts = standardize_opts(opts)
+
+ if enable_oj?
+ Oj.generate(object, opts)
+ else
+ ::JSON.generate(object, opts)
+ end
+ end
+
+ # Take a JSON standard options hash and standardize it to work across adapters
+ # An example of this is Oj taking :symbol_keys instead of :symbolize_names
+ #
+ # @param opts [Hash, Nil]
+ # @return [Hash]
+ def standardize_opts(opts)
+ opts ||= {}
- def adapter
- ::JSON
+ if enable_oj?
+ opts[:mode] = :rails
+ opts[:symbol_keys] = opts[:symbolize_keys] || opts[:symbolize_names]
+ end
+
+ opts
end
+ # The standard parser error we should be returning. Defined in a method
+ # so we can potentially override it later.
+ #
+ # @return [JSON::ParserError]
def parser_error
::JSON::ParserError
end
+ # @param arg_value [Boolean, nil] the extracted :legacy_mode key from the opts hash
+ # @return [Boolean]
def legacy_mode_enabled?(arg_value)
arg_value.nil? ? false : arg_value
end
+ # If legacy mode is enabled, we need to raise an error depending on the values
+ # provided in the string. This will be deprecated.
+ #
+ # @param data [Boolean, String, Array, Hash, Object]
+ # @return [Boolean, String, Array, Hash, Object]
+ # @raise [JSON::ParserError]
def handle_legacy_mode!(data)
+ return data unless feature_table_exists?
return data unless Feature.enabled?(:json_wrapper_legacy_mode, default_enabled: true)
raise parser_error if INVALID_LEGACY_TYPES.any? { |type| data.is_a?(type) }
end
+
+ # There are a variety of database errors possible when checking the feature
+ # flags at the wrong time during boot, e.g. during migrations. We don't care
+ # about these errors; we just need to ensure that we skip feature detection
+ # if they will fail.
+ #
+ # @return [Boolean]
+ def feature_table_exists?
+ Feature::FlipperFeature.table_exists?
+ rescue
+ false
+ end
+ end
+
+ # GrapeFormatter is a JSON formatter for the Grape API.
+ # This is set in lib/api/api.rb
+
+ class GrapeFormatter
+ # Convert an object to JSON.
+ #
+ # This will default to the built-in Grape formatter if either :oj_json or :grape_gitlab_json
+ # flags are disabled.
+ #
+ # The `env` param is ignored because it's not needed in either our formatter or Grape's,
+ # but it is passed through for consistency.
+ #
+ # @param object [Object]
+ # @return [String]
+ def self.call(object, env = nil)
+ if Gitlab::Json.enable_oj? && Feature.enabled?(:grape_gitlab_json, default_enabled: true)
+ Gitlab::Json.dump(object)
+ else
+ Grape::Formatter::Json.call(object, env)
+ end
+ end
end
end
end
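For callers the facade behaves like the JSON gem; a sketch from a GitLab Rails console (which adapter runs underneath is decided by the Oj feature flag and is invisible here):

    Gitlab::Json.parse('{"name":"gitlab"}')                         # => {"name"=>"gitlab"}
    Gitlab::Json.parse('{"name":"gitlab"}', symbolize_names: true)  # => {:name=>"gitlab"}

    # Legacy mode keeps rejecting bare scalars while the :json_wrapper_legacy_mode
    # flag is enabled (the default):
    Gitlab::Json.parse('"just a string"', legacy_mode: true)        # raises JSON::ParserError

    Gitlab::Json.dump('name' => 'gitlab')                           # => "{\"name\":\"gitlab\"}"
    Gitlab::Json.generate([1, 2, 3])                                # => "[1,2,3]"
    Gitlab::Json.pretty_generate('name' => 'gitlab')                # multi-line, JSON-gem style formatting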
diff --git a/lib/gitlab/json_logger.rb b/lib/gitlab/json_logger.rb
index ab34fb03158..3a74df8dc8f 100644
--- a/lib/gitlab/json_logger.rb
+++ b/lib/gitlab/json_logger.rb
@@ -19,7 +19,7 @@ module Gitlab
data.merge!(message)
end
- data.to_json + "\n"
+ Gitlab::Json.dump(data) + "\n"
end
end
end
diff --git a/lib/gitlab/kubernetes/helm.rb b/lib/gitlab/kubernetes/helm.rb
index 9507f7bc117..39bd8d5a01f 100644
--- a/lib/gitlab/kubernetes/helm.rb
+++ b/lib/gitlab/kubernetes/helm.rb
@@ -3,7 +3,7 @@
module Gitlab
module Kubernetes
module Helm
- HELM_VERSION = '2.16.6'
+ HELM_VERSION = '2.16.9'
KUBECTL_VERSION = '1.13.12'
NAMESPACE = 'gitlab-managed-apps'
NAMESPACE_LABELS = { 'app.gitlab.com/managed_by' => :gitlab }.freeze
diff --git a/lib/gitlab/kubernetes/node.rb b/lib/gitlab/kubernetes/node.rb
new file mode 100644
index 00000000000..bd765ef3852
--- /dev/null
+++ b/lib/gitlab/kubernetes/node.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Kubernetes
+ class Node
+ def initialize(cluster)
+ @cluster = cluster
+ end
+
+ def all
+ nodes.map do |node|
+ attributes = node(node)
+ attributes.merge(node_metrics(node))
+ end
+ end
+
+ private
+
+ attr_reader :cluster
+
+ def nodes_from_cluster
+ graceful_request { cluster.kubeclient.get_nodes }
+ end
+
+ def nodes_metrics_from_cluster
+ graceful_request { cluster.kubeclient.metrics_client.get_nodes }
+ end
+
+ def nodes
+ @nodes ||= nodes_from_cluster[:response].to_a
+ end
+
+ def nodes_metrics
+ @nodes_metrics ||= nodes_metrics_from_cluster[:response].to_a
+ end
+
+ def node_metrics_from_node(node)
+ nodes_metrics.find do |node_metric|
+ node_metric.metadata.name == node.metadata.name
+ end
+ end
+
+ def graceful_request(&block)
+ ::Gitlab::Kubernetes::KubeClient.graceful_request(cluster.id, &block)
+ end
+
+ def node(node)
+ {
+ 'metadata' => {
+ 'name' => node.metadata.name
+ },
+ 'status' => {
+ 'capacity' => {
+ 'cpu' => node.status.capacity.cpu,
+ 'memory' => node.status.capacity.memory
+ },
+ 'allocatable' => {
+ 'cpu' => node.status.allocatable.cpu,
+ 'memory' => node.status.allocatable.memory
+ }
+ }
+ }
+ end
+
+ def node_metrics(node)
+ node_metrics = node_metrics_from_node(node)
+ return {} unless node_metrics
+
+ {
+ 'usage' => {
+ 'cpu' => node_metrics.usage.cpu,
+ 'memory' => node_metrics.usage.memory
+ }
+ }
+ end
+ end
+ end
+end
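The shape returned by #all, assuming `cluster` is a Clusters::Cluster whose Kubernetes API and metrics endpoint both respond (values illustrative); nodes without metrics simply omit the 'usage' key:

    Gitlab::Kubernetes::Node.new(cluster).all
    # => [
    #      {
    #        'metadata' => { 'name' => 'gke-default-pool-abc123' },
    #        'status'   => {
    #          'capacity'    => { 'cpu' => '2',     'memory' => '7657228Ki' },
    #          'allocatable' => { 'cpu' => '1930m', 'memory' => '5777156Ki' }
    #        },
    #        'usage' => { 'cpu' => '144208183n', 'memory' => '1789048Ki' }
    #      }
    #    ]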
diff --git a/lib/gitlab/lograge/custom_options.rb b/lib/gitlab/lograge/custom_options.rb
index 17a36c292c0..e6dd87a8bec 100644
--- a/lib/gitlab/lograge/custom_options.rb
+++ b/lib/gitlab/lograge/custom_options.rb
@@ -20,8 +20,6 @@ module Gitlab
username: event.payload[:username],
ua: event.payload[:ua]
}
- add_db_counters!(payload)
-
payload.merge!(event.payload[:metadata]) if event.payload[:metadata]
::Gitlab::InstrumentationHelper.add_instrumentation_data(payload)
@@ -46,16 +44,6 @@ module Gitlab
payload
end
-
- def self.add_db_counters!(payload)
- current_transaction = Gitlab::Metrics::Transaction.current
- if current_transaction
- payload[:db_count] = current_transaction.get(:db_count, :counter).to_i
- payload[:db_write_count] = current_transaction.get(:db_write_count, :counter).to_i
- payload[:db_cached_count] = current_transaction.get(:db_cached_count, :counter).to_i
- end
- end
- private_class_method :add_db_counters!
end
end
end
diff --git a/lib/gitlab/marginalia/comment.rb b/lib/gitlab/marginalia/comment.rb
index a0eee823763..d5dae5ef4b3 100644
--- a/lib/gitlab/marginalia/comment.rb
+++ b/lib/gitlab/marginalia/comment.rb
@@ -26,9 +26,9 @@ module Gitlab
job = ::Marginalia::Comment.marginalia_job
# We are using 'Marginalia::SidekiqInstrumentation' which does not support 'ActiveJob::Base'.
- # Gitlab also uses 'ActionMailer::DeliveryJob' which inherits from ActiveJob::Base.
+ # Gitlab also uses 'ActionMailer::MailDeliveryJob' which inherits from ActiveJob::Base.
# So the condition below is used to return metadata for such jobs.
- if job && job.is_a?(ActionMailer::DeliveryJob)
+ if job.is_a?(ActionMailer::MailDeliveryJob) || job.is_a?(ActionMailer::DeliveryJob)
{
"class" => job.arguments.first,
"jid" => job.job_id
diff --git a/lib/gitlab/markdown_cache/redis/extension.rb b/lib/gitlab/markdown_cache/redis/extension.rb
index af3237f4ba6..add71fa120e 100644
--- a/lib/gitlab/markdown_cache/redis/extension.rb
+++ b/lib/gitlab/markdown_cache/redis/extension.rb
@@ -22,16 +22,32 @@ module Gitlab
end
end
- private
+ prepended do
+ def self.preload_markdown_cache!(objects)
+ fields = Gitlab::MarkdownCache::Redis::Store.bulk_read(objects)
- def save_markdown(updates)
- markdown_store.save(updates)
+ objects.each do |object|
+ fields[object.cache_key].value.each do |field_name, value|
+ object.write_markdown_field(field_name, value)
+ end
+ end
+ end
end
def write_markdown_field(field_name, value)
+ # The value read from redis is a string, so we're converting it back
+ # to an int.
+ value = value.to_i if field_name == :cached_markdown_version
+
instance_variable_set("@#{field_name}", value)
end
+ private
+
+ def save_markdown(updates)
+ markdown_store.save(updates)
+ end
+
def markdown_field_changed?(field_name)
false
end
diff --git a/lib/gitlab/markdown_cache/redis/store.rb b/lib/gitlab/markdown_cache/redis/store.rb
index 0f954404808..5a8efa34097 100644
--- a/lib/gitlab/markdown_cache/redis/store.rb
+++ b/lib/gitlab/markdown_cache/redis/store.rb
@@ -6,6 +6,20 @@ module Gitlab
class Store
EXPIRES_IN = 1.day
+ def self.bulk_read(subjects)
+ results = {}
+
+ Gitlab::Redis::Cache.with do |r|
+ r.pipelined do
+ subjects.each do |subject|
+ results[subject.cache_key] = new(subject).read
+ end
+ end
+ end
+
+ results
+ end
+
def initialize(subject)
@subject = subject
@loaded = false
@@ -23,13 +37,9 @@ module Gitlab
def read
@loaded = true
- results = Gitlab::Redis::Cache.with do |r|
+ Gitlab::Redis::Cache.with do |r|
r.mapped_hmget(markdown_cache_key, *fields)
end
- # The value read from redis is a string, so we're converting it back
- # to an int.
- results[:cached_markdown_version] = results[:cached_markdown_version].to_i
- results
end
def loaded?
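A hypothetical call site for the new preloading hook; SomeModel and its fields are illustrative stand-ins for any model that caches its rendered markdown in Redis via this extension:

    objects = SomeModel.recent                   # hypothetical finder
    SomeModel.preload_markdown_cache!(objects)   # one pipelined Redis round trip for the whole collection

    objects.each do |object|
      object.title_html                          # served from the preloaded fields, no per-object HMGET
    end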
diff --git a/lib/gitlab/metrics/background_transaction.rb b/lib/gitlab/metrics/background_transaction.rb
index fe1722b1095..7b05ae29b02 100644
--- a/lib/gitlab/metrics/background_transaction.rb
+++ b/lib/gitlab/metrics/background_transaction.rb
@@ -9,7 +9,7 @@ module Gitlab
end
def labels
- { controller: @worker_class.name, action: 'perform' }
+ { controller: @worker_class.name, action: 'perform', feature_category: @worker_class.try(:get_feature_category).to_s }
end
end
end
diff --git a/lib/gitlab/metrics/dashboard/errors.rb b/lib/gitlab/metrics/dashboard/errors.rb
index 264ea0488e7..07ddd315bcc 100644
--- a/lib/gitlab/metrics/dashboard/errors.rb
+++ b/lib/gitlab/metrics/dashboard/errors.rb
@@ -20,20 +20,20 @@ module Gitlab
when DashboardProcessingError
error(error.message, :unprocessable_entity)
when NOT_FOUND_ERROR
- error("#{dashboard_path} could not be found.", :not_found)
+ error(_("%{dashboard_path} could not be found.") % { dashboard_path: dashboard_path }, :not_found)
when PanelNotFoundError
error(error.message, :not_found)
when ::Grafana::Client::Error
error(error.message, :service_unavailable)
when MissingIntegrationError
- error('Proxy support for this API is not available currently', :bad_request)
+ error(_('Proxy support for this API is not available currently'), :bad_request)
else
raise error
end
end
def panels_not_found!(opts)
- raise PanelNotFoundError.new("No panels matching properties #{opts}")
+ raise PanelNotFoundError.new(_("No panels matching properties %{opts}") % { opts: opts })
end
end
end
diff --git a/lib/gitlab/metrics/dashboard/finder.rb b/lib/gitlab/metrics/dashboard/finder.rb
index d80985e0a0e..5e2d78e10a4 100644
--- a/lib/gitlab/metrics/dashboard/finder.rb
+++ b/lib/gitlab/metrics/dashboard/finder.rb
@@ -7,6 +7,19 @@ module Gitlab
module Metrics
module Dashboard
class Finder
+ # Dashboards that should not be part of the list of all dashboards
+ # displayed on the metrics dashboard page.
+ PREDEFINED_DASHBOARD_EXCLUSION_LIST = [
+ # This dashboard is only useful in the self monitoring project.
+ ::Metrics::Dashboard::SelfMonitoringDashboardService,
+
+ # This dashboard is displayed on the K8s cluster settings health page.
+ ::Metrics::Dashboard::ClusterDashboardService,
+
+ # This dashboard is not yet ready for the world.
+ ::Metrics::Dashboard::PodDashboardService
+ ].freeze
+
class << self
# Returns a formatted dashboard packed with DB info.
# @param project [Project]
@@ -67,12 +80,32 @@ module Gitlab
def find_all_paths_from_source(project)
Gitlab::Metrics::Dashboard::Cache.delete_all!
- default_dashboard_path(project)
- .+ project_service.all_dashboard_paths(project)
+ user_facing_dashboard_services(project).flat_map do |service|
+ service.all_dashboard_paths(project)
+ end
end
private
+ def user_facing_dashboard_services(project)
+ predefined_dashboard_services_for(project) + [project_service]
+ end
+
+ def predefined_dashboard_services_for(project)
+ # Only list the self monitoring dashboard on the self monitoring project,
+ # since it is the only dashboard (at time of writing) that shows data
+ # about GitLab itself.
+ if project.self_monitoring?
+ return [self_monitoring_service]
+ end
+
+ predefined_dashboard_services
+ end
+
+ def predefined_dashboard_services
+ ::Metrics::Dashboard::PredefinedDashboardService.descendants - PREDEFINED_DASHBOARD_EXCLUSION_LIST
+ end
+
def system_service
::Metrics::Dashboard::SystemDashboardService
end
@@ -85,14 +118,6 @@ module Gitlab
::Metrics::Dashboard::SelfMonitoringDashboardService
end
- def default_dashboard_path(project)
- if project.self_monitoring?
- self_monitoring_service.all_dashboard_paths(project)
- else
- system_service.all_dashboard_paths(project)
- end
- end
-
def service_for(options)
Gitlab::Metrics::Dashboard::ServiceSelector.call(options)
end
diff --git a/lib/gitlab/metrics/dashboard/service_selector.rb b/lib/gitlab/metrics/dashboard/service_selector.rb
index 49682da320c..641c0c76f8f 100644
--- a/lib/gitlab/metrics/dashboard/service_selector.rb
+++ b/lib/gitlab/metrics/dashboard/service_selector.rb
@@ -13,6 +13,8 @@ module Gitlab
include Gitlab::Utils::StrongMemoize
SERVICES = [
+ ::Metrics::Dashboard::ClusterMetricsEmbedService,
+ ::Metrics::Dashboard::ClusterDashboardService,
::Metrics::Dashboard::GitlabAlertEmbedService,
::Metrics::Dashboard::CustomMetricEmbedService,
::Metrics::Dashboard::GrafanaMetricEmbedService,
@@ -51,5 +53,3 @@ module Gitlab
end
end
end
-
-Gitlab::Metrics::Dashboard::ServiceSelector.prepend_if_ee('EE::Gitlab::Metrics::Dashboard::ServiceSelector')
diff --git a/lib/gitlab/metrics/dashboard/stages/base_stage.rb b/lib/gitlab/metrics/dashboard/stages/base_stage.rb
index 622d5aa8cdb..ee2d36621b4 100644
--- a/lib/gitlab/metrics/dashboard/stages/base_stage.rb
+++ b/lib/gitlab/metrics/dashboard/stages/base_stage.rb
@@ -48,6 +48,14 @@ module Gitlab
end
end
+ def for_variables
+ return unless dashboard.dig(:templating, :variables).is_a?(Hash)
+
+ dashboard.dig(:templating, :variables).each do |variable_name, variable|
+ yield variable_name, variable
+ end
+ end
+
def for_panel_groups
dashboard[:panel_groups].each do |panel_group|
yield panel_group
diff --git a/lib/gitlab/metrics/dashboard/stages/cluster_endpoint_inserter.rb b/lib/gitlab/metrics/dashboard/stages/cluster_endpoint_inserter.rb
new file mode 100644
index 00000000000..a12082b704c
--- /dev/null
+++ b/lib/gitlab/metrics/dashboard/stages/cluster_endpoint_inserter.rb
@@ -0,0 +1,83 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Metrics
+ module Dashboard
+ module Stages
+ class ClusterEndpointInserter < BaseStage
+ def transform!
+ verify_params
+
+ for_metrics do |metric|
+ metric[:prometheus_endpoint_path] = endpoint_for_metric(metric)
+ end
+ end
+
+ private
+
+ def admin_url(metric)
+ Gitlab::Routing.url_helpers.prometheus_api_admin_cluster_path(
+ params[:cluster],
+ proxy_path: query_type(metric),
+ query: query_for_metric(metric)
+ )
+ end
+
+ def endpoint_for_metric(metric)
+ case params[:cluster_type]
+ when :admin
+ admin_url(metric)
+ when :group
+ error!(_('Group is required when cluster_type is :group')) unless params[:group]
+ group_url(metric)
+ when :project
+ error!(_('Project is required when cluster_type is :project')) unless project
+ project_url(metric)
+ else
+ error!(_('Unrecognized cluster type'))
+ end
+ end
+
+ def error!(message)
+ raise Errors::DashboardProcessingError.new(message)
+ end
+
+ def group_url(metric)
+ Gitlab::Routing.url_helpers.prometheus_api_group_cluster_path(
+ params[:group],
+ params[:cluster],
+ proxy_path: query_type(metric),
+ query: query_for_metric(metric)
+ )
+ end
+
+ def project_url(metric)
+ Gitlab::Routing.url_helpers.prometheus_api_project_cluster_path(
+ project,
+ params[:cluster],
+ proxy_path: query_type(metric),
+ query: query_for_metric(metric)
+ )
+ end
+
+ def query_type(metric)
+ metric[:query] ? :query : :query_range
+ end
+
+ def query_for_metric(metric)
+ query = metric[query_type(metric)]
+
+ raise Errors::MissingQueryError.new('Each "metric" must define one of :query or :query_range') unless query
+
+ query
+ end
+
+ def verify_params
+ raise Errors::DashboardProcessingError.new(_('Cluster is required for Stages::ClusterEndpointInserter')) unless params[:cluster]
+ raise Errors::DashboardProcessingError.new(_('Cluster type must be specified for Stages::ClusterEndpointInserter')) unless params[:cluster_type]
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/metrics/dashboard/stages/endpoint_inserter.rb b/lib/gitlab/metrics/dashboard/stages/metric_endpoint_inserter.rb
index e085f551952..c48a7ff25a5 100644
--- a/lib/gitlab/metrics/dashboard/stages/endpoint_inserter.rb
+++ b/lib/gitlab/metrics/dashboard/stages/metric_endpoint_inserter.rb
@@ -4,9 +4,9 @@ module Gitlab
module Metrics
module Dashboard
module Stages
- class EndpointInserter < BaseStage
+ class MetricEndpointInserter < BaseStage
def transform!
- raise Errors::DashboardProcessingError.new('Environment is required for Stages::EndpointInserter') unless params[:environment]
+ raise Errors::DashboardProcessingError.new(_('Environment is required for Stages::MetricEndpointInserter')) unless params[:environment]
for_metrics do |metric|
metric[:prometheus_endpoint_path] = endpoint_for_metric(metric)
@@ -33,7 +33,11 @@ module Gitlab
end
def query_type(metric)
- metric[:query] ? :query : :query_range
+ if metric[:query]
+ ::Prometheus::ProxyService::PROMETHEUS_QUERY_API.to_sym
+ else
+ ::Prometheus::ProxyService::PROMETHEUS_QUERY_RANGE_API.to_sym
+ end
end
def query_for_metric(metric)
diff --git a/lib/gitlab/metrics/dashboard/stages/sorter.rb b/lib/gitlab/metrics/dashboard/stages/sorter.rb
index ba5aa78059c..882211e1441 100644
--- a/lib/gitlab/metrics/dashboard/stages/sorter.rb
+++ b/lib/gitlab/metrics/dashboard/stages/sorter.rb
@@ -16,7 +16,7 @@ module Gitlab
# Sorts the groups in the dashboard by the :priority key
def sort_groups!
- dashboard[:panel_groups] = dashboard[:panel_groups].sort_by { |group| -group[:priority].to_i }
+ dashboard[:panel_groups] = Gitlab::Utils.stable_sort_by(dashboard[:panel_groups]) { |group| -group[:priority].to_i }
end
# Sorts the panels in the dashboard by the :weight key
@@ -24,7 +24,7 @@ module Gitlab
dashboard[:panel_groups].each do |group|
missing_panels! unless group[:panels].is_a? Array
- group[:panels] = group[:panels].sort_by { |panel| -panel[:weight].to_i }
+ group[:panels] = Gitlab::Utils.stable_sort_by(group[:panels]) { |panel| -panel[:weight].to_i }
end
end
end
diff --git a/lib/gitlab/metrics/dashboard/stages/url_validator.rb b/lib/gitlab/metrics/dashboard/stages/url_validator.rb
index ff36f7b605e..9e2bb0d1a70 100644
--- a/lib/gitlab/metrics/dashboard/stages/url_validator.rb
+++ b/lib/gitlab/metrics/dashboard/stages/url_validator.rb
@@ -6,8 +6,47 @@ module Gitlab
module Stages
class UrlValidator < BaseStage
def transform!
- dashboard[:links]&.each do |link|
- Gitlab::UrlBlocker.validate!(link[:url])
+ validate_dashboard_links(dashboard)
+
+ validate_chart_links(dashboard)
+ end
+
+ private
+
+ def blocker_args
+ {
+ schemes: %w(http https),
+ ports: [],
+ allow_localhost: allow_setting_local_requests?,
+ allow_local_network: allow_setting_local_requests?,
+ ascii_only: false,
+ enforce_user: false,
+ enforce_sanitization: false,
+ dns_rebind_protection: true
+ }
+ end
+
+ def allow_setting_local_requests?
+ Gitlab::CurrentSettings.allow_local_requests_from_web_hooks_and_services?
+ end
+
+ def validate_dashboard_links(dashboard)
+ validate_links(dashboard[:links])
+ end
+
+ def validate_chart_links(dashboard)
+ dashboard[:panel_groups].each do |panel_group|
+ panel_group[:panels].each do |panel|
+ validate_links(panel[:links])
+ end
+ end
+ end
+
+ def validate_links(links)
+ links&.each do |link|
+ next unless link.is_a? Hash
+
+ Gitlab::UrlBlocker.validate!(link[:url], blocker_args)
rescue Gitlab::UrlBlocker::BlockedUrlError
link[:url] = ''
end
diff --git a/lib/gitlab/metrics/dashboard/stages/variable_endpoint_inserter.rb b/lib/gitlab/metrics/dashboard/stages/variable_endpoint_inserter.rb
new file mode 100644
index 00000000000..20e7fe477e5
--- /dev/null
+++ b/lib/gitlab/metrics/dashboard/stages/variable_endpoint_inserter.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Metrics
+ module Dashboard
+ module Stages
+ class VariableEndpointInserter < BaseStage
+ VARIABLE_TYPE_METRIC_LABEL_VALUES = 'metric_label_values'
+
+ def transform!
+ raise Errors::DashboardProcessingError.new(_('Environment is required for Stages::VariableEndpointInserter')) unless params[:environment]
+
+ for_variables do |variable_name, variable|
+ if variable.is_a?(Hash) && variable[:type] == VARIABLE_TYPE_METRIC_LABEL_VALUES
+ variable[:options][:prometheus_endpoint_path] = endpoint_for_variable(variable.dig(:options, :series_selector))
+ end
+ end
+ end
+
+ private
+
+ def endpoint_for_variable(series_selector)
+ Gitlab::Routing.url_helpers.prometheus_api_project_environment_path(
+ project,
+ params[:environment],
+ proxy_path: ::Prometheus::ProxyService::PROMETHEUS_SERIES_API,
+ match: Array(series_selector)
+ )
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/metrics/dashboard/url.rb b/lib/gitlab/metrics/dashboard/url.rb
index 31670a3f533..10a2f3c2397 100644
--- a/lib/gitlab/metrics/dashboard/url.rb
+++ b/lib/gitlab/metrics/dashboard/url.rb
@@ -60,6 +60,22 @@ module Gitlab
Gitlab::Routing.url_helpers.metrics_dashboard_namespace_project_environment_url(*args)
end
+ # Matches dashboard urls for a metric chart embed
+ # for cluster metrics
+ #
+ # EX - https://<host>/<namespace>/<project>/-/clusters/<cluster_id>/?group=Cluster%20Health&title=Memory%20Usage&y_label=Memory%20(GiB)
+ def clusters_regex
+ strong_memoize(:clusters_regex) do
+ regex_for_project_metrics(
+ %r{
+ /clusters
+ /(?<cluster_id>\d+)
+ /?
+ }x
+ )
+ end
+ end
+
private
def regex_for_project_metrics(path_suffix_pattern)
diff --git a/lib/gitlab/metrics/methods.rb b/lib/gitlab/metrics/methods.rb
index 5955987541c..83a7b925392 100644
--- a/lib/gitlab/metrics/methods.rb
+++ b/lib/gitlab/metrics/methods.rb
@@ -35,7 +35,7 @@ module Gitlab
end
def init_metric(type, name, opts = {}, &block)
- options = MetricOptions.new(opts)
+ options = ::Gitlab::Metrics::Methods::MetricOptions.new(opts)
options.evaluate(&block)
if disabled_by_feature(options)
diff --git a/lib/gitlab/metrics/sidekiq_middleware.rb b/lib/gitlab/metrics/sidekiq_middleware.rb
index de8e1ca3256..1c99e1e730c 100644
--- a/lib/gitlab/metrics/sidekiq_middleware.rb
+++ b/lib/gitlab/metrics/sidekiq_middleware.rb
@@ -26,9 +26,7 @@ module Gitlab
private
def add_info_to_payload(payload, trans)
- payload[:db_count] = trans.get(:db_count, :counter).to_i
- payload[:db_write_count] = trans.get(:db_write_count, :counter).to_i
- payload[:db_cached_count] = trans.get(:db_cached_count, :counter).to_i
+ payload.merge!(::Gitlab::Metrics::Subscribers::ActiveRecord.db_counter_payload)
end
end
end
diff --git a/lib/gitlab/metrics/subscribers/active_record.rb b/lib/gitlab/metrics/subscribers/active_record.rb
index 1628eeb5a95..d2736882432 100644
--- a/lib/gitlab/metrics/subscribers/active_record.rb
+++ b/lib/gitlab/metrics/subscribers/active_record.rb
@@ -23,6 +23,14 @@ module Gitlab
increment_db_counters(payload)
end
+ def self.db_counter_payload
+ return {} unless Gitlab::SafeRequestStore.active?
+
+ DB_COUNTERS.map do |counter|
+ [counter, Gitlab::SafeRequestStore[counter].to_i]
+ end.to_h
+ end
+
private
define_histogram :gitlab_sql_duration_seconds do
@@ -36,13 +44,21 @@ module Gitlab
end
def increment_db_counters(payload)
- current_transaction.increment(:db_count, 1)
+ increment(:db_count)
if payload.fetch(:cached, payload[:name] == 'CACHE')
- current_transaction.increment(:db_cached_count, 1)
+ increment(:db_cached_count)
end
- current_transaction.increment(:db_write_count, 1) unless select_sql_command?(payload)
+ increment(:db_write_count) unless select_sql_command?(payload)
+ end
+
+ def increment(counter)
+ current_transaction.increment(counter, 1)
+
+ if Gitlab::SafeRequestStore.active?
+ Gitlab::SafeRequestStore[counter] = Gitlab::SafeRequestStore[counter].to_i + 1
+ end
end
def current_transaction
diff --git a/lib/gitlab/metrics/transaction.rb b/lib/gitlab/metrics/transaction.rb
index 822f5243e9d..da06be9c79c 100644
--- a/lib/gitlab/metrics/transaction.rb
+++ b/lib/gitlab/metrics/transaction.rb
@@ -7,7 +7,7 @@ module Gitlab
include Gitlab::Metrics::Methods
# base labels shared among all transactions
- BASE_LABELS = { controller: nil, action: nil }.freeze
+ BASE_LABELS = { controller: nil, action: nil, feature_category: nil }.freeze
# labels that potentially contain sensitive information and will be filtered
FILTERED_LABELS = [:branch, :path].freeze
@@ -92,12 +92,6 @@ module Gitlab
self.class.transaction_metric(name, :gauge).set(labels, value) if use_prometheus
end
- def get(name, type, tags = {})
- metric = self.class.transaction_metric(name, type)
-
- metric.get(filter_tags(tags).merge(labels))
- end
-
def labels
BASE_LABELS
end
diff --git a/lib/gitlab/metrics/web_transaction.rb b/lib/gitlab/metrics/web_transaction.rb
index fa17548723e..2064f9290d3 100644
--- a/lib/gitlab/metrics/web_transaction.rb
+++ b/lib/gitlab/metrics/web_transaction.rb
@@ -32,6 +32,10 @@ module Gitlab
action = "#{controller.action_name}"
+ # Try to get the feature category, but don't fail when the controller is
+ # not an ApplicationController.
+ feature_category = controller.class.try(:feature_category_for_action, action).to_s
+
# Devise exposes a method called "request_format" that does the below.
# However, this method is not available to all controllers (e.g. certain
# Doorkeeper controllers). As such we use the underlying code directly.
@@ -45,7 +49,7 @@ module Gitlab
action = "#{action}.#{suffix}"
end
- { controller: controller.class.name, action: action }
+ { controller: controller.class.name, action: action, feature_category: feature_category }
end
def labels_from_endpoint
@@ -61,7 +65,10 @@ module Gitlab
if route
path = endpoint_paths_cache[route.request_method][route.path]
- { controller: 'Grape', action: "#{route.request_method} #{path}" }
+
+ # Feature categories will be added for grape endpoints in
+ # https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/462
+ { controller: 'Grape', action: "#{route.request_method} #{path}", feature_category: '' }
end
end
diff --git a/lib/gitlab/middleware/go.rb b/lib/gitlab/middleware/go.rb
index abdbccd3aa8..47d0b9ba8cb 100644
--- a/lib/gitlab/middleware/go.rb
+++ b/lib/gitlab/middleware/go.rb
@@ -101,7 +101,7 @@ module Gitlab
if project
# If a project is found and the user has access, we return the full project path
- return project.full_path, project.default_branch
+ [project.full_path, project.default_branch]
else
# If not, we return the first two components as if it were a simple `namespace/project` path,
# so that we don't reveal the existence of a nested project the user doesn't have access to.
@@ -112,7 +112,7 @@ module Gitlab
# `go get gitlab.com/group/subgroup/project/subpackage` will not work for private projects.
# `go get gitlab.com/group/subgroup/project.git/subpackage` will work, since Go is smart enough
# to figure that out. `import 'gitlab.com/...'` behaves the same as `go get`.
- return simple_project_path, 'master'
+ [simple_project_path, 'master']
end
end
diff --git a/lib/gitlab/middleware/multipart.rb b/lib/gitlab/middleware/multipart.rb
index 3c45f841653..c0b671abd44 100644
--- a/lib/gitlab/middleware/multipart.rb
+++ b/lib/gitlab/middleware/multipart.rb
@@ -105,6 +105,21 @@ module Gitlab
private
+ def package_allowed_paths
+ packages_config = ::Gitlab.config.packages
+ return [] unless allow_packages_storage_path?(packages_config)
+
+ [::Packages::PackageFileUploader.workhorse_upload_path]
+ end
+
+ def allow_packages_storage_path?(packages_config)
+ return false unless packages_config.enabled
+ return false unless packages_config['storage_path']
+ return false if packages_config.object_store.enabled && packages_config.object_store.direct_upload
+
+ true
+ end
+
def allowed_paths
[
::FileUploader.root,
@@ -112,7 +127,7 @@ module Gitlab
JobArtifactUploader.workhorse_upload_path,
LfsObjectUploader.workhorse_upload_path,
File.join(Rails.root, 'public/uploads/tmp')
- ]
+ ] + package_allowed_paths
end
end
@@ -135,5 +150,3 @@ module Gitlab
end
end
end
-
-::Gitlab::Middleware::Multipart::Handler.prepend_if_ee('EE::Gitlab::Middleware::Multipart::Handler')
diff --git a/lib/gitlab/project_template.rb b/lib/gitlab/project_template.rb
index fdb3fbc03bc..e6e599e079d 100644
--- a/lib/gitlab/project_template.rb
+++ b/lib/gitlab/project_template.rb
@@ -59,6 +59,7 @@ module Gitlab
ProjectTemplate.new('nfhexo', 'Netlify/Hexo', _('A Hexo site that uses Netlify for CI/CD instead of GitLab, but still with all the other great GitLab features.'), 'https://gitlab.com/pages/nfhexo', 'illustrations/logos/netlify.svg'),
ProjectTemplate.new('salesforcedx', 'SalesforceDX', _('A project boilerplate for Salesforce App development with Salesforce Developer tools.'), 'https://gitlab.com/gitlab-org/project-templates/salesforcedx'),
ProjectTemplate.new('serverless_framework', 'Serverless Framework/JS', _('A basic page and serverless function that uses AWS Lambda, AWS API Gateway, and GitLab Pages'), 'https://gitlab.com/gitlab-org/project-templates/serverless-framework', 'illustrations/logos/serverless_framework.svg'),
+ ProjectTemplate.new('jsonnet', 'Jsonnet for Dynamic Child Pipelines', _('An example showing how to use Jsonnet with GitLab dynamic child pipelines'), 'https://gitlab.com/gitlab-org/project-templates/jsonnet'),
ProjectTemplate.new('cluster_management', 'GitLab Cluster Management', _('An example project for managing Kubernetes clusters integrated with GitLab.'), 'https://gitlab.com/gitlab-org/project-templates/cluster-management')
].freeze
end
diff --git a/lib/gitlab/prometheus_client.rb b/lib/gitlab/prometheus_client.rb
index 213e3ba835d..69499b5494e 100644
--- a/lib/gitlab/prometheus_client.rb
+++ b/lib/gitlab/prometheus_client.rb
@@ -5,6 +5,8 @@ module Gitlab
class PrometheusClient
include Gitlab::Utils::StrongMemoize
Error = Class.new(StandardError)
+ ConnectionError = Class.new(Gitlab::PrometheusClient::Error)
+ UnexpectedResponseError = Class.new(Gitlab::PrometheusClient::Error)
QueryError = Class.new(Gitlab::PrometheusClient::Error)
HEALTHY_RESPONSE = "Prometheus is Healthy.\n"
@@ -44,7 +46,7 @@ module Gitlab
path = api_path(type)
get(path, args)
rescue Gitlab::HTTP::ResponseError => ex
- raise PrometheusClient::Error, "Network connection error" unless ex.response && ex.response.try(:code)
+ raise PrometheusClient::ConnectionError, "Network connection error" unless ex.response && ex.response.try(:code)
handle_querying_api_response(ex.response)
end
@@ -115,7 +117,7 @@ module Gitlab
response = get(path, args)
handle_querying_api_response(response)
rescue Gitlab::HTTP::ResponseError => ex
- raise PrometheusClient::Error, "Network connection error" unless ex.response && ex.response.try(:code)
+ raise PrometheusClient::ConnectionError, "Network connection error" unless ex.response && ex.response.try(:code)
handle_querying_api_response(ex.response)
end
@@ -137,18 +139,18 @@ module Gitlab
def get(path, args)
Gitlab::HTTP.get(path, { query: args }.merge(http_options) )
rescue SocketError
- raise PrometheusClient::Error, "Can't connect to #{api_url}"
+ raise PrometheusClient::ConnectionError, "Can't connect to #{api_url}"
rescue OpenSSL::SSL::SSLError
- raise PrometheusClient::Error, "#{api_url} contains invalid SSL data"
+ raise PrometheusClient::ConnectionError, "#{api_url} contains invalid SSL data"
rescue Errno::ECONNREFUSED
- raise PrometheusClient::Error, 'Connection refused'
+ raise PrometheusClient::ConnectionError, 'Connection refused'
end
def handle_management_api_response(response)
if response.code == 200
response.body
else
- raise PrometheusClient::Error, "#{response.code} - #{response.body}"
+ raise PrometheusClient::UnexpectedResponseError, "#{response.code} - #{response.body}"
end
end
@@ -156,7 +158,7 @@ module Gitlab
response_code = response.try(:code)
response_body = response.try(:body)
- raise PrometheusClient::Error, "#{response_code} - #{response_body}" unless response_code
+ raise PrometheusClient::UnexpectedResponseError, "#{response_code} - #{response_body}" unless response_code
json_data = parse_json(response_body) if [200, 400].include?(response_code)
@@ -166,7 +168,7 @@ module Gitlab
when 400
raise PrometheusClient::QueryError, json_data['error'] || 'Bad data received'
else
- raise PrometheusClient::Error, "#{response_code} - #{response_body}"
+ raise PrometheusClient::UnexpectedResponseError, "#{response_code} - #{response_body}"
end
end
@@ -178,7 +180,7 @@ module Gitlab
def parse_json(response_body)
Gitlab::Json.parse(response_body, legacy_mode: true)
rescue JSON::ParserError
- raise PrometheusClient::Error, 'Parsing response failed'
+ raise PrometheusClient::UnexpectedResponseError, 'Parsing response failed'
end
end
end
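Both new classes inherit from PrometheusClient::Error, so existing blanket rescues keep working while callers can now separate transport failures from bad responses; a sketch, assuming `client` is a Gitlab::PrometheusClient instance:

    begin
      client.query('up')
    rescue Gitlab::PrometheusClient::ConnectionError => e
      # network-level failure: refused connection, SSL problem, unreachable host
    rescue Gitlab::PrometheusClient::QueryError => e
      # Prometheus answered 400 with an error message for the query itself
    rescue Gitlab::PrometheusClient::UnexpectedResponseError => e
      # any other unexpected status code or unparsable body
    end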
diff --git a/lib/gitlab/regex.rb b/lib/gitlab/regex.rb
index 4caff8ae679..784f8b48f3c 100644
--- a/lib/gitlab/regex.rb
+++ b/lib/gitlab/regex.rb
@@ -43,6 +43,10 @@ module Gitlab
@maven_app_name_regex ||= /\A[\w\-\.]+\z/.freeze
end
+ def maven_version_regex
+ @maven_version_regex ||= /\A(\.?[\w\+-]+\.?)+\z/.freeze
+ end
+
def maven_app_group_regex
maven_app_name_regex
end
@@ -246,6 +250,14 @@ module Gitlab
@utc_date_regex ||= /\A[0-9]{4}-[0-9]{2}-[0-9]{2}\z/.freeze
end
+ def merge_request_wip
+ /(?i)(\[WIP\]\s*|WIP:\s*|WIP$)/
+ end
+
+ def merge_request_draft
+ /(?i)(\[draft\]|\(draft\)|draft:|draft\s\-\s|draft$)/
+ end
+
def issue
@issue ||= /(?<issue>\d+\b)/
end
diff --git a/lib/gitlab/runtime.rb b/lib/gitlab/runtime.rb
index abf6ee07d53..8b40aaa101a 100644
--- a/lib/gitlab/runtime.rb
+++ b/lib/gitlab/runtime.rb
@@ -37,7 +37,7 @@ module Gitlab
end
def puma?
- !!defined?(::Puma) && !defined?(ACTION_CABLE_SERVER)
+ !!defined?(::Puma)
end
# For unicorn, we need to check for actual server instances to avoid false positives.
@@ -70,11 +70,11 @@ module Gitlab
end
def web_server?
- puma? || unicorn? || action_cable?
+ puma? || unicorn?
end
def action_cable?
- !!defined?(ACTION_CABLE_SERVER)
+ web_server? && (!!defined?(ACTION_CABLE_SERVER) || Gitlab::ActionCable::Config.in_app?)
end
def multi_threaded?
@@ -82,19 +82,21 @@ module Gitlab
end
def max_threads
- main_thread = 1
+ threads = 1 # main thread
- if action_cable?
- Gitlab::Application.config.action_cable.worker_pool_size
- elsif puma?
- Puma.cli_config.options[:max_threads]
+ if puma?
+ threads += Puma.cli_config.options[:max_threads]
elsif sidekiq?
# An extra thread for the poller in Sidekiq Cron:
# https://github.com/ondrejbartas/sidekiq-cron#under-the-hood
- Sidekiq.options[:concurrency] + 1
- else
- 0
- end + main_thread
+ threads += Sidekiq.options[:concurrency] + 1
+ end
+
+ if action_cable?
+ threads += Gitlab::ActionCable::Config.worker_pool_size
+ end
+
+ threads
end
end
end
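A worked example of the new additive counting, assuming a Puma process with max_threads 4 and in-app Action Cable using a worker pool of 4 (numbers illustrative):

    # threads  = 1    main thread
    # threads += 4    Puma max_threads
    # threads += 4    Action Cable worker pool, now counted on top of Puma's
    #                 threads rather than instead of them
    # Gitlab::Runtime.max_threads  # => 9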
diff --git a/lib/gitlab/search_results.rb b/lib/gitlab/search_results.rb
index 6239158ef06..3d5f64ce05b 100644
--- a/lib/gitlab/search_results.rb
+++ b/lib/gitlab/search_results.rb
@@ -27,6 +27,7 @@ module Gitlab
end
def objects(scope, page: nil, per_page: DEFAULT_PER_PAGE, without_count: true, preload_method: nil)
+ should_preload = preload_method.present?
collection = case scope
when 'projects'
projects
@@ -39,9 +40,11 @@ module Gitlab
when 'users'
users
else
+ should_preload = false
Kaminari.paginate_array([])
end
+ collection = collection.public_send(preload_method) if should_preload # rubocop:disable GitlabSecurity/PublicSend
collection = collection.page(page).per(per_page)
without_count ? collection.without_count : collection
diff --git a/lib/gitlab/seeder.rb b/lib/gitlab/seeder.rb
index 53cbd5b21ea..d652719721e 100644
--- a/lib/gitlab/seeder.rb
+++ b/lib/gitlab/seeder.rb
@@ -18,6 +18,7 @@ module Gitlab
MASS_INSERT_PROJECT_START = 'mass_insert_project_'
MASS_INSERT_USER_START = 'mass_insert_user_'
+ REPORTED_USER_START = 'reported_user_'
ESTIMATED_INSERT_PER_MINUTE = 2_000_000
MASS_INSERT_ENV = 'MASS_INSERT'
@@ -36,7 +37,7 @@ module Gitlab
included do
scope :not_mass_generated, -> do
- where.not("username LIKE '#{MASS_INSERT_USER_START}%'")
+ where.not("username LIKE '#{MASS_INSERT_USER_START}%' OR username LIKE '#{REPORTED_USER_START}%'")
end
end
end
diff --git a/lib/gitlab/service_desk.rb b/lib/gitlab/service_desk.rb
new file mode 100644
index 00000000000..b3d6e890e03
--- /dev/null
+++ b/lib/gitlab/service_desk.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ServiceDesk
+ # Check whether a project or GitLab instance can support the Service Desk
+ # feature. Use `project.service_desk_enabled?` to check whether it is
+ # enabled for a particular project.
+ def self.enabled?(project:)
+ supported? && project[:service_desk_enabled]
+ end
+
+ def self.supported?
+ Gitlab::IncomingEmail.enabled? && Gitlab::IncomingEmail.supports_wildcard?
+ end
+ end
+end
diff --git a/lib/gitlab/service_desk_email.rb b/lib/gitlab/service_desk_email.rb
new file mode 100644
index 00000000000..f8dba82cb40
--- /dev/null
+++ b/lib/gitlab/service_desk_email.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ServiceDeskEmail
+ class << self
+ def enabled?
+ !!config&.enabled && config&.address.present?
+ end
+
+ def key_from_address(address)
+ wildcard_address = config&.address
+ return unless wildcard_address
+
+ Gitlab::IncomingEmail.key_from_address(address, wildcard_address: wildcard_address)
+ end
+
+ def config
+ Gitlab.config.service_desk_email
+ end
+ end
+ end
+end
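A sketch of the new helpers, assuming the instance configuration carries a service_desk_email block whose address contains the %{key} placeholder (values illustrative):

    # service_desk_email:
    #   enabled: true
    #   address: "contact+%{key}@example.com"

    Gitlab::ServiceDeskEmail.enabled?
    # => true, since both the flag and an address are set

    Gitlab::ServiceDeskEmail.key_from_address('contact+my-group-my-project-12345-issue-@example.com')
    # => "my-group-my-project-12345-issue-"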
diff --git a/lib/gitlab/set_cache.rb b/lib/gitlab/set_cache.rb
index e891b805879..6ba9ee26634 100644
--- a/lib/gitlab/set_cache.rb
+++ b/lib/gitlab/set_cache.rb
@@ -20,7 +20,10 @@ module Gitlab
with do |redis|
keys = keys.map { |key| cache_key(key) }
- unlink_or_delete(redis, keys)
+
+ Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
+ unlink_or_delete(redis, keys)
+ end
end
end
diff --git a/lib/gitlab/sidekiq_logging/deduplication_logger.rb b/lib/gitlab/sidekiq_logging/deduplication_logger.rb
index 01810e474dc..c5654819ffb 100644
--- a/lib/gitlab/sidekiq_logging/deduplication_logger.rb
+++ b/lib/gitlab/sidekiq_logging/deduplication_logger.rb
@@ -6,11 +6,14 @@ module Gitlab
include Singleton
include LogsJobs
- def log(job, deduplication_type)
+ def log(job, deduplication_type, deduplication_options = {})
payload = parse_job(job)
payload['job_status'] = 'deduplicated'
payload['message'] = "#{base_message(payload)}: deduplicated: #{deduplication_type}"
- payload['deduplication_type'] = deduplication_type
+ payload['deduplication.type'] = deduplication_type
+ # removing nil values from deduplication options
+ payload.merge!(
+ deduplication_options.compact.transform_keys { |k| "deduplication.options.#{k}" })
Sidekiq.logger.info payload
end
diff --git a/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing.rb b/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing.rb
index 0ed4912c4cc..46ce0eb4a91 100644
--- a/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing.rb
+++ b/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing.rb
@@ -17,7 +17,8 @@ module Gitlab
job['duplicate-of'] = duplicate_job.existing_jid
if duplicate_job.droppable?
- Gitlab::SidekiqLogging::DeduplicationLogger.instance.log(job, "dropped until executing")
+ Gitlab::SidekiqLogging::DeduplicationLogger.instance.log(
+ job, "dropped until executing", duplicate_job.options)
return false
end
end
diff --git a/lib/gitlab/static_site_editor/config.rb b/lib/gitlab/static_site_editor/config.rb
index 65c567ec2a6..08ed6599a6e 100644
--- a/lib/gitlab/static_site_editor/config.rb
+++ b/lib/gitlab/static_site_editor/config.rb
@@ -3,7 +3,7 @@
module Gitlab
module StaticSiteEditor
class Config
- SUPPORTED_EXTENSIONS = %w[.md].freeze
+ SUPPORTED_EXTENSIONS = %w[.md .md.erb].freeze
def initialize(repository, ref, file_path, return_url)
@repository = repository
@@ -20,7 +20,7 @@ module Gitlab
commit_id: commit_id,
project_id: project.id,
project: project.path,
- namespace: project.namespace.path,
+ namespace: project.namespace.full_path,
return_url: sanitize_url(return_url),
is_supported_content: supported_content?.to_s,
base_url: Gitlab::Routing.url_helpers.project_show_sse_path(project, full_path)
@@ -42,11 +42,11 @@ module Gitlab
end
def extension_supported?
- File.extname(file_path).in?(SUPPORTED_EXTENSIONS)
+ SUPPORTED_EXTENSIONS.any? { |ext| file_path.end_with?(ext) }
end
def file_exists?
- commit_id.present? && repository.blob_at(commit_id, file_path).present?
+ commit_id.present? && !repository.blob_at(commit_id, file_path).nil?
end
def full_path
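The switch from File.extname to end_with? is what makes the double extension work; a standalone illustration:

    File.extname('index.md.erb')
    # => ".erb", which is not in the supported list, so the old check rejected ERB-templated Markdown

    %w[.md .md.erb].any? { |ext| 'index.md.erb'.end_with?(ext) }
    # => true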
diff --git a/lib/gitlab/suggestions/file_suggestion.rb b/lib/gitlab/suggestions/file_suggestion.rb
index 73b9800f0b8..7805b27902d 100644
--- a/lib/gitlab/suggestions/file_suggestion.rb
+++ b/lib/gitlab/suggestions/file_suggestion.rb
@@ -7,17 +7,14 @@ module Gitlab
SuggestionForDifferentFileError = Class.new(StandardError)
- def initialize
- @suggestions = []
- end
-
- def add_suggestion(new_suggestion)
- if for_different_file?(new_suggestion)
- raise SuggestionForDifferentFileError,
- 'Only add suggestions for the same file.'
- end
+ attr_reader :file_path
+ attr_reader :blob
+ attr_reader :suggestions
- suggestions << new_suggestion
+ def initialize(file_path, suggestions)
+ @file_path = file_path
+ @suggestions = suggestions.sort_by(&:from_line_index)
+ @blob = suggestions.first&.diff_file&.new_blob
end
def line_conflict?
@@ -30,18 +27,8 @@ module Gitlab
@new_content ||= _new_content
end
- def file_path
- @file_path ||= _file_path
- end
-
private
- attr_accessor :suggestions
-
- def blob
- first_suggestion&.diff_file&.new_blob
- end
-
def blob_data_lines
blob.load_all_data!
blob.data.lines
@@ -53,31 +40,19 @@ module Gitlab
def _new_content
current_content.tap do |content|
+ # NOTE: We need to cater for line number changes when the range is more than one line.
+ offset = 0
+
suggestions.each do |suggestion|
- range = line_range(suggestion)
+ range = line_range(suggestion, offset)
content[range] = suggestion.to_content
+ offset += range.count - 1
end
end.join
end
- def line_range(suggestion)
- suggestion.from_line_index..suggestion.to_line_index
- end
-
- def for_different_file?(suggestion)
- file_path && file_path != suggestion_file_path(suggestion)
- end
-
- def suggestion_file_path(suggestion)
- suggestion&.diff_file&.file_path
- end
-
- def first_suggestion
- suggestions.first
- end
-
- def _file_path
- suggestion_file_path(first_suggestion)
+ def line_range(suggestion, offset = 0)
+ (suggestion.from_line_index - offset)..(suggestion.to_line_index - offset)
end
def _line_conflict?
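
The running `offset` exists because replacing an N-line range with a single content entry shifts every later line up by N - 1, so subsequent ranges must be adjusted. A standalone sketch of the same bookkeeping on a plain array of lines (the hashes below stand in for the real Suggestion objects):

lines = ["a\n", "b\n", "c\n", "d\n", "e\n"]
suggestions = [
  { from: 1, to: 2, content: "B and C rewritten\n" }, # indexes refer to the original file
  { from: 4, to: 4, content: "E rewritten\n" }
]

offset = 0
suggestions.each do |s|
  range = (s[:from] - offset)..(s[:to] - offset)
  lines[range] = s[:content]  # the whole range collapses into one element
  offset += range.count - 1   # later ranges shift up accordingly
end

lines.join
# => "a\nB and C rewritten\nd\nE rewritten\n"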
diff --git a/lib/gitlab/suggestions/suggestion_set.rb b/lib/gitlab/suggestions/suggestion_set.rb
index 22abef98bf0..abb05ba56a7 100644
--- a/lib/gitlab/suggestions/suggestion_set.rb
+++ b/lib/gitlab/suggestions/suggestion_set.rb
@@ -26,10 +26,10 @@ module Gitlab
end
def actions
- @actions ||= suggestions_per_file.map do |file_path, file_suggestion|
+ @actions ||= suggestions_per_file.map do |file_suggestion|
{
action: 'update',
- file_path: file_path,
+ file_path: file_suggestion.file_path,
content: file_suggestion.new_content
}
end
@@ -50,19 +50,9 @@ module Gitlab
end
def _suggestions_per_file
- suggestions.each_with_object({}) do |suggestion, result|
- file_path = suggestion.diff_file.file_path
- file_suggestion = result[file_path] ||= FileSuggestion.new
- file_suggestion.add_suggestion(suggestion)
- end
- end
-
- def file_suggestions
- suggestions_per_file.values
- end
-
- def first_file_suggestion
- file_suggestions.first
+ suggestions
+ .group_by { |suggestion| suggestion.diff_file.file_path }
+ .map { |file_path, group| FileSuggestion.new(file_path, group) }
end
def _error_message
@@ -72,7 +62,7 @@ module Gitlab
return message if message
end
- has_line_conflict = file_suggestions.any? do |file_suggestion|
+ has_line_conflict = suggestions_per_file.any? do |file_suggestion|
file_suggestion.line_conflict?
end
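
With grouping moved into `_suggestions_per_file`, each commit action is now derived directly from a `FileSuggestion`. A small sketch of the same group-then-map shape using plain hashes (illustrative only; not the real Suggestion/FileSuggestion API):

suggestions = [
  { file_path: 'app/a.rb', from_line_index: 3 },
  { file_path: 'app/b.rb', from_line_index: 1 },
  { file_path: 'app/a.rb', from_line_index: 9 }
]

per_file = suggestions
  .group_by { |s| s[:file_path] }
  .map { |path, group| { file_path: path, suggestions: group.sort_by { |s| s[:from_line_index] } } }

actions = per_file.map { |fs| { action: 'update', file_path: fs[:file_path] } }
# => [{:action=>"update", :file_path=>"app/a.rb"}, {:action=>"update", :file_path=>"app/b.rb"}]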
diff --git a/lib/gitlab/template/service_desk_template.rb b/lib/gitlab/template/service_desk_template.rb
new file mode 100644
index 00000000000..edc62a92004
--- /dev/null
+++ b/lib/gitlab/template/service_desk_template.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Template
+ class ServiceDeskTemplate < BaseTemplate
+ class << self
+ def extension
+ '.md'
+ end
+
+ def base_dir
+ '.gitlab/service_desk_templates/'
+ end
+
+ def finder(project)
+ Gitlab::Template::Finders::RepoTemplateFinder.new(project, self.base_dir, self.extension, self.categories)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/tracking/incident_management.rb b/lib/gitlab/tracking/incident_management.rb
index bd8d1669dd3..5fa819b3696 100644
--- a/lib/gitlab/tracking/incident_management.rb
+++ b/lib/gitlab/tracking/incident_management.rb
@@ -32,6 +32,9 @@ module Gitlab
},
send_email: {
name: 'sending_emails'
+ },
+ pagerduty_active: {
+ name: 'pagerduty_webhook'
}
}.with_indifferent_access.freeze
end
diff --git a/lib/gitlab/tree_summary.rb b/lib/gitlab/tree_summary.rb
index 4ec43e62c19..9b67599668a 100644
--- a/lib/gitlab/tree_summary.rb
+++ b/lib/gitlab/tree_summary.rb
@@ -97,7 +97,7 @@ module Gitlab
File.join(*[path, ""])
end
- commits_hsh = repository.list_last_commits_for_tree(commit.id, ensured_path, offset: offset, limit: limit)
+ commits_hsh = repository.list_last_commits_for_tree(commit.id, ensured_path, offset: offset, limit: limit, literal_pathspec: true)
prerender_commit_full_titles!(commits_hsh.values)
entries.each do |entry|
diff --git a/lib/gitlab/updated_notes_paginator.rb b/lib/gitlab/updated_notes_paginator.rb
new file mode 100644
index 00000000000..3d3d0e5bf9e
--- /dev/null
+++ b/lib/gitlab/updated_notes_paginator.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+module Gitlab
+ # UpdatedNotesPaginator implements a rudimentary form of keyset pagination on
+ # top of a notes relation that has been initialized with a `last_fetched_at`
+ # value. This class will attempt to limit the number of notes returned, and
+ # specify a new value for `last_fetched_at` that will pick up where the last
+ # page of notes left off.
+ class UpdatedNotesPaginator
+ LIMIT = 50
+ MICROSECOND = 1_000_000
+
+ attr_reader :next_fetched_at, :notes
+
+ def initialize(relation, last_fetched_at:)
+ @last_fetched_at = last_fetched_at
+ @now = Time.current
+
+ notes, more = fetch_page(relation)
+ if more
+ init_middle_page(notes)
+ else
+ init_final_page(notes)
+ end
+ end
+
+ def metadata
+ { last_fetched_at: next_fetched_at_microseconds, more: more }
+ end
+
+ private
+
+ attr_reader :last_fetched_at, :more, :now
+
+ def next_fetched_at_microseconds
+ (next_fetched_at.to_i * MICROSECOND) + next_fetched_at.usec
+ end
+
+ def fetch_page(relation)
+ relation = relation.by_updated_at
+ notes = relation.at_most(LIMIT + 1).to_a
+
+ return [notes, false] unless notes.size > LIMIT
+
+ marker = notes.pop # Remove the marker note
+
+ # Although very unlikely, it is possible that more notes with the same
+ # updated_at may exist, e.g., if created in bulk. Add them all to the page
+ # if this is detected, so pagination won't get stuck indefinitely
+ if notes.last.updated_at == marker.updated_at
+ notes += relation
+ .with_updated_at(marker.updated_at)
+ .id_not_in(notes.map(&:id))
+ .to_a
+ end
+
+ [notes, true]
+ end
+
+ def init_middle_page(notes)
+ @more = true
+
+ # The fetch overlap can be ignored if we're in an intermediate page.
+ @next_fetched_at = notes.last.updated_at + NotesFinder::FETCH_OVERLAP
+ @notes = notes
+ end
+
+ def init_final_page(notes)
+ @more = false
+ @next_fetched_at = now
+ @notes = notes
+ end
+ end
+end
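
The "fetch LIMIT + 1, pop the extra row" trick is what tells the paginator whether another page exists without a separate COUNT query, and the popped marker also supplies the boundary timestamp for the next page. A self-contained sketch of the same idea over an in-memory array (LIMIT reduced for brevity; integers stand in for notes and their `updated_at` values):

LIMIT = 3

def fetch_page(rows)
  page = rows.first(LIMIT + 1)  # peek one row beyond the page size
  return [page, false] unless page.size > LIMIT

  marker = page.pop             # the extra row only signals that more pages exist
  # if the page boundary ties with the marker (an updated_at tie in the real
  # code), pull the remaining ties in so the next page cannot skip them
  page += rows.drop(LIMIT + 1).take_while { |r| r == marker } if page.last == marker

  [page, true]
end

fetch_page([1, 2, 3, 4, 5]) # => [[1, 2, 3], true]
fetch_page([1, 2, 3, 3, 3]) # => [[1, 2, 3, 3], true]
fetch_page([1, 2])          # => [[1, 2], false]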
diff --git a/lib/gitlab/url_builder.rb b/lib/gitlab/url_builder.rb
index cd15130cee6..1e522ae63b6 100644
--- a/lib/gitlab/url_builder.rb
+++ b/lib/gitlab/url_builder.rb
@@ -71,7 +71,11 @@ module Gitlab
end
def snippet_url(snippet, **options)
- if options.delete(:raw).present?
+ if options[:file].present?
+ file, ref = options.values_at(:file, :ref)
+
+ instance.gitlab_raw_snippet_blob_url(snippet, file, ref)
+ elsif options.delete(:raw).present?
instance.gitlab_raw_snippet_url(snippet, **options)
else
instance.gitlab_snippet_url(snippet, **options)
@@ -81,9 +85,11 @@ module Gitlab
def wiki_url(wiki, **options)
return wiki_page_url(wiki, Wiki::HOMEPAGE, **options) unless options[:action]
- options[:controller] = 'projects/wikis'
- options[:namespace_id] = wiki.container.namespace
- options[:project_id] = wiki.container
+ if wiki.container.is_a?(Project)
+ options[:controller] = 'projects/wikis'
+ options[:namespace_id] = wiki.container.namespace
+ options[:project_id] = wiki.container
+ end
instance.url_for(**options)
end
diff --git a/lib/gitlab/usage_data.rb b/lib/gitlab/usage_data.rb
index 7b6f5e69ee1..9d7e6536608 100644
--- a/lib/gitlab/usage_data.rb
+++ b/lib/gitlab/usage_data.rb
@@ -18,7 +18,6 @@ module Gitlab
class << self
include Gitlab::Utils::UsageData
include Gitlab::Utils::StrongMemoize
- include Gitlab::UsageDataConcerns::Topology
def data(force_refresh: false)
Rails.cache.fetch('usage_data', force: force_refresh, expires_in: 2.weeks) do
@@ -27,16 +26,21 @@ module Gitlab
end
def uncached_data
- clear_memoized_limits
-
- license_usage_data
- .merge(system_usage_data)
- .merge(features_usage_data)
- .merge(components_usage_data)
- .merge(cycle_analytics_usage_data)
- .merge(object_store_usage_data)
- .merge(topology_usage_data)
- .merge(recording_ce_finish_data)
+ clear_memoized
+
+ with_finished_at(:recording_ce_finished_at) do
+ license_usage_data
+ .merge(system_usage_data)
+ .merge(system_usage_data_monthly)
+ .merge(features_usage_data)
+ .merge(components_usage_data)
+ .merge(cycle_analytics_usage_data)
+ .merge(object_store_usage_data)
+ .merge(topology_usage_data)
+ .merge(usage_activity_by_stage)
+ .merge(usage_activity_by_stage(:usage_activity_by_stage_monthly, last_28_days_time_period))
+ .merge(analytics_unique_visits_data)
+ end
end
def to_json(force_refresh: false)
@@ -59,17 +63,11 @@ module Gitlab
Time.now
end
- def recording_ce_finish_data
- {
- recording_ce_finished_at: Time.now
- }
- end
-
# rubocop: disable Metrics/AbcSize
# rubocop: disable CodeReuse/ActiveRecord
def system_usage_data
- alert_bot_incident_count = count(::Issue.authored(::User.alert_bot))
- issues_created_manually_from_alerts = count(Issue.with_alert_management_alerts.not_authored_by(::User.alert_bot))
+ alert_bot_incident_count = count(::Issue.authored(::User.alert_bot), start: issue_minimum_id, finish: issue_maximum_id)
+ issues_created_manually_from_alerts = count(Issue.with_alert_management_alerts.not_authored_by(::User.alert_bot), start: issue_minimum_id, finish: issue_maximum_id)
{
counts: {
@@ -86,9 +84,9 @@ module Gitlab
auto_devops_enabled: count(::ProjectAutoDevops.enabled),
auto_devops_disabled: count(::ProjectAutoDevops.disabled),
deploy_keys: count(DeployKey),
- deployments: count(Deployment),
- successful_deployments: count(Deployment.success),
- failed_deployments: count(Deployment.failed),
+ deployments: deployment_count(Deployment),
+ successful_deployments: deployment_count(Deployment.success),
+ failed_deployments: deployment_count(Deployment.failed),
environments: count(::Environment),
clusters: count(::Clusters::Cluster),
clusters_enabled: count(::Clusters::Cluster.enabled),
@@ -111,11 +109,12 @@ module Gitlab
clusters_applications_knative: count(::Clusters::Applications::Knative.available),
clusters_applications_elastic_stack: count(::Clusters::Applications::ElasticStack.available),
clusters_applications_jupyter: count(::Clusters::Applications::Jupyter.available),
+ clusters_applications_cilium: count(::Clusters::Applications::Cilium.available),
clusters_management_project: count(::Clusters::Cluster.with_management_project),
in_review_folder: count(::Environment.in_review_folder),
grafana_integrated_projects: count(GrafanaIntegration.enabled),
groups: count(Group),
- issues: count(Issue),
+ issues: count(Issue, start: issue_minimum_id, finish: issue_maximum_id),
issues_created_from_gitlab_error_tracking_ui: count(SentryIssue),
issues_with_associated_zoom_link: count(ZoomMeeting.added_to_issue),
issues_using_zoom_quick_actions: distinct_count(ZoomMeeting, :issue_id),
@@ -125,7 +124,7 @@ module Gitlab
issues_created_manually_from_alerts: issues_created_manually_from_alerts,
incident_issues: alert_bot_incident_count,
alert_bot_incident_issues: alert_bot_incident_count,
- incident_labeled_issues: count(::Issue.with_label_attributes(IncidentManagement::CreateIssueService::INCIDENT_LABEL)),
+ incident_labeled_issues: count(::Issue.with_label_attributes(::IncidentManagement::CreateIncidentLabelService::LABEL_PROPERTIES), start: issue_minimum_id, finish: issue_maximum_id),
keys: count(Key),
label_lists: count(List.label),
lfs_objects: count(LfsObject),
@@ -144,7 +143,6 @@ module Gitlab
protected_branches: count(ProtectedBranch),
releases: count(Release),
remote_mirrors: count(RemoteMirror),
- snippets: count(Snippet),
personal_snippets: count(PersonalSnippet),
project_snippets: count(ProjectSnippet),
suggestions: count(Suggestion),
@@ -161,14 +159,29 @@ module Gitlab
usage_counters,
user_preferences_usage,
ingress_modsecurity_usage,
- container_expiration_policies_usage,
- merge_requests_usage(default_time_period)
- )
+ container_expiration_policies_usage
+ ).tap do |data|
+ data[:snippets] = data[:personal_snippets] + data[:project_snippets]
+ end
}
end
- # rubocop: enable CodeReuse/ActiveRecord
# rubocop: enable Metrics/AbcSize
+ def system_usage_data_monthly
+ {
+ counts_monthly: {
+ deployments: deployment_count(Deployment.where(last_28_days_time_period)),
+ successful_deployments: deployment_count(Deployment.success.where(last_28_days_time_period)),
+ failed_deployments: deployment_count(Deployment.failed.where(last_28_days_time_period)),
+ personal_snippets: count(PersonalSnippet.where(last_28_days_time_period)),
+ project_snippets: count(ProjectSnippet.where(last_28_days_time_period))
+ }.tap do |data|
+ data[:snippets] = data[:personal_snippets] + data[:project_snippets]
+ end
+ }
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
def cycle_analytics_usage_data
Gitlab::CycleAnalytics::UsageData.new.to_json
rescue ActiveRecord::StatementInvalid
@@ -197,6 +210,7 @@ module Gitlab
ldap_enabled: alt_usage_data(fallback: nil) { Gitlab.config.ldap.enabled },
mattermost_enabled: alt_usage_data(fallback: nil) { Gitlab.config.mattermost.enabled },
omniauth_enabled: alt_usage_data(fallback: nil) { Gitlab::Auth.omniauth_enabled? },
+ prometheus_enabled: alt_usage_data(fallback: nil) { Gitlab::Prometheus::Internal.prometheus_enabled? },
prometheus_metrics_enabled: alt_usage_data(fallback: nil) { Gitlab::Metrics.prometheus_metrics_enabled? },
reply_by_email_enabled: alt_usage_data(fallback: nil) { Gitlab::IncomingEmail.enabled? },
signup_enabled: alt_usage_data(fallback: nil) { Gitlab::CurrentSettings.allow_signup? },
@@ -290,6 +304,10 @@ module Gitlab
}
end
+ def topology_usage_data
+ Gitlab::UsageData::Topology.new.topology_usage_data
+ end
+
def ingress_modsecurity_usage
##
# This method measures usage of the Modsecurity Web Application Firewall across the entire
@@ -336,15 +354,9 @@ module Gitlab
# rubocop: disable CodeReuse/ActiveRecord
def services_usage
- results = Service.available_services_names.without('jira').each_with_object({}) do |service_name, response|
+ Service.available_services_names.without('jira').each_with_object({}) do |service_name, response|
response["projects_#{service_name}_active".to_sym] = count(Service.active.where(template: false, type: "#{service_name}_service".camelize))
- end
-
- # Keep old Slack keys for backward compatibility, https://gitlab.com/gitlab-data/analytics/issues/3241
- results[:projects_slack_notifications_active] = results[:projects_slack_active]
- results[:projects_slack_slash_active] = results[:projects_slack_slash_commands_active]
-
- results.merge(jira_usage).merge(jira_import_usage)
+ end.merge(jira_usage).merge(jira_import_usage)
end
def jira_usage
@@ -357,18 +369,15 @@ module Gitlab
projects_jira_active: 0
}
- Service.active
- .by_type(:JiraService)
- .includes(:jira_tracker_data)
- .find_in_batches(batch_size: BATCH_SIZE) do |services|
+ JiraService.active.includes(:jira_tracker_data).find_in_batches(batch_size: BATCH_SIZE) do |services|
counts = services.group_by do |service|
# TODO: Simplify as part of https://gitlab.com/gitlab-org/gitlab/issues/29404
service_url = service.data_fields&.url || (service.properties && service.properties['url'])
service_url&.include?('.atlassian.net') ? :cloud : :server
end
- results[:projects_jira_server_active] += counts[:server].count if counts[:server]
- results[:projects_jira_cloud_active] += counts[:cloud].count if counts[:cloud]
+ results[:projects_jira_server_active] += counts[:server].size if counts[:server]
+ results[:projects_jira_cloud_active] += counts[:cloud].size if counts[:cloud]
results[:projects_jira_active] += services.size
end
@@ -400,23 +409,18 @@ module Gitlab
end
# rubocop: disable CodeReuse/ActiveRecord
- def merge_requests_usage(time_period)
+ def merge_requests_users(time_period)
query =
Event
.where(target_type: Event::TARGET_TYPES[:merge_request].to_s)
.where(time_period)
- merge_request_users = distinct_count(
+ distinct_count(
query,
:author_id,
- batch_size: 5_000, # Based on query performance, this is the optimal batch size.
- start: User.minimum(:id),
- finish: User.maximum(:id)
+ start: user_minimum_id,
+ finish: user_maximum_id
)
-
- {
- merge_requests_users: merge_request_users
- }
end
# rubocop: enable CodeReuse/ActiveRecord
@@ -428,19 +432,207 @@ module Gitlab
end
end
- def default_time_period
+ def last_28_days_time_period
{ created_at: 28.days.ago..Time.current }
end
+ # Source: https://gitlab.com/gitlab-data/analytics/blob/master/transform/snowflake-dbt/data/ping_metrics_to_stage_mapping_data.csv
+ def usage_activity_by_stage(key = :usage_activity_by_stage, time_period = {})
+ {
+ key => {
+ configure: usage_activity_by_stage_configure(time_period),
+ create: usage_activity_by_stage_create(time_period),
+ manage: usage_activity_by_stage_manage(time_period),
+ monitor: usage_activity_by_stage_monitor(time_period),
+ package: usage_activity_by_stage_package(time_period),
+ plan: usage_activity_by_stage_plan(time_period),
+ release: usage_activity_by_stage_release(time_period),
+ secure: usage_activity_by_stage_secure(time_period),
+ verify: usage_activity_by_stage_verify(time_period)
+ }
+ }
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def usage_activity_by_stage_configure(time_period)
+ {
+ clusters_applications_cert_managers: cluster_applications_user_distinct_count(::Clusters::Applications::CertManager, time_period),
+ clusters_applications_helm: cluster_applications_user_distinct_count(::Clusters::Applications::Helm, time_period),
+ clusters_applications_ingress: cluster_applications_user_distinct_count(::Clusters::Applications::Ingress, time_period),
+ clusters_applications_knative: cluster_applications_user_distinct_count(::Clusters::Applications::Knative, time_period),
+ clusters_management_project: clusters_user_distinct_count(::Clusters::Cluster.with_management_project, time_period),
+ clusters_disabled: clusters_user_distinct_count(::Clusters::Cluster.disabled, time_period),
+ clusters_enabled: clusters_user_distinct_count(::Clusters::Cluster.enabled, time_period),
+ clusters_platforms_gke: clusters_user_distinct_count(::Clusters::Cluster.gcp_installed.enabled, time_period),
+ clusters_platforms_eks: clusters_user_distinct_count(::Clusters::Cluster.aws_installed.enabled, time_period),
+ clusters_platforms_user: clusters_user_distinct_count(::Clusters::Cluster.user_provided.enabled, time_period),
+ instance_clusters_disabled: clusters_user_distinct_count(::Clusters::Cluster.disabled.instance_type, time_period),
+ instance_clusters_enabled: clusters_user_distinct_count(::Clusters::Cluster.enabled.instance_type, time_period),
+ group_clusters_disabled: clusters_user_distinct_count(::Clusters::Cluster.disabled.group_type, time_period),
+ group_clusters_enabled: clusters_user_distinct_count(::Clusters::Cluster.enabled.group_type, time_period),
+ project_clusters_disabled: clusters_user_distinct_count(::Clusters::Cluster.disabled.project_type, time_period),
+ project_clusters_enabled: clusters_user_distinct_count(::Clusters::Cluster.enabled.project_type, time_period)
+ }
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def usage_activity_by_stage_create(time_period)
+ {
+ deploy_keys: distinct_count(::DeployKey.where(time_period), :user_id),
+ keys: distinct_count(::Key.regular_keys.where(time_period), :user_id),
+ merge_requests: distinct_count(::MergeRequest.where(time_period), :author_id),
+ projects_with_disable_overriding_approvers_per_merge_request: count(::Project.where(time_period.merge(disable_overriding_approvers_per_merge_request: true))),
+ projects_without_disable_overriding_approvers_per_merge_request: count(::Project.where(time_period.merge(disable_overriding_approvers_per_merge_request: [false, nil]))),
+ remote_mirrors: distinct_count(::Project.with_remote_mirrors.where(time_period), :creator_id),
+ snippets: distinct_count(::Snippet.where(time_period), :author_id)
+ }.tap do |h|
+ if time_period.present?
+ h[:merge_requests_users] = merge_requests_users(time_period)
+ h.merge!(action_monthly_active_users(time_period))
+ end
+ end
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ # Omitted because no user, creator or author associated: `campaigns_imported_from_github`, `ldap_group_links`
+ # rubocop: disable CodeReuse/ActiveRecord
+ def usage_activity_by_stage_manage(time_period)
+ {
+ events: distinct_count(::Event.where(time_period), :author_id),
+ groups: distinct_count(::GroupMember.where(time_period), :user_id),
+ users_created: count(::User.where(time_period), start: user_minimum_id, finish: user_maximum_id),
+ omniauth_providers: filtered_omniauth_provider_names.reject { |name| name == 'group_saml' }
+ }
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def usage_activity_by_stage_monitor(time_period)
+ {
+ clusters: distinct_count(::Clusters::Cluster.where(time_period), :user_id),
+ clusters_applications_prometheus: cluster_applications_user_distinct_count(::Clusters::Applications::Prometheus, time_period),
+ operations_dashboard_default_dashboard: count(::User.active.with_dashboard('operations').where(time_period),
+ start: user_minimum_id,
+ finish: user_maximum_id)
+ }
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def usage_activity_by_stage_package(time_period)
+ {}
+ end
+
+ # Omitted because no user, creator or author associated: `boards`, `labels`, `milestones`, `uploads`
+ # Omitted because too expensive: `epics_deepest_relationship_level`
+ # Omitted because of encrypted properties: `projects_jira_cloud_active`, `projects_jira_server_active`
+ # rubocop: disable CodeReuse/ActiveRecord
+ def usage_activity_by_stage_plan(time_period)
+ {
+ issues: distinct_count(::Issue.where(time_period), :author_id),
+ notes: distinct_count(::Note.where(time_period), :author_id),
+ projects: distinct_count(::Project.where(time_period), :creator_id),
+ todos: distinct_count(::Todo.where(time_period), :author_id)
+ }
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ # Omitted because no user, creator or author associated: `environments`, `feature_flags`, `in_review_folder`, `pages_domains`
+ # rubocop: disable CodeReuse/ActiveRecord
+ def usage_activity_by_stage_release(time_period)
+ {
+ deployments: distinct_count(::Deployment.where(time_period), :user_id),
+ failed_deployments: distinct_count(::Deployment.failed.where(time_period), :user_id),
+ releases: distinct_count(::Release.where(time_period), :author_id),
+ successful_deployments: distinct_count(::Deployment.success.where(time_period), :user_id)
+ }
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ # Omitted because no user, creator or author associated: `ci_runners`
+ # rubocop: disable CodeReuse/ActiveRecord
+ def usage_activity_by_stage_verify(time_period)
+ {
+ ci_builds: distinct_count(::Ci::Build.where(time_period), :user_id),
+ ci_external_pipelines: distinct_count(::Ci::Pipeline.external.where(time_period), :user_id, start: user_minimum_id, finish: user_maximum_id),
+ ci_internal_pipelines: distinct_count(::Ci::Pipeline.internal.where(time_period), :user_id, start: user_minimum_id, finish: user_maximum_id),
+ ci_pipeline_config_auto_devops: distinct_count(::Ci::Pipeline.auto_devops_source.where(time_period), :user_id, start: user_minimum_id, finish: user_maximum_id),
+ ci_pipeline_config_repository: distinct_count(::Ci::Pipeline.repository_source.where(time_period), :user_id, start: user_minimum_id, finish: user_maximum_id),
+ ci_pipeline_schedules: distinct_count(::Ci::PipelineSchedule.where(time_period), :owner_id),
+ ci_pipelines: distinct_count(::Ci::Pipeline.where(time_period), :user_id, start: user_minimum_id, finish: user_maximum_id),
+ ci_triggers: distinct_count(::Ci::Trigger.where(time_period), :owner_id),
+ clusters_applications_runner: cluster_applications_user_distinct_count(::Clusters::Applications::Runner, time_period)
+ }
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ # Currently too complicated to get reliable counts for these stats:
+ # container_scanning_jobs, dast_jobs, dependency_scanning_jobs, license_management_jobs, sast_jobs, secret_detection_jobs
+ # Once https://gitlab.com/gitlab-org/gitlab/merge_requests/17568 is merged, this might be doable
+ def usage_activity_by_stage_secure(time_period)
+ {}
+ end
+
+ def analytics_unique_visits_data
+ results = ::Gitlab::Analytics::UniqueVisits::TARGET_IDS.each_with_object({}) do |target_id, hash|
+ hash[target_id] = redis_usage_data { unique_visit_service.weekly_unique_visits_for_target(target_id) }
+ end
+ results['analytics_unique_visits_for_any_target'] = redis_usage_data { unique_visit_service.weekly_unique_visits_for_any_target }
+
+ { analytics_unique_visits: results }
+ end
+
+ def action_monthly_active_users(time_period)
+ return {} unless Feature.enabled?(Gitlab::UsageDataCounters::TrackUniqueActions::FEATURE_FLAG)
+
+ counter = Gitlab::UsageDataCounters::TrackUniqueActions
+
+ project_count = redis_usage_data do
+ counter.count_unique_events(
+ event_action: Gitlab::UsageDataCounters::TrackUniqueActions::PUSH_ACTION,
+ date_from: time_period[:created_at].first,
+ date_to: time_period[:created_at].last
+ )
+ end
+
+ design_count = redis_usage_data do
+ counter.count_unique_events(
+ event_action: Gitlab::UsageDataCounters::TrackUniqueActions::DESIGN_ACTION,
+ date_from: time_period[:created_at].first,
+ date_to: time_period[:created_at].last
+ )
+ end
+
+ wiki_count = redis_usage_data do
+ counter.count_unique_events(
+ event_action: Gitlab::UsageDataCounters::TrackUniqueActions::WIKI_ACTION,
+ date_from: time_period[:created_at].first,
+ date_to: time_period[:created_at].last
+ )
+ end
+
+ {
+ action_monthly_active_users_project_repo: project_count,
+ action_monthly_active_users_design_management: design_count,
+ action_monthly_active_users_wiki_repo: wiki_count
+ }
+ end
+
private
+ def unique_visit_service
+ strong_memoize(:unique_visit_service) do
+ ::Gitlab::Analytics::UniqueVisits.new
+ end
+ end
+
def total_alert_issues
# Remove prometheus table queries once they are deprecated
# To be removed with https://gitlab.com/gitlab-org/gitlab/-/issues/217407.
[
- count(Issue.with_alert_management_alerts),
- count(::Issue.with_self_managed_prometheus_alert_events),
- count(::Issue.with_prometheus_alert_events)
+ count(Issue.with_alert_management_alerts, start: issue_minimum_id, finish: issue_maximum_id),
+ count(::Issue.with_self_managed_prometheus_alert_events, start: issue_minimum_id, finish: issue_maximum_id),
+ count(::Issue.with_prometheus_alert_events, start: issue_minimum_id, finish: issue_maximum_id)
].reduce(:+)
end
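
For orientation, calling `usage_activity_by_stage` twice, once with no time period (all-time) and once with `last_28_days_time_period`, yields two parallel trees in the payload, keyed `usage_activity_by_stage` and `usage_activity_by_stage_monthly`. A rough sketch of the resulting shape (stage keys taken from the code above; every count is invented for illustration):

# Shape only; the numbers are made up.
{
  usage_activity_by_stage: {
    create: { deploy_keys: 120, keys: 4_300, merge_requests: 900, snippets: 75 },
    manage: { events: 15_000, groups: 310, users_created: 42, omniauth_providers: ['google_oauth2'] }
    # ...configure, monitor, package, plan, release, secure, verify
  },
  usage_activity_by_stage_monthly: {
    create: { deploy_keys: 8, keys: 260, merge_requests: 120, merge_requests_users: 64, snippets: 9 }
    # ...same stages, restricted to the last 28 days
  }
}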
@@ -456,9 +648,66 @@ module Gitlab
end
end
- def clear_memoized_limits
+ def issue_minimum_id
+ strong_memoize(:issue_minimum_id) do
+ ::Issue.minimum(:id)
+ end
+ end
+
+ def issue_maximum_id
+ strong_memoize(:issue_maximum_id) do
+ ::Issue.maximum(:id)
+ end
+ end
+
+ def deployment_minimum_id
+ strong_memoize(:deployment_minimum_id) do
+ ::Deployment.minimum(:id)
+ end
+ end
+
+ def deployment_maximum_id
+ strong_memoize(:deployment_maximum_id) do
+ ::Deployment.maximum(:id)
+ end
+ end
+
+ def clear_memoized
+ clear_memoization(:issue_minimum_id)
+ clear_memoization(:issue_maximum_id)
clear_memoization(:user_minimum_id)
clear_memoization(:user_maximum_id)
+ clear_memoization(:unique_visit_service)
+ clear_memoization(:deployment_minimum_id)
+ clear_memoization(:deployment_maximum_id)
+ clear_memoization(:approval_merge_request_rule_minimum_id)
+ clear_memoization(:approval_merge_request_rule_maximum_id)
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def cluster_applications_user_distinct_count(applications, time_period)
+ distinct_count(applications.where(time_period).available.joins(:cluster), 'clusters.user_id')
+ end
+
+ def clusters_user_distinct_count(clusters, time_period)
+ distinct_count(clusters.where(time_period), :user_id)
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def omniauth_provider_names
+ ::Gitlab.config.omniauth.providers.map(&:name)
+ end
+
+ # LDAP provider names are set by customers and could include
+ # sensitive info (server names, etc). LDAP providers normally
+ # don't appear in the omniauth providers, but we filter them anyway to
+ # ensure no internal details leak via the usage ping.
+ def filtered_omniauth_provider_names
+ omniauth_provider_names.reject { |name| name.starts_with?('ldap') }
+ end
+
+ def deployment_count(relation)
+ count relation, start: deployment_minimum_id, finish: deployment_maximum_id
end
end
end
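
The memoized `*_minimum_id` / `*_maximum_id` pairs exist so the batched counters can be handed fixed bounds once, instead of re-querying MIN/MAX for every metric. The real `count` helper from Gitlab::Utils::UsageData is not shown in this diff; the hypothetical `batched_count` below only illustrates why explicit `start:`/`finish:` bounds help:

# Hypothetical illustration of bounded batch counting (not the real helper).
def batched_count(relation, column: :id, start:, finish:, batch_size: 100_000)
  return 0 if start.nil? || finish.nil?

  total = 0
  lower = start
  while lower <= finish
    upper = [lower + batch_size - 1, finish].min
    total += relation.where(column => lower..upper).count  # one small query per batch
    lower = upper + 1
  end
  total
end

# issue_minimum_id / issue_maximum_id are computed once and reused for every
# Issue-based metric, e.g.:
#   batched_count(Issue.with_alert_management_alerts,
#                 start: issue_minimum_id, finish: issue_maximum_id)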
diff --git a/lib/gitlab/usage_data/topology.rb b/lib/gitlab/usage_data/topology.rb
new file mode 100644
index 00000000000..4bca2cb07e4
--- /dev/null
+++ b/lib/gitlab/usage_data/topology.rb
@@ -0,0 +1,258 @@
+# frozen_string_literal: true
+
+module Gitlab
+ class UsageData
+ class Topology
+ include Gitlab::Utils::UsageData
+
+ JOB_TO_SERVICE_NAME = {
+ 'gitlab-rails' => 'web',
+ 'gitlab-sidekiq' => 'sidekiq',
+ 'gitlab-workhorse' => 'workhorse',
+ 'redis' => 'redis',
+ 'postgres' => 'postgres',
+ 'gitaly' => 'gitaly',
+ 'prometheus' => 'prometheus',
+ 'node' => 'node-exporter',
+ 'registry' => 'registry'
+ }.freeze
+
+ CollectionFailure = Struct.new(:query, :error) do
+ def to_h
+ { query => error }
+ end
+ end
+
+ def topology_usage_data
+ @failures = []
+ @instances = Set[]
+ topology_data, duration = measure_duration { topology_fetch_all_data }
+ {
+ topology: topology_data
+ .merge(duration_s: duration)
+ .merge(failures: @failures.map(&:to_h))
+ }
+ end
+
+ private
+
+ def topology_fetch_all_data
+ with_prometheus_client(fallback: {}) do |client|
+ {
+ application_requests_per_hour: topology_app_requests_per_hour(client),
+ nodes: topology_node_data(client)
+ }.compact
+ end
+ rescue => e
+ @failures << CollectionFailure.new('other', e.class.to_s)
+
+ {}
+ end
+
+ def topology_app_requests_per_hour(client)
+ result = query_safely('gitlab_usage_ping:ops:rate5m', 'app_requests', fallback: nil) do |query|
+ client.query(one_week_average(query)).first
+ end
+
+ return unless result
+
+ # the metric is recorded as a per-second rate
+ (result['value'].last.to_f * 1.hour).to_i
+ end
+
+ def topology_node_data(client)
+ # node-level data
+ by_instance_mem = topology_node_memory(client)
+ by_instance_cpus = topology_node_cpus(client)
+ by_instance_uname_info = topology_node_uname_info(client)
+ # service-level data
+ by_instance_by_job_by_type_memory = topology_all_service_memory(client)
+ by_instance_by_job_process_count = topology_all_service_process_count(client)
+ by_instance_by_job_server_types = topology_all_service_server_types(client)
+
+ @instances.map do |instance|
+ {
+ node_memory_total_bytes: by_instance_mem[instance],
+ node_cpus: by_instance_cpus[instance],
+ node_uname_info: by_instance_uname_info[instance],
+ node_services:
+ topology_node_services(
+ instance, by_instance_by_job_process_count, by_instance_by_job_by_type_memory, by_instance_by_job_server_types
+ )
+ }.compact
+ end
+ end
+
+ def topology_node_memory(client)
+ query_safely('gitlab_usage_ping:node_memory_total_bytes:avg', 'node_memory', fallback: {}) do |query|
+ aggregate_by_instance(client, one_week_average(query))
+ end
+ end
+
+ def topology_node_cpus(client)
+ query_safely('gitlab_usage_ping:node_cpus:count', 'node_cpus', fallback: {}) do |query|
+ aggregate_by_instance(client, one_week_average(query))
+ end
+ end
+
+ def topology_node_uname_info(client)
+ node_uname_info = query_safely('node_uname_info', 'node_uname_info', fallback: []) do |query|
+ client.query(query)
+ end
+
+ map_instance_labels(node_uname_info, %w(machine sysname release))
+ end
+
+ def topology_all_service_memory(client)
+ {
+ rss: topology_service_memory_rss(client),
+ uss: topology_service_memory_uss(client),
+ pss: topology_service_memory_pss(client)
+ }
+ end
+
+ def topology_service_memory_rss(client)
+ query_safely(
+ 'gitlab_usage_ping:node_service_process_resident_memory_bytes:avg', 'service_rss', fallback: {}
+ ) { |query| aggregate_by_labels(client, one_week_average(query)) }
+ end
+
+ def topology_service_memory_uss(client)
+ query_safely(
+ 'gitlab_usage_ping:node_service_process_unique_memory_bytes:avg', 'service_uss', fallback: {}
+ ) { |query| aggregate_by_labels(client, one_week_average(query)) }
+ end
+
+ def topology_service_memory_pss(client)
+ query_safely(
+ 'gitlab_usage_ping:node_service_process_proportional_memory_bytes:avg', 'service_pss', fallback: {}
+ ) { |query| aggregate_by_labels(client, one_week_average(query)) }
+ end
+
+ def topology_all_service_process_count(client)
+ query_safely(
+ 'gitlab_usage_ping:node_service_process:count', 'service_process_count', fallback: {}
+ ) { |query| aggregate_by_labels(client, one_week_average(query)) }
+ end
+
+ def topology_all_service_server_types(client)
+ query_safely(
+ 'gitlab_usage_ping:node_service_app_server_workers:sum', 'service_workers', fallback: {}
+ ) { |query| aggregate_by_labels(client, query) }
+ end
+
+ def query_safely(query, query_name, fallback:)
+ result = yield query
+
+ return result if result.present?
+
+ @failures << CollectionFailure.new(query_name, 'empty_result')
+ fallback
+ rescue => e
+ @failures << CollectionFailure.new(query_name, e.class.to_s)
+ fallback
+ end
+
+ def topology_node_services(instance, all_process_counts, all_process_memory, all_server_types)
+ # returns all node service data grouped by service name as the key
+ instance_service_data =
+ topology_instance_service_process_count(instance, all_process_counts)
+ .deep_merge(topology_instance_service_memory(instance, all_process_memory))
+ .deep_merge(topology_instance_service_server_types(instance, all_server_types))
+
+ # map to list of hashes where service names become values instead, and remove
+ # unknown services, since they might not be ours
+ instance_service_data.each_with_object([]) do |entry, list|
+ service, service_metrics = entry
+ gitlab_service = JOB_TO_SERVICE_NAME[service.to_s]
+ next unless gitlab_service
+
+ list << { name: gitlab_service }.merge(service_metrics)
+ end
+ end
+
+ def topology_instance_service_process_count(instance, all_instance_data)
+ topology_data_for_instance(instance, all_instance_data).to_h do |metric, count|
+ [metric['job'], { process_count: count }]
+ end
+ end
+
+ # Given a hash mapping memory set types to Prometheus response data, returns a hash
+ # mapping instance/node names to services and their respective memory use in bytes
+ def topology_instance_service_memory(instance, instance_data_by_type)
+ result = {}
+ instance_data_by_type.each do |memory_type, instance_data|
+ topology_data_for_instance(instance, instance_data).each do |metric, memory_bytes|
+ job = metric['job']
+ key = "process_memory_#{memory_type}".to_sym
+
+ result[job] ||= {}
+ result[job][key] ||= memory_bytes
+ end
+ end
+
+ result
+ end
+
+ def topology_instance_service_server_types(instance, all_instance_data)
+ topology_data_for_instance(instance, all_instance_data).to_h do |metric, _value|
+ [metric['job'], { server: metric['server'] }]
+ end
+ end
+
+ def topology_data_for_instance(instance, all_instance_data)
+ all_instance_data.filter { |metric, _value| metric['instance'] == instance }
+ end
+
+ def normalize_instance_label(instance)
+ normalize_localhost_address(drop_port_number(instance))
+ end
+
+ def normalize_localhost_address(instance)
+ ip_addr = IPAddr.new(instance)
+ is_local_ip = ip_addr.loopback? || ip_addr.to_i.zero?
+
+ is_local_ip ? 'localhost' : instance
+ rescue IPAddr::InvalidAddressError
+ # This most likely means it was a host name, not an IP address
+ instance
+ end
+
+ def drop_port_number(instance)
+ instance.gsub(/:\d+$/, '')
+ end
+
+ def normalize_and_track_instance(instance)
+ normalize_instance_label(instance).tap do |normalized_instance|
+ @instances << normalized_instance
+ end
+ end
+
+ def one_week_average(query)
+ "avg_over_time (#{query}[1w])"
+ end
+
+ def aggregate_by_instance(client, query)
+ client.aggregate(query) { |metric| normalize_and_track_instance(metric['instance']) }
+ end
+
+ # Will retain a composite key that values are mapped to
+ def aggregate_by_labels(client, query)
+ client.aggregate(query) do |metric|
+ metric['instance'] = normalize_and_track_instance(metric['instance'])
+ metric
+ end
+ end
+
+ # Given a query result vector, map each instance to a hash of the target labels' key/value pairs.
+ # @return [Hash] mapping instance to a hash of target label key/value pairs, or an empty hash if the input vector is empty
+ def map_instance_labels(query_result_vector, target_labels)
+ query_result_vector.to_h do |result|
+ key = normalize_and_track_instance(result['metric']['instance'])
+ value = result['metric'].slice(*target_labels).symbolize_keys
+ [key, value]
+ end
+ end
+ end
+ end
+end
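
Every Prometheus query in the new collector goes through `query_safely`, which converts both empty results and raised errors into a recorded `CollectionFailure` plus a fallback value, so a single broken recording rule cannot sink the whole topology payload. A condensed, self-contained sketch of that pattern (no Prometheus client involved; the `SafeCollector` class is invented for the example):

class SafeCollector
  CollectionFailure = Struct.new(:query, :error) do
    def to_h
      { query => error }
    end
  end

  attr_reader :failures

  def initialize
    @failures = []
  end

  def query_safely(query_name, fallback:)
    result = yield

    return result if result && !(result.respond_to?(:empty?) && result.empty?)

    @failures << CollectionFailure.new(query_name, 'empty_result')
    fallback
  rescue StandardError => e
    @failures << CollectionFailure.new(query_name, e.class.to_s)
    fallback
  end
end

collector = SafeCollector.new
collector.query_safely('node_memory', fallback: {}) { {} }             # empty result => fallback, failure recorded
collector.query_safely('app_requests', fallback: nil) { raise 'boom' } # error => fallback, failure recorded
collector.failures.map(&:to_h)
# => [{"node_memory"=>"empty_result"}, {"app_requests"=>"RuntimeError"}]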
diff --git a/lib/gitlab/usage_data_concerns/topology.rb b/lib/gitlab/usage_data_concerns/topology.rb
deleted file mode 100644
index 6e1d29f2a17..00000000000
--- a/lib/gitlab/usage_data_concerns/topology.rb
+++ /dev/null
@@ -1,137 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module UsageDataConcerns
- module Topology
- include Gitlab::Utils::UsageData
-
- JOB_TO_SERVICE_NAME = {
- 'gitlab-rails' => 'web',
- 'gitlab-sidekiq' => 'sidekiq',
- 'gitlab-workhorse' => 'workhorse',
- 'redis' => 'redis',
- 'postgres' => 'postgres',
- 'gitaly' => 'gitaly',
- 'prometheus' => 'prometheus',
- 'node' => 'node-exporter'
- }.freeze
-
- def topology_usage_data
- topology_data, duration = measure_duration do
- alt_usage_data(fallback: {}) do
- {
- nodes: topology_node_data
- }.compact
- end
- end
- { topology: topology_data.merge(duration_s: duration) }
- end
-
- private
-
- def topology_node_data
- with_prometheus_client do |client|
- # node-level data
- by_instance_mem = topology_node_memory(client)
- by_instance_cpus = topology_node_cpus(client)
- # service-level data
- by_instance_by_job_by_metric_memory = topology_all_service_memory(client)
- by_instance_by_job_process_count = topology_all_service_process_count(client)
-
- instances = Set.new(by_instance_mem.keys + by_instance_cpus.keys)
- instances.map do |instance|
- {
- node_memory_total_bytes: by_instance_mem[instance],
- node_cpus: by_instance_cpus[instance],
- node_services:
- topology_node_services(instance, by_instance_by_job_process_count, by_instance_by_job_by_metric_memory)
- }.compact
- end
- end
- end
-
- def topology_node_memory(client)
- aggregate_single(client, 'avg (node_memory_MemTotal_bytes) by (instance)')
- end
-
- def topology_node_cpus(client)
- aggregate_single(client, 'count (node_cpu_seconds_total{mode="idle"}) by (instance)')
- end
-
- def topology_all_service_memory(client)
- aggregate_many(
- client,
- 'avg ({__name__ =~ "(ruby_){0,1}process_(resident|unique|proportional)_memory_bytes", job != "gitlab_exporter_process"}) by (instance, job, __name__)'
- )
- end
-
- def topology_all_service_process_count(client)
- aggregate_many(client, 'count ({__name__ =~ "(ruby_){0,1}process_start_time_seconds", job != "gitlab_exporter_process"}) by (instance, job)')
- end
-
- def topology_node_services(instance, all_process_counts, all_process_memory)
- # returns all node service data grouped by service name as the key
- instance_service_data =
- topology_instance_service_process_count(instance, all_process_counts)
- .deep_merge(topology_instance_service_memory(instance, all_process_memory))
-
- # map to list of hashes where service names become values instead, and remove
- # unknown services, since they might not be ours
- instance_service_data.each_with_object([]) do |entry, list|
- service, service_metrics = entry
- gitlab_service = JOB_TO_SERVICE_NAME[service.to_s]
- next unless gitlab_service
-
- list << { name: gitlab_service }.merge(service_metrics)
- end
- end
-
- def topology_instance_service_process_count(instance, all_instance_data)
- topology_data_for_instance(instance, all_instance_data).to_h do |metric, count|
- [metric['job'], { process_count: count }]
- end
- end
-
- def topology_instance_service_memory(instance, all_instance_data)
- topology_data_for_instance(instance, all_instance_data).each_with_object({}) do |entry, hash|
- metric, memory = entry
- job = metric['job']
- key =
- case metric['__name__']
- when match_process_memory_metric_for_type('resident') then :process_memory_rss
- when match_process_memory_metric_for_type('unique') then :process_memory_uss
- when match_process_memory_metric_for_type('proportional') then :process_memory_pss
- end
-
- hash[job] ||= {}
- hash[job][key] ||= memory
- end
- end
-
- def match_process_memory_metric_for_type(type)
- /(ruby_){0,1}process_#{type}_memory_bytes/
- end
-
- def topology_data_for_instance(instance, all_instance_data)
- all_instance_data.filter { |metric, _value| metric['instance'] == instance }
- end
-
- def drop_port(instance)
- instance.gsub(/:.+$/, '')
- end
-
- # Will retain a single `instance` key that values are mapped to
- def aggregate_single(client, query)
- client.aggregate(query) { |metric| drop_port(metric['instance']) }
- end
-
- # Will retain a composite key that values are mapped to
- def aggregate_many(client, query)
- client.aggregate(query) do |metric|
- metric['instance'] = drop_port(metric['instance'])
- metric
- end
- end
- end
- end
-end
diff --git a/lib/gitlab/usage_data_counters/track_unique_actions.rb b/lib/gitlab/usage_data_counters/track_unique_actions.rb
new file mode 100644
index 00000000000..9fb5a29748e
--- /dev/null
+++ b/lib/gitlab/usage_data_counters/track_unique_actions.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module UsageDataCounters
+ module TrackUniqueActions
+ KEY_EXPIRY_LENGTH = 29.days
+ FEATURE_FLAG = :track_unique_actions
+
+ WIKI_ACTION = :wiki_action
+ DESIGN_ACTION = :design_action
+ PUSH_ACTION = :project_action
+
+ ACTION_TRANSFORMATIONS = HashWithIndifferentAccess.new({
+ wiki: {
+ created: WIKI_ACTION,
+ updated: WIKI_ACTION,
+ destroyed: WIKI_ACTION
+ },
+ design: {
+ created: DESIGN_ACTION,
+ updated: DESIGN_ACTION,
+ destroyed: DESIGN_ACTION
+ },
+ project: {
+ pushed: PUSH_ACTION
+ }
+ }).freeze
+
+ class << self
+ def track_action(event_action:, event_target:, author_id:, time: Time.zone.now)
+ return unless Gitlab::CurrentSettings.usage_ping_enabled
+ return unless Feature.enabled?(FEATURE_FLAG)
+ return unless valid_target?(event_target)
+ return unless valid_action?(event_action)
+
+ transformed_target = transform_target(event_target)
+ transformed_action = transform_action(event_action, transformed_target)
+
+ add_event(transformed_action, author_id, time)
+ end
+
+ def count_unique_events(event_action:, date_from:, date_to:)
+ keys = (date_from.to_date..date_to.to_date).map { |date| key(event_action, date) }
+
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.pfcount(*keys)
+ end
+ end
+
+ private
+
+ def transform_action(event_action, event_target)
+ ACTION_TRANSFORMATIONS.dig(event_target, event_action) || event_action
+ end
+
+ def transform_target(event_target)
+ Event::TARGET_TYPES.key(event_target)
+ end
+
+ def valid_target?(target)
+ Event::TARGET_TYPES.value?(target)
+ end
+
+ def valid_action?(action)
+ Event.actions.key?(action)
+ end
+
+ def key(event_action, date)
+ year_day = date.strftime('%G-%j')
+ "#{year_day}-{#{event_action}}"
+ end
+
+ def add_event(event_action, author_id, date)
+ target_key = key(event_action, date)
+
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.multi do |multi|
+ multi.pfadd(target_key, author_id)
+ multi.expire(target_key, KEY_EXPIRY_LENGTH)
+ end
+ end
+ end
+ end
+ end
+ end
+end
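
The counter keeps one Redis HyperLogLog per action per day (PFADD on write, PFCOUNT across the date range on read), which gives approximate distinct-author counts in near-constant memory. A minimal sketch against a raw Redis connection (assumes the `redis` gem and a reachable local server; the key naming mirrors the module above but is simplified):

require 'redis'
require 'date'

redis = Redis.new # assumes a local Redis is running

def key(action, date)
  "#{date.strftime('%G-%j')}-{#{action}}" # ISO year + day-of-year, one key per action per day
end

# record authors 1, 2 and 2 again for today's design_action
[1, 2, 2].each { |author_id| redis.pfadd(key(:design_action, Date.today), author_id) }
redis.expire(key(:design_action, Date.today), 29 * 24 * 60 * 60)

# count distinct authors over the last 7 days (approximate, via HyperLogLog)
keys = ((Date.today - 6)..Date.today).map { |d| key(:design_action, d) }
redis.pfcount(*keys) # => 2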
diff --git a/lib/gitlab/user_access.rb b/lib/gitlab/user_access.rb
index 5e0a4faeba8..1551548d9b4 100644
--- a/lib/gitlab/user_access.rb
+++ b/lib/gitlab/user_access.rb
@@ -92,12 +92,6 @@ module Gitlab
end
end
- def can_read_project?
- return false unless can_access_git?
-
- user.can?(:read_project, project)
- end
-
private
def permission_cache
diff --git a/lib/gitlab/utils.rb b/lib/gitlab/utils.rb
index e80cc51dc3b..8f5c1eda456 100644
--- a/lib/gitlab/utils.rb
+++ b/lib/gitlab/utils.rb
@@ -56,7 +56,7 @@ module Gitlab
# * Maximum length is 63 bytes
# * First/Last Character is not a hyphen
def slugify(str)
- return str.downcase
+ str.downcase
.gsub(/[^a-z0-9]/, '-')[0..62]
.gsub(/(\A-+|-+\z)/, '')
end
@@ -178,5 +178,15 @@ module Gitlab
.group_by(&:first)
.transform_values { |kvs| kvs.map(&:last) }
end
+
+ # This sort is stable (see https://en.wikipedia.org/wiki/Sorting_algorithm#Stability)
+ # unlike the bare Ruby sort_by method. Using plain sort_by can order equal
+ # elements differently on different platforms (e.g., x86_64-linux and
+ # x86_64-darwin18), which in turn produces different sorting results for
+ # those elements across platforms.
+ # This method uses a list item's original index position to break ties.
+ def stable_sort_by(list)
+ list.sort_by.with_index { |x, idx| [yield(x), idx] }
+ end
end
end
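
A quick illustration of why the index tiebreak matters: with equal sort keys, plain `sort_by` may order the tied elements differently across platforms, while the stable variant keeps their original order.

list = [
  { name: 'b', priority: 1 },
  { name: 'a', priority: 1 },
  { name: 'c', priority: 0 }
]

# plain sort_by: the relative order of the two priority-1 items is unspecified
list.sort_by { |item| item[:priority] }

# stable: ties keep their original order ('b' before 'a')
list.sort_by.with_index { |item, idx| [item[:priority], idx] }
# => [{:name=>"c", :priority=>0}, {:name=>"b", :priority=>1}, {:name=>"a", :priority=>1}]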
diff --git a/lib/gitlab/utils/markdown.rb b/lib/gitlab/utils/markdown.rb
new file mode 100644
index 00000000000..82c4a0e3b23
--- /dev/null
+++ b/lib/gitlab/utils/markdown.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Utils
+ module Markdown
+ PUNCTUATION_REGEXP = /[^\p{Word}\- ]/u.freeze
+
+ def string_to_anchor(string)
+ string
+ .strip
+ .downcase
+ .gsub(PUNCTUATION_REGEXP, '') # remove punctuation
+ .tr(' ', '-') # replace spaces with dash
+ .squeeze('-') # replace multiple dashes with one
+ .gsub(/\A(\d+)\z/, 'anchor-\1') # digits-only hrefs conflict with issue refs
+ end
+ end
+ end
+end
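
A few examples of what the pipeline above produces (assuming an object that includes Gitlab::Utils::Markdown):

string_to_anchor('Usage ping: what & why?') # => "usage-ping-what-why"
string_to_anchor('Hello -- world!')         # => "hello-world"
string_to_anchor('123')                     # => "anchor-123" (avoids clashing with issue references)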
diff --git a/lib/gitlab/utils/usage_data.rb b/lib/gitlab/utils/usage_data.rb
index afc4e000977..625e1076a54 100644
--- a/lib/gitlab/utils/usage_data.rb
+++ b/lib/gitlab/utils/usage_data.rb
@@ -77,11 +77,11 @@ module Gitlab
end
end
- def with_prometheus_client
- if Gitlab::Prometheus::Internal.prometheus_enabled?
- prometheus_address = Gitlab::Prometheus::Internal.uri
- yield Gitlab::PrometheusClient.new(prometheus_address, allow_local_requests: true)
- end
+ def with_prometheus_client(fallback: nil)
+ return fallback unless Gitlab::Prometheus::Internal.prometheus_enabled?
+
+ prometheus_address = Gitlab::Prometheus::Internal.uri
+ yield Gitlab::PrometheusClient.new(prometheus_address, allow_local_requests: true)
end
def measure_duration
@@ -92,6 +92,10 @@ module Gitlab
[result, duration]
end
+ def with_finished_at(key, &block)
+ yield.merge(key => Time.now)
+ end
+
private
def redis_usage_counter
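
Two small notes on the helpers changed above: `with_prometheus_client` now returns an explicit `fallback` instead of nil when Prometheus is disabled, and `with_finished_at` simply appends a completion timestamp to whatever hash the block builds, which is how `recording_ce_finished_at` ends up in the usage payload. A quick usage illustration of the latter:

def with_finished_at(key)
  yield.merge(key => Time.now)
end

with_finished_at(:recording_ce_finished_at) do
  { counts: { issues: 10 } }.merge(edition: 'CE')
end
# => { counts: { issues: 10 }, edition: "CE", recording_ce_finished_at: <current time> }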
diff --git a/lib/gitlab/workhorse.rb b/lib/gitlab/workhorse.rb
index c91d1b05440..6d935bb8828 100644
--- a/lib/gitlab/workhorse.rb
+++ b/lib/gitlab/workhorse.rb
@@ -216,8 +216,8 @@ module Gitlab
def gitaly_server_hash(repository)
{
- address: Gitlab::GitalyClient.address(repository.container.repository_storage),
- token: Gitlab::GitalyClient.token(repository.container.repository_storage),
+ address: Gitlab::GitalyClient.address(repository.shard),
+ token: Gitlab::GitalyClient.token(repository.shard),
features: Feature::Gitaly.server_feature_flags
}
end
diff --git a/lib/gitlab_danger.rb b/lib/gitlab_danger.rb
index 1c1763454a5..a98ac9200da 100644
--- a/lib/gitlab_danger.rb
+++ b/lib/gitlab_danger.rb
@@ -21,6 +21,7 @@ class GitlabDanger
specs
roulette
ce_ee_vue_templates
+ sidekiq_queues
].freeze
MESSAGE_PREFIX = '==>'.freeze
diff --git a/lib/google_api/auth.rb b/lib/google_api/auth.rb
index 319e5d2063c..7d9ff579c92 100644
--- a/lib/google_api/auth.rb
+++ b/lib/google_api/auth.rb
@@ -22,7 +22,7 @@ module GoogleApi
def get_token(code)
ret = client.auth_code.get_token(code, redirect_uri: redirect_uri)
- return ret.token, ret.expires_at
+ [ret.token, ret.expires_at]
end
protected
diff --git a/lib/kramdown/converter/commonmark.rb b/lib/kramdown/converter/commonmark.rb
new file mode 100644
index 00000000000..4abb34cc008
--- /dev/null
+++ b/lib/kramdown/converter/commonmark.rb
@@ -0,0 +1,109 @@
+# frozen_string_literal: true
+
+module Kramdown
+ module Converter
+ # Overrides the base Kramdown converter to add any special
+ # behaviour for CommonMark.
+ #
+ # Currently we support an option `html_tables` that outputs
+ # an HTML table instead of a Markdown table. This is to support
+ # the complex tables we may be given, such as those coming from ADF.
+ #
+ # Note: this is only an initial implementation. We currently don't
+ # strip out IALs or other kramdown-specific syntax.
+ class Commonmark < ::Kramdown::Converter::Kramdown
+ # replaces the ^ used in kramdown. This forces the current
+ # block to end, so that a different list or codeblock can be
+ # started. https://kramdown.gettalong.org/syntax.html#eob-marker
+ END_OF_BLOCK = '<!-- -->'
+
+ def convert(el, opts = { indent: 0 })
+ res = super
+
+ if [:ul, :dl, :ol, :codeblock].include?(el.type) && opts[:next] &&
+ ([el.type, :codeblock].include?(opts[:next].type) ||
+ (opts[:next].type == :blank && opts[:nnext] &&
+ [el.type, :codeblock].include?(opts[:nnext].type)))
+ # replace the end of block character
+ res.sub!(/\^\n\n\z/m, "#{END_OF_BLOCK}\n\n")
+ end
+
+ res
+ end
+
+ def convert_codeblock(el, _opts)
+ # Although tildes are supported in CommonMark, backticks are more common
+ "```#{el.options[:lang]}\n" +
+ el.value.split(/\n/).map {|l| l.empty? ? "" : "#{l}" }.join("\n") +
+ "\n```\n\n"
+ end
+
+ def convert_li(el, opts)
+ res = super
+
+ if el.children.first && el.children.first.type == :p && !el.children.first.options[:transparent]
+ if el.children.size == 1 && @stack.last.children.last == el &&
+ (@stack.last.children.any? {|c| c.children.first.type != :p } || @stack.last.children.size == 1)
+ # replace the end of block character
+ res.sub!(/\^\n\z/m, "#{END_OF_BLOCK}\n")
+ end
+ end
+
+ res
+ end
+
+ def convert_table(el, opts)
+ return super unless @options[:html_tables]
+
+ opts[:alignment] = el.options[:alignment]
+ result = inner(el, opts)
+
+ "<table>\n#{result}</table>\n\n"
+ end
+
+ def convert_thead(el, opts)
+ return super unless @options[:html_tables]
+
+ "<thead>\n#{inner(el, opts)}</thead>\n"
+ end
+
+ def convert_tbody(el, opts)
+ return super unless @options[:html_tables]
+
+ "<tbody>\n#{inner(el, opts)}</tbody>\n"
+ end
+
+ def convert_tfoot(el, opts)
+ return super unless @options[:html_tables]
+
+ "<tfoot>\n#{inner(el, opts)}</tfoot>\n"
+ end
+
+ def convert_tr(el, opts)
+ return super unless @options[:html_tables]
+
+ "<tr>\n#{el.children.map {|c| convert(c, opts) }.join}</tr>\n"
+ end
+
+ def convert_td(el, opts)
+ return super unless @options[:html_tables]
+
+ # We need to add two linefeeds in order for any inner text to
+ # be processed as markdown. The HTML block must be "closed",
+ # as referenced in the CommonMark spec
+ # @see https://spec.commonmark.org/0.29/#html-blocks
+ "<td>\n\n#{inner(el, opts)}</td>\n"
+ end
+
+ def convert_th(el, opts)
+ return super unless @options[:html_tables]
+
+ # We need to add two linefeeds in order for any inner text to
+ # be processed as markdown. The HTML block must be "closed",
+ # as referenced in the CommonMark spec
+ # @see https://spec.commonmark.org/0.29/#html-blocks
+ "<th>\n\n#{inner(el, opts)}</th>\n"
+ end
+ end
+ end
+end
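
Taken together with the ADF parser added below, the expected wiring is a Kramdown round trip: parse ADF JSON into a Kramdown AST, then convert it with this CommonMark converter. A hedged sketch of that usage (assuming Kramdown resolves the `AtlassianDocumentFormat` parser and `Commonmark` converter classes defined in this patch; the exact call site elsewhere in the codebase may differ):

require 'kramdown'
# plus the Kramdown::Parser::AtlassianDocumentFormat and
# Kramdown::Converter::Commonmark classes added in this patch

adf_json = <<~JSON
  { "type": "doc", "version": 1,
    "content": [
      { "type": "paragraph",
        "content": [{ "type": "text", "text": "Hello from Jira" }] }
    ] }
JSON

doc = Kramdown::Document.new(adf_json, input: 'AtlassianDocumentFormat', html_tables: true)
doc.to_commonmark # => "Hello from Jira\n\n" (roughly)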
diff --git a/lib/kramdown/parser/atlassian_document_format.rb b/lib/kramdown/parser/atlassian_document_format.rb
new file mode 100644
index 00000000000..4ceb879a04c
--- /dev/null
+++ b/lib/kramdown/parser/atlassian_document_format.rb
@@ -0,0 +1,381 @@
+# frozen_string_literal: true
+
+module Kramdown
+ module Parser
+ # Parses an Atlassian Document Format (ADF) JSON document into a
+ # Kramdown AST tree, for conversion to another format.
+ # The primary goal is to convert it into GitLab Markdown.
+ #
+ # This parser does NOT resolve external resources, such as media/attachments.
+ # A special url is generated for media based on the id, for example
+ # ![jira-10050-field-description](adf-media://79411c6b-50e0-477f-b4ed-ac3a5887750c)
+ # so that a later filter/process can resolve those.
+ #
+ # @see https://developer.atlassian.com/cloud/jira/platform/apis/document/structure/ ADF Document Structure
+ # @see https://developer.atlassian.com/cloud/jira/platform/apis/document/playground/ ADF Playground
+ # @see https://developer.atlassian.com/cloud/jira/platform/apis/document/viewer/ ADF Viewer
+ class AtlassianDocumentFormat < Kramdown::Parser::Base
+ unless defined?(TOP_LEVEL_BLOCK_NODES)
+ TOP_LEVEL_BLOCK_NODES = %w[blockquote
+ bulletList
+ codeBlock
+ heading
+ mediaGroup
+ mediaSingle
+ orderedList
+ panel
+ paragraph
+ rule
+ table].freeze
+
+ CHILD_BLOCK_NODES = %w[listItem
+ media
+ table_cell
+ table_header
+ table_row].freeze
+
+ INLINE_NODES = %w[emoji
+ hardBreak
+ inlineCard
+ mention
+ text].freeze
+
+ MARKS = %w[code
+ em
+ link
+ strike
+ strong
+ subsup
+ textColor
+ underline].freeze
+
+ TABLE_CELL_NODES = %w[blockquote
+ bulletList
+ codeBlock
+ heading
+ mediaGroup
+ orderedList
+ panel
+ paragraph
+ rule].freeze
+
+ LIST_ITEM_NODES = %w[bulletList
+ codeBlock
+ mediaSingle
+ orderedList
+ paragraph].freeze
+
+ PANEL_NODES = %w[bulletList
+ heading
+ orderedList
+ paragraph].freeze
+
+ PANEL_EMOJIS = { info: ':information_source:',
+ note: ':notepad_spiral:',
+ warning: ':warning:',
+ success: ':white_check_mark:',
+ error: ':octagonal_sign:' }.freeze
+
+ # The default language for code blocks is `java`, as indicated in the
+ # references below. You can't change the default in Jira, although a comment
+ # there indicates that Confluence can set the default language.
+ # @see https://jira.atlassian.com/secure/WikiRendererHelpAction.jspa?section=advanced&_ga=2.5135221.773220073.1591894917-438867908.1591894917
+ # @see https://jira.atlassian.com/browse/JRASERVER-29184?focusedCommentId=832255&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-832255
+ CODE_BLOCK_DEFAULT_LANGUAGE = 'java'
+ end
+
+ def parse
+ ast = Gitlab::Json.parse(@source)
+
+ validate_document(ast)
+
+ process_content(@root, ast, TOP_LEVEL_BLOCK_NODES)
+ rescue ::JSON::ParserError => e
+ msg = 'Invalid Atlassian Document Format JSON'
+ Gitlab::AppLogger.error msg
+ Gitlab::AppLogger.error e
+
+ raise ::Kramdown::Error, msg
+ end
+
+ def process_content(element, ast_node, allowed_types)
+ ast_node['content'].each do |node|
+ next unless allowed_types.include?(node['type'])
+
+ public_send("process_#{node['type'].underscore}", element, node) # rubocop:disable GitlabSecurity/PublicSend
+ end
+ end
+
+ def process_blockquote(element, ast_node)
+ new_element = Element.new(:blockquote)
+ element.children << new_element
+
+ process_content(new_element, ast_node, TOP_LEVEL_BLOCK_NODES)
+ end
+
+ def process_bullet_list(element, ast_node)
+ new_element = Element.new(:ul)
+ element.children << new_element
+
+ process_content(new_element, ast_node, %w[listItem])
+ end
+
+ def process_code_block(element, ast_node)
+ code_text = gather_text(ast_node)
+ lang = ast_node.dig('attrs', 'language') || CODE_BLOCK_DEFAULT_LANGUAGE
+
+ element.children << Element.new(:codeblock, code_text, {}, { lang: lang })
+ end
+
+ def process_emoji(element, ast_node)
+ emoji = ast_node.dig('attrs', 'text') || ast_node.dig('attrs', 'shortName')
+ return unless emoji
+
+ add_text(emoji, element, :text)
+ end
+
+ def process_hard_break(element, ast_node)
+ element.children << Element.new(:br)
+ end
+
+ def process_heading(element, ast_node)
+ level = ast_node.dig('attrs', 'level').to_i.clamp(1, 6)
+ options = { level: level }
+ new_element = Element.new(:header, nil, nil, options)
+ element.children << new_element
+
+ process_content(new_element, ast_node, INLINE_NODES)
+ extract_element_text(new_element, new_element.options[:raw_text] = +'')
+ end
+
+ def process_inline_card(element, ast_node)
+ url = ast_node.dig('attrs', 'url')
+ data = ast_node.dig('attrs', 'data')
+
+ if url
+ # we don't pull a description from the link and create a panel,
+ # just convert to a normal link
+ new_element = Element.new(:text, url)
+ element.children << wrap_element(new_element, :a, nil, { 'href' => url })
+ elsif data
+ # data is JSONLD (https://json-ld.org/), so for now output
+ # as a codespan of text, with `adf-inlineCard: ` at the start
+ text = "adf-inlineCard: #{data}"
+ element.children << Element.new(:codespan, text, nil, { lang: 'adf-inlinecard' })
+ end
+ end
+
+ def process_list_item(element, ast_node)
+ new_element = Element.new(:li)
+ element.children << new_element
+
+ process_content(new_element, ast_node, LIST_ITEM_NODES)
+ end
+
+ def process_media(element, ast_node)
+ media_url = "adf-media://#{ast_node['attrs']['id']}"
+
+ case ast_node['attrs']['type']
+ when 'file'
+ attrs = { 'src' => media_url, 'alt' => ast_node['attrs']['collection'] }
+ media_element = Element.new(:img, nil, attrs)
+ when 'link'
+ attrs = { 'href' => media_url }
+ media_element = wrap_element(Element.new(:text, media_url), :a, nil, attrs)
+ end
+
+ media_element = wrap_element(media_element, :p)
+ element.children << media_element
+ end
+
+      # Wraps a single media element in a paragraph.
+      # Currently ignores attrs.layout and attrs.width.
+ def process_media_single(element, ast_node)
+ new_element = Element.new(:p)
+ element.children << new_element
+
+ process_content(new_element, ast_node, %w[media])
+ end
+
+      # Wraps a media group in an unordered list.
+      # Currently ignores attrs.layout and attrs.width.
+ def process_media_group(element, ast_node)
+ ul_element = Element.new(:ul)
+ element.children << ul_element
+
+ ast_node['content'].each do |node|
+ next unless node['type'] == 'media'
+
+ li_element = Element.new(:li)
+ ul_element.children << li_element
+
+ process_media(li_element, node)
+ end
+ end
+
+ def process_mention(element, ast_node)
+        # Prefix with `@adf-mention:` since there is no guarantee the mention
+        # is a valid username in our system. This gives us an opportunity to
+        # replace it later. A mention name can contain spaces, so double-quote it.
+ mention_text = ast_node.dig('attrs', 'text')&.gsub('@', '')
+ mention_text = %Q("#{mention_text}") if mention_text.match?(/ /)
+ mention_text = %Q(@adf-mention:#{mention_text})
+
+ add_text(mention_text, element, :text)
+ end
+
+ def process_ordered_list(element, ast_node)
+ # `attrs.order` is not supported in the Kramdown AST
+ new_element = Element.new(:ol)
+ element.children << new_element
+
+ process_content(new_element, ast_node, %w[listItem])
+ end
+
+      # Since we have no direct equivalent of an ADF panel, render it as a
+      # blockquote prefixed with an emoji matching the panel type
+      # (e.g. :warning: for a warning panel).
+ def process_panel(element, ast_node)
+ panel_type = ast_node.dig('attrs', 'panelType')
+ return unless %w[info note warning success error].include?(panel_type)
+
+ panel_header_text = "#{PANEL_EMOJIS[panel_type.to_sym]} "
+ panel_header_element = Element.new(:text, panel_header_text)
+
+ new_element = Element.new(:blockquote)
+ new_element.children << panel_header_element
+ element.children << new_element
+
+ process_content(new_element, ast_node, PANEL_NODES)
+ end
+
+ def process_paragraph(element, ast_node)
+ new_element = Element.new(:p)
+ element.children << new_element
+
+ process_content(new_element, ast_node, INLINE_NODES)
+ end
+
+ def process_rule(element, ast_node)
+ element.children << Element.new(:hr)
+ end
+
+ def process_table(element, ast_node)
+ table = Element.new(:table, nil, nil, { alignment: [:default, :default] })
+ element.children << table
+
+ tbody = Element.new(:tbody)
+ table.children << tbody
+
+ process_content(tbody, ast_node, %w[tableRow])
+ end
+
+      # We ignore the attributes attrs.background, attrs.colspan,
+      # attrs.colwidth, and attrs.rowspan.
+ def process_table_cell(element, ast_node)
+ new_element = Element.new(:td)
+ element.children << new_element
+
+ process_content(new_element, ast_node, TABLE_CELL_NODES)
+ end
+
+      # We ignore the attributes attrs.background, attrs.colspan,
+      # attrs.colwidth, and attrs.rowspan.
+ def process_table_header(element, ast_node)
+ new_element = Element.new(:th)
+ element.children << new_element
+
+ process_content(new_element, ast_node, TABLE_CELL_NODES)
+ end
+
+ def process_table_row(element, ast_node)
+ new_element = Element.new(:tr)
+ element.children << new_element
+
+ process_content(new_element, ast_node, %w[tableHeader tableCell])
+ end
+
+ def process_text(element, ast_node)
+ new_element = Element.new(:text, ast_node['text'])
+ new_element = apply_marks(new_element, ast_node, MARKS)
+ element.children << new_element
+ end
+
+ private
+
+ def validate_document(ast)
+ return if ast['type'] == 'doc'
+
+ raise ::JSON::ParserError, 'missing doc node'
+ end
+
+      # ADF marks are an attribute on the node. For Kramdown, we have to wrap
+      # the node's element in an additional element for each mark.
+ def apply_marks(element, ast_node, allowed_types)
+ return element unless ast_node['marks']
+
+ new_element = element
+
+ ast_node['marks'].each do |mark|
+ next unless allowed_types.include?(mark['type'])
+
+ case mark['type']
+ when 'code'
+ new_element = Element.new(:codespan, ast_node['text'])
+ when 'em'
+ new_element = wrap_element(new_element, :em)
+ when 'link'
+ attrs = { 'href' => mark.dig('attrs', 'href') }
+ attrs['title'] = mark.dig('attrs', 'title')
+ new_element = wrap_element(new_element, :a, nil, attrs)
+ when 'strike'
+ new_element = wrap_element(new_element, :html_element, 'del', {}, category: :span)
+ when 'strong'
+ new_element = wrap_element(new_element, :strong)
+ when 'subsup'
+ type = mark.dig('attrs', 'type')
+
+ case type
+ when 'sub'
+ new_element = wrap_element(new_element, :html_element, 'sub', {}, category: :span)
+ when 'sup'
+ new_element = wrap_element(new_element, :html_element, 'sup', {}, category: :span)
+ else
+ next
+ end
+ when 'textColor'
+ color = mark.dig('attrs', 'color')
+ new_element = wrap_element(new_element, :html_element, 'span', { color: color }, category: :span)
+ when 'underline'
+ new_element = wrap_element(new_element, :html_element, 'u', {}, category: :span)
+ else
+ next
+ end
+ end
+
+ new_element
+ end
+
+ def wrap_element(element, type, *args)
+ wrapper = Element.new(type, *args)
+ wrapper.children << element
+ wrapper
+ end
+
+ def extract_element_text(element, raw)
+ raw << element.value.to_s if element.type == :text
+ element.children.each { |c| extract_element_text(c, raw) }
+ end
+
+ def gather_text(ast_node)
+ ast_node['content'].inject('') do |memo, node|
+ node['type'] == 'text' ? (memo + node['text']) : memo
+ end
+ end
+
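+      # Catch-all for ADF node types that have no process_* handler yet;
+      # raising makes unsupported nodes visible instead of silently dropping them.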
+ def method_missing(method, *args)
+ raise NotImplementedError, "method `#{method}` not implemented yet"
+ end
+ end
+ end
+end
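For reference, the shape of ADF input this parser walks looks like the sketch below. It is only a sketch: the class name, its registration with Kramdown, and the TOP_LEVEL_BLOCK_NODES and INLINE_NODES constants live outside this hunk, so only the dispatch behaviour shown above is assumed.

    adf = {
      'type' => 'doc',
      'version' => 1,
      'content' => [
        { 'type' => 'paragraph',
          'content' => [
            { 'type' => 'text', 'text' => 'Ship it ' },
            { 'type' => 'text', 'text' => 'today',
              'marks' => [{ 'type' => 'strong' }] }
          ] }
      ]
    }

    # `parse` validates the top-level `doc` node, then `process_content`
    # dispatches each child to `process_<type>`: here `process_paragraph`,
    # then `process_text`, which wraps the second text node in a :strong
    # element via `apply_marks`.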
diff --git a/lib/learn_gitlab.rb b/lib/learn_gitlab.rb
new file mode 100644
index 00000000000..771083193d1
--- /dev/null
+++ b/lib/learn_gitlab.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+class LearnGitlab
+ PROJECT_NAME = 'Learn GitLab'.freeze
+ BOARD_NAME = 'GitLab onboarding'.freeze
+ LABEL_NAME = 'Novice'.freeze
+
+ def initialize(current_user)
+ @current_user = current_user
+ end
+
+ def available?
+ project && board && label
+ end
+
+ def project
+ @project ||= current_user.projects.find_by_name(PROJECT_NAME)
+ end
+
+ def board
+ return unless project
+
+ @board ||= project.boards.find_by_name(BOARD_NAME)
+ end
+
+ def label
+ return unless project
+
+ @label ||= project.labels.find_by_name(LABEL_NAME)
+ end
+
+ private
+
+ attr_reader :current_user
+end
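A usage sketch, assuming `current_user` is a signed-in User whose projects include the seeded onboarding records:

    learn = LearnGitlab.new(current_user)

    if learn.available?
      learn.project # the 'Learn GitLab' project
      learn.board   # the 'GitLab onboarding' board in that project
      learn.label   # the 'Novice' label in that project
    end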
diff --git a/lib/object_storage/direct_upload.rb b/lib/object_storage/direct_upload.rb
index 5eab882039d..76f92f62e9c 100644
--- a/lib/object_storage/direct_upload.rb
+++ b/lib/object_storage/direct_upload.rb
@@ -23,9 +23,9 @@ module ObjectStorage
MINIMUM_MULTIPART_SIZE = 5.megabytes
attr_reader :credentials, :bucket_name, :object_name
- attr_reader :has_length, :maximum_size
+ attr_reader :has_length, :maximum_size, :consolidated_settings
- def initialize(credentials, bucket_name, object_name, has_length:, maximum_size: nil)
+ def initialize(credentials, bucket_name, object_name, has_length:, maximum_size: nil, consolidated_settings: false)
unless has_length
raise ArgumentError, 'maximum_size has to be specified if length is unknown' unless maximum_size
end
@@ -35,6 +35,7 @@ module ObjectStorage
@object_name = object_name
@has_length = has_length
@maximum_size = maximum_size
+ @consolidated_settings = consolidated_settings
end
def to_hash
@@ -80,10 +81,12 @@ module ObjectStorage
end
def use_workhorse_s3_client?
- Feature.enabled?(:use_workhorse_s3_client, default_enabled: true) &&
- credentials.fetch(:use_iam_profile, false) &&
- # The Golang AWS SDK does not support V2 signatures
- credentials.fetch(:aws_signature_version, 4).to_i >= 4
+ return false unless Feature.enabled?(:use_workhorse_s3_client, default_enabled: true)
+ return false unless credentials.fetch(:use_iam_profile, false) || consolidated_settings
+ # The Golang AWS SDK does not support V2 signatures
+ return false unless credentials.fetch(:aws_signature_version, 4).to_i >= 4
+
+ true
end
def provider
@@ -92,7 +95,11 @@ module ObjectStorage
# Implements https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectGET.html
def get_url
- connection.get_object_url(bucket_name, object_name, expire_at)
+ if google?
+ connection.get_object_https_url(bucket_name, object_name, expire_at)
+ else
+ connection.get_object_url(bucket_name, object_name, expire_at)
+ end
end
# Implements https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectDELETE.html
@@ -166,6 +173,10 @@ module ObjectStorage
provider == 'AWS'
end
+ def google?
+ provider == 'Google'
+ end
+
def requires_multipart_upload?
aws? && !has_length
end
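A construction sketch for the new keyword argument (the credential keys are illustrative and would normally come from the object storage configuration; this is not the exact call site):

    upload = ObjectStorage::DirectUpload.new(
      { provider: 'AWS', use_iam_profile: false, aws_signature_version: 4 },
      'artifacts-bucket',
      'tmp/uploads/abc123',
      has_length: true,
      consolidated_settings: true
    )

    # With consolidated_settings: true, use_workhorse_s3_client? no longer
    # requires use_iam_profile; only the feature flag and a V4-capable
    # signature version are needed.
    upload.to_hash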
diff --git a/lib/pager_duty/webhook_payload_parser.rb b/lib/pager_duty/webhook_payload_parser.rb
new file mode 100644
index 00000000000..573fb36f0ca
--- /dev/null
+++ b/lib/pager_duty/webhook_payload_parser.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+module PagerDuty
+ class WebhookPayloadParser
+ def initialize(payload)
+ @payload = payload
+ end
+
+ def self.call(payload)
+ new(payload).call
+ end
+
+ def call
+ Array(payload['messages']).map { |msg| parse_message(msg) }
+ end
+
+ private
+
+ attr_reader :payload
+
+ def parse_message(message)
+ {
+ 'event' => message['event'],
+ 'incident' => parse_incident(message['incident'])
+ }
+ end
+
+ def parse_incident(incident)
+ return {} if incident.blank?
+
+ {
+ 'url' => incident['html_url'],
+ 'incident_number' => incident['incident_number'],
+ 'title' => incident['title'],
+ 'status' => incident['status'],
+ 'created_at' => incident['created_at'],
+ 'urgency' => incident['urgency'],
+ 'incident_key' => incident['incident_key'],
+ 'assignees' => reject_empty(parse_assignees(incident)),
+ 'impacted_services' => reject_empty(parse_impacted_services(incident))
+ }
+ end
+
+ def parse_assignees(incident)
+ Array(incident['assignments']).map do |a|
+ {
+ 'summary' => a.dig('assignee', 'summary'),
+ 'url' => a.dig('assignee', 'html_url')
+ }
+ end
+ end
+
+ def parse_impacted_services(incident)
+ Array(incident['impacted_services']).map do |is|
+ {
+ 'summary' => is['summary'],
+ 'url' => is['html_url']
+ }
+ end
+ end
+
+ def reject_empty(entities)
+ Array(entities).reject { |e| e['summary'].blank? && e['url'].blank? }
+ end
+ end
+end
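A minimal parsing sketch (field values are illustrative; the keys mirror the PagerDuty webhook payload shape handled above):

    payload = {
      'messages' => [
        {
          'event' => 'incident.trigger',
          'incident' => {
            'html_url' => 'https://acme.pagerduty.com/incidents/PT4KHLK',
            'incident_number' => 33,
            'title' => 'My new incident',
            'status' => 'triggered',
            'urgency' => 'high',
            'assignments' => [
              { 'assignee' => { 'summary' => 'Laura Haley',
                                'html_url' => 'https://acme.pagerduty.com/users/P553OPV' } }
            ],
            'impacted_services' => []
          }
        }
      ]
    }

    PagerDuty::WebhookPayloadParser.call(payload)
    # => one hash per message, with 'url', 'incident_number', 'assignees', etc.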
diff --git a/lib/peek/views/elasticsearch.rb b/lib/peek/views/elasticsearch.rb
index 626a6fb1316..4d82a6eac4f 100644
--- a/lib/peek/views/elasticsearch.rb
+++ b/lib/peek/views/elasticsearch.rb
@@ -40,7 +40,7 @@ module Peek
end
def format_call_details(call)
- super.merge(request: "#{call[:method]} #{call[:path]}")
+ super.merge(request: "#{call[:method]} #{call[:path]}?#{call[:params].to_query}")
end
end
end
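The effect of this change, sketched with illustrative values (Hash#to_query comes from ActiveSupport):

    call = { method: 'GET', path: 'gitlab-production/_search', params: { routing: 'project_1' } }
    "#{call[:method]} #{call[:path]}?#{call[:params].to_query}"
    # => "GET gitlab-production/_search?routing=project_1"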
diff --git a/lib/product_analytics/collector_app.rb b/lib/product_analytics/collector_app.rb
new file mode 100644
index 00000000000..cf971eef4b6
--- /dev/null
+++ b/lib/product_analytics/collector_app.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+module ProductAnalytics
+ class CollectorApp
+ def call(env)
+ request = Rack::Request.new(env)
+ params = request.params
+
+ return not_found unless EventParams.has_required_params?(params)
+
+      # The product analytics feature is behind a flag and is disabled by default.
+      # We expect a limited number of projects to have this feature enabled in the
+      # first release. Since the collector has no authentication, we temporarily
+      # prevent recording events for projects that don't have the feature enabled.
+      # As feature adoption increases, this check will be removed for better performance.
+ project = Project.find(params['aid'].to_i)
+ return not_found unless Feature.enabled?(:product_analytics, project, default_enabled: false)
+
+      # The Snowplow tracker has its own event format.
+      # We need to convert it to match the schema of our database.
+ event_params = EventParams.parse_event_params(params)
+
+ if ProductAnalyticsEvent.create(event_params)
+ ok
+ else
+ not_found
+ end
+ rescue ActiveRecord::InvalidForeignKey, ActiveRecord::RecordNotFound
+ not_found
+ end
+
+ def ok
+ [200, {}, []]
+ end
+
+ def not_found
+ [404, {}, []]
+ end
+ end
+end
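A hedged sketch of exercising this Rack endpoint from a Rails console or request spec; the mount path and parameter values are illustrative, since the routing for this app is not part of this hunk:

    require 'rack/mock'

    app = ProductAnalytics::CollectorApp.new
    env = Rack::MockRequest.env_for('/-/collector/i?aid=42&eid=c3f7a1de-1111-4222-8333-944444444444&p=web')

    status, _headers, _body = app.call(env)
    # 200 when project 42 exists, the :product_analytics flag is enabled for it,
    # and the event row is created; 404 otherwise.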
diff --git a/lib/product_analytics/event_params.rb b/lib/product_analytics/event_params.rb
new file mode 100644
index 00000000000..d938fe1f594
--- /dev/null
+++ b/lib/product_analytics/event_params.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+module ProductAnalytics
+  # Converts params from the Snowplow tracker into params compatible with the
+  # GitLab ProductAnalyticsEvent model. The field naming corresponds to the
+  # Snowplow event model; only project_id is GitLab-specific.
+  #
+  # For information on what each field means, see the following resources:
+ # * Snowplow tracker protocol: https://github.com/snowplow/snowplow/wiki/snowplow-tracker-protocol
+ # * Canonical event model: https://github.com/snowplow/snowplow/wiki/canonical-event-model
+ class EventParams
+ def self.parse_event_params(params)
+ {
+ project_id: params['aid'],
+ platform: params['p'],
+ collector_tstamp: Time.zone.now,
+ event_id: params['eid'],
+ v_tracker: params['tv'],
+ v_collector: Gitlab::VERSION,
+ v_etl: Gitlab::VERSION,
+ os_timezone: params['tz'],
+ name_tracker: params['tna'],
+ br_lang: params['lang'],
+ doc_charset: params['cs'],
+ br_features_pdf: Gitlab::Utils.to_boolean(params['f_pdf']),
+ br_features_flash: Gitlab::Utils.to_boolean(params['f_fla']),
+ br_features_java: Gitlab::Utils.to_boolean(params['f_java']),
+ br_features_director: Gitlab::Utils.to_boolean(params['f_dir']),
+ br_features_quicktime: Gitlab::Utils.to_boolean(params['f_qt']),
+ br_features_realplayer: Gitlab::Utils.to_boolean(params['f_realp']),
+ br_features_windowsmedia: Gitlab::Utils.to_boolean(params['f_wma']),
+ br_features_gears: Gitlab::Utils.to_boolean(params['f_gears']),
+ br_features_silverlight: Gitlab::Utils.to_boolean(params['f_ag']),
+ br_colordepth: params['cd'],
+ br_cookies: Gitlab::Utils.to_boolean(params['cookie']),
+ dvce_created_tstamp: params['dtm'],
+ br_viewheight: params['vp'],
+ domain_sessionidx: params['vid'],
+ domain_sessionid: params['sid'],
+ domain_userid: params['duid'],
+ user_fingerprint: params['fp'],
+ page_referrer: params['refr'],
+ page_url: params['url']
+ }
+ end
+
+ def self.has_required_params?(params)
+ params['aid'].present? && params['eid'].present?
+ end
+ end
+end
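A conversion sketch with illustrative values (run in the Rails environment, since the helpers above rely on ActiveSupport and Gitlab::Utils):

    params = {
      'aid'    => '42', # project id
      'eid'    => 'c3f7a1de-1111-4222-8333-944444444444',
      'p'      => 'web',
      'cookie' => '1',
      'dtm'    => '1594900000000'
    }

    ProductAnalytics::EventParams.has_required_params?(params) # => true

    event = ProductAnalytics::EventParams.parse_event_params(params)
    event[:project_id] # => '42'
    event[:br_cookies] # => true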
diff --git a/lib/quality/helm3_client.rb b/lib/quality/helm3_client.rb
deleted file mode 100644
index afea73cbc50..00000000000
--- a/lib/quality/helm3_client.rb
+++ /dev/null
@@ -1,109 +0,0 @@
-# frozen_string_literal: true
-
-require 'time'
-require_relative '../gitlab/popen' unless defined?(Gitlab::Popen)
-
-module Quality
- class Helm3Client
- CommandFailedError = Class.new(StandardError)
-
- attr_reader :namespace
-
- RELEASE_JSON_ATTRIBUTES = %w[name revision updated status chart app_version namespace].freeze
- PAGINATION_SIZE = 256 # Default helm list pagination size
-
- Release = Struct.new(:name, :revision, :last_update, :status, :chart, :app_version, :namespace) do
- def revision
- @revision ||= self[:revision].to_i
- end
-
- def last_update
- @last_update ||= Time.parse(self[:last_update])
- end
- end
-
- # A single page of data and the corresponding page number.
- Page = Struct.new(:releases, :number)
-
- def initialize(namespace:)
- @namespace = namespace
- end
-
- def releases(args: [])
- each_release(args)
- end
-
- def delete(release_name:)
- run_command([
- 'uninstall',
- %(--namespace "#{namespace}"),
- release_name
- ])
- end
-
- private
-
- def run_command(command)
- final_command = ['helm', *command].join(' ')
- puts "Running command: `#{final_command}`" # rubocop:disable Rails/Output
-
- result = Gitlab::Popen.popen_with_detail([final_command])
-
- if result.status.success?
- result.stdout.chomp.freeze
- else
- raise CommandFailedError, "The `#{final_command}` command failed (status: #{result.status}) with the following error:\n#{result.stderr}"
- end
- end
-
- def raw_releases(page, args = [])
- command = [
- 'list',
- %(--namespace "#{namespace}"),
- %(--max #{PAGINATION_SIZE}),
- %(--offset #{PAGINATION_SIZE * page}),
- %(--output json),
- *args
- ]
- releases = JSON.parse(run_command(command))
-
- releases.map do |release|
- Release.new(*release.values_at(*RELEASE_JSON_ATTRIBUTES))
- end
- rescue JSON::ParserError => ex
- puts "Ignoring this JSON parsing error: #{ex}" # rubocop:disable Rails/Output
- []
- end
-
- # Fetches data from Helm and yields a Page object for every page
- # of data, without loading all of them into memory.
- #
- # method - The Octokit method to use for getting the data.
- # args - Arguments to pass to the `helm list` command.
- def each_releases_page(args, &block)
- return to_enum(__method__, args) unless block_given?
-
- page = 0
- final_args = args.dup
-
- begin
- collection = raw_releases(page, final_args)
-
- yield Page.new(collection, page += 1)
- end while collection.any?
- end
-
- # Iterates over all of the releases.
- #
- # args - Any arguments to pass to the `helm list` command.
- def each_release(args, &block)
- return to_enum(__method__, args) unless block_given?
-
- each_releases_page(args) do |page|
- page.releases.each do |release|
- yield release
- end
- end
- end
- end
-end
diff --git a/lib/quality/kubernetes_client.rb b/lib/quality/kubernetes_client.rb
deleted file mode 100644
index f83652e117f..00000000000
--- a/lib/quality/kubernetes_client.rb
+++ /dev/null
@@ -1,85 +0,0 @@
-# frozen_string_literal: true
-
-require_relative '../gitlab/popen' unless defined?(Gitlab::Popen)
-
-module Quality
- class KubernetesClient
- RESOURCE_LIST = 'ingress,svc,pdb,hpa,deploy,statefulset,job,pod,secret,configmap,pvc,secret,clusterrole,clusterrolebinding,role,rolebinding,sa,crd'
- CommandFailedError = Class.new(StandardError)
-
- attr_reader :namespace
-
- def initialize(namespace:)
- @namespace = namespace
- end
-
- def cleanup(release_name:, wait: true)
- delete_by_selector(release_name: release_name, wait: wait)
- delete_by_matching_name(release_name: release_name)
- end
-
- private
-
- def delete_by_selector(release_name:, wait:)
- selector = case release_name
- when String
- %(-l release="#{release_name}")
- when Array
- %(-l 'release in (#{release_name.join(', ')})')
- else
- raise ArgumentError, 'release_name must be a string or an array'
- end
-
- command = [
- 'delete',
- RESOURCE_LIST,
- %(--namespace "#{namespace}"),
- '--now',
- '--ignore-not-found',
- '--include-uninitialized',
- %(--wait=#{wait}),
- selector
- ]
-
- run_command(command)
- end
-
- def delete_by_matching_name(release_name:)
- resource_names = raw_resource_names
- command = [
- 'delete',
- %(--namespace "#{namespace}"),
- '--ignore-not-found'
- ]
-
- Array(release_name).each do |release|
- resource_names
- .select { |resource_name| resource_name.include?(release) }
- .each { |matching_resource| run_command(command + [matching_resource]) }
- end
- end
-
- def raw_resource_names
- command = [
- 'get',
- RESOURCE_LIST,
- %(--namespace "#{namespace}"),
- '-o name'
- ]
- run_command(command).lines.map(&:strip)
- end
-
- def run_command(command)
- final_command = ['kubectl', *command].join(' ')
- puts "Running command: `#{final_command}`" # rubocop:disable Rails/Output
-
- result = Gitlab::Popen.popen_with_detail([final_command])
-
- if result.status.success?
- result.stdout.chomp.freeze
- else
- raise CommandFailedError, "The `#{final_command}` command failed (status: #{result.status}) with the following error:\n#{result.stderr}"
- end
- end
- end
-end
diff --git a/lib/quality/seeders/issues.rb b/lib/quality/seeders/issues.rb
index 4c8cb6e97cc..ae19e86546a 100644
--- a/lib/quality/seeders/issues.rb
+++ b/lib/quality/seeders/issues.rb
@@ -29,6 +29,7 @@ module Quality
assignee_ids: Array(team.pluck(:id).sample(3)),
labels: labels.join(',')
}
+ params[:closed_at] = params[:created_at] + rand(35).days if params[:state] == 'closed'
issue = ::Issues::CreateService.new(project, team.sample, params).execute
if issue.persisted?
diff --git a/lib/quality/test_level.rb b/lib/quality/test_level.rb
index 334643fd0d3..cd94efddc1e 100644
--- a/lib/quality/test_level.rb
+++ b/lib/quality/test_level.rb
@@ -93,8 +93,14 @@ module Quality
private
+ def migration_and_background_migration_folders
+ TEST_LEVEL_FOLDERS.fetch(:migration) + TEST_LEVEL_FOLDERS.fetch(:background_migration)
+ end
+
def folders_pattern(level)
case level
+ when :migration
+ "{#{migration_and_background_migration_folders.join(',')}}"
# Geo specs aren't in a specific folder, but they all have the :geo tag, so we must search for them globally
when :all, :geo
'**'
@@ -105,6 +111,8 @@ module Quality
def folders_regex(level)
case level
+ when :migration
+ "(#{migration_and_background_migration_folders.join('|')})"
# Geo specs aren't in a specific folder, but they all have the :geo tag, so we must search for them globally
when :all, :geo
''
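An illustrative sketch of what the new :migration branches produce, assuming TEST_LEVEL_FOLDERS maps :migration to %w[migrations] and :background_migration to %w[lib/gitlab/background_migration] (those constants are defined earlier in this file, outside this hunk):

    level = Quality::TestLevel.new
    level.send(:folders_pattern, :migration) # => "{migrations,lib/gitlab/background_migration}"
    level.send(:folders_regex, :migration)   # => "(migrations|lib/gitlab/background_migration)"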
diff --git a/lib/rspec_flaky/flaky_examples_collection.rb b/lib/rspec_flaky/flaky_examples_collection.rb
index 290a51766e9..b86ec82bde6 100644
--- a/lib/rspec_flaky/flaky_examples_collection.rb
+++ b/lib/rspec_flaky/flaky_examples_collection.rb
@@ -23,7 +23,7 @@ module RspecFlaky
end
def to_h
- Hash[map { |uid, example| [uid, example.to_h] }].deep_symbolize_keys
+ transform_values { |example| example.to_h }.deep_symbolize_keys
end
def -(other)
diff --git a/lib/sentry/client/issue.rb b/lib/sentry/client/issue.rb
index 4a62b73a349..c5e9df9cd21 100644
--- a/lib/sentry/client/issue.rb
+++ b/lib/sentry/client/issue.rb
@@ -168,7 +168,8 @@ module Sentry
first_release_short_version: issue.dig('firstRelease', 'shortVersion'),
first_release_version: issue.dig('firstRelease', 'version'),
last_release_last_commit: issue.dig('lastRelease', 'lastCommit'),
- last_release_short_version: issue.dig('lastRelease', 'shortVersion')
+ last_release_short_version: issue.dig('lastRelease', 'shortVersion'),
+ last_release_version: issue.dig('lastRelease', 'version')
})
end
diff --git a/lib/support/logrotate/gitlab b/lib/support/logrotate/gitlab
index d9b07b61ec3..c34db47e214 100644
--- a/lib/support/logrotate/gitlab
+++ b/lib/support/logrotate/gitlab
@@ -2,6 +2,7 @@
# based on: http://stackoverflow.com/a/4883967
/home/git/gitlab/log/*.log {
+ su git git
daily
missingok
rotate 90
@@ -11,6 +12,7 @@
}
/home/git/gitlab-shell/gitlab-shell.log {
+ su git git
daily
missingok
rotate 90
diff --git a/lib/system_check/incoming_email/imap_authentication_check.rb b/lib/system_check/incoming_email/imap_authentication_check.rb
index 613c2296375..056021d460c 100644
--- a/lib/system_check/incoming_email/imap_authentication_check.rb
+++ b/lib/system_check/incoming_email/imap_authentication_check.rb
@@ -28,9 +28,12 @@ module SystemCheck
private
def try_connect_imap
- imap = Net::IMAP.new(config[:host], port: config[:port], ssl: config[:ssl])
- imap.starttls if config[:start_tls]
- imap.login(config[:email], config[:password])
+ config.each do |mailbox|
+ $stdout.puts "Checking #{mailbox[:email]}"
+ imap = Net::IMAP.new(mailbox[:host], port: mailbox[:port], ssl: mailbox[:ssl])
+ imap.starttls if mailbox[:start_tls]
+ imap.login(mailbox[:email], mailbox[:password])
+ end
true
rescue => error
@error = error
@@ -51,7 +54,7 @@ module SystemCheck
erb.filename = mail_room_config_path
config_file = YAML.load(erb.result)
- config_file.dig(:mailboxes, 0)
+ config_file[:mailboxes]
end
end
end
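The mailboxes config this check now iterates over is expected to look roughly like the following (the keys are the ones referenced above; values are illustrative):

    config = [
      {
        host: 'imap.example.com',
        port: 993,
        ssl: true,
        start_tls: false,
        email: 'incoming@example.com',
        password: 'secret'
      }
    ]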
diff --git a/lib/tasks/cache.rake b/lib/tasks/cache.rake
index c380eb293b5..6af91d473a6 100644
--- a/lib/tasks/cache.rake
+++ b/lib/tasks/cache.rake
@@ -18,7 +18,9 @@ namespace :cache do
count: REDIS_CLEAR_BATCH_SIZE
)
- redis.del(*keys) if keys.any?
+ Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
+ redis.del(*keys) if keys.any?
+ end
break if cursor == REDIS_SCAN_START_STOP
end
diff --git a/lib/tasks/gitlab/container_registry.rake b/lib/tasks/gitlab/container_registry.rake
index 7687cb237cc..cd18c873a5a 100644
--- a/lib/tasks/gitlab/container_registry.rake
+++ b/lib/tasks/gitlab/container_registry.rake
@@ -15,21 +15,7 @@ namespace :gitlab do
warn_user_is_not_gitlab
- url = registry_config.api_url
- # registry_info will query the /v2 route of the registry API. This route
- # requires authentication, but not authorization (the response has no body,
- # only headers that show the version of the registry). There is no
- # associated user when running this rake, so we need to generate a valid
- # JWT token with no access permissions to authenticate as a trusted client.
- token = Auth::ContainerRegistryAuthenticationService.access_token([], [])
- client = ContainerRegistry::Client.new(url, token: token)
- info = client.registry_info
-
- Gitlab::CurrentSettings.update!(
- container_registry_vendor: info[:vendor] || '',
- container_registry_version: info[:version] || '',
- container_registry_features: info[:features] || []
- )
+ UpdateContainerRegistryInfoService.new.execute
end
end
end
diff --git a/lib/tasks/gitlab/db.rake b/lib/tasks/gitlab/db.rake
index 4917d496d07..61318570fd5 100644
--- a/lib/tasks/gitlab/db.rake
+++ b/lib/tasks/gitlab/db.rake
@@ -39,6 +39,11 @@ namespace :gitlab do
# PG: http://www.postgresql.org/docs/current/static/ddl-depend.html
# Add `IF EXISTS` because cascade could have already deleted a table.
tables.each { |t| connection.execute("DROP TABLE IF EXISTS #{connection.quote_table_name(t)} CASCADE") }
+
+ # Drop all extra schema objects GitLab owns
+ Gitlab::Database::EXTRA_SCHEMAS.each do |schema|
+ connection.execute("DROP SCHEMA IF EXISTS #{connection.quote_table_name(schema)}")
+ end
end
desc 'GitLab | DB | Configures the database by running migrate, or by loading the schema and seeding if needed'
@@ -129,5 +134,37 @@ namespace :gitlab do
Rake::Task['db:structure:load'].enhance do
Rake::Task['gitlab:db:load_custom_structure'].invoke
end
+
+ desc 'Create missing dynamic database partitions'
+ task :create_dynamic_partitions do
+ Gitlab::Database::Partitioning::PartitionCreator.new.create_partitions
+ end
+
+    # This is targeted at deploys and upgrades of GitLab.
+    # Since we're already running migrations at this point,
+    # we also check for and create partitions as needed here.
+ Rake::Task['db:migrate'].enhance do
+ Rake::Task['gitlab:db:create_dynamic_partitions'].invoke
+ end
+
+    # When we load the database schema from db/structure.sql,
+    # no dynamic partitions are created. Strictly speaking we don't need to
+    # create them here, because application initializers/Sidekiq take care of
+    # that, too. However, the presence of partitions for a table influences its
+    # position in db/structure.sql (which is topologically sorted).
+    #
+    # Other than that, it's helpful to create partitions early when bootstrapping
+    # a new installation.
+ Rake::Task['db:structure:load'].enhance do
+ Rake::Task['gitlab:db:create_dynamic_partitions'].invoke
+ end
+
+    # During testing, db:test:load restores the database schema from scratch,
+    # which does not include dynamic partitions. We cannot rely on application
+    # initializers here, as the application can continue to run while
+    # a rake task reloads the database schema.
+ Rake::Task['db:test:load'].enhance do
+ Rake::Task['gitlab:db:create_dynamic_partitions'].invoke
+ end
end
end
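The partition creation can also be triggered on its own, for example from a Rails console, using the same service the new task calls:

    Gitlab::Database::Partitioning::PartitionCreator.new.create_partitions
    # or, from another rake-aware context:
    Rake::Task['gitlab:db:create_dynamic_partitions'].invoke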
diff --git a/lib/tasks/gitlab/external_diffs.rake b/lib/tasks/gitlab/external_diffs.rake
new file mode 100644
index 00000000000..08f25914007
--- /dev/null
+++ b/lib/tasks/gitlab/external_diffs.rake
@@ -0,0 +1,35 @@
+namespace :gitlab do
+ namespace :external_diffs do
+ desc "Override external diffs in file storage to be in object storage instead. This does not change the actual location of the data"
+ task force_object_storage: :environment do |t, args|
+ ansi = Gitlab::Utils.to_boolean(ENV.fetch('ANSI', true))
+ batch = ENV.fetch('BATCH_SIZE', 1000)
+ start_id = ENV.fetch('START_ID', nil)
+ end_id = ENV.fetch('END_ID', nil)
+ update_delay = args.fetch('UPDATE_DELAY', 1)
+
+ # Use ANSI codes to overwrite the same line repeatedly if supported
+ newline = ansi ? "\x1B8\x1B[2K" : "\n"
+
+ total = 0
+
+      # The only useful index on the table is by id, so scan through the whole
+      # table by id and filter out the rows we don't want from each relation.
+ MergeRequestDiff.in_batches(of: batch, start: start_id, finish: end_id) do |relation| # rubocop:disable Cop/InBatches
+ count = relation
+ .except(:order)
+ .where(stored_externally: true, external_diff_store: ExternalDiffUploader::Store::LOCAL)
+ .update_all(external_diff_store: ExternalDiffUploader::Store::REMOTE)
+
+ total += count
+
+ if count > 0
+ print "#{newline}#{total} updated..."
+ sleep(update_delay) if update_delay > 0
+ end
+ end
+
+ puts "done!"
+ end
+ end
+end
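A hedged invocation sketch from a Rake-aware context; BATCH_SIZE, START_ID, END_ID and ANSI are read from the environment, while UPDATE_DELAY is read from the task arguments:

    ENV['BATCH_SIZE'] = '5000'
    ENV['START_ID']   = '1'
    ENV['END_ID']     = '100000'

    Rake::Task['gitlab:external_diffs:force_object_storage'].invoke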
diff --git a/lib/tasks/gitlab/packages/migrate.rake b/lib/tasks/gitlab/packages/migrate.rake
new file mode 100644
index 00000000000..cd6dcf78da3
--- /dev/null
+++ b/lib/tasks/gitlab/packages/migrate.rake
@@ -0,0 +1,23 @@
+require 'logger'
+
+desc "GitLab | Packages | Migrate packages files to remote storage"
+namespace :gitlab do
+ namespace :packages do
+ task migrate: :environment do
+ logger = Logger.new(STDOUT)
+ logger.info('Starting transfer of package files to object storage')
+
+ unless ::Packages::PackageFileUploader.object_store_enabled?
+ raise 'Object store is disabled for packages feature'
+ end
+
+ ::Packages::PackageFile.with_files_stored_locally.find_each(batch_size: 10) do |package_file|
+ package_file.file.migrate!(::Packages::PackageFileUploader::Store::REMOTE)
+
+ logger.info("Transferred package file #{package_file.id} of size #{package_file.size.to_i.bytes} to object storage")
+ rescue => e
+ logger.error("Failed to transfer package file #{package_file.id} with error: #{e.message}")
+ end
+ end
+ end
+end
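A pre-flight sketch: the task raises unless object storage is enabled for package files, so check that first (for example in a Rails console) before invoking it:

    ::Packages::PackageFileUploader.object_store_enabled? # must return true
    Rake::Task['gitlab:packages:migrate'].invoke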