-rw-r--r--  app/assets/javascripts/api.js | 4
-rw-r--r--  app/assets/javascripts/code_navigation/store/actions.js | 5
-rw-r--r--  app/controllers/concerns/cycle_analytics_params.rb | 2
-rw-r--r--  app/graphql/resolvers/projects/snippets_resolver.rb | 5
-rw-r--r--  app/models/project.rb | 10
-rw-r--r--  app/policies/project_policy.rb | 2
-rw-r--r--  app/presenters/projects/prometheus/alert_presenter.rb | 6
-rw-r--r--  app/presenters/snippet_blob_presenter.rb | 27
-rw-r--r--  app/services/projects/container_repository/cleanup_tags_service.rb | 9
-rw-r--r--  app/services/projects/lsif_data_service.rb | 28
-rw-r--r--  app/services/projects/update_repository_storage_service.rb | 115
-rw-r--r--  app/services/projects/update_service.rb | 11
-rw-r--r--  app/workers/all_queues.yml | 7
-rw-r--r--  app/workers/project_update_repository_storage_worker.rb | 15
-rw-r--r--  changelogs/unreleased/27072-name-regex-allow-bulk-api.yml | 5
-rw-r--r--  changelogs/unreleased/37320-ensure-project-snippet-feature-status-in-project-snippet-api-endpoi.yml | 5
-rw-r--r--  changelogs/unreleased/al-205505-fix-snippet-blob-viewers.yml | 5
-rw-r--r--  changelogs/unreleased/move-storage-shards.yml | 5
-rw-r--r--  changelogs/unreleased/sy-auto-embed-alert.yml | 5
-rw-r--r--  doc/api/container_registry.md | 18
-rw-r--r--  doc/api/projects.md | 6
-rw-r--r--  doc/user/incident_management/index.md | 2
-rw-r--r--  lib/api/entities/project.rb | 3
-rw-r--r--  lib/api/helpers/projects_helpers.rb | 2
-rw-r--r--  lib/api/lsif_data.rb | 6
-rw-r--r--  lib/api/project_container_repositories.rb | 11
-rw-r--r--  lib/api/projects.rb | 1
-rw-r--r--  lib/gitlab/danger/helper.rb | 3
-rw-r--r--  lib/tasks/cleanup.rake | 33
-rw-r--r--  spec/frontend/code_navigation/store/actions_spec.js | 26
-rw-r--r--  spec/graphql/resolvers/projects/snippets_resolver_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/danger/helper_spec.rb | 3
-rw-r--r--  spec/models/project_spec.rb | 38
-rw-r--r--  spec/presenters/snippet_blob_presenter_spec.rb | 83
-rw-r--r--  spec/requests/api/graphql/mutations/snippets/create_spec.rb | 4
-rw-r--r--  spec/requests/api/graphql/mutations/snippets/update_spec.rb | 4
-rw-r--r--  spec/requests/api/lsif_data_spec.rb | 75
-rw-r--r--  spec/requests/api/project_container_repositories_spec.rb | 55
-rw-r--r--  spec/requests/api/projects_spec.rb | 65
-rw-r--r--  spec/services/projects/container_repository/cleanup_tags_service_spec.rb | 84
-rw-r--r--  spec/services/projects/fork_service_spec.rb | 21
-rw-r--r--  spec/services/projects/lsif_data_service_spec.rb | 15
-rw-r--r--  spec/services/projects/update_repository_storage_service_spec.rb | 83
-rw-r--r--  spec/services/projects/update_service_spec.rb | 19
-rw-r--r--  spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb | 101
-rw-r--r--  spec/workers/project_update_repository_storage_worker_spec.rb | 24
46 files changed, 916 insertions, 150 deletions
diff --git a/app/assets/javascripts/api.js b/app/assets/javascripts/api.js
index 8a6395b42b5..dc6ea148047 100644
--- a/app/assets/javascripts/api.js
+++ b/app/assets/javascripts/api.js
@@ -476,12 +476,12 @@ const Api = {
return axios.get(url);
},
- lsifData(projectPath, commitId, path) {
+ lsifData(projectPath, commitId, paths) {
const url = Api.buildUrl(this.lsifPath)
.replace(':id', encodeURIComponent(projectPath))
.replace(':commit_id', commitId);
- return axios.get(url, { params: { path } });
+ return axios.get(url, { params: { paths } });
},
environments(id) {
diff --git a/app/assets/javascripts/code_navigation/store/actions.js b/app/assets/javascripts/code_navigation/store/actions.js
index 2c52074e362..5220b1215b8 100644
--- a/app/assets/javascripts/code_navigation/store/actions.js
+++ b/app/assets/javascripts/code_navigation/store/actions.js
@@ -13,9 +13,10 @@ export default {
commit(types.REQUEST_DATA);
api
- .lsifData(state.projectPath, state.commitId, state.blobPath)
+ .lsifData(state.projectPath, state.commitId, [state.blobPath])
.then(({ data }) => {
- const normalizedData = data.reduce((acc, d) => {
+ const dataForPath = data[state.blobPath];
+ const normalizedData = dataForPath.reduce((acc, d) => {
if (d.hover) {
acc[`${d.start_line}:${d.start_char}`] = d;
addInteractionClass(d);
diff --git a/app/controllers/concerns/cycle_analytics_params.rb b/app/controllers/concerns/cycle_analytics_params.rb
index 3e67f1f54cb..50e340dc9b1 100644
--- a/app/controllers/concerns/cycle_analytics_params.rb
+++ b/app/controllers/concerns/cycle_analytics_params.rb
@@ -38,7 +38,7 @@ module CycleAnalyticsParams
end
def to_utc_time(field)
- date = field.is_a?(Date) ? field : Date.parse(field)
+ date = field.is_a?(Date) || field.is_a?(Time) ? field : Date.parse(field)
date.to_time.utc
end
end
diff --git a/app/graphql/resolvers/projects/snippets_resolver.rb b/app/graphql/resolvers/projects/snippets_resolver.rb
index bf9aa45349f..22895a24054 100644
--- a/app/graphql/resolvers/projects/snippets_resolver.rb
+++ b/app/graphql/resolvers/projects/snippets_resolver.rb
@@ -10,6 +10,11 @@ module Resolvers
def resolve(**args)
return Snippet.none if project.nil?
+ unless project.feature_available?(:snippets, current_user)
+ raise Gitlab::Graphql::Errors::ResourceNotAvailable,
+ 'Snippets are not enabled for this Project'
+ end
+
super
end
diff --git a/app/models/project.rb b/app/models/project.rb
index fdf7452d143..b0a1e378b41 100644
--- a/app/models/project.rb
+++ b/app/models/project.rb
@@ -2039,6 +2039,16 @@ class Project < ApplicationRecord
end
end
+ def change_repository_storage(new_repository_storage_key)
+ return if repository_read_only?
+ return if repository_storage == new_repository_storage_key
+
+ raise ArgumentError unless ::Gitlab.config.repositories.storages.key?(new_repository_storage_key)
+
+ run_after_commit { ProjectUpdateRepositoryStorageWorker.perform_async(id, new_repository_storage_key) }
+ self.repository_read_only = true
+ end
+
def pushes_since_gc
Gitlab::Redis::SharedState.with { |redis| redis.get(pushes_since_gc_redis_shared_state_key).to_i }
end
diff --git a/app/policies/project_policy.rb b/app/policies/project_policy.rb
index 4d49c96d268..ee47acc6041 100644
--- a/app/policies/project_policy.rb
+++ b/app/policies/project_policy.rb
@@ -469,6 +469,8 @@ class ProjectPolicy < BasePolicy
prevent :create_pipeline
end
+ rule { admin }.enable :change_repository_storage
+
private
def team_member?
diff --git a/app/presenters/projects/prometheus/alert_presenter.rb b/app/presenters/projects/prometheus/alert_presenter.rb
index 7416b76b65d..02e22f8f46a 100644
--- a/app/presenters/projects/prometheus/alert_presenter.rb
+++ b/app/presenters/projects/prometheus/alert_presenter.rb
@@ -53,7 +53,7 @@ module Projects
#### Summary
#{metadata_list}
- #{alert_details}
+ #{alert_details}#{metric_embed_for_alert}
MARKDOWN
end
@@ -118,6 +118,10 @@ module Projects
def host_links
Array(hosts.value).join(' ')
end
+
+ def metric_embed_for_alert; end
end
end
end
+
+Projects::Prometheus::AlertPresenter.prepend_if_ee('EE::Projects::Prometheus::AlertPresenter')
diff --git a/app/presenters/snippet_blob_presenter.rb b/app/presenters/snippet_blob_presenter.rb
index 8e593d3a5fe..ed9c28bbc2c 100644
--- a/app/presenters/snippet_blob_presenter.rb
+++ b/app/presenters/snippet_blob_presenter.rb
@@ -3,18 +3,15 @@
class SnippetBlobPresenter < BlobPresenter
def rich_data
return if blob.binary?
+ return unless blob.rich_viewer
- if markup?
- blob.rendered_markup
- else
- highlight(plain: false)
- end
+ render_rich_partial
end
def plain_data
return if blob.binary?
- highlight(plain: !markup?)
+ highlight(plain: false)
end
def raw_path
@@ -27,10 +24,6 @@ class SnippetBlobPresenter < BlobPresenter
private
- def markup?
- blob.rich_viewer&.partial_name == 'markup'
- end
-
def snippet
blob.container
end
@@ -38,4 +31,18 @@ class SnippetBlobPresenter < BlobPresenter
def language
nil
end
+
+ def render_rich_partial
+ renderer.render("projects/blob/viewers/_#{blob.rich_viewer.partial_name}",
+ locals: { viewer: blob.rich_viewer, blob: blob, blob_raw_path: raw_path },
+ layout: false)
+ end
+
+ def renderer
+ proxy = Warden::Proxy.new({}, Warden::Manager.new({})).tap do |proxy_instance|
+ proxy_instance.set_user(current_user, scope: :user)
+ end
+
+ ApplicationController.renderer.new('warden' => proxy)
+ end
end
diff --git a/app/services/projects/container_repository/cleanup_tags_service.rb b/app/services/projects/container_repository/cleanup_tags_service.rb
index 046745d725e..6eb8f5c27d9 100644
--- a/app/services/projects/container_repository/cleanup_tags_service.rb
+++ b/app/services/projects/container_repository/cleanup_tags_service.rb
@@ -61,10 +61,15 @@ module Projects
end
def filter_by_name(tags)
- regex = Gitlab::UntrustedRegexp.new("\\A#{params['name_regex']}\\z")
+ # Technical Debt: https://gitlab.com/gitlab-org/gitlab/issues/207267
+ # name_regex to be removed when container_expiration_policies is updated
+ # to have both regex columns
+ regex_delete = Gitlab::UntrustedRegexp.new("\\A#{params['name_regex_delete'] || params['name_regex']}\\z")
+ regex_retain = Gitlab::UntrustedRegexp.new("\\A#{params['name_regex_keep']}\\z")
tags.select do |tag|
- regex.scan(tag.name).any?
+ # regex_retain will override any overlapping matches by regex_delete
+ regex_delete.match?(tag.name) && !regex_retain.match?(tag.name)
end
end
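
The comment above notes that `name_regex_keep` overrides any overlapping `name_regex_delete` matches. A minimal sketch of that filtering step, using plain Ruby `Regexp` as a stand-in for `Gitlab::UntrustedRegexp` and made-up tag names:

```ruby
# Stand-in for filter_by_name: anchored delete/keep patterns, keep wins.
tags = %w[latest stable-1.0 stable-1.1 v10.1.3 9f8e7d6c]

regex_delete = /\A.*\z/         # from params['name_regex_delete'] (or legacy 'name_regex')
regex_keep   = /\Astable.*\z/   # from params['name_regex_keep']

deletable = tags.select { |tag| regex_delete.match?(tag) && !regex_keep.match?(tag) }
# => ["latest", "v10.1.3", "9f8e7d6c"]
# Other safeguards (never deleting the `latest` tag, keep_n, older_than)
# are applied by separate filters in the service.
```
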
diff --git a/app/services/projects/lsif_data_service.rb b/app/services/projects/lsif_data_service.rb
index 971885b680e..142a5a910d4 100644
--- a/app/services/projects/lsif_data_service.rb
+++ b/app/services/projects/lsif_data_service.rb
@@ -2,20 +2,22 @@
module Projects
class LsifDataService
- attr_reader :file, :project, :path, :commit_id,
- :docs, :doc_ranges, :ranges, :def_refs, :hover_refs
+ attr_reader :file, :project, :commit_id, :docs,
+ :doc_ranges, :ranges, :def_refs, :hover_refs
CACHE_EXPIRE_IN = 1.hour
- def initialize(file, project, params)
+ def initialize(file, project, commit_id)
@file = file
@project = project
- @path = params[:path]
- @commit_id = params[:commit_id]
- end
+ @commit_id = commit_id
- def execute
fetch_data!
+ end
+
+ def execute(path)
+ doc_id = find_doc_id(docs, path)
+ dir_absolute_path = docs[doc_id]&.delete_suffix(path)
doc_ranges[doc_id]&.map do |range_id|
location, ref_id = ranges[range_id].values_at('loc', 'ref_id')
@@ -26,7 +28,7 @@ module Projects
end_line: line_data.last,
start_char: column_data.first,
end_char: column_data.last,
- definition_url: definition_url_for(def_refs[ref_id]),
+ definition_url: definition_url_for(def_refs[ref_id], dir_absolute_path),
hover: highlighted_hover(hover_refs[ref_id])
}
end
@@ -58,8 +60,8 @@ module Projects
@hover_refs = data['hover_refs']
end
- def doc_id
- @doc_id ||= docs.reduce(nil) do |doc_id, (id, doc_path)|
+ def find_doc_id(docs, path)
+ docs.reduce(nil) do |doc_id, (id, doc_path)|
next doc_id unless doc_path =~ /#{path}$/
if doc_id.nil? || docs[doc_id].size > doc_path.size
@@ -70,11 +72,7 @@ module Projects
end
end
- def dir_absolute_path
- @dir_absolute_path ||= docs[doc_id]&.delete_suffix(path)
- end
-
- def definition_url_for(ref_id)
+ def definition_url_for(ref_id, dir_absolute_path)
return unless range = ranges[ref_id]
def_doc_id, location = range.values_at('doc_id', 'loc')
diff --git a/app/services/projects/update_repository_storage_service.rb b/app/services/projects/update_repository_storage_service.rb
new file mode 100644
index 00000000000..30b99e85304
--- /dev/null
+++ b/app/services/projects/update_repository_storage_service.rb
@@ -0,0 +1,115 @@
+# frozen_string_literal: true
+
+module Projects
+ class UpdateRepositoryStorageService < BaseService
+ include Gitlab::ShellAdapter
+
+ RepositoryAlreadyMoved = Class.new(StandardError)
+
+ def initialize(project)
+ @project = project
+ end
+
+ def execute(new_repository_storage_key)
+ # Raising an exception is a little heavy handed but this behavior (doing
+ # nothing if the repo is already on the right storage) prevents data
+ # loss, so it is valuable for us to be able to observe it via the
+ # exception.
+ raise RepositoryAlreadyMoved if project.repository_storage == new_repository_storage_key
+
+ if mirror_repositories(new_repository_storage_key)
+ mark_old_paths_for_archive
+
+ project.update(repository_storage: new_repository_storage_key, repository_read_only: false)
+ project.leave_pool_repository
+ project.track_project_repository
+
+ enqueue_housekeeping
+ else
+ project.update(repository_read_only: false)
+ end
+ end
+
+ private
+
+ def mirror_repositories(new_repository_storage_key)
+ result = mirror_repository(new_repository_storage_key)
+
+ if project.wiki.repository_exists?
+ result &&= mirror_repository(new_repository_storage_key, type: Gitlab::GlRepository::WIKI)
+ end
+
+ result
+ end
+
+ def mirror_repository(new_storage_key, type: Gitlab::GlRepository::PROJECT)
+ return false unless wait_for_pushes(type)
+
+ repository = type.repository_for(project)
+ full_path = repository.full_path
+ raw_repository = repository.raw
+
+ # Initialize a git repository on the target path
+ gitlab_shell.create_repository(new_storage_key, raw_repository.relative_path, full_path)
+ new_repository = Gitlab::Git::Repository.new(new_storage_key,
+ raw_repository.relative_path,
+ raw_repository.gl_repository,
+ full_path)
+
+ new_repository.fetch_repository_as_mirror(raw_repository)
+ end
+
+ def mark_old_paths_for_archive
+ old_repository_storage = project.repository_storage
+ new_project_path = moved_path(project.disk_path)
+
+ # Notice that the block passed to `run_after_commit` will run with `project`
+ # as its context
+ project.run_after_commit do
+ GitlabShellWorker.perform_async(:mv_repository,
+ old_repository_storage,
+ disk_path,
+ new_project_path)
+
+ if wiki.repository_exists?
+ GitlabShellWorker.perform_async(:mv_repository,
+ old_repository_storage,
+ wiki.disk_path,
+ "#{new_project_path}.wiki")
+ end
+ end
+ end
+
+ def moved_path(path)
+ "#{path}+#{project.id}+moved+#{Time.now.to_i}"
+ end
+
+ # The underlying FetchInternalRemote call uses a `git fetch` to move data
+ # to the new repository, which leaves it in a less-well-packed state,
+ # lacking bitmaps and commit graphs. Housekeeping will boost performance
+ # significantly.
+ def enqueue_housekeeping
+ return unless Gitlab::CurrentSettings.housekeeping_enabled?
+ return unless Feature.enabled?(:repack_after_shard_migration, project)
+
+ Projects::HousekeepingService.new(project, :gc).execute
+ rescue Projects::HousekeepingService::LeaseTaken
+ # No action required
+ end
+
+ def wait_for_pushes(type)
+ reference_counter = project.reference_counter(type: type)
+
+ # Try for 30 seconds, polling every 10
+ 3.times do
+ return true if reference_counter.value == 0
+
+ sleep 10
+ end
+
+ false
+ end
+ end
+end
+
+Projects::UpdateRepositoryStorageService.prepend_if_ee('EE::Projects::UpdateRepositoryStorageService')
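
`mark_old_paths_for_archive` relies on the behaviour called out in its comment: the `run_after_commit` block later runs with the project as its receiver, while locals are captured by the closure. A toy stand-in (not GitLab's actual `AfterCommitQueue`) showing that split:

```ruby
# Toy model only: the queued block is executed in the record's context, so bare
# calls like `disk_path` hit the project, while `new_project_path` comes from
# the enclosing service via the closure.
class FakeProject
  def disk_path
    '@hashed/6b/86/6b86b273'
  end

  def run_after_commit(&block)
    (@queue ||= []) << block
  end

  def simulate_commit!
    (@queue || []).each { |block| instance_exec(&block) }
  end
end

project = FakeProject.new
new_project_path = "#{project.disk_path}+42+moved+#{Time.now.to_i}"

project.run_after_commit do
  # `disk_path` resolves on the project; `new_project_path` is a captured local
  puts "mv_repository #{disk_path} -> #{new_project_path}"
end

project.simulate_commit!
```
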
diff --git a/app/services/projects/update_service.rb b/app/services/projects/update_service.rb
index aedd7252f63..e10dede632a 100644
--- a/app/services/projects/update_service.rb
+++ b/app/services/projects/update_service.rb
@@ -13,6 +13,10 @@ module Projects
ensure_wiki_exists if enabling_wiki?
+ if changing_storage_size?
+ project.change_repository_storage(params.delete(:repository_storage))
+ end
+
yield if block_given?
validate_classification_label(project, :external_authorization_classification_label)
@@ -140,6 +144,13 @@ module Projects
def changing_pages_https_only?
project.previous_changes.include?(:pages_https_only)
end
+
+ def changing_storage_size?
+ new_repository_storage = params[:repository_storage]
+
+ new_repository_storage && project.repository.exists? &&
+ can?(current_user, :change_repository_storage, project)
+ end
end
end
diff --git a/app/workers/all_queues.yml b/app/workers/all_queues.yml
index 259b2efc49f..61e9f50c2dd 100644
--- a/app/workers/all_queues.yml
+++ b/app/workers/all_queues.yml
@@ -1151,6 +1151,13 @@
:resource_boundary: :unknown
:weight: 1
:idempotent:
+- :name: project_update_repository_storage
+ :feature_category: :source_code_management
+ :has_external_dependencies:
+ :urgency: :default
+ :resource_boundary: :unknown
+ :weight: 1
+ :idempotent:
- :name: propagate_service_template
:feature_category: :source_code_management
:has_external_dependencies:
diff --git a/app/workers/project_update_repository_storage_worker.rb b/app/workers/project_update_repository_storage_worker.rb
new file mode 100644
index 00000000000..2d88b532dbf
--- /dev/null
+++ b/app/workers/project_update_repository_storage_worker.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+class ProjectUpdateRepositoryStorageWorker # rubocop:disable Scalability/IdempotentWorker
+ include ApplicationWorker
+
+ feature_category :source_code_management
+
+ def perform(project_id, new_repository_storage_key)
+ project = Project.find(project_id)
+
+ ::Projects::UpdateRepositoryStorageService.new(project).execute(new_repository_storage_key)
+ rescue ::Projects::UpdateRepositoryStorageService::RepositoryAlreadyMoved
+ Rails.logger.info "#{self.class}: repository already moved: #{project}" # rubocop:disable Gitlab/RailsLogger
+ end
+end
diff --git a/changelogs/unreleased/27072-name-regex-allow-bulk-api.yml b/changelogs/unreleased/27072-name-regex-allow-bulk-api.yml
new file mode 100644
index 00000000000..69021920407
--- /dev/null
+++ b/changelogs/unreleased/27072-name-regex-allow-bulk-api.yml
@@ -0,0 +1,5 @@
+---
+title: Add name_regex_keep param to container registry bulk delete API endpoint
+merge_request: 25484
+author:
+type: added
diff --git a/changelogs/unreleased/37320-ensure-project-snippet-feature-status-in-project-snippet-api-endpoi.yml b/changelogs/unreleased/37320-ensure-project-snippet-feature-status-in-project-snippet-api-endpoi.yml
new file mode 100644
index 00000000000..8c8f629d03c
--- /dev/null
+++ b/changelogs/unreleased/37320-ensure-project-snippet-feature-status-in-project-snippet-api-endpoi.yml
@@ -0,0 +1,5 @@
+---
+title: Project Snippets GraphQL resolver checks feature status
+merge_request: 26158
+author:
+type: performance
diff --git a/changelogs/unreleased/al-205505-fix-snippet-blob-viewers.yml b/changelogs/unreleased/al-205505-fix-snippet-blob-viewers.yml
new file mode 100644
index 00000000000..c7cda97c9e0
--- /dev/null
+++ b/changelogs/unreleased/al-205505-fix-snippet-blob-viewers.yml
@@ -0,0 +1,5 @@
+---
+title: Fix snippet blob viewers for rich and plain data
+merge_request: 25945
+author:
+type: fixed
diff --git a/changelogs/unreleased/move-storage-shards.yml b/changelogs/unreleased/move-storage-shards.yml
new file mode 100644
index 00000000000..442d86f80d8
--- /dev/null
+++ b/changelogs/unreleased/move-storage-shards.yml
@@ -0,0 +1,5 @@
+---
+title: "Backport API support to move between repository storages/shards"
+merge_request: 18721
+author: Ben Bodenmiller
+type: added
diff --git a/changelogs/unreleased/sy-auto-embed-alert.yml b/changelogs/unreleased/sy-auto-embed-alert.yml
new file mode 100644
index 00000000000..d89b0e161f5
--- /dev/null
+++ b/changelogs/unreleased/sy-auto-embed-alert.yml
@@ -0,0 +1,5 @@
+---
+title: Automatically include embedded metrics for GitLab alert incidents
+merge_request: 25277
+author:
+type: added
diff --git a/doc/api/container_registry.md b/doc/api/container_registry.md
index f09ba3fc4ce..eb46bcfc457 100644
--- a/doc/api/container_registry.md
+++ b/doc/api/container_registry.md
@@ -231,7 +231,9 @@ DELETE /projects/:id/registry/repositories/:repository_id/tags
| --------- | ---- | -------- | ----------- |
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user. |
| `repository_id` | integer | yes | The ID of registry repository. |
-| `name_regex` | string | yes | The [re2](https://github.com/google/re2/wiki/Syntax) regex of the name to delete. To delete all tags specify `.*`.|
+| `name_regex` | string | no | The [re2](https://github.com/google/re2/wiki/Syntax) regex of the name to delete. To delete all tags specify `.*`. **Note:** `name_regex` is deprecated in favor of `name_regex_delete`.|
+| `name_regex_delete` | string | yes | The [re2](https://github.com/google/re2/wiki/Syntax) regex of the name to delete. To delete all tags specify `.*`.|
+| `name_regex_keep` | string | no | The [re2](https://github.com/google/re2/wiki/Syntax) regex of the name to keep. This value will override any matches from `name_regex_delete`. Note: setting to `.*` will result in a no-op. |
| `keep_n` | integer | no | The amount of latest tags of given name to keep. |
| `older_than` | string | no | Tags to delete that are older than the given time, written in human readable form `1h`, `1d`, `1month`. |
@@ -239,7 +241,7 @@ This API call performs the following operations:
1. It orders all tags by creation date. The creation date is the time of the
manifest creation, not the time of tag push.
-1. It removes only the tags matching the given `name_regex`.
+1. It removes only the tags matching the given `name_regex_delete` (or deprecated `name_regex`), keeping any that match `name_regex_keep`.
1. It never removes the tag named `latest`.
1. It keeps N latest matching tags (if `keep_n` is specified).
1. It only removes tags that are older than X amount of time (if `older_than` is specified).
@@ -261,17 +263,23 @@ Examples:
and remove ones that are older than 2 days:
```shell
- curl --request DELETE --data 'name_regex=[0-9a-z]{40}' --data 'keep_n=5' --data 'older_than=2d' --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/registry/repositories/2/tags"
+ curl --request DELETE --data 'name_regex_delete=[0-9a-z]{40}' --data 'keep_n=5' --data 'older_than=2d' --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/registry/repositories/2/tags"
```
1. Remove all tags, but keep always the latest 5:
```shell
- curl --request DELETE --data 'name_regex=.*' --data 'keep_n=5' --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/registry/repositories/2/tags"
+ curl --request DELETE --data 'name_regex_delete=.*' --data 'keep_n=5' --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/registry/repositories/2/tags"
+ ```
+
+1. Remove all tags, but always keep tags beginning with `stable`:
+
+ ```shell
+ curl --request DELETE --data 'name_regex_delete=.*' --data 'name_regex_keep=stable.*' --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/registry/repositories/2/tags"
```
1. Remove all tags that are older than 1 month:
```shell
- curl --request DELETE --data 'name_regex=.*' --data 'older_than=1month' --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/registry/repositories/2/tags"
+ curl --request DELETE --data 'name_regex_delete=.*' --data 'older_than=1month' --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/registry/repositories/2/tags"
```
diff --git a/doc/api/projects.md b/doc/api/projects.md
index c2154233cc5..5b66f0dd5d3 100644
--- a/doc/api/projects.md
+++ b/doc/api/projects.md
@@ -1041,7 +1041,7 @@ POST /projects
| `ci_config_path` | string | no | The path to CI config file |
| `auto_devops_enabled` | boolean | no | Enable Auto DevOps for this project |
| `auto_devops_deploy_strategy` | string | no | Auto Deploy strategy (`continuous`, `manual` or `timed_incremental`) |
-| `repository_storage` | string | no | **(STARTER ONLY)** Which storage shard the repository is on. Available only to admins |
+| `repository_storage` | string | no | Which storage shard the repository is on. Available only to admins |
| `approvals_before_merge` | integer | no | **(STARTER)** How many approvers should approve merge requests by default |
| `external_authorization_classification_label` | string | no | **(PREMIUM)** The classification label for the project |
| `mirror` | boolean | no | **(STARTER)** Enables pull mirroring in a project |
@@ -1109,7 +1109,7 @@ POST /projects/user/:user_id
| `ci_config_path` | string | no | The path to CI config file |
| `auto_devops_enabled` | boolean | no | Enable Auto DevOps for this project |
| `auto_devops_deploy_strategy` | string | no | Auto Deploy strategy (`continuous`, `manual` or `timed_incremental`) |
-| `repository_storage` | string | no | **(STARTER ONLY)** Which storage shard the repository is on. Available only to admins |
+| `repository_storage` | string | no | Which storage shard the repository is on. Available only to admins |
| `approvals_before_merge` | integer | no | **(STARTER)** How many approvers should approve merge requests by default |
| `external_authorization_classification_label` | string | no | **(PREMIUM)** The classification label for the project |
| `mirror` | boolean | no | **(STARTER)** Enables pull mirroring in a project |
@@ -1177,7 +1177,7 @@ PUT /projects/:id
| `ci_default_git_depth` | integer | no | Default number of revisions for [shallow cloning](../user/project/pipelines/settings.md#git-shallow-clone) |
| `auto_devops_enabled` | boolean | no | Enable Auto DevOps for this project |
| `auto_devops_deploy_strategy` | string | no | Auto Deploy strategy (`continuous`, `manual` or `timed_incremental`) |
-| `repository_storage` | string | no | **(STARTER ONLY)** Which storage shard the repository is on. Available only to admins |
+| `repository_storage` | string | no | Which storage shard the repository is on. Available only to admins |
| `approvals_before_merge` | integer | no | **(STARTER)** How many approvers should approve merge request by default |
| `external_authorization_classification_label` | string | no | **(PREMIUM)** The classification label for the project |
| `mirror` | boolean | no | **(STARTER)** Enables pull mirroring in a project |
diff --git a/doc/user/incident_management/index.md b/doc/user/incident_management/index.md
index 880083bf815..e003b6d5eaa 100644
--- a/doc/user/incident_management/index.md
+++ b/doc/user/incident_management/index.md
@@ -136,3 +136,5 @@ Incident Management features can be easily enabled & disabled via the Project se
#### Auto-creation
GitLab Issues can automatically be created as a result of an Alert notification. An Issue created this way will contain error information to help you further debug the error.
+
+For [GitLab-managed alerting rules](../project/integrations/prometheus.md#setting-up-alerts-for-prometheus-metrics-ultimate), the issue will include an embedded chart for the query corresponding to the alert. The chart will show an hour of data surrounding the starting point of the incident, 30 minutes before and after.
diff --git a/lib/api/entities/project.rb b/lib/api/entities/project.rb
index 6ed2ed34360..85a00273192 100644
--- a/lib/api/entities/project.rb
+++ b/lib/api/entities/project.rb
@@ -106,6 +106,9 @@ module API
project.auto_devops.nil? ? 'continuous' : project.auto_devops.deploy_strategy
end
expose :autoclose_referenced_issues
+ expose :repository_storage, if: ->(project, options) {
+ Ability.allowed?(options[:current_user], :change_repository_storage, project)
+ }
# rubocop: disable CodeReuse/ActiveRecord
def self.preload_relation(projects_relation, options = {})
diff --git a/lib/api/helpers/projects_helpers.rb b/lib/api/helpers/projects_helpers.rb
index c7c9f3ba077..85ed8a4d636 100644
--- a/lib/api/helpers/projects_helpers.rb
+++ b/lib/api/helpers/projects_helpers.rb
@@ -54,6 +54,7 @@ module API
optional :auto_devops_enabled, type: Boolean, desc: 'Flag indication if Auto DevOps is enabled'
optional :auto_devops_deploy_strategy, type: String, values: %w(continuous manual timed_incremental), desc: 'Auto Deploy strategy'
optional :autoclose_referenced_issues, type: Boolean, desc: 'Flag indication if referenced issues auto-closing is enabled'
+ optional :repository_storage, type: String, desc: 'Which storage shard the repository is on. Available only to admins'
end
params :optional_project_params_ee do
@@ -125,6 +126,7 @@ module API
:wiki_access_level,
:avatar,
:suggestion_commit_message,
+ :repository_storage,
# TODO: remove in API v5, replaced by *_access_level
:issues_enabled,
diff --git a/lib/api/lsif_data.rb b/lib/api/lsif_data.rb
index 6513973133a..a673ccb4af0 100644
--- a/lib/api/lsif_data.rb
+++ b/lib/api/lsif_data.rb
@@ -15,7 +15,7 @@ module API
resource :projects, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
segment ':id/commits/:commit_id' do
params do
- requires :path, type: String, desc: 'The path of a file'
+ requires :paths, type: Array, desc: 'The paths of the files'
end
get 'lsif/info' do
authorize! :download_code, user_project
@@ -30,7 +30,9 @@ module API
authorize! :read_pipeline, artifact.job.pipeline
file_too_large! if artifact.file.cached_size > MAX_FILE_SIZE
- ::Projects::LsifDataService.new(artifact.file, @project, params).execute
+ service = ::Projects::LsifDataService.new(artifact.file, @project, params[:commit_id])
+
+ params[:paths].to_h { |path| [path, service.execute(path)] }
end
end
end
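
With `paths` now an array, the endpoint returns one entry per requested path instead of a flat list, which is what the store action earlier in this diff reads via `data[state.blobPath]`. A hedged sketch of that shape; field values are illustrative and loosely taken from the request spec further down:

```ruby
# Illustrative response after the change: a hash keyed by requested path.
response = {
  'main.go' => [
    {
      'start_line' => 8, 'start_char' => 13,
      'end_line'   => 8, 'end_char'   => 18,
      'definition_url' => '/group/project/-/blob/<sha>/main.go#L4', # placeholder URL
      'hover' => [{ 'language' => 'go', 'value' => 'highlighted hover markup' }]
    }
  ],
  'morestrings/reverse.go' => [
    # ranges for the second requested path
  ]
}

response['main.go'] # what the frontend picks out for the current blob
```
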
diff --git a/lib/api/project_container_repositories.rb b/lib/api/project_container_repositories.rb
index 70c913bea98..0fd887f4458 100644
--- a/lib/api/project_container_repositories.rb
+++ b/lib/api/project_container_repositories.rb
@@ -69,7 +69,16 @@ module API
end
params do
requires :repository_id, type: Integer, desc: 'The ID of the repository'
- requires :name_regex, type: String, desc: 'The tag name regexp to delete, specify .* to delete all'
+ optional :name_regex_delete, type: String, desc: 'The tag name regexp to delete, specify .* to delete all'
+ # require either name_regex (deprecated) or name_regex_delete, it is ok to have both
+ given name_regex_delete: ->(val) { val.nil? } do
+ requires :name_regex, type: String, desc: 'The tag name regexp to delete, specify .* to delete all'
+ end
+ optional :name_regex, type: String, desc: 'The tag name regexp to delete, specify .* to delete all'
+ given name_regex: ->(val) { val.nil? } do
+ requires :name_regex_delete, type: String, desc: 'The tag name regexp to delete, specify .* to delete all'
+ end
+ optional :name_regex_keep, type: String, desc: 'The tag name regexp to retain'
optional :keep_n, type: Integer, desc: 'Keep n of latest tags with matching name'
optional :older_than, type: String, desc: 'Delete older than: 1h, 1d, 1month'
end
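
Per the comment above, either `name_regex_delete` or the deprecated `name_regex` must be present (both together is fine); otherwise Grape rejects the request, as the `without name_regex` case in the request spec further down shows. Illustrative, hypothetical parameter sets:

```ruby
# Hypothetical request parameter combinations against the mutual
# `given`/`requires` blocks above.
accepted = [
  { name_regex_delete: '.*' },
  { name_regex: '.*' },                                     # deprecated spelling still works
  { name_regex: '.*', name_regex_delete: 'v10.*' },         # both present is allowed
  { name_regex_delete: '.*', name_regex_keep: 'stable.*' }
]

rejected = [
  { name_regex_keep: 'stable.*', keep_n: 100 }              # no delete regex at all => 400 Bad Request
]
```
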
diff --git a/lib/api/projects.rb b/lib/api/projects.rb
index 43d607bc0c1..f9d08881acf 100644
--- a/lib/api/projects.rb
+++ b/lib/api/projects.rb
@@ -25,6 +25,7 @@ module API
end
def verify_update_project_attrs!(project, attrs)
+ attrs.delete(:repository_storage) unless can?(current_user, :change_repository_storage, project)
end
def delete_project(user_project)
diff --git a/lib/gitlab/danger/helper.rb b/lib/gitlab/danger/helper.rb
index 3c3f95e5b78..50a68c2e2c4 100644
--- a/lib/gitlab/danger/helper.rb
+++ b/lib/gitlab/danger/helper.rb
@@ -118,10 +118,9 @@ module Gitlab
\.haml-lint_todo.yml |
babel\.config\.js |
jest\.config\.js |
- karma\.config\.js |
- webpack\.config\.js |
package\.json |
yarn\.lock |
+ config/.+\.js |
\.gitlab/ci/frontend\.gitlab-ci\.yml
)\z}x => :frontend,
diff --git a/lib/tasks/cleanup.rake b/lib/tasks/cleanup.rake
new file mode 100644
index 00000000000..8574f26dbdc
--- /dev/null
+++ b/lib/tasks/cleanup.rake
@@ -0,0 +1,33 @@
+namespace :gitlab do
+ namespace :cleanup do
+ desc "GitLab | Cleanup | Delete moved repositories"
+ task moved: :gitlab_environment do
+ warn_user_is_not_gitlab
+ remove_flag = ENV['REMOVE']
+
+ Gitlab.config.repositories.storages.each do |name, repository_storage|
+ repo_root = repository_storage.legacy_disk_path.chomp('/')
+ # Look for global repos (legacy, depth 1) and normal repos (depth 2)
+ IO.popen(%W(find #{repo_root} -mindepth 1 -maxdepth 2 -name *+moved*.git)) do |find|
+ find.each_line do |path|
+ path.chomp!
+
+ if remove_flag
+ if FileUtils.rm_rf(path)
+ puts "Removed...#{path}".color(:green)
+ else
+ puts "Cannot remove #{path}".color(:red)
+ end
+ else
+ puts "Can be removed: #{path}".color(:green)
+ end
+ end
+ end
+ end
+
+      unless remove_flag
+        puts "To cleanup these repositories run this command with REMOVE=true".color(:yellow)
+      end
+    end
+ end
+end
diff --git a/spec/frontend/code_navigation/store/actions_spec.js b/spec/frontend/code_navigation/store/actions_spec.js
index 2230e0880bb..9c44480ca67 100644
--- a/spec/frontend/code_navigation/store/actions_spec.js
+++ b/spec/frontend/code_navigation/store/actions_spec.js
@@ -45,18 +45,20 @@ describe('Code navigation actions', () => {
describe('success', () => {
beforeEach(() => {
- mock.onGet(apiUrl).replyOnce(200, [
- {
- start_line: 0,
- start_char: 0,
- hover: { value: '123' },
- },
- {
- start_line: 1,
- start_char: 0,
- hover: null,
- },
- ]);
+ mock.onGet(apiUrl).replyOnce(200, {
+ index: [
+ {
+ start_line: 0,
+ start_char: 0,
+ hover: { value: '123' },
+ },
+ {
+ start_line: 1,
+ start_char: 0,
+ hover: null,
+ },
+ ],
+ });
});
it('commits REQUEST_DATA_SUCCESS with normalized data', done => {
diff --git a/spec/graphql/resolvers/projects/snippets_resolver_spec.rb b/spec/graphql/resolvers/projects/snippets_resolver_spec.rb
index eef891bf984..6d301b1c742 100644
--- a/spec/graphql/resolvers/projects/snippets_resolver_spec.rb
+++ b/spec/graphql/resolvers/projects/snippets_resolver_spec.rb
@@ -75,6 +75,16 @@ describe Resolvers::Projects::SnippetsResolver do
expect(resolve_snippets(context: { current_user: other_user }, args: { ids: project_snippet.to_global_id })).to be_empty
end
end
+
+ context 'when project snippets are disabled' do
+ it 'raises an error' do
+ disabled_snippet_project = create(:project, :snippets_disabled)
+ disabled_snippet_project.add_developer(current_user)
+
+ expect(SnippetsFinder).not_to receive(:new)
+ expect { resolve_snippets(obj: disabled_snippet_project) }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
end
def resolve_snippets(args: {}, context: { current_user: current_user }, obj: project)
diff --git a/spec/lib/gitlab/danger/helper_spec.rb b/spec/lib/gitlab/danger/helper_spec.rb
index 4b378936965..c76a1ffaa14 100644
--- a/spec/lib/gitlab/danger/helper_spec.rb
+++ b/spec/lib/gitlab/danger/helper_spec.rb
@@ -175,9 +175,12 @@ describe Gitlab::Danger::Helper do
'spec/javascripts/foo' | :frontend
'spec/frontend/bar' | :frontend
'vendor/assets/foo' | :frontend
+ 'babel.config.js' | :frontend
'jest.config.js' | :frontend
'package.json' | :frontend
'yarn.lock' | :frontend
+ 'config/foo.js' | :frontend
+ 'config/deep/foo.js' | :frontend
'ee/app/assets/foo' | :frontend
'ee/app/views/foo' | :frontend
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 4885d4b63ff..cbd55077316 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -2822,6 +2822,44 @@ describe Project do
end
end
+ describe '#change_repository_storage' do
+ let(:project) { create(:project, :repository) }
+ let(:read_only_project) { create(:project, :repository, repository_read_only: true) }
+
+ before do
+ stub_storage_settings('test_second_storage' => { 'path' => 'tmp/tests/extra_storage' })
+ end
+
+ it 'schedules the transfer of the repository to the new storage and locks the project' do
+ expect(ProjectUpdateRepositoryStorageWorker).to receive(:perform_async).with(project.id, 'test_second_storage')
+
+ project.change_repository_storage('test_second_storage')
+ project.save!
+
+ expect(project).to be_repository_read_only
+ end
+
+ it "doesn't schedule the transfer if the repository is already read-only" do
+ expect(ProjectUpdateRepositoryStorageWorker).not_to receive(:perform_async)
+
+ read_only_project.change_repository_storage('test_second_storage')
+ read_only_project.save!
+ end
+
+ it "doesn't lock or schedule the transfer if the storage hasn't changed" do
+ expect(ProjectUpdateRepositoryStorageWorker).not_to receive(:perform_async)
+
+ project.change_repository_storage(project.repository_storage)
+ project.save!
+
+ expect(project).not_to be_repository_read_only
+ end
+
+ it 'throws an error if an invalid repository storage is provided' do
+ expect { project.change_repository_storage('unknown') }.to raise_error(ArgumentError)
+ end
+ end
+
describe '#pushes_since_gc' do
let(:project) { create(:project) }
diff --git a/spec/presenters/snippet_blob_presenter_spec.rb b/spec/presenters/snippet_blob_presenter_spec.rb
index 89f4d65b47f..eb7621cc591 100644
--- a/spec/presenters/snippet_blob_presenter_spec.rb
+++ b/spec/presenters/snippet_blob_presenter_spec.rb
@@ -4,36 +4,73 @@ require 'spec_helper'
describe SnippetBlobPresenter do
describe '#rich_data' do
- let(:snippet) { build(:personal_snippet) }
+ before do
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:current_user).and_return(nil)
+ end
+ end
subject { described_class.new(snippet.blob).rich_data }
- it 'returns nil when the snippet blob is binary' do
- allow(snippet.blob).to receive(:binary?).and_return(true)
+ context 'with PersonalSnippet' do
+ let(:raw_url) { "http://127.0.0.1:3000/snippets/#{snippet.id}/raw" }
+ let(:snippet) { build(:personal_snippet) }
- expect(subject).to be_nil
- end
+ it 'returns nil when the snippet blob is binary' do
+ allow(snippet.blob).to receive(:binary?).and_return(true)
- it 'returns markdown content when snippet file is markup' do
- snippet.file_name = 'test.md'
- snippet.content = '*foo*'
+ expect(subject).to be_nil
+ end
- expect(subject).to eq '<p data-sourcepos="1:1-1:5" dir="auto"><em>foo</em></p>'
- end
+ context 'with markdown format' do
+ let(:snippet) { create(:personal_snippet, file_name: 'test.md', content: '*foo*') }
- it 'returns syntax highlighted content' do
- snippet.file_name = 'test.rb'
- snippet.content = 'class Foo;end'
+ it 'returns rich markdown content' do
+ expected = <<~HTML
+ <div class="file-content md">
+ <p data-sourcepos="1:1-1:5" dir="auto"><em>foo</em></p>
+ </div>
+ HTML
- expect(subject)
- .to eq '<span id="LC1" class="line" lang="ruby"><span class="k">class</span> <span class="nc">Foo</span><span class="p">;</span><span class="k">end</span></span>'
- end
+ expect(subject).to eq(expected)
+ end
+ end
- it 'returns plain text highlighted content' do
- snippet.file_name = 'test'
- snippet.content = 'foo'
+ context 'with notebook format' do
+ let(:snippet) { create(:personal_snippet, file_name: 'test.ipynb') }
- expect(subject).to eq '<span id="LC1" class="line" lang="plaintext">foo</span>'
+ it 'returns rich notebook content' do
+ expect(subject.strip).to eq %Q(<div class="file-content" data-endpoint="/snippets/#{snippet.id}/raw" id="js-notebook-viewer"></div>)
+ end
+ end
+
+ context 'with openapi format' do
+ let(:snippet) { create(:personal_snippet, file_name: 'openapi.yml') }
+
+ it 'returns rich openapi content' do
+ expect(subject).to eq %Q(<div class="file-content" data-endpoint="/snippets/#{snippet.id}/raw" id="js-openapi-viewer"></div>\n)
+ end
+ end
+
+ context 'with svg format' do
+ let(:snippet) { create(:personal_snippet, file_name: 'test.svg') }
+
+ it 'returns rich svg content' do
+ result = Nokogiri::HTML::DocumentFragment.parse(subject)
+ image_tag = result.search('img').first
+
+ expect(image_tag.attr('src')).to include("data:#{snippet.blob.mime_type};base64")
+ expect(image_tag.attr('alt')).to eq('test.svg')
+ end
+ end
+
+ context 'with other format' do
+ let(:snippet) { create(:personal_snippet, file_name: 'test') }
+
+      it 'does not return rich content' do
+ expect(subject).to be_nil
+ end
+ end
end
end
@@ -55,19 +92,19 @@ describe SnippetBlobPresenter do
expect(subject).to eq '<span id="LC1" class="line" lang="markdown"><span class="ge">*foo*</span></span>'
end
- it 'returns plain syntax content' do
+ it 'returns highlighted syntax content' do
snippet.file_name = 'test.rb'
snippet.content = 'class Foo;end'
expect(subject)
- .to eq '<span id="LC1" class="line" lang="">class Foo;end</span>'
+ .to eq '<span id="LC1" class="line" lang="ruby"><span class="k">class</span> <span class="nc">Foo</span><span class="p">;</span><span class="k">end</span></span>'
end
it 'returns plain text highlighted content' do
snippet.file_name = 'test'
snippet.content = 'foo'
- expect(subject).to eq '<span id="LC1" class="line" lang="">foo</span>'
+ expect(subject).to eq '<span id="LC1" class="line" lang="plaintext">foo</span>'
end
end
diff --git a/spec/requests/api/graphql/mutations/snippets/create_spec.rb b/spec/requests/api/graphql/mutations/snippets/create_spec.rb
index cb19f50b5b5..cef7fc5cbe3 100644
--- a/spec/requests/api/graphql/mutations/snippets/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/snippets/create_spec.rb
@@ -67,7 +67,7 @@ describe 'Creating a Snippet' do
it 'returns the created Snippet' do
post_graphql_mutation(mutation, current_user: current_user)
- expect(mutation_response['snippet']['blob']['richData']).to match(content)
+ expect(mutation_response['snippet']['blob']['richData']).to be_nil
expect(mutation_response['snippet']['blob']['plainData']).to match(content)
expect(mutation_response['snippet']['title']).to eq(title)
expect(mutation_response['snippet']['description']).to eq(description)
@@ -93,7 +93,7 @@ describe 'Creating a Snippet' do
it 'returns the created Snippet' do
post_graphql_mutation(mutation, current_user: current_user)
- expect(mutation_response['snippet']['blob']['richData']).to match(content)
+ expect(mutation_response['snippet']['blob']['richData']).to be_nil
expect(mutation_response['snippet']['blob']['plainData']).to match(content)
expect(mutation_response['snippet']['title']).to eq(title)
expect(mutation_response['snippet']['description']).to eq(description)
diff --git a/spec/requests/api/graphql/mutations/snippets/update_spec.rb b/spec/requests/api/graphql/mutations/snippets/update_spec.rb
index e9481a36287..820c97e8341 100644
--- a/spec/requests/api/graphql/mutations/snippets/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/snippets/update_spec.rb
@@ -56,7 +56,7 @@ describe 'Updating a Snippet' do
it 'returns the updated Snippet' do
post_graphql_mutation(mutation, current_user: current_user)
- expect(mutation_response['snippet']['blob']['richData']).to match(updated_content)
+ expect(mutation_response['snippet']['blob']['richData']).to be_nil
expect(mutation_response['snippet']['blob']['plainData']).to match(updated_content)
expect(mutation_response['snippet']['title']).to eq(updated_title)
expect(mutation_response['snippet']['description']).to eq(updated_description)
@@ -78,7 +78,7 @@ describe 'Updating a Snippet' do
it 'returns the Snippet with its original values' do
post_graphql_mutation(mutation, current_user: current_user)
- expect(mutation_response['snippet']['blob']['richData']).to match(original_content)
+ expect(mutation_response['snippet']['blob']['richData']).to be_nil
expect(mutation_response['snippet']['blob']['plainData']).to match(original_content)
expect(mutation_response['snippet']['title']).to eq(original_title)
expect(mutation_response['snippet']['description']).to eq(original_description)
diff --git a/spec/requests/api/lsif_data_spec.rb b/spec/requests/api/lsif_data_spec.rb
index 214bc832cda..a1516046e3e 100644
--- a/spec/requests/api/lsif_data_spec.rb
+++ b/spec/requests/api/lsif_data_spec.rb
@@ -9,18 +9,20 @@ describe API::LsifData do
let(:commit) { project.commit }
describe 'GET lsif/info' do
- let(:endpoint_path) { "/projects/#{project.id}/commits/#{commit.id}/lsif/info" }
+ subject do
+ endpoint_path = "/projects/#{project.id}/commits/#{commit.id}/lsif/info"
+
+ get api(endpoint_path, user), params: { paths: ['main.go', 'morestrings/reverse.go'] }
+
+ response
+ end
context 'user does not have access to the project' do
before do
project.add_guest(user)
end
- it 'returns 403' do
- get api(endpoint_path, user), params: { path: 'main.go' }
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
+ it { is_expected.to have_gitlab_http_status(:forbidden) }
end
context 'user has access to the project' do
@@ -28,35 +30,27 @@ describe API::LsifData do
project.add_reporter(user)
end
- context 'code_navigation feature is disabled' do
- before do
- stub_feature_flags(code_navigation: false)
- end
-
- it 'returns 404' do
- get api(endpoint_path, user)
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
context 'there is no job artifact for the passed commit' do
- it 'returns 404' do
- get api(endpoint_path, user), params: { path: 'main.go' }
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
+ it { is_expected.to have_gitlab_http_status(:not_found) }
end
context 'lsif data is stored as a job artifact' do
let!(:pipeline) { create(:ci_pipeline, project: project, sha: commit.id) }
let!(:artifact) { create(:ci_job_artifact, :lsif, job: create(:ci_build, pipeline: pipeline)) }
- it 'returns code navigation info for a given path' do
- get api(endpoint_path, user), params: { path: 'main.go' }
+ context 'code_navigation feature is disabled' do
+ before do
+ stub_feature_flags(code_navigation: false)
+ end
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.parsed_body.last).to eq({
+ it { is_expected.to have_gitlab_http_status(:not_found) }
+ end
+
+ it 'returns code navigation info for a given path', :aggregate_failures do
+ expect(subject).to have_gitlab_http_status(:ok)
+
+ data_for_main = response.parsed_body['main.go']
+ expect(data_for_main.last).to eq({
'end_char' => 18,
'end_line' => 8,
'start_char' => 13,
@@ -67,26 +61,33 @@ describe API::LsifData do
'value' => Gitlab::Highlight.highlight(nil, 'func Func2(i int) string', language: 'go')
}]
})
+
+ data_for_reverse = response.parsed_body['morestrings/reverse.go']
+ expect(data_for_reverse.last).to eq({
+ 'end_char' => 9,
+ 'end_line' => 7,
+ 'start_char' => 8,
+ 'start_line' => 7,
+ 'definition_url' => project_blob_path(project, "#{commit.id}/morestrings/reverse.go", anchor: 'L6'),
+ 'hover' => [{
+ 'language' => 'go',
+ 'value' => Gitlab::Highlight.highlight(nil, 'var b string', language: 'go')
+ }]
+ })
end
context 'the stored file is too large' do
- it 'returns 413' do
+ before do
allow_any_instance_of(JobArtifactUploader).to receive(:cached_size).and_return(20.megabytes)
-
- get api(endpoint_path, user), params: { path: 'main.go' }
-
- expect(response).to have_gitlab_http_status(:payload_too_large)
end
+
+ it { is_expected.to have_gitlab_http_status(:payload_too_large) }
end
context 'the user does not have access to the pipeline' do
let(:project) { create(:project, :repository, builds_access_level: ProjectFeature::DISABLED) }
- it 'returns 403' do
- get api(endpoint_path, user), params: { path: 'main.go' }
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
+ it { is_expected.to have_gitlab_http_status(:forbidden) }
end
end
end
diff --git a/spec/requests/api/project_container_repositories_spec.rb b/spec/requests/api/project_container_repositories_spec.rb
index 1854d4db920..91905635c3f 100644
--- a/spec/requests/api/project_container_repositories_spec.rb
+++ b/spec/requests/api/project_container_repositories_spec.rb
@@ -109,7 +109,7 @@ describe API::ProjectContainerRepositories do
context 'disallowed' do
let(:params) do
- { name_regex: 'v10.*' }
+ { name_regex_delete: 'v10.*' }
end
it_behaves_like 'rejected container repository access', :developer, :forbidden
@@ -130,16 +130,33 @@ describe API::ProjectContainerRepositories do
end
end
+ context 'without name_regex' do
+ let(:params) do
+ { keep_n: 100,
+ older_than: '1 day',
+ other: 'some value' }
+ end
+
+ it 'returns bad request' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
context 'passes all declared parameters' do
let(:params) do
- { name_regex: 'v10.*',
+ { name_regex_delete: 'v10.*',
+ name_regex_keep: 'v10.1.*',
keep_n: 100,
older_than: '1 day',
other: 'some value' }
end
let(:worker_params) do
- { name_regex: 'v10.*',
+ { name_regex: nil,
+ name_regex_delete: 'v10.*',
+ name_regex_keep: 'v10.1.*',
keep_n: 100,
older_than: '1 day',
container_expiration_policy: false }
@@ -174,6 +191,38 @@ describe API::ProjectContainerRepositories do
end
end
end
+
+ context 'with deprecated name_regex param' do
+ let(:params) do
+ { name_regex: 'v10.*',
+ name_regex_keep: 'v10.1.*',
+ keep_n: 100,
+ older_than: '1 day',
+ other: 'some value' }
+ end
+
+ let(:worker_params) do
+ { name_regex: 'v10.*',
+ name_regex_delete: nil,
+ name_regex_keep: 'v10.1.*',
+ keep_n: 100,
+ older_than: '1 day',
+ container_expiration_policy: false }
+ end
+
+ let(:lease_key) { "container_repository:cleanup_tags:#{root_repository.id}" }
+
+ it 'schedules cleanup of tags repository' do
+ stub_last_activity_update
+ stub_exclusive_lease(lease_key, timeout: 1.hour)
+ expect(CleanupContainerRepositoryWorker).to receive(:perform_async)
+ .with(maintainer.id, root_repository.id, worker_params)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:accepted)
+ end
+ end
end
end
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index 2f1760a570e..59c394d8d8d 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -1751,6 +1751,27 @@ describe API::Projects do
subject { get api("/projects/#{project.id}", user) }
end
+
+ describe 'repository_storage attribute' do
+ before do
+ get api("/projects/#{project.id}", user)
+ end
+
+ context 'when authenticated as an admin' do
+ let(:user) { create(:admin) }
+
+ it 'returns repository_storage attribute' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['repository_storage']).to eq(project.repository_storage)
+ end
+ end
+
+ context 'when authenticated as a regular user' do
+ it 'does not return repository_storage attribute' do
+ expect(json_response).not_to have_key('repository_storage')
+ end
+ end
+ end
end
describe 'GET /projects/:id/users' do
@@ -2402,6 +2423,50 @@ describe API::Projects do
expect(response).to have_gitlab_http_status(:forbidden)
end
end
+
+ context 'when updating repository storage' do
+ let(:unknown_storage) { 'new-storage' }
+ let(:new_project) { create(:project, :repository, namespace: user.namespace) }
+
+ context 'as a user' do
+ it 'returns 200 but does not change repository_storage' do
+ expect do
+ Sidekiq::Testing.fake! do
+ put(api("/projects/#{new_project.id}", user), params: { repository_storage: unknown_storage, issues_enabled: false })
+ end
+ end.not_to change(ProjectUpdateRepositoryStorageWorker.jobs, :size)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['issues_enabled']).to eq(false)
+ expect(new_project.reload.repository.storage).to eq('default')
+ end
+ end
+
+ context 'as an admin' do
+ include_context 'custom session'
+
+ let(:admin) { create(:admin) }
+
+ it 'returns 500 when repository storage is unknown' do
+ put(api("/projects/#{new_project.id}", admin), params: { repository_storage: unknown_storage })
+
+ expect(response).to have_gitlab_http_status(:internal_server_error)
+ expect(json_response['message']).to match('ArgumentError')
+ end
+
+ it 'returns 200 when repository storage has changed' do
+ stub_storage_settings('test_second_storage' => { 'path' => 'tmp/tests/second_storage' })
+
+ expect do
+ Sidekiq::Testing.fake! do
+ put(api("/projects/#{new_project.id}", admin), params: { repository_storage: 'test_second_storage' })
+ end
+ end.to change(ProjectUpdateRepositoryStorageWorker.jobs, :size).by(1)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+ end
end
describe 'POST /projects/:id/archive' do
diff --git a/spec/services/projects/container_repository/cleanup_tags_service_spec.rb b/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
index ef7e9cda9e0..96cddef4628 100644
--- a/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
@@ -48,25 +48,37 @@ describe Projects::ContainerRepository::CleanupTagsService do
end
context 'when regex matching everything is specified' do
+ shared_examples 'removes all matches' do
+ it 'does remove B* and C' do
+ # The :A cannot be removed as config is shared with :latest
+ # The :E cannot be removed as it does not have valid manifest
+
+ expect_delete('sha256:configB').twice
+ expect_delete('sha256:configC')
+ expect_delete('sha256:configD')
+
+ is_expected.to include(status: :success, deleted: %w(D Bb Ba C))
+ end
+ end
+
let(:params) do
- { 'name_regex' => '.*' }
+ { 'name_regex_delete' => '.*' }
end
- it 'does remove B* and C' do
- # The :A cannot be removed as config is shared with :latest
- # The :E cannot be removed as it does not have valid manifest
+ it_behaves_like 'removes all matches'
- expect_delete('sha256:configB').twice
- expect_delete('sha256:configC')
- expect_delete('sha256:configD')
+ context 'with deprecated name_regex param' do
+ let(:params) do
+ { 'name_regex' => '.*' }
+ end
- is_expected.to include(status: :success, deleted: %w(D Bb Ba C))
+ it_behaves_like 'removes all matches'
end
end
- context 'when regex matching specific tags is used' do
+ context 'when delete regex matching specific tags is used' do
let(:params) do
- { 'name_regex' => 'C|D' }
+ { 'name_regex_delete' => 'C|D' }
end
it 'does remove C and D' do
@@ -75,11 +87,37 @@ describe Projects::ContainerRepository::CleanupTagsService do
is_expected.to include(status: :success, deleted: %w(D C))
end
+
+ context 'with overriding allow regex' do
+ let(:params) do
+ { 'name_regex_delete' => 'C|D',
+ 'name_regex_keep' => 'C' }
+ end
+
+ it 'does not remove C' do
+ expect_delete('sha256:configD')
+
+ is_expected.to include(status: :success, deleted: %w(D))
+ end
+ end
+
+ context 'with name_regex_delete overriding deprecated name_regex' do
+ let(:params) do
+ { 'name_regex' => 'C|D',
+ 'name_regex_delete' => 'D' }
+ end
+
+ it 'does not remove C' do
+ expect_delete('sha256:configD')
+
+ is_expected.to include(status: :success, deleted: %w(D))
+ end
+ end
end
context 'when removing a tagged image that is used by another tag' do
let(:params) do
- { 'name_regex' => 'Ba' }
+ { 'name_regex_delete' => 'Ba' }
end
it 'does not remove the tag' do
@@ -89,9 +127,23 @@ describe Projects::ContainerRepository::CleanupTagsService do
end
end
+ context 'with allow regex value' do
+ let(:params) do
+ { 'name_regex_delete' => '.*',
+ 'name_regex_keep' => 'B.*' }
+ end
+
+ it 'does not remove B*' do
+ expect_delete('sha256:configC')
+ expect_delete('sha256:configD')
+
+ is_expected.to include(status: :success, deleted: %w(D C))
+ end
+ end
+
context 'when removing keeping only 3' do
let(:params) do
- { 'name_regex' => '.*',
+ { 'name_regex_delete' => '.*',
'keep_n' => 3 }
end
@@ -104,7 +156,7 @@ describe Projects::ContainerRepository::CleanupTagsService do
context 'when removing older than 1 day' do
let(:params) do
- { 'name_regex' => '.*',
+ { 'name_regex_delete' => '.*',
'older_than' => '1 day' }
end
@@ -118,7 +170,7 @@ describe Projects::ContainerRepository::CleanupTagsService do
context 'when combining all parameters' do
let(:params) do
- { 'name_regex' => '.*',
+ { 'name_regex_delete' => '.*',
'keep_n' => 1,
'older_than' => '1 day' }
end
@@ -136,7 +188,7 @@ describe Projects::ContainerRepository::CleanupTagsService do
context 'with valid container_expiration_policy param' do
let(:params) do
- { 'name_regex' => '.*',
+ { 'name_regex_delete' => '.*',
'keep_n' => 1,
'older_than' => '1 day',
'container_expiration_policy' => true }
@@ -152,7 +204,7 @@ describe Projects::ContainerRepository::CleanupTagsService do
context 'without container_expiration_policy param' do
let(:params) do
- { 'name_regex' => '.*',
+ { 'name_regex_delete' => '.*',
'keep_n' => 1,
'older_than' => '1 day' }
end
diff --git a/spec/services/projects/fork_service_spec.rb b/spec/services/projects/fork_service_spec.rb
index fa6d42369c8..12c51d01d63 100644
--- a/spec/services/projects/fork_service_spec.rb
+++ b/spec/services/projects/fork_service_spec.rb
@@ -307,6 +307,27 @@ describe Projects::ForkService do
end
end
+ context 'when a project is already forked' do
+    it 'creates a new pool repository after the project is moved to a new shard' do
+ project = create(:project, :public, :repository)
+ fork_before_move = fork_project(project)
+
+ # Stub everything required to move a project to a Gitaly shard that does not exist
+ stub_storage_settings('test_second_storage' => { 'path' => 'tmp/tests/second_storage' })
+ allow_any_instance_of(Gitlab::Git::Repository).to receive(:fetch_repository_as_mirror).and_return(true)
+
+ Projects::UpdateRepositoryStorageService.new(project).execute('test_second_storage')
+ fork_after_move = fork_project(project)
+ pool_repository_before_move = PoolRepository.joins(:shard)
+ .where(source_project: project, shards: { name: 'default' }).first
+ pool_repository_after_move = PoolRepository.joins(:shard)
+ .where(source_project: project, shards: { name: 'test_second_storage' }).first
+
+ expect(fork_before_move.pool_repository).to eq(pool_repository_before_move)
+ expect(fork_after_move.pool_repository).to eq(pool_repository_after_move)
+ end
+ end
+
context 'when forking with object pools' do
let(:fork_from_project) { create(:project, :public) }
let(:forker) { create(:user) }
diff --git a/spec/services/projects/lsif_data_service_spec.rb b/spec/services/projects/lsif_data_service_spec.rb
index 93579869d1d..4866f848121 100644
--- a/spec/services/projects/lsif_data_service_spec.rb
+++ b/spec/services/projects/lsif_data_service_spec.rb
@@ -7,9 +7,8 @@ describe Projects::LsifDataService do
let(:project) { build_stubbed(:project) }
let(:path) { 'main.go' }
let(:commit_id) { Digest::SHA1.hexdigest(SecureRandom.hex) }
- let(:params) { { path: path, commit_id: commit_id } }
- let(:service) { described_class.new(artifact.file, project, params) }
+ let(:service) { described_class.new(artifact.file, project, commit_id) }
describe '#execute' do
def highlighted_value(value)
@@ -18,7 +17,7 @@ describe Projects::LsifDataService do
context 'fetched lsif file', :use_clean_rails_memory_store_caching do
it 'is cached' do
- service.execute
+ service.execute(path)
cached_data = Rails.cache.fetch("project:#{project.id}:lsif:#{commit_id}")
@@ -30,7 +29,7 @@ describe Projects::LsifDataService do
let(:path_prefix) { "/#{project.full_path}/-/blob/#{commit_id}" }
it 'returns lsif ranges for the file' do
- expect(service.execute).to eq([
+ expect(service.execute(path)).to eq([
{
end_char: 9,
end_line: 6,
@@ -87,7 +86,7 @@ describe Projects::LsifDataService do
let(:path) { 'morestrings/reverse.go' }
it 'returns lsif ranges for the file' do
- expect(service.execute.first).to eq({
+ expect(service.execute(path).first).to eq({
end_char: 2,
end_line: 11,
start_char: 1,
@@ -102,7 +101,7 @@ describe Projects::LsifDataService do
let(:path) { 'unknown.go' }
it 'returns nil' do
- expect(service.execute).to eq(nil)
+ expect(service.execute(path)).to eq(nil)
end
end
end
@@ -120,9 +119,7 @@ describe Projects::LsifDataService do
end
it 'fetches the document with the shortest absolute path' do
- service.instance_variable_set(:@docs, docs)
-
- expect(service.__send__(:doc_id)).to eq(3)
+ expect(service.__send__(:find_doc_id, docs, path)).to eq(3)
end
end
end
diff --git a/spec/services/projects/update_repository_storage_service_spec.rb b/spec/services/projects/update_repository_storage_service_spec.rb
new file mode 100644
index 00000000000..a0917f718e6
--- /dev/null
+++ b/spec/services/projects/update_repository_storage_service_spec.rb
@@ -0,0 +1,83 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Projects::UpdateRepositoryStorageService do
+ include Gitlab::ShellAdapter
+
+ subject { described_class.new(project) }
+
+ describe "#execute" do
+ let(:time) { Time.now }
+
+ before do
+ allow(Time).to receive(:now).and_return(time)
+ end
+
+ context 'without wiki and design repository' do
+ let(:project) { create(:project, :repository, repository_read_only: true, wiki_enabled: false) }
+
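+ # A successful move should update the tracked shard, remove the copy on the old storage, and clear the read-only flag.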
+ context 'when the move succeeds' do
+ it 'moves the repository to the new storage and unmarks the repository as read-only' do
+ old_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
+ project.repository.path_to_repo
+ end
+
+ expect_any_instance_of(Gitlab::Git::Repository).to receive(:fetch_repository_as_mirror)
+ .with(project.repository.raw).and_return(true)
+
+ subject.execute('test_second_storage')
+ expect(project).not_to be_repository_read_only
+ expect(project.repository_storage).to eq('test_second_storage')
+ expect(gitlab_shell.repository_exists?('default', old_path)).to be(false)
+ expect(project.project_repository.shard_name).to eq('test_second_storage')
+ end
+ end
+
+ context 'when the project is already on the target storage' do
+ it 'bails out and does nothing' do
+ expect do
+ subject.execute(project.repository_storage)
+ end.to raise_error(described_class::RepositoryAlreadyMoved)
+ end
+ end
+
+ context 'when the move fails' do
+ it 'unmarks the repository as read-only without updating the repository storage' do
+ expect_any_instance_of(Gitlab::Git::Repository).to receive(:fetch_repository_as_mirror)
+ .with(project.repository.raw).and_return(false)
+ expect(GitlabShellWorker).not_to receive(:perform_async)
+
+ subject.execute('test_second_storage')
+
+ expect(project).not_to be_repository_read_only
+ expect(project.repository_storage).to eq('default')
+ end
+ end
+ end
+
+ context 'with wiki repository' do
+ include_examples 'moves repository to another storage', 'wiki' do
+ let(:project) { create(:project, :repository, repository_read_only: true, wiki_enabled: true) }
+ let(:repository) { project.wiki.repository }
+
+ before do
+ project.create_wiki
+ end
+ end
+ end
+
+ context 'when an object pool was joined' do
+ let(:project) { create(:project, :repository, wiki_enabled: false, repository_read_only: true) }
+ let!(:pool) { create(:pool_repository, :ready, source_project: project) }
+
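+ # Pool repositories belong to a shard, so after the move the project should no longer be linked to its old pool.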
+ it 'leaves the pool' do
+ allow_any_instance_of(Gitlab::Git::Repository).to receive(:fetch_repository_as_mirror).and_return(true)
+
+ subject.execute('test_second_storage')
+
+ expect(project.reload_pool_repository).to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb
index 90fb6b932ee..8b5b6e5ac4e 100644
--- a/spec/services/projects/update_service_spec.rb
+++ b/spec/services/projects/update_service_spec.rb
@@ -613,6 +613,25 @@ describe Projects::UpdateService do
end
end
+ describe 'repository_storage' do
+ let(:admin) { create(:admin) }
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :repository) }
+ let(:opts) { { repository_storage: 'test_second_storage' } }
+
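+ # Changing repository_storage is restricted to admins.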
+ it 'calls the change repository storage method if the storage changed' do
+ expect(project).to receive(:change_repository_storage).with('test_second_storage')
+
+ update_project(project, admin, opts).inspect
+ end
+
+ it "doesn't call the change repository storage for non-admin users" do
+ expect(project).not_to receive(:change_repository_storage)
+
+ update_project(project, user, opts).inspect
+ end
+ end
+
def update_project(project, user, opts)
described_class.new(project, user, opts).execute
end
diff --git a/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb b/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb
new file mode 100644
index 00000000000..f222dff60ab
--- /dev/null
+++ b/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb
@@ -0,0 +1,101 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'moves repository to another storage' do |repository_type|
+ let(:project_repository_double) { double(:repository) }
+ let(:repository_double) { double(:repository) }
+
+ before do
+ # Default stub for non-specified params
+ allow(Gitlab::Git::Repository).to receive(:new).and_call_original
+
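+ # Return the doubles for repositories instantiated on the target storage so the Gitaly mirror fetch can be stubbed per repository.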
+ allow(Gitlab::Git::Repository).to receive(:new)
+ .with('test_second_storage', project.repository.raw.relative_path, project.repository.gl_repository, project.repository.full_path)
+ .and_return(project_repository_double)
+
+ allow(Gitlab::Git::Repository).to receive(:new)
+ .with('test_second_storage', repository.raw.relative_path, repository.gl_repository, repository.full_path)
+ .and_return(repository_double)
+ end
+
+ context 'when the move succeeds', :clean_gitlab_redis_shared_state do
+ before do
+ allow(project_repository_double)
+ .to receive(:fetch_repository_as_mirror)
+ .with(project.repository.raw)
+ .and_return(true)
+
+ allow(repository_double)
+ .to receive(:fetch_repository_as_mirror)
+ .with(repository.raw)
+ .and_return(true)
+ end
+
+ it "moves the project and its #{repository_type} repository to the new storage and unmarks the repository as read only" do
+ old_project_repository_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
+ project.repository.path_to_repo
+ end
+
+ old_repository_path = repository.full_path
+
+ subject.execute('test_second_storage')
+
+ expect(project).not_to be_repository_read_only
+ expect(project.repository_storage).to eq('test_second_storage')
+ expect(gitlab_shell.repository_exists?('default', old_project_repository_path)).to be(false)
+ expect(gitlab_shell.repository_exists?('default', old_repository_path)).to be(false)
+ end
+
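+ # A GC run is only enqueued after the move when the :repack_after_shard_migration flag and housekeeping are both enabled.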
+ context ':repack_after_shard_migration feature flag disabled' do
+ before do
+ stub_feature_flags(repack_after_shard_migration: false)
+ end
+
+ it 'does not enqueue a GC run' do
+ expect { subject.execute('test_second_storage') }
+ .not_to change(GitGarbageCollectWorker.jobs, :count)
+ end
+ end
+
+ context ':repack_after_shard_migration feature flag enabled' do
+ before do
+ stub_feature_flags(repack_after_shard_migration: true)
+ end
+
+ it 'does not enqueue a GC run if housekeeping is disabled' do
+ stub_application_setting(housekeeping_enabled: false)
+
+ expect { subject.execute('test_second_storage') }
+ .not_to change(GitGarbageCollectWorker.jobs, :count)
+ end
+
+ it 'enqueues a GC run' do
+ expect { subject.execute('test_second_storage') }
+ .to change(GitGarbageCollectWorker.jobs, :count).by(1)
+ end
+ end
+ end
+
+ context 'when the project is already on the target storage' do
+ it 'bails out and does nothing' do
+ expect do
+ subject.execute(project.repository_storage)
+ end.to raise_error(described_class::RepositoryAlreadyMoved)
+ end
+ end
+
+ context "when the move of the #{repository_type} repository fails" do
+ it 'unmarks the repository as read-only without updating the repository storage' do
+ allow(project_repository_double).to receive(:fetch_repository_as_mirror)
+ .with(project.repository.raw).and_return(true)
+ allow(repository_double).to receive(:fetch_repository_as_mirror)
+ .with(repository.raw).and_return(false)
+
+ expect(GitlabShellWorker).not_to receive(:perform_async)
+
+ subject.execute('test_second_storage')
+
+ expect(project).not_to be_repository_read_only
+ expect(project.repository_storage).to eq('default')
+ end
+ end
+end
diff --git a/spec/workers/project_update_repository_storage_worker_spec.rb b/spec/workers/project_update_repository_storage_worker_spec.rb
new file mode 100644
index 00000000000..aa6545f7f89
--- /dev/null
+++ b/spec/workers/project_update_repository_storage_worker_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ProjectUpdateRepositoryStorageWorker do
+ let(:project) { create(:project, :repository) }
+
+ subject { described_class.new }
+
+ describe "#perform" do
+ it "calls the update repository storage service" do
+ expect_any_instance_of(Projects::UpdateRepositoryStorageService)
+ .to receive(:execute).with('new_storage')
+
+ subject.perform(project.id, 'new_storage')
+ end
+
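+ # A move to the current storage raises RepositoryAlreadyMoved, which the worker rescues and logs instead of failing the job.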
+ it 'catches and logs RepositoryAlreadyMoved' do
+ expect(Rails.logger).to receive(:info).with(/repository already moved/)
+
+ expect { subject.perform(project.id, project.repository_storage) }.not_to raise_error
+ end
+ end
+end