-rw-r--r--  app/assets/javascripts/issuable_bulk_update_actions.js | 5
-rw-r--r--  app/assets/stylesheets/utilities.scss | 5
-rw-r--r--  app/controllers/concerns/workhorse_import_export_upload.rb | 33
-rw-r--r--  app/controllers/import/gitlab_groups_controller.rb | 63
-rw-r--r--  app/controllers/import/gitlab_projects_controller.rb | 27
-rw-r--r--  app/graphql/types/jira_import_type.rb | 6
-rw-r--r--  app/helpers/todos_helper.rb | 11
-rw-r--r--  app/models/import_export_upload.rb | 2
-rw-r--r--  app/services/groups/import_export/import_service.rb | 4
-rw-r--r--  app/views/projects/settings/operations/_incidents.html.haml | 2
-rw-r--r--  changelogs/unreleased/217366-expose-jira-successfully-imported-issues-count-in-graphql.yml | 5
-rw-r--r--  changelogs/unreleased/fix-design-todos-filename-duplication.yml | 5
-rw-r--r--  changelogs/unreleased/issue-bulk-update.yml | 5
-rw-r--r--  changelogs/unreleased/tr-fix-broken-incident-link.yml | 5
-rw-r--r--  config/routes/import.rb | 4
-rw-r--r--  doc/administration/logs.md | 6
-rw-r--r--  doc/api/graphql/reference/gitlab_schema.graphql | 15
-rw-r--r--  doc/api/graphql/reference/gitlab_schema.json | 54
-rw-r--r--  doc/api/graphql/reference/index.md | 3
-rw-r--r--  doc/integration/elasticsearch.md | 8
-rw-r--r--  lib/api/group_import.rb | 9
-rw-r--r--  lib/gitlab/application_rate_limiter.rb | 3
-rw-r--r--  lib/gitlab/instrumentation/redis.rb | 20
-rw-r--r--  lib/gitlab/instrumentation/redis_driver.rb | 61
-rw-r--r--  lib/gitlab/instrumentation_helper.rb | 9
-rw-r--r--  lib/gitlab/redis/wrapper.rb | 2
-rw-r--r--  lib/gitlab/setup_helper.rb | 2
-rw-r--r--  locale/gitlab.pot | 9
-rw-r--r--  spec/features/issues/bulk_assignment_labels_spec.rb | 18
-rw-r--r--  spec/graphql/types/jira_import_type_spec.rb | 5
-rw-r--r--  spec/helpers/todos_helper_spec.rb | 37
-rw-r--r--  spec/lib/gitlab/instrumentation/redis_driver_spec.rb | 42
-rw-r--r--  spec/lib/gitlab/instrumentation_helper_spec.rb | 4
-rw-r--r--  spec/requests/api/graphql/project/jira_import_spec.rb | 36
-rw-r--r--  spec/requests/import/gitlab_groups_controller_spec.rb | 258
-rw-r--r--  spec/services/groups/import_export/import_service_spec.rb | 31
36 files changed, 758 insertions, 56 deletions
diff --git a/app/assets/javascripts/issuable_bulk_update_actions.js b/app/assets/javascripts/issuable_bulk_update_actions.js
index 95e10cc75cc..01ea3eee16e 100644
--- a/app/assets/javascripts/issuable_bulk_update_actions.js
+++ b/app/assets/javascripts/issuable_bulk_update_actions.js
@@ -99,8 +99,11 @@ export default {
setOriginalDropdownData() {
const $labelSelect = $('.bulk-update .js-label-select');
+ const dirtyLabelIds = $labelSelect.data('marked') || [];
+ const chosenLabelIds = [...this.getOriginalMarkedIds(), ...dirtyLabelIds];
+
$labelSelect.data('common', this.getOriginalCommonIds());
- $labelSelect.data('marked', this.getOriginalMarkedIds());
+ $labelSelect.data('marked', chosenLabelIds);
$labelSelect.data('indeterminate', this.getOriginalIndeterminateIds());
},
diff --git a/app/assets/stylesheets/utilities.scss b/app/assets/stylesheets/utilities.scss
index 8cf5c533f1f..a8223a8ff56 100644
--- a/app/assets/stylesheets/utilities.scss
+++ b/app/assets/stylesheets/utilities.scss
@@ -97,11 +97,6 @@
padding-top: 16px;
}
-.gl-shim-mx-2 {
- margin-left: 4px;
- margin-right: 4px;
-}
-
.gl-text-purple { color: $purple; }
.gl-text-gray-800 { color: $gray-800; }
.gl-bg-purple-light { background-color: $purple-light; }
diff --git a/app/controllers/concerns/workhorse_import_export_upload.rb b/app/controllers/concerns/workhorse_import_export_upload.rb
new file mode 100644
index 00000000000..e9d666bec16
--- /dev/null
+++ b/app/controllers/concerns/workhorse_import_export_upload.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+module WorkhorseImportExportUpload
+ extend ActiveSupport::Concern
+ include WorkhorseRequest
+
+ included do
+ skip_before_action :verify_authenticity_token, only: %i[authorize]
+ before_action :verify_workhorse_api!, only: %i[authorize]
+ end
+
+ def authorize
+ set_workhorse_internal_api_content_type
+
+ authorized = ImportExportUploader.workhorse_authorize(
+ has_length: false,
+ maximum_size: ImportExportUpload::MAXIMUM_IMPORT_FILE_SIZE
+ )
+
+ render json: authorized
+ rescue SocketError
+ render json: _("Error uploading file"), status: :internal_server_error
+ end
+
+ private
+
+ def file_is_valid?(file)
+ return false unless file.is_a?(::UploadedFile)
+
+ ImportExportUploader::EXTENSION_WHITELIST
+ .include?(File.extname(file.original_filename).delete('.'))
+ end
+end
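As a rough illustration of the `file_is_valid?` guard above (not part of this change): it strips the dot from the uploaded file's extension and checks it against `ImportExportUploader::EXTENSION_WHITELIST`. A minimal standalone sketch, assuming a whitelist of `gz`:

```ruby
# Sketch of the extension check in WorkhorseImportExportUpload#file_is_valid?.
# The whitelist below is an assumption for illustration; the real values come
# from ImportExportUploader::EXTENSION_WHITELIST.
EXTENSION_WHITELIST = %w[gz].freeze

def extension_allowed?(filename)
  # File.extname("group_export.tar.gz") => ".gz"; #delete('.') => "gz"
  EXTENSION_WHITELIST.include?(File.extname(filename).delete('.'))
end

extension_allowed?('group_export.tar.gz') # => true
extension_allowed?('group_export.zip')    # => false
```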
diff --git a/app/controllers/import/gitlab_groups_controller.rb b/app/controllers/import/gitlab_groups_controller.rb
new file mode 100644
index 00000000000..9feded2c83f
--- /dev/null
+++ b/app/controllers/import/gitlab_groups_controller.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+class Import::GitlabGroupsController < ApplicationController
+ include WorkhorseImportExportUpload
+
+ before_action :ensure_group_import_enabled
+ before_action :import_rate_limit, only: %i[create]
+
+ def create
+ unless file_is_valid?(group_params[:file])
+ return redirect_back_or_default(options: { alert: _('Unable to process group import file') })
+ end
+
+ group_data = group_params.except(:file).merge(
+ visibility_level: closest_allowed_visibility_level,
+ import_export_upload: ImportExportUpload.new(import_file: group_params[:file])
+ )
+
+ group = ::Groups::CreateService.new(current_user, group_data).execute
+
+ if group.persisted?
+ Groups::ImportExport::ImportService.new(group: group, user: current_user).async_execute
+
+ redirect_to(
+ group_path(group),
+ notice: _("Group '%{group_name}' is being imported.") % { group_name: group.name }
+ )
+ else
+ redirect_back_or_default(
+ options: { alert: _("Group could not be imported: %{errors}") % { errors: group.errors.full_messages.to_sentence } }
+ )
+ end
+ end
+
+ private
+
+ def group_params
+ params.permit(:path, :name, :parent_id, :file)
+ end
+
+ def closest_allowed_visibility_level
+ if group_params[:parent_id].present?
+ parent_group = Group.find(group_params[:parent_id])
+
+ Gitlab::VisibilityLevel.closest_allowed_level(parent_group.visibility_level)
+ else
+ Gitlab::VisibilityLevel::PRIVATE
+ end
+ end
+
+ def ensure_group_import_enabled
+ render_404 unless Feature.enabled?(:group_import_export, @group, default_enabled: true)
+ end
+
+ def import_rate_limit
+ if Gitlab::ApplicationRateLimiter.throttled?(:group_import, scope: current_user)
+ Gitlab::ApplicationRateLimiter.log_request(request, :group_import_request_limit, current_user)
+
+ flash[:alert] = _('This endpoint has been requested too many times. Try again later.')
+ redirect_to new_group_path
+ end
+ end
+end
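The visibility fallback in `#closest_allowed_visibility_level` can be pictured with plain Ruby. This is only a sketch of the intended behaviour, assuming `Gitlab::VisibilityLevel.closest_allowed_level` picks the highest allowed level that is not more visible than the parent's:

```ruby
# Illustrative sketch only; the real logic lives in Gitlab::VisibilityLevel.
PRIVATE  = 0
INTERNAL = 10
PUBLIC   = 20

# Assumption: the closest allowed level is the highest permitted level that is
# not above the parent group's visibility; with no parent, groups are private.
def closest_allowed_level(parent_level, allowed_levels = [PRIVATE, INTERNAL, PUBLIC])
  allowed_levels.select { |level| level <= parent_level }.max || PRIVATE
end

closest_allowed_level(INTERNAL)                    # => 10, matches the parent
closest_allowed_level(PUBLIC, [PRIVATE, INTERNAL]) # => 10, clamped to an allowed level
```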
diff --git a/app/controllers/import/gitlab_projects_controller.rb b/app/controllers/import/gitlab_projects_controller.rb
index 6a3715a4675..39d053347f0 100644
--- a/app/controllers/import/gitlab_projects_controller.rb
+++ b/app/controllers/import/gitlab_projects_controller.rb
@@ -1,14 +1,11 @@
# frozen_string_literal: true
class Import::GitlabProjectsController < Import::BaseController
- include WorkhorseRequest
+ include WorkhorseImportExportUpload
before_action :whitelist_query_limiting, only: [:create]
before_action :verify_gitlab_project_import_enabled
- skip_before_action :verify_authenticity_token, only: [:authorize]
- before_action :verify_workhorse_api!, only: [:authorize]
-
def new
@namespace = Namespace.find(project_params[:namespace_id])
return render_404 unless current_user.can?(:create_projects, @namespace)
@@ -17,7 +14,7 @@ class Import::GitlabProjectsController < Import::BaseController
end
def create
- unless file_is_valid?
+ unless file_is_valid?(project_params[:file])
return redirect_back_or_default(options: { alert: _("You need to upload a GitLab project export archive (ending in .gz).") })
end
@@ -33,28 +30,8 @@ class Import::GitlabProjectsController < Import::BaseController
end
end
- def authorize
- set_workhorse_internal_api_content_type
-
- authorized = ImportExportUploader.workhorse_authorize(
- has_length: false,
- maximum_size: Gitlab::CurrentSettings.max_attachment_size.megabytes.to_i)
-
- render json: authorized
- rescue SocketError
- render json: _("Error uploading file"), status: :internal_server_error
- end
-
private
- def file_is_valid?
- return false unless project_params[:file].is_a?(::UploadedFile)
-
- filename = project_params[:file].original_filename
-
- ImportExportUploader::EXTENSION_WHITELIST.include?(File.extname(filename).delete('.'))
- end
-
def verify_gitlab_project_import_enabled
render_404 unless gitlab_project_import_enabled?
end
diff --git a/app/graphql/types/jira_import_type.rb b/app/graphql/types/jira_import_type.rb
index 4a124566ffb..cf58a53b40d 100644
--- a/app/graphql/types/jira_import_type.rb
+++ b/app/graphql/types/jira_import_type.rb
@@ -15,6 +15,12 @@ module Types
description: 'User that started the Jira import'
field :jira_project_key, GraphQL::STRING_TYPE, null: false,
description: 'Project key for the imported Jira project'
+ field :imported_issues_count, GraphQL::INT_TYPE, null: false,
+ description: 'Count of issues that were successfully imported'
+ field :failed_to_import_count, GraphQL::INT_TYPE, null: false,
+ description: 'Count of issues that failed to import'
+ field :total_issue_count, GraphQL::INT_TYPE, null: false,
+ description: 'Total count of issues that were attempted to import'
end
# rubocop: enable Graphql/AuthorizeTypes
end
diff --git a/app/helpers/todos_helper.rb b/app/helpers/todos_helper.rb
index 41f39c7e798..15c602757a5 100644
--- a/app/helpers/todos_helper.rb
+++ b/app/helpers/todos_helper.rb
@@ -37,11 +37,12 @@ module TodosHelper
end
def todo_target_title(todo)
- if todo.target
- "\"#{todo.target.title}\""
- else
- ""
- end
+ # Design To Dos' filenames are displayed in `#todo_target_link` (see `Design#to_reference`),
+ # so to avoid displaying duplicate filenames in the To Do list for designs,
+ # we return an empty string here.
+ return "" if todo.target.blank? || todo.for_design?
+
+ "\"#{todo.target.title}\""
end
def todo_parent_path(todo)
diff --git a/app/models/import_export_upload.rb b/app/models/import_export_upload.rb
index 7d73fd281f1..c42e0c4f968 100644
--- a/app/models/import_export_upload.rb
+++ b/app/models/import_export_upload.rb
@@ -4,6 +4,8 @@ class ImportExportUpload < ApplicationRecord
include WithUploads
include ObjectStorage::BackgroundMove
+ MAXIMUM_IMPORT_FILE_SIZE = 50.megabytes.freeze
+
belongs_to :project
belongs_to :group
diff --git a/app/services/groups/import_export/import_service.rb b/app/services/groups/import_export/import_service.rb
index 6f692c98c38..e1be40f0241 100644
--- a/app/services/groups/import_export/import_service.rb
+++ b/app/services/groups/import_export/import_service.rb
@@ -11,6 +11,10 @@ module Groups
@shared = Gitlab::ImportExport::Shared.new(@group)
end
+ def async_execute
+ GroupImportWorker.perform_async(current_user.id, group.id)
+ end
+
def execute
if valid_user_permissions? && import_file && restorer.restore
notify_success
diff --git a/app/views/projects/settings/operations/_incidents.html.haml b/app/views/projects/settings/operations/_incidents.html.haml
index 92fffa42b73..3b1b0a00380 100644
--- a/app/views/projects/settings/operations/_incidents.html.haml
+++ b/app/views/projects/settings/operations/_incidents.html.haml
@@ -9,7 +9,7 @@
= _('Expand')
%p
= _('Action to take when receiving an alert.')
- = link_to help_page_path('user/project/integrations/prometheus', anchor: 'taking-action-on-an-alert-ultimate') do
+ = link_to help_page_path('user/project/integrations/prometheus', anchor: 'taking-action-on-incidents-ultimate') do
= _('More information')
.settings-content
= form_for @project, url: project_settings_operations_path(@project), method: :patch do |f|
diff --git a/changelogs/unreleased/217366-expose-jira-successfully-imported-issues-count-in-graphql.yml b/changelogs/unreleased/217366-expose-jira-successfully-imported-issues-count-in-graphql.yml
new file mode 100644
index 00000000000..da139df5643
--- /dev/null
+++ b/changelogs/unreleased/217366-expose-jira-successfully-imported-issues-count-in-graphql.yml
@@ -0,0 +1,5 @@
+---
+title: Expose Jira imported issues count in GraphQL
+merge_request: 32580
+author:
+type: added
diff --git a/changelogs/unreleased/fix-design-todos-filename-duplication.yml b/changelogs/unreleased/fix-design-todos-filename-duplication.yml
new file mode 100644
index 00000000000..901e8bbc8e3
--- /dev/null
+++ b/changelogs/unreleased/fix-design-todos-filename-duplication.yml
@@ -0,0 +1,5 @@
+---
+title: Fix duplicate filename displayed in design todos
+merge_request: 32274
+author: Arun Kumar Mohan
+type: fixed
diff --git a/changelogs/unreleased/issue-bulk-update.yml b/changelogs/unreleased/issue-bulk-update.yml
new file mode 100644
index 00000000000..968c16f4d25
--- /dev/null
+++ b/changelogs/unreleased/issue-bulk-update.yml
@@ -0,0 +1,5 @@
+---
+title: Allow different labels in bulk editing issues
+merge_request: 32734
+author:
+type: fixed
diff --git a/changelogs/unreleased/tr-fix-broken-incident-link.yml b/changelogs/unreleased/tr-fix-broken-incident-link.yml
new file mode 100644
index 00000000000..bea818796d0
--- /dev/null
+++ b/changelogs/unreleased/tr-fix-broken-incident-link.yml
@@ -0,0 +1,5 @@
+---
+title: Fix broken help link on operations settings page
+merge_request: 32722
+author:
+type: fixed
diff --git a/config/routes/import.rb b/config/routes/import.rb
index 57a1fab48e9..e24b5f05847 100644
--- a/config/routes/import.rb
+++ b/config/routes/import.rb
@@ -63,6 +63,10 @@ namespace :import do
post :authorize
end
+ resource :gitlab_group, only: [:create] do
+ post :authorize
+ end
+
resource :manifest, only: [:create, :new], controller: :manifest do
get :status
get :jobs
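For reference, the singular `resource :gitlab_group` block added above (with its nested `post :authorize`) yields the path helpers exercised by the new request spec further down; the concrete paths shown here are inferred from the routing conventions:

```ruby
# Route helpers generated by the block above (used in
# spec/requests/import/gitlab_groups_controller_spec.rb):
#
#   import_gitlab_group_path            # POST /import/gitlab_group
#   authorize_import_gitlab_group_path  # POST /import/gitlab_group/authorize
```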
diff --git a/doc/administration/logs.md b/doc/administration/logs.md
index e234c92655c..4483d0d41ce 100644
--- a/doc/administration/logs.md
+++ b/doc/administration/logs.md
@@ -46,6 +46,8 @@ Line breaks have been added to this example for legibility:
"gitaly_duration_s":0.16,
"redis_calls":115,
"redis_duration_s":0.13,
+ "redis_read_bytes":1507378,
+ "redis_write_bytes":2920,
"correlation_id":"O1SdybnnIq7",
"cpu_s":17.50,
"db_duration_s":0.08,
@@ -56,7 +58,7 @@ Line breaks have been added to this example for legibility:
This example was a GET request for a specific
issue. Each line also contains performance data, with times in
-milliseconds:
+seconds:
1. `duration_s`: total time taken to retrieve the request
1. `queue_duration_s`: total time that the request was queued inside GitLab Workhorse
@@ -67,6 +69,8 @@ milliseconds:
1. `gitaly_duration_s`: total time taken by Gitaly calls
1. `gitaly_calls`: total number of calls made to Gitaly
1. `redis_calls`: total number of calls made to Redis
+1. `redis_read_bytes`: total bytes read from Redis
+1. `redis_write_bytes`: total bytes written to Redis
User clone and fetch activity using HTTP transport appears in this log as `action: git_upload_pack`.
diff --git a/doc/api/graphql/reference/gitlab_schema.graphql b/doc/api/graphql/reference/gitlab_schema.graphql
index 91f1413943c..e3b11d4cd1f 100644
--- a/doc/api/graphql/reference/gitlab_schema.graphql
+++ b/doc/api/graphql/reference/gitlab_schema.graphql
@@ -5421,6 +5421,16 @@ type JiraImport {
createdAt: Time
"""
+ Count of issues that failed to import
+ """
+ failedToImportCount: Int!
+
+ """
+ Count of issues that were successfully imported
+ """
+ importedIssuesCount: Int!
+
+ """
Project key for the imported Jira project
"""
jiraProjectKey: String!
@@ -5434,6 +5444,11 @@ type JiraImport {
User that started the Jira import
"""
scheduledBy: User
+
+ """
+ Total count of issues that were attempted to import
+ """
+ totalIssueCount: Int!
}
"""
diff --git a/doc/api/graphql/reference/gitlab_schema.json b/doc/api/graphql/reference/gitlab_schema.json
index 40bfa08cff3..746d7120ed4 100644
--- a/doc/api/graphql/reference/gitlab_schema.json
+++ b/doc/api/graphql/reference/gitlab_schema.json
@@ -15029,6 +15029,42 @@
"deprecationReason": null
},
{
+ "name": "failedToImportCount",
+ "description": "Count of issues that failed to import",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "importedIssuesCount",
+ "description": "Count of issues that were successfully imported",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
"name": "jiraProjectKey",
"description": "Project key for the imported Jira project",
"args": [
@@ -15073,6 +15109,24 @@
},
"isDeprecated": false,
"deprecationReason": null
+ },
+ {
+ "name": "totalIssueCount",
+ "description": "Total count of issues that were attempted to import",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Int",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
}
],
"inputFields": null,
diff --git a/doc/api/graphql/reference/index.md b/doc/api/graphql/reference/index.md
index 4164c26e751..a6e06925b2d 100644
--- a/doc/api/graphql/reference/index.md
+++ b/doc/api/graphql/reference/index.md
@@ -801,9 +801,12 @@ Represents an iteration object.
| Name | Type | Description |
| --- | ---- | ---------- |
| `createdAt` | Time | Timestamp of when the Jira import was created |
+| `failedToImportCount` | Int! | Count of issues that failed to import |
+| `importedIssuesCount` | Int! | Count of issues that were successfully imported |
| `jiraProjectKey` | String! | Project key for the imported Jira project |
| `scheduledAt` | Time | Timestamp of when the Jira import was scheduled |
| `scheduledBy` | User | User that started the Jira import |
+| `totalIssueCount` | Int! | Total count of issues that were attempted to import |
## JiraImportStartPayload
diff --git a/doc/integration/elasticsearch.md b/doc/integration/elasticsearch.md
index f82c13fc379..c23ea51b702 100644
--- a/doc/integration/elasticsearch.md
+++ b/doc/integration/elasticsearch.md
@@ -418,14 +418,14 @@ The following are some available Rake tasks:
| [`sudo gitlab-rake gitlab:elastic:index_projects`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Iterates over all projects and queues Sidekiq jobs to index them in the background. |
| [`sudo gitlab-rake gitlab:elastic:index_projects_status`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Determines the overall status of the indexing. It is done by counting the total number of indexed projects, dividing by a count of the total number of projects, then multiplying by 100. |
| [`sudo gitlab-rake gitlab:elastic:clear_index_status`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Deletes all instances of IndexStatus for all projects. |
-| [`sudo gitlab-rake gitlab:elastic:create_empty_index[<INDEX_NAME>]`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Generates an empty index on the Elasticsearch side only if it doesn't already exist. |
-| [`sudo gitlab-rake gitlab:elastic:delete_index[<INDEX_NAME>]`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Removes the GitLab index on the Elasticsearch instance. |
-| [`sudo gitlab-rake gitlab:elastic:recreate_index[<INDEX_NAME>]`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Wrapper task for `gitlab:elastic:delete_index[<INDEX_NAME>]` and `gitlab:elastic:create_empty_index[<INDEX_NAME>]`. |
+| [`sudo gitlab-rake gitlab:elastic:create_empty_index[<TARGET_NAME>]`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Generates an empty index and assigns an alias for it on the Elasticsearch side only if it doesn't already exist. |
+| [`sudo gitlab-rake gitlab:elastic:delete_index[<TARGET_NAME>]`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Removes the GitLab index and alias (if exists) on the Elasticsearch instance. |
+| [`sudo gitlab-rake gitlab:elastic:recreate_index[<TARGET_NAME>]`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Wrapper task for `gitlab:elastic:delete_index[<TARGET_NAME>]` and `gitlab:elastic:create_empty_index[<TARGET_NAME>]`. |
| [`sudo gitlab-rake gitlab:elastic:index_snippets`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Performs an Elasticsearch import that indexes the snippets data. |
| [`sudo gitlab-rake gitlab:elastic:projects_not_indexed`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Displays which projects are not indexed. |
NOTE: **Note:**
-The `INDEX_NAME` parameter is optional and will use the default index name from the current `RAILS_ENV` if not set.
+The `TARGET_NAME` parameter is optional and will use the default index/alias name from the current `RAILS_ENV` if not set.
### Environment variables
diff --git a/lib/api/group_import.rb b/lib/api/group_import.rb
index ed52506de14..a88a94c2c3d 100644
--- a/lib/api/group_import.rb
+++ b/lib/api/group_import.rb
@@ -2,8 +2,6 @@
module API
class GroupImport < Grape::API
- MAXIMUM_FILE_SIZE = 50.megabytes.freeze
-
helpers do
def parent_group
find_group!(params[:parent_id]) if params[:parent_id].present?
@@ -38,7 +36,10 @@ module API
status 200
content_type Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE
- ImportExportUploader.workhorse_authorize(has_length: false, maximum_size: MAXIMUM_FILE_SIZE)
+ ImportExportUploader.workhorse_authorize(
+ has_length: false,
+ maximum_size: ImportExportUpload::MAXIMUM_IMPORT_FILE_SIZE
+ )
end
desc 'Create a new group import' do
@@ -76,7 +77,7 @@ module API
group = ::Groups::CreateService.new(current_user, group_params).execute
if group.persisted?
- GroupImportWorker.perform_async(current_user.id, group.id) # rubocop:disable CodeReuse/Worker
+ ::Groups::ImportExport::ImportService.new(group: group, user: current_user).async_execute
accepted!
else
diff --git a/lib/gitlab/application_rate_limiter.rb b/lib/gitlab/application_rate_limiter.rb
index 2defbd26b98..6651f5ce134 100644
--- a/lib/gitlab/application_rate_limiter.rb
+++ b/lib/gitlab/application_rate_limiter.rb
@@ -28,7 +28,8 @@ module Gitlab
play_pipeline_schedule: { threshold: 1, interval: 1.minute },
show_raw_controller: { threshold: -> { Gitlab::CurrentSettings.current_application_settings.raw_blob_request_limit }, interval: 1.minute },
group_export: { threshold: 1, interval: 5.minutes },
- group_download_export: { threshold: 10, interval: 10.minutes }
+ group_download_export: { threshold: 10, interval: 10.minutes },
+ group_import: { threshold: 30, interval: 5.minutes }
}.freeze
end
diff --git a/lib/gitlab/instrumentation/redis.rb b/lib/gitlab/instrumentation/redis.rb
index 4bb5a68c68e..19d4d9d0f11 100644
--- a/lib/gitlab/instrumentation/redis.rb
+++ b/lib/gitlab/instrumentation/redis.rb
@@ -23,6 +23,8 @@ module Gitlab
REDIS_REQUEST_COUNT = :redis_request_count
REDIS_CALL_DURATION = :redis_call_duration
REDIS_CALL_DETAILS = :redis_call_details
+ REDIS_READ_BYTES = :redis_read_bytes
+ REDIS_WRITE_BYTES = :redis_write_bytes
# Milliseconds represented in seconds (from 1 to 500 milliseconds).
QUERY_TIME_BUCKETS = [0.001, 0.0025, 0.005, 0.01, 0.025, 0.05, 0.1, 0.25, 0.5].freeze
@@ -36,6 +38,24 @@ module Gitlab
::RequestStore[REDIS_REQUEST_COUNT] += 1
end
+ def self.increment_read_bytes(num_bytes)
+ ::RequestStore[REDIS_READ_BYTES] ||= 0
+ ::RequestStore[REDIS_READ_BYTES] += num_bytes
+ end
+
+ def self.increment_write_bytes(num_bytes)
+ ::RequestStore[REDIS_WRITE_BYTES] ||= 0
+ ::RequestStore[REDIS_WRITE_BYTES] += num_bytes
+ end
+
+ def self.read_bytes
+ ::RequestStore[REDIS_READ_BYTES] || 0
+ end
+
+ def self.write_bytes
+ ::RequestStore[REDIS_WRITE_BYTES] || 0
+ end
+
def self.detail_store
::RequestStore[REDIS_CALL_DETAILS] ||= []
end
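The new byte counters follow the same per-request pattern as the existing call counter: they accumulate in `RequestStore` and read back as zero when nothing was recorded. A rough standalone illustration (not part of this change), with a plain Hash standing in for `::RequestStore`:

```ruby
# Sketch of the per-request accumulation pattern; a Hash stands in for
# ::RequestStore, which is scoped to the current request in Rails.
store = {}

increment_write_bytes = ->(n) { store[:redis_write_bytes] = store.fetch(:redis_write_bytes, 0) + n }
increment_read_bytes  = ->(n) { store[:redis_read_bytes]  = store.fetch(:redis_read_bytes, 0) + n }

increment_write_bytes.call(9) # e.g. SET foo bar => 3 + 3 + 3 bytes written
increment_read_bytes.call(2)  # e.g. the "OK" status reply => 2 bytes read

store.fetch(:redis_read_bytes, 0)  # => 2
store.fetch(:redis_write_bytes, 0) # => 9
```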
diff --git a/lib/gitlab/instrumentation/redis_driver.rb b/lib/gitlab/instrumentation/redis_driver.rb
new file mode 100644
index 00000000000..2d0bce07be5
--- /dev/null
+++ b/lib/gitlab/instrumentation/redis_driver.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'redis'
+
+module Gitlab
+ module Instrumentation
+ class RedisDriver < ::Redis::Connection::Ruby
+ def write(command)
+ measure_write_size(command) if ::RequestStore.active?
+ super
+ end
+
+ def read
+ result = super
+ measure_read_size(result) if ::RequestStore.active?
+ result
+ end
+
+ private
+
+ def measure_write_size(command)
+ size = 0
+
+ # Mimic what happens in
+ # https://github.com/redis/redis-rb/blob/f597f21a6b954b685cf939febbc638f6c803e3a7/lib/redis/connection/command_helper.rb#L8.
+ # This count is an approximation that omits the Redis protocol overhead
+ # of type prefixes, length prefixes and line endings.
+ command.each do |x|
+ size += begin
+ if x.is_a? Array
+ x.inject(0) { |sum, y| sum + y.to_s.bytesize }
+ else
+ x.to_s.bytesize
+ end
+ end
+ end
+
+ ::Gitlab::Instrumentation::Redis.increment_write_bytes(size)
+ end
+
+ def measure_read_size(result)
+ # The superclass can return one of four types of results from read:
+ # https://github.com/redis/redis-rb/blob/f597f21a6b954b685cf939febbc638f6c803e3a7/lib/redis/connection/ruby.rb#L406
+ #
+ # 1. Error (exception, will not reach this line)
+ # 2. Status (string)
+ # 3. Integer (will be converted to string by to_s.bytesize and thrown away)
+ # 4. "Binary" string (i.e. may contain zero byte)
+ # 5. Array of binary string (recurses back into read)
+
+ # Avoid double-counting array responses: the array elements themselves
+ # are retrieved with 'read'.
+ unless result.is_a? Array
+ # This count is an approximation that omits the Redis protocol overhead
+ # of type prefixes, length prefixes and line endings.
+ ::Gitlab::Instrumentation::Redis.increment_read_bytes(result.to_s.bytesize)
+ end
+ end
+ end
+ end
+end
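To make the approximation concrete: the write-size measurement above sums the byte sizes of the command's arguments while ignoring RESP protocol overhead. A standalone sketch that reproduces the numbers the driver spec below expects:

```ruby
# Standalone version of the measure_write_size approximation.
def approximate_write_size(command)
  command.sum do |arg|
    if arg.is_a?(Array)
      arg.sum { |nested| nested.to_s.bytesize }
    else
      arg.to_s.bytesize
    end
  end
end

approximate_write_size([:set, 'foo', 'bar']) # => 9  (3 + 3 + 3)
approximate_write_size([:incr, 'foobar'])    # => 10 (4 + 6)
approximate_write_size([:hgetall, 'foobar']) # => 13 (7 + 6)
```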
diff --git a/lib/gitlab/instrumentation_helper.rb b/lib/gitlab/instrumentation_helper.rb
index 7c5a601cd5b..36adf1e6a80 100644
--- a/lib/gitlab/instrumentation_helper.rb
+++ b/lib/gitlab/instrumentation_helper.rb
@@ -4,7 +4,7 @@ module Gitlab
module InstrumentationHelper
extend self
- KEYS = %i(gitaly_calls gitaly_duration_s rugged_calls rugged_duration_s redis_calls redis_duration_s).freeze
+ KEYS = %i(gitaly_calls gitaly_duration_s rugged_calls rugged_duration_s redis_calls redis_duration_s redis_read_bytes redis_write_bytes).freeze
DURATION_PRECISION = 6 # microseconds
def add_instrumentation_data(payload)
@@ -28,6 +28,13 @@ module Gitlab
payload[:redis_calls] = redis_calls
payload[:redis_duration_s] = Gitlab::Instrumentation::Redis.query_time
end
+
+ redis_read_bytes = Gitlab::Instrumentation::Redis.read_bytes
+ redis_write_bytes = Gitlab::Instrumentation::Redis.write_bytes
+ if redis_read_bytes > 0 || redis_write_bytes > 0
+ payload[:redis_read_bytes] = redis_read_bytes
+ payload[:redis_write_bytes] = redis_write_bytes
+ end
end
# Returns the queuing duration for a Sidekiq job in seconds, as a float, if the
diff --git a/lib/gitlab/redis/wrapper.rb b/lib/gitlab/redis/wrapper.rb
index 06ee81ba172..bbd0b5ac8a9 100644
--- a/lib/gitlab/redis/wrapper.rb
+++ b/lib/gitlab/redis/wrapper.rb
@@ -100,6 +100,8 @@ module Gitlab
redis_url = config.delete(:url)
redis_uri = URI.parse(redis_url)
+ config[:driver] ||= ::Gitlab::Instrumentation::RedisDriver
+
if redis_uri.scheme == 'unix'
# Redis::Store does not handle Unix sockets well, so let's do it for them
config[:path] = redis_uri.path
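The `config[:driver] ||= ...` default relies on redis-rb accepting a connection driver class via the `:driver` option. A hedged usage sketch; the URL is a placeholder and the plain Ruby driver is used only to keep the example self-contained:

```ruby
require 'redis'

# redis-rb lets a driver class be injected via :driver. In the app,
# config[:driver] now defaults to ::Gitlab::Instrumentation::RedisDriver;
# the stock Ruby driver is substituted here for a standalone example.
# The client connects lazily, on the first command it issues.
redis = Redis.new(url: 'redis://localhost:6379', driver: ::Redis::Connection::Ruby)
```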
diff --git a/lib/gitlab/setup_helper.rb b/lib/gitlab/setup_helper.rb
index 3f36725cb66..7e41adc67bd 100644
--- a/lib/gitlab/setup_helper.rb
+++ b/lib/gitlab/setup_helper.rb
@@ -97,7 +97,7 @@ module Gitlab
def configuration_toml(gitaly_dir, storage_paths)
nodes = [{ storage: 'default', address: "unix:#{gitaly_dir}/gitaly.socket", primary: true, token: 'secret' }]
storages = [{ name: 'default', node: nodes }]
- config = { socket_path: "#{gitaly_dir}/praefect.socket", virtual_storage: storages }
+ config = { socket_path: "#{gitaly_dir}/praefect.socket", memory_queue_enabled: true, virtual_storage: storages }
config[:token] = 'secret' if Rails.env.test?
TomlRB.dump(config)
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index fe81d66646c..5bdee7e3d6b 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -10662,6 +10662,9 @@ msgstr ""
msgid "Group %{group_name} was successfully created."
msgstr ""
+msgid "Group '%{group_name}' is being imported."
+msgstr ""
+
msgid "Group Audit Events"
msgstr ""
@@ -10695,6 +10698,9 @@ msgstr ""
msgid "Group avatar"
msgstr ""
+msgid "Group could not be imported: %{errors}"
+msgstr ""
+
msgid "Group description"
msgstr ""
@@ -23104,6 +23110,9 @@ msgstr ""
msgid "Unable to load the merge request widget. Try reloading the page."
msgstr ""
+msgid "Unable to process group import file"
+msgstr ""
+
msgid "Unable to resolve"
msgstr ""
diff --git a/spec/features/issues/bulk_assignment_labels_spec.rb b/spec/features/issues/bulk_assignment_labels_spec.rb
index fc9176715c3..dbde41cf25b 100644
--- a/spec/features/issues/bulk_assignment_labels_spec.rb
+++ b/spec/features/issues/bulk_assignment_labels_spec.rb
@@ -302,7 +302,23 @@ describe 'Issues > Labels bulk assignment' do
sleep 1 # needed
expect(find("#issue_#{issue1.id}")).to have_content 'bug'
- expect(find("#issue_#{issue1.id}")).not_to have_content 'feature'
+ expect(find("#issue_#{issue1.id}")).to have_content 'feature'
+ end
+ end
+
+ context 'mark previously toggled label' do
+ before do
+ enable_bulk_update
+ end
+
+ it do
+ open_labels_dropdown ['feature']
+
+ check_issue issue1
+
+ update_issues
+
+ expect(find("#issue_#{issue1.id}")).to have_content 'feature'
end
end
diff --git a/spec/graphql/types/jira_import_type_spec.rb b/spec/graphql/types/jira_import_type_spec.rb
index ac1aa672e30..fa1152aec41 100644
--- a/spec/graphql/types/jira_import_type_spec.rb
+++ b/spec/graphql/types/jira_import_type_spec.rb
@@ -6,6 +6,9 @@ describe GitlabSchema.types['JiraImport'] do
specify { expect(described_class.graphql_name).to eq('JiraImport') }
it 'has the expected fields' do
- expect(described_class).to have_graphql_fields(:jira_project_key, :createdAt, :scheduled_at, :scheduled_by)
+ expect(described_class).to have_graphql_fields(
+ :jira_project_key, :created_at, :scheduled_at, :scheduled_by,
+ :failed_to_import_count, :imported_issues_count, :total_issue_count
+ )
end
end
diff --git a/spec/helpers/todos_helper_spec.rb b/spec/helpers/todos_helper_spec.rb
index b09e1e2b83b..19a0826be1d 100644
--- a/spec/helpers/todos_helper_spec.rb
+++ b/spec/helpers/todos_helper_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe TodosHelper do
let_it_be(:user) { create(:user) }
let_it_be(:author) { create(:user) }
- let_it_be(:issue) { create(:issue) }
+ let_it_be(:issue) { create(:issue, title: 'Issue 1') }
let_it_be(:design) { create(:design, issue: issue) }
let_it_be(:note) do
create(:note,
@@ -68,6 +68,41 @@ describe TodosHelper do
end
end
+ describe '#todo_target_title' do
+ context 'when the target does not exist' do
+ let(:todo) { double('Todo', target: nil) }
+
+ it 'returns an empty string' do
+ title = helper.todo_target_title(todo)
+ expect(title).to eq("")
+ end
+ end
+
+ context 'when given a design todo' do
+ let(:todo) { design_todo }
+
+ it 'returns an empty string' do
+ title = helper.todo_target_title(todo)
+ expect(title).to eq("")
+ end
+ end
+
+ context 'when given a non-design todo' do
+ let(:todo) do
+ create(:todo, :assigned,
+ user: user,
+ project: issue.project,
+ target: issue,
+ author: author)
+ end
+
+ it 'returns the title' do
+ title = helper.todo_target_title(todo)
+ expect(title).to eq("\"Issue 1\"")
+ end
+ end
+ end
+
describe '#todo_target_path' do
context 'when given a design' do
let(:todo) { design_todo }
diff --git a/spec/lib/gitlab/instrumentation/redis_driver_spec.rb b/spec/lib/gitlab/instrumentation/redis_driver_spec.rb
new file mode 100644
index 00000000000..7b944d12d7d
--- /dev/null
+++ b/spec/lib/gitlab/instrumentation/redis_driver_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require 'rspec-parameterized'
+
+describe Gitlab::Instrumentation::RedisDriver, :clean_gitlab_redis_shared_state, :request_store do
+ using RSpec::Parameterized::TableSyntax
+
+ describe 'read and write' do
+ where(:setup, :command, :expect_write, :expect_read) do
+ # The response is 'OK', the request size is the combined size of array
+ # elements. Exercise counting of a status reply.
+ [] | [:set, 'foo', 'bar'] | 3 + 3 + 3 | 2
+
+ # The response is 1001, so 4 bytes. Exercise counting an integer reply.
+ [[:set, 'foobar', 1000]] | [:incr, 'foobar'] | 4 + 6 | 4
+
+ # Exercise counting empty multi bulk reply
+ [] | [:hgetall, 'foobar'] | 7 + 6 | 0
+
+ # Hgetall response length is combined length of keys and values in the
+ # hash. Exercises counting of a multi bulk reply
+ [[:hset, 'myhash', 'field', 'hello world']] | [:hgetall, 'myhash'] | 7 + 6 | 5 + 11
+
+ # Exercise counting of a bulk reply
+ [[:set, 'foo', 'bar' * 100]] | [:get, 'foo'] | 3 + 3 | 3 * 100
+ end
+
+ with_them do
+ it 'counts bytes read and written' do
+ Gitlab::Redis::SharedState.with do |redis|
+ setup.each { |cmd| redis.call(cmd) }
+ RequestStore.clear!
+ redis.call(command)
+ end
+
+ expect(Gitlab::Instrumentation::Redis.read_bytes).to eq(expect_read)
+ expect(Gitlab::Instrumentation::Redis.write_bytes).to eq(expect_write)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/instrumentation_helper_spec.rb b/spec/lib/gitlab/instrumentation_helper_spec.rb
index fdb842dac0f..144ce0247ed 100644
--- a/spec/lib/gitlab/instrumentation_helper_spec.rb
+++ b/spec/lib/gitlab/instrumentation_helper_spec.rb
@@ -34,12 +34,14 @@ describe Gitlab::InstrumentationHelper do
context 'when Redis calls are made' do
it 'adds Redis data and omits Gitaly data' do
- Gitlab::Redis::Cache.with { |redis| redis.get('test-instrumentation') }
+ Gitlab::Redis::Cache.with { |redis| redis.set('test-instrumentation', 123) }
subject
expect(payload[:redis_calls]).to eq(1)
expect(payload[:redis_duration_s]).to be >= 0
+ expect(payload[:redis_read_bytes]).to be >= 0
+ expect(payload[:redis_write_bytes]).to be >= 0
expect(payload[:gitaly_calls]).to be_nil
expect(payload[:gitaly_duration]).to be_nil
end
diff --git a/spec/requests/api/graphql/project/jira_import_spec.rb b/spec/requests/api/graphql/project/jira_import_spec.rb
index e063068eb1a..7be14696963 100644
--- a/spec/requests/api/graphql/project/jira_import_spec.rb
+++ b/spec/requests/api/graphql/project/jira_import_spec.rb
@@ -7,9 +7,30 @@ describe 'query Jira import data' do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project, :private, :import_started, import_type: 'jira') }
- let_it_be(:jira_import1) { create(:jira_import_state, :finished, project: project, jira_project_key: 'AA', user: current_user, created_at: 2.days.ago) }
- let_it_be(:jira_import2) { create(:jira_import_state, :finished, project: project, jira_project_key: 'BB', user: current_user, created_at: 5.days.ago) }
-
+ let_it_be(:jira_import1) do
+ create(
+ :jira_import_state, :finished,
+ project: project,
+ jira_project_key: 'AA',
+ user: current_user,
+ created_at: 2.days.ago,
+ failed_to_import_count: 2,
+ imported_issues_count: 2,
+ total_issue_count: 4
+ )
+ end
+ let_it_be(:jira_import2) do
+ create(
+ :jira_import_state, :finished,
+ project: project,
+ jira_project_key: 'BB',
+ user: current_user,
+ created_at: 5.days.ago,
+ failed_to_import_count: 1,
+ imported_issues_count: 2,
+ total_issue_count: 3
+ )
+ end
let(:query) do
%(
query {
@@ -23,6 +44,9 @@ describe 'query Jira import data' do
scheduledBy {
username
}
+ importedIssuesCount
+ failedToImportCount
+ totalIssueCount
}
}
}
@@ -64,10 +88,16 @@ describe 'query Jira import data' do
it 'returns list of jira imports' do
jira_proket_keys = jira_imports.map {|ji| ji['jiraProjectKey']}
usernames = jira_imports.map {|ji| ji.dig('scheduledBy', 'username')}
+ imported_issues_count = jira_imports.map {|ji| ji.dig('importedIssuesCount')}
+ failed_issues_count = jira_imports.map {|ji| ji.dig('failedToImportCount')}
+ total_issue_count = jira_imports.map {|ji| ji.dig('totalIssueCount')}
expect(jira_imports.size).to eq 2
expect(jira_proket_keys).to eq %w(BB AA)
expect(usernames).to eq [current_user.username, current_user.username]
+ expect(imported_issues_count).to eq [2, 2]
+ expect(failed_issues_count).to eq [1, 2]
+ expect(total_issue_count).to eq [3, 4]
end
end
diff --git a/spec/requests/import/gitlab_groups_controller_spec.rb b/spec/requests/import/gitlab_groups_controller_spec.rb
new file mode 100644
index 00000000000..35f2bf0c2f7
--- /dev/null
+++ b/spec/requests/import/gitlab_groups_controller_spec.rb
@@ -0,0 +1,258 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Import::GitlabGroupsController do
+ include WorkhorseHelpers
+
+ let(:import_path) { "#{Dir.tmpdir}/gitlab_groups_controller_spec" }
+ let(:workhorse_token) { JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256') }
+ let(:workhorse_headers) do
+ { 'GitLab-Workhorse' => '1.0', Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => workhorse_token }
+ end
+
+ before do
+ allow_next_instance_of(Gitlab::ImportExport) do |import_export|
+ expect(import_export).to receive(:storage_path).and_return(import_path)
+ end
+
+ stub_uploads_object_storage(ImportExportUploader)
+ end
+
+ after do
+ FileUtils.rm_rf(import_path, secure: true)
+ end
+
+ describe 'POST create' do
+ subject(:import_request) { upload_archive(file_upload, workhorse_headers, request_params) }
+
+ let_it_be(:user) { create(:user) }
+
+ let(:file) { File.join('spec', %w[fixtures group_export.tar.gz]) }
+ let(:file_upload) { fixture_file_upload(file) }
+
+ before do
+ login_as(user)
+ end
+
+ def upload_archive(file, headers = {}, params = {})
+ workhorse_finalize(
+ import_gitlab_group_path,
+ method: :post,
+ file_key: :file,
+ params: params.merge(file: file),
+ headers: headers,
+ send_rewritten_field: true
+ )
+ end
+
+ context 'when importing without a parent group' do
+ let(:request_params) { { path: 'test-group-import', name: 'test-group-import' } }
+
+ it 'successfully creates the group' do
+ expect { import_request }.to change { Group.count }.by 1
+
+ group = Group.find_by(name: 'test-group-import')
+
+ expect(response).to have_gitlab_http_status(:found)
+ expect(response).to redirect_to(group_path(group))
+ expect(flash[:notice]).to include('is being imported')
+ end
+
+ it 'imports the group data', :sidekiq_inline do
+ allow(GroupImportWorker).to receive(:perform_async).and_call_original
+
+ import_request
+
+ group = Group.find_by(name: 'test-group-import')
+
+ expect(GroupImportWorker).to have_received(:perform_async).with(user.id, group.id)
+
+ expect(group.description).to eq 'A voluptate non sequi temporibus quam at.'
+ expect(group.visibility_level).to eq Gitlab::VisibilityLevel::PRIVATE
+ end
+ end
+
+ context 'when importing to a parent group' do
+ let(:request_params) { { path: 'test-group-import', name: 'test-group-import', parent_id: parent_group.id } }
+ let(:parent_group) { create(:group) }
+
+ before do
+ parent_group.add_owner(user)
+ end
+
+ it 'creates a new group under the parent' do
+ expect { import_request }
+ .to change { parent_group.children.reload.size }.by 1
+
+ expect(response).to have_gitlab_http_status(:found)
+ end
+
+ shared_examples 'is created with the parent visibility level' do |visibility_level|
+ before do
+ parent_group.update!(visibility_level: visibility_level)
+ end
+
+ it "imports a #{Gitlab::VisibilityLevel.level_name(visibility_level)} group" do
+ import_request
+
+ group = parent_group.children.find_by(name: 'test-group-import')
+ expect(group.visibility_level).to eq visibility_level
+ end
+ end
+
+ [
+ Gitlab::VisibilityLevel::PUBLIC,
+ Gitlab::VisibilityLevel::INTERNAL,
+ Gitlab::VisibilityLevel::PRIVATE
+ ].each do |visibility_level|
+ context "when the parent is #{Gitlab::VisibilityLevel.level_name(visibility_level)}" do
+ include_examples 'is created with the parent visibility level', visibility_level
+ end
+ end
+ end
+
+ context 'when supplied invalid params' do
+ subject(:import_request) do
+ upload_archive(
+ file_upload,
+ workhorse_headers,
+ { path: '', name: '' }
+ )
+ end
+
+ it 'responds with an error' do
+ expect { import_request }.not_to change { Group.count }
+
+ expect(flash[:alert])
+ .to include('Group could not be imported', "Name can't be blank", 'Group URL is too short')
+ end
+ end
+
+ context 'when the user is not authorized to create groups' do
+ let(:request_params) { { path: 'test-group-import', name: 'test-group-import' } }
+ let(:user) { create(:user, can_create_group: false) }
+
+ it 'returns an error' do
+ expect { import_request }.not_to change { Group.count }
+
+ expect(flash[:alert]).to eq 'Group could not be imported: You don’t have permission to create groups.'
+ end
+ end
+
+ context 'when the requests exceed the rate limit' do
+ let(:request_params) { { path: 'test-group-import', name: 'test-group-import' } }
+
+ before do
+ allow(Gitlab::ApplicationRateLimiter).to receive(:throttled?).and_return(true)
+ end
+
+ it 'throttles the requests' do
+ import_request
+
+ expect(response).to have_gitlab_http_status(:found)
+ expect(flash[:alert]).to eq 'This endpoint has been requested too many times. Try again later.'
+ end
+ end
+
+ context 'when group import FF is disabled' do
+ let(:request_params) { { path: 'test-group-import', name: 'test-group-import' } }
+
+ before do
+ stub_feature_flags(group_import_export: false)
+ end
+
+ it 'returns an error' do
+ expect { import_request }.not_to change { Group.count }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when the parent group is invalid' do
+ let(:request_params) { { path: 'test-group-import', name: 'test-group-import', parent_id: -1 } }
+
+ it 'does not create a new group' do
+ expect { import_request }.not_to change { Group.count }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when the user is not an owner of the parent group' do
+ let(:request_params) { { path: 'test-group-import', name: 'test-group-import', parent_id: parent_group.id } }
+ let(:parent_group) { create(:group) }
+
+ it 'returns an error' do
+ expect { import_request }.not_to change { parent_group.children.reload.count }
+
+ expect(flash[:alert]).to include "You don’t have permission to create a subgroup in this group"
+ end
+ end
+ end
+
+ describe 'POST authorize' do
+ let_it_be(:user) { create(:user) }
+
+ before do
+ login_as(user)
+ end
+
+ context 'when using a workhorse header' do
+ subject(:authorize_request) { post authorize_import_gitlab_group_path, headers: workhorse_headers }
+
+ it 'authorizes the request' do
+ authorize_request
+
+ expect(response).to have_gitlab_http_status :ok
+ expect(response.media_type).to eq Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE
+ expect(json_response['TempPath']).to eq ImportExportUploader.workhorse_local_upload_path
+ end
+ end
+
+ context 'when the request bypasses gitlab-workhorse' do
+ subject(:authorize_request) { post authorize_import_gitlab_group_path }
+
+ it 'rejects the request' do
+ expect { authorize_request }.to raise_error(JWT::DecodeError)
+ end
+ end
+
+ context 'when direct upload is enabled' do
+ subject(:authorize_request) { post authorize_import_gitlab_group_path, headers: workhorse_headers }
+
+ before do
+ stub_uploads_object_storage(ImportExportUploader, enabled: true, direct_upload: true)
+ end
+
+ it 'accepts the request and stores the files' do
+ authorize_request
+
+ expect(response).to have_gitlab_http_status :ok
+ expect(response.media_type).to eq Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE
+ expect(json_response).not_to have_key 'TempPath'
+
+ expect(json_response['RemoteObject'].keys)
+ .to include('ID', 'GetURL', 'StoreURL', 'DeleteURL', 'MultipartUpload')
+ end
+ end
+
+ context 'when direct upload is disabled' do
+ subject(:authorize_request) { post authorize_import_gitlab_group_path, headers: workhorse_headers }
+
+ before do
+ stub_uploads_object_storage(ImportExportUploader, enabled: true, direct_upload: false)
+ end
+
+ it 'handles the local file' do
+ authorize_request
+
+ expect(response).to have_gitlab_http_status :ok
+ expect(response.media_type).to eq Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE
+
+ expect(json_response['TempPath']).to eq ImportExportUploader.workhorse_local_upload_path
+ expect(json_response['RemoteObject']).to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/services/groups/import_export/import_service_spec.rb b/spec/services/groups/import_export/import_service_spec.rb
index 256e0a1b3c5..b1eae057c47 100644
--- a/spec/services/groups/import_export/import_service_spec.rb
+++ b/spec/services/groups/import_export/import_service_spec.rb
@@ -3,6 +3,37 @@
require 'spec_helper'
describe Groups::ImportExport::ImportService do
+ describe '#async_execute' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+
+ context 'when the job can be successfully scheduled' do
+ subject(:import_service) { described_class.new(group: group, user: user) }
+
+ it 'enqueues an import job' do
+ expect(GroupImportWorker).to receive(:perform_async).with(user.id, group.id)
+
+ import_service.async_execute
+ end
+
+ it 'returns truthy' do
+ expect(import_service.async_execute).to be_truthy
+ end
+ end
+
+ context 'when the job cannot be scheduled' do
+ subject(:import_service) { described_class.new(group: group, user: user) }
+
+ before do
+ allow(GroupImportWorker).to receive(:perform_async).and_return(nil)
+ end
+
+ it 'returns falsey' do
+ expect(import_service.async_execute).to be_falsey
+ end
+ end
+ end
+
context 'with group_import_ndjson feature flag disabled' do
let(:user) { create(:admin) }
let(:group) { create(:group) }