Diffstat (limited to 'spec/lib/bulk_imports')
-rw-r--r--  spec/lib/bulk_imports/common/extractors/graphql_extractor_spec.rb               |  15
-rw-r--r--  spec/lib/bulk_imports/common/graphql/get_members_query_spec.rb                  |  56
-rw-r--r--  spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb             | 210
-rw-r--r--  spec/lib/bulk_imports/common/pipelines/members_pipeline_spec.rb                 | 161
-rw-r--r--  spec/lib/bulk_imports/groups/graphql/get_group_query_spec.rb                    |  27
-rw-r--r--  spec/lib/bulk_imports/groups/graphql/get_members_query_spec.rb                  |  35
-rw-r--r--  spec/lib/bulk_imports/groups/graphql/get_projects_query_spec.rb                 |  40
-rw-r--r--  spec/lib/bulk_imports/groups/pipelines/members_pipeline_spec.rb                 | 119
-rw-r--r--  spec/lib/bulk_imports/groups/stage_spec.rb                                      |   2
-rw-r--r--  spec/lib/bulk_imports/groups/transformers/member_attributes_transformer_spec.rb |  24
-rw-r--r--  spec/lib/bulk_imports/projects/graphql/get_project_query_spec.rb                |  27
-rw-r--r--  spec/lib/bulk_imports/projects/graphql/get_repository_query_spec.rb             |  32
-rw-r--r--  spec/lib/bulk_imports/projects/graphql/get_snippet_repository_query_spec.rb     |  78
-rw-r--r--  spec/lib/bulk_imports/projects/stage_spec.rb                                    |   1
14 files changed, 577 insertions(+), 250 deletions(-)
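
The recurring refactor across these specs: BulkImports GraphQL query objects move from class-level helpers (described_class.to_s, described_class.variables(context)) to instances built with a pipeline context. A minimal sketch of the new calling convention, inferred from the assertions below (only the shown return values are pinned down by the specs):

    # Instance-based query API exercised throughout this diff.
    query = BulkImports::Groups::Graphql::GetGroupQuery.new(context: context)

    query.to_s            # GraphQL query string, validated against GitlabSchema
    query.variables       # => { full_path: context.entity.source_full_path }
    query.data_path       # => %w[data group]
    query.page_info_path  # => %w[data group page_info]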
diff --git a/spec/lib/bulk_imports/common/extractors/graphql_extractor_spec.rb b/spec/lib/bulk_imports/common/extractors/graphql_extractor_spec.rb
index 80607485b6e..50c54a7b47f 100644
--- a/spec/lib/bulk_imports/common/extractors/graphql_extractor_spec.rb
+++ b/spec/lib/bulk_imports/common/extractors/graphql_extractor_spec.rb
@@ -8,12 +8,15 @@ RSpec.describe BulkImports::Common::Extractors::GraphqlExtractor do
let(:response) { double(original_hash: { 'data' => { 'foo' => 'bar' }, 'page_info' => {} }) }
let(:options) do
{
- query: double(
- to_s: 'test',
- variables: {},
- data_path: %w[data foo],
- page_info_path: %w[data page_info]
- )
+ query:
+ double(
+ new: double(
+ to_s: 'test',
+ variables: {},
+ data_path: %w[data foo],
+ page_info_path: %w[data page_info]
+ )
+ )
}
end
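
The reshaped double mirrors that refactor: the extractor now receives a query class in its options and instantiates it per run, hence the extra new: layer. A hedged sketch of the assumed call site inside the extractor (the method name is illustrative, not taken from this diff):

    # Illustrative only: how the extractor plausibly consumes options[:query].
    def graphql_query(context)
      options[:query].new(context: context) # satisfied by double(new: double(...))
    end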
diff --git a/spec/lib/bulk_imports/common/graphql/get_members_query_spec.rb b/spec/lib/bulk_imports/common/graphql/get_members_query_spec.rb
new file mode 100644
index 00000000000..e3a7335a238
--- /dev/null
+++ b/spec/lib/bulk_imports/common/graphql/get_members_query_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Common::Graphql::GetMembersQuery do
+ let(:entity) { create(:bulk_import_entity, :group_entity) }
+ let(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ subject(:query) { described_class.new(context: context) }
+
+ it 'has a valid query' do
+ parsed_query = GraphQL::Query.new(
+ GitlabSchema,
+ query.to_s,
+ variables: query.variables
+ )
+ result = GitlabSchema.static_validator.validate(parsed_query)
+
+ expect(result[:errors]).to be_empty
+ end
+
+ describe '#data_path' do
+ it 'returns data path' do
+ expected = %w[data portable members nodes]
+
+ expect(query.data_path).to eq(expected)
+ end
+ end
+
+ describe '#page_info_path' do
+ it 'returns pagination information path' do
+ expected = %w[data portable members page_info]
+
+ expect(query.page_info_path).to eq(expected)
+ end
+ end
+
+ describe '#to_s' do
+ context 'when entity is group' do
+ it 'queries group & group members' do
+ expect(query.to_s).to include('group')
+ expect(query.to_s).to include('groupMembers')
+ end
+ end
+
+ context 'when entity is project' do
+ let(:entity) { create(:bulk_import_entity, :project_entity) }
+
+ it 'queries project & project members' do
+ expect(query.to_s).to include('project')
+ expect(query.to_s).to include('projectMembers')
+ end
+ end
+ end
+end
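
The assertions fix the aliases the query must emit: data_path and page_info_path both route through 'portable' and 'members', and #to_s must name group/groupMembers or project/projectMembers depending on the entity. An illustrative #to_s that would satisfy these expectations (the selection set and the group_entity? predicate are assumptions):

    def to_s
      portable = context.entity.group_entity? ? 'group' : 'project'
      members  = context.entity.group_entity? ? 'groupMembers' : 'projectMembers'

      # The portable/members aliases line up with #data_path and #page_info_path.
      <<~GRAPHQL
        query($full_path: ID!) {
          portable: #{portable}(fullPath: $full_path) {
            members: #{members} {
              page_info: pageInfo { has_next_page: hasNextPage next_page: endCursor }
              nodes { access_level: accessLevel { integer_value: integerValue } user { public_email: publicEmail } }
            }
          }
        }
      GRAPHQL
    end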
diff --git a/spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb
new file mode 100644
index 00000000000..b769aa4af5a
--- /dev/null
+++ b/spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb
@@ -0,0 +1,210 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Common::Pipelines::LfsObjectsPipeline do
+ let_it_be(:portable) { create(:project) }
+ let_it_be(:oid) { 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855' }
+
+ let(:tmpdir) { Dir.mktmpdir }
+ let(:entity) { create(:bulk_import_entity, :project_entity, project: portable, source_full_path: 'test') }
+ let(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(tracker) }
+ let(:lfs_dir_path) { tmpdir }
+ let(:lfs_json_file_path) { File.join(lfs_dir_path, 'lfs_objects.json') }
+ let(:lfs_file_path) { File.join(lfs_dir_path, oid) }
+
+ subject(:pipeline) { described_class.new(context) }
+
+ before do
+ FileUtils.mkdir_p(lfs_dir_path)
+ FileUtils.touch(lfs_json_file_path)
+ FileUtils.touch(lfs_file_path)
+ File.write(lfs_json_file_path, { oid => [0, 1, 2, nil] }.to_json)
+
+ allow(Dir).to receive(:mktmpdir).with('bulk_imports').and_return(tmpdir)
+ end
+
+ after do
+ FileUtils.remove_entry(tmpdir) if Dir.exist?(tmpdir)
+ end
+
+ describe '#run' do
+ it 'imports lfs objects into destination project and removes tmpdir' do
+ allow(pipeline)
+ .to receive(:extract)
+ .and_return(BulkImports::Pipeline::ExtractedData.new(data: [lfs_json_file_path, lfs_file_path]))
+
+ pipeline.run
+
+ expect(portable.lfs_objects.count).to eq(1)
+ expect(portable.lfs_objects_projects.count).to eq(4)
+ expect(Dir.exist?(tmpdir)).to eq(false)
+ end
+ end
+
+ describe '#extract' do
+ it 'downloads & extracts lfs objects filepaths' do
+ download_service = instance_double("BulkImports::FileDownloadService")
+ decompression_service = instance_double("BulkImports::FileDecompressionService")
+ extraction_service = instance_double("BulkImports::ArchiveExtractionService")
+
+ expect(BulkImports::FileDownloadService)
+ .to receive(:new)
+ .with(
+ configuration: context.configuration,
+ relative_url: "/#{entity.pluralized_name}/test/export_relations/download?relation=lfs_objects",
+ tmpdir: tmpdir,
+ filename: 'lfs_objects.tar.gz')
+ .and_return(download_service)
+ expect(BulkImports::FileDecompressionService).to receive(:new).with(tmpdir: tmpdir, filename: 'lfs_objects.tar.gz').and_return(decompression_service)
+ expect(BulkImports::ArchiveExtractionService).to receive(:new).with(tmpdir: tmpdir, filename: 'lfs_objects.tar').and_return(extraction_service)
+
+ expect(download_service).to receive(:execute)
+ expect(decompression_service).to receive(:execute)
+ expect(extraction_service).to receive(:execute)
+
+ extracted_data = pipeline.extract(context)
+
+ expect(extracted_data.data).to contain_exactly(lfs_json_file_path, lfs_file_path)
+ end
+ end
+
+ describe '#load' do
+ before do
+ allow(pipeline)
+ .to receive(:extract)
+ .and_return(BulkImports::Pipeline::ExtractedData.new(data: [lfs_json_file_path, lfs_file_path]))
+ end
+
+ context 'when file path is lfs json' do
+ it 'returns without creating lfs objects' do
+ filepath = File.join(tmpdir, 'lfs_objects.json')
+
+ allow(Gitlab::Json).to receive(:parse).with(filepath).and_return({})
+
+ expect { pipeline.load(context, filepath) }.not_to change { portable.lfs_objects.count }
+ end
+ end
+
+ context 'when file path is tar file' do
+ it 'returns without creating lfs objects' do
+ filepath = File.join(tmpdir, 'lfs_objects.tar')
+
+ expect { pipeline.load(context, filepath) }.not_to change { portable.lfs_objects.count }
+ end
+ end
+
+ context 'when lfs json read failed' do
+ it 'raises an error' do
+ File.write(lfs_json_file_path, 'invalid json')
+
+ expect { pipeline.load(context, lfs_file_path) }.to raise_error(BulkImports::Error, 'LFS Objects JSON read failed')
+ end
+ end
+
+ context 'when file path is being traversed' do
+ it 'raises an error' do
+ expect { pipeline.load(context, File.join(tmpdir, '..')) }.to raise_error(Gitlab::Utils::PathTraversalAttackError, 'Invalid path')
+ end
+ end
+
+ context 'when file path is not under tmpdir' do
+ it 'raises an error' do
+ expect { pipeline.load(context, '/home/test.txt') }.to raise_error(StandardError, 'path /home/test.txt is not allowed')
+ end
+ end
+
+ context 'when file path is symlink' do
+ it 'returns without creating lfs objects' do
+ symlink = File.join(tmpdir, 'symlink')
+
+ FileUtils.ln_s(File.join(tmpdir, lfs_file_path), symlink)
+
+ expect { pipeline.load(context, symlink) }.not_to change { portable.lfs_objects.count }
+ end
+ end
+
+ context 'when path is a directory' do
+ it 'returns without creating lfs objects' do
+ expect { pipeline.load(context, Dir.tmpdir) }.not_to change { portable.lfs_objects.count }
+ end
+ end
+
+ context 'lfs objects project' do
+ context 'when lfs objects json is invalid' do
+ context 'when oid value is not Array' do
+ it 'does not create lfs objects project' do
+ File.write(lfs_json_file_path, { oid => 'test' }.to_json)
+
+ expect { pipeline.load(context, lfs_file_path) }.not_to change { portable.lfs_objects_projects.count }
+ end
+ end
+
+ context 'when oid value is nil' do
+ it 'does not create lfs objects project' do
+ File.write(lfs_json_file_path, { oid => nil }.to_json)
+
+ expect { pipeline.load(context, lfs_file_path) }.not_to change { portable.lfs_objects_projects.count }
+ end
+ end
+
+ context 'when oid value is not allowed' do
+ it 'does not create lfs objects project' do
+ File.write(lfs_json_file_path, { oid => ['invalid'] }.to_json)
+
+ expect { pipeline.load(context, lfs_file_path) }.not_to change { portable.lfs_objects_projects.count }
+ end
+ end
+
+ context 'when repository type is duplicated' do
+ it 'creates only one lfs objects project' do
+ File.write(lfs_json_file_path, { oid => [0, 0, 1, 1, 2, 2] }.to_json)
+
+ expect { pipeline.load(context, lfs_file_path) }.to change { portable.lfs_objects_projects.count }.by(3)
+ end
+ end
+ end
+
+ context 'when lfs objects project fails to be created' do
+ it 'logs the failure' do
+ allow_next_instance_of(LfsObjectsProject) do |object|
+ allow(object).to receive(:persisted?).and_return(false)
+ end
+
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger)
+ .to receive(:warn)
+ .with(project_id: portable.id,
+ message: 'Failed to save lfs objects project',
+ errors: '', **Gitlab::ApplicationContext.current)
+ .exactly(4).times
+ end
+
+ pipeline.load(context, lfs_file_path)
+ end
+ end
+ end
+ end
+
+ describe '#after_run' do
+ it 'removes tmpdir' do
+ allow(FileUtils).to receive(:remove_entry).and_call_original
+ expect(FileUtils).to receive(:remove_entry).with(tmpdir).and_call_original
+
+ pipeline.after_run(nil)
+
+ expect(Dir.exist?(tmpdir)).to eq(false)
+ end
+
+ context 'when tmpdir does not exist' do
+ it 'does not attempt to remove tmpdir' do
+ FileUtils.remove_entry(tmpdir)
+
+ expect(FileUtils).not_to receive(:remove_entry).with(tmpdir)
+
+ pipeline.after_run(nil)
+ end
+ end
+ end
+end
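
The fixtures also document the archive layout this pipeline consumes: an lfs_objects.tar.gz that unpacks to one blob per OID plus an lfs_objects.json mapping each OID to repository-type values (nil standing for a row with no repository_type). A runnable sketch of the de-duplication the 'repository type is duplicated' example implies:

    require 'json'

    oid = 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855'
    manifest = JSON.parse({ oid => [0, 0, 1, 1, 2, 2] }.to_json)

    # One lfs_objects_projects row per unique (oid, repository_type) pair,
    # hence the .by(3) expectation above.
    manifest.each do |object_oid, repository_types|
      repository_types.uniq.each do |type|
        puts "link #{object_oid} with repository_type=#{type}"
      end
    end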
diff --git a/spec/lib/bulk_imports/common/pipelines/members_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/members_pipeline_spec.rb
new file mode 100644
index 00000000000..f9b95f79104
--- /dev/null
+++ b/spec/lib/bulk_imports/common/pipelines/members_pipeline_spec.rb
@@ -0,0 +1,161 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Common::Pipelines::MembersPipeline do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+ let_it_be(:member_user1) { create(:user, email: 'email1@email.com') }
+ let_it_be(:member_user2) { create(:user, email: 'email2@email.com') }
+ let_it_be(:member_data) do
+ {
+ user_id: member_user1.id,
+ created_by_id: member_user2.id,
+ access_level: 30,
+ created_at: '2020-01-01T00:00:00Z',
+ updated_at: '2020-01-01T00:00:00Z',
+ expires_at: nil
+ }
+ end
+
+ let(:parent) { create(:group) }
+ let(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(tracker) }
+ let(:members) { portable.members.map { |m| m.slice(:user_id, :access_level) } }
+
+ subject(:pipeline) { described_class.new(context) }
+
+ def extracted_data(email:, has_next_page: false)
+ data = {
+ 'created_at' => '2020-01-01T00:00:00Z',
+ 'updated_at' => '2020-01-02T00:00:00Z',
+ 'expires_at' => nil,
+ 'access_level' => {
+ 'integer_value' => 30
+ },
+ 'user' => {
+ 'public_email' => email
+ }
+ }
+
+ page_info = {
+ 'has_next_page' => has_next_page,
+ 'next_page' => has_next_page ? 'cursor' : nil
+ }
+
+ BulkImports::Pipeline::ExtractedData.new(data: data, page_info: page_info)
+ end
+
+ shared_examples 'members import' do
+ before do
+ portable.members.delete_all
+ end
+
+ describe '#run' do
+ it 'creates memberships for existing users' do
+ first_page = extracted_data(email: member_user1.email, has_next_page: true)
+ last_page = extracted_data(email: member_user2.email)
+
+ allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(first_page, last_page)
+ end
+
+ expect { pipeline.run }.to change(portable.members, :count).by(2)
+
+ expect(members).to contain_exactly(
+ { user_id: member_user1.id, access_level: 30 },
+ { user_id: member_user2.id, access_level: 30 }
+ )
+ end
+ end
+
+ describe '#load' do
+ it 'creates new membership' do
+ expect { subject.load(context, member_data) }.to change(portable.members, :count).by(1)
+
+ member = portable.members.find_by_user_id(member_user1.id)
+
+ expect(member.user).to eq(member_user1)
+ expect(member.created_by).to eq(member_user2)
+ expect(member.access_level).to eq(30)
+ expect(member.created_at).to eq('2020-01-01T00:00:00Z')
+ expect(member.updated_at).to eq('2020-01-01T00:00:00Z')
+ expect(member.expires_at).to eq(nil)
+ end
+
+ context 'when user_id is current user id' do
+ it 'does not create new membership' do
+ data = { user_id: user.id }
+
+ expect { pipeline.load(context, data) }.not_to change(portable.members, :count)
+ end
+ end
+
+ context 'when data is nil' do
+ it 'does not create new membership' do
+ expect { pipeline.load(context, nil) }.not_to change(portable.members, :count)
+ end
+ end
+
+ context 'when user membership already exists with the same access level' do
+ it 'does not create new membership' do
+ portable.members.create!(member_data)
+
+ expect { pipeline.load(context, member_data) }.not_to change(portable.members, :count)
+ end
+ end
+
+ context 'when portable is in a parent group' do
+ let(:tracker) { create(:bulk_import_tracker, entity: entity_with_parent) }
+
+ before do
+ parent.members.create!(member_data)
+ end
+
+ context 'when the same membership exists in parent group' do
+ it 'does not create new membership' do
+ expect { pipeline.load(context, member_data) }.not_to change(portable_with_parent.members, :count)
+ end
+ end
+
+ context 'when membership with higher access level exists in parent group' do
+ it 'creates new direct membership' do
+ data = member_data.merge(access_level: Gitlab::Access::MAINTAINER)
+
+ expect { pipeline.load(context, data) }.to change(portable_with_parent.members, :count)
+
+ member = portable_with_parent.members.find_by_user_id(member_user1.id)
+
+ expect(member.access_level).to eq(Gitlab::Access::MAINTAINER)
+ end
+ end
+
+ context 'when membership with lower access level exists in parent group' do
+ it 'does not create new membership' do
+ data = member_data.merge(access_level: Gitlab::Access::GUEST)
+
+ expect { pipeline.load(context, data) }.not_to change(portable_with_parent.members, :count)
+ end
+ end
+ end
+ end
+ end
+
+ context 'when importing to group' do
+ let(:portable) { create(:group) }
+ let(:portable_with_parent) { create(:group, parent: parent) }
+ let(:entity) { create(:bulk_import_entity, :group_entity, group: portable, bulk_import: bulk_import) }
+ let(:entity_with_parent) { create(:bulk_import_entity, :group_entity, group: portable_with_parent, bulk_import: bulk_import) }
+
+ include_examples 'members import'
+ end
+
+ context 'when importing to project' do
+ let(:portable) { create(:project) }
+ let(:portable_with_parent) { create(:project, namespace: parent) }
+ let(:entity) { create(:bulk_import_entity, :project_entity, project: portable, bulk_import: bulk_import) }
+ let(:entity_with_parent) { create(:bulk_import_entity, :project_entity, project: portable_with_parent, bulk_import: bulk_import) }
+
+ include_examples 'members import'
+ end
+end
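
The parent-group contexts pin down the membership rule: create a direct membership only when none is inherited, or when the imported access level is higher than the inherited one. A hedged sketch of that guard (the helper name and the members_with_parents lookup are illustrative, not the pipeline's actual code):

    # Guard implied by the three parent-group examples above.
    def create_membership?(portable, data)
      existing = portable.members_with_parents.find_by(user_id: data[:user_id])

      existing.nil? || existing.access_level < data[:access_level]
    end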
diff --git a/spec/lib/bulk_imports/groups/graphql/get_group_query_spec.rb b/spec/lib/bulk_imports/groups/graphql/get_group_query_spec.rb
index b0f8f74783b..d03b8d8b5b2 100644
--- a/spec/lib/bulk_imports/groups/graphql/get_group_query_spec.rb
+++ b/spec/lib/bulk_imports/groups/graphql/get_group_query_spec.rb
@@ -3,14 +3,27 @@
require 'spec_helper'
RSpec.describe BulkImports::Groups::Graphql::GetGroupQuery do
+ let_it_be(:tracker) { create(:bulk_import_tracker) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ subject(:query) { described_class.new(context: context) }
+
+ it 'has a valid query' do
+ parsed_query = GraphQL::Query.new(
+ GitlabSchema,
+ query.to_s,
+ variables: query.variables
+ )
+ result = GitlabSchema.static_validator.validate(parsed_query)
+
+ expect(result[:errors]).to be_empty
+ end
+
describe '#variables' do
it 'returns query variables based on entity information' do
- entity = double(source_full_path: 'test', bulk_import: nil)
- tracker = double(entity: entity)
- context = BulkImports::Pipeline::Context.new(tracker)
- expected = { full_path: entity.source_full_path }
+ expected = { full_path: tracker.entity.source_full_path }
- expect(described_class.variables(context)).to eq(expected)
+ expect(subject.variables).to eq(expected)
end
end
@@ -18,7 +31,7 @@ RSpec.describe BulkImports::Groups::Graphql::GetGroupQuery do
it 'returns data path' do
expected = %w[data group]
- expect(described_class.data_path).to eq(expected)
+ expect(subject.data_path).to eq(expected)
end
end
@@ -26,7 +39,7 @@ RSpec.describe BulkImports::Groups::Graphql::GetGroupQuery do
it 'returns pagination information path' do
expected = %w[data group page_info]
- expect(described_class.page_info_path).to eq(expected)
+ expect(subject.page_info_path).to eq(expected)
end
end
end
diff --git a/spec/lib/bulk_imports/groups/graphql/get_members_query_spec.rb b/spec/lib/bulk_imports/groups/graphql/get_members_query_spec.rb
deleted file mode 100644
index d0c4bb817b2..00000000000
--- a/spec/lib/bulk_imports/groups/graphql/get_members_query_spec.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BulkImports::Groups::Graphql::GetMembersQuery do
- it 'has a valid query' do
- tracker = create(:bulk_import_tracker)
- context = BulkImports::Pipeline::Context.new(tracker)
-
- query = GraphQL::Query.new(
- GitlabSchema,
- described_class.to_s,
- variables: described_class.variables(context)
- )
- result = GitlabSchema.static_validator.validate(query)
-
- expect(result[:errors]).to be_empty
- end
-
- describe '#data_path' do
- it 'returns data path' do
- expected = %w[data group group_members nodes]
-
- expect(described_class.data_path).to eq(expected)
- end
- end
-
- describe '#page_info_path' do
- it 'returns pagination information path' do
- expected = %w[data group group_members page_info]
-
- expect(described_class.page_info_path).to eq(expected)
- end
- end
-end
diff --git a/spec/lib/bulk_imports/groups/graphql/get_projects_query_spec.rb b/spec/lib/bulk_imports/groups/graphql/get_projects_query_spec.rb
index 1a7c5a4993c..fe28e3959a0 100644
--- a/spec/lib/bulk_imports/groups/graphql/get_projects_query_spec.rb
+++ b/spec/lib/bulk_imports/groups/graphql/get_projects_query_spec.rb
@@ -3,25 +3,25 @@
require 'spec_helper'
RSpec.describe BulkImports::Groups::Graphql::GetProjectsQuery do
- describe '#variables' do
- it 'returns valid variables based on entity information' do
- tracker = create(:bulk_import_tracker)
- context = BulkImports::Pipeline::Context.new(tracker)
-
- query = GraphQL::Query.new(
- GitlabSchema,
- described_class.to_s,
- variables: described_class.variables(context)
- )
- result = GitlabSchema.static_validator.validate(query)
-
- expect(result[:errors]).to be_empty
- end
+ let_it_be(:tracker) { create(:bulk_import_tracker) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ subject(:query) { described_class.new(context: context) }
+
+ it 'has a valid query' do
+ parsed_query = GraphQL::Query.new(
+ GitlabSchema,
+ query.to_s,
+ variables: query.variables
+ )
+ result = GitlabSchema.static_validator.validate(parsed_query)
+
+ expect(result[:errors]).to be_empty
+ end
- context 'with invalid variables' do
- it 'raises an error' do
- expect { GraphQL::Query.new(GitlabSchema, described_class.to_s, variables: 'invalid') }.to raise_error(ArgumentError)
- end
+ context 'with invalid variables' do
+ it 'raises an error' do
+ expect { GraphQL::Query.new(GitlabSchema, subject.to_s, variables: 'invalid') }.to raise_error(ArgumentError)
end
end
@@ -29,7 +29,7 @@ RSpec.describe BulkImports::Groups::Graphql::GetProjectsQuery do
it 'returns data path' do
expected = %w[data group projects nodes]
- expect(described_class.data_path).to eq(expected)
+ expect(subject.data_path).to eq(expected)
end
end
@@ -37,7 +37,7 @@ RSpec.describe BulkImports::Groups::Graphql::GetProjectsQuery do
it 'returns pagination information path' do
expected = %w[data group projects page_info]
- expect(described_class.page_info_path).to eq(expected)
+ expect(subject.page_info_path).to eq(expected)
end
end
end
diff --git a/spec/lib/bulk_imports/groups/pipelines/members_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/members_pipeline_spec.rb
deleted file mode 100644
index 0126acb320b..00000000000
--- a/spec/lib/bulk_imports/groups/pipelines/members_pipeline_spec.rb
+++ /dev/null
@@ -1,119 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BulkImports::Groups::Pipelines::MembersPipeline do
- let_it_be(:member_user1) { create(:user, email: 'email1@email.com') }
- let_it_be(:member_user2) { create(:user, email: 'email2@email.com') }
-
- let_it_be(:user) { create(:user) }
- let_it_be(:group) { create(:group) }
- let_it_be(:bulk_import) { create(:bulk_import, user: user) }
- let_it_be(:entity) { create(:bulk_import_entity, bulk_import: bulk_import, group: group) }
- let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
- let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
-
- subject { described_class.new(context) }
-
- describe '#run' do
- it 'maps existing users to the imported group' do
- first_page = extracted_data(email: member_user1.email, has_next_page: true)
- last_page = extracted_data(email: member_user2.email)
-
- allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
- allow(extractor)
- .to receive(:extract)
- .and_return(first_page, last_page)
- end
-
- expect { subject.run }.to change(GroupMember, :count).by(2)
-
- members = group.members.map { |m| m.slice(:user_id, :access_level) }
-
- expect(members).to contain_exactly(
- { user_id: member_user1.id, access_level: 30 },
- { user_id: member_user2.id, access_level: 30 }
- )
- end
- end
-
- describe '#load' do
- it 'does nothing when there is no data' do
- expect { subject.load(context, nil) }.not_to change(GroupMember, :count)
- end
-
- it 'creates the member' do
- data = {
- 'user_id' => member_user1.id,
- 'created_by_id' => member_user2.id,
- 'access_level' => 30,
- 'created_at' => '2020-01-01T00:00:00Z',
- 'updated_at' => '2020-01-01T00:00:00Z',
- 'expires_at' => nil
- }
-
- expect { subject.load(context, data) }.to change(GroupMember, :count).by(1)
-
- member = group.members.last
-
- expect(member.user).to eq(member_user1)
- expect(member.created_by).to eq(member_user2)
- expect(member.access_level).to eq(30)
- expect(member.created_at).to eq('2020-01-01T00:00:00Z')
- expect(member.updated_at).to eq('2020-01-01T00:00:00Z')
- expect(member.expires_at).to eq(nil)
- end
-
- context 'when user_id is current user id' do
- it 'does not create new member' do
- data = { 'user_id' => user.id }
-
- expect { subject.load(context, data) }.not_to change(GroupMember, :count)
- end
- end
- end
-
- describe 'pipeline parts' do
- it { expect(described_class).to include_module(BulkImports::Pipeline) }
- it { expect(described_class).to include_module(BulkImports::Pipeline::Runner) }
-
- it 'has extractors' do
- expect(described_class.get_extractor)
- .to eq(
- klass: BulkImports::Common::Extractors::GraphqlExtractor,
- options: {
- query: BulkImports::Groups::Graphql::GetMembersQuery
- }
- )
- end
-
- it 'has transformers' do
- expect(described_class.transformers)
- .to contain_exactly(
- { klass: BulkImports::Common::Transformers::ProhibitedAttributesTransformer, options: nil },
- { klass: BulkImports::Groups::Transformers::MemberAttributesTransformer, options: nil }
- )
- end
- end
-
- def extracted_data(email:, has_next_page: false)
- data = {
- 'created_at' => '2020-01-01T00:00:00Z',
- 'updated_at' => '2020-01-01T00:00:00Z',
- 'expires_at' => nil,
- 'access_level' => {
- 'integer_value' => 30
- },
- 'user' => {
- 'public_email' => email
- }
- }
-
- page_info = {
- 'has_next_page' => has_next_page,
- 'next_page' => has_next_page ? 'cursor' : nil
- }
-
- BulkImports::Pipeline::ExtractedData.new(data: data, page_info: page_info)
- end
-end
diff --git a/spec/lib/bulk_imports/groups/stage_spec.rb b/spec/lib/bulk_imports/groups/stage_spec.rb
index 55a8e40f480..b6bb8a7d195 100644
--- a/spec/lib/bulk_imports/groups/stage_spec.rb
+++ b/spec/lib/bulk_imports/groups/stage_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe BulkImports::Groups::Stage do
[
[0, BulkImports::Groups::Pipelines::GroupPipeline],
[1, BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline],
- [1, BulkImports::Groups::Pipelines::MembersPipeline],
+ [1, BulkImports::Common::Pipelines::MembersPipeline],
[1, BulkImports::Common::Pipelines::LabelsPipeline],
[1, BulkImports::Common::Pipelines::MilestonesPipeline],
[1, BulkImports::Common::Pipelines::BadgesPipeline],
diff --git a/spec/lib/bulk_imports/groups/transformers/member_attributes_transformer_spec.rb b/spec/lib/bulk_imports/groups/transformers/member_attributes_transformer_spec.rb
index af99428e0c1..c8935f71f10 100644
--- a/spec/lib/bulk_imports/groups/transformers/member_attributes_transformer_spec.rb
+++ b/spec/lib/bulk_imports/groups/transformers/member_attributes_transformer_spec.rb
@@ -48,12 +48,12 @@ RSpec.describe BulkImports::Groups::Transformers::MemberAttributesTransformer do
data = member_data(email: user.email)
expect(subject.transform(context, data)).to eq(
- 'access_level' => 30,
- 'user_id' => user.id,
- 'created_by_id' => user.id,
- 'created_at' => '2020-01-01T00:00:00Z',
- 'updated_at' => '2020-01-01T00:00:00Z',
- 'expires_at' => nil
+ access_level: 30,
+ user_id: user.id,
+ created_by_id: user.id,
+ created_at: '2020-01-01T00:00:00Z',
+ updated_at: '2020-01-01T00:00:00Z',
+ expires_at: nil
)
end
@@ -62,12 +62,12 @@ RSpec.describe BulkImports::Groups::Transformers::MemberAttributesTransformer do
data = member_data(email: secondary_email)
expect(subject.transform(context, data)).to eq(
- 'access_level' => 30,
- 'user_id' => user.id,
- 'created_by_id' => user.id,
- 'created_at' => '2020-01-01T00:00:00Z',
- 'updated_at' => '2020-01-01T00:00:00Z',
- 'expires_at' => nil
+ access_level: 30,
+ user_id: user.id,
+ created_by_id: user.id,
+ created_at: '2020-01-01T00:00:00Z',
+ updated_at: '2020-01-01T00:00:00Z',
+ expires_at: nil
)
end
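
The transformer now returns symbol keys, which lets pipelines feed its output straight into ActiveRecord, as the new common members pipeline spec does with portable.members.create!(member_data). One plausible mechanism is a trailing symbolize_keys, shown here as an assumption rather than the transformer's actual code:

    # ActiveSupport's Hash#symbolize_keys:
    { 'access_level' => 30, 'user_id' => 1 }.symbolize_keys
    # => { access_level: 30, user_id: 1 }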
diff --git a/spec/lib/bulk_imports/projects/graphql/get_project_query_spec.rb b/spec/lib/bulk_imports/projects/graphql/get_project_query_spec.rb
new file mode 100644
index 00000000000..6593aa56506
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/graphql/get_project_query_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Graphql::GetProjectQuery do
+ let_it_be(:tracker) { create(:bulk_import_tracker) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ subject(:query) { described_class.new(context: context) }
+
+ it 'has a valid query' do
+ parsed_query = GraphQL::Query.new(
+ GitlabSchema,
+ query.to_s,
+ variables: query.variables
+ )
+ result = GitlabSchema.static_validator.validate(parsed_query)
+
+ expect(result[:errors]).to be_empty
+ end
+
+ it 'queries project based on source_full_path' do
+ expected = { full_path: tracker.entity.source_full_path }
+
+ expect(subject.variables).to eq(expected)
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/graphql/get_repository_query_spec.rb b/spec/lib/bulk_imports/projects/graphql/get_repository_query_spec.rb
index 4dba81dc0d2..8ed105bc0c9 100644
--- a/spec/lib/bulk_imports/projects/graphql/get_repository_query_spec.rb
+++ b/spec/lib/bulk_imports/projects/graphql/get_repository_query_spec.rb
@@ -3,19 +3,29 @@
require 'spec_helper'
RSpec.describe BulkImports::Projects::Graphql::GetRepositoryQuery do
- describe 'query repository based on full_path' do
- let(:entity) { double(source_full_path: 'test', bulk_import: nil) }
- let(:tracker) { double(entity: entity) }
- let(:context) { BulkImports::Pipeline::Context.new(tracker) }
+ let_it_be(:tracker) { create(:bulk_import_tracker) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
- it 'returns project repository url' do
- expect(described_class.to_s).to include('httpUrlToRepo')
- end
+ subject(:query) { described_class.new(context: context) }
- it 'queries project based on source_full_path' do
- expected = { full_path: entity.source_full_path }
+ it 'has a valid query' do
+ parsed_query = GraphQL::Query.new(
+ GitlabSchema,
+ query.to_s,
+ variables: query.variables
+ )
+ result = GitlabSchema.static_validator.validate(parsed_query)
- expect(described_class.variables(context)).to eq(expected)
- end
+ expect(result[:errors]).to be_empty
+ end
+
+ it 'returns project repository url' do
+ expect(subject.to_s).to include('httpUrlToRepo')
+ end
+
+ it 'queries project based on source_full_path' do
+ expected = { full_path: tracker.entity.source_full_path }
+
+ expect(subject.variables).to eq(expected)
end
end
diff --git a/spec/lib/bulk_imports/projects/graphql/get_snippet_repository_query_spec.rb b/spec/lib/bulk_imports/projects/graphql/get_snippet_repository_query_spec.rb
index b680fa5cbfc..1bd4106297d 100644
--- a/spec/lib/bulk_imports/projects/graphql/get_snippet_repository_query_spec.rb
+++ b/spec/lib/bulk_imports/projects/graphql/get_snippet_repository_query_spec.rb
@@ -3,56 +3,56 @@
require 'spec_helper'
RSpec.describe BulkImports::Projects::Graphql::GetSnippetRepositoryQuery do
- describe 'query repository based on full_path' do
- let_it_be(:entity) { create(:bulk_import_entity) }
- let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
- let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
-
- it 'has a valid query' do
- query = GraphQL::Query.new(
- GitlabSchema,
- described_class.to_s,
- variables: described_class.variables(context)
- )
- result = GitlabSchema.static_validator.validate(query)
-
- expect(result[:errors]).to be_empty
- end
+ let_it_be(:entity) { create(:bulk_import_entity) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
- it 'returns snippet httpUrlToRepo' do
- expect(described_class.to_s).to include('httpUrlToRepo')
- end
+ subject(:query) { described_class.new(context: context) }
- it 'returns snippet createdAt' do
- expect(described_class.to_s).to include('createdAt')
- end
+ it 'has a valid query' do
+ parsed_query = GraphQL::Query.new(
+ GitlabSchema,
+ query.to_s,
+ variables: query.variables
+ )
+ result = GitlabSchema.static_validator.validate(parsed_query)
- it 'returns snippet title' do
- expect(described_class.to_s).to include('title')
- end
+ expect(result[:errors]).to be_empty
+ end
- describe '.variables' do
- it 'queries project based on source_full_path and pagination' do
- expected = { full_path: entity.source_full_path, cursor: nil, per_page: 500 }
+ it 'returns snippet httpUrlToRepo' do
+ expect(subject.to_s).to include('httpUrlToRepo')
+ end
- expect(described_class.variables(context)).to eq(expected)
- end
+ it 'returns snippet createdAt' do
+ expect(subject.to_s).to include('createdAt')
+ end
+
+ it 'returns snippet title' do
+ expect(subject.to_s).to include('title')
+ end
+
+ describe '#variables' do
+ it 'queries project based on source_full_path and pagination' do
+ expected = { full_path: entity.source_full_path, cursor: nil, per_page: 500 }
+
+ expect(subject.variables).to eq(expected)
end
+ end
- describe '.data_path' do
- it '.data_path returns data path' do
- expected = %w[data project snippets nodes]
+ describe '#data_path' do
+ it 'returns data path' do
+ expected = %w[data project snippets nodes]
- expect(described_class.data_path).to eq(expected)
- end
+ expect(subject.data_path).to eq(expected)
end
+ end
- describe '.page_info_path' do
- it '.page_info_path returns pagination information path' do
- expected = %w[data project snippets page_info]
+ describe '#page_info_path' do
+ it 'returns pagination information path' do
+ expected = %w[data project snippets page_info]
- expect(described_class.page_info_path).to eq(expected)
- end
+ expect(subject.page_info_path).to eq(expected)
end
end
end
diff --git a/spec/lib/bulk_imports/projects/stage_spec.rb b/spec/lib/bulk_imports/projects/stage_spec.rb
index 81cbdcae9d1..ef98613dc25 100644
--- a/spec/lib/bulk_imports/projects/stage_spec.rb
+++ b/spec/lib/bulk_imports/projects/stage_spec.rb
@@ -26,6 +26,7 @@ RSpec.describe BulkImports::Projects::Stage do
[4, BulkImports::Projects::Pipelines::ServiceDeskSettingPipeline],
[5, BulkImports::Common::Pipelines::WikiPipeline],
[5, BulkImports::Common::Pipelines::UploadsPipeline],
+ [5, BulkImports::Common::Pipelines::LfsObjectsPipeline],
[5, BulkImports::Projects::Pipelines::AutoDevopsPipeline],
[5, BulkImports::Projects::Pipelines::PipelineSchedulesPipeline],
[6, BulkImports::Common::Pipelines::EntityFinisher]