author     Grzegorz Bizon <grzegorz@gitlab.com>  2018-06-05 08:42:20 +0000
committer  Grzegorz Bizon <grzegorz@gitlab.com>  2018-06-05 08:42:20 +0000
commit     e11a1001dcdc1ea5c65845fb0897b861b5c0b92d (patch)
tree       27932bbdd58a1322b6b2c3fd82738c8330ecdec2 /spec
parent     8576e45f28737f1e9f6b0b6e5649745c76751b06 (diff)
parent     eea26a93e7d4ac0c6fefe46592c9baa0d3e6a5cd (diff)
Merge branch 'presigned-multipart-uploads' into 'master'
Support presigned multipart uploads
See merge request gitlab-org/gitlab-ce!18855
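For orientation, the payload shape these specs exercise looks roughly as follows. This is a sketch assembled from the expectations in the new specs below (Timeout, GetURL, StoreURL, DeleteURL, plus a MultipartUpload section that appears only when the upload length is unknown); the concrete values and query strings are made-up placeholders, not output of the real implementation.

    # Illustrative hash, shaped after what the specs describe
    # ObjectStorage::DirectUpload#to_hash returning; all values
    # below are placeholders.
    {
      Timeout: 3600,  # assumed value; the specs only check the key exists
      GetURL:    "https://uploads.s3.amazonaws.com/tmp/uploads/my-file?X-Amz-...",
      StoreURL:  "https://uploads.s3.amazonaws.com/tmp/uploads/my-file?X-Amz-...",
      DeleteURL: "https://uploads.s3.amazonaws.com/tmp/uploads/my-file?X-Amz-...",
      MultipartUpload: {          # only present when has_length is false
        PartSize: 5 * 1024 * 1024,
        PartURLs: [
          "https://uploads.s3.amazonaws.com/tmp/uploads/my-file?uploadId=myUpload&partNumber=1&...",
          "https://uploads.s3.amazonaws.com/tmp/uploads/my-file?uploadId=myUpload&partNumber=2&..."
        ],
        CompleteURL: "https://uploads.s3.amazonaws.com/tmp/uploads/my-file?uploadId=myUpload",
        AbortURL:    "https://uploads.s3.amazonaws.com/tmp/uploads/my-file?uploadId=myUpload"
      }
    }

As the key names suggest, the intended consumer (Workhorse) uploads each chunk to a presigned part URL and then completes or aborts the multipart upload via the corresponding URL.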
Diffstat (limited to 'spec')
-rw-r--r--   spec/initializers/artifacts_direct_upload_support_spec.rb |  71
-rw-r--r--   spec/initializers/direct_upload_support_spec.rb           |  90
-rw-r--r--   spec/lib/object_storage/direct_upload_spec.rb             | 164
-rw-r--r--   spec/requests/api/runner_spec.rb                          |   1
-rw-r--r--   spec/requests/lfs_http_spec.rb                            |   1
-rw-r--r--   spec/support/helpers/stub_object_storage.rb               |  12
-rw-r--r--   spec/uploaders/object_storage_spec.rb                     | 134

7 files changed, 375 insertions(+), 98 deletions(-)
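Before the diff body, it may help to spell out the part-size arithmetic that the new spec/lib/object_storage/direct_upload_spec.rb pins down: a 5MB minimum part size, at most 100 part URLs, and a part size rounded up to the next 5MB multiple so that 100 parts always cover maximum_size. A minimal sketch under those three assumed rules; the method and constant names here are hypothetical, not the code under test:

    MINIMUM_PART_SIZE  = 5 * 1024 * 1024  # smallest non-final part S3 accepts
    MAXIMUM_PART_COUNT = 100              # ceiling implied by the 49GB example

    # Hypothetical helper that reproduces the values the spec expects.
    def illustrative_part_size(maximum_size)
      # Spread the upload across the maximum number of parts...
      size = (maximum_size.to_f / MAXIMUM_PART_COUNT).ceil
      # ...then round up to a 5MB multiple, never dropping below the minimum.
      rounded = (size.to_f / MINIMUM_PART_SIZE).ceil * MINIMUM_PART_SIZE
      [rounded, MINIMUM_PART_SIZE].max
    end

    illustrative_part_size(10 * 1024**2)  # => 5MB,   so 2 part URLs for 10MB
    illustrative_part_size(12 * 1024**2)  # => 5MB,   so 3 part URLs for 12MB
    illustrative_part_size(49 * 1024**3)  # => 505MB, so 100 part URLs for 49GB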
diff --git a/spec/initializers/artifacts_direct_upload_support_spec.rb b/spec/initializers/artifacts_direct_upload_support_spec.rb
deleted file mode 100644
index bfb71da3388..00000000000
--- a/spec/initializers/artifacts_direct_upload_support_spec.rb
+++ /dev/null
@@ -1,71 +0,0 @@
-require 'spec_helper'
-
-describe 'Artifacts direct upload support' do
-  subject do
-    load Rails.root.join('config/initializers/artifacts_direct_upload_support.rb')
-  end
-
-  let(:connection) do
-    { provider: provider }
-  end
-
-  before do
-    stub_artifacts_setting(
-      object_store: {
-        enabled: enabled,
-        direct_upload: direct_upload,
-        connection: connection
-      })
-  end
-
-  context 'when object storage is enabled' do
-    let(:enabled) { true }
-
-    context 'when direct upload is enabled' do
-      let(:direct_upload) { true }
-
-      context 'when provider is Google' do
-        let(:provider) { 'Google' }
-
-        it 'succeeds' do
-          expect { subject }.not_to raise_error
-        end
-      end
-
-      context 'when connection is empty' do
-        let(:connection) { nil }
-
-        it 'raises an error' do
-          expect { subject }.to raise_error /object storage provider when 'direct_upload' of artifacts is used/
-        end
-      end
-
-      context 'when other provider is used' do
-        let(:provider) { 'AWS' }
-
-        it 'raises an error' do
-          expect { subject }.to raise_error /object storage provider when 'direct_upload' of artifacts is used/
-        end
-      end
-    end
-
-    context 'when direct upload is disabled' do
-      let(:direct_upload) { false }
-      let(:provider) { 'AWS' }
-
-      it 'succeeds' do
-        expect { subject }.not_to raise_error
-      end
-    end
-  end
-
-  context 'when object storage is disabled' do
-    let(:enabled) { false }
-    let(:direct_upload) { false }
-    let(:provider) { 'AWS' }
-
-    it 'succeeds' do
-      expect { subject }.not_to raise_error
-    end
-  end
-end
diff --git a/spec/initializers/direct_upload_support_spec.rb b/spec/initializers/direct_upload_support_spec.rb
new file mode 100644
index 00000000000..e51d404e030
--- /dev/null
+++ b/spec/initializers/direct_upload_support_spec.rb
@@ -0,0 +1,90 @@
+require 'spec_helper'
+
+describe 'Direct upload support' do
+  subject do
+    load Rails.root.join('config/initializers/direct_upload_support.rb')
+  end
+
+  where(:config_name) do
+    %w(lfs artifacts uploads)
+  end
+
+  with_them do
+    let(:connection) do
+      { provider: provider }
+    end
+
+    let(:object_store) do
+      {
+        enabled: enabled,
+        direct_upload: direct_upload,
+        connection: connection
+      }
+    end
+
+    before do
+      allow(Gitlab.config).to receive_messages(to_settings(config_name => {
+        object_store: object_store
+      }))
+    end
+
+    context 'when object storage is enabled' do
+      let(:enabled) { true }
+
+      context 'when direct upload is enabled' do
+        let(:direct_upload) { true }
+
+        context 'when provider is AWS' do
+          let(:provider) { 'AWS' }
+
+          it 'succeeds' do
+            expect { subject }.not_to raise_error
+          end
+        end
+
+        context 'when provider is Google' do
+          let(:provider) { 'Google' }
+
+          it 'succeeds' do
+            expect { subject }.not_to raise_error
+          end
+        end
+
+        context 'when connection is empty' do
+          let(:connection) { nil }
+
+          it 'raises an error' do
+            expect { subject }.to raise_error /are supported as a object storage provider when 'direct_upload' is used/
+          end
+        end
+
+        context 'when other provider is used' do
+          let(:provider) { 'Rackspace' }
+
+          it 'raises an error' do
+            expect { subject }.to raise_error /are supported as a object storage provider when 'direct_upload' is used/
+          end
+        end
+      end
+
+      context 'when direct upload is disabled' do
+        let(:direct_upload) { false }
+        let(:provider) { 'AWS' }
+
+        it 'succeeds' do
+          expect { subject }.not_to raise_error
+        end
+      end
+    end
+
+    context 'when object storage is disabled' do
+      let(:enabled) { false }
+      let(:direct_upload) { false }
+      let(:provider) { 'Rackspace' }
+
+      it 'succeeds' do
+        expect { subject }.not_to raise_error
+      end
+    end
+  end
+end
diff --git a/spec/lib/object_storage/direct_upload_spec.rb b/spec/lib/object_storage/direct_upload_spec.rb
new file mode 100644
index 00000000000..5187821e8f4
--- /dev/null
+++ b/spec/lib/object_storage/direct_upload_spec.rb
@@ -0,0 +1,164 @@
+require 'spec_helper'
+
+describe ObjectStorage::DirectUpload do
+  let(:credentials) do
+    {
+      provider: 'AWS',
+      aws_access_key_id: 'AWS_ACCESS_KEY_ID',
+      aws_secret_access_key: 'AWS_SECRET_ACCESS_KEY'
+    }
+  end
+
+  let(:storage_url) { 'https://uploads.s3.amazonaws.com/' }
+
+  let(:bucket_name) { 'uploads' }
+  let(:object_name) { 'tmp/uploads/my-file' }
+  let(:maximum_size) { 1.gigabyte }
+
+  let(:direct_upload) { described_class.new(credentials, bucket_name, object_name, has_length: has_length, maximum_size: maximum_size) }
+
+  describe '#has_length' do
+    context 'is known' do
+      let(:has_length) { true }
+      let(:maximum_size) { nil }
+
+      it "maximum size is not required" do
+        expect { direct_upload }.not_to raise_error
+      end
+    end
+
+    context 'is unknown' do
+      let(:has_length) { false }
+
+      context 'and maximum size is specified' do
+        let(:maximum_size) { 1.gigabyte }
+
+        it "does not raise an error" do
+          expect { direct_upload }.not_to raise_error
+        end
+      end
+
+      context 'and maximum size is not specified' do
+        let(:maximum_size) { nil }
+
+        it "raises an error" do
+          expect { direct_upload }.to raise_error /maximum_size has to be specified if length is unknown/
+        end
+      end
+    end
+  end
+
+  describe '#to_hash' do
+    subject { direct_upload.to_hash }
+
+    shared_examples 'a valid upload' do
+      it "returns valid structure" do
+        expect(subject).to have_key(:Timeout)
+        expect(subject[:GetURL]).to start_with(storage_url)
+        expect(subject[:StoreURL]).to start_with(storage_url)
+        expect(subject[:DeleteURL]).to start_with(storage_url)
+      end
+    end
+
+    shared_examples 'a valid upload with multipart data' do
+      before do
+        stub_object_storage_multipart_init(storage_url, "myUpload")
+      end
+
+      it_behaves_like 'a valid upload'
+
+      it "returns valid structure" do
+        expect(subject).to have_key(:MultipartUpload)
+        expect(subject[:MultipartUpload]).to have_key(:PartSize)
+        expect(subject[:MultipartUpload][:PartURLs]).to all(start_with(storage_url))
+        expect(subject[:MultipartUpload][:PartURLs]).to all(include('uploadId=myUpload'))
+        expect(subject[:MultipartUpload][:CompleteURL]).to start_with(storage_url)
+        expect(subject[:MultipartUpload][:CompleteURL]).to include('uploadId=myUpload')
+        expect(subject[:MultipartUpload][:AbortURL]).to start_with(storage_url)
+        expect(subject[:MultipartUpload][:AbortURL]).to include('uploadId=myUpload')
+      end
+    end
+
+    shared_examples 'a valid upload without multipart data' do
+      it_behaves_like 'a valid upload'
+
+      it "returns valid structure" do
+        expect(subject).not_to have_key(:MultipartUpload)
+      end
+    end
+
+    context 'when AWS is used' do
+      context 'when length is known' do
+        let(:has_length) { true }
+
+        it_behaves_like 'a valid upload without multipart data'
+      end
+
+      context 'when length is unknown' do
+        let(:has_length) { false }
+
+        it_behaves_like 'a valid upload with multipart data' do
+          context 'when maximum upload size is 10MB' do
+            let(:maximum_size) { 10.megabyte }
+
+            it 'returns only 2 parts' do
+              expect(subject[:MultipartUpload][:PartURLs].length).to eq(2)
+            end
+
+            it 'part size is minimum, 5MB' do
+              expect(subject[:MultipartUpload][:PartSize]).to eq(5.megabyte)
+            end
+          end
+
+          context 'when maximum upload size is 12MB' do
+            let(:maximum_size) { 12.megabyte }
+
+            it 'returns only 3 parts' do
+              expect(subject[:MultipartUpload][:PartURLs].length).to eq(3)
+            end
+
+            it 'part size is rounded-up to 5MB' do
+              expect(subject[:MultipartUpload][:PartSize]).to eq(5.megabyte)
+            end
+          end
+
+          context 'when maximum upload size is 49GB' do
+            let(:maximum_size) { 49.gigabyte }
+
+            it 'returns maximum, 100 parts' do
+              expect(subject[:MultipartUpload][:PartURLs].length).to eq(100)
+            end
+
+            it 'part size is rounded-up to 5MB' do
+              expect(subject[:MultipartUpload][:PartSize]).to eq(505.megabyte)
+            end
+          end
+        end
+      end
+    end
+
+    context 'when Google is used' do
+      let(:credentials) do
+        {
+          provider: 'Google',
+          google_storage_access_key_id: 'GOOGLE_ACCESS_KEY_ID',
+          google_storage_secret_access_key: 'GOOGLE_SECRET_ACCESS_KEY'
+        }
+      end
+
+      let(:storage_url) { 'https://storage.googleapis.com/uploads/' }
+
+      context 'when length is known' do
+        let(:has_length) { true }
+
+        it_behaves_like 'a valid upload without multipart data'
+      end
+
+      context 'when length is unknown' do
+        let(:has_length) { false }
+
+        it_behaves_like 'a valid upload without multipart data'
+      end
+    end
+  end
+end
diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb
index 319ac389083..c981a10ac38 100644
--- a/spec/requests/api/runner_spec.rb
+++ b/spec/requests/api/runner_spec.rb
@@ -1101,6 +1101,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
         expect(json_response['RemoteObject']).to have_key('GetURL')
         expect(json_response['RemoteObject']).to have_key('StoreURL')
         expect(json_response['RemoteObject']).to have_key('DeleteURL')
+        expect(json_response['RemoteObject']).to have_key('MultipartUpload')
       end
     end
diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb
index 79672fe1cc5..4d30b99262e 100644
--- a/spec/requests/lfs_http_spec.rb
+++ b/spec/requests/lfs_http_spec.rb
@@ -1021,6 +1021,7 @@ describe 'Git LFS API and storage' do
         expect(json_response['RemoteObject']).to have_key('GetURL')
         expect(json_response['RemoteObject']).to have_key('StoreURL')
         expect(json_response['RemoteObject']).to have_key('DeleteURL')
+        expect(json_response['RemoteObject']).not_to have_key('MultipartUpload')
         expect(json_response['LfsOid']).to eq(sample_oid)
         expect(json_response['LfsSize']).to eq(sample_size)
       end
diff --git a/spec/support/helpers/stub_object_storage.rb b/spec/support/helpers/stub_object_storage.rb
index 19d744b959a..bceaf8277ee 100644
--- a/spec/support/helpers/stub_object_storage.rb
+++ b/spec/support/helpers/stub_object_storage.rb
@@ -45,4 +45,16 @@ module StubObjectStorage
                                  remote_directory: 'uploads',
                                  **params)
   end
+
+  def stub_object_storage_multipart_init(endpoint, upload_id = "upload_id")
+    stub_request(:post, %r{\A#{endpoint}tmp/uploads/[a-z0-9-]*\?uploads\z})
+      .to_return status: 200, body: <<-EOS.strip_heredoc
+        <?xml version="1.0" encoding="UTF-8"?>
+        <InitiateMultipartUploadResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
+          <Bucket>example-bucket</Bucket>
+          <Key>example-object</Key>
+          <UploadId>#{upload_id}</UploadId>
+        </InitiateMultipartUploadResult>
+      EOS
+  end
 end
diff --git a/spec/uploaders/object_storage_spec.rb b/spec/uploaders/object_storage_spec.rb
index 2dd0925a8e6..01166865e88 100644
--- a/spec/uploaders/object_storage_spec.rb
+++ b/spec/uploaders/object_storage_spec.rb
@@ -355,7 +355,10 @@ describe ObjectStorage do
   end
 
   describe '.workhorse_authorize' do
-    subject { uploader_class.workhorse_authorize }
+    let(:has_length) { true }
+    let(:maximum_size) { nil }
+
+    subject { uploader_class.workhorse_authorize(has_length: has_length, maximum_size: maximum_size) }
 
     before do
       # ensure that we use regular Fog libraries
@@ -371,10 +374,6 @@ describe ObjectStorage do
         expect(subject[:TempPath]).to start_with(uploader_class.root)
         expect(subject[:TempPath]).to include(described_class::TMP_UPLOAD_PATH)
       end
-
-      it "does not return remote store" do
-        is_expected.not_to have_key('RemoteObject')
-      end
     end
 
     shared_examples 'uses remote storage' do
@@ -383,7 +382,7 @@ describe ObjectStorage do
         expect(subject[:RemoteObject]).to have_key(:ID)
         expect(subject[:RemoteObject]).to include(Timeout: a_kind_of(Integer))
-        expect(subject[:RemoteObject][:Timeout]).to be(ObjectStorage::DIRECT_UPLOAD_TIMEOUT)
+        expect(subject[:RemoteObject][:Timeout]).to be(ObjectStorage::DirectUpload::TIMEOUT)
         expect(subject[:RemoteObject]).to have_key(:GetURL)
         expect(subject[:RemoteObject]).to have_key(:DeleteURL)
         expect(subject[:RemoteObject]).to have_key(:StoreURL)
@@ -391,9 +390,31 @@ describe ObjectStorage do
         expect(subject[:RemoteObject][:DeleteURL]).to include(described_class::TMP_UPLOAD_PATH)
         expect(subject[:RemoteObject][:StoreURL]).to include(described_class::TMP_UPLOAD_PATH)
       end
+    end
 
-      it "does not return local store" do
-        is_expected.not_to have_key('TempPath')
+    shared_examples 'uses remote storage with multipart uploads' do
+      it_behaves_like 'uses remote storage' do
+        it "returns multipart upload" do
+          is_expected.to have_key(:RemoteObject)
+
+          expect(subject[:RemoteObject]).to have_key(:MultipartUpload)
+          expect(subject[:RemoteObject][:MultipartUpload]).to have_key(:PartSize)
+          expect(subject[:RemoteObject][:MultipartUpload]).to have_key(:PartURLs)
+          expect(subject[:RemoteObject][:MultipartUpload]).to have_key(:CompleteURL)
+          expect(subject[:RemoteObject][:MultipartUpload]).to have_key(:AbortURL)
+          expect(subject[:RemoteObject][:MultipartUpload][:PartURLs]).to all(include(described_class::TMP_UPLOAD_PATH))
+          expect(subject[:RemoteObject][:MultipartUpload][:CompleteURL]).to include(described_class::TMP_UPLOAD_PATH)
+          expect(subject[:RemoteObject][:MultipartUpload][:AbortURL]).to include(described_class::TMP_UPLOAD_PATH)
+        end
+      end
+    end
+
+    shared_examples 'uses remote storage without multipart uploads' do
+      it_behaves_like 'uses remote storage' do
+        it "does not return multipart upload" do
+          is_expected.to have_key(:RemoteObject)
+          expect(subject[:RemoteObject]).not_to have_key(:MultipartUpload)
+        end
       end
     end
@@ -416,6 +437,8 @@ describe ObjectStorage do
     end
 
     context 'uses AWS' do
+      let(:storage_url) { "https://uploads.s3-eu-central-1.amazonaws.com/" }
+
       before do
         expect(uploader_class).to receive(:object_store_credentials) do
           { provider: "AWS",
@@ -425,18 +448,40 @@ describe ObjectStorage do
         end
       end
 
-      it_behaves_like 'uses remote storage' do
-        let(:storage_url) { "https://uploads.s3-eu-central-1.amazonaws.com/" }
+      context 'for known length' do
+        it_behaves_like 'uses remote storage without multipart uploads' do
+          it 'returns links for S3' do
+            expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
+            expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
+            expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
+          end
+        end
+      end
+
+      context 'for unknown length' do
+        let(:has_length) { false }
+        let(:maximum_size) { 1.gigabyte }
 
-        it 'returns links for S3' do
-          expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
-          expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
-          expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
+        before do
+          stub_object_storage_multipart_init(storage_url)
+        end
+
+        it_behaves_like 'uses remote storage with multipart uploads' do
+          it 'returns links for S3' do
+            expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
+            expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
+            expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
+            expect(subject[:RemoteObject][:MultipartUpload][:PartURLs]).to all(start_with(storage_url))
+            expect(subject[:RemoteObject][:MultipartUpload][:CompleteURL]).to start_with(storage_url)
+            expect(subject[:RemoteObject][:MultipartUpload][:AbortURL]).to start_with(storage_url)
+          end
         end
       end
     end
 
     context 'uses Google' do
+      let(:storage_url) { "https://storage.googleapis.com/uploads/" }
+
       before do
         expect(uploader_class).to receive(:object_store_credentials) do
           { provider: "Google",
@@ -445,36 +490,71 @@ describe ObjectStorage do
         end
       end
 
-      it_behaves_like 'uses remote storage' do
-        let(:storage_url) { "https://storage.googleapis.com/uploads/" }
+      context 'for known length' do
+        it_behaves_like 'uses remote storage without multipart uploads' do
+          it 'returns links for Google Cloud' do
+            expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
+            expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
+            expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
+          end
+        end
+      end
+
+      context 'for unknown length' do
+        let(:has_length) { false }
+        let(:maximum_size) { 1.gigabyte }
 
-        it 'returns links for Google Cloud' do
-          expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
-          expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
-          expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
+        it_behaves_like 'uses remote storage without multipart uploads' do
+          it 'returns links for Google Cloud' do
+            expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
+            expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
+            expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
+          end
         end
       end
     end
 
     context 'uses GDK/minio' do
+      let(:storage_url) { "http://minio:9000/uploads/" }
+
       before do
         expect(uploader_class).to receive(:object_store_credentials) do
           { provider: "AWS",
            aws_access_key_id: "AWS_ACCESS_KEY_ID",
            aws_secret_access_key: "AWS_SECRET_ACCESS_KEY",
-           endpoint: 'http://127.0.0.1:9000',
+           endpoint: 'http://minio:9000',
            path_style: true,
            region: "gdk" }
         end
      end
 
-      it_behaves_like 'uses remote storage' do
-        let(:storage_url) { "http://127.0.0.1:9000/uploads/" }
+      context 'for known length' do
+        it_behaves_like 'uses remote storage without multipart uploads' do
+          it 'returns links for S3' do
+            expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
+            expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
+            expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
+          end
+        end
+      end
+
+      context 'for unknown length' do
+        let(:has_length) { false }
+        let(:maximum_size) { 1.gigabyte }
 
-        it 'returns links for S3' do
-          expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
-          expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
-          expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
+        before do
+          stub_object_storage_multipart_init(storage_url)
+        end
+
+        it_behaves_like 'uses remote storage with multipart uploads' do
+          it 'returns links for S3' do
+            expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
+            expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
+            expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
+            expect(subject[:RemoteObject][:MultipartUpload][:PartURLs]).to all(start_with(storage_url))
+            expect(subject[:RemoteObject][:MultipartUpload][:CompleteURL]).to start_with(storage_url)
+            expect(subject[:RemoteObject][:MultipartUpload][:AbortURL]).to start_with(storage_url)
+          end
        end
      end
    end
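As a closing note, the new stub_object_storage_multipart_init helper can be exercised along the following lines. This usage example is hypothetical (it is not a spec from this merge request), but it relies only on code the diff itself introduces: the helper, the ObjectStorage::DirectUpload constructor signature, and the MultipartUpload keys of #to_hash.

    # Hypothetical spec: stub the S3 InitiateMultipartUpload POST so
    # ObjectStorage::DirectUpload can build part URLs without network access.
    describe 'presigned multipart upload' do
      let(:credentials) do
        {
          provider: 'AWS',
          aws_access_key_id: 'AWS_ACCESS_KEY_ID',
          aws_secret_access_key: 'AWS_SECRET_ACCESS_KEY'
        }
      end

      let(:storage_url) { 'https://uploads.s3.amazonaws.com/' }

      before do
        stub_object_storage_multipart_init(storage_url, 'myUpload')
      end

      it 'embeds the stubbed upload id in every part URL' do
        upload = ObjectStorage::DirectUpload.new(credentials, 'uploads', 'tmp/uploads/my-file',
                                                 has_length: false, maximum_size: 1.gigabyte)

        expect(upload.to_hash[:MultipartUpload][:PartURLs]).to all(include('uploadId=myUpload'))
      end
    end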