author    | GitLab Bot <gitlab-bot@gitlab.com> | 2019-10-18 11:11:44 +0000
committer | GitLab Bot <gitlab-bot@gitlab.com> | 2019-10-18 11:11:44 +0000
commit    | 25989ab7ef1a444ed2abd5479f176d58e1d9462a
tree      | 271bb24f3c7178f320cb9de0be0833a285327d09 /spec/requests
parent    | 9bbb32b29703f3ce33dd35d5101145774b793a6d
download  | gitlab-ce-25989ab7ef1a444ed2abd5479f176d58e1d9462a.tar.gz
Add latest changes from gitlab-org/gitlab@master
Diffstat (limited to 'spec/requests')
-rw-r--r-- | spec/requests/api/commit_statuses_spec.rb            | 60
-rw-r--r-- | spec/requests/api/graphql/namespace/projects_spec.rb | 1
-rw-r--r-- | spec/requests/api/pages/internal_access_spec.rb      | 1
-rw-r--r-- | spec/requests/api/pages/private_access_spec.rb       | 1
-rw-r--r-- | spec/requests/api/pages/public_access_spec.rb        | 1
-rw-r--r-- | spec/requests/api/runner_spec.rb                     | 25
-rw-r--r-- | spec/requests/lfs_http_spec.rb                       | 54
7 files changed, 94 insertions, 49 deletions
diff --git a/spec/requests/api/commit_statuses_spec.rb b/spec/requests/api/commit_statuses_spec.rb
index 1be8883bd3c..6cb02ba2f6b 100644
--- a/spec/requests/api/commit_statuses_spec.rb
+++ b/spec/requests/api/commit_statuses_spec.rb
@@ -125,25 +125,55 @@ describe API::CommitStatuses do
     let(:post_url) { "/projects/#{project.id}/statuses/#{sha}" }

     context 'developer user' do
-      %w[pending running success failed canceled].each do |status|
-        context "for #{status}" do
-          context 'uses only required parameters' do
-            it 'creates commit status' do
-              post api(post_url, developer), params: { state: status }
+      context 'uses only required parameters' do
+        %w[pending running success failed canceled].each do |status|
+          context "for #{status}" do
+            context 'when pipeline for sha does not exists' do
+              it 'creates commit status' do
+                post api(post_url, developer), params: { state: status }
+
+                expect(response).to have_gitlab_http_status(201)
+                expect(json_response['sha']).to eq(commit.id)
+                expect(json_response['status']).to eq(status)
+                expect(json_response['name']).to eq('default')
+                expect(json_response['ref']).not_to be_empty
+                expect(json_response['target_url']).to be_nil
+                expect(json_response['description']).to be_nil
+
+                if status == 'failed'
+                  expect(CommitStatus.find(json_response['id'])).to be_api_failure
+                end
+              end
+            end
+          end
+        end
+
+        context 'when pipeline already exists for the specified sha' do
+          let!(:pipeline) { create(:ci_pipeline, project: project, sha: sha, ref: 'ref') }
+          let(:params) { { state: 'pending' } }
+
+          shared_examples_for 'creates a commit status for the existing pipeline' do
+            it do
+              expect do
+                post api(post_url, developer), params: params
+              end.not_to change { Ci::Pipeline.count }
+
+              job = pipeline.statuses.find_by_name(json_response['name'])

               expect(response).to have_gitlab_http_status(201)
-              expect(json_response['sha']).to eq(commit.id)
-              expect(json_response['status']).to eq(status)
-              expect(json_response['name']).to eq('default')
-              expect(json_response['ref']).not_to be_empty
-              expect(json_response['target_url']).to be_nil
-              expect(json_response['description']).to be_nil
-
-              if status == 'failed'
-                expect(CommitStatus.find(json_response['id'])).to be_api_failure
-              end
+              expect(job.status).to eq('pending')
             end
           end
+
+          it_behaves_like 'creates a commit status for the existing pipeline'
+
+          context 'with pipeline for merge request' do
+            let!(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline, source_project: project) }
+            let!(:pipeline) { merge_request.all_pipelines.last }
+            let(:sha) { pipeline.sha }
+
+            it_behaves_like 'creates a commit status for the existing pipeline'
+          end
         end
       end

diff --git a/spec/requests/api/graphql/namespace/projects_spec.rb b/spec/requests/api/graphql/namespace/projects_spec.rb
index 815e9531ecf..2a95b99572f 100644
--- a/spec/requests/api/graphql/namespace/projects_spec.rb
+++ b/spec/requests/api/graphql/namespace/projects_spec.rb
@@ -67,6 +67,7 @@ describe 'getting projects' do

   context 'when the namespace is a user' do
     subject { user.namespace }
+
     let(:include_subgroups) { false }

     it_behaves_like 'a graphql namespace'
diff --git a/spec/requests/api/pages/internal_access_spec.rb b/spec/requests/api/pages/internal_access_spec.rb
index c41eabe0a48..28abe1a8456 100644
--- a/spec/requests/api/pages/internal_access_spec.rb
+++ b/spec/requests/api/pages/internal_access_spec.rb
@@ -27,6 +27,7 @@ describe "Internal Project Pages Access" do
   describe "Project should be internal" do
     describe '#internal?' do
       subject { project.internal? }
+
       it { is_expected.to be_truthy }
     end
   end
diff --git a/spec/requests/api/pages/private_access_spec.rb b/spec/requests/api/pages/private_access_spec.rb
index c647537038e..6af441caf74 100644
--- a/spec/requests/api/pages/private_access_spec.rb
+++ b/spec/requests/api/pages/private_access_spec.rb
@@ -27,6 +27,7 @@ describe "Private Project Pages Access" do
   describe "Project should be private" do
     describe '#private?' do
       subject { project.private? }
+
       it { is_expected.to be_truthy }
     end
   end
diff --git a/spec/requests/api/pages/public_access_spec.rb b/spec/requests/api/pages/public_access_spec.rb
index 16cc5697f30..d99224eca5b 100644
--- a/spec/requests/api/pages/public_access_spec.rb
+++ b/spec/requests/api/pages/public_access_spec.rb
@@ -27,6 +27,7 @@ describe "Public Project Pages Access" do
   describe "Project should be public" do
     describe '#public?' do
       subject { project.public? }
+
       it { is_expected.to be_truthy }
     end
   end
diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb
index bc3a04420f9..70a95663aea 100644
--- a/spec/requests/api/runner_spec.rb
+++ b/spec/requests/api/runner_spec.rb
@@ -3,6 +3,7 @@ require 'spec_helper'
 describe API::Runner, :clean_gitlab_redis_shared_state do
   include StubGitlabCalls
   include RedisHelpers
+  include WorkhorseHelpers

   let(:registration_token) { 'abcdefg123456' }

@@ -1395,7 +1396,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do

           expect(response).to have_gitlab_http_status(200)
           expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
-          expect(json_response['TempPath']).to eq(JobArtifactUploader.workhorse_local_upload_path)
+          expect(json_response).not_to have_key('TempPath')
           expect(json_response['RemoteObject']).to have_key('ID')
           expect(json_response['RemoteObject']).to have_key('GetURL')
           expect(json_response['RemoteObject']).to have_key('StoreURL')
@@ -1562,15 +1563,16 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
          let!(:fog_connection) do
            stub_artifacts_object_storage(direct_upload: true)
          end
-
-         before do
+         let(:object) do
            fog_connection.directories.new(key: 'artifacts').files.create(
              key: 'tmp/uploads/12312300',
              body: 'content'
            )
+         end
+         let(:file_upload) { fog_to_uploaded_file(object) }

-           upload_artifacts(file_upload, headers_with_token,
-                            { 'file.remote_id' => remote_id })
+         before do
+           upload_artifacts(file_upload, headers_with_token, 'file.remote_id' => remote_id)
          end

          context 'when valid remote_id is used' do
@@ -1804,12 +1806,13 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
     end

     def upload_artifacts(file, headers = {}, params = {})
-      params = params.merge({
-        'file.path' => file.path,
-        'file.name' => file.original_filename
-      })
-
-      post api("/jobs/#{job.id}/artifacts"), params: params, headers: headers
+      workhorse_finalize(
+        api("/jobs/#{job.id}/artifacts"),
+        method: :post,
+        file_key: :file,
+        params: params.merge(file: file),
+        headers: headers
+      )
     end
   end

diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb
index ae34f7d1f87..62b9ee1d361 100644
--- a/spec/requests/lfs_http_spec.rb
+++ b/spec/requests/lfs_http_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
 describe 'Git LFS API and storage' do
   include LfsHttpHelpers
   include ProjectForksHelper
+  include WorkhorseHelpers

   set(:project) { create(:project, :repository) }
   set(:other_project) { create(:project, :repository) }
@@ -933,7 +934,7 @@ describe 'Git LFS API and storage' do

       it_behaves_like 'a valid response' do
         it 'responds with status 200, location of LFS remote store and object details' do
-          expect(json_response['TempPath']).to eq(LfsObjectUploader.workhorse_local_upload_path)
+          expect(json_response).not_to have_key('TempPath')
           expect(json_response['RemoteObject']).to have_key('ID')
           expect(json_response['RemoteObject']).to have_key('GetURL')
           expect(json_response['RemoteObject']).to have_key('StoreURL')
@@ -992,10 +993,17 @@ describe 'Git LFS API and storage' do
          stub_lfs_object_storage(direct_upload: true)
        end

+       let(:tmp_object) do
+         fog_connection.directories.new(key: 'lfs-objects').files.create(
+           key: 'tmp/uploads/12312300',
+           body: 'content'
+         )
+       end
+
        ['123123', '../../123123'].each do |remote_id|
          context "with invalid remote_id: #{remote_id}" do
            subject do
-             put_finalize(with_tempfile: true, args: {
+             put_finalize(remote_object: tmp_object, args: {
                'file.remote_id' => remote_id
              })
            end
@@ -1009,15 +1017,8 @@ describe 'Git LFS API and storage' do
        end

        context 'with valid remote_id' do
-         before do
-           fog_connection.directories.new(key: 'lfs-objects').files.create(
-             key: 'tmp/uploads/12312300',
-             body: 'content'
-           )
-         end
-
          subject do
-           put_finalize(with_tempfile: true, args: {
+           put_finalize(remote_object: tmp_object, args: {
              'file.remote_id' => '12312300',
              'file.name' => 'name'
            })
@@ -1027,6 +1028,10 @@ describe 'Git LFS API and storage' do
            subject

            expect(response).to have_gitlab_http_status(200)
+
+           object = LfsObject.find_by_oid(sample_oid)
+           expect(object).to be_present
+           expect(object.file.read).to eq(tmp_object.body)
          end

          it 'schedules migration of file to object storage' do
@@ -1268,28 +1273,31 @@ describe 'Git LFS API and storage' do
      put authorize_url(project, sample_oid, sample_size), params: {}, headers: authorize_headers
    end

-   def put_finalize(lfs_tmp = lfs_tmp_file, with_tempfile: false, verified: true, args: {})
-     upload_path = LfsObjectUploader.workhorse_local_upload_path
-     file_path = upload_path + '/' + lfs_tmp if lfs_tmp
+   def put_finalize(lfs_tmp = lfs_tmp_file, with_tempfile: false, verified: true, remote_object: nil, args: {})
+     uploaded_file = nil

     if with_tempfile
+       upload_path = LfsObjectUploader.workhorse_local_upload_path
+       file_path = upload_path + '/' + lfs_tmp if lfs_tmp
+
       FileUtils.mkdir_p(upload_path)
       FileUtils.touch(file_path)
-     end
-
-     extra_args = {
-       'file.path' => file_path,
-       'file.name' => File.basename(file_path)
-     }

-     put_finalize_with_args(args.merge(extra_args).compact, verified: verified)
-   end
+       uploaded_file = UploadedFile.new(file_path, filename: File.basename(file_path))
+     elsif remote_object
+       uploaded_file = fog_to_uploaded_file(remote_object)
+     end

-   def put_finalize_with_args(args, verified:)
     finalize_headers = headers
     finalize_headers.merge!(workhorse_internal_api_request_header) if verified

-     put objects_url(project, sample_oid, sample_size), params: args, headers: finalize_headers
+     workhorse_finalize(
+       objects_url(project, sample_oid, sample_size),
+       method: :put,
+       file_key: :file,
+       params: args.merge(file: uploaded_file),
+       headers: finalize_headers
+     )
   end

   def lfs_tmp_file