Diffstat (limited to 'spec')
142 files changed, 5288 insertions, 1591 deletions
diff --git a/spec/controllers/concerns/send_file_upload_spec.rb b/spec/controllers/concerns/send_file_upload_spec.rb
new file mode 100644
index 00000000000..f4c99ea4064
--- /dev/null
+++ b/spec/controllers/concerns/send_file_upload_spec.rb
@@ -0,0 +1,89 @@
+require 'spec_helper'
+
+describe SendFileUpload do
+  let(:uploader_class) do
+    Class.new(GitlabUploader) do
+      include ObjectStorage::Concern
+
+      storage_options Gitlab.config.uploads
+
+      private
+
+      # user/:id
+      def dynamic_segment
+        File.join(model.class.to_s.underscore, model.id.to_s)
+      end
+    end
+  end
+
+  let(:controller_class) do
+    Class.new do
+      include SendFileUpload
+    end
+  end
+
+  let(:object) { build_stubbed(:user) }
+  let(:uploader) { uploader_class.new(object, :file) }
+
+  describe '#send_upload' do
+    let(:controller) { controller_class.new }
+    let(:temp_file) { Tempfile.new('test') }
+
+    subject { controller.send_upload(uploader) }
+
+    before do
+      FileUtils.touch(temp_file)
+    end
+
+    after do
+      FileUtils.rm_f(temp_file)
+    end
+
+    context 'when local file is used' do
+      before do
+        uploader.store!(temp_file)
+      end
+
+      it 'sends a file' do
+        expect(controller).to receive(:send_file).with(uploader.path, anything)
+
+        subject
+      end
+    end
+
+    context 'when remote file is used' do
+      before do
+        stub_uploads_object_storage(uploader: uploader_class)
+        uploader.object_store = ObjectStorage::Store::REMOTE
+        uploader.store!(temp_file)
+      end
+
+      context 'and proxying is enabled' do
+        before do
+          allow(Gitlab.config.uploads.object_store).to receive(:proxy_download) { true }
+        end
+
+        it 'sends a file' do
+          headers = double
+          expect(headers).to receive(:store).with(Gitlab::Workhorse::SEND_DATA_HEADER, /^send-url:/)
+          expect(controller).to receive(:headers) { headers }
+          expect(controller).to receive(:head).with(:ok)
+
+          subject
+        end
+      end
+
+      context 'and proxying is disabled' do
+        before do
+          allow(Gitlab.config.uploads.object_store).to receive(:proxy_download) { false }
+        end
+
+        it 'sends a file' do
+          expect(controller).to receive(:redirect_to).with(/#{uploader.path}/)
+
+          subject
+        end
+      end
+    end
+  end
+end
diff --git a/spec/controllers/projects/artifacts_controller_spec.rb b/spec/controllers/projects/artifacts_controller_spec.rb
index 25a2e13fe1a..4ea6f869aa3 100644
--- a/spec/controllers/projects/artifacts_controller_spec.rb
+++ b/spec/controllers/projects/artifacts_controller_spec.rb
@@ -145,9 +145,23 @@ describe Projects::ArtifactsController do
     context 'when using local file storage' do
       it_behaves_like 'a valid file' do
         let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
+        let(:store) { ObjectStorage::Store::LOCAL }
         let(:archive_path) { JobArtifactUploader.root }
       end
     end
+
+    context 'when using remote file storage' do
+      before do
+        stub_artifacts_object_storage
+      end
+
+      it_behaves_like 'a valid file' do
+        let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
+        let!(:job) { create(:ci_build, :success, pipeline: pipeline) }
+        let(:store) { ObjectStorage::Store::REMOTE }
+        let(:archive_path) { 'https://' }
+      end
+    end
   end
 end
diff --git a/spec/controllers/projects/ci/lints_controller_spec.rb b/spec/controllers/projects/ci/lints_controller_spec.rb
new file mode 100644
index 00000000000..1249a5528a9
--- /dev/null
+++ b/spec/controllers/projects/ci/lints_controller_spec.rb
@@ -0,0 +1,123 @@
+require 'spec_helper'
+
+describe Projects::Ci::LintsController do
+  let(:project) { create(:project, :repository) }
+  let(:user) { create(:user) }
+
+  before do
+    sign_in(user)
+  end
+
+  describe 'GET #show' do
+    context 'with enough privileges' do
+      before do
+        project.add_developer(user)
+
+        get :show, namespace_id: project.namespace, project_id: project
+      end
+
+      it 'should be success' do
+        expect(response).to be_success
+      end
+
+      it 'should render show page' do
+        expect(response).to render_template :show
+      end
+
+      it 'should retrieve project' do
+        expect(assigns(:project)).to eq(project)
+      end
+    end
+
+    context 'without enough privileges' do
+      before do
+        project.add_guest(user)
+
+        get :show, namespace_id: project.namespace, project_id: project
+      end
+
+      it 'should respond with 404' do
+        expect(response).to have_gitlab_http_status(404)
+      end
+    end
+  end
+
+  describe 'POST #create' do
+    let(:remote_file_path) { 'https://gitlab.com/gitlab-org/gitlab-ce/blob/1234/.gitlab-ci-1.yml' }
+
+    let(:remote_file_content) do
+      <<~HEREDOC
+        before_script:
+          - apt-get update -qq && apt-get install -y -qq sqlite3 libsqlite3-dev nodejs
+          - ruby -v
+          - which ruby
+          - gem install bundler --no-ri --no-rdoc
+          - bundle install --jobs $(nproc) "${FLAGS[@]}"
+      HEREDOC
+    end
+
+    let(:content) do
+      <<~HEREDOC
+        include:
+          - #{remote_file_path}
+
+        rubocop:
+          script:
+            - bundle exec rubocop
+      HEREDOC
+    end
+
+    context 'with a valid gitlab-ci.yml' do
+      before do
+        WebMock.stub_request(:get, remote_file_path).to_return(body: remote_file_content)
+        project.add_developer(user)
+
+        post :create, namespace_id: project.namespace, project_id: project, content: content
+      end
+
+      it 'should be success' do
+        expect(response).to be_success
+      end
+
+      it 'render show page' do
+        expect(response).to render_template :show
+      end
+
+      it 'should retrieve project' do
+        expect(assigns(:project)).to eq(project)
+      end
+    end
+
+    context 'with an invalid gitlab-ci.yml' do
+      let(:content) do
+        <<~HEREDOC
+          rubocop:
+            scriptt:
+              - bundle exec rubocop
+        HEREDOC
+      end
+
+      before do
+        project.add_developer(user)
+
+        post :create, namespace_id: project.namespace, project_id: project, content: content
+      end
+
+      it 'should assign errors' do
+        expect(assigns[:error]).to eq('jobs:rubocop config contains unknown keys: scriptt')
+      end
+    end
+
+    context 'without enough privileges' do
+      before do
+        project.add_guest(user)
+
+        post :create, namespace_id: project.namespace, project_id: project, content: content
+      end
+
+      it 'should respond with 404' do
+        expect(response).to have_gitlab_http_status(404)
+      end
+    end
+  end
+end
diff --git a/spec/controllers/projects/clusters_controller_spec.rb b/spec/controllers/projects/clusters_controller_spec.rb
index 15ce418d0d6..82b20e12850 100644
--- a/spec/controllers/projects/clusters_controller_spec.rb
+++ b/spec/controllers/projects/clusters_controller_spec.rb
@@ -18,7 +18,7 @@ describe Projects::ClustersController do
     context 'when project has one or more clusters' do
       let(:project) { create(:project) }
       let!(:enabled_cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
-      let!(:disabled_cluster) { create(:cluster, :disabled, :provided_by_gcp, projects: [project]) }
+      let!(:disabled_cluster) { create(:cluster, :disabled, :provided_by_gcp, :production_environment, projects: [project]) }

       it 'lists available clusters' do
         go
@@ -32,7 +32,7 @@ describe Projects::ClustersController do
         before do
           allow(Clusters::Cluster).to receive(:paginates_per).and_return(1)

-          create_list(:cluster, 2, :provided_by_gcp, projects: [project])
+          create_list(:cluster, 2, :provided_by_gcp, :production_environment, projects: [project])

           get :index, namespace_id: project.namespace, project_id: project, page: last_page
         end
diff --git a/spec/controllers/projects/jobs_controller_spec.rb b/spec/controllers/projects/jobs_controller_spec.rb
index f3e303bb0fe..31046c202e6 100644
--- a/spec/controllers/projects/jobs_controller_spec.rb
+++ b/spec/controllers/projects/jobs_controller_spec.rb
@@ -1,7 +1,9 @@
+# coding: utf-8
 require 'spec_helper'

 describe Projects::JobsController do
   include ApiHelpers
+  include HttpIOHelpers

   let(:project) { create(:project, :public) }
   let(:pipeline) { create(:ci_pipeline, project: project) }
@@ -203,6 +205,41 @@ describe Projects::JobsController do
       end
     end

+    context 'when trace artifact is in ObjectStorage' do
+      let!(:job) { create(:ci_build, :success, :trace_artifact, pipeline: pipeline) }
+
+      before do
+        allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
+        allow_any_instance_of(JobArtifactUploader).to receive(:url) { remote_trace_url }
+        allow_any_instance_of(JobArtifactUploader).to receive(:size) { remote_trace_size }
+      end
+
+      context 'when there are no network issues' do
+        before do
+          stub_remote_trace_206
+
+          get_trace
+        end
+
+        it 'returns a trace' do
+          expect(response).to have_gitlab_http_status(:ok)
+          expect(json_response['id']).to eq job.id
+          expect(json_response['status']).to eq job.status
+          expect(json_response['html']).to eq(job.trace.html)
+        end
+      end
+
+      context 'when there is a network issue' do
+        before do
+          stub_remote_trace_500
+        end
+
+        it 'returns a trace' do
+          expect { get_trace }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError)
+        end
+      end
+    end
+
     def get_trace
       get :trace, namespace_id: project.namespace,
                   project_id: project,
@@ -446,14 +483,18 @@ describe Projects::JobsController do
     end
   end

   describe 'GET raw' do
-    before do
-      get_raw
+    subject do
+      post :raw, namespace_id: project.namespace,
+                 project_id: project,
+                 id: job.id
     end

     context 'when job has a trace artifact' do
       let(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }

       it 'returns a trace' do
+        response = subject
+
         expect(response).to have_gitlab_http_status(:ok)
         expect(response.content_type).to eq 'text/plain; charset=utf-8'
         expect(response.body).to eq job.job_artifacts_trace.open.read
@@ -464,6 +505,8 @@ describe Projects::JobsController do
       let(:job) { create(:ci_build, :trace_live, pipeline: pipeline) }

       it 'send a trace file' do
+        response = subject
+
         expect(response).to have_gitlab_http_status(:ok)
         expect(response.content_type).to eq 'text/plain; charset=utf-8'
         expect(response.body).to eq 'BUILD TRACE'
@@ -474,14 +517,22 @@ describe Projects::JobsController do
       let(:job) { create(:ci_build, pipeline: pipeline) }

       it 'returns not_found' do
+        response = subject
+
         expect(response).to have_gitlab_http_status(:not_found)
       end
     end

-    def get_raw
-      post :raw, namespace_id: project.namespace,
-                 project_id: project,
-                 id: job.id
+    context 'when the trace artifact is in ObjectStorage' do
+      let!(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
+
+      before do
+        allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
+      end
+
+      it 'redirect to the trace file url' do
+        expect(subject).to redirect_to(job.job_artifacts_trace.file.url)
+      end
     end
   end
 end
diff --git a/spec/controllers/projects/milestones_controller_spec.rb b/spec/controllers/projects/milestones_controller_spec.rb
index 306094f7ffb..548c5ef36e7 100644
--- a/spec/controllers/projects/milestones_controller_spec.rb
+++ b/spec/controllers/projects/milestones_controller_spec.rb
@@ -20,14 +20,23 @@ describe Projects::MilestonesController do
   describe "#show" do
     render_views

-    def view_milestone
-      get :show, namespace_id: project.namespace.id, project_id: project.id, id: milestone.iid
+    def view_milestone(options = {})
+      params = { namespace_id: project.namespace.id, project_id: project.id, id: milestone.iid }
+      get :show, params.merge(options)
     end

     it 'shows milestone page' do
       view_milestone

       expect(response).to have_gitlab_http_status(200)
+      expect(response.content_type).to eq 'text/html'
+    end
+
+    it 'returns milestone json' do
+      view_milestone format: :json
+
+      expect(response).to have_http_status(404)
+      expect(response.content_type).to eq 'application/json'
     end
   end

@@ -98,7 +107,7 @@ describe Projects::MilestonesController do
     it 'shows group milestone' do
       post :promote, namespace_id: project.namespace.id, project_id: project.id, id: milestone.iid

-      expect(flash[:notice]).to eq("#{milestone.title} promoted to group milestone")
+      expect(flash[:notice]).to eq("#{milestone.title} promoted to <a href=\"#{group_milestone_path(project.group, milestone.iid)}\">group milestone</a>.")
       expect(response).to redirect_to(project_milestones_path(project))
     end
   end
diff --git a/spec/controllers/projects/pipeline_schedules_controller_spec.rb b/spec/controllers/projects/pipeline_schedules_controller_spec.rb
index 966ffdf6996..3506305f755 100644
--- a/spec/controllers/projects/pipeline_schedules_controller_spec.rb
+++ b/spec/controllers/projects/pipeline_schedules_controller_spec.rb
@@ -80,7 +80,7 @@ describe Projects::PipelineSchedulesController do
       context 'when variables_attributes has one variable' do
         let(:schedule) do
           basic_param.merge({
-            variables_attributes: [{ key: 'AAA', value: 'AAA123' }]
+            variables_attributes: [{ key: 'AAA', secret_value: 'AAA123' }]
           })
         end

@@ -101,7 +101,8 @@ describe Projects::PipelineSchedulesController do
       context 'when variables_attributes has two variables and duplicated' do
         let(:schedule) do
           basic_param.merge({
-            variables_attributes: [{ key: 'AAA', value: 'AAA123' }, { key: 'AAA', value: 'BBB123' }]
+            variables_attributes: [{ key: 'AAA', secret_value: 'AAA123' },
+                                   { key: 'AAA', secret_value: 'BBB123' }]
           })
         end

@@ -152,7 +153,7 @@ describe Projects::PipelineSchedulesController do
       context 'when params include one variable' do
         let(:schedule) do
           basic_param.merge({
-            variables_attributes: [{ key: 'AAA', value: 'AAA123' }]
+            variables_attributes: [{ key: 'AAA', secret_value: 'AAA123' }]
           })
         end

@@ -169,7 +170,8 @@ describe Projects::PipelineSchedulesController do
      context 'when params include two duplicated variables' do
        let(:schedule) do
          basic_param.merge({
-            variables_attributes: [{ key: 'AAA', value: 'AAA123' }, { key: 'AAA', value: 'BBB123' }]
+            variables_attributes: [{ key: 'AAA', secret_value: 'AAA123' },
+                                   { key: 'AAA', secret_value: 'BBB123' }]
          })
        end

@@ -194,7 +196,7 @@ describe Projects::PipelineSchedulesController do
      context 'when adds a new variable' do
        let(:schedule) do
          basic_param.merge({
-            variables_attributes: [{ key: 'AAA', value: 'AAA123' }]
+            variables_attributes: [{ key: 'AAA', secret_value: 'AAA123' }]
          })
        end
@@ -209,7 +211,7 @@ describe Projects::PipelineSchedulesController do
      context 'when adds a new duplicated variable' do
        let(:schedule) do
          basic_param.merge({
-            variables_attributes: [{ key: 'CCC', value: 'AAA123' }]
+            variables_attributes: [{ key: 'CCC', secret_value: 'AAA123' }]
          })
        end

@@ -224,7 +226,7 @@ describe Projects::PipelineSchedulesController do
      context 'when updates a variable' do
        let(:schedule) do
          basic_param.merge({
-            variables_attributes: [{ id: pipeline_schedule_variable.id, value: 'new_value' }]
+            variables_attributes: [{ id: pipeline_schedule_variable.id, secret_value: 'new_value' }]
          })
        end

@@ -252,7 +254,7 @@ describe Projects::PipelineSchedulesController do
        let(:schedule) do
          basic_param.merge({
            variables_attributes: [{ id: pipeline_schedule_variable.id, _destroy: true },
-                                   { key: 'CCC', value: 'CCC123' }]
+                                   { key: 'CCC', secret_value: 'CCC123' }]
          })
        end
diff --git a/spec/controllers/projects/protected_branches_controller_spec.rb b/spec/controllers/projects/protected_branches_controller_spec.rb
index 80be135b5d8..096e29bc39f 100644
--- a/spec/controllers/projects/protected_branches_controller_spec.rb
+++ b/spec/controllers/projects/protected_branches_controller_spec.rb
@@ -1,6 +1,16 @@
 require('spec_helper')

 describe Projects::ProtectedBranchesController do
+  let(:project) { create(:project, :repository) }
+  let(:protected_branch) { create(:protected_branch, project: project) }
+  let(:project_params) { { namespace_id: project.namespace.to_param, project_id: project } }
+  let(:base_params) { project_params.merge(id: protected_branch.id) }
+  let(:user) { create(:user) }
+
+  before do
+    project.add_master(user)
+  end
+
   describe "GET #index" do
     let(:project) { create(:project_empty_repo, :public) }

@@ -8,4 +18,91 @@ describe Projects::ProtectedBranchesController do
       get(:index, namespace_id: project.namespace.to_param, project_id: project)
     end
   end
+
+  describe "POST #create" do
+    let(:master_access_level) { [{ access_level: Gitlab::Access::MASTER }] }
+    let(:access_level_params) do
+      { merge_access_levels_attributes: master_access_level,
+        push_access_levels_attributes: master_access_level }
+    end
+    let(:create_params) { attributes_for(:protected_branch).merge(access_level_params) }
+
+    before do
+      sign_in(user)
+    end
+
+    it 'creates the protected branch rule' do
+      expect do
+        post(:create, project_params.merge(protected_branch: create_params))
+      end.to change(ProtectedBranch, :count).by(1)
+    end
+
+    context 'when a policy restricts rule deletion' do
+      before do
+        policy = instance_double(ProtectedBranchPolicy, can?: false)
+        allow(ProtectedBranchPolicy).to receive(:new).and_return(policy)
+      end
+
+      it "prevents creation of the protected branch rule" do
+        post(:create, project_params.merge(protected_branch: create_params))
+
+        expect(ProtectedBranch.count).to eq 0
+      end
+    end
+  end
+
+  describe "PUT #update" do
+    let(:update_params) { { name: 'new_name' } }
+
+    before do
+      sign_in(user)
+    end
+
+    it 'updates the protected branch rule' do
+      put(:update, base_params.merge(protected_branch: update_params))
+
+      expect(protected_branch.reload.name).to eq('new_name')
+      expect(json_response["name"]).to eq('new_name')
+    end
+
+    context 'when a policy restricts rule deletion' do
+      before do
+        policy = instance_double(ProtectedBranchPolicy, can?: false)
+        allow(ProtectedBranchPolicy).to receive(:new).and_return(policy)
+      end
+
+      it "prevents update of the protected branch rule" do
+        old_name = protected_branch.name
+
+        put(:update, base_params.merge(protected_branch: update_params))
+
+        expect(protected_branch.reload.name).to eq(old_name)
+      end
+    end
+  end
+
+  describe "DELETE #destroy" do
+    before do
+      sign_in(user)
+    end
+
+    it "deletes the protected branch rule" do
+      delete(:destroy, base_params)
+
+      expect { ProtectedBranch.find(protected_branch.id) }.to raise_error(ActiveRecord::RecordNotFound)
+    end
+
+    context 'when a policy restricts rule deletion' do
+      before do
+        policy = instance_double(ProtectedBranchPolicy, can?: false)
+        allow(ProtectedBranchPolicy).to receive(:new).and_return(policy)
+      end
+
+      it "prevents deletion of the protected branch rule" do
+        delete(:destroy, base_params)
+
+        expect(response.status).to eq(403)
+      end
+    end
+  end
 end
diff --git a/spec/controllers/projects/raw_controller_spec.rb b/spec/controllers/projects/raw_controller_spec.rb
index b7df42168e0..08e2ccf893a 100644
--- a/spec/controllers/projects/raw_controller_spec.rb
+++ b/spec/controllers/projects/raw_controller_spec.rb
@@ -8,10 +8,7 @@ describe Projects::RawController do
     let(:id) { 'master/README.md' }

     it 'delivers ASCII file' do
-      get(:show,
-          namespace_id: public_project.namespace.to_param,
-          project_id: public_project,
-          id: id)
+      get_show(public_project, id)

       expect(response).to have_gitlab_http_status(200)
       expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8')
@@ -25,10 +22,7 @@ describe Projects::RawController do
     let(:id) { 'master/files/images/6049019_460s.jpg' }

     it 'sets image content type header' do
-      get(:show,
-          namespace_id: public_project.namespace.to_param,
-          project_id: public_project,
-          id: id)
+      get_show(public_project, id)

       expect(response).to have_gitlab_http_status(200)
       expect(response.header['Content-Type']).to eq('image/jpeg')
@@ -54,21 +48,40 @@ describe Projects::RawController do
         it 'serves the file' do
           expect(controller).to receive(:send_file).with("#{LfsObjectUploader.root}/91/ef/f75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897", filename: 'lfs_object.iso', disposition: 'attachment')

-          get(:show,
-              namespace_id: public_project.namespace.to_param,
-              project_id: public_project,
-              id: id)
+          get_show(public_project, id)

           expect(response).to have_gitlab_http_status(200)
         end
+
+        context 'and lfs uses object storage' do
+          before do
+            lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "`/png")
+            lfs_object.save!
+            stub_lfs_object_storage
+            lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
+          end
+
+          it 'responds with redirect to file' do
+            get_show(public_project, id)
+
+            expect(response).to have_gitlab_http_status(302)
+            expect(response.location).to include(lfs_object.reload.file.path)
+          end
+
+          it 'sets content disposition' do
+            get_show(public_project, id)
+
+            file_uri = URI.parse(response.location)
+            params = CGI.parse(file_uri.query)
+
+            expect(params["response-content-disposition"].first).to eq 'attachment;filename="lfs_object.iso"'
+          end
+        end
       end

       context 'when project does not have access' do
         it 'does not serve the file' do
-          get(:show,
-              namespace_id: public_project.namespace.to_param,
-              project_id: public_project,
-              id: id)
+          get_show(public_project, id)

           expect(response).to have_gitlab_http_status(404)
         end
@@ -81,10 +94,7 @@ describe Projects::RawController do
       end

       it 'delivers ASCII file' do
-        get(:show,
-            namespace_id: public_project.namespace.to_param,
-            project_id: public_project,
-            id: id)
+        get_show(public_project, id)

         expect(response).to have_gitlab_http_status(200)
         expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8')
@@ -95,4 +105,10 @@ describe Projects::RawController do
       end
     end
   end
+
+  def get_show(project, id)
+    get(:show, namespace_id: project.namespace.to_param,
+        project_id: project,
+        id: id)
+  end
 end
diff --git a/spec/controllers/root_controller_spec.rb b/spec/controllers/root_controller_spec.rb
index b32eb39b1fb..7688538a468 100644
--- a/spec/controllers/root_controller_spec.rb
+++ b/spec/controllers/root_controller_spec.rb
@@ -90,6 +90,30 @@ describe RootController do
       end
     end

+    context 'who has customized their dashboard setting for assigned issues' do
+      before do
+        user.dashboard = 'issues'
+      end
+
+      it 'redirects to their assigned issues' do
+        get :index
+
+        expect(response).to redirect_to issues_dashboard_path(assignee_id: user.id)
+      end
+    end
+
+    context 'who has customized their dashboard setting for assigned merge requests' do
+      before do
+        user.dashboard = 'merge_requests'
+      end
+
+      it 'redirects to their assigned merge requests' do
+        get :index
+
+        expect(response).to redirect_to merge_requests_dashboard_path(assignee_id: user.id)
+      end
+    end
+
     context 'who uses the default dashboard setting' do
       it 'renders the default dashboard' do
         get :index
diff --git a/spec/factories/appearances.rb b/spec/factories/appearances.rb
index 5f9c57c0c8d..18c7453bd1b 100644
--- a/spec/factories/appearances.rb
+++ b/spec/factories/appearances.rb
@@ -2,8 +2,21 @@ FactoryBot.define do
   factory :appearance do
-    title "MepMep"
-    description "This is my Community Edition instance"
+    title "GitLab Community Edition"
+    description "Open source software to collaborate on code"
     new_project_guidelines "Custom project guidelines"
   end
+
+  trait :with_logo do
+    logo { fixture_file_upload('spec/fixtures/dk.png') }
+  end
+
+  trait :with_header_logo do
+    header_logo { fixture_file_upload('spec/fixtures/dk.png') }
+  end
+
+  trait :with_logos do
+    with_logo
+    with_header_logo
+  end
 end
diff --git a/spec/factories/ci/build_metadata.rb b/spec/factories/ci/build_metadata.rb
new file mode 100644
index 00000000000..66bbd977b88
--- /dev/null
+++ b/spec/factories/ci/build_metadata.rb
@@ -0,0 +1,9 @@
+FactoryBot.define do
+  factory :ci_build_metadata, class: Ci::BuildMetadata do
+    build factory: :ci_build
+
+    after(:build) do |build_metadata, _|
+      build_metadata.project ||= build_metadata.build.project
+    end
+  end
+end
diff --git a/spec/factories/ci/job_artifacts.rb b/spec/factories/ci/job_artifacts.rb
index 8544d54ccaa..3d3287d8168 100644
--- a/spec/factories/ci/job_artifacts.rb
+++ b/spec/factories/ci/job_artifacts.rb
@@ -5,6 +5,10 @@ FactoryBot.define do
     job factory: :ci_build
     file_type :archive

+    trait :remote_store do
+      file_store JobArtifactUploader::Store::REMOTE
+    end
+
     after :build do |artifact|
       artifact.project ||= artifact.job.project
     end
diff --git a/spec/factories/clusters/clusters.rb b/spec/factories/clusters/clusters.rb
index 20d5580f0c2..98566f907f9 100644
--- a/spec/factories/clusters/clusters.rb
+++ b/spec/factories/clusters/clusters.rb
@@ -32,5 +32,9 @@ FactoryBot.define do
     trait :disabled do
       enabled false
     end
+
+    trait :production_environment do
+      sequence(:environment_scope) { |n| "production#{n}/*" }
+    end
   end
 end
diff --git a/spec/factories/lfs_objects.rb b/spec/factories/lfs_objects.rb
index caaed4d5246..eaf3a4ed497 100644
--- a/spec/factories/lfs_objects.rb
+++ b/spec/factories/lfs_objects.rb
@@ -15,4 +15,8 @@ FactoryBot.define do
   trait :correct_oid do
     oid 'b804383982bb89b00e828e3f44c038cc991d3d1768009fc39ba8e2c081b9fb75'
   end
+
+  trait :object_storage do
+    file_store { LfsObjectUploader::Store::REMOTE }
+  end
 end
diff --git a/spec/factories/redirect_routes.rb b/spec/factories/redirect_routes.rb
index c29c81c5df9..774232d0b34 100644
--- a/spec/factories/redirect_routes.rb
+++ b/spec/factories/redirect_routes.rb
@@ -2,14 +2,5 @@ FactoryBot.define do
   factory :redirect_route do
     sequence(:path) { |n| "redirect#{n}" }
     source factory: :group
-    permanent false
-
-    trait :permanent do
-      permanent true
-    end
-
-    trait :temporary do
-      permanent false
-    end
   end
 end
diff --git a/spec/factories/uploads.rb b/spec/factories/uploads.rb
index ff3a2a76acc..b45f6f30e40 100644
--- a/spec/factories/uploads.rb
+++ b/spec/factories/uploads.rb
@@ -5,6 +5,7 @@ FactoryBot.define do
     uploader "AvatarUploader"
     mount_point :avatar
     secret nil
+    store ObjectStorage::Store::LOCAL

     # we should build a mount agnostic upload by default
     transient do
@@ -27,6 +28,10 @@ FactoryBot.define do
       secret SecureRandom.hex
     end

+    trait :object_storage do
+      store ObjectStorage::Store::REMOTE
+    end
+
     trait :namespace_upload do
       model { build(:group) }
       path { File.join(secret, filename) }
diff --git a/spec/features/admin/admin_disables_git_access_protocol_spec.rb b/spec/features/admin/admin_disables_git_access_protocol_spec.rb
index 9ea3cfa72c6..9946cc77d1d 100644
--- a/spec/features/admin/admin_disables_git_access_protocol_spec.rb
+++ b/spec/features/admin/admin_disables_git_access_protocol_spec.rb
@@ -55,14 +55,19 @@ feature 'Admin disables Git access protocol' do
   end

   def disable_http_protocol
-    visit admin_application_settings_path
-    find('#application_setting_enabled_git_access_protocol').find(:xpath, 'option[2]').select_option
-    click_on 'Save'
+    switch_git_protocol(2)
   end

   def disable_ssh_protocol
+    switch_git_protocol(3)
+  end
+
+  def switch_git_protocol(value)
     visit admin_application_settings_path
-    find('#application_setting_enabled_git_access_protocol').find(:xpath, 'option[3]').select_option
-    click_on 'Save'
+
+    page.within('.as-visibility-access') do
+      find('#application_setting_enabled_git_access_protocol').find(:xpath, "option[#{value}]").select_option
+      click_on 'Save'
+    end
   end
 end
diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb
index 39b213988f0..c89bc54cad4 100644
--- a/spec/features/admin/admin_settings_spec.rb
+++ b/spec/features/admin/admin_settings_spec.rb
@@ -10,18 +10,21 @@ feature 'Admin updates settings' do
   end

   scenario 'Change visibility settings' do
-    choose "application_setting_default_project_visibility_20"
-    click_button 'Save'
+    page.within('.as-visibility-access') do
+      choose "application_setting_default_project_visibility_20"
+      click_button 'Save changes'
+    end

     expect(page).to have_content "Application settings saved successfully"
   end

   scenario 'Uncheck all restricted visibility levels' do
-    find('#application_setting_visibility_level_0').set(false)
-    find('#application_setting_visibility_level_10').set(false)
-    find('#application_setting_visibility_level_20').set(false)
-
-    click_button 'Save'
+    page.within('.as-visibility-access') do
+      find('#application_setting_visibility_level_0').set(false)
+      find('#application_setting_visibility_level_10').set(false)
+      find('#application_setting_visibility_level_20').set(false)
+      click_button 'Save changes'
+    end

     expect(page).to have_content "Application settings saved successfully"
     expect(find('#application_setting_visibility_level_0')).not_to be_checked
@@ -29,34 +32,94 @@ feature 'Admin updates settings' do
     expect(find('#application_setting_visibility_level_20')).not_to be_checked
   end

-  scenario 'Change application settings' do
-    uncheck 'Gravatar enabled'
-    fill_in 'Home page URL', with: 'https://about.gitlab.com/'
-    fill_in 'Help page text', with: 'Example text'
-    check 'Hide marketing-related entries from help'
-    fill_in 'Support page URL', with: 'http://example.com/help'
-    uncheck 'Project export enabled'
-    click_button 'Save'
+  scenario 'Change Visibility and Access Controls' do
+    page.within('.as-visibility-access') do
+      uncheck 'Project export enabled'
+      click_button 'Save changes'
+    end
+
+    expect(Gitlab::CurrentSettings.project_export_enabled).to be_falsey
+    expect(page).to have_content "Application settings saved successfully"
+  end
+
+  scenario 'Change Account and Limit Settings' do
+    page.within('.as-account-limit') do
+      uncheck 'Gravatar enabled'
+      click_button 'Save changes'
+    end

     expect(Gitlab::CurrentSettings.gravatar_enabled).to be_falsey
+    expect(page).to have_content "Application settings saved successfully"
+  end
+
+  scenario 'Change Sign-in restrictions' do
+    page.within('.as-signin') do
+      fill_in 'Home page URL', with: 'https://about.gitlab.com/'
+      click_button 'Save changes'
+    end
+
     expect(Gitlab::CurrentSettings.home_page_url).to eq "https://about.gitlab.com/"
+    expect(page).to have_content "Application settings saved successfully"
+  end
+
+  scenario 'Change Help page' do
+    page.within('.as-help-page') do
+      fill_in 'Help page text', with: 'Example text'
+      check 'Hide marketing-related entries from help'
+      fill_in 'Support page URL', with: 'http://example.com/help'
+      click_button 'Save changes'
+    end
+
     expect(Gitlab::CurrentSettings.help_page_text).to eq "Example text"
     expect(Gitlab::CurrentSettings.help_page_hide_commercial_content).to be_truthy
     expect(Gitlab::CurrentSettings.help_page_support_url).to eq "http://example.com/help"
-    expect(Gitlab::CurrentSettings.project_export_enabled).to be_falsey
     expect(page).to have_content "Application settings saved successfully"
   end

-  scenario 'Change AutoDevOps settings' do
-    check 'Enabled Auto DevOps (Beta) for projects by default'
-    fill_in 'Auto devops domain', with: 'domain.com'
-    click_button 'Save'
+  scenario 'Change Pages settings' do
+    page.within('.as-pages') do
+      fill_in 'Maximum size of pages (MB)', with: 15
+      check 'Require users to prove ownership of custom domains'
+      click_button 'Save changes'
+    end
+
+    expect(Gitlab::CurrentSettings.max_pages_size).to eq 15
+    expect(Gitlab::CurrentSettings.pages_domain_verification_enabled?).to be_truthy
+    expect(page).to have_content "Application settings saved successfully"
+  end
+
+  scenario 'Change CI/CD settings' do
+    page.within('.as-ci-cd') do
+      check 'Enabled Auto DevOps (Beta) for projects by default'
+      fill_in 'Auto devops domain', with: 'domain.com'
+      click_button 'Save changes'
+    end

     expect(Gitlab::CurrentSettings.auto_devops_enabled?).to be true
     expect(Gitlab::CurrentSettings.auto_devops_domain).to eq('domain.com')
     expect(page).to have_content "Application settings saved successfully"
   end

+  scenario 'Change Influx settings' do
+    page.within('.as-influx') do
+      check 'Enable InfluxDB Metrics'
+      click_button 'Save changes'
+    end
+
+    expect(Gitlab::CurrentSettings.metrics_enabled?).to be true
+    expect(page).to have_content "Application settings saved successfully"
+  end
+
+  scenario 'Change Prometheus settings' do
+    page.within('.as-prometheus') do
+      check 'Enable Prometheus Metrics'
+      click_button 'Save changes'
+    end
+
+    expect(Gitlab::CurrentSettings.prometheus_metrics_enabled?).to be true
+    expect(page).to have_content "Application settings saved successfully"
+  end
+
   scenario 'Change Slack Notifications Service template settings' do
     first(:link, 'Service Templates').click
     click_link 'Slack notifications'
@@ -83,18 +146,22 @@ feature 'Admin updates settings' do

   context 'sign-in restrictions', :js do
     it 'de-activates oauth sign-in source' do
-      find('input#application_setting_enabled_oauth_sign_in_sources_[value=gitlab]').send_keys(:return)
+      page.within('.as-signin') do
+        find('input#application_setting_enabled_oauth_sign_in_sources_[value=gitlab]').send_keys(:return)

-      expect(find('.btn', text: 'GitLab.com')).not_to have_css('.active')
+        expect(find('.btn', text: 'GitLab.com')).not_to have_css('.active')
+      end
     end
   end

   scenario 'Change Keys settings' do
-    select 'Are forbidden', from: 'RSA SSH keys'
-    select 'Are allowed', from: 'DSA SSH keys'
-    select 'Must be at least 384 bits', from: 'ECDSA SSH keys'
-    select 'Are forbidden', from: 'ED25519 SSH keys'
-    click_on 'Save'
+    page.within('.as-visibility-access') do
+      select 'Are forbidden', from: 'RSA SSH keys'
+      select 'Are allowed', from: 'DSA SSH keys'
+      select 'Must be at least 384 bits', from: 'ECDSA SSH keys'
+      select 'Are forbidden', from: 'ED25519 SSH keys'
+      click_on 'Save changes'
+    end

     forbidden = ApplicationSetting::FORBIDDEN_KEY_VALUE.to_s
diff --git a/spec/features/groups/activity_spec.rb b/spec/features/groups/activity_spec.rb
index d3b25ec3d6c..7bc809b3104 100644
--- a/spec/features/groups/activity_spec.rb
+++ b/spec/features/groups/activity_spec.rb
@@ -8,11 +8,30 @@ feature 'Group activity page' do
   context 'when signed in' do
     before do
       sign_in(user)
-      visit path
     end

-    it_behaves_like "it has an RSS button with current_user's RSS token"
-    it_behaves_like "an autodiscoverable RSS feed with current_user's RSS token"
+    describe 'RSS' do
+      before do
+        visit path
+      end
+
+      it_behaves_like "it has an RSS button with current_user's RSS token"
+      it_behaves_like "an autodiscoverable RSS feed with current_user's RSS token"
+    end
+
+    context 'when project is in the group', :js do
+      let(:project) { create(:project, :public, namespace: group) }
+
+      before do
+        project.add_master(user)
+
+        visit path
+      end
+
+      it 'renders user joined to project event' do
+        expect(page).to have_content 'joined project'
+      end
+    end
   end

   context 'when signed out' do
diff --git a/spec/features/groups/group_settings_spec.rb b/spec/features/groups/group_settings_spec.rb
index b83bad3befb..1ce30015e81 100644
--- a/spec/features/groups/group_settings_spec.rb
+++ b/spec/features/groups/group_settings_spec.rb
@@ -76,6 +76,27 @@ feature 'Edit group settings' do
       end
     end
   end
+
+  describe 'edit group avatar' do
+    before do
+      visit edit_group_path(group)
+
+      attach_file(:group_avatar, Rails.root.join('spec', 'fixtures', 'banana_sample.gif'))
+
+      expect { click_button 'Save group' }.to change { group.reload.avatar? }.to(true)
+    end
+
+    it 'uploads new group avatar' do
+      expect(group.avatar).to be_instance_of AvatarUploader
+      expect(group.avatar.url).to eq "/uploads/-/system/group/avatar/#{group.id}/banana_sample.gif"
+      expect(page).to have_link('Remove avatar')
+    end
+
+    it 'removes group avatar' do
+      expect { click_link 'Remove avatar' }.to change { group.reload.avatar? }.to(false)
+      expect(page).not_to have_link('Remove avatar')
+    end
+  end
 end

 def update_path(new_group_path)
diff --git a/spec/features/groups/issues_spec.rb b/spec/features/groups/issues_spec.rb
index 450bc0ff8cf..90bf7ba49f6 100644
--- a/spec/features/groups/issues_spec.rb
+++ b/spec/features/groups/issues_spec.rb
@@ -3,8 +3,11 @@ require 'spec_helper'
 feature 'Group issues page' do
   include FilteredSearchHelpers

+  let(:group) { create(:group) }
+  let(:project) { create(:project, :public, group: group)}
+  let(:path) { issues_group_path(group) }
+
   context 'with shared examples' do
-    let(:path) { issues_group_path(group) }
     let(:issuable) { create(:issue, project: project, title: "this is my created issuable")}

     include_examples 'project features apply to issuables', Issue
@@ -31,7 +34,6 @@ feature 'Group issues page' do
       let(:access_level) { ProjectFeature::ENABLED }
       let(:user) { user_in_group }
       let(:user2) { user_outside_group }
-      let(:path) { issues_group_path(group) }

       it 'filters by only group users' do
         filtered_search.set('assignee:')
@@ -43,9 +45,7 @@ feature 'Group issues page' do
   end

   context 'issues list', :nested_groups do
-    let(:group) { create(:group)}
     let(:subgroup) { create(:group, parent: group) }
-    let(:project) { create(:project, :public, group: group)}
     let(:subgroup_project) { create(:project, :public, group: subgroup)}
     let!(:issue) { create(:issue, project: project, title: 'root group issue') }
     let!(:subgroup_issue) { create(:issue, project: subgroup_project, title: 'subgroup issue') }
@@ -59,5 +59,17 @@ feature 'Group issues page' do
       expect(page).to have_content('subgroup issue')
     end

+    context 'when project is archived' do
+      before do
+        project.archive!
+      end
+
+      it 'does not render issue' do
+        visit path
+
+        expect(page).not_to have_content issue.title[0..80]
+      end
+    end
   end
 end
diff --git a/spec/features/groups/merge_requests_spec.rb b/spec/features/groups/merge_requests_spec.rb
index 7ce6a61d50c..672ae785c2d 100644
--- a/spec/features/groups/merge_requests_spec.rb
+++ b/spec/features/groups/merge_requests_spec.rb
@@ -5,14 +5,14 @@ feature 'Group merge requests page' do

   let(:path) { merge_requests_group_path(group) }
   let(:issuable) { create(:merge_request, source_project: project, target_project: project, title: 'this is my created issuable') }
+  let(:access_level) { ProjectFeature::ENABLED }
+  let(:user) { user_in_group }

   include_examples 'project features apply to issuables', MergeRequest

   context 'archived issuable' do
     let(:project_archived) { create(:project, :archived, :merge_requests_enabled, :repository, group: group) }
     let(:issuable_archived) { create(:merge_request, source_project: project_archived, target_project: project_archived, title: 'issuable of an archived project') }
-    let(:access_level) { ProjectFeature::ENABLED }
-    let(:user) { user_in_group }

     before do
       issuable_archived
@@ -36,9 +36,17 @@ feature 'Group merge requests page' do
     end
   end

+  context 'when merge request assignee to user' do
+    before do
+      issuable.update!(assignee: user)
+
+      visit path
+    end
+
+    it { expect(page).to have_content issuable.title[0..80] }
+  end
+
   context 'group filtered search', :js do
-    let(:access_level) { ProjectFeature::ENABLED }
-    let(:user) { user_in_group }
     let(:user2) { user_outside_group }

     it 'filters by assignee only group users' do
diff --git a/spec/features/groups/show_spec.rb b/spec/features/groups/show_spec.rb
index ceccc471405..4ffadbbcd35 100644
--- a/spec/features/groups/show_spec.rb
+++ b/spec/features/groups/show_spec.rb
@@ -15,14 +15,44 @@ feature 'Group show page' do
     end

     it_behaves_like "an autodiscoverable RSS feed with current_user's RSS token"
+
+    context 'when group does not exist' do
+      let(:path) { group_path('not-exist') }
+
+      it { expect(status_code).to eq(404) }
+    end
   end

   context 'when signed out' do
-    before do
-      visit path
+    describe 'RSS' do
+      before do
+        visit path
+      end
+
+      it_behaves_like "an autodiscoverable RSS feed without an RSS token"
+    end
+
+    context 'when group has a public project', :js do
+      let!(:project) { create(:project, :public, namespace: group) }
+
+      it 'renders public project' do
+        visit path
+
+        expect(page).to have_link group.name
+        expect(page).to have_link project.name
+      end
     end

-    it_behaves_like "an autodiscoverable RSS feed without an RSS token"
+    context 'when group has a private project', :js do
+      let!(:project) { create(:project, :private, namespace: group) }
+
+      it 'does not render private project' do
+        visit path
+
+        expect(page).to have_link group.name
+        expect(page).not_to have_link project.name
+      end
+    end
   end

   context 'subgroup support' do
diff --git a/spec/features/groups/user_browse_projects_group_page_spec.rb b/spec/features/groups/user_browse_projects_group_page_spec.rb
new file mode 100644
index 00000000000..e81c3180e78
--- /dev/null
+++ b/spec/features/groups/user_browse_projects_group_page_spec.rb
@@ -0,0 +1,29 @@
+require 'rails_helper'
+
+describe 'User browse group projects page' do
+  let(:user) { create :user }
+  let(:group) { create :group }
+
+  context 'when user is owner' do
+    before do
+      group.add_owner(user)
+    end
+
+    context 'when user signed in' do
+      before do
+        sign_in(user)
+      end
+
+      context 'when group has archived project', :js do
+        let!(:project) { create :project, :archived, namespace: group }
+
+        it 'renders projects list' do
+          visit projects_group_path(group)
+
+          expect(page).to have_link project.name
+          expect(page).to have_xpath("//span[@class='label label-warning']", text: 'archived')
+        end
+      end
+    end
+  end
+end
diff --git a/spec/features/ci_lint_spec.rb b/spec/features/projects/ci/lint_spec.rb
index 220b934154e..313950072e7 100644
--- a/spec/features/ci_lint_spec.rb
+++ b/spec/features/projects/ci/lint_spec.rb
@@ -1,10 +1,14 @@
 require 'spec_helper'

 describe 'CI Lint', :js do
+  let(:project) { create(:project, :repository) }
+  let(:user) { create(:user) }
+
   before do
-    sign_in(create(:user))
+    project.add_developer(user)
+    sign_in(user)

-    visit ci_lint_path
+    visit project_ci_lint_path(project)
     find('#ci-editor')
     execute_script("ace.edit('ci-editor').setValue(#{yaml_content.to_json});")
diff --git a/spec/features/projects/hook_logs/user_reads_log_spec.rb b/spec/features/projects/hook_logs/user_reads_log_spec.rb
new file mode 100644
index 00000000000..18e975fa653
--- /dev/null
+++ b/spec/features/projects/hook_logs/user_reads_log_spec.rb
@@ -0,0 +1,21 @@
+require 'spec_helper'
+
+feature 'Hook logs' do
+  given(:web_hook_log) { create(:web_hook_log, response_body: '<script>') }
+  given(:project) { web_hook_log.web_hook.project }
+  given(:user) { create(:user) }
+
+  before do
+    project.add_master(user)
+
+    sign_in(user)
+  end
+
+  scenario 'user reads log without getting XSS' do
+    visit(
+      project_hook_hook_log_path(
+        project, web_hook_log.web_hook, web_hook_log))
+
+    expect(page).to have_content('<script>')
+  end
+end
diff --git a/spec/features/projects/pipeline_schedules_spec.rb b/spec/features/projects/pipeline_schedules_spec.rb
index 65e24862d43..065d00d51d4 100644
--- a/spec/features/projects/pipeline_schedules_spec.rb
+++ b/spec/features/projects/pipeline_schedules_spec.rb
@@ -160,9 +160,9 @@ feature 'Pipeline Schedules', :js do
       click_link 'New schedule'
       fill_in_schedule_form
       all('[name="schedule[variables_attributes][][key]"]')[0].set('AAA')
-      all('[name="schedule[variables_attributes][][value]"]')[0].set('AAA123')
+      all('[name="schedule[variables_attributes][][secret_value]"]')[0].set('AAA123')
       all('[name="schedule[variables_attributes][][key]"]')[1].set('BBB')
-      all('[name="schedule[variables_attributes][][value]"]')[1].set('BBB123')
+      all('[name="schedule[variables_attributes][][secret_value]"]')[1].set('BBB123')
       save_pipeline_schedule
     end
diff --git a/spec/finders/clusters_finder_spec.rb b/spec/finders/clusters_finder_spec.rb
index c10efac2432..da529e0670f 100644
--- a/spec/finders/clusters_finder_spec.rb
+++ b/spec/finders/clusters_finder_spec.rb
@@ -6,7 +6,7 @@ describe ClustersFinder do

   describe '#execute' do
     let(:enabled_cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
-    let(:disabled_cluster) { create(:cluster, :disabled, :provided_by_gcp, projects: [project]) }
+    let(:disabled_cluster) { create(:cluster, :disabled, :provided_by_gcp, :production_environment, projects: [project]) }

     subject { described_class.new(project, user, scope).execute }
diff --git a/spec/helpers/page_layout_helper_spec.rb b/spec/helpers/page_layout_helper_spec.rb
index baf927a9acc..b77114a8152 100644
--- a/spec/helpers/page_layout_helper_spec.rb
+++ b/spec/helpers/page_layout_helper_spec.rb
@@ -50,6 +50,11 @@ describe PageLayoutHelper do
       allow(Rails).to receive(:env).and_return(ActiveSupport::StringInquirer.new('development'))

       expect(helper.favicon).to eq 'favicon-blue.ico'
     end
+
+    it 'has yellow favicon for canary' do
+      stub_env('CANARY', 'true')
+      expect(helper.favicon).to eq 'favicon-yellow.ico'
+    end
   end

   describe 'page_image' do
diff --git a/spec/helpers/preferences_helper_spec.rb b/spec/helpers/preferences_helper_spec.rb
index e2a0c4322ff..c9d2ec8a4ae 100644
--- a/spec/helpers/preferences_helper_spec.rb
+++ b/spec/helpers/preferences_helper_spec.rb
@@ -21,7 +21,9 @@ describe PreferencesHelper do
         ["Your Projects' Activity", 'project_activity'],
         ["Starred Projects' Activity", 'starred_project_activity'],
         ["Your Groups", 'groups'],
-        ["Your Todos", 'todos']
+        ["Your Todos", 'todos'],
+        ["Assigned Issues", 'issues'],
+        ["Assigned Merge Requests", 'merge_requests']
       ]
     end
   end
diff --git a/spec/initializers/fog_google_https_private_urls_spec.rb b/spec/initializers/fog_google_https_private_urls_spec.rb
new file mode 100644
index 00000000000..de3c157ab7b
--- /dev/null
+++ b/spec/initializers/fog_google_https_private_urls_spec.rb
@@ -0,0 +1,24 @@
+require 'spec_helper'
+
+describe 'Fog::Storage::GoogleXML::File' do
+  let(:storage) do
+    Fog.mock!
+    Fog::Storage.new({
+                       google_storage_access_key_id: "asdf",
+                       google_storage_secret_access_key: "asdf",
+                       provider: "Google"
+                     })
+  end
+
+  let(:file) do
+    directory = storage.directories.create(key: 'data')
+    directory.files.create(
+      body: 'Hello World!',
+      key: 'hello_world.txt'
+    )
+  end
+
+  it 'delegates to #get_https_url' do
+    expect(file.url(Time.now)).to start_with("https://")
+  end
+end
diff --git a/spec/javascripts/ci_variable_list/native_form_variable_list_spec.js b/spec/javascripts/ci_variable_list/native_form_variable_list_spec.js
index 1ea8d86cb7e..94a0c999d66 100644
--- a/spec/javascripts/ci_variable_list/native_form_variable_list_spec.js
+++ b/spec/javascripts/ci_variable_list/native_form_variable_list_spec.js
@@ -20,7 +20,7 @@ describe('NativeFormVariableList', () => {
     it('should clear out the `name` attribute on the inputs for the last empty row on form submission (avoid BE validation)', () => {
       const $row = $wrapper.find('.js-row');
       expect($row.find('.js-ci-variable-input-key').attr('name')).toBe('schedule[variables_attributes][][key]');
-      expect($row.find('.js-ci-variable-input-value').attr('name')).toBe('schedule[variables_attributes][][value]');
+      expect($row.find('.js-ci-variable-input-value').attr('name')).toBe('schedule[variables_attributes][][secret_value]');

       $wrapper.closest('form').trigger('trigger-submit');
diff --git a/spec/javascripts/fixtures/gl_dropdown.html.haml b/spec/javascripts/fixtures/gl_dropdown.html.haml
index a20390c08ee..43d57c2c4dc 100644
--- a/spec/javascripts/fixtures/gl_dropdown.html.haml
+++ b/spec/javascripts/fixtures/gl_dropdown.html.haml
@@ -1,7 +1,8 @@
 %div
   .dropdown.inline
     %button#js-project-dropdown.dropdown-menu-toggle{type: 'button', data: {toggle: 'dropdown'}}
-      Projects
+      .dropdown-toggle-text
+        Projects
       %i.fa.fa-chevron-down.dropdown-toggle-caret.js-projects-dropdown-toggle
     .dropdown-menu.dropdown-select.dropdown-menu-selectable
       .dropdown-title
diff --git a/spec/javascripts/fixtures/projects.rb b/spec/javascripts/fixtures/projects.rb
index b344b389241..e8865b04874 100644
--- a/spec/javascripts/fixtures/projects.rb
+++ b/spec/javascripts/fixtures/projects.rb
@@ -17,8 +17,6 @@ describe 'Projects (JavaScript fixtures)', type: :controller do
   end

   before do
-    # EE-specific start
-    # EE specific end
     project.add_master(admin)
     sign_in(admin)
   end
diff --git a/spec/javascripts/gfm_auto_complete_spec.js b/spec/javascripts/gfm_auto_complete_spec.js
index dc0a5bc275c..1cb20a1e7ff 100644
--- a/spec/javascripts/gfm_auto_complete_spec.js
+++ b/spec/javascripts/gfm_auto_complete_spec.js
@@ -81,13 +81,21 @@ describe('GfmAutoComplete', function () {
     });

     it('should quote if value contains any non-alphanumeric characters', () => {
-      expect(beforeInsert(atwhoInstance, '~label-20')).toBe('~"label-20"');
+      expect(beforeInsert(atwhoInstance, '~label-20')).toBe('~"label\\-20"');
       expect(beforeInsert(atwhoInstance, '~label 20')).toBe('~"label 20"');
     });

     it('should quote integer labels', () => {
       expect(beforeInsert(atwhoInstance, '~1234')).toBe('~"1234"');
     });
+
+    it('should escape Markdown emphasis characters, except in the first character', () => {
+      expect(beforeInsert(atwhoInstance, '@_group')).toEqual('@\\_group');
+      expect(beforeInsert(atwhoInstance, '~_bug')).toEqual('~\\_bug');
+      expect(beforeInsert(atwhoInstance, '~a `bug`')).toEqual('~"a \\`bug\\`"');
+      expect(beforeInsert(atwhoInstance, '~a ~bug')).toEqual('~"a \\~bug"');
+      expect(beforeInsert(atwhoInstance, '~a **bug')).toEqual('~"a \\*\\*bug"');
+    });
   });

   describe('DefaultOptions.matcher', function () {
diff --git a/spec/javascripts/gl_dropdown_spec.js b/spec/javascripts/gl_dropdown_spec.js
index 0e4a7017406..5393502196e 100644
--- a/spec/javascripts/gl_dropdown_spec.js
+++ b/spec/javascripts/gl_dropdown_spec.js
@@ -256,4 +256,29 @@ describe('glDropdown', function describeDropdown() {
       });
     });
   });
+
+  it('should keep selected item after selecting a second time', () => {
+    const options = {
+      isSelectable(item, $el) {
+        return !$el.hasClass('is-active');
+      },
+      toggleLabel(item) {
+        return item && item.id;
+      },
+    };
+    initDropDown.call(this, false, false, options);
+    const $item = $(`${ITEM_SELECTOR}:first() a`, this.$dropdownMenuElement);
+
+    // select item the first time
+    this.dropdownButtonElement.click();
+    $item.click();
+    expect($item).toHaveClass('is-active');
+    // select item the second time
+    this.dropdownButtonElement.click();
+    $item.click();
+    expect($item).toHaveClass('is-active');
+
+    expect($('.dropdown-toggle-text')).toHaveText(this.projectsData[0].id.toString());
+  });
 });
+
diff --git a/spec/javascripts/helpers/vue_component_helper.js b/spec/javascripts/helpers/vue_component_helper.js
new file mode 100644
index 00000000000..257c9f5526a
--- /dev/null
+++ b/spec/javascripts/helpers/vue_component_helper.js
@@ -0,0 +1,3 @@
+export default function removeBreakLine (data) {
+  return data.replace(/\r?\n|\r/g, ' ');
+}
diff --git a/spec/javascripts/helpers/vuex_action_helper.js b/spec/javascripts/helpers/vuex_action_helper.js
index 2d386fe1da5..83f29d1b0c2 100644
--- a/spec/javascripts/helpers/vuex_action_helper.js
+++ b/spec/javascripts/helpers/vuex_action_helper.js
@@ -1,37 +1,71 @@
-/* eslint-disable */
-
 /**
- * helper for testing action with expected mutations
+ * helper for testing action with expected mutations inspired in
  * https://vuex.vuejs.org/en/testing.html
+ *
+ * @example
+ * testAction(
+ *   actions.actionName, // action
+ *   { }, // mocked response
+ *   state, // state
+ *   [
+ *     { type: types.MUTATION}
+ *     { type: types.MUTATION_1, payload: {}}
+ *   ], // mutations
+ *   [
+ *     { type: 'actionName', payload: {}},
+ *     { type: 'actionName1', payload: {}}
+ *   ] //actions
+ *   done,
+ * );
  */
-export default (action, payload, state, expectedMutations, done) => {
-  let count = 0;
+export default (action, payload, state, expectedMutations, expectedActions, done) => {
+  let mutationsCount = 0;
+  let actionsCount = 0;

   // mock commit
-  const commit = (type, payload) => {
-    const mutation = expectedMutations[count];
-
-    try {
-      expect(mutation.type).to.equal(type);
-      if (payload) {
-        expect(mutation.payload).to.deep.equal(payload);
-      }
-    } catch (error) {
-      done(error);
+  const commit = (type, mutationPayload) => {
+    const mutation = expectedMutations[mutationsCount];
+
+    expect(mutation.type).toEqual(type);
+
+    if (mutation.payload) {
+      expect(mutation.payload).toEqual(mutationPayload);
     }

-    count++;
-    if (count >= expectedMutations.length) {
+    mutationsCount += 1;
+    if (mutationsCount >= expectedMutations.length) {
+      done();
+    }
+  };
+
+  // mock dispatch
+  const dispatch = (type, actionPayload) => {
+    const actionExpected = expectedActions[actionsCount];
+
+    expect(actionExpected.type).toEqual(type);
+
+    if (actionExpected.payload) {
+      expect(actionExpected.payload).toEqual(actionPayload);
+    }
+
+    actionsCount += 1;
+    if (actionsCount >= expectedActions.length) {
       done();
     }
   };

   // call the action with mocked store and arguments
-  action({ commit, state }, payload);
+  action({ commit, state, dispatch }, payload);

   // check if no mutations should have been dispatched
   if (expectedMutations.length === 0) {
-    expect(count).to.equal(0);
+    expect(mutationsCount).toEqual(0);
+    done();
+  }
+
+  // check if no mutations should have been dispatched
+  if (expectedActions.length === 0) {
+    expect(actionsCount).toEqual(0);
     done();
   }
 };
diff --git a/spec/javascripts/ide/lib/editor_spec.js b/spec/javascripts/ide/lib/editor_spec.js
index d6df35c90e8..2ccd87de1a7 100644
--- a/spec/javascripts/ide/lib/editor_spec.js
+++ b/spec/javascripts/ide/lib/editor_spec.js
@@ -64,18 +64,20 @@ describe('Multi-file editor library', () => {

     instance.createDiffInstance(holder);

-    expect(instance.monaco.editor.createDiffEditor).toHaveBeenCalledWith(
-      holder,
-      {
-        model: null,
-        contextmenu: true,
-        minimap: {
-          enabled: false,
-        },
-        readOnly: true,
-        scrollBeyondLastLine: false,
+    expect(instance.monaco.editor.createDiffEditor).toHaveBeenCalledWith(holder, {
+      model: null,
+      contextmenu: true,
+      minimap: {
+        enabled: false,
       },
-    );
+      readOnly: true,
+      scrollBeyondLastLine: false,
+      quickSuggestions: false,
+      occurrencesHighlight: false,
+      renderLineHighlight: 'none',
+      hideCursorInOverviewRuler: true,
+      wordWrap: 'bounded',
+    });
   });
 });

@@ -113,9 +115,7 @@ describe('Multi-file editor library', () => {
   });

   it('sets original & modified when diff editor', () => {
-    spyOn(instance.instance, 'getEditorType').and.returnValue(
-      'vs.editor.IDiffEditor',
-    );
+    spyOn(instance.instance, 'getEditorType').and.returnValue('vs.editor.IDiffEditor');
     spyOn(instance.instance, 'setModel');

     instance.attachModel(model);
@@ -131,9 +131,7 @@ describe('Multi-file editor library', () => {

     instance.attachModel(model);

-    expect(instance.dirtyDiffController.attachModel).toHaveBeenCalledWith(
-      model,
-    );
+    expect(instance.dirtyDiffController.attachModel).toHaveBeenCalledWith(model);
   });

   it('re-decorates with the dirty diff controller', () => {
@@ -141,9 +139,7 @@ describe('Multi-file editor library', () => {

     instance.attachModel(model);

-    expect(instance.dirtyDiffController.reDecorate).toHaveBeenCalledWith(
-      model,
-    );
+    expect(instance.dirtyDiffController.reDecorate).toHaveBeenCalledWith(model);
   });
 });
diff --git a/spec/javascripts/jobs/mock_data.js b/spec/javascripts/jobs/mock_data.js
index 43589d54be4..25ca8eb6c0b 100644
--- a/spec/javascripts/jobs/mock_data.js
+++ b/spec/javascripts/jobs/mock_data.js
@@ -115,6 +115,10 @@ export default {
       commit_path: '/root/ci-mock/commit/c58647773a6b5faf066d4ad6ff2c9fbba5f180f6',
     },
   },
+  metadata: {
+    timeout_human_readable: '1m 40s',
+    timeout_source: 'runner',
+  },
   merge_request: {
     iid: 2,
     path: '/root/ci-mock/merge_requests/2',
diff --git a/spec/javascripts/jobs/sidebar_detail_row_spec.js b/spec/javascripts/jobs/sidebar_detail_row_spec.js
index 3ac65709c4a..e6bfb0c4adc 100644
--- a/spec/javascripts/jobs/sidebar_detail_row_spec.js
+++ b/spec/javascripts/jobs/sidebar_detail_row_spec.js
@@ -37,4 +37,25 @@ describe('Sidebar detail row', () => {
       vm.$el.textContent.replace(/\s+/g, ' ').trim(),
     ).toEqual('this is the title: this is the value');
   });
+
+  describe('when helpUrl not provided', () => {
+    it('should not render help', () => {
+      expect(vm.$el.querySelector('.help-button')).toBeNull();
+    });
+  });
+
+  describe('when helpUrl provided', () => {
+    beforeEach(() => {
+      vm = new SidebarDetailRow({
+        propsData: {
+          helpUrl: 'help url',
+          value: 'foo',
+        },
+      }).$mount();
+    });
+
+    it('should render help', () => {
+      expect(vm.$el.querySelector('.help-button a').getAttribute('href')).toEqual('help url');
+    });
+  });
 });
diff --git a/spec/javascripts/jobs/sidebar_details_block_spec.js b/spec/javascripts/jobs/sidebar_details_block_spec.js
index 95532ef5382..602dae514b1 100644
--- a/spec/javascripts/jobs/sidebar_details_block_spec.js
+++ b/spec/javascripts/jobs/sidebar_details_block_spec.js
@@ -96,6 +96,12 @@ describe('Sidebar details block', () => {
       ).toEqual('Runner: #1');
     });

+    it('should render timeout information', () => {
+      expect(
+        trimWhitespace(vm.$el.querySelector('.js-job-timeout')),
+      ).toEqual('Timeout: 1m 40s (from runner)');
+    });
+
     it('should render coverage', () => {
       expect(
         trimWhitespace(vm.$el.querySelector('.js-job-coverage')),
diff --git a/spec/javascripts/notes/components/noteable_discussion_spec.js b/spec/javascripts/notes/components/noteable_discussion_spec.js
index 19504e4f7c8..cda550760fe 100644
--- a/spec/javascripts/notes/components/noteable_discussion_spec.js
+++ b/spec/javascripts/notes/components/noteable_discussion_spec.js
@@ -25,26 +25,34 @@ describe('issue_discussion component', () => {
   });

   it('should render user avatar', () => {
-    expect(vm.$el.querySelector('.user-avatar-link')).toBeDefined();
+    expect(vm.$el.querySelector('.user-avatar-link')).not.toBeNull();
   });

   it('should render discussion header', () => {
-    expect(vm.$el.querySelector('.discussion-header')).toBeDefined();
+    expect(vm.$el.querySelector('.discussion-header')).not.toBeNull();
     expect(vm.$el.querySelector('.notes').children.length).toEqual(discussionMock.notes.length);
   });

   describe('actions', () => {
     it('should render reply button', () => {
-      expect(vm.$el.querySelector('.js-vue-discussion-reply').textContent.trim()).toEqual('Reply...');
+      expect(vm.$el.querySelector('.js-vue-discussion-reply').textContent.trim()).toEqual(
+        'Reply...',
+      );
     });

-    it('should toggle reply form', (done) => {
+    it('should toggle reply form', done => {
       vm.$el.querySelector('.js-vue-discussion-reply').click();

       Vue.nextTick(() => {
-        expect(vm.$refs.noteForm).toBeDefined();
+        expect(vm.$refs.noteForm).not.toBeNull();
         expect(vm.isReplying).toEqual(true);
         done();
       });
     });
+
+    it('does not render jump to discussion button', () => {
+      expect(
+        vm.$el.querySelector('*[data-original-title="Jump to next unresolved discussion"]'),
+      ).toBeNull();
+    });
   });
 });
diff --git a/spec/javascripts/notes/stores/actions_spec.js b/spec/javascripts/notes/stores/actions_spec.js
index 91249b2c79e..520a25cc5c6 100644
--- a/spec/javascripts/notes/stores/actions_spec.js
+++ b/spec/javascripts/notes/stores/actions_spec.js
@@ -5,7 +5,13 @@ import * as actions from '~/notes/stores/actions';
 import store from '~/notes/stores';
 import testAction from '../../helpers/vuex_action_helper';
 import { resetStore } from '../helpers';
-import { discussionMock, notesDataMock, userDataMock, noteableDataMock, individualNote } from '../mock_data';
+import {
+  discussionMock,
+  notesDataMock,
+  userDataMock,
+  noteableDataMock,
+  individualNote,
+} from '../mock_data';

 describe('Actions Notes Store', () => {
   afterEach(() => {
@@ -13,66 +19,103 @@ describe('Actions Notes Store', () => {
   });

   describe('setNotesData', () => {
-    it('should set received notes data', (done) => {
-      testAction(actions.setNotesData, null, { notesData: {} }, [
-        { type: 'SET_NOTES_DATA', payload: notesDataMock },
-      ], done);
+    it('should set received notes data', done => {
+      testAction(
+        actions.setNotesData,
+        notesDataMock,
+        { notesData: {} },
+        [{ type: 'SET_NOTES_DATA', payload: notesDataMock }],
+        [],
+        done,
+      );
     });
   });

   describe('setNoteableData', () => {
-    it('should set received issue data', (done) => {
-      testAction(actions.setNoteableData, null, { noteableData: {} }, [
-        { type: 'SET_NOTEABLE_DATA', payload: noteableDataMock },
-      ], done);
+    it('should set received issue data', done => {
+      testAction(
+        actions.setNoteableData,
+        noteableDataMock,
+        { noteableData: {} },
+        [{ type: 'SET_NOTEABLE_DATA', payload: noteableDataMock }],
+        [],
+        done,
+      );
     });
   });

   describe('setUserData', () => {
-    it('should set received user data', (done) => {
-      testAction(actions.setUserData, null, { userData: {} }, [
-        { type: 'SET_USER_DATA', payload: userDataMock },
-      ], done);
+    it('should set received user data', done => {
+      testAction(
+        actions.setUserData,
+        userDataMock,
+        { userData: {} },
+        [{ type: 'SET_USER_DATA', payload: userDataMock }],
+        [],
+        done,
+      );
     });
   });

   describe('setLastFetchedAt', () => {
-    it('should set received timestamp', (done) => {
-      testAction(actions.setLastFetchedAt, null, { lastFetchedAt: {} }, [
-        { type: 'SET_LAST_FETCHED_AT', payload: 'timestamp' },
-      ], done);
+    it('should set received timestamp', done => {
+      testAction(
+        actions.setLastFetchedAt,
+        'timestamp',
+        { lastFetchedAt: {} },
+        [{ type: 'SET_LAST_FETCHED_AT', payload: 'timestamp' }],
+        [],
+        done,
+      );
     });
   });

   describe('setInitialNotes', () => {
-    it('should set initial notes', (done) => {
-      testAction(actions.setInitialNotes, null, { notes: [] }, [
-        { type: 'SET_INITIAL_NOTES', payload: [individualNote] },
-      ], done);
+    it('should set initial notes', done => {
+      testAction(
+        actions.setInitialNotes,
+        [individualNote],
+        { notes: [] },
+        [{ type: 'SET_INITIAL_NOTES', payload: [individualNote] }],
+        [],
+        done,
+      );
     });
   });

   describe('setTargetNoteHash', () => {
-    it('should set target note hash', (done) => {
-      testAction(actions.setTargetNoteHash, null, { notes: [] }, [
-        { type: 'SET_TARGET_NOTE_HASH', payload: 'hash' },
-      ], done);
+    it('should set target note hash', done => {
+      testAction(
+        actions.setTargetNoteHash,
+        'hash',
+        { notes: [] },
+        [{ type: 'SET_TARGET_NOTE_HASH', payload: 'hash' }],
+        [],
+        done,
+      );
     });
   });

   describe('toggleDiscussion', () => {
-    it('should toggle discussion', (done) => {
-      testAction(actions.toggleDiscussion, null, { notes: [discussionMock] }, [
-        { type: 'TOGGLE_DISCUSSION', payload: { discussionId: discussionMock.id } },
-      ], done);
+    it('should toggle discussion', done => {
+      testAction(
+        actions.toggleDiscussion,
+        { discussionId: discussionMock.id },
+        { notes: [discussionMock] },
+        [{ type: 'TOGGLE_DISCUSSION', payload: { discussionId: discussionMock.id } }],
+        [],
+        done,
+      );
     });
   });

   describe('async methods', () => {
     const interceptor = (request, next) => {
-      next(request.respondWith(JSON.stringify({}), {
-        status: 200,
-      }));
+      next(
+        request.respondWith(JSON.stringify({}), {
+          status: 200,
+        }),
+      );
     };

     beforeEach(() => {
@@ -84,8 +127,9 @@ describe('Actions Notes Store', () => {
     });

     describe('closeIssue', () => {
-      it('sets state as closed', (done) => {
-        store.dispatch('closeIssue', { notesData: { closeIssuePath: '' } })
+      it('sets state as closed', done => {
+        store
+          .dispatch('closeIssue', { notesData: { closeIssuePath: '' } })
           .then(() => {
             expect(store.state.noteableData.state).toEqual('closed');
             expect(store.state.isToggleStateButtonLoading).toEqual(false);
@@ -96,8 +140,9 @@ describe('Actions Notes Store', () => {
     });

     describe('reopenIssue', () => {
-      it('sets state as reopened', (done) => {
-        store.dispatch('reopenIssue', { notesData: { reopenIssuePath: '' } })
+      it('sets state as reopened', done => {
+        store
+          .dispatch('reopenIssue', { notesData: { reopenIssuePath: '' } })
          .then(() => {
            expect(store.state.noteableData.state).toEqual('reopened');
            expect(store.state.isToggleStateButtonLoading).toEqual(false);
@@ -110,7 +155,7 @@ describe('Actions Notes Store', () => {

   describe('emitStateChangedEvent', () => {
     it('emits an event on the document', () => {
-      document.addEventListener('issuable_vue_app:change', (event) => {
+      document.addEventListener('issuable_vue_app:change', event => {
         expect(event.detail.data).toEqual({ id: '1', state: 'closed' });
         expect(event.detail.isClosed).toEqual(false);
       });
@@ -120,40 +165,47 @@ describe('Actions Notes Store', () => {
   });

   describe('toggleStateButtonLoading', () => {
-    it('should set loading as true', (done) => {
-      testAction(actions.toggleStateButtonLoading, true, {}, [
-        { type: 'TOGGLE_STATE_BUTTON_LOADING', payload: true },
-      ], done);
+    it('should set loading as true', done => {
+      testAction(
+        actions.toggleStateButtonLoading,
+        true,
+        {},
+        [{ type: 'TOGGLE_STATE_BUTTON_LOADING', payload: true }],
+        [],
+        done,
+      );
     });

-    it('should set loading as false', (done) => {
-      testAction(actions.toggleStateButtonLoading, false, {}, [
-        { type: 'TOGGLE_STATE_BUTTON_LOADING', payload: false },
-      ], done);
+    it('should set loading as false', done => {
+      testAction(
+        actions.toggleStateButtonLoading,
+        false,
+        {},
+        [{ type: 'TOGGLE_STATE_BUTTON_LOADING', payload: false }],
+        [],
+        done,
+      );
     });
   });

   describe('toggleIssueLocalState', () => {
-    it('sets issue state as closed', (done) => {
-      testAction(actions.toggleIssueLocalState, 'closed', {}, [
-        { type: 'CLOSE_ISSUE', payload: 'closed' },
-      ], done);
+    it('sets issue state as closed', done => {
+      testAction(actions.toggleIssueLocalState, 'closed', {}, [{ type: 'CLOSE_ISSUE' }], [], done);
     });

-    it('sets issue state as reopened', (done) => {
-      testAction(actions.toggleIssueLocalState, 'reopened', {}, [
-        { type: 'REOPEN_ISSUE', payload: 'reopened' },
-      ], done);
+    it('sets issue state as reopened', done => {
+      testAction(actions.toggleIssueLocalState, 'reopened', {}, [{ type: 'REOPEN_ISSUE' }], [], done);
     });
   });

   describe('poll', () => {
-    beforeEach((done) => {
+    beforeEach(done => {
       jasmine.clock().install();

       spyOn(Vue.http, 'get').and.callThrough();

-      store.dispatch('setNotesData', notesDataMock)
+      store
+        .dispatch('setNotesData', notesDataMock)
         .then(done)
         .catch(done.fail);
     });

@@ -162,23 +214,29 @@ describe('Actions Notes Store', () => {
       jasmine.clock().uninstall();
     });

-    it('calls service with
last fetched state', (done) => { + it('calls service with last fetched state', done => { const interceptor = (request, next) => { - next(request.respondWith(JSON.stringify({ - notes: [], - last_fetched_at: '123456', - }), { - status: 200, - headers: { - 'poll-interval': '1000', - }, - })); + next( + request.respondWith( + JSON.stringify({ + notes: [], + last_fetched_at: '123456', + }), + { + status: 200, + headers: { + 'poll-interval': '1000', + }, + }, + ), + ); }; Vue.http.interceptors.push(interceptor); Vue.http.interceptors.push(headersInterceptor); - store.dispatch('poll') + store + .dispatch('poll') .then(() => new Promise(resolve => requestAnimationFrame(resolve))) .then(() => { expect(Vue.http.get).toHaveBeenCalledWith(jasmine.anything(), { @@ -192,9 +250,12 @@ describe('Actions Notes Store', () => { jasmine.clock().tick(1500); }) - .then(() => new Promise((resolve) => { - requestAnimationFrame(resolve); - })) + .then( + () => + new Promise(resolve => { + requestAnimationFrame(resolve); + }), + ) .then(() => { expect(Vue.http.get.calls.count()).toBe(2); expect(Vue.http.get.calls.mostRecent().args[1].headers).toEqual({ diff --git a/spec/javascripts/pages/labels/components/promote_label_modal_spec.js b/spec/javascripts/pages/labels/components/promote_label_modal_spec.js index 080158a8ee0..a24f8204fe1 100644 --- a/spec/javascripts/pages/labels/components/promote_label_modal_spec.js +++ b/spec/javascripts/pages/labels/components/promote_label_modal_spec.js @@ -12,6 +12,7 @@ describe('Promote label modal', () => { labelColor: '#5cb85c', labelTextColor: '#ffffff', url: `${gl.TEST_HOST}/dummy/promote/labels`, + groupName: 'group', }; describe('Modal title and description', () => { @@ -24,7 +25,7 @@ describe('Promote label modal', () => { }); it('contains the proper description', () => { - expect(vm.text).toContain('Promoting this label will make it available for all projects inside the group'); + expect(vm.text).toContain(`Promoting ${labelMockData.labelTitle} will make it available for all projects inside ${labelMockData.groupName}`); }); it('contains a label span with the color', () => { diff --git a/spec/javascripts/pages/milestones/shared/components/promote_milestone_modal_spec.js b/spec/javascripts/pages/milestones/shared/components/promote_milestone_modal_spec.js index 22956929e7b..8b220423637 100644 --- a/spec/javascripts/pages/milestones/shared/components/promote_milestone_modal_spec.js +++ b/spec/javascripts/pages/milestones/shared/components/promote_milestone_modal_spec.js @@ -10,6 +10,7 @@ describe('Promote milestone modal', () => { const milestoneMockData = { milestoneTitle: 'v1.0', url: `${gl.TEST_HOST}/dummy/promote/milestones`, + groupName: 'group', }; describe('Modal title and description', () => { @@ -22,7 +23,7 @@ describe('Promote milestone modal', () => { }); it('contains the proper description', () => { - expect(vm.text).toContain('Promoting this milestone will make it available for all projects inside the group.'); + expect(vm.text).toContain(`Promoting ${milestoneMockData.milestoneTitle} will make it available for all projects inside ${milestoneMockData.groupName}.`); }); it('contains the correct title', () => { diff --git a/spec/javascripts/registry/stores/actions_spec.js b/spec/javascripts/registry/stores/actions_spec.js index 3c9da4f107b..bc4c444655a 100644 --- a/spec/javascripts/registry/stores/actions_spec.js +++ b/spec/javascripts/registry/stores/actions_spec.js @@ -29,57 +29,96 @@ describe('Actions Registry Store', () => { describe('fetchRepos', () => { 
beforeEach(() => { interceptor = (request, next) => { - next(request.respondWith(JSON.stringify(reposServerResponse), { - status: 200, - })); + next( + request.respondWith(JSON.stringify(reposServerResponse), { + status: 200, + }), + ); }; Vue.http.interceptors.push(interceptor); }); - it('should set receveived repos', (done) => { - testAction(actions.fetchRepos, null, mockedState, [ - { type: types.TOGGLE_MAIN_LOADING }, - { type: types.SET_REPOS_LIST, payload: reposServerResponse }, - ], done); + it('should set received repos', done => { + testAction( + actions.fetchRepos, + null, + mockedState, + [ + { type: types.TOGGLE_MAIN_LOADING }, + { type: types.TOGGLE_MAIN_LOADING }, + { type: types.SET_REPOS_LIST, payload: reposServerResponse }, + ], + [], + done, + ); }); }); describe('fetchList', () => { beforeEach(() => { interceptor = (request, next) => { - next(request.respondWith(JSON.stringify(registryServerResponse), { - status: 200, - })); + next( + request.respondWith(JSON.stringify(registryServerResponse), { + status: 200, + }), + ); }; Vue.http.interceptors.push(interceptor); }); - it('should set received list', (done) => { + it('should set received list', done => { mockedState.repos = parsedReposServerResponse; - testAction(actions.fetchList, { repo: mockedState.repos[1] }, mockedState, [ - { type: types.TOGGLE_REGISTRY_LIST_LOADING }, - { type: types.SET_REGISTRY_LIST, payload: registryServerResponse }, - ], done); + const repo = mockedState.repos[1]; + + testAction( + actions.fetchList, + { repo }, + mockedState, + [ + { type: types.TOGGLE_REGISTRY_LIST_LOADING, payload: repo }, + { type: types.TOGGLE_REGISTRY_LIST_LOADING, payload: repo }, + { + type: types.SET_REGISTRY_LIST, + payload: { + repo, + resp: registryServerResponse, + headers: jasmine.anything(), + }, + }, + ], + [], + done, + ); }); }); }); describe('setMainEndpoint', () => { - it('should commit set main endpoint', (done) => { - testAction(actions.setMainEndpoint, 'endpoint', mockedState, [ - { type: types.SET_MAIN_ENDPOINT, payload: 'endpoint' }, - ], done); + it('should commit set main endpoint', done => { + testAction( + actions.setMainEndpoint, + 'endpoint', + mockedState, + [{ type: types.SET_MAIN_ENDPOINT, payload: 'endpoint' }], + [], + done, + ); }); }); describe('toggleLoading', () => { - it('should commit toggle main loading', (done) => { - testAction(actions.toggleLoading, null, mockedState, [ - { type: types.TOGGLE_MAIN_LOADING }, - ], done); + it('should commit toggle main loading', done => { + testAction( + actions.toggleLoading, + null, + mockedState, + [{ type: types.TOGGLE_MAIN_LOADING }], + [], + done, + ); }); }); }); diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_conflicts_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_conflicts_spec.js index 5323523abc0..fcbd8169bc7 100644 --- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_conflicts_spec.js +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_conflicts_spec.js @@ -1,6 +1,7 @@ import Vue from 'vue'; import conflictsComponent from '~/vue_merge_request_widget/components/states/mr_widget_conflicts.vue'; import mountComponent from 'spec/helpers/vue_mount_component_helper'; +import removeBreakLine from 'spec/helpers/vue_component_helper'; describe('MRWidgetConflicts', () => { let Component; @@ -78,8 +79,9 @@ describe('MRWidgetConflicts', () => { }); it('should tell you to rebase locally', () => { - expect(vm.$el.textContent.trim().replace(/\s\s+/g, ' ')).toContain('Fast-forward
merge is not possible.'); - expect(vm.$el.textContent.trim().replace(/\s\s+/g, ' ')).toContain('To merge this request, first rebase locally'); + expect( + removeBreakLine(vm.$el.textContent).trim(), + ).toContain('Fast-forward merge is not possible. To merge this request, first rebase locally.'); }); }); }); diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js index baacbc03fb1..894dbe3382f 100644 --- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js @@ -1,6 +1,7 @@ import Vue from 'vue'; import pipelineBlockedComponent from '~/vue_merge_request_widget/components/states/mr_widget_pipeline_blocked.vue'; import mountComponent from 'spec/helpers/vue_mount_component_helper'; +import removeBreakLine from 'spec/helpers/vue_component_helper'; describe('MRWidgetPipelineBlocked', () => { let vm; @@ -18,6 +19,8 @@ describe('MRWidgetPipelineBlocked', () => { }); it('renders information text', () => { - expect(vm.$el.textContent.trim().replace(/[\r\n]+/g, ' ')).toContain('Pipeline blocked. The pipeline for this merge request requires a manual action to proceed'); + expect( + removeBreakLine(vm.$el.textContent).trim(), + ).toContain('Pipeline blocked. The pipeline for this merge request requires a manual action to proceed'); }); }); diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_sha_mismatch_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_sha_mismatch_spec.js index 25684861724..b02af94d03a 100644 --- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_sha_mismatch_spec.js +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_sha_mismatch_spec.js @@ -1,17 +1,25 @@ import Vue from 'vue'; import ShaMismatch from '~/vue_merge_request_widget/components/states/sha_mismatch.vue'; +import mountComponent from 'spec/helpers/vue_mount_component_helper'; +import removeBreakLine from 'spec/helpers/vue_component_helper'; describe('ShaMismatch', () => { - describe('template', () => { + let vm; + + beforeEach(() => { const Component = Vue.extend(ShaMismatch); - const vm = new Component({ - el: document.createElement('div'), - }); - it('should have correct elements', () => { - expect(vm.$el.classList.contains('mr-widget-body')).toBeTruthy(); - expect(vm.$el.querySelector('button').getAttribute('disabled')).toBeTruthy(); - expect(vm.$el.innerText).toContain('The source branch HEAD has recently changed.'); - expect(vm.$el.innerText).toContain('Please reload the page and review the changes before merging.'); - }); + vm = mountComponent(Component); + }); + + afterEach(() => { + vm.$destroy(); + }); + + it('should render information message', () => { + expect(vm.$el.querySelector('button').disabled).toEqual(true); + + expect( + removeBreakLine(vm.$el.textContent).trim(), + ).toContain('The source branch HEAD has recently changed. Please reload the page and review the changes before merging'); }); }); diff --git a/spec/lib/backup/manager_spec.rb b/spec/lib/backup/manager_spec.rb index 5100f5737c2..84688845fa5 100644 --- a/spec/lib/backup/manager_spec.rb +++ b/spec/lib/backup/manager_spec.rb @@ -278,6 +278,10 @@ describe Backup::Manager do connection.directories.create(key: Gitlab.config.backup.upload.remote_directory) end + after do + Fog.unmock! 
+ end + context 'target path' do it 'uses the tar filename by default' do expect_any_instance_of(Fog::Collection).to receive(:create) diff --git a/spec/lib/banzai/filter/autolink_filter_spec.rb b/spec/lib/banzai/filter/autolink_filter_spec.rb index cbb0089bde7..a50329473ad 100644 --- a/spec/lib/banzai/filter/autolink_filter_spec.rb +++ b/spec/lib/banzai/filter/autolink_filter_spec.rb @@ -167,6 +167,15 @@ describe Banzai::Filter::AutolinkFilter do expect(actual).to eq(expected_complicated_link) end + it 'does not double-encode HTML entities' do + encoded_link = "#{link}?foo=bar&baz=quux" + expected_encoded_link = %Q{<a href="#{encoded_link}">#{encoded_link}</a>} + actual = unescape(filter(encoded_link).to_html) + + expect(actual).to eq(Rinku.auto_link(encoded_link)) + expect(actual).to eq(expected_encoded_link) + end + it 'does not include trailing HTML entities' do doc = filter("See <<<#{link}>>>") diff --git a/spec/lib/gitlab/asciidoc_spec.rb b/spec/lib/gitlab/asciidoc_spec.rb index 2a0e19ae796..e1782cff81a 100644 --- a/spec/lib/gitlab/asciidoc_spec.rb +++ b/spec/lib/gitlab/asciidoc_spec.rb @@ -48,7 +48,7 @@ module Gitlab }, 'images' => { input: 'image:https://localhost.com/image.png[Alt text" onerror="alert(7)]', - output: "<img src=\"https://localhost.com/image.png\" alt=\"Alt text\">" + output: "<div>\n<p><span><img src=\"https://localhost.com/image.png\" alt='Alt text\" onerror=\"alert(7)'></span></p>\n</div>" }, 'pre' => { input: '```mypre"><script>alert(3)</script>', diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb index f969f9e8e38..18cef8ec996 100644 --- a/spec/lib/gitlab/auth_spec.rb +++ b/spec/lib/gitlab/auth_spec.rb @@ -315,13 +315,19 @@ describe Gitlab::Auth do it "tries to autheticate with db before ldap" do expect(Gitlab::Auth::LDAP::Authentication).not_to receive(:login) - gl_auth.find_with_user_password(username, password) + expect(gl_auth.find_with_user_password(username, password)).to eq(user) + end + + it "does not find user by using ldap as fallback to for authentication" do + expect(Gitlab::Auth::LDAP::Authentication).to receive(:login).and_return(nil) + + expect(gl_auth.find_with_user_password('ldap_user', 'password')).to be_nil end - it "uses ldap as fallback to for authentication" do - expect(Gitlab::Auth::LDAP::Authentication).to receive(:login) + it "find new user by using ldap as fallback to for authentication" do + expect(Gitlab::Auth::LDAP::Authentication).to receive(:login).and_return(user) - gl_auth.find_with_user_password('ldap_user', 'password') + expect(gl_auth.find_with_user_password('ldap_user', 'password')).to eq(user) end end diff --git a/spec/lib/gitlab/background_migration/migrate_build_stage_spec.rb b/spec/lib/gitlab/background_migration/migrate_build_stage_spec.rb index e112e9e9e3d..5ce84c61042 100644 --- a/spec/lib/gitlab/background_migration/migrate_build_stage_spec.rb +++ b/spec/lib/gitlab/background_migration/migrate_build_stage_spec.rb @@ -51,4 +51,20 @@ describe Gitlab::BackgroundMigration::MigrateBuildStage, :migration, schema: 201 expect { described_class.new.perform(1, 6) } .to raise_error ActiveRecord::RecordNotUnique end + + context 'when invalid class can be loaded due to single table inheritance' do + let(:commit_status) do + jobs.create!(id: 7, commit_id: 1, project_id: 123, stage_idx: 4, + stage: 'post-deploy', status: :failed) + end + + before do + commit_status.update_column(:type, 'SomeClass') + end + + it 'does ignore single table inheritance type' do + expect { described_class.new.perform(1, 7) }.not_to 
raise_error + expect(jobs.find(7)).to have_attributes(stage_id: (a_value > 0)) + end + end end diff --git a/spec/lib/gitlab/checks/project_moved_spec.rb b/spec/lib/gitlab/checks/project_moved_spec.rb index e263d29656c..8e9386b1ba1 100644 --- a/spec/lib/gitlab/checks/project_moved_spec.rb +++ b/spec/lib/gitlab/checks/project_moved_spec.rb @@ -44,44 +44,17 @@ describe Gitlab::Checks::ProjectMoved, :clean_gitlab_redis_shared_state do end describe '#message' do - context 'when the push is rejected' do - it 'returns a redirect message telling the user to try again' do - project_moved = described_class.new(project, user, 'http', 'foo/bar') - message = "Project 'foo/bar' was moved to '#{project.full_path}'." + - "\n\nPlease update your Git remote:" + - "\n\n git remote set-url origin #{project.http_url_to_repo} and try again.\n" + it 'returns a redirect message' do + project_moved = described_class.new(project, user, 'http', 'foo/bar') + message = <<~MSG + Project 'foo/bar' was moved to '#{project.full_path}'. - expect(project_moved.message(rejected: true)).to eq(message) - end - end + Please update your Git remote: - context 'when the push is not rejected' do - it 'returns a redirect message' do - project_moved = described_class.new(project, user, 'http', 'foo/bar') - message = "Project 'foo/bar' was moved to '#{project.full_path}'." + - "\n\nPlease update your Git remote:" + - "\n\n git remote set-url origin #{project.http_url_to_repo}\n" + git remote set-url origin #{project.http_url_to_repo} + MSG - expect(project_moved.message).to eq(message) - end - end - end - - describe '#permanent_redirect?' do - context 'with a permanent RedirectRoute' do - it 'returns true' do - project.route.create_redirect('foo/bar', permanent: true) - project_moved = described_class.new(project, user, 'http', 'foo/bar') - expect(project_moved.permanent_redirect?).to be_truthy - end - end - - context 'without a permanent RedirectRoute' do - it 'returns false' do - project.route.create_redirect('foo/bar') - project_moved = described_class.new(project, user, 'http', 'foo/bar') - expect(project_moved.permanent_redirect?).to be_falsy - end + expect(project_moved.message).to eq(message) end end end diff --git a/spec/lib/gitlab/ci/build/step_spec.rb b/spec/lib/gitlab/ci/build/step_spec.rb index 5a21282712a..cce4efaa069 100644 --- a/spec/lib/gitlab/ci/build/step_spec.rb +++ b/spec/lib/gitlab/ci/build/step_spec.rb @@ -5,10 +5,14 @@ describe Gitlab::Ci::Build::Step do shared_examples 'has correct script' do subject { described_class.from_commands(job) } + before do + job.run! + end + it 'fabricates an object' do expect(subject.name).to eq(:script) expect(subject.script).to eq(script) - expect(subject.timeout).to eq(job.timeout) + expect(subject.timeout).to eq(job.metadata_timeout) expect(subject.when).to eq('on_success') expect(subject.allow_failure).to be_falsey end @@ -47,6 +51,10 @@ describe Gitlab::Ci::Build::Step do subject { described_class.from_after_script(job) } + before do + job.run! 
+ end + context 'when after_script is empty' do it 'doesn not fabricate an object' do is_expected.to be_nil @@ -59,7 +67,7 @@ describe Gitlab::Ci::Build::Step do it 'fabricates an object' do expect(subject.name).to eq(:after_script) expect(subject.script).to eq(['ls -la', 'date']) - expect(subject.timeout).to eq(job.timeout) + expect(subject.timeout).to eq(job.metadata_timeout) expect(subject.when).to eq('always') expect(subject.allow_failure).to be_truthy end diff --git a/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb index 1b03227d67b..dc12ba076bc 100644 --- a/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb @@ -5,23 +5,23 @@ describe Gitlab::Ci::Pipeline::Chain::Create do set(:user) { create(:user) } let(:pipeline) do - build(:ci_pipeline_with_one_job, project: project, - ref: 'master') + build(:ci_empty_pipeline, project: project, ref: 'master') end let(:command) do Gitlab::Ci::Pipeline::Chain::Command.new( - project: project, - current_user: user, seeds_block: nil) + project: project, current_user: user) end let(:step) { described_class.new(pipeline, command) } - before do - step.perform! - end - context 'when pipeline is ready to be saved' do + before do + pipeline.stages.build(name: 'test', project: project) + + step.perform! + end + it 'saves a pipeline' do expect(pipeline).to be_persisted end @@ -32,6 +32,7 @@ describe Gitlab::Ci::Pipeline::Chain::Create do it 'creates stages' do expect(pipeline.reload.stages).to be_one + expect(pipeline.stages.first).to be_persisted end end @@ -40,6 +41,10 @@ describe Gitlab::Ci::Pipeline::Chain::Create do build(:ci_pipeline, project: project, ref: nil) end + before do + step.perform! + end + it 'breaks the chain' do expect(step.break?).to be true end @@ -49,18 +54,4 @@ describe Gitlab::Ci::Pipeline::Chain::Create do .to include /Failed to persist the pipeline/ end end - - context 'when there is a seed block present' do - let(:seeds) { spy('pipeline seeds') } - - let(:command) do - double('command', project: project, - current_user: user, - seeds_block: seeds) - end - - it 'executes the block' do - expect(seeds).to have_received(:call).with(pipeline) - end - end end diff --git a/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb new file mode 100644 index 00000000000..2258ae83f38 --- /dev/null +++ b/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb @@ -0,0 +1,153 @@ +require 'spec_helper' + +describe Gitlab::Ci::Pipeline::Chain::Populate do + set(:project) { create(:project) } + set(:user) { create(:user) } + + let(:pipeline) do + build(:ci_pipeline_with_one_job, project: project, + ref: 'master') + end + + let(:command) do + Gitlab::Ci::Pipeline::Chain::Command.new( + project: project, + current_user: user, + seeds_block: nil) + end + + let(:step) { described_class.new(pipeline, command) } + + context 'when pipeline doesn not have seeds block' do + before do + step.perform! 
+ end + + it 'does not persist the pipeline' do + expect(pipeline).not_to be_persisted + end + + it 'does not break the chain' do + expect(step.break?).to be false + end + + it 'populates pipeline with stages' do + expect(pipeline.stages).to be_one + expect(pipeline.stages.first).not_to be_persisted + end + + it 'populates pipeline with builds' do + expect(pipeline.builds).to be_one + expect(pipeline.builds.first).not_to be_persisted + expect(pipeline.stages.first.builds).to be_one + expect(pipeline.stages.first.builds.first).not_to be_persisted + end + end + + context 'when pipeline is empty' do + let(:config) do + { rspec: { + script: 'ls', + only: ['something'] + } } + end + + let(:pipeline) do + build(:ci_pipeline, project: project, config: config) + end + + before do + step.perform! + end + + it 'breaks the chain' do + expect(step.break?).to be true + end + + it 'appends an error about missing stages' do + expect(pipeline.errors.to_a) + .to include 'No stages / jobs for this pipeline.' + end + end + + context 'when pipeline has validation errors' do + let(:pipeline) do + build(:ci_pipeline, project: project, ref: nil) + end + + before do + step.perform! + end + + it 'breaks the chain' do + expect(step.break?).to be true + end + + it 'appends validation error' do + expect(pipeline.errors.to_a) + .to include 'Failed to build the pipeline!' + end + end + + context 'when there is a seed blocks present' do + let(:command) do + Gitlab::Ci::Pipeline::Chain::Command.new( + project: project, + current_user: user, + seeds_block: seeds_block) + end + + context 'when seeds block builds some resources' do + let(:seeds_block) do + ->(pipeline) { pipeline.variables.build(key: 'VAR', value: '123') } + end + + it 'populates pipeline with resources described in the seeds block' do + step.perform! + + expect(pipeline).not_to be_persisted + expect(pipeline.variables).not_to be_empty + expect(pipeline.variables.first).not_to be_persisted + expect(pipeline.variables.first.key).to eq 'VAR' + expect(pipeline.variables.first.value).to eq '123' + end + end + + context 'when seeds block tries to persist some resources' do + let(:seeds_block) do + ->(pipeline) { pipeline.variables.create!(key: 'VAR', value: '123') } + end + + it 'raises exception' do + expect { step.perform! }.to raise_error(ActiveRecord::RecordNotSaved) + end + end + end + + context 'when pipeline gets persisted during the process' do + let(:pipeline) { create(:ci_pipeline, project: project) } + + it 'raises error' do + expect { step.perform! }.to raise_error(described_class::PopulateError) + end + end + + context 'when using only/except build policies' do + let(:config) do + { rspec: { script: 'rspec', stage: 'test', only: ['master'] }, + prod: { script: 'cap prod', stage: 'deploy', only: ['tags'] } } + end + + let(:pipeline) do + build(:ci_pipeline, ref: 'master', config: config) + end + + it 'populates pipeline according to used policies' do + step.perform! 
+ + expect(pipeline.stages.size).to eq 1 + expect(pipeline.builds.size).to eq 1 + expect(pipeline.builds.first.name).to eq 'rspec' + end + end +end diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb index 5c12c6e6392..c53294d091c 100644 --- a/spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb @@ -76,28 +76,6 @@ describe Gitlab::Ci::Pipeline::Chain::Validate::Config do end end - context 'when pipeline has no stages / jobs' do - let(:config) do - { rspec: { - script: 'ls', - only: ['something'] - } } - end - - let(:pipeline) do - build(:ci_pipeline, project: project, config: config) - end - - it 'appends an error about missing stages' do - expect(pipeline.errors.to_a) - .to include 'No stages / jobs for this pipeline.' - end - - it 'breaks the chain' do - expect(step.break?).to be true - end - end - context 'when pipeline contains configuration validation errors' do let(:config) { { rspec: {} } } diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb new file mode 100644 index 00000000000..116573379e0 --- /dev/null +++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb @@ -0,0 +1,242 @@ +require 'spec_helper' + +describe Gitlab::Ci::Pipeline::Seed::Build do + let(:pipeline) { create(:ci_empty_pipeline) } + + let(:attributes) do + { name: 'rspec', + ref: 'master', + commands: 'rspec' } + end + + subject do + described_class.new(pipeline, attributes) + end + + describe '#attributes' do + it 'returns hash attributes of a build' do + expect(subject.attributes).to be_a Hash + expect(subject.attributes) + .to include(:name, :project, :ref, :commands) + end + end + + describe '#user=' do + let(:user) { build(:user) } + + it 'assignes user to a build' do + subject.user = user + + expect(subject.attributes).to include(user: user) + end + end + + describe '#to_resource' do + it 'returns a valid build resource' do + expect(subject.to_resource).to be_a(::Ci::Build) + expect(subject.to_resource).to be_valid + end + + it 'memoizes a resource object' do + build = subject.to_resource + + expect(build.object_id).to eq subject.to_resource.object_id + end + + it 'can not be persisted without explicit assignment' do + build = subject.to_resource + + pipeline.save! 
+ + expect(build).not_to be_persisted + end + end + + describe 'applying only/except policies' do + context 'when no branch policy is specified' do + let(:attributes) { { name: 'rspec' } } + + it { is_expected.to be_included } + end + + context 'when branch policy does not match' do + context 'when using only' do + let(:attributes) { { name: 'rspec', only: { refs: ['deploy'] } } } + + it { is_expected.not_to be_included } + end + + context 'when using except' do + let(:attributes) { { name: 'rspec', except: { refs: ['deploy'] } } } + + it { is_expected.to be_included } + end + end + + context 'when branch regexp policy does not match' do + context 'when using only' do + let(:attributes) { { name: 'rspec', only: { refs: ['/^deploy$/'] } } } + + it { is_expected.not_to be_included } + end + + context 'when using except' do + let(:attributes) { { name: 'rspec', except: { refs: ['/^deploy$/'] } } } + + it { is_expected.to be_included } + end + end + + context 'when branch policy matches' do + context 'when using only' do + let(:attributes) { { name: 'rspec', only: { refs: %w[deploy master] } } } + + it { is_expected.to be_included } + end + + context 'when using except' do + let(:attributes) { { name: 'rspec', except: { refs: %w[deploy master] } } } + + it { is_expected.not_to be_included } + end + end + + context 'when keyword policy matches' do + context 'when using only' do + let(:attributes) { { name: 'rspec', only: { refs: ['branches'] } } } + + it { is_expected.to be_included } + end + + context 'when using except' do + let(:attributes) { { name: 'rspec', except: { refs: ['branches'] } } } + + it { is_expected.not_to be_included } + end + end + + context 'when keyword policy does not match' do + context 'when using only' do + let(:attributes) { { name: 'rspec', only: { refs: ['tags'] } } } + + it { is_expected.not_to be_included } + end + + context 'when using except' do + let(:attributes) { { name: 'rspec', except: { refs: ['tags'] } } } + + it { is_expected.to be_included } + end + end + + context 'when keywords and pipeline source policy matches' do + possibilities = [%w[pushes push], + %w[web web], + %w[triggers trigger], + %w[schedules schedule], + %w[api api], + %w[external external]] + + context 'when using only' do + possibilities.each do |keyword, source| + context "when using keyword `#{keyword}` and source `#{source}`" do + let(:pipeline) do + build(:ci_empty_pipeline, ref: 'deploy', tag: false, source: source) + end + + let(:attributes) { { name: 'rspec', only: { refs: [keyword] } } } + + it { is_expected.to be_included } + end + end + end + + context 'when using except' do + possibilities.each do |keyword, source| + context "when using keyword `#{keyword}` and source `#{source}`" do + let(:pipeline) do + build(:ci_empty_pipeline, ref: 'deploy', tag: false, source: source) + end + + let(:attributes) { { name: 'rspec', except: { refs: [keyword] } } } + + it { is_expected.not_to be_included } + end + end + end + end + + context 'when keywords and pipeline source does not match' do + possibilities = [%w[pushes web], + %w[web push], + %w[triggers schedule], + %w[schedules external], + %w[api trigger], + %w[external api]] + + context 'when using only' do + possibilities.each do |keyword, source| + context "when using keyword `#{keyword}` and source `#{source}`" do + let(:pipeline) do + build(:ci_empty_pipeline, ref: 'deploy', tag: false, source: source) + end + + let(:attributes) { { name: 'rspec', only: { refs: [keyword] } } } + + it { is_expected.not_to be_included } + end + end 
+ end + + context 'when using except' do + possibilities.each do |keyword, source| + context "when using keyword `#{keyword}` and source `#{source}`" do + let(:pipeline) do + build(:ci_empty_pipeline, ref: 'deploy', tag: false, source: source) + end + + let(:attributes) { { name: 'rspec', except: { refs: [keyword] } } } + + it { is_expected.to be_included } + end + end + end + end + + context 'when repository path matches' do + context 'when using only' do + let(:attributes) do + { name: 'rspec', only: { refs: ["branches@#{pipeline.project_full_path}"] } } + end + + it { is_expected.to be_included } + end + + context 'when using except' do + let(:attributes) do + { name: 'rspec', except: { refs: ["branches@#{pipeline.project_full_path}"] } } + end + + it { is_expected.not_to be_included } + end + end + + context 'when repository path does not match' do + context 'when using only' do + let(:attributes) do + { name: 'rspec', only: { refs: ['branches@fork'] } } + end + + it { is_expected.not_to be_included } + end + + context 'when using except' do + let(:attributes) do + { name: 'rspec', except: { refs: ['branches@fork'] } } + end + + it { is_expected.to be_included } + end + end + end +end diff --git a/spec/lib/gitlab/ci/pipeline/seed/stage_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/stage_spec.rb new file mode 100644 index 00000000000..8f0bf40d624 --- /dev/null +++ b/spec/lib/gitlab/ci/pipeline/seed/stage_spec.rb @@ -0,0 +1,133 @@ +require 'spec_helper' + +describe Gitlab::Ci::Pipeline::Seed::Stage do + let(:pipeline) { create(:ci_empty_pipeline) } + + let(:attributes) do + { name: 'test', + index: 0, + builds: [{ name: 'rspec' }, + { name: 'spinach' }, + { name: 'deploy', only: { refs: ['feature'] } }] } + end + + subject do + described_class.new(pipeline, attributes) + end + + describe '#size' do + it 'returns a number of jobs in the stage' do + expect(subject.size).to eq 2 + end + end + + describe '#attributes' do + it 'returns hash attributes of a stage' do + expect(subject.attributes).to be_a Hash + expect(subject.attributes).to include(:name, :project) + end + end + + describe '#included?'
 do + context 'when it contains build seeds' do + let(:attributes) do + { name: 'test', + index: 0, + builds: [{ name: 'deploy', only: { refs: ['master'] } }] } + end + + it { is_expected.to be_included } + end + + context 'when it does not contain build seeds' do + let(:attributes) do + { name: 'test', + index: 0, + builds: [{ name: 'deploy', only: { refs: ['feature'] } }] } + end + + it { is_expected.not_to be_included } + end + end + + describe '#seeds' do + it 'returns build seeds' do + expect(subject.seeds).to all(be_a Gitlab::Ci::Pipeline::Seed::Build) + end + + it 'returns build seeds including valid attributes' do + expect(subject.seeds.size).to eq 2 + expect(subject.seeds.map(&:attributes)).to all(include(ref: 'master')) + expect(subject.seeds.map(&:attributes)).to all(include(tag: false)) + expect(subject.seeds.map(&:attributes)).to all(include(project: pipeline.project)) + expect(subject.seeds.map(&:attributes)) + .to all(include(trigger_request: pipeline.trigger_requests.first)) + end + + context 'when a ref is protected' do + before do + allow_any_instance_of(Project).to receive(:protected_for?).and_return(true) + end + + it 'returns protected builds' do + expect(subject.seeds.map(&:attributes)).to all(include(protected: true)) + end + end + + context 'when a ref is not protected' do + before do + allow_any_instance_of(Project).to receive(:protected_for?).and_return(false) + end + + it 'returns unprotected builds' do + expect(subject.seeds.map(&:attributes)).to all(include(protected: false)) + end + end + + it 'filters seeds using only/except policies' do + expect(subject.seeds.map(&:attributes)).to satisfy do |seeds| + seeds.any? { |hash| hash.fetch(:name) == 'rspec' } + end + + expect(subject.seeds.map(&:attributes)).not_to satisfy do |seeds| + seeds.any? { |hash| hash.fetch(:name) == 'deploy' } + end + end + end + + describe '#user=' do + let(:user) { build(:user) } + + it 'assigns relevant pipeline attributes' do + subject.user = user + + expect(subject.seeds.map(&:attributes)).to all(include(user: user)) + end + end + + describe '#to_resource' do + it 'builds a valid stage object with all builds' do + subject.to_resource.save! + + expect(pipeline.reload.stages.count).to eq 1 + expect(pipeline.reload.builds.count).to eq 2 + expect(pipeline.builds).to all(satisfy { |job| job.stage_id.present? }) + expect(pipeline.builds).to all(satisfy { |job| job.pipeline.present? }) + expect(pipeline.builds).to all(satisfy { |job| job.project.present? }) + expect(pipeline.stages) + .to all(satisfy { |stage| stage.pipeline.present? }) + expect(pipeline.stages) + .to all(satisfy { |stage| stage.project.present? }) + end + + it 'can not be persisted without explicit pipeline assignment' do + stage = subject.to_resource + + pipeline.save!
+ + expect(stage).not_to be_persisted + expect(pipeline.reload.stages.count).to eq 0 + expect(pipeline.reload.builds.count).to eq 0 + end + end +end diff --git a/spec/lib/gitlab/ci/stage/seed_spec.rb b/spec/lib/gitlab/ci/stage/seed_spec.rb deleted file mode 100644 index 3fe8d50c49a..00000000000 --- a/spec/lib/gitlab/ci/stage/seed_spec.rb +++ /dev/null @@ -1,83 +0,0 @@ -require 'spec_helper' - -describe Gitlab::Ci::Stage::Seed do - let(:pipeline) { create(:ci_empty_pipeline) } - - let(:builds) do - [{ name: 'rspec' }, { name: 'spinach' }] - end - - subject do - described_class.new(pipeline, 'test', builds) - end - - describe '#size' do - it 'returns a number of jobs in the stage' do - expect(subject.size).to eq 2 - end - end - - describe '#stage' do - it 'returns hash attributes of a stage' do - expect(subject.stage).to be_a Hash - expect(subject.stage).to include(:name, :project) - end - end - - describe '#builds' do - it 'returns hash attributes of all builds' do - expect(subject.builds.size).to eq 2 - expect(subject.builds).to all(include(ref: 'master')) - expect(subject.builds).to all(include(tag: false)) - expect(subject.builds).to all(include(project: pipeline.project)) - expect(subject.builds) - .to all(include(trigger_request: pipeline.trigger_requests.first)) - end - - context 'when a ref is protected' do - before do - allow_any_instance_of(Project).to receive(:protected_for?).and_return(true) - end - - it 'returns protected builds' do - expect(subject.builds).to all(include(protected: true)) - end - end - - context 'when a ref is unprotected' do - before do - allow_any_instance_of(Project).to receive(:protected_for?).and_return(false) - end - - it 'returns unprotected builds' do - expect(subject.builds).to all(include(protected: false)) - end - end - end - - describe '#user=' do - let(:user) { build(:user) } - - it 'assignes relevant pipeline attributes' do - subject.user = user - - expect(subject.builds).to all(include(user: user)) - end - end - - describe '#create!' do - it 'creates all stages and builds' do - subject.create! - - expect(pipeline.reload.stages.count).to eq 1 - expect(pipeline.reload.builds.count).to eq 2 - expect(pipeline.builds).to all(satisfy { |job| job.stage_id.present? }) - expect(pipeline.builds).to all(satisfy { |job| job.pipeline.present? }) - expect(pipeline.builds).to all(satisfy { |job| job.project.present? }) - expect(pipeline.stages) - .to all(satisfy { |stage| stage.pipeline.present? }) - expect(pipeline.stages) - .to all(satisfy { |stage| stage.project.present? }) - end - end -end diff --git a/spec/lib/gitlab/ci/trace/http_io_spec.rb b/spec/lib/gitlab/ci/trace/http_io_spec.rb new file mode 100644 index 00000000000..5474e2f518c --- /dev/null +++ b/spec/lib/gitlab/ci/trace/http_io_spec.rb @@ -0,0 +1,315 @@ +require 'spec_helper' + +describe Gitlab::Ci::Trace::HttpIO do + include HttpIOHelpers + + let(:http_io) { described_class.new(url, size) } + let(:url) { remote_trace_url } + let(:size) { remote_trace_size } + + describe '#close' do + subject { http_io.close } + + it { is_expected.to be_nil } + end + + describe '#binmode' do + subject { http_io.binmode } + + it { is_expected.to be_nil } + end + + describe '#binmode?' do + subject { http_io.binmode? 
} + + it { is_expected.to be_truthy } + end + + describe '#path' do + subject { http_io.path } + + it { is_expected.to be_nil } + end + + describe '#url' do + subject { http_io.url } + + it { is_expected.to eq(url) } + end + + describe '#seek' do + subject { http_io.seek(pos, where) } + + context 'when moves pos to end of the file' do + let(:pos) { 0 } + let(:where) { IO::SEEK_END } + + it { is_expected.to eq(size) } + end + + context 'when moves pos to middle of the file' do + let(:pos) { size / 2 } + let(:where) { IO::SEEK_SET } + + it { is_expected.to eq(size / 2) } + end + + context 'when moves pos around' do + it 'matches the result' do + expect(http_io.seek(0)).to eq(0) + expect(http_io.seek(100, IO::SEEK_CUR)).to eq(100) + expect { http_io.seek(size + 1, IO::SEEK_CUR) }.to raise_error('new position is outside of file') + end + end + end + + describe '#eof?' do + subject { http_io.eof? } + + context 'when current pos is at end of the file' do + before do + http_io.seek(size, IO::SEEK_SET) + end + + it { is_expected.to be_truthy } + end + + context 'when current pos is not at end of the file' do + before do + http_io.seek(0, IO::SEEK_SET) + end + + it { is_expected.to be_falsey } + end + end + + describe '#each_line' do + subject { http_io.each_line } + + let(:string_io) { StringIO.new(remote_trace_body) } + + before do + stub_remote_trace_206 + end + + it 'yields lines' do + expect { |b| http_io.each_line(&b) }.to yield_successive_args(*string_io.each_line.to_a) + end + + context 'when buckets on GCS' do + context 'when BUFFER_SIZE is larger than file size' do + before do + stub_remote_trace_200 + set_larger_buffer_size_than(size) + end + + it 'calls get_chunk only once' do + expect_any_instance_of(Net::HTTP).to receive(:request).once.and_call_original + + http_io.each_line { |line| } + end + end + end + end + + describe '#read' do + subject { http_io.read(length) } + + context 'when there are no network issues' do + before do + stub_remote_trace_206 + end + + context 'when read whole size' do + let(:length) { nil } + + context 'when BUFFER_SIZE is smaller than file size' do + before do + set_smaller_buffer_size_than(size) + end + + it 'reads a trace' do + is_expected.to eq(remote_trace_body) + end + end + + context 'when BUFFER_SIZE is larger than file size' do + before do + set_larger_buffer_size_than(size) + end + + it 'reads a trace' do + is_expected.to eq(remote_trace_body) + end + end + end + + context 'when read only first 100 bytes' do + let(:length) { 100 } + + context 'when BUFFER_SIZE is smaller than file size' do + before do + set_smaller_buffer_size_than(size) + end + + it 'reads a trace' do + is_expected.to eq(remote_trace_body[0, length]) + end + end + + context 'when BUFFER_SIZE is larger than file size' do + before do + set_larger_buffer_size_than(size) + end + + it 'reads a trace' do + is_expected.to eq(remote_trace_body[0, length]) + end + end + end + + context 'when tries to read oversize' do + let(:length) { size + 1000 } + + context 'when BUFFER_SIZE is smaller than file size' do + before do + set_smaller_buffer_size_than(size) + end + + it 'reads a trace' do + is_expected.to eq(remote_trace_body) + end + end + + context 'when BUFFER_SIZE is larger than file size' do + before do + set_larger_buffer_size_than(size) + end + + it 'reads a trace' do + is_expected.to eq(remote_trace_body) + end + end + end + + context 'when tries to read 0 bytes' do + let(:length) { 0 } + + context 'when BUFFER_SIZE is smaller than file size' do + before do +
 set_smaller_buffer_size_than(size) + end + + it 'reads a trace' do + is_expected.to be_empty + end + end + + context 'when BUFFER_SIZE is larger than file size' do + before do + set_larger_buffer_size_than(size) + end + + it 'reads a trace' do + is_expected.to be_empty + end + end + end + end + + context 'when there is a network issue' do + let(:length) { nil } + + before do + stub_remote_trace_500 + end + + it 'raises an error' do + expect { subject }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError) + end + end + end + + describe '#readline' do + subject { http_io.readline } + + let(:string_io) { StringIO.new(remote_trace_body) } + + before do + stub_remote_trace_206 + end + + shared_examples 'all line matching' do + it 'reads a line' do + (0...remote_trace_body.lines.count).each do + expect(http_io.readline).to eq(string_io.readline) + end + end + end + + context 'when there is a network issue' do + let(:length) { nil } + + before do + stub_remote_trace_500 + end + + it 'raises an error' do + expect { subject }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError) + end + end + + context 'when BUFFER_SIZE is smaller than file size' do + before do + set_smaller_buffer_size_than(size) + end + + it_behaves_like 'all line matching' + end + + context 'when BUFFER_SIZE is larger than file size' do + before do + set_larger_buffer_size_than(size) + end + + it_behaves_like 'all line matching' + end + + context 'when pos is at middle of the file' do + before do + set_smaller_buffer_size_than(size) + + http_io.seek(size / 2) + string_io.seek(size / 2) + end + + it 'reads from pos' do + expect(http_io.readline).to eq(string_io.readline) + end + end + end + + describe '#write' do + subject { http_io.write(nil) } + + it { expect { subject }.to raise_error(NotImplementedError) } + end + + describe '#truncate' do + subject { http_io.truncate(nil) } + + it { expect { subject }.to raise_error(NotImplementedError) } + end + + describe '#flush' do + subject { http_io.flush } + + it { expect { subject }.to raise_error(NotImplementedError) } + end + + describe '#present?' do + subject { http_io.present?
} + + it { is_expected.to be_truthy } + end +end diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb index f83f932e61e..fbc2af29b98 100644 --- a/spec/lib/gitlab/ci/yaml_processor_spec.rb +++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb @@ -18,6 +18,34 @@ module Gitlab describe '#build_attributes' do subject { described_class.new(config).build_attributes(:rspec) } + describe 'attributes list' do + let(:config) do + YAML.dump( + before_script: ['pwd'], + rspec: { script: 'rspec' } + ) + end + + it 'returns valid build attributes' do + expect(subject).to eq({ + stage: "test", + stage_idx: 1, + name: "rspec", + commands: "pwd\nrspec", + coverage_regex: nil, + tag_list: [], + options: { + before_script: ["pwd"], + script: ["rspec"] + }, + allow_failure: false, + when: "on_success", + environment: nil, + yaml_variables: [] + }) + end + end + describe 'coverage entry' do describe 'code coverage regexp' do let(:config) do @@ -105,512 +133,118 @@ module Gitlab end end - describe '#stage_seeds' do - context 'when no refs policy is specified' do - let(:config) do - YAML.dump(production: { stage: 'deploy', script: 'cap prod' }, - rspec: { stage: 'test', script: 'rspec' }, - spinach: { stage: 'test', script: 'spinach' }) - end - - let(:pipeline) { create(:ci_empty_pipeline) } - - it 'correctly fabricates a stage seeds object' do - seeds = subject.stage_seeds(pipeline) - - expect(seeds.size).to eq 2 - expect(seeds.first.stage[:name]).to eq 'test' - expect(seeds.second.stage[:name]).to eq 'deploy' - expect(seeds.first.builds.dig(0, :name)).to eq 'rspec' - expect(seeds.first.builds.dig(1, :name)).to eq 'spinach' - expect(seeds.second.builds.dig(0, :name)).to eq 'production' - end - end - - context 'when refs policy is specified' do - let(:config) do - YAML.dump(production: { stage: 'deploy', script: 'cap prod', only: ['master'] }, - spinach: { stage: 'test', script: 'spinach', only: ['tags'] }) - end - - let(:pipeline) do - create(:ci_empty_pipeline, ref: 'feature', tag: true) - end - - it 'returns stage seeds only assigned to master to master' do - seeds = subject.stage_seeds(pipeline) - - expect(seeds.size).to eq 1 - expect(seeds.first.stage[:name]).to eq 'test' - expect(seeds.first.builds.dig(0, :name)).to eq 'spinach' - end - end - - context 'when source policy is specified' do - let(:config) do - YAML.dump(production: { stage: 'deploy', script: 'cap prod', only: ['triggers'] }, - spinach: { stage: 'test', script: 'spinach', only: ['schedules'] }) - end - - let(:pipeline) do - create(:ci_empty_pipeline, source: :schedule) - end - - it 'returns stage seeds only assigned to schedules' do - seeds = subject.stage_seeds(pipeline) - - expect(seeds.size).to eq 1 - expect(seeds.first.stage[:name]).to eq 'test' - expect(seeds.first.builds.dig(0, :name)).to eq 'spinach' - end + describe '#stages_attributes' do + let(:config) do + YAML.dump( + rspec: { script: 'rspec', stage: 'test', only: ['branches'] }, + prod: { script: 'cap prod', stage: 'deploy', only: ['tags'] } + ) end - context 'when kubernetes policy is specified' do - let(:config) do - YAML.dump( - spinach: { stage: 'test', script: 'spinach' }, - production: { - stage: 'deploy', - script: 'cap', - only: { kubernetes: 'active' } - } - ) - end - - context 'when kubernetes is active' do - shared_examples 'same behavior between KubernetesService and Platform::Kubernetes' do - it 'returns seeds for kubernetes dependent job' do - seeds = subject.stage_seeds(pipeline) - - expect(seeds.size).to eq 2 - 
expect(seeds.first.builds.dig(0, :name)).to eq 'spinach' - expect(seeds.second.builds.dig(0, :name)).to eq 'production' - end - end - - context 'when user configured kubernetes from Integration > Kubernetes' do - let(:project) { create(:kubernetes_project) } - let(:pipeline) { create(:ci_empty_pipeline, project: project) } - - it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes' - end - - context 'when user configured kubernetes from CI/CD > Clusters' do - let!(:cluster) { create(:cluster, :project, :provided_by_gcp) } - let(:project) { cluster.project } - let(:pipeline) { create(:ci_empty_pipeline, project: project) } - - it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes' - end - end - - context 'when kubernetes is not active' do - it 'does not return seeds for kubernetes dependent job' do - seeds = subject.stage_seeds(pipeline) - - expect(seeds.size).to eq 1 - expect(seeds.first.builds.dig(0, :name)).to eq 'spinach' - end - end + let(:attributes) do + [{ name: "build", + index: 0, + builds: [] }, + { name: "test", + index: 1, + builds: + [{ stage_idx: 1, + stage: "test", + commands: "rspec", + tag_list: [], + name: "rspec", + allow_failure: false, + when: "on_success", + environment: nil, + coverage_regex: nil, + yaml_variables: [], + options: { script: ["rspec"] }, + only: { refs: ["branches"] }, + except: {} }] }, + { name: "deploy", + index: 2, + builds: + [{ stage_idx: 2, + stage: "deploy", + commands: "cap prod", + tag_list: [], + name: "prod", + allow_failure: false, + when: "on_success", + environment: nil, + coverage_regex: nil, + yaml_variables: [], + options: { script: ["cap prod"] }, + only: { refs: ["tags"] }, + except: {} }] }] + end + + it 'returns stages seed attributes' do + expect(subject.stages_attributes).to eq attributes end end - describe "#pipeline_stage_builds" do - let(:type) { 'test' } + describe 'only / except policies validations' do + context 'when `only` has an invalid value' do + let(:config) { { rspec: { script: "rspec", type: "test", only: only } } } + let(:processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) } - it "returns builds if no branch specified" do - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec" } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(1) - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).first).to eq({ - stage: "test", - stage_idx: 1, - name: "rspec", - commands: "pwd\nrspec", - coverage_regex: nil, - tag_list: [], - options: { - before_script: ["pwd"], - script: ["rspec"] - }, - allow_failure: false, - when: "on_success", - environment: nil, - yaml_variables: [] - }) - end - - describe 'only' do - it "does not return builds if only has another branch" do - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", only: ["deploy"] } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(0) - end - - it "does not return builds if only has regexp with another branch" do - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", only: ["/^deploy$/"] } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(0) - end - - it "returns builds if only has specified this 
branch" do - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", only: ["master"] } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(1) - end - - it "returns builds if only has a list of branches including specified" do - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", type: type, only: %w(master deploy) } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(1) - end - - it "returns builds if only has a branches keyword specified" do - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", type: type, only: ["branches"] } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(1) - end - - it "does not return builds if only has a tags keyword" do - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", type: type, only: ["tags"] } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(0) - end + context 'when it is integer' do + let(:only) { 1 } - it "returns builds if only has special keywords specified and source matches" do - possibilities = [{ keyword: 'pushes', source: 'push' }, - { keyword: 'web', source: 'web' }, - { keyword: 'triggers', source: 'trigger' }, - { keyword: 'schedules', source: 'schedule' }, - { keyword: 'api', source: 'api' }, - { keyword: 'external', source: 'external' }] - - possibilities.each do |possibility| - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", type: type, only: [possibility[:keyword]] } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: 'deploy', tag: false, source: possibility[:source])).size).to eq(1) + it do + expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, + 'jobs:rspec:only has to be either an array of conditions or a hash') end end - it "does not return builds if only has special keywords specified and source doesn't match" do - possibilities = [{ keyword: 'pushes', source: 'web' }, - { keyword: 'web', source: 'push' }, - { keyword: 'triggers', source: 'schedule' }, - { keyword: 'schedules', source: 'external' }, - { keyword: 'api', source: 'trigger' }, - { keyword: 'external', source: 'api' }] - - possibilities.each do |possibility| - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", type: type, only: [possibility[:keyword]] } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) + context 'when it is an array of integers' do + let(:only) { [1, 1] } - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: 'deploy', tag: false, source: possibility[:source])).size).to eq(0) + it do + expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, + 'jobs:rspec:only config should be an array of strings or regexps') end end - it "returns builds if only has current repository path" do - seed_pipeline = pipeline(ref: 'deploy') - - config = YAML.dump({ - before_script: ["pwd"], - rspec: { - script: "rspec", - type: type, - only: ["branches@#{seed_pipeline.project_full_path}"] - } - }) - - config_processor = 
Gitlab::Ci::YamlProcessor.new(config) + context 'when it is invalid regex' do + let(:only) { ["/*invalid/"] } - expect(config_processor.pipeline_stage_builds(type, seed_pipeline).size).to eq(1) - end - - it "does not return builds if only has different repository path" do - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", type: type, only: ["branches@fork"] } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(0) - end - - it "returns build only for specified type" do - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", type: "test", only: %w(master deploy) }, - staging: { script: "deploy", type: "deploy", only: %w(master deploy) }, - production: { script: "deploy", type: "deploy", only: ["master@path", "deploy"] } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds("deploy", pipeline(ref: "deploy")).size).to eq(2) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "deploy")).size).to eq(1) - expect(config_processor.pipeline_stage_builds("deploy", pipeline(ref: "master")).size).to eq(1) - end - - context 'for invalid value' do - let(:config) { { rspec: { script: "rspec", type: "test", only: only } } } - let(:processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) } - - context 'when it is integer' do - let(:only) { 1 } - - it do - expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, - 'jobs:rspec:only has to be either an array of conditions or a hash') - end - end - - context 'when it is an array of integers' do - let(:only) { [1, 1] } - - it do - expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, - 'jobs:rspec:only config should be an array of strings or regexps') - end - end - - context 'when it is invalid regex' do - let(:only) { ["/*invalid/"] } - - it do - expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, - 'jobs:rspec:only config should be an array of strings or regexps') - end + it do + expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, + 'jobs:rspec:only config should be an array of strings or regexps') end end end - describe 'except' do - it "returns builds if except has another branch" do - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", except: ["deploy"] } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(1) - end - - it "returns builds if except has regexp with another branch" do - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", except: ["/^deploy$/"] } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) + context 'when `except` has an invalid value' do + let(:config) { { rspec: { script: "rspec", except: except } } } + let(:processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) } - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(1) - end - - it "does not return builds if except has specified this branch" do - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", except: ["master"] } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(0) - end - - it "does not return builds if 
except has a list of branches including specified" do - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", type: type, except: %w(master deploy) } - }) + context 'when it is integer' do + let(:except) { 1 } - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(0) - end - - it "does not return builds if except has a branches keyword specified" do - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", type: type, except: ["branches"] } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(0) - end - - it "returns builds if except has a tags keyword" do - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", type: type, except: ["tags"] } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(1) - end - - it "does not return builds if except has special keywords specified and source matches" do - possibilities = [{ keyword: 'pushes', source: 'push' }, - { keyword: 'web', source: 'web' }, - { keyword: 'triggers', source: 'trigger' }, - { keyword: 'schedules', source: 'schedule' }, - { keyword: 'api', source: 'api' }, - { keyword: 'external', source: 'external' }] - - possibilities.each do |possibility| - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", type: type, except: [possibility[:keyword]] } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: 'deploy', tag: false, source: possibility[:source])).size).to eq(0) + it do + expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, + 'jobs:rspec:except has to be either an array of conditions or a hash') end end - it "returns builds if except has special keywords specified and source doesn't match" do - possibilities = [{ keyword: 'pushes', source: 'web' }, - { keyword: 'web', source: 'push' }, - { keyword: 'triggers', source: 'schedule' }, - { keyword: 'schedules', source: 'external' }, - { keyword: 'api', source: 'trigger' }, - { keyword: 'external', source: 'api' }] - - possibilities.each do |possibility| - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", type: type, except: [possibility[:keyword]] } - }) + context 'when it is an array of integers' do + let(:except) { [1, 1] } - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: 'deploy', tag: false, source: possibility[:source])).size).to eq(1) + it do + expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, + 'jobs:rspec:except config should be an array of strings or regexps') end end - it "does not return builds if except has current repository path" do - seed_pipeline = pipeline(ref: 'deploy') - - config = YAML.dump({ - before_script: ["pwd"], - rspec: { - script: "rspec", - type: type, - except: ["branches@#{seed_pipeline.project_full_path}"] - } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, seed_pipeline).size).to eq(0) - end - - it "returns builds if except has different repository path" do - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", type: type, except: 
["branches@fork"] } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(1) - end - - it "returns build except specified type" do - master_pipeline = pipeline(ref: 'master') - test_pipeline = pipeline(ref: 'test') - deploy_pipeline = pipeline(ref: 'deploy') - - config = YAML.dump({ - before_script: ["pwd"], - rspec: { script: "rspec", type: "test", except: ["master", "deploy", "test@#{test_pipeline.project_full_path}"] }, - staging: { script: "deploy", type: "deploy", except: ["master"] }, - production: { script: "deploy", type: "deploy", except: ["master@#{master_pipeline.project_full_path}"] } - }) - - config_processor = Gitlab::Ci::YamlProcessor.new(config) - - expect(config_processor.pipeline_stage_builds("deploy", deploy_pipeline).size).to eq(2) - expect(config_processor.pipeline_stage_builds("test", test_pipeline).size).to eq(0) - expect(config_processor.pipeline_stage_builds("deploy", master_pipeline).size).to eq(0) - end - - context 'for invalid value' do - let(:config) { { rspec: { script: "rspec", except: except } } } - let(:processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) } + context 'when it is invalid regex' do + let(:except) { ["/*invalid/"] } - context 'when it is integer' do - let(:except) { 1 } - - it do - expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, - 'jobs:rspec:except has to be either an array of conditions or a hash') - end - end - - context 'when it is an array of integers' do - let(:except) { [1, 1] } - - it do - expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, - 'jobs:rspec:except config should be an array of strings or regexps') - end - end - - context 'when it is invalid regex' do - let(:except) { ["/*invalid/"] } - - it do - expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, - 'jobs:rspec:except config should be an array of strings or regexps') - end + it do + expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, + 'jobs:rspec:except config should be an array of strings or regexps') end end end @@ -620,7 +254,7 @@ module Gitlab let(:config_data) { YAML.dump(config) } let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config_data) } - subject { config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first } + subject { config_processor.stage_builds_attributes('test').first } describe "before_script" do context "in global context" do @@ -703,8 +337,8 @@ module Gitlab config_processor = Gitlab::Ci::YamlProcessor.new(config) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first).to eq({ + expect(config_processor.stage_builds_attributes("test").size).to eq(1) + expect(config_processor.stage_builds_attributes("test").first).to eq({ stage: "test", stage_idx: 1, name: "rspec", @@ -738,8 +372,8 @@ module Gitlab config_processor = Gitlab::Ci::YamlProcessor.new(config) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first).to eq({ + expect(config_processor.stage_builds_attributes("test").size).to eq(1) + expect(config_processor.stage_builds_attributes("test").first).to eq({ stage: "test", stage_idx: 1, name: "rspec", @@ -771,8 +405,8 @@ module Gitlab 
config_processor = Gitlab::Ci::YamlProcessor.new(config) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first).to eq({ + expect(config_processor.stage_builds_attributes("test").size).to eq(1) + expect(config_processor.stage_builds_attributes("test").first).to eq({ stage: "test", stage_idx: 1, name: "rspec", @@ -800,8 +434,8 @@ module Gitlab config_processor = Gitlab::Ci::YamlProcessor.new(config) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first).to eq({ + expect(config_processor.stage_builds_attributes("test").size).to eq(1) + expect(config_processor.stage_builds_attributes("test").first).to eq({ stage: "test", stage_idx: 1, name: "rspec", @@ -946,8 +580,8 @@ module Gitlab }) config_processor = Gitlab::Ci::YamlProcessor.new(config) + builds = config_processor.stage_builds_attributes("test") - builds = config_processor.pipeline_stage_builds("test", pipeline(ref: "master")) expect(builds.size).to eq(1) expect(builds.first[:when]).to eq(when_state) end @@ -978,8 +612,8 @@ module Gitlab config_processor = Gitlab::Ci::YamlProcessor.new(config) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first[:options][:cache]).to eq( + expect(config_processor.stage_builds_attributes("test").size).to eq(1) + expect(config_processor.stage_builds_attributes("test").first[:options][:cache]).to eq( paths: ["logs/", "binaries/"], untracked: true, key: 'key', @@ -997,8 +631,8 @@ module Gitlab config_processor = Gitlab::Ci::YamlProcessor.new(config) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first[:options][:cache]).to eq( + expect(config_processor.stage_builds_attributes("test").size).to eq(1) + expect(config_processor.stage_builds_attributes("test").first[:options][:cache]).to eq( paths: ["logs/", "binaries/"], untracked: true, key: 'key', @@ -1017,8 +651,8 @@ module Gitlab config_processor = Gitlab::Ci::YamlProcessor.new(config) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first[:options][:cache]).to eq( + expect(config_processor.stage_builds_attributes("test").size).to eq(1) + expect(config_processor.stage_builds_attributes("test").first[:options][:cache]).to eq( paths: ["test/"], untracked: false, key: 'local', @@ -1046,8 +680,8 @@ module Gitlab config_processor = Gitlab::Ci::YamlProcessor.new(config) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1) - expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first).to eq({ + expect(config_processor.stage_builds_attributes("test").size).to eq(1) + expect(config_processor.stage_builds_attributes("test").first).to eq({ stage: "test", stage_idx: 1, name: "rspec", @@ -1083,8 +717,8 @@ module Gitlab }) config_processor = Gitlab::Ci::YamlProcessor.new(config) + builds = config_processor.stage_builds_attributes("test") - builds = config_processor.pipeline_stage_builds("test", pipeline(ref: "master")) expect(builds.size).to eq(1) 
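        # assumed context: `when_state` is supplied by the surrounding parameterized examples;
        # for artifacts it is the upload policy (e.g. on_success), stored under options[:artifacts][:when]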
expect(builds.first[:options][:artifacts][:when]).to eq(when_state) end @@ -1099,7 +733,7 @@ module Gitlab end let(:processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) } - let(:builds) { processor.pipeline_stage_builds('deploy', pipeline(ref: 'master')) } + let(:builds) { processor.stage_builds_attributes('deploy') } context 'when a production environment is specified' do let(:environment) { 'production' } @@ -1256,7 +890,7 @@ module Gitlab describe "Hidden jobs" do let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config) } - subject { config_processor.pipeline_stage_builds("test", pipeline(ref: "master")) } + subject { config_processor.stage_builds_attributes("test") } shared_examples 'hidden_job_handling' do it "doesn't create jobs that start with dot" do @@ -1304,7 +938,7 @@ module Gitlab describe "YAML Alias/Anchor" do let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config) } - subject { config_processor.pipeline_stage_builds("build", pipeline(ref: "master")) } + subject { config_processor.stage_builds_attributes("build") } shared_examples 'job_templates_handling' do it "is correctly supported for jobs" do @@ -1344,13 +978,13 @@ module Gitlab context 'when template is a job' do let(:config) do - <<EOT -job1: &JOBTMPL - stage: build - script: execute-script-for-job + <<~EOT + job1: &JOBTMPL + stage: build + script: execute-script-for-job -job2: *JOBTMPL -EOT + job2: *JOBTMPL + EOT end it_behaves_like 'job_templates_handling' @@ -1358,15 +992,15 @@ EOT context 'when template is a hidden job' do let(:config) do - <<EOT -.template: &JOBTMPL - stage: build - script: execute-script-for-job + <<~EOT + .template: &JOBTMPL + stage: build + script: execute-script-for-job -job1: *JOBTMPL + job1: *JOBTMPL -job2: *JOBTMPL -EOT + job2: *JOBTMPL + EOT end it_behaves_like 'job_templates_handling' @@ -1374,18 +1008,18 @@ EOT context 'when job adds its own keys to a template definition' do let(:config) do - <<EOT -.template: &JOBTMPL - stage: build - -job1: - <<: *JOBTMPL - script: execute-script-for-job - -job2: - <<: *JOBTMPL - script: execute-script-for-job -EOT + <<~EOT + .template: &JOBTMPL + stage: build + + job1: + <<: *JOBTMPL + script: execute-script-for-job + + job2: + <<: *JOBTMPL + script: execute-script-for-job + EOT end it_behaves_like 'job_templates_handling' @@ -1724,10 +1358,6 @@ EOT it { is_expected.to be_nil } end end - - def pipeline(**attributes) - build_stubbed(:ci_empty_pipeline, **attributes) - end end end end diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb index a41b7f4e104..280f799f2ab 100644 --- a/spec/lib/gitlab/database/migration_helpers_spec.rb +++ b/spec/lib/gitlab/database/migration_helpers_spec.rb @@ -1211,4 +1211,33 @@ describe Gitlab::Database::MigrationHelpers do expect(model.perform_background_migration_inline?).to eq(false) end end + + describe '#index_exists_by_name?' 
do
+    it 'returns true if an index exists' do
+      expect(model.index_exists_by_name?(:projects, 'index_projects_on_path'))
+        .to be_truthy
+    end
+
+    it 'returns false if the index does not exist' do
+      expect(model.index_exists_by_name?(:projects, 'this_does_not_exist'))
+        .to be_falsy
+    end
+
+    context 'when an index with a function exists', :postgresql do
+      before do
+        ActiveRecord::Base.connection.execute(
+          'CREATE INDEX test_index ON projects (LOWER(path));'
+        )
+      end
+
+      after do
+        ActiveRecord::Base.connection.execute('DROP INDEX IF EXISTS test_index;')
+      end
+
+      it 'returns true if an index exists' do
+        expect(model.index_exists_by_name?(:projects, 'test_index'))
+          .to be_truthy
+      end
+    end
+  end
 end
diff --git a/spec/lib/gitlab/git/env_spec.rb b/spec/lib/gitlab/git/hook_env_spec.rb
index 03836d49518..e6aa5ad8c90 100644
--- a/spec/lib/gitlab/git/env_spec.rb
+++ b/spec/lib/gitlab/git/hook_env_spec.rb
@@ -1,6 +1,8 @@
 require 'spec_helper'
 
-describe Gitlab::Git::Env do
+describe Gitlab::Git::HookEnv do
+  let(:gl_repository) { 'project-123' }
+
   describe ".set" do
     context 'with RequestStore.store disabled' do
       before do
@@ -8,9 +10,9 @@ describe Gitlab::Git::Env do
       end
 
       it 'does not store anything' do
-        described_class.set(GIT_OBJECT_DIRECTORY: 'foo')
+        described_class.set(gl_repository, GIT_OBJECT_DIRECTORY_RELATIVE: 'foo')
 
-        expect(described_class.all).to be_empty
+        expect(described_class.all(gl_repository)).to be_empty
       end
     end
 
@@ -21,15 +23,19 @@ describe Gitlab::Git::Env do
       it 'whitelists some `GIT_*` variables and stores them using RequestStore' do
         described_class.set(
-          GIT_OBJECT_DIRECTORY: 'foo',
-          GIT_ALTERNATE_OBJECT_DIRECTORIES: 'bar',
+          gl_repository,
+          GIT_OBJECT_DIRECTORY_RELATIVE: 'foo',
+          GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE: 'bar',
           GIT_EXEC_PATH: 'baz',
           PATH: '~/.bin:/bin')
 
-        expect(described_class[:GIT_OBJECT_DIRECTORY]).to eq('foo')
-        expect(described_class[:GIT_ALTERNATE_OBJECT_DIRECTORIES]).to eq('bar')
-        expect(described_class[:GIT_EXEC_PATH]).to be_nil
-        expect(described_class[:bar]).to be_nil
+        git_env = described_class.all(gl_repository)
+
+        expect(git_env[:GIT_OBJECT_DIRECTORY_RELATIVE]).to eq('foo')
+        expect(git_env[:GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE]).to eq('bar')
+        expect(git_env[:GIT_EXEC_PATH]).to be_nil
+        expect(git_env[:PATH]).to be_nil
+        expect(git_env[:bar]).to be_nil
       end
     end
   end
@@ -39,14 +45,15 @@ describe Gitlab::Git::Env do
     before do
       allow(RequestStore).to receive(:active?).and_return(true)
       described_class.set(
-        GIT_OBJECT_DIRECTORY: 'foo',
-        GIT_ALTERNATE_OBJECT_DIRECTORIES: ['bar'])
+        gl_repository,
+        GIT_OBJECT_DIRECTORY_RELATIVE: 'foo',
+        GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE: ['bar'])
     end
 
     it 'returns an env hash' do
-      expect(described_class.all).to eq({
-        'GIT_OBJECT_DIRECTORY' => 'foo',
-        'GIT_ALTERNATE_OBJECT_DIRECTORIES' => ['bar']
+      expect(described_class.all(gl_repository)).to eq({
+        'GIT_OBJECT_DIRECTORY_RELATIVE' => 'foo',
+        'GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE' => ['bar']
       })
     end
   end
@@ -56,8 +63,8 @@ describe Gitlab::Git::Env do
   context 'with RequestStore.store enabled' do
     using RSpec::Parameterized::TableSyntax
 
-    let(:key) { 'GIT_OBJECT_DIRECTORY' }
-    subject { described_class.to_env_hash }
+    let(:key) { 'GIT_OBJECT_DIRECTORY_RELATIVE' }
+    subject { described_class.to_env_hash(gl_repository) }
 
     where(:input, :output) do
       nil | nil
@@ -70,7 +77,7 @@ describe Gitlab::Git::Env do
     with_them do
       before do
        allow(RequestStore).to receive(:active?).and_return(true)
-        described_class.set(key.to_sym => input)
+        described_class.set(gl_repository, key.to_sym => input)
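+        # with_them runs this group once per row of the `where` table above,
+        # exposing that row's `input`/`output` values as methods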
end it 'puts the right value in the hash' do @@ -84,47 +91,25 @@ describe Gitlab::Git::Env do end end - describe ".[]" do - context 'with RequestStore.store enabled' do - before do - allow(RequestStore).to receive(:active?).and_return(true) - end - - before do - described_class.set( - GIT_OBJECT_DIRECTORY: 'foo', - GIT_ALTERNATE_OBJECT_DIRECTORIES: 'bar') - end - - it 'returns a stored value for an existing key' do - expect(described_class[:GIT_OBJECT_DIRECTORY]).to eq('foo') - end - - it 'returns nil for an non-existing key' do - expect(described_class[:foo]).to be_nil - end - end - end - describe 'thread-safety' do context 'with RequestStore.store enabled' do before do allow(RequestStore).to receive(:active?).and_return(true) - described_class.set(GIT_OBJECT_DIRECTORY: 'foo') + described_class.set(gl_repository, GIT_OBJECT_DIRECTORY_RELATIVE: 'foo') end it 'is thread-safe' do another_thread = Thread.new do - described_class.set(GIT_OBJECT_DIRECTORY: 'bar') + described_class.set(gl_repository, GIT_OBJECT_DIRECTORY_RELATIVE: 'bar') Thread.stop - described_class[:GIT_OBJECT_DIRECTORY] + described_class.all(gl_repository)[:GIT_OBJECT_DIRECTORY_RELATIVE] end # Ensure another_thread runs first sleep 0.1 until another_thread.stop? - expect(described_class[:GIT_OBJECT_DIRECTORY]).to eq('foo') + expect(described_class.all(gl_repository)[:GIT_OBJECT_DIRECTORY_RELATIVE]).to eq('foo') another_thread.run expect(another_thread.value).to eq('bar') diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb index 54ada3e423f..5cbe2808d0b 100644 --- a/spec/lib/gitlab/git/repository_spec.rb +++ b/spec/lib/gitlab/git/repository_spec.rb @@ -120,7 +120,7 @@ describe Gitlab::Git::Repository, seed_helper: true do describe 'alternates keyword argument' do context 'with no Git env stored' do before do - allow(Gitlab::Git::Env).to receive(:all).and_return({}) + allow(Gitlab::Git::HookEnv).to receive(:all).and_return({}) end it "is passed an empty array" do @@ -132,7 +132,7 @@ describe Gitlab::Git::Repository, seed_helper: true do context 'with absolute and relative Git object dir envvars stored' do before do - allow(Gitlab::Git::Env).to receive(:all).and_return({ + allow(Gitlab::Git::HookEnv).to receive(:all).and_return({ 'GIT_OBJECT_DIRECTORY_RELATIVE' => './objects/foo', 'GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE' => ['./objects/bar', './objects/baz'], 'GIT_OBJECT_DIRECTORY' => 'ignored', @@ -148,22 +148,6 @@ describe Gitlab::Git::Repository, seed_helper: true do repository.rugged end end - - context 'with only absolute Git object dir envvars stored' do - before do - allow(Gitlab::Git::Env).to receive(:all).and_return({ - 'GIT_OBJECT_DIRECTORY' => 'foo', - 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => %w[bar baz], - 'GIT_OTHER' => 'another_env' - }) - end - - it "is passed the absolute object dir envvars as is" do - expect(Rugged::Repository).to receive(:new).with(repository.path, alternates: %w[foo bar baz]) - - repository.rugged - end - end end end @@ -604,17 +588,20 @@ describe Gitlab::Git::Repository, seed_helper: true do shared_examples 'returning the right branches' do let(:head_id) { repository.rugged.head.target.oid } let(:new_branch) { head_id } + let(:utf8_branch) { 'branch-é' } before do repository.create_branch(new_branch, 'master') + repository.create_branch(utf8_branch, 'master') end after do repository.delete_branch(new_branch) + repository.delete_branch(utf8_branch) end it 'displays that branch' do - expect(repository.branch_names_contains_sha(head_id)).to 
include('master', new_branch) + expect(repository.branch_names_contains_sha(head_id)).to include('master', new_branch, utf8_branch) end end diff --git a/spec/lib/gitlab/git/rev_list_spec.rb b/spec/lib/gitlab/git/rev_list_spec.rb index 4e0ee206219..32ec1e029c8 100644 --- a/spec/lib/gitlab/git/rev_list_spec.rb +++ b/spec/lib/gitlab/git/rev_list_spec.rb @@ -3,17 +3,6 @@ require 'spec_helper' describe Gitlab::Git::RevList do let(:repository) { create(:project, :repository).repository.raw } let(:rev_list) { described_class.new(repository, newrev: 'newrev') } - let(:env_hash) do - { - 'GIT_OBJECT_DIRECTORY' => 'foo', - 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => 'bar' - } - end - let(:command_env) { { 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => 'foo:bar' } } - - before do - allow(Gitlab::Git::Env).to receive(:all).and_return(env_hash) - end def args_for_popen(args_list) [Gitlab.config.git.bin_path, 'rev-list', *args_list] @@ -23,7 +12,7 @@ describe Gitlab::Git::RevList do params = [ args_for_popen(additional_args), repository.path, - command_env, + {}, hash_including(lazy_block: with_lazy_block ? anything : nil) ] diff --git a/spec/lib/gitlab/git_access_spec.rb b/spec/lib/gitlab/git_access_spec.rb index 6f07e423c1b..f8f09d29c73 100644 --- a/spec/lib/gitlab/git_access_spec.rb +++ b/spec/lib/gitlab/git_access_spec.rb @@ -240,14 +240,21 @@ describe Gitlab::GitAccess do end shared_examples 'check_project_moved' do - it 'enqueues a redirected message' do + it 'enqueues a redirected message for pushing' do push_access_check expect(Gitlab::Checks::ProjectMoved.fetch_message(user.id, project.id)).not_to be_nil end + + it 'allows push and pull access' do + aggregate_failures do + expect { push_access_check }.not_to raise_error + expect { pull_access_check }.not_to raise_error + end + end end - describe '#check_project_moved!', :clean_gitlab_redis_shared_state do + describe '#add_project_moved_message!', :clean_gitlab_redis_shared_state do before do project.add_master(user) end @@ -261,62 +268,18 @@ describe Gitlab::GitAccess do end end - context 'when a permanent redirect and ssh protocol' do + context 'with a redirect and ssh protocol' do let(:redirected_path) { 'some/other-path' } - before do - allow_any_instance_of(Gitlab::Checks::ProjectMoved).to receive(:permanent_redirect?).and_return(true) - end - - it 'allows push and pull access' do - aggregate_failures do - expect { push_access_check }.not_to raise_error - end - end - it_behaves_like 'check_project_moved' end - context 'with a permanent redirect and http protocol' do + context 'with a redirect and http protocol' do let(:redirected_path) { 'some/other-path' } let(:protocol) { 'http' } - before do - allow_any_instance_of(Gitlab::Checks::ProjectMoved).to receive(:permanent_redirect?).and_return(true) - end - - it 'allows_push and pull access' do - aggregate_failures do - expect { push_access_check }.not_to raise_error - end - end - it_behaves_like 'check_project_moved' end - - context 'with a temporal redirect and ssh protocol' do - let(:redirected_path) { 'some/other-path' } - - it 'blocks push and pull access' do - aggregate_failures do - expect { push_access_check }.to raise_error(described_class::ProjectMovedError, /Project '#{redirected_path}' was moved to '#{project.full_path}'/) - expect { push_access_check }.to raise_error(described_class::ProjectMovedError, /git remote set-url origin #{project.ssh_url_to_repo}/) - - expect { pull_access_check }.to raise_error(described_class::ProjectMovedError, /Project '#{redirected_path}' was moved to 
'#{project.full_path}'/) - expect { pull_access_check }.to raise_error(described_class::ProjectMovedError, /git remote set-url origin #{project.ssh_url_to_repo}/) - end - end - end - - context 'with a temporal redirect and http protocol' do - let(:redirected_path) { 'some/other-path' } - let(:protocol) { 'http' } - - it 'does not allow to push and pull access' do - expect { push_access_check }.to raise_error(described_class::ProjectMovedError, /git remote set-url origin #{project.http_url_to_repo}/) - expect { pull_access_check }.to raise_error(described_class::ProjectMovedError, /git remote set-url origin #{project.http_url_to_repo}/) - end - end end describe '#check_authentication_abilities!' do diff --git a/spec/lib/gitlab/gitaly_client/remote_service_spec.rb b/spec/lib/gitlab/gitaly_client/remote_service_spec.rb index 872377c93d8..f03c7e3f04b 100644 --- a/spec/lib/gitlab/gitaly_client/remote_service_spec.rb +++ b/spec/lib/gitlab/gitaly_client/remote_service_spec.rb @@ -58,4 +58,14 @@ describe Gitlab::GitalyClient::RemoteService do client.update_remote_mirror(ref_name, only_branches_matching) end end + + describe '.exists?' do + context "when the remote doesn't exist" do + let(:url) { 'https://gitlab.com/gitlab-org/ik-besta-niet-of-ik-word-geplaagd.git' } + + it 'returns false' do + expect(described_class.exists?(url)).to be(false) + end + end + end end diff --git a/spec/lib/gitlab/gitaly_client/util_spec.rb b/spec/lib/gitlab/gitaly_client/util_spec.rb index d1e0136f8c1..550db6db6d9 100644 --- a/spec/lib/gitlab/gitaly_client/util_spec.rb +++ b/spec/lib/gitlab/gitaly_client/util_spec.rb @@ -7,16 +7,19 @@ describe Gitlab::GitalyClient::Util do let(:gl_repository) { 'project-1' } let(:git_object_directory) { '.git/objects' } let(:git_alternate_object_directory) { ['/dir/one', '/dir/two'] } + let(:git_env) do + { + 'GIT_OBJECT_DIRECTORY_RELATIVE' => git_object_directory, + 'GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE' => git_alternate_object_directory + } + end subject do described_class.repository(repository_storage, relative_path, gl_repository) end it 'creates a Gitaly::Repository with the given data' do - allow(Gitlab::Git::Env).to receive(:[]).with('GIT_OBJECT_DIRECTORY_RELATIVE') - .and_return(git_object_directory) - allow(Gitlab::Git::Env).to receive(:[]).with('GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE') - .and_return(git_alternate_object_directory) + allow(Gitlab::Git::HookEnv).to receive(:all).with(gl_repository).and_return(git_env) expect(subject).to be_a(Gitaly::Repository) expect(subject.storage_name).to eq(repository_storage) diff --git a/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb b/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb index 5bedfc79dd3..1f0f1fdd7da 100644 --- a/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb @@ -38,8 +38,12 @@ describe Gitlab::GithubImport::Importer::RepositoryImporter do expect(project) .to receive(:wiki_repository_exists?) .and_return(false) + expect(Gitlab::GitalyClient::RemoteService) + .to receive(:exists?) 
+ .with("foo.wiki.git") + .and_return(true) - expect(importer.import_wiki?).to eq(true) + expect(importer.import_wiki?).to be(true) end it 'returns false if the GitHub wiki is disabled' do diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml index 44e4c6ff94b..0716852f57f 100644 --- a/spec/lib/gitlab/import_export/safe_model_attributes.yml +++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml @@ -265,7 +265,9 @@ CommitStatus: - target_url - description - artifacts_file +- artifacts_file_store - artifacts_metadata +- artifacts_metadata_store - erased_by_id - erased_at - artifacts_expire_at diff --git a/spec/lib/gitlab/verify/lfs_objects_spec.rb b/spec/lib/gitlab/verify/lfs_objects_spec.rb index 64f3a9660e0..0f890e2c7ce 100644 --- a/spec/lib/gitlab/verify/lfs_objects_spec.rb +++ b/spec/lib/gitlab/verify/lfs_objects_spec.rb @@ -31,5 +31,21 @@ describe Gitlab::Verify::LfsObjects do expect(failures.keys).to contain_exactly(lfs_object) expect(failure.to_s).to include('Checksum mismatch') end + + context 'with remote files' do + before do + stub_lfs_object_storage + end + + it 'skips LFS objects in object storage' do + local_failure = create(:lfs_object) + create(:lfs_object, :object_storage) + + failures = {} + described_class.new(batch_size: 10).run_batches { |_, failed| failures.merge!(failed) } + + expect(failures.keys).to contain_exactly(local_failure) + end + end end end diff --git a/spec/lib/gitlab/verify/uploads_spec.rb b/spec/lib/gitlab/verify/uploads_spec.rb index 6146ce61226..85768308edc 100644 --- a/spec/lib/gitlab/verify/uploads_spec.rb +++ b/spec/lib/gitlab/verify/uploads_spec.rb @@ -40,5 +40,21 @@ describe Gitlab::Verify::Uploads do expect(failures.keys).to contain_exactly(upload) expect(failure.to_s).to include('Checksum missing') end + + context 'with remote files' do + before do + stub_uploads_object_storage(AvatarUploader) + end + + it 'skips uploads in object storage' do + local_failure = create(:upload) + create(:upload, :object_storage) + + failures = {} + described_class.new(batch_size: 10).run_batches { |_, failed| failures.merge!(failed) } + + expect(failures.keys).to contain_exactly(local_failure) + end + end end end diff --git a/spec/lib/gitlab/workhorse_spec.rb b/spec/lib/gitlab/workhorse_spec.rb index 37a0bf1ad36..2b3ffb2d7c0 100644 --- a/spec/lib/gitlab/workhorse_spec.rb +++ b/spec/lib/gitlab/workhorse_spec.rb @@ -55,7 +55,7 @@ describe Gitlab::Workhorse do end end - context 'when Gitaly workhorse_archive feature is disabled', :skip_gitaly_mock do + context 'when Gitaly workhorse_archive feature is disabled', :disable_gitaly do it 'sets the header correctly' do key, command, params = decode_workhorse_header(subject) @@ -100,7 +100,7 @@ describe Gitlab::Workhorse do end end - context 'when Gitaly workhorse_send_git_patch feature is disabled', :skip_gitaly_mock do + context 'when Gitaly workhorse_send_git_patch feature is disabled', :disable_gitaly do it 'sets the header correctly' do key, command, params = decode_workhorse_header(subject) @@ -173,7 +173,7 @@ describe Gitlab::Workhorse do end end - context 'when Gitaly workhorse_send_git_diff feature is disabled', :skip_gitaly_mock do + context 'when Gitaly workhorse_send_git_diff feature is disabled', :disable_gitaly do it 'sets the header correctly' do key, command, params = decode_workhorse_header(subject) @@ -275,7 +275,7 @@ describe Gitlab::Workhorse do describe '.git_http_ok' do let(:user) { create(:user) } - let(:repo_path) { 
repository.path_to_repo } + let(:repo_path) { 'ignored but not allowed to be empty in gitlab-workhorse' } let(:action) { 'info_refs' } let(:params) do { @@ -455,7 +455,7 @@ describe Gitlab::Workhorse do end end - context 'when Gitaly workhorse_raw_show feature is disabled', :skip_gitaly_mock do + context 'when Gitaly workhorse_raw_show feature is disabled', :disable_gitaly do it 'sets the header correctly' do key, command, params = decode_workhorse_header(subject) diff --git a/spec/mailers/previews/notify_preview.rb b/spec/mailers/previews/notify_preview.rb index 580f0d56a92..43c3c89f140 100644 --- a/spec/mailers/previews/notify_preview.rb +++ b/spec/mailers/previews/notify_preview.rb @@ -65,7 +65,7 @@ class NotifyPreview < ActionMailer::Preview end def merge_request - @merge_request ||= project.merge_requests.find_by(source_branch: 'master', target_branch: 'feature') + @merge_request ||= project.merge_requests.first end def user diff --git a/spec/migrations/remove_empty_fork_networks_spec.rb b/spec/migrations/remove_empty_fork_networks_spec.rb index 7f7ce91378b..f6d030ab25c 100644 --- a/spec/migrations/remove_empty_fork_networks_spec.rb +++ b/spec/migrations/remove_empty_fork_networks_spec.rb @@ -19,6 +19,10 @@ describe RemoveEmptyForkNetworks, :migration do deleted_project.destroy! end + after do + Upload.reset_column_information + end + it 'deletes only the fork network without members' do expect(fork_networks.count).to eq(2) diff --git a/spec/models/ci/build_metadata_spec.rb b/spec/models/ci/build_metadata_spec.rb new file mode 100644 index 00000000000..268561ee941 --- /dev/null +++ b/spec/models/ci/build_metadata_spec.rb @@ -0,0 +1,61 @@ +require 'spec_helper' + +describe Ci::BuildMetadata do + set(:user) { create(:user) } + set(:group) { create(:group, :access_requestable) } + set(:project) { create(:project, :repository, group: group, build_timeout: 2000) } + + set(:pipeline) do + create(:ci_pipeline, project: project, + sha: project.commit.id, + ref: project.default_branch, + status: 'success') + end + + let(:build) { create(:ci_build, pipeline: pipeline) } + let(:build_metadata) { create(:ci_build_metadata, build: build) } + + describe '#update_timeout_state' do + subject { build_metadata } + + context 'when runner is not assigned to the job' do + it "doesn't change timeout value" do + expect { subject.update_timeout_state }.not_to change { subject.reload.timeout } + end + + it "doesn't change timeout_source value" do + expect { subject.update_timeout_state }.not_to change { subject.reload.timeout_source } + end + end + + context 'when runner is assigned to the job' do + before do + build.update_attributes(runner: runner) + end + + context 'when runner timeout is lower than project timeout' do + let(:runner) { create(:ci_runner, maximum_timeout: 1900) } + + it 'sets runner timeout' do + expect { subject.update_timeout_state }.to change { subject.reload.timeout }.to(1900) + end + + it 'sets runner_timeout_source' do + expect { subject.update_timeout_state }.to change { subject.reload.timeout_source }.to('runner_timeout_source') + end + end + + context 'when runner timeout is higher than project timeout' do + let(:runner) { create(:ci_runner, maximum_timeout: 2100) } + + it 'sets project timeout' do + expect { subject.update_timeout_state }.to change { subject.reload.timeout }.to(2000) + end + + it 'sets project_timeout_source' do + expect { subject.update_timeout_state }.to change { subject.reload.timeout_source }.to('project_timeout_source') + end + end + end + end +end diff --git 
a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 30a352fd090..f5534d22a54 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -198,6 +198,16 @@ describe Ci::Build do
     end
 
     context 'when legacy artifacts are used' do
+      let(:build) { create(:ci_build, :legacy_artifacts) }
+
+      subject { build.artifacts? }
+
+      context 'is expired' do
+        let(:build) { create(:ci_build, :legacy_artifacts, :expired) }
+
+        it { is_expected.to be_falsy }
+      end
+
       context 'artifacts archive does not exist' do
         let(:build) { create(:ci_build) }
 
@@ -208,13 +218,25 @@ describe Ci::Build do
         let(:build) { create(:ci_build, :legacy_artifacts) }
 
         it { is_expected.to be_truthy }
+      end
+    end
+  end
 
-      context 'is expired' do
-        let(:build) { create(:ci_build, :legacy_artifacts, :expired) }
+  describe '#browsable_artifacts?' do
+    subject { build.browsable_artifacts? }
 
-        it { is_expected.to be_falsy }
-      end
+    context 'artifacts metadata does not exist' do
+      before do
+        build.update_attributes(legacy_artifacts_metadata: nil)
       end
+
+      it { is_expected.to be_falsy }
+    end
+
+    context 'artifacts metadata does exist' do
+      let(:build) { create(:ci_build, :artifacts) }
+
+      it { is_expected.to be_truthy }
     end
   end
 
@@ -1249,12 +1271,6 @@ describe Ci::Build do
   end
 
   describe 'project settings' do
-    describe '#timeout' do
-      it 'returns project timeout configuration' do
-        expect(build.timeout).to eq(project.build_timeout)
-      end
-    end
-
     describe '#allow_git_fetch' do
       it 'returns project allow_git_fetch configuration' do
         expect(build.allow_git_fetch).to eq(project.build_allow_git_fetch)
@@ -1989,6 +2005,70 @@ describe Ci::Build do
     end
   end
 
+  describe 'state transition: pending: :running' do
+    let(:runner) { create(:ci_runner) }
+    let(:job) { create(:ci_build, :pending, runner: runner) }
+
+    before do
+      job.project.update_attribute(:build_timeout, 1800)
+    end
+
+    def run_job_without_exception
+      job.run!
+    rescue StateMachines::InvalidTransition
+    end
+
+    shared_examples 'saves data on transition' do
+      it 'saves timeout' do
+        expect { job.run! }.to change { job.reload.ensure_metadata.timeout }.from(nil).to(expected_timeout)
+      end
+
+      it 'saves timeout_source' do
+        expect { job.run! }.to change { job.reload.ensure_metadata.timeout_source }.from('unknown_timeout_source').to(expected_timeout_source)
+      end
+
+      context 'when Ci::BuildMetadata#update_timeout_state fails update' do
+        before do
+          allow_any_instance_of(Ci::BuildMetadata).to receive(:update_timeout_state).and_return(false)
+        end
+
+        it "doesn't save timeout" do
+          expect { run_job_without_exception }.not_to change { job.reload.ensure_metadata.timeout }
+        end
+
+        it "doesn't save timeout_source" do
+          expect { run_job_without_exception }.not_to change { job.reload.ensure_metadata.timeout_source }
+        end
+
+        it 'raises an exception' do
+          expect { job.run!
}.to raise_error(StateMachines::InvalidTransition)
+        end
+      end
+    end
+
+    context 'when runner timeout overrides project timeout' do
+      let(:expected_timeout) { 900 }
+      let(:expected_timeout_source) { 'runner_timeout_source' }
+
+      before do
+        runner.update_attribute(:maximum_timeout, 900)
+      end
+
+      it_behaves_like 'saves data on transition'
+    end
+
+    context "when runner timeout doesn't override project timeout" do
+      let(:expected_timeout) { 1800 }
+      let(:expected_timeout_source) { 'project_timeout_source' }
+
+      before do
+        runner.update_attribute(:maximum_timeout, 3600)
+      end
+
+      it_behaves_like 'saves data on transition'
+    end
+  end
+
   describe 'state transition: any => [:running]' do
     shared_examples 'validation is active' do
       context 'when depended job has not been completed yet' do
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index a2bd36537e6..1aa28434879 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -15,6 +15,50 @@ describe Ci::JobArtifact do
   it { is_expected.to delegate_method(:open).to(:file) }
   it { is_expected.to delegate_method(:exists?).to(:file) }
 
+  describe 'callbacks' do
+    subject { create(:ci_job_artifact, :archive) }
+
+    describe '#schedule_background_upload' do
+      context 'when object storage is disabled' do
+        before do
+          stub_artifacts_object_storage(enabled: false)
+        end
+
+        it 'does not schedule the migration' do
+          expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
+
+          subject
+        end
+      end
+
+      context 'when object storage is enabled' do
+        context 'when background upload is enabled' do
+          before do
+            stub_artifacts_object_storage(background_upload: true)
+          end
+
+          it 'schedules the model for migration' do
+            expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with('JobArtifactUploader', described_class.name, :file, kind_of(Numeric))
+
+            subject
+          end
+        end
+
+        context 'when background upload is disabled' do
+          before do
+            stub_artifacts_object_storage(background_upload: false)
+          end
+
+          it 'does not schedule the model for migration' do
+            expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
+
+            subject
+          end
+        end
+      end
+    end
+  end
+
   describe '#set_size' do
     it 'sets the size' do
       expect(artifact.size).to eq(106365)
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 4635f8cfe9d..92f00cfbc19 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -177,6 +177,24 @@ describe Ci::Pipeline, :mailer do
     end
   end
 
+  describe '#protected_ref?' do
+    it 'delegates method to project' do
+      expect(pipeline).not_to be_protected_ref
+    end
+  end
+
+  describe '#legacy_trigger' do
+    let(:trigger_request) { create(:ci_trigger_request) }
+
+    before do
+      pipeline.trigger_requests << trigger_request
+    end
+
+    it 'returns first trigger request' do
+      expect(pipeline.legacy_trigger).to eq trigger_request
+    end
+  end
+
   describe '#auto_canceled?' do
     subject { pipeline.auto_canceled?
}
 
@@ -215,142 +233,257 @@ describe Ci::Pipeline, :mailer do
   end
 
   describe 'pipeline stages' do
-    before do
-      create(:commit_status, pipeline: pipeline,
-                             stage: 'build',
-                             name: 'linux',
-                             stage_idx: 0,
-                             status: 'success')
-
-      create(:commit_status, pipeline: pipeline,
-                             stage: 'build',
-                             name: 'mac',
-                             stage_idx: 0,
-                             status: 'failed')
-
-      create(:commit_status, pipeline: pipeline,
-                             stage: 'deploy',
-                             name: 'staging',
-                             stage_idx: 2,
-                             status: 'running')
-
-      create(:commit_status, pipeline: pipeline,
-                             stage: 'test',
-                             name: 'rspec',
-                             stage_idx: 1,
-                             status: 'success')
-    end
-
     describe '#stage_seeds' do
-      let(:pipeline) do
-        build(:ci_pipeline, config: { rspec: { script: 'rake' } })
-      end
+      let(:pipeline) { build(:ci_pipeline, config: config) }
+      let(:config) { { rspec: { script: 'rake' } } }
 
       it 'returns preseeded stage seeds object' do
-        expect(pipeline.stage_seeds).to all(be_a Gitlab::Ci::Stage::Seed)
+        expect(pipeline.stage_seeds)
+          .to all(be_a Gitlab::Ci::Pipeline::Seed::Base)
+
         expect(pipeline.stage_seeds.count).to eq 1
       end
-    end
 
-    describe '#seeds_size' do
-      let(:pipeline) { build(:ci_pipeline_with_one_job) }
+      context 'when no refs policy is specified' do
+        let(:config) do
+          { production: { stage: 'deploy', script: 'cap prod' },
+            rspec: { stage: 'test', script: 'rspec' },
+            spinach: { stage: 'test', script: 'spinach' } }
+        end
 
-      it 'returns number of jobs in stage seeds' do
-        expect(pipeline.seeds_size).to eq 1
+        it 'correctly fabricates a stage seeds object' do
+          seeds = pipeline.stage_seeds
+
+          expect(seeds.size).to eq 2
+          expect(seeds.first.attributes[:name]).to eq 'test'
+          expect(seeds.second.attributes[:name]).to eq 'deploy'
+          expect(seeds.dig(0, 0, :name)).to eq 'rspec'
+          expect(seeds.dig(0, 1, :name)).to eq 'spinach'
+          expect(seeds.dig(1, 0, :name)).to eq 'production'
+        end
       end
-    end
 
-    describe '#legacy_stages' do
-      subject { pipeline.legacy_stages }
+      context 'when refs policy is specified' do
+        let(:pipeline) do
+          build(:ci_pipeline, ref: 'feature', tag: true, config: config)
+        end
 
-      context 'stages list' do
-        it 'returns ordered list of stages' do
-          expect(subject.map(&:name)).to eq(%w[build test deploy])
+        let(:config) do
+          { production: { stage: 'deploy', script: 'cap prod', only: ['master'] },
+            spinach: { stage: 'test', script: 'spinach', only: ['tags'] } }
+        end
+
+        it 'returns stage seeds only assigned to master' do
+          seeds = pipeline.stage_seeds
+
+          expect(seeds.size).to eq 1
+          expect(seeds.first.attributes[:name]).to eq 'test'
+          expect(seeds.dig(0, 0, :name)).to eq 'spinach'
         end
       end
 
-      context 'stages with statuses' do
-        let(:statuses) do
-          subject.map { |stage| [stage.name, stage.status] }
+      context 'when source policy is specified' do
+        let(:pipeline) { build(:ci_pipeline, source: :schedule, config: config) }
+
+        let(:config) do
+          { production: { stage: 'deploy', script: 'cap prod', only: ['triggers'] },
+            spinach: { stage: 'test', script: 'spinach', only: ['schedules'] } }
         end
 
-        it 'returns list of stages with correct statuses' do
-          expect(statuses).to eq([%w(build failed),
-                                  %w(test success),
-                                  %w(deploy running)])
+        it 'returns stage seeds only assigned to schedules' do
+          seeds = pipeline.stage_seeds
+
+          expect(seeds.size).to eq 1
+          expect(seeds.first.attributes[:name]).to eq 'test'
+          expect(seeds.dig(0, 0, :name)).to eq 'spinach'
         end
+      end
 
-        context 'when commit status is retried' do
-          before do
-            create(:commit_status, pipeline: pipeline,
-                                   stage: 'build',
-                                   name: 'mac',
-                                   stage_idx: 0,
-                                   status: 'success')
+      context 'when kubernetes policy is
specified' do + let(:config) do + { + spinach: { stage: 'test', script: 'spinach' }, + production: { + stage: 'deploy', + script: 'cap', + only: { kubernetes: 'active' } + } + } + end + + context 'when kubernetes is active' do + shared_examples 'same behavior between KubernetesService and Platform::Kubernetes' do + it 'returns seeds for kubernetes dependent job' do + seeds = pipeline.stage_seeds - pipeline.process! + expect(seeds.size).to eq 2 + expect(seeds.dig(0, 0, :name)).to eq 'spinach' + expect(seeds.dig(1, 0, :name)).to eq 'production' + end end - it 'ignores the previous state' do - expect(statuses).to eq([%w(build success), - %w(test success), - %w(deploy running)]) + context 'when user configured kubernetes from Integration > Kubernetes' do + let(:project) { create(:kubernetes_project) } + let(:pipeline) { build(:ci_pipeline, project: project, config: config) } + + it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes' end - end - end - context 'when there is a stage with warnings' do - before do - create(:commit_status, pipeline: pipeline, - stage: 'deploy', - name: 'prod:2', - stage_idx: 2, - status: 'failed', - allow_failure: true) + context 'when user configured kubernetes from CI/CD > Clusters' do + let!(:cluster) { create(:cluster, :project, :provided_by_gcp) } + let(:project) { cluster.project } + let(:pipeline) { build(:ci_pipeline, project: project, config: config) } + + it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes' + end end - it 'populates stage with correct number of warnings' do - deploy_stage = pipeline.legacy_stages.third + context 'when kubernetes is not active' do + it 'does not return seeds for kubernetes dependent job' do + seeds = pipeline.stage_seeds - expect(deploy_stage).not_to receive(:statuses) - expect(deploy_stage).to have_warnings + expect(seeds.size).to eq 1 + expect(seeds.dig(0, 0, :name)).to eq 'spinach' + end end end end - describe '#stages_count' do - it 'returns a valid number of stages' do - expect(pipeline.stages_count).to eq(3) - end - end + describe '#seeds_size' do + context 'when refs policy is specified' do + let(:config) do + { production: { stage: 'deploy', script: 'cap prod', only: ['master'] }, + spinach: { stage: 'test', script: 'spinach', only: ['tags'] } } + end - describe '#stages_names' do - it 'returns a valid names of stages' do - expect(pipeline.stages_names).to eq(%w(build test deploy)) + let(:pipeline) do + build(:ci_pipeline, ref: 'feature', tag: true, config: config) + end + + it 'returns real seeds size' do + expect(pipeline.seeds_size).to eq 1 + end end end - end - - describe '#legacy_stage' do - subject { pipeline.legacy_stage('test') } - context 'with status in stage' do + describe 'legacy stages' do before do - create(:commit_status, pipeline: pipeline, stage: 'test') + create(:commit_status, pipeline: pipeline, + stage: 'build', + name: 'linux', + stage_idx: 0, + status: 'success') + + create(:commit_status, pipeline: pipeline, + stage: 'build', + name: 'mac', + stage_idx: 0, + status: 'failed') + + create(:commit_status, pipeline: pipeline, + stage: 'deploy', + name: 'staging', + stage_idx: 2, + status: 'running') + + create(:commit_status, pipeline: pipeline, + stage: 'test', + name: 'rspec', + stage_idx: 1, + status: 'success') + end + + describe '#legacy_stages' do + subject { pipeline.legacy_stages } + + context 'stages list' do + it 'returns ordered list of stages' do + expect(subject.map(&:name)).to eq(%w[build test deploy]) + end + end + + 
context 'stages with statuses' do
+        let(:statuses) do
+          subject.map { |stage| [stage.name, stage.status] }
+        end
+
+        it 'returns list of stages with correct statuses' do
+          expect(statuses).to eq([%w(build failed),
+                                  %w(test success),
+                                  %w(deploy running)])
+        end
+
+        context 'when commit status is retried' do
+          before do
+            create(:commit_status, pipeline: pipeline,
+                                   stage: 'build',
+                                   name: 'mac',
+                                   stage_idx: 0,
+                                   status: 'success')
+
+            pipeline.process!
+          end
+
+          it 'ignores the previous state' do
+            expect(statuses).to eq([%w(build success),
+                                    %w(test success),
+                                    %w(deploy running)])
+          end
+        end
+      end
+
+      context 'when there is a stage with warnings' do
+        before do
+          create(:commit_status, pipeline: pipeline,
+                                 stage: 'deploy',
+                                 name: 'prod:2',
+                                 stage_idx: 2,
+                                 status: 'failed',
+                                 allow_failure: true)
+        end
+
+        it 'populates stage with correct number of warnings' do
+          deploy_stage = pipeline.legacy_stages.third
+
+          expect(deploy_stage).not_to receive(:statuses)
+          expect(deploy_stage).to have_warnings
+        end
+      end
+    end
+
+    describe '#stages_count' do
+      it 'returns a valid number of stages' do
+        expect(pipeline.stages_count).to eq(3)
+      end
     end
 
-      it { expect(subject).to be_a Ci::LegacyStage }
-      it { expect(subject.name).to eq 'test' }
-      it { expect(subject.statuses).not_to be_empty }
+    describe '#stages_names' do
+      it 'returns valid names of stages' do
+        expect(pipeline.stages_names).to eq(%w(build test deploy))
+      end
+    end
   end
 
-    context 'without status in stage' do
-      before do
-        create(:commit_status, pipeline: pipeline, stage: 'build')
+  describe '#legacy_stage' do
+    subject { pipeline.legacy_stage('test') }
+
+    context 'with status in stage' do
+      before do
+        create(:commit_status, pipeline: pipeline, stage: 'test')
+      end
+
+      it { expect(subject).to be_a Ci::LegacyStage }
+      it { expect(subject.name).to eq 'test' }
+      it { expect(subject.statuses).not_to be_empty }
     end
 
-    it 'return stage object' do
-      is_expected.to be_nil
+    context 'without status in stage' do
+      before do
+        create(:commit_status, pipeline: pipeline, stage: 'build')
+      end
+
+      it 'returns nil' do
+        is_expected.to be_nil
+      end
     end
   end
 
@@ -589,20 +722,6 @@ describe Ci::Pipeline, :mailer do
     end
   end
 
-  describe '#has_stage_seeds?' do
-    context 'when pipeline has stage seeds' do
-      subject { build(:ci_pipeline_with_one_job) }
-
-      it { is_expected.to have_stage_seeds }
-    end
-
-    context 'when pipeline does not have stage seeds' do
-      subject { create(:ci_pipeline_without_jobs) }
-
-      it { is_expected.not_to have_stage_seeds }
-    end
-  end
-
   describe '#has_warnings?' do
     subject { pipeline.has_warnings?
} diff --git a/spec/models/concerns/chronic_duration_attribute_spec.rb b/spec/models/concerns/chronic_duration_attribute_spec.rb new file mode 100644 index 00000000000..27c86e60e60 --- /dev/null +++ b/spec/models/concerns/chronic_duration_attribute_spec.rb @@ -0,0 +1,115 @@ +require 'spec_helper' + +shared_examples 'ChronicDurationAttribute reader' do + it 'contains dynamically created reader method' do + expect(subject.class).to be_public_method_defined(virtual_field) + end + + it 'outputs chronic duration formatted value' do + subject.send("#{source_field}=", 120) + + expect(subject.send(virtual_field)).to eq('2m') + end + + context 'when value is set to nil' do + it 'outputs nil' do + subject.send("#{source_field}=", nil) + + expect(subject.send(virtual_field)).to be_nil + end + end +end + +shared_examples 'ChronicDurationAttribute writer' do + it 'contains dynamically created writer method' do + expect(subject.class).to be_public_method_defined("#{virtual_field}=") + end + + before do + subject.send("#{virtual_field}=", '10m') + end + + it 'parses chronic duration input' do + expect(subject.send(source_field)).to eq(600) + end + + it 'passes validation' do + expect(subject.valid?).to be_truthy + end + + context 'when negative input is used' do + before do + subject.send("#{source_field}=", 3600) + end + + it "doesn't raise exception" do + expect { subject.send("#{virtual_field}=", '-10m') }.not_to raise_error(ChronicDuration::DurationParseError) + end + + it "doesn't change value" do + expect { subject.send("#{virtual_field}=", '-10m') }.not_to change { subject.send(source_field) } + end + + it "doesn't pass validation" do + subject.send("#{virtual_field}=", '-10m') + + expect(subject.valid?).to be_falsey + expect(subject.errors&.messages).to include(virtual_field => ['is not a correct duration']) + end + end + + context 'when empty input is used' do + before do + subject.send("#{virtual_field}=", '') + end + + it 'writes nil' do + expect(subject.send(source_field)).to be_nil + end + + it 'passes validation' do + expect(subject.valid?).to be_truthy + end + end + + context 'when nil input is used' do + before do + subject.send("#{virtual_field}=", nil) + end + + it 'writes nil' do + expect(subject.send(source_field)).to be_nil + end + + it 'passes validation' do + expect(subject.valid?).to be_truthy + end + + it "doesn't raise exception" do + expect { subject.send("#{virtual_field}=", nil) }.not_to raise_error(NoMethodError) + end + end +end + +describe 'ChronicDurationAttribute' do + let(:source_field) {:maximum_timeout} + let(:virtual_field) {:maximum_timeout_human_readable} + + subject { Ci::Runner.new } + + it_behaves_like 'ChronicDurationAttribute reader' + it_behaves_like 'ChronicDurationAttribute writer' +end + +describe 'ChronicDurationAttribute - reader' do + let(:source_field) {:timeout} + let(:virtual_field) {:timeout_human_readable} + + subject {Ci::BuildMetadata.new} + + it "doesn't contain dynamically created writer method" do + expect(subject.class).not_to be_public_method_defined("#{virtual_field}=") + end + + it_behaves_like 'ChronicDurationAttribute reader' +end diff --git a/spec/models/deploy_key_spec.rb b/spec/models/deploy_key_spec.rb index 3d7283e2164..41440c6d288 100644 --- a/spec/models/deploy_key_spec.rb +++ b/spec/models/deploy_key_spec.rb @@ -17,4 +17,25 @@ describe DeployKey, :mailer do should_not_email(user) end end + + describe '#user' do + let(:deploy_key) { create(:deploy_key) } + let(:user) { create(:user) } + + context 'when user is set' do + before do 
+ deploy_key.user = user + end + + it 'returns the user' do + expect(deploy_key.user).to be(user) + end + end + + context 'when user is not set' do + it 'returns the ghost user' do + expect(deploy_key.user).to eq(User.ghost) + end + end + end end diff --git a/spec/models/lfs_object_spec.rb b/spec/models/lfs_object_spec.rb new file mode 100644 index 00000000000..a182116d637 --- /dev/null +++ b/spec/models/lfs_object_spec.rb @@ -0,0 +1,85 @@ +require 'spec_helper' + +describe LfsObject do + describe '#local_store?' do + it 'returns true when file_store is nil' do + subject.file_store = nil + + expect(subject.local_store?).to eq true + end + + it 'returns true when file_store is equal to LfsObjectUploader::Store::LOCAL' do + subject.file_store = LfsObjectUploader::Store::LOCAL + + expect(subject.local_store?).to eq true + end + + it 'returns false when file_store is equal to LfsObjectUploader::Store::REMOTE' do + subject.file_store = LfsObjectUploader::Store::REMOTE + + expect(subject.local_store?).to eq false + end + end + + describe '#schedule_background_upload' do + before do + stub_lfs_setting(enabled: true) + end + + subject { create(:lfs_object, :with_file) } + + context 'when object storage is disabled' do + before do + stub_lfs_object_storage(enabled: false) + end + + it 'does not schedule the migration' do + expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async) + + subject + end + end + + context 'when object storage is enabled' do + context 'when background upload is enabled' do + context 'when licensed' do + before do + stub_lfs_object_storage(background_upload: true) + end + + it 'schedules the model for migration' do + expect(ObjectStorage::BackgroundMoveWorker) + .to receive(:perform_async) + .with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric)) + .once + + subject + end + + it 'schedules the model for migration once' do + expect(ObjectStorage::BackgroundMoveWorker) + .to receive(:perform_async) + .with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric)) + .once + + lfs_object = create(:lfs_object) + lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png") + lfs_object.save!
+ end + end + end + + context 'when background upload is disabled' do + before do + stub_lfs_object_storage(background_upload: false) + end + + it 'does not schedule the model for migration' do + expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async) + + subject + end + end + end + end +end diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb index ff5a6f63010..f73f44ca0ad 100644 --- a/spec/models/merge_request_spec.rb +++ b/spec/models/merge_request_spec.rb @@ -1961,6 +1961,17 @@ describe MergeRequest do expect(subject.merge_request_diff_for(merge_request_diff3.head_commit_sha)).to eq(merge_request_diff3) end end + + it 'runs a single query on the initial call, and none afterwards' do + expect { subject.merge_request_diff_for(merge_request_diff1.diff_refs) } + .not_to exceed_query_limit(1) + + expect { subject.merge_request_diff_for(merge_request_diff2.diff_refs) } + .not_to exceed_query_limit(0) + + expect { subject.merge_request_diff_for(merge_request_diff3.head_commit_sha) } + .not_to exceed_query_limit(0) + end end describe '#version_params_for' do diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb index e506c932d58..60ab52565cb 100644 --- a/spec/models/repository_spec.rb +++ b/spec/models/repository_spec.rb @@ -501,28 +501,6 @@ describe Repository do end end - describe '#create_hooks' do - let(:hook_path) { File.join(repository.path_to_repo, 'hooks') } - - it 'symlinks the global hooks directory' do - repository.create_hooks - - expect(File.symlink?(hook_path)).to be true - expect(File.readlink(hook_path)).to eq(Gitlab.config.gitlab_shell.hooks_path) - end - - it 'replaces existing symlink with the right directory' do - FileUtils.mkdir_p(hook_path) - - expect(File.symlink?(hook_path)).to be false - - repository.create_hooks - - expect(File.symlink?(hook_path)).to be true - expect(File.readlink(hook_path)).to eq(Gitlab.config.gitlab_shell.hooks_path) - end - end - describe "#create_dir" do it "commits a change that creates a new directory" do expect do diff --git a/spec/models/route_spec.rb b/spec/models/route_spec.rb index dfac82b327a..01238a89a81 100644 --- a/spec/models/route_spec.rb +++ b/spec/models/route_spec.rb @@ -16,66 +16,6 @@ describe Route do it { is_expected.to validate_presence_of(:source) } it { is_expected.to validate_presence_of(:path) } it { is_expected.to validate_uniqueness_of(:path).case_insensitive } - - describe '#ensure_permanent_paths' do - context 'when the route is not yet persisted' do - let(:new_route) { described_class.new(path: 'foo', source: build(:group)) } - - context 'when permanent conflicting redirects exist' do - it 'is invalid' do - redirect = build(:redirect_route, :permanent, path: 'foo/bar/baz') - redirect.save!(validate: false) - - expect(new_route.valid?).to be_falsey - expect(new_route.errors.first[1]).to eq('has been taken before') - end - end - - context 'when no permanent conflicting redirects exist' do - it 'is valid' do - expect(new_route.valid?).to be_truthy - end - end - end - - context 'when path has changed' do - before do - route.path = 'foo' - end - - context 'when permanent conflicting redirects exist' do - it 'is invalid' do - redirect = build(:redirect_route, :permanent, path: 'foo/bar/baz') - redirect.save!(validate: false) - - expect(route.valid?).to be_falsey - expect(route.errors.first[1]).to eq('has been taken before') - end - end - - context 'when no permanent conflicting redirects exist' do - it 'is valid' do - expect(route.valid?).to be_truthy - end - 
end - end - - context 'when path has not changed' do - context 'when permanent conflicting redirects exist' do - it 'is valid' do - redirect = build(:redirect_route, :permanent, path: 'git_lab/foo/bar') - redirect.save!(validate: false) - - expect(route.valid?).to be_truthy - end - end - context 'when no permanent conflicting redirects exist' do - it 'is valid' do - expect(route.valid?).to be_truthy - end - end - end - end end describe 'callbacks' do @@ -211,43 +151,31 @@ describe Route do end context 'when the source is a Project' do - it 'creates a temporal RedirectRoute' do + it 'creates a RedirectRoute' do project = create(:project) route = project.route redirect_route = route.create_redirect('foo') - expect(redirect_route.permanent?).to be_falsy + expect(redirect_route).not_to be_nil end end context 'when the source is not a project' do - it 'creates a permanent RedirectRoute' do - redirect_route = route.create_redirect('foo', permanent: true) - expect(redirect_route.permanent?).to be_truthy + it 'creates a RedirectRoute' do + redirect_route = route.create_redirect('foo') + expect(redirect_route).not_to be_nil end end end describe '#delete_conflicting_redirects' do - context 'with permanent redirect' do - it 'does not delete the redirect' do - route.create_redirect("#{route.path}/foo", permanent: true) - - expect do - route.delete_conflicting_redirects - end.not_to change { RedirectRoute.count } - end - end - - context 'with temporal redirect' do - let(:route) { create(:project).route } + let(:route) { create(:project).route } - it 'deletes the redirect' do - route.create_redirect("#{route.path}/foo") + it 'deletes the redirect' do + route.create_redirect("#{route.path}/foo") - expect do - route.delete_conflicting_redirects - end.to change { RedirectRoute.count }.by(-1) - end + expect do + route.delete_conflicting_redirects + end.to change { RedirectRoute.count }.by(-1) end context 'when a redirect route with the same path exists' do @@ -289,31 +217,18 @@ describe Route do end describe '#conflicting_redirects' do + let(:route) { create(:project).route } + it 'returns an ActiveRecord::Relation' do expect(route.conflicting_redirects).to be_an(ActiveRecord::Relation) end - context 'with permanent redirects' do - it 'does not return anything' do - route.create_redirect("#{route.path}/foo", permanent: true) - route.create_redirect("#{route.path}/foo/bar", permanent: true) - route.create_redirect("#{route.path}/baz/quz", permanent: true) + it 'returns the redirect routes' do + redirect1 = route.create_redirect("#{route.path}/foo") + redirect2 = route.create_redirect("#{route.path}/foo/bar") + redirect3 = route.create_redirect("#{route.path}/baz/quz") - expect(route.conflicting_redirects).to be_empty - end - end - - context 'with temporal redirects' do - let(:route) { create(:project).route } - - it 'returns the redirect routes' do - route = create(:project).route - redirect1 = route.create_redirect("#{route.path}/foo") - redirect2 = route.create_redirect("#{route.path}/foo/bar") - redirect3 = route.create_redirect("#{route.path}/baz/quz") - - expect(route.conflicting_redirects).to match_array([redirect1, redirect2, redirect3]) - end + expect(route.conflicting_redirects).to match_array([redirect1, redirect2, redirect3]) end context 'when a redirect route with the same path exists' do @@ -348,44 +263,6 @@ describe Route do end end - describe "#conflicting_redirect_exists?" 
do - context 'when a conflicting redirect exists' do - let(:group1) { create(:group, path: 'foo') } - let(:group2) { create(:group, path: 'baz') } - - it 'should not be saved' do - group1.path = 'bar' - group1.save - - group2.path = 'foo' - - expect(group2.save).to be_falsy - end - - it 'should return an error on path' do - group1.path = 'bar' - group1.save - - group2.path = 'foo' - group2.valid? - expect(group2.errors[:path]).to eq(['has been taken before']) - end - end - - context 'when a conflicting redirect does not exist' do - let(:project1) { create(:project, path: 'foo') } - let(:project2) { create(:project, path: 'baz') } - - it 'should be saved' do - project1.path = 'bar' - project1.save - - project2.path = 'foo' - expect(project2.save).to be_truthy - end - end - end - describe '#delete_conflicting_orphaned_routes' do context 'when there is a conflicting route' do let!(:conflicting_group) { create(:group, path: 'foo') } diff --git a/spec/models/service_spec.rb b/spec/models/service_spec.rb index 79f25dc4360..83ed3b203e6 100644 --- a/spec/models/service_spec.rb +++ b/spec/models/service_spec.rb @@ -58,6 +58,21 @@ describe Service do end describe "Template" do + describe '.build_from_template' do + context 'when template is invalid' do + it 'sets the service to be inactive' do + project = create(:project) + template = JiraService.new(template: true, active: true) + template.save(validate: false) + + service = described_class.build_from_template(project.id, template) + + expect(service).to be_valid + expect(service.active).to be false + end + end + end + describe "for pushover service" do let!(:service_template) do PushoverService.create( diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb index c61674fff13..100418da804 100644 --- a/spec/models/user_spec.rb +++ b/spec/models/user_spec.rb @@ -25,7 +25,7 @@ describe User do it { is_expected.to have_many(:group_members) } it { is_expected.to have_many(:groups) } it { is_expected.to have_many(:keys).dependent(:destroy) } - it { is_expected.to have_many(:deploy_keys).dependent(:destroy) } + it { is_expected.to have_many(:deploy_keys).dependent(:nullify) } it { is_expected.to have_many(:events).dependent(:destroy) } it { is_expected.to have_many(:issues).dependent(:destroy) } it { is_expected.to have_many(:notes).dependent(:destroy) } @@ -126,23 +126,6 @@ describe User do end end - context 'when the username was used by another user before' do - let(:username) { 'foo' } - let!(:other_user) { create(:user, username: username) } - - before do - other_user.username = 'bar' - other_user.save! - end - - it 'is invalid' do - user = build(:user, username: username) - - expect(user).not_to be_valid - expect(user.errors.full_messages).to eq(['Username has been taken before']) - end - end - context 'when the username is in use by another user' do let(:username) { 'foo' } let!(:other_user) { create(:user, username: username) } @@ -2699,27 +2682,19 @@ describe User do end end - describe "#username_previously_taken?" do - let(:user1) { create(:user, username: 'foo') } + context 'changing a username' do + let(:user) { create(:user, username: 'foo') } - context 'when the username has been taken before' do - before do - user1.username = 'bar' - user1.save! - end - - it 'should raise an ActiveRecord::RecordInvalid exception' do - user2 = build(:user, username: 'foo') - expect { user2.save!
}.to raise_error(ActiveRecord::RecordInvalid, /Username has been taken before/) - end + it 'creates a redirect route' do + expect { user.update!(username: 'bar') } + .to change { RedirectRoute.where(path: 'foo').count }.by(1) end - context 'when the username has not been taken before' do - it 'should be valid' do - expect(RedirectRoute.count).to eq(0) - user2 = build(:user, username: 'baz') - expect(user2).to be_valid - end + it 'deletes the redirect when a user with the old username is created' do + user.update!(username: 'bar') + + expect { create(:user, username: 'foo') } + .to change { RedirectRoute.where(path: 'foo').count }.by(-1) end end end diff --git a/spec/policies/protected_branch_policy_spec.rb b/spec/policies/protected_branch_policy_spec.rb new file mode 100644 index 00000000000..b39de42d721 --- /dev/null +++ b/spec/policies/protected_branch_policy_spec.rb @@ -0,0 +1,22 @@ +require 'spec_helper' + +describe ProtectedBranchPolicy do + let(:user) { create(:user) } + let(:name) { 'feature' } + let(:protected_branch) { create(:protected_branch, name: name) } + let(:project) { protected_branch.project } + + subject { described_class.new(user, protected_branch) } + + it 'branches can be updated by project masters' do + project.add_master(user) + + is_expected.to be_allowed(:update_protected_branch) + end + + it "branches can't be updated by guests" do + project.add_guest(user) + + is_expected.to be_disallowed(:update_protected_branch) + end +end diff --git a/spec/presenters/project_presenter_spec.rb b/spec/presenters/project_presenter_spec.rb index f8c93d91ec5..55962f345d4 100644 --- a/spec/presenters/project_presenter_spec.rb +++ b/spec/presenters/project_presenter_spec.rb @@ -339,7 +339,7 @@ describe ProjectPresenter do it 'returns link to clusters page if more than one exists' do project.add_master(user) - create(:cluster, projects: [project]) + create(:cluster, :production_environment, projects: [project]) create(:cluster, projects: [project]) expect(presenter.kubernetes_cluster_anchor_data).to eq(OpenStruct.new(enabled: true, diff --git a/spec/requests/api/commits_spec.rb b/spec/requests/api/commits_spec.rb index 852f67db958..8ad19e3f0f5 100644 --- a/spec/requests/api/commits_spec.rb +++ b/spec/requests/api/commits_spec.rb @@ -1141,4 +1141,33 @@ describe API::Commits do end end end + + describe 'GET /projects/:id/repository/commits/:sha/merge_requests' do + let!(:project) { create(:project, :repository, :private) } + let!(:merged_mr) { create(:merge_request, source_project: project, source_branch: 'master', target_branch: 'feature') } + let(:commit) { merged_mr.merge_request_diff.commits.last } + + it 'returns the correct merge request' do + get api("/projects/#{project.id}/repository/commits/#{commit.id}/merge_requests", user) + + expect(response).to have_gitlab_http_status(200) + expect(response).to include_pagination_headers + expect(json_response.length).to eq(1) + expect(json_response[0]['id']).to eq(merged_mr.id) + end + + it 'returns 403 for an unauthorized user' do + project.add_guest(user) + + get api("/projects/#{project.id}/repository/commits/#{commit.id}/merge_requests", user) + + expect(response).to have_gitlab_http_status(403) + end + + it 'responds 404 when the commit does not exist' do + get api("/projects/#{project.id}/repository/commits/a7d26f00c35b/merge_requests", user) + + expect(response).to have_gitlab_http_status(404) + end + end end diff --git a/spec/requests/api/deploy_keys_spec.rb b/spec/requests/api/deploy_keys_spec.rb index
0772b3f2e64..ae9c0e9c304 100644 --- a/spec/requests/api/deploy_keys_spec.rb +++ b/spec/requests/api/deploy_keys_spec.rb @@ -91,6 +91,10 @@ describe API::DeployKeys do expect do post api("/projects/#{project.id}/deploy_keys", admin), key_attrs end.to change { project.deploy_keys.count }.by(1) + + new_key = project.deploy_keys.last + expect(new_key.key).to eq(key_attrs[:key]) + expect(new_key.user).to eq(admin) end it 'returns an existing ssh key when attempting to add a duplicate' do diff --git a/spec/requests/api/internal_spec.rb b/spec/requests/api/internal_spec.rb index 3cb90a1b8ef..db8c5f963d6 100644 --- a/spec/requests/api/internal_spec.rb +++ b/spec/requests/api/internal_spec.rb @@ -251,44 +251,23 @@ describe API::Internal do end context 'with env passed as a JSON' do - context 'when relative path envs are not set' do - it 'sets env in RequestStore' do - expect(Gitlab::Git::Env).to receive(:set).with({ - 'GIT_OBJECT_DIRECTORY' => 'foo', - 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => 'bar' - }) - - push(key, project.wiki, env: { - GIT_OBJECT_DIRECTORY: 'foo', - GIT_ALTERNATE_OBJECT_DIRECTORIES: 'bar' - }.to_json) + let(:gl_repository) { project.gl_repository(is_wiki: true) } - expect(response).to have_gitlab_http_status(200) - end - end + it 'sets env in RequestStore' do + obj_dir_relative = './objects' + alt_obj_dirs_relative = ['./alt-objects-1', './alt-objects-2'] - context 'when relative path envs are set' do - it 'sets env in RequestStore' do - obj_dir_relative = './objects' - alt_obj_dirs_relative = ['./alt-objects-1', './alt-objects-2'] - repo_path = project.wiki.repository.path_to_repo - - expect(Gitlab::Git::Env).to receive(:set).with({ - 'GIT_OBJECT_DIRECTORY' => File.join(repo_path, obj_dir_relative), - 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => alt_obj_dirs_relative.map { |d| File.join(repo_path, d) }, - 'GIT_OBJECT_DIRECTORY_RELATIVE' => obj_dir_relative, - 'GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE' => alt_obj_dirs_relative - }) - - push(key, project.wiki, env: { - GIT_OBJECT_DIRECTORY: 'foo', - GIT_ALTERNATE_OBJECT_DIRECTORIES: 'bar', - GIT_OBJECT_DIRECTORY_RELATIVE: obj_dir_relative, - GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE: alt_obj_dirs_relative - }.to_json) + expect(Gitlab::Git::HookEnv).to receive(:set).with(gl_repository, { + 'GIT_OBJECT_DIRECTORY_RELATIVE' => obj_dir_relative, + 'GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE' => alt_obj_dirs_relative + }) - expect(response).to have_gitlab_http_status(200) - end + push(key, project.wiki, env: { + GIT_OBJECT_DIRECTORY_RELATIVE: obj_dir_relative, + GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE: alt_obj_dirs_relative + }.to_json) + + expect(response).to have_gitlab_http_status(200) end end diff --git a/spec/requests/api/jobs_spec.rb b/spec/requests/api/jobs_spec.rb index 6192bbd4abb..3ffdfdc0e9a 100644 --- a/spec/requests/api/jobs_spec.rb +++ b/spec/requests/api/jobs_spec.rb @@ -1,6 +1,8 @@ require 'spec_helper' describe API::Jobs do + include HttpIOHelpers + set(:project) do create(:project, :repository, public_builds: false) end @@ -112,6 +114,7 @@ describe API::Jobs do let(:query) { Hash.new } before do + job get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), query end @@ -335,10 +338,55 @@ describe API::Jobs do end end + context 'when artifacts are stored remotely' do + let(:proxy_download) { false } + + before do + stub_artifacts_object_storage(proxy_download: proxy_download) + end + + let(:job) { create(:ci_build, pipeline: pipeline) } + let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, 
job: job) } + + before do + job.reload + + get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user) + end + + context 'when proxy download is enabled' do + let(:proxy_download) { true } + + it 'responds with the workhorse send-url' do + expect(response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("send-url:") + end + end + + context 'when proxy download is disabled' do + it 'returns location redirect' do + expect(response).to have_gitlab_http_status(302) + end + end + + context 'authorized user' do + it 'returns the file remote URL' do + expect(response).to redirect_to(artifact.file.url) + end + end + + context 'unauthorized user' do + let(:api_user) { nil } + + it 'does not return specific job artifacts' do + expect(response).to have_gitlab_http_status(404) + end + end + end + it 'does not return job artifacts if not uploaded' do get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user) - expect(response).to have_gitlab_http_status(404) + expect(response).to have_gitlab_http_status(:not_found) end end end @@ -349,6 +397,7 @@ describe API::Jobs do let(:job) { create(:ci_build, :artifacts, pipeline: pipeline, user: api_user) } before do + stub_artifacts_object_storage job.success end @@ -412,9 +461,24 @@ describe API::Jobs do "attachment; filename=#{job.artifacts_file.filename}" } end - it { expect(response).to have_gitlab_http_status(200) } + it { expect(response).to have_http_status(:ok) } it { expect(response.headers).to include(download_headers) } end + + context 'when artifacts are stored remotely' do + let(:job) { create(:ci_build, pipeline: pipeline, user: api_user) } + let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) } + + before do + job.reload + + get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user) + end + + it 'returns location redirect' do + expect(response).to have_http_status(:found) + end + end end context 'with regular branch' do @@ -451,6 +515,22 @@ describe API::Jobs do end context 'authorized user' do + context 'when trace is in ObjectStorage' do + let!(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) } + + before do + stub_remote_trace_206 + allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) 
{ false } + allow_any_instance_of(JobArtifactUploader).to receive(:url) { remote_trace_url } + allow_any_instance_of(JobArtifactUploader).to receive(:size) { remote_trace_size } + end + + it 'returns specific job trace' do + expect(response).to have_gitlab_http_status(200) + expect(response.body).to eq(job.trace.raw) + end + end + context 'when trace is artifact' do let(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) } diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb index cee93f6ed14..d73a42f48ad 100644 --- a/spec/requests/api/projects_spec.rb +++ b/spec/requests/api/projects_spec.rb @@ -1718,6 +1718,12 @@ describe API::Projects do group end + let(:group3) do + group = create(:group, name: 'group3_name', parent: group2) + group.add_owner(user2) + group + end + before do project.add_reporter(user2) end @@ -1813,6 +1819,15 @@ describe API::Projects do expect(json_response['namespace']['name']).to eq(group2.name) end + it 'forks to owned subgroup' do + full_path = "#{group2.path}/#{group3.path}" + post api("/projects/#{project.id}/fork", user2), namespace: full_path + + expect(response).to have_gitlab_http_status(201) + expect(json_response['namespace']['name']).to eq(group3.name) + expect(json_response['namespace']['full_path']).to eq(full_path) + end + it 'fails to fork to not owned group' do post api("/projects/#{project.id}/fork", user2), namespace: group.name diff --git a/spec/requests/api/protected_branches_spec.rb b/spec/requests/api/protected_branches_spec.rb index 1d23e023bb6..576fde46615 100644 --- a/spec/requests/api/protected_branches_spec.rb +++ b/spec/requests/api/protected_branches_spec.rb @@ -193,6 +193,19 @@ describe API::ProtectedBranches do expect(json_response['merge_access_levels'][0]['access_level']).to eq(Gitlab::Access::MASTER) end end + + context 'when a policy restricts rule creation' do + before do + policy = instance_double(ProtectedBranchPolicy, can?: false) + expect(ProtectedBranchPolicy).to receive(:new).and_return(policy) + end + + it "prevents creation of the protected branch rule" do + post post_endpoint, name: branch_name + + expect(response).to have_gitlab_http_status(403) + end + end end context 'when authenticated as a guest' do @@ -209,18 +222,20 @@ describe API::ProtectedBranches do end describe "DELETE /projects/:id/protected_branches/unprotect/:branch" do + let(:delete_endpoint) { api("/projects/#{project.id}/protected_branches/#{branch_name}", user) } + before do project.add_master(user) end it "unprotects a single branch" do - delete api("/projects/#{project.id}/protected_branches/#{branch_name}", user) + delete delete_endpoint expect(response).to have_gitlab_http_status(204) end it_behaves_like '412 response' do - let(:request) { api("/projects/#{project.id}/protected_branches/#{branch_name}", user) } + let(:request) { delete_endpoint } end it "returns 404 if branch does not exist" do @@ -229,11 +244,24 @@ describe API::ProtectedBranches do expect(response).to have_gitlab_http_status(404) end + context 'when a policy restricts rule deletion' do + before do + policy = instance_double(ProtectedBranchPolicy, can?: false) + expect(ProtectedBranchPolicy).to receive(:new).and_return(policy) + end + + it "prevents deletion of the protected branch rule" do + delete delete_endpoint + + expect(response).to have_gitlab_http_status(403) + end + end + context 'when branch has a wildcard in its name' do let(:protected_name) { 'feature*' } it "unprotects a wildcard branch" do - delete
api("/projects/#{project.id}/protected_branches/#{branch_name}", user) + delete delete_endpoint expect(response).to have_gitlab_http_status(204) end diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb index 95c23726a79..5084b36c761 100644 --- a/spec/requests/api/runner_spec.rb +++ b/spec/requests/api/runner_spec.rb @@ -109,6 +109,26 @@ describe API::Runner do end end + context 'when maximum job timeout is specified' do + it 'creates runner' do + post api('/runners'), token: registration_token, + maximum_timeout: 9000 + + expect(response).to have_gitlab_http_status 201 + expect(Ci::Runner.first.maximum_timeout).to eq(9000) + end + + context 'when maximum job timeout is empty' do + it 'creates runner' do + post api('/runners'), token: registration_token, + maximum_timeout: '' + + expect(response).to have_gitlab_http_status 201 + expect(Ci::Runner.first.maximum_timeout).to be_nil + end + end + end + %w(name version revision platform architecture).each do |param| context "when info parameter '#{param}' info is present" do let(:value) { "#{param}_value" } @@ -200,7 +220,7 @@ describe API::Runner do let(:project) { create(:project, shared_runners_enabled: false) } let(:pipeline) { create(:ci_pipeline_without_jobs, project: project, ref: 'master') } let(:runner) { create(:ci_runner) } - let!(:job) do + let(:job) do create(:ci_build, :artifacts, :extended_options, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0, commands: "ls\ndate") end @@ -215,6 +235,7 @@ describe API::Runner do let(:user_agent) { 'gitlab-runner 9.0.0 (9-0-stable; go1.7.4; linux/amd64)' } before do + job stub_container_registry_config(enabled: false) end @@ -339,12 +360,12 @@ describe API::Runner do let(:expected_steps) do [{ 'name' => 'script', 'script' => %w(ls date), - 'timeout' => job.timeout, + 'timeout' => job.metadata_timeout, 'when' => 'on_success', 'allow_failure' => false }, { 'name' => 'after_script', 'script' => %w(ls date), - 'timeout' => job.timeout, + 'timeout' => job.metadata_timeout, 'when' => 'always', 'allow_failure' => true }] end @@ -647,6 +668,41 @@ describe API::Runner do end end end + + describe 'timeout support' do + context 'when project specifies job timeout' do + let(:project) { create(:project, shared_runners_enabled: false, build_timeout: 1234) } + + it 'contains info about timeout taken from project' do + request_job + + expect(response).to have_gitlab_http_status(201) + expect(json_response['runner_info']).to include({ 'timeout' => 1234 }) + end + + context 'when runner specifies lower timeout' do + let(:runner) { create(:ci_runner, maximum_timeout: 1000) } + + it 'contains info about timeout overridden by runner' do + request_job + + expect(response).to have_gitlab_http_status(201) + expect(json_response['runner_info']).to include({ 'timeout' => 1000 }) + end + end + + context 'when runner specifies bigger timeout' do + let(:runner) { create(:ci_runner, maximum_timeout: 2000) } + + it 'contains info about timeout not overridden by runner' do + request_job + + expect(response).to have_gitlab_http_status(201) + expect(json_response['runner_info']).to include({ 'timeout' => 1234 }) + end + end + end + end end def request_job(token = runner.token, **params) @@ -888,6 +944,7 @@ describe API::Runner do let(:file_upload2) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/gif') } before do + stub_artifacts_object_storage job.run! 
end @@ -1179,27 +1236,67 @@ describe 'GET /api/v4/jobs/:id/artifacts' do let(:token) { job.token } - before do - download_artifact - end - context 'when job has artifacts' do - let(:job) { create(:ci_build, :artifacts) } - let(:download_headers) do - { 'Content-Transfer-Encoding' => 'binary', - 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' } + let(:job) { create(:ci_build) } + let(:store) { JobArtifactUploader::Store::LOCAL } + + before do + create(:ci_job_artifact, :archive, file_store: store, job: job) end context 'when using job token' do - it 'download artifacts' do - expect(response).to have_gitlab_http_status(200) - expect(response.headers).to include download_headers + context 'when artifacts are stored locally' do + let(:download_headers) do + { 'Content-Transfer-Encoding' => 'binary', + 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' } + end + + before do + download_artifact + end + + it 'downloads artifacts' do + expect(response).to have_http_status(200) + expect(response.headers).to include download_headers + end + end + + context 'when artifacts are stored remotely' do + let(:store) { JobArtifactUploader::Store::REMOTE } + let!(:job) { create(:ci_build) } + + context 'when proxy download is being used' do + before do + download_artifact(direct_download: false) + end + + it 'uses workhorse send-url' do + expect(response).to have_gitlab_http_status(200) + expect(response.headers).to include( + 'Gitlab-Workhorse-Send-Data' => /send-url:/) + end + end + + context 'when direct download is being used' do + before do + download_artifact(direct_download: true) + end + + it 'receives a redirect for downloading artifacts' do + expect(response).to have_gitlab_http_status(302) + expect(response.headers).to include('Location') + end + end + end end context 'when using runners token' do let(:token) { job.project.runners_token } + before do + download_artifact + end + it 'responds with forbidden' do expect(response).to have_gitlab_http_status(403) end @@ -1208,12 +1305,16 @@ describe API::Runner do context 'when job does not have artifacts' do it 'responds with not found' do + download_artifact + expect(response).to have_gitlab_http_status(404) end end def download_artifact(params = {}, request_headers = headers) params = params.merge(token: token) + job.reload + get api("/jobs/#{job.id}/artifacts"), params, request_headers end end diff --git a/spec/requests/api/runners_spec.rb b/spec/requests/api/runners_spec.rb index ec5cad4f4fd..d30f0cf36e2 100644 --- a/spec/requests/api/runners_spec.rb +++ b/spec/requests/api/runners_spec.rb @@ -123,6 +123,7 @@ describe API::Runners do expect(response).to have_gitlab_http_status(200) expect(json_response['description']).to eq(shared_runner.description) + expect(json_response['maximum_timeout']).to be_nil end end @@ -192,7 +193,8 @@ describe API::Runners do tag_list: ['ruby2.1', 'pgsql', 'mysql'], run_untagged: 'false', locked: 'true', - access_level: 'ref_protected') + access_level: 'ref_protected', + maximum_timeout: 1234) shared_runner.reload expect(response).to have_gitlab_http_status(200) @@ -204,6 +206,7 @@ describe API::Runners do expect(shared_runner.ref_protected?).to be_truthy expect(shared_runner.ensure_runner_queue_value) .not_to eq(runner_queue_value) + expect(shared_runner.maximum_timeout).to eq(1234) end end diff --git a/spec/requests/api/v3/builds_spec.rb b/spec/requests/api/v3/builds_spec.rb index 79041c6a792..00f067889a0 100644 --- a/spec/requests/api/v3/builds_spec.rb +++ 
b/spec/requests/api/v3/builds_spec.rb @@ -216,6 +216,7 @@ describe API::V3::Builds do describe 'GET /projects/:id/builds/:build_id/artifacts' do before do + stub_artifacts_object_storage get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user) end @@ -230,13 +231,24 @@ describe API::V3::Builds do end it 'returns specific job artifacts' do - expect(response).to have_gitlab_http_status(200) + expect(response).to have_http_status(200) expect(response.headers).to include(download_headers) expect(response.body).to match_file(build.artifacts_file.file.file) end end end + context 'when artifacts are stored remotely' do + let(:build) { create(:ci_build, pipeline: pipeline) } + let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: build) } + + it 'returns location redirect' do + get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user) + + expect(response).to have_gitlab_http_status(302) + end + end + context 'unauthorized user' do let(:api_user) { nil } @@ -256,6 +268,7 @@ describe API::V3::Builds do let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) } before do + stub_artifacts_object_storage build.success end @@ -318,9 +331,24 @@ describe API::V3::Builds do "attachment; filename=#{build.artifacts_file.filename}" } end - it { expect(response).to have_gitlab_http_status(200) } + it { expect(response).to have_http_status(200) } it { expect(response.headers).to include(download_headers) } end + + context 'when artifacts are stored remotely' do + let(:build) { create(:ci_build, pipeline: pipeline) } + let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: build) } + + before do + build.reload + + get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user) + end + + it 'returns location redirect' do + expect(response).to have_http_status(302) + end + end end context 'with regular branch' do diff --git a/spec/requests/git_http_spec.rb b/spec/requests/git_http_spec.rb index 6dbbb1ad7bb..494db30e8e0 100644 --- a/spec/requests/git_http_spec.rb +++ b/spec/requests/git_http_spec.rb @@ -163,7 +163,7 @@ describe 'Git HTTP requests' do download(path) do |response| json_body = ActiveSupport::JSON.decode(response.body) - expect(json_body['RepoPath']).to include(wiki.repository.disk_path) + expect(json_body['Repository']['relative_path']).to eq(wiki.repository.relative_path) end end end @@ -344,20 +344,11 @@ describe 'Git HTTP requests' do context 'and the user requests a redirected path' do let!(:redirect) { project.route.create_redirect('foo/bar') } let(:path) { "#{redirect.path}.git" } - let(:project_moved_message) do - <<-MSG.strip_heredoc - Project '#{redirect.path}' was moved to '#{project.full_path}'. - Please update your Git remote: - - git remote set-url origin #{project.http_url_to_repo} and try again. - MSG - end - - it 'downloads get status 404 with "project was moved" message' do + it 'downloads get status 200 for redirects' do clone_get(path, {}) - expect(response).to have_gitlab_http_status(:not_found) - expect(response.body).to match(project_moved_message) + + expect(response).to have_gitlab_http_status(:ok) end end end @@ -559,20 +550,19 @@ describe 'Git HTTP requests' do Please update your Git remote: - git remote set-url origin #{project.http_url_to_repo} and try again. + git remote set-url origin #{project.http_url_to_repo}. 
MSG end - it 'downloads get status 404 with "project was moved" message' do + it 'downloads get status 200' do clone_get(path, env) + + expect(response).to have_gitlab_http_status(:ok) end it 'uploads get status 200' do upload(path, env) do |response| - expect(response).to have_gitlab_http_status(:not_found) - expect(response.body).to match(project_moved_message) + expect(response).to have_gitlab_http_status(:ok) end end end diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb index 971b45c411d..1e6bd993c08 100644 --- a/spec/requests/lfs_http_spec.rb +++ b/spec/requests/lfs_http_spec.rb @@ -191,10 +191,12 @@ describe 'Git LFS API and storage' do describe 'when fetching lfs object' do let(:project) { create(:project) } let(:update_permissions) { } + let(:before_get) { } before do enable_lfs update_permissions + before_get get "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}", nil, headers end @@ -239,6 +241,38 @@ describe 'Git LFS API and storage' do end it_behaves_like 'responds with a file' + + context 'when LFS uses object storage' do + context 'when proxy download is enabled' do + let(:before_get) do + stub_lfs_object_storage(proxy_download: true) + lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE) + end + + it 'responds with status 200' do + expect(response).to have_gitlab_http_status(200) + end + + it 'responds with the workhorse send-url' do + expect(response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("send-url:") + end + end + + context 'when proxy download is disabled' do + let(:before_get) do + stub_lfs_object_storage(proxy_download: false) + lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE) + end + + it 'responds with redirect' do + expect(response).to have_gitlab_http_status(302) + end + + it 'responds with the file location' do + expect(response.location).to include(lfs_object.reload.file.path) + end + end + end end end @@ -945,22 +979,61 @@ describe 'Git LFS API and storage' do end context 'and request is sent by gitlab-workhorse to authorize the request' do - before do - put_authorize + shared_examples 'a valid response' do + before do + put_authorize + end + + it 'responds with status 200' do + expect(response).to have_gitlab_http_status(200) + end + + it 'uses the gitlab-workhorse content type' do + expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE) + end end - it 'responds with status 200' do - expect(response).to have_gitlab_http_status(200) + shared_examples 'a local file' do + it_behaves_like 'a valid response' do + it 'responds with status 200, location of lfs store and object details' do + expect(json_response['TempPath']).to eq(LfsObjectUploader.workhorse_local_upload_path) + expect(json_response['RemoteObject']).to be_nil + expect(json_response['LfsOid']).to eq(sample_oid) + expect(json_response['LfsSize']).to eq(sample_size) + end + end end - it 'uses the gitlab-workhorse content type' do - expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE) + context 'when using local storage' do + it_behaves_like 'a local file' end - it 'responds with status 200, location of lfs store and object details' do - expect(json_response['StoreLFSPath']).to eq(LfsObjectUploader.workhorse_upload_path) - expect(json_response['LfsOid']).to eq(sample_oid) - expect(json_response['LfsSize']).to eq(sample_size)
+ context 'when using remote storage' do + context 'when direct upload is enabled' do + before do + stub_lfs_object_storage(enabled: true, direct_upload: true) + end + + it_behaves_like 'a valid response' do + it 'responds with status 200, location of lfs remote store and object details' do + expect(json_response['TempPath']).to be_nil + expect(json_response['RemoteObject']).to have_key('ID') + expect(json_response['RemoteObject']).to have_key('GetURL') + expect(json_response['RemoteObject']).to have_key('StoreURL') + expect(json_response['RemoteObject']).to have_key('DeleteURL') + expect(json_response['LfsOid']).to eq(sample_oid) + expect(json_response['LfsSize']).to eq(sample_size) + end + end + end + + context 'when direct upload is disabled' do + before do + stub_lfs_object_storage(enabled: true, direct_upload: false) + end + + it_behaves_like 'a local file' + end end end @@ -978,14 +1051,95 @@ describe 'Git LFS API and storage' do end end + context 'and workhorse requests upload finalize for a new lfs object' do + before do + lfs_object.destroy + end + + context 'with object storage disabled' do + it "doesn't attempt to migrate file to object storage" do + expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async) + + put_finalize(with_tempfile: true) + end + end + + context 'with object storage enabled' do + context 'and direct upload enabled' do + let!(:fog_connection) do + stub_lfs_object_storage(direct_upload: true) + end + + ['123123', '../../123123'].each do |remote_id| + context "with invalid remote_id: #{remote_id}" do + subject do + put_finalize_with_args('file.remote_id' => remote_id) + end + + it 'responds with status 403' do + subject + + expect(response).to have_gitlab_http_status(403) + end + end + end + + context 'with valid remote_id' do + before do + fog_connection.directories.get('lfs-objects').files.create( + key: 'tmp/upload/12312300', + body: 'content' + ) + end + + subject do + put_finalize_with_args( + 'file.remote_id' => '12312300', + 'file.name' => 'name') + end + + it 'responds with status 200' do + subject + + expect(response).to have_gitlab_http_status(200) + end + + it 'links the lfs object to the project' do + subject + + expect(LfsObject.last.projects).to include(project) + end + + it 'has a valid file' do + subject + + expect(LfsObject.last.file_store).to eq(ObjectStorage::Store::REMOTE) + expect(LfsObject.last.file).to be_exists + end + end + end + + context 'and background upload enabled' do + before do + stub_lfs_object_storage(background_upload: true) + end + + it 'schedules migration of file to object storage' do + expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with('LfsObjectUploader', 'LfsObject', :file, kind_of(Numeric)) + + put_finalize(with_tempfile: true) + end + end + end + end + context 'invalid tempfiles' do - it 'rejects slashes in the tempfile name (path traversal' do - put_finalize('foo/bar') - expect(response).to have_gitlab_http_status(403) + before do + lfs_object.destroy end - it 'rejects tempfile names that do not start with the oid' do - put_finalize("foo#{sample_oid}") + it 'rejects slashes in the tempfile name (path traversal)' do + put_finalize('../bar', with_tempfile: true) expect(response).to have_gitlab_http_status(403) end end @@ -1075,7 +1229,7 @@ describe 'Git LFS API and storage' do end it 'with location of lfs store and object details' do - expect(json_response['StoreLFSPath']).to eq(LfsObjectUploader.workhorse_upload_path) + expect(json_response['TempPath']).to 
eq(LfsObjectUploader.workhorse_local_upload_path) expect(json_response['LfsOid']).to eq(sample_oid) expect(json_response['LfsSize']).to eq(sample_size) end @@ -1177,9 +1331,25 @@ describe 'Git LFS API and storage' do put "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}/authorize", nil, authorize_headers end - def put_finalize(lfs_tmp = lfs_tmp_file) - put "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}", nil, - headers.merge('X-Gitlab-Lfs-Tmp' => lfs_tmp).compact + def put_finalize(lfs_tmp = lfs_tmp_file, with_tempfile: false) + upload_path = LfsObjectUploader.workhorse_local_upload_path + file_path = upload_path + '/' + lfs_tmp if lfs_tmp + + if with_tempfile + FileUtils.mkdir_p(upload_path) + FileUtils.touch(file_path) + end + + args = { + 'file.path' => file_path, + 'file.name' => File.basename(file_path) + }.compact + + put_finalize_with_args(args) + end + + def put_finalize_with_args(args) + put "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}", args, headers end def lfs_tmp_file diff --git a/spec/serializers/pipeline_serializer_spec.rb b/spec/serializers/pipeline_serializer_spec.rb index c38795ad1a1..f51c11b141f 100644 --- a/spec/serializers/pipeline_serializer_spec.rb +++ b/spec/serializers/pipeline_serializer_spec.rb @@ -117,6 +117,7 @@ describe PipelineSerializer do shared_examples 'no N+1 queries' do it 'verifies number of queries', :request_store do recorded = ActiveRecord::QueryRecorder.new { subject } + expect(recorded.count).to be_within(1).of(36) expect(recorded.cached_count).to eq(0) end diff --git a/spec/serializers/status_entity_spec.rb b/spec/serializers/status_entity_spec.rb index 16431ed4188..70402bac2e2 100644 --- a/spec/serializers/status_entity_spec.rb +++ b/spec/serializers/status_entity_spec.rb @@ -25,5 +25,10 @@ describe StatusEntity do allow(Rails.env).to receive(:development?) 
{ true } expect(entity.as_json[:favicon]).to match_asset_path('/assets/ci_favicons/dev/favicon_status_success.ico') end + + it 'contains a canary namespaced favicon if canary env' do + stub_env('CANARY', 'true') + expect(entity.as_json[:favicon]).to match_asset_path('/assets/ci_favicons/canary/favicon_status_success.ico') + end end end diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb index db9c216d3f4..8de0bdf92e2 100644 --- a/spec/services/ci/retry_build_service_spec.rb +++ b/spec/services/ci/retry_build_service_spec.rb @@ -28,7 +28,9 @@ describe Ci::RetryBuildService do %i[type lock_version target_url base_tags trace_sections commit_id deployments erased_by_id last_deployment project_id runner_id tag_taggings taggings tags trigger_request_id - user_id auto_canceled_by_id retried failure_reason].freeze + user_id auto_canceled_by_id retried failure_reason + artifacts_file_store artifacts_metadata_store + metadata].freeze shared_examples 'build duplication' do let(:another_pipeline) { create(:ci_empty_pipeline, project: project) } diff --git a/spec/services/clusters/create_service_spec.rb b/spec/services/clusters/create_service_spec.rb index e2e64659dfa..1c2f9c5cf43 100644 --- a/spec/services/clusters/create_service_spec.rb +++ b/spec/services/clusters/create_service_spec.rb @@ -82,7 +82,7 @@ describe Clusters::CreateService do context 'when project has a cluster' do include_context 'valid params' - let!(:cluster) { create(:cluster, :provided_by_gcp, projects: [project]) } + let!(:cluster) { create(:cluster, :provided_by_gcp, :production_environment, projects: [project]) } it 'does not create a cluster' do expect(ClusterProvisionWorker).not_to receive(:perform_async) diff --git a/spec/services/groups/transfer_service_spec.rb b/spec/services/groups/transfer_service_spec.rb index e1c873f8c1e..999677cfaaa 100644 --- a/spec/services/groups/transfer_service_spec.rb +++ b/spec/services/groups/transfer_service_spec.rb @@ -222,8 +222,8 @@ describe Groups::TransferService, :postgresql do expect(new_parent_group.children.first).to eq(group) end - it 'should create a permanent redirect for the group' do - expect(group.redirect_routes.permanent.count).to eq(1) + it 'should create a redirect for the group' do + expect(group.redirect_routes.count).to eq(1) end end @@ -243,10 +243,10 @@ describe Groups::TransferService, :postgresql do end end - it 'should create permanent redirects for the subgroups' do - expect(group.redirect_routes.permanent.count).to eq(1) - expect(subgroup1.redirect_routes.permanent.count).to eq(1) - expect(subgroup2.redirect_routes.permanent.count).to eq(1) + it 'should create redirects for the subgroups' do + expect(group.redirect_routes.count).to eq(1) + expect(subgroup1.redirect_routes.count).to eq(1) + expect(subgroup2.redirect_routes.count).to eq(1) end context 'when the new parent has a higher visibility than the children' do @@ -287,9 +287,9 @@ describe Groups::TransferService, :postgresql do end it 'should create redirects for the projects' do - expect(group.redirect_routes.permanent.count).to eq(1) - expect(project1.redirect_routes.permanent.count).to eq(1) - expect(project2.redirect_routes.permanent.count).to eq(1) + expect(group.redirect_routes.count).to eq(1) + expect(project1.redirect_routes.count).to eq(1) + expect(project2.redirect_routes.count).to eq(1) end context 'when the new parent has a higher visibility than the projects' do @@ -338,12 +338,12 @@ describe Groups::TransferService, :postgresql do 
end end - it 'should create permanent redirect for the subgroups and projects' do - expect(group.redirect_routes.permanent.count).to eq(1) - expect(subgroup1.redirect_routes.permanent.count).to eq(1) - expect(subgroup2.redirect_routes.permanent.count).to eq(1) - expect(project1.redirect_routes.permanent.count).to eq(1) - expect(project2.redirect_routes.permanent.count).to eq(1) + it 'should create redirects for the subgroups and projects' do + expect(group.redirect_routes.count).to eq(1) + expect(subgroup1.redirect_routes.count).to eq(1) + expect(subgroup2.redirect_routes.count).to eq(1) + expect(project1.redirect_routes.count).to eq(1) + expect(project2.redirect_routes.count).to eq(1) end end @@ -380,12 +380,12 @@ describe Groups::TransferService, :postgresql do end end - it 'should create permanent redirect for the subgroups and projects' do - expect(group.redirect_routes.permanent.count).to eq(1) - expect(project1.redirect_routes.permanent.count).to eq(1) - expect(subgroup1.redirect_routes.permanent.count).to eq(1) - expect(nested_subgroup.redirect_routes.permanent.count).to eq(1) - expect(nested_project.redirect_routes.permanent.count).to eq(1) + it 'should create redirects for the subgroups and projects' do + expect(group.redirect_routes.count).to eq(1) + expect(project1.redirect_routes.count).to eq(1) + expect(subgroup1.redirect_routes.count).to eq(1) + expect(nested_subgroup.redirect_routes.count).to eq(1) + expect(nested_project.redirect_routes.count).to eq(1) end end diff --git a/spec/services/issues/move_service_spec.rb b/spec/services/issues/move_service_spec.rb index c148a98569b..a9aee9e100f 100644 --- a/spec/services/issues/move_service_spec.rb +++ b/spec/services/issues/move_service_spec.rb @@ -6,7 +6,7 @@ describe Issues::MoveService do let(:title) { 'Some issue' } let(:description) { 'Some issue description' } let(:old_project) { create(:project) } - let(:new_project) { create(:project, group: create(:group)) } + let(:new_project) { create(:project) } let(:milestone1) { create(:milestone, project_id: old_project.id, title: 'v9.0') } let(:old_issue) do diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb index 903aa0a5078..2536c6e2514 100644 --- a/spec/services/merge_requests/refresh_service_spec.rb +++ b/spec/services/merge_requests/refresh_service_spec.rb @@ -24,6 +24,14 @@ describe MergeRequests::RefreshService do merge_when_pipeline_succeeds: true, merge_user: @user) + @another_merge_request = create(:merge_request, + source_project: @project, + source_branch: 'master', + target_branch: 'test', + target_project: @project, + merge_when_pipeline_succeeds: true, + merge_user: @user) + @fork_merge_request = create(:merge_request, source_project: @fork_project, source_branch: 'master', @@ -52,9 +60,11 @@ describe MergeRequests::RefreshService do context 'push to origin repo source branch' do let(:refresh_service) { service.new(@project, @user) } + let(:notification_service) { spy('notification_service') } before do allow(refresh_service).to receive(:execute_hooks) + allow(NotificationService).to receive(:new) { notification_service } end it 'executes hooks with update action' do @@ -64,6 +74,11 @@ describe MergeRequests::RefreshService do expect(refresh_service).to have_received(:execute_hooks) .with(@merge_request, 'update', old_rev: @oldrev) + expect(notification_service).to have_received(:push_to_merge_request) + .with(@merge_request, @user, new_commits: anything, existing_commits: anything) + 
expect(notification_service).to have_received(:push_to_merge_request) + .with(@another_merge_request, @user, new_commits: anything, existing_commits: anything) + expect(@merge_request.notes).not_to be_empty expect(@merge_request).to be_open expect(@merge_request.merge_when_pipeline_succeeds).to be_falsey @@ -119,11 +134,13 @@ describe MergeRequests::RefreshService do context 'push to origin repo source branch when an MR was reopened' do let(:refresh_service) { service.new(@project, @user) } + let(:notification_service) { spy('notification_service') } before do @merge_request.update(state: :reopened) allow(refresh_service).to receive(:execute_hooks) + allow(NotificationService).to receive(:new) { notification_service } refresh_service.execute(@oldrev, @newrev, 'refs/heads/master') reload_mrs end @@ -131,6 +148,10 @@ describe MergeRequests::RefreshService do it 'executes hooks with update action' do expect(refresh_service).to have_received(:execute_hooks) .with(@merge_request, 'update', old_rev: @oldrev) + expect(notification_service).to have_received(:push_to_merge_request) + .with(@merge_request, @user, new_commits: anything, existing_commits: anything) + expect(notification_service).to have_received(:push_to_merge_request) + .with(@another_merge_request, @user, new_commits: anything, existing_commits: anything) expect(@merge_request.notes).not_to be_empty expect(@merge_request).to be_open diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb index 1d117d9abc7..e7c7706b484 100644 --- a/spec/services/notification_service_spec.rb +++ b/spec/services/notification_service_spec.rb @@ -1121,6 +1121,36 @@ describe NotificationService, :mailer do end end + describe '#push_to_merge_request' do + before do + update_custom_notification(:push_to_merge_request, @u_guest_custom, resource: project) + update_custom_notification(:push_to_merge_request, @u_custom_global) + end + + it do + notification.push_to_merge_request(merge_request, @u_disabled) + + should_email(merge_request.assignee) + should_email(@u_guest_custom) + should_email(@u_custom_global) + should_email(@u_participant_mentioned) + should_email(@subscriber) + should_email(@watcher_and_subscriber) + should_not_email(@u_watcher) + should_not_email(@u_guest_watcher) + should_not_email(@unsubscriber) + should_not_email(@u_participating) + should_not_email(@u_disabled) + should_not_email(@u_lazy_participant) + end + + it_behaves_like 'participating notifications' do + let(:participant) { create(:user, username: 'user-participant') } + let(:issuable) { merge_request } + let(:notification_trigger) { notification.push_to_merge_request(merge_request, @u_disabled) } + end + end + describe '#relabel_merge_request' do let(:group_label_1) { create(:group_label, group: group, title: 'Group Label 1', merge_requests: [merge_request]) } let(:group_label_2) { create(:group_label, group: group, title: 'Group Label 2') } diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb index 4413c6ef83e..2cacb97a293 100644 --- a/spec/services/projects/create_service_spec.rb +++ b/spec/services/projects/create_service_spec.rb @@ -70,6 +70,16 @@ describe Projects::CreateService, '#execute' do opts[:default_branch] = 'master' expect(create_project(user, opts)).to eq(nil) end + + it 'sets invalid service as inactive' do + create(:service, type: 'JiraService', project: nil, template: true, active: true) + + project = create_project(user, opts) + service = project.services.first + 
+ expect(project).to be_persisted + expect(service.active).to be false + end end context 'wiki_enabled creates repository directory' do @@ -232,14 +242,15 @@ describe Projects::CreateService, '#execute' do end context 'when a bad service template is created' do - it 'reports an error in the imported project' do + it 'sets the service to be inactive' do opts[:import_url] = 'http://www.gitlab.com/gitlab-org/gitlab-ce' create(:service, type: 'DroneCiService', project: nil, template: true, active: true) project = create_project(user, opts) + service = project.services.first - expect(project.errors.full_messages_for(:base).first).to match(/Unable to save project. Error: Unable to save DroneCiService/) - expect(project.services.count).to eq 0 + expect(project).to be_persisted + expect(service.active).to be false end end diff --git a/spec/services/protected_branches/create_service_spec.rb b/spec/services/protected_branches/create_service_spec.rb index 53b3e5e365d..786493c3577 100644 --- a/spec/services/protected_branches/create_service_spec.rb +++ b/spec/services/protected_branches/create_service_spec.rb @@ -35,5 +35,18 @@ describe ProtectedBranches::CreateService do expect { service.execute }.to raise_error(Gitlab::Access::AccessDeniedError) end end + + context 'when a policy restricts rule creation' do + before do + policy = instance_double(ProtectedBranchPolicy, can?: false) + expect(ProtectedBranchPolicy).to receive(:new).and_return(policy) + end + + it "prevents creation of the protected branch rule" do + expect do + service.execute + end.to raise_error(Gitlab::Access::AccessDeniedError) + end + end end end diff --git a/spec/services/protected_branches/destroy_service_spec.rb b/spec/services/protected_branches/destroy_service_spec.rb new file mode 100644 index 00000000000..4a391b6c25c --- /dev/null +++ b/spec/services/protected_branches/destroy_service_spec.rb @@ -0,0 +1,30 @@ +require 'spec_helper' + +describe ProtectedBranches::DestroyService do + let(:protected_branch) { create(:protected_branch) } + let(:project) { protected_branch.project } + let(:user) { project.owner } + + describe '#execute' do + subject(:service) { described_class.new(project, user) } + + it 'destroys a protected branch' do + service.execute(protected_branch) + + expect(protected_branch).to be_destroyed + end + + context 'when a policy restricts rule deletion' do + before do + policy = instance_double(ProtectedBranchPolicy, can?: false) + expect(ProtectedBranchPolicy).to receive(:new).and_return(policy) + end + + it "prevents deletion of the protected branch rule" do + expect do + service.execute(protected_branch) + end.to raise_error(Gitlab::Access::AccessDeniedError) + end + end + end +end diff --git a/spec/services/protected_branches/update_service_spec.rb b/spec/services/protected_branches/update_service_spec.rb index 9fa5983db66..3f6f8e09565 100644 --- a/spec/services/protected_branches/update_service_spec.rb +++ b/spec/services/protected_branches/update_service_spec.rb @@ -22,5 +22,16 @@ describe ProtectedBranches::UpdateService do expect { service.execute(protected_branch) }.to raise_error(Gitlab::Access::AccessDeniedError) end end + + context 'when a policy restricts rule update' do + before do + policy = instance_double(ProtectedBranchPolicy, can?: false) + expect(ProtectedBranchPolicy).to receive(:new).and_return(policy) + end + + it "prevents update of the protected branch rule" do + expect { service.execute(protected_branch) }.to raise_error(Gitlab::Access::AccessDeniedError) + end + end end end diff
--git a/spec/services/protected_tags/destroy_service_spec.rb b/spec/services/protected_tags/destroy_service_spec.rb new file mode 100644 index 00000000000..e12f53a2221 --- /dev/null +++ b/spec/services/protected_tags/destroy_service_spec.rb @@ -0,0 +1,17 @@ +require 'spec_helper' + +describe ProtectedTags::DestroyService do + let(:protected_tag) { create(:protected_tag) } + let(:project) { protected_tag.project } + let(:user) { project.owner } + + describe '#execute' do + subject(:service) { described_class.new(project, user) } + + it 'destroys a protected tag' do + service.execute(protected_tag) + + expect(protected_tag).to be_destroyed + end + end +end diff --git a/spec/services/verify_pages_domain_service_spec.rb b/spec/services/verify_pages_domain_service_spec.rb index 576db1dde2d..d974cc0226f 100644 --- a/spec/services/verify_pages_domain_service_spec.rb +++ b/spec/services/verify_pages_domain_service_spec.rb @@ -93,6 +93,25 @@ describe VerifyPagesDomainService do expect(domain).not_to be_enabled end end + + context 'with an invalid domain' do + let(:domain) { build(:pages_domain, :expired, :with_missing_chain) } + + before do + domain.save(validate: false) + end + + it 'can be disabled' do + error_status[:message] += '. It is now disabled.' + + stub_resolver + + expect(service.execute).to eq(error_status) + + expect(domain).not_to be_verified + expect(domain).not_to be_enabled + end + end end context 'timeout behaviour' do diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index 5051cd34564..e8cecf361ff 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -97,6 +97,10 @@ RSpec.configure do |config| TestEnv.init end + config.after(:all) do + TestEnv.clean_test_path + end + config.before(:example) do # Skip pre-receive hook check so we can use the web editor and merge.
allow_any_instance_of(Gitlab::Git::Hook).to receive(:trigger).and_return([true, nil]) diff --git a/spec/support/gitaly.rb b/spec/support/gitaly.rb index c7e8a39a617..9cf541372b5 100644 --- a/spec/support/gitaly.rb +++ b/spec/support/gitaly.rb @@ -1,11 +1,13 @@ RSpec.configure do |config| config.before(:each) do |example| if example.metadata[:disable_gitaly] - allow(Gitlab::GitalyClient).to receive(:feature_enabled?).and_return(false) + # Use 'and_wrap_original' to make sure the arguments are valid + allow(Gitlab::GitalyClient).to receive(:feature_enabled?).and_wrap_original { |m, *args| m.call(*args) && false } else next if example.metadata[:skip_gitaly_mock] + # Use 'and_wrap_original' to make sure the arguments are valid - allow(Gitlab::GitalyClient).to receive(:feature_enabled?).and_return(true) + allow(Gitlab::GitalyClient).to receive(:feature_enabled?).and_wrap_original { |m, *args| m.call(*args) || true } end end end diff --git a/spec/support/http_io/http_io_helpers.rb b/spec/support/http_io/http_io_helpers.rb new file mode 100644 index 00000000000..31e07e720cd --- /dev/null +++ b/spec/support/http_io/http_io_helpers.rb @@ -0,0 +1,64 @@ +module HttpIOHelpers + def stub_remote_trace_206 + WebMock.stub_request(:get, remote_trace_url) + .to_return { |request| remote_trace_response(request, 206) } + end + + def stub_remote_trace_200 + WebMock.stub_request(:get, remote_trace_url) + .to_return { |request| remote_trace_response(request, 200) } + end + + def stub_remote_trace_500 + WebMock.stub_request(:get, remote_trace_url) + .to_return(status: [500, "Internal Server Error"]) + end + + def remote_trace_url + "http://trace.com/trace" + end + + def remote_trace_response(request, response_status) + range = request.headers['Range'].match(/bytes=(\d+)-(\d+)/) + + { + status: response_status, + headers: remote_trace_response_headers(response_status, range[1].to_i, range[2].to_i), + body: range_trace_body(range[1].to_i, range[2].to_i) + } + end + + def remote_trace_response_headers(response_status, from, to) + headers = { 'Content-Type' => 'text/plain' } + + if response_status == 206 + headers.merge!('Content-Range' => "bytes #{from}-#{to}/#{remote_trace_size}") + end + + headers + end + + def range_trace_body(from, to) + remote_trace_body[from..to] + end + + def remote_trace_body + @remote_trace_body ||= File.read(expand_fixture_path('trace/sample_trace')) + end + + def remote_trace_size + remote_trace_body.length + end + + def set_smaller_buffer_size_than(file_size) + blocks = (file_size / 128) + new_size = (blocks / 2) * 128 + stub_const("Gitlab::Ci::Trace::HttpIO::BUFFER_SIZE", new_size) + end + + def set_larger_buffer_size_than(file_size) + blocks = (file_size / 128) + new_size = (blocks * 2) * 128 + stub_const("Gitlab::Ci::Trace::HttpIO::BUFFER_SIZE", new_size) + end +end diff --git a/spec/support/migrations_helpers.rb b/spec/support/migrations_helpers.rb index 6bf976a2cf9..5d6f662e8fe 100644 --- a/spec/support/migrations_helpers.rb +++ b/spec/support/migrations_helpers.rb @@ -1,6 +1,9 @@ module MigrationsHelpers def table(name) - Class.new(ActiveRecord::Base) { self.table_name = name } + Class.new(ActiveRecord::Base) do + self.table_name = name + self.inheritance_column = :_type_disabled + end end def migrations_paths diff --git a/spec/support/shared_examples/controllers/variables_shared_examples.rb b/spec/support/shared_examples/controllers/variables_shared_examples.rb index d7acf8c0032..b615a8f54cf 100644 --- a/spec/support/shared_examples/controllers/variables_shared_examples.rb +++
b/spec/support/shared_examples/controllers/variables_shared_examples.rb @@ -16,19 +16,19 @@ shared_examples 'PATCH #update updates variables' do let(:variable_attributes) do { id: variable.id, key: variable.key, - value: variable.value, + secret_value: variable.value, protected: variable.protected?.to_s } end let(:new_variable_attributes) do { key: 'new_key', - value: 'dummy_value', + secret_value: 'dummy_value', protected: 'false' } end context 'with invalid new variable parameters' do let(:variables_attributes) do [ - variable_attributes.merge(value: 'other_value'), + variable_attributes.merge(secret_value: 'other_value'), new_variable_attributes.merge(key: '...?') ] end @@ -52,7 +52,7 @@ shared_examples 'PATCH #update updates variables' do let(:variables_attributes) do [ new_variable_attributes, - new_variable_attributes.merge(value: 'other_value') + new_variable_attributes.merge(secret_value: 'other_value') ] end @@ -74,7 +74,7 @@ shared_examples 'PATCH #update updates variables' do context 'with valid new variable parameters' do let(:variables_attributes) do [ - variable_attributes.merge(value: 'other_value'), + variable_attributes.merge(secret_value: 'other_value'), new_variable_attributes ] end diff --git a/spec/support/shared_examples/uploaders/object_storage_shared_examples.rb b/spec/support/shared_examples/uploaders/object_storage_shared_examples.rb new file mode 100644 index 00000000000..6352f1527cd --- /dev/null +++ b/spec/support/shared_examples/uploaders/object_storage_shared_examples.rb @@ -0,0 +1,138 @@ +shared_context 'with storage' do |store, **stub_params| + before do + subject.object_store = store + end +end + +shared_examples "migrates" do |to_store:, from_store: nil| + let(:to) { to_store } + let(:from) { from_store || subject.object_store } + + def migrate(to) + subject.migrate!(to) + end + + def checksum + Digest::SHA256.hexdigest(subject.read) + end + + before do + migrate(from) + end + + it 'returns corresponding file type' do + expect(subject).to be_a(CarrierWave::Uploader::Base) + expect(subject).to be_a(ObjectStorage::Concern) + + if from == described_class::Store::REMOTE + expect(subject.file).to be_a(CarrierWave::Storage::Fog::File) + elsif from == described_class::Store::LOCAL + expect(subject.file).to be_a(CarrierWave::SanitizedFile) + else + raise 'Unexpected file type' + end + end + + it 'does nothing when migrating to the current store' do + expect { migrate(from) }.not_to change { subject.object_store }.from(from) + end + + it 'migrates to the specified store' do + from_checksum = checksum + + expect { migrate(to) }.to change { subject.object_store }.from(from).to(to) + expect(checksum).to eq(from_checksum) + end + + it 'removes the original file after the migration' do + original_file = subject.file.path + migrate(to) + + expect(File.exist?(original_file)).to be_falsey + end + + it 'can access the original file during migration' do + file = subject.file + + allow(subject).to receive(:delete_migrated_file) { } # Remove as a callback of :migrate + allow(subject).to receive(:record_upload) { } # Remove as a callback of :store (:record_upload) + + expect(file.exists?).to be_truthy + expect { migrate(to) }.not_to change { file.exists? } + end + + context 'when migrate! is not occupied by another process' do + it 'executes migrate!' do + expect(subject).to receive(:object_store=).at_least(1) + + migrate(to) + end + + it 'executes use_file' do + expect(subject).to receive(:unsafe_use_file).once + + subject.use_file + end + end + + context 'when migrate!
is occupied by another process' do + let(:exclusive_lease_key) { "object_storage_migrate:#{subject.model.class}:#{subject.model.id}" } + + before do + @uuid = Gitlab::ExclusiveLease.new(exclusive_lease_key, timeout: 1.hour.to_i).try_obtain + end + + it 'does not execute migrate!' do + expect(subject).not_to receive(:unsafe_migrate!) + + expect { migrate(to) }.to raise_error('exclusive lease already taken') + end + + it 'does not execute use_file' do + expect(subject).not_to receive(:unsafe_use_file) + + expect { subject.use_file }.to raise_error('exclusive lease already taken') + end + + after do + Gitlab::ExclusiveLease.cancel(exclusive_lease_key, @uuid) + end + end + + context 'migration is unsuccessful' do + shared_examples "handles gracefully" do |error:| + it 'does not update the object_store' do + expect { migrate(to) }.to raise_error(error) + expect(subject.object_store).to eq(from) + end + + it 'does not delete the original file' do + expect { migrate(to) }.to raise_error(error) + expect(subject.exists?).to be_truthy + end + end + + context 'when the store is not supported' do + let(:to) { -1 } # not a valid store + + include_examples "handles gracefully", error: ObjectStorage::UnknownStoreError + end + + context 'upon a fog failure' do + before do + storage_class = subject.send(:storage_for, to).class + expect_any_instance_of(storage_class).to receive(:store!).and_raise("Store failure.") + end + + include_examples "handles gracefully", error: "Store failure." + end + + context 'upon a database failure' do + before do + expect(uploader).to receive(:persist_object_store!).and_raise("ActiveRecord failure.") + end + + include_examples "handles gracefully", error: "ActiveRecord failure." + end + end +end diff --git a/spec/support/stub_object_storage.rb b/spec/support/stub_object_storage.rb new file mode 100644 index 00000000000..6e88641da42 --- /dev/null +++ b/spec/support/stub_object_storage.rb @@ -0,0 +1,48 @@ +module StubConfiguration + def stub_object_storage_uploader( + config:, + uploader:, + remote_directory:, + enabled: true, + proxy_download: false, + background_upload: false, + direct_upload: false + ) + allow(config).to receive(:enabled) { enabled } + allow(config).to receive(:proxy_download) { proxy_download } + allow(config).to receive(:background_upload) { background_upload } + allow(config).to receive(:direct_upload) { direct_upload } + + return unless enabled + + Fog.mock! 
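+ # Fog.mock! switches Fog to its in-memory mock backend, so the bucket created below never touches real object storage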
+ + ::Fog::Storage.new(uploader.object_store_credentials).tap do |connection| + begin + connection.directories.create(key: remote_directory) + rescue Excon::Error::Conflict + end + end + end + + def stub_artifacts_object_storage(**params) + stub_object_storage_uploader(config: Gitlab.config.artifacts.object_store, + uploader: JobArtifactUploader, + remote_directory: 'artifacts', + **params) + end + + def stub_lfs_object_storage(**params) + stub_object_storage_uploader(config: Gitlab.config.lfs.object_store, + uploader: LfsObjectUploader, + remote_directory: 'lfs-objects', + **params) + end + + def stub_uploads_object_storage(uploader = described_class, **params) + stub_object_storage_uploader(config: Gitlab.config.uploads.object_store, + uploader: uploader, + remote_directory: 'uploads', + **params) + end +end diff --git a/spec/tasks/gitlab/artifacts/migrate_rake_spec.rb b/spec/tasks/gitlab/artifacts/migrate_rake_spec.rb new file mode 100644 index 00000000000..8544fb62b5a --- /dev/null +++ b/spec/tasks/gitlab/artifacts/migrate_rake_spec.rb @@ -0,0 +1,118 @@ +require 'rake_helper' + +describe 'gitlab:artifacts namespace rake task' do + before(:context) do + Rake.application.rake_require 'tasks/gitlab/artifacts/migrate' + end + + let(:object_storage_enabled) { false } + + before do + stub_artifacts_object_storage(enabled: object_storage_enabled) + end + + subject { run_rake_task('gitlab:artifacts:migrate') } + + context 'legacy artifacts' do + describe 'migrate' do + let!(:build) { create(:ci_build, :legacy_artifacts, artifacts_file_store: store, artifacts_metadata_store: store) } + + context 'when local storage is used' do + let(:store) { ObjectStorage::Store::LOCAL } + + context 'and job does not have file store defined' do + let(:object_storage_enabled) { true } + let(:store) { nil } + + it "migrates file to remote storage" do + subject + + expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::REMOTE) + expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::REMOTE) + end + end + + context 'and remote storage is defined' do + let(:object_storage_enabled) { true } + + it "migrates file to remote storage" do + subject + + expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::REMOTE) + expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::REMOTE) + end + end + + context 'and remote storage is not defined' do + it "fails to migrate to remote storage" do + subject + + expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::LOCAL) + expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::LOCAL) + end + end + end + + context 'when remote storage is used' do + let(:object_storage_enabled) { true } + + let(:store) { ObjectStorage::Store::REMOTE } + + it "file stays on remote storage" do + subject + + expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::REMOTE) + expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::REMOTE) + end + end + end + end + + context 'job artifacts' do + let!(:artifact) { create(:ci_job_artifact, :archive, file_store: store) } + + context 'when local storage is used' do + let(:store) { ObjectStorage::Store::LOCAL } + + context 'and job does not have file store defined' do + let(:object_storage_enabled) { true } + let(:store) { nil } + + it "migrates file to remote storage" do + subject + + expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE) + end + end + + context 'and remote storage is defined' do + 
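# object storage is enabled for this context, so the task is expected to move the file +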
let(:object_storage_enabled) { true } + + it "migrates file to remote storage" do + subject + + expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE) + end + end + + context 'and remote storage is not defined' do + it "fails to migrate to remote storage" do + subject + + expect(artifact.reload.file_store).to eq(ObjectStorage::Store::LOCAL) + end + end + end + + context 'when remote storage is used' do + let(:object_storage_enabled) { true } + let(:store) { ObjectStorage::Store::REMOTE } + + it "file stays on remote storage" do + subject + + expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE) + end + end + end +end diff --git a/spec/tasks/gitlab/lfs/migrate_rake_spec.rb b/spec/tasks/gitlab/lfs/migrate_rake_spec.rb new file mode 100644 index 00000000000..66d1a192a96 --- /dev/null +++ b/spec/tasks/gitlab/lfs/migrate_rake_spec.rb @@ -0,0 +1,37 @@ +require 'rake_helper' + +describe 'gitlab:lfs namespace rake task' do + before :all do + Rake.application.rake_require 'tasks/gitlab/lfs/migrate' + end + + describe 'migrate' do + let(:local) { ObjectStorage::Store::LOCAL } + let(:remote) { ObjectStorage::Store::REMOTE } + let!(:lfs_object) { create(:lfs_object, :with_file, file_store: local) } + + def lfs_migrate + run_rake_task('gitlab:lfs:migrate') + end + + context 'object storage disabled' do + before do + stub_lfs_object_storage(enabled: false) + end + + it "doesn't migrate files" do + expect { lfs_migrate }.not_to change { lfs_object.reload.file_store } + end + end + + context 'object storage enabled' do + before do + stub_lfs_object_storage + end + + it 'migrates local file to object storage' do + expect { lfs_migrate }.to change { lfs_object.reload.file_store }.from(local).to(remote) + end + end + end +end diff --git a/spec/tasks/gitlab/uploads/migrate_rake_spec.rb b/spec/tasks/gitlab/uploads/migrate_rake_spec.rb new file mode 100644 index 00000000000..b778d26060d --- /dev/null +++ b/spec/tasks/gitlab/uploads/migrate_rake_spec.rb @@ -0,0 +1,28 @@ +require 'rake_helper' + +describe 'gitlab:uploads:migrate rake tasks' do + let!(:projects) { create_list(:project, 10, :with_avatar) } + let(:model_class) { Project } + let(:uploader_class) { AvatarUploader } + let(:mounted_as) { :avatar } + let(:batch_size) { 3 } + + before do + stub_env('BATCH', batch_size.to_s) + stub_uploads_object_storage(uploader_class) + Rake.application.rake_require 'tasks/gitlab/uploads/migrate' + + allow(ObjectStorage::MigrateUploadsWorker).to receive(:perform_async) + end + + def run + args = [uploader_class.to_s, model_class.to_s, mounted_as].compact + run_rake_task("gitlab:uploads:migrate", *args) + end + + it 'enqueues jobs in batches' do + expect(ObjectStorage::MigrateUploadsWorker).to receive(:enqueue!).exactly(4).times + + run + end +end diff --git a/spec/uploaders/attachment_uploader_spec.rb b/spec/uploaders/attachment_uploader_spec.rb index 091ba824fc6..d302c14efb9 100644 --- a/spec/uploaders/attachment_uploader_spec.rb +++ b/spec/uploaders/attachment_uploader_spec.rb @@ -11,4 +11,26 @@ describe AttachmentUploader do store_dir: %r[uploads/-/system/note/attachment/], upload_path: %r[uploads/-/system/note/attachment/], absolute_path: %r[#{CarrierWave.root}/uploads/-/system/note/attachment/] + + context "object_store is REMOTE" do + before do + stub_uploads_object_storage + end + + include_context 'with storage', described_class::Store::REMOTE + + it_behaves_like 'builds correct paths', + store_dir: %r[note/attachment/], + upload_path: %r[note/attachment/] + end + + describe
"#migrate!" do + before do + uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/doc_sample.txt'))) + stub_uploads_object_storage + end + + it_behaves_like "migrates", to_store: described_class::Store::REMOTE + it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL + end end diff --git a/spec/uploaders/avatar_uploader_spec.rb b/spec/uploaders/avatar_uploader_spec.rb index bf9028c9260..b0468bc35ff 100644 --- a/spec/uploaders/avatar_uploader_spec.rb +++ b/spec/uploaders/avatar_uploader_spec.rb @@ -1,7 +1,7 @@ require 'spec_helper' describe AvatarUploader do - let(:model) { create(:user, :with_avatar) } + let(:model) { build_stubbed(:user) } let(:uploader) { described_class.new(model, :avatar) } let(:upload) { create(:upload, model: model) } @@ -12,15 +12,28 @@ describe AvatarUploader do upload_path: %r[uploads/-/system/user/avatar/], absolute_path: %r[#{CarrierWave.root}/uploads/-/system/user/avatar/] - describe '#move_to_cache' do - it 'is false' do - expect(uploader.move_to_cache).to eq(false) + context "object_store is REMOTE" do + before do + stub_uploads_object_storage end + + include_context 'with storage', described_class::Store::REMOTE + + it_behaves_like 'builds correct paths', + store_dir: %r[user/avatar/], + upload_path: %r[user/avatar/] end - describe '#move_to_store' do - it 'is false' do - expect(uploader.move_to_store).to eq(false) + context "with a file" do + let(:project) { create(:project, :with_avatar) } + let(:uploader) { project.avatar } + let(:upload) { uploader.upload } + + before do + stub_uploads_object_storage end + + it_behaves_like "migrates", to_store: described_class::Store::REMOTE + it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL end end diff --git a/spec/uploaders/file_mover_spec.rb b/spec/uploaders/file_mover_spec.rb index bc024cd307c..68b7e24776d 100644 --- a/spec/uploaders/file_mover_spec.rb +++ b/spec/uploaders/file_mover_spec.rb @@ -36,6 +36,12 @@ describe FileMover do it 'creates a new update record' do expect { subject }.to change { Upload.count }.by(1) end + + it 'schedules a background migration' do + expect_any_instance_of(PersonalFileUploader).to receive(:schedule_background_upload).once + + subject + end end context 'when update_markdown fails' do diff --git a/spec/uploaders/file_uploader_spec.rb b/spec/uploaders/file_uploader_spec.rb index b42ce982b27..db2810bbe1d 100644 --- a/spec/uploaders/file_uploader_spec.rb +++ b/spec/uploaders/file_uploader_spec.rb @@ -11,32 +11,41 @@ describe FileUploader do shared_examples 'builds correct legacy storage paths' do include_examples 'builds correct paths', store_dir: %r{awesome/project/\h+}, + upload_path: %r{\h+/<filename>}, absolute_path: %r{#{described_class.root}/awesome/project/secret/foo.jpg} end - shared_examples 'uses hashed storage' do - context 'when rolled out attachments' do - let(:project) { build_stubbed(:project, namespace: group, name: 'project') } + context 'legacy storage' do + it_behaves_like 'builds correct legacy storage paths' - before do - allow(project).to receive(:disk_path).and_return('ca/fe/fe/ed') - end + context 'uses hashed storage' do + context 'when rolled out attachments' do + let(:project) { build_stubbed(:project, namespace: group, name: 'project') } - it_behaves_like 'builds correct paths', - store_dir: %r{ca/fe/fe/ed/\h+}, - absolute_path: %r{#{described_class.root}/ca/fe/fe/ed/secret/foo.jpg} - end + include_examples 'builds correct 
paths', + store_dir: %r{@hashed/\h{2}/\h{2}/\h+}, + upload_path: %r{\h+/<filename>} + end - context 'when only repositories are rolled out' do - let(:project) { build_stubbed(:project, namespace: group, name: 'project', storage_version: Project::HASHED_STORAGE_FEATURES[:repository]) } + context 'when only repositories are rolled out' do + let(:project) { build_stubbed(:project, namespace: group, name: 'project', storage_version: Project::HASHED_STORAGE_FEATURES[:repository]) } - it_behaves_like 'builds correct legacy storage paths' + it_behaves_like 'builds correct legacy storage paths' + end end end - context 'legacy storage' do - it_behaves_like 'builds correct legacy storage paths' - include_examples 'uses hashed storage' + context 'object store is remote' do + before do + stub_uploads_object_storage + end + + include_context 'with storage', described_class::Store::REMOTE + + # always use hashed storage path for remote uploads + it_behaves_like 'builds correct paths', + store_dir: %r{@hashed/\h{2}/\h{2}/\h+}, + upload_path: %r{@hashed/\h{2}/\h{2}/\h+/\h+/<filename>} end describe 'initialize' do @@ -78,6 +87,16 @@ describe FileUploader do end end + describe "#migrate!" do + before do + uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/dk.png'))) + stub_uploads_object_storage + end + + it_behaves_like "migrates", to_store: described_class::Store::REMOTE + it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL + end + describe '#upload=' do let(:secret) { SecureRandom.hex } let(:upload) { create(:upload, :issuable_upload, secret: secret, filename: 'file.txt') } @@ -93,15 +112,5 @@ describe FileUploader do uploader.upload = upload end - - context 'uploader_context is empty' do - it 'fallbacks to regex based extraction' do - expect(upload).to receive(:uploader_context).and_return({}) - - uploader.upload = upload - expect(uploader.secret).to eq(secret) - expect(uploader.instance_variable_get(:@identifier)).to eq('file.txt') - end - end end end diff --git a/spec/uploaders/gitlab_uploader_spec.rb b/spec/uploaders/gitlab_uploader_spec.rb index 60e35dcf235..4fba122cce1 100644 --- a/spec/uploaders/gitlab_uploader_spec.rb +++ b/spec/uploaders/gitlab_uploader_spec.rb @@ -27,7 +27,7 @@ describe GitlabUploader do describe '#file_cache_storage?' 
do context 'when file storage is used' do before do - uploader_class.cache_storage(:file) + expect(uploader_class).to receive(:cache_storage) { CarrierWave::Storage::File } end it { is_expected.to be_file_cache_storage } @@ -35,7 +35,7 @@ describe GitlabUploader do context 'when is remote storage' do before do - uploader_class.cache_storage(:fog) + expect(uploader_class).to receive(:cache_storage) { CarrierWave::Storage::Fog } end it { is_expected.not_to be_file_cache_storage } diff --git a/spec/uploaders/job_artifact_uploader_spec.rb b/spec/uploaders/job_artifact_uploader_spec.rb index 5612ec7e661..42036d67f3d 100644 --- a/spec/uploaders/job_artifact_uploader_spec.rb +++ b/spec/uploaders/job_artifact_uploader_spec.rb @@ -1,7 +1,8 @@ require 'spec_helper' describe JobArtifactUploader do - let(:job_artifact) { create(:ci_job_artifact) } + let(:store) { described_class::Store::LOCAL } + let(:job_artifact) { create(:ci_job_artifact, file_store: store) } let(:uploader) { described_class.new(job_artifact, :file) } subject { uploader } @@ -11,6 +12,17 @@ describe JobArtifactUploader do cache_dir: %r[artifacts/tmp/cache], work_dir: %r[artifacts/tmp/work] + context "object store is REMOTE" do + before do + stub_artifacts_object_storage + end + + include_context 'with storage', described_class::Store::REMOTE + + it_behaves_like "builds correct paths", + store_dir: %r[\h{2}/\h{2}/\h{64}/\d{4}_\d{1,2}_\d{1,2}/\d+/\d+\z] + end + describe '#open' do subject { uploader.open } @@ -36,6 +48,17 @@ describe JobArtifactUploader do end end end + + context 'when trace is stored in Object storage' do + before do + allow(uploader).to receive(:file_storage?) { false } + allow(uploader).to receive(:url) { 'http://object_storage.com/trace' } + end + + it 'returns http io stream' do + is_expected.to be_a(Gitlab::Ci::Trace::HttpIO) + end + end end context 'file is stored in valid local_path' do @@ -55,4 +78,14 @@ describe JobArtifactUploader do it { is_expected.to include("/#{job_artifact.job_id}/#{job_artifact.id}/") } it { is_expected.to end_with("ci_build_artifacts.zip") } end + + describe "#migrate!" 
do + before do + uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/trace/sample_trace'))) + stub_artifacts_object_storage + end + + it_behaves_like "migrates", to_store: described_class::Store::REMOTE + it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL + end end diff --git a/spec/uploaders/legacy_artifact_uploader_spec.rb b/spec/uploaders/legacy_artifact_uploader_spec.rb index 54c6a8b869b..eeb6fd90c9d 100644 --- a/spec/uploaders/legacy_artifact_uploader_spec.rb +++ b/spec/uploaders/legacy_artifact_uploader_spec.rb @@ -1,7 +1,8 @@ require 'rails_helper' describe LegacyArtifactUploader do - let(:job) { create(:ci_build) } + let(:store) { described_class::Store::LOCAL } + let(:job) { create(:ci_build, artifacts_file_store: store) } let(:uploader) { described_class.new(job, :legacy_artifacts_file) } let(:local_path) { described_class.root } @@ -20,6 +21,17 @@ describe LegacyArtifactUploader do cache_dir: %r[artifacts/tmp/cache], work_dir: %r[artifacts/tmp/work] + context 'object store is remote' do + before do + stub_artifacts_object_storage + end + + include_context 'with storage', described_class::Store::REMOTE + + it_behaves_like "builds correct paths", + store_dir: %r[\d{4}_\d{1,2}/\d+/\d+\z] + end + describe '#filename' do # we need to use the uploader, as this makes it use the mounter, # which initialises the uploader.file object diff --git a/spec/uploaders/lfs_object_uploader_spec.rb b/spec/uploaders/lfs_object_uploader_spec.rb index 6ebc885daa8..a2fb3886610 100644 --- a/spec/uploaders/lfs_object_uploader_spec.rb +++ b/spec/uploaders/lfs_object_uploader_spec.rb @@ -11,4 +11,62 @@ describe LfsObjectUploader do store_dir: %r[\h{2}/\h{2}], cache_dir: %r[/lfs-objects/tmp/cache], work_dir: %r[/lfs-objects/tmp/work] + + context "object store is REMOTE" do + before do + stub_lfs_object_storage + end + + include_context 'with storage', described_class::Store::REMOTE + + it_behaves_like "builds correct paths", + store_dir: %r[\h{2}/\h{2}] + end + + describe 'migration to object storage' do + context 'with object storage disabled' do + it "is skipped" do + expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async) + + lfs_object + end + end + + context 'with object storage enabled' do + before do + stub_lfs_object_storage(background_upload: true) + end + + it 'is scheduled to run after creation' do + expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with(described_class.name, 'LfsObject', :file, kind_of(Numeric)) + + lfs_object + end + end + end + + describe 'remote file' do + let(:remote) { described_class::Store::REMOTE } + let(:lfs_object) { create(:lfs_object, file_store: remote) } + + context 'with object storage enabled' do + before do + stub_lfs_object_storage + end + + it 'can store file remotely' do + allow(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async) + + store_file(lfs_object) + + expect(lfs_object.file_store).to eq remote + expect(lfs_object.file.path).not_to be_blank + end + end + end + + def store_file(lfs_object) + lfs_object.file = fixture_file_upload(Rails.root.join("spec/fixtures/dk.png"), "image/png") + lfs_object.save!
+ end end diff --git a/spec/uploaders/namespace_file_uploader_spec.rb b/spec/uploaders/namespace_file_uploader_spec.rb index 24a2fc0f72e..a8ba01d70b8 100644 --- a/spec/uploaders/namespace_file_uploader_spec.rb +++ b/spec/uploaders/namespace_file_uploader_spec.rb @@ -13,4 +13,26 @@ describe NamespaceFileUploader do store_dir: %r[uploads/-/system/namespace/\d+], upload_path: IDENTIFIER, absolute_path: %r[#{CarrierWave.root}/uploads/-/system/namespace/\d+/#{IDENTIFIER}] + + context "object_store is REMOTE" do + before do + stub_uploads_object_storage + end + + include_context 'with storage', described_class::Store::REMOTE + + it_behaves_like 'builds correct paths', + store_dir: %r[namespace/\d+/\h+], + upload_path: IDENTIFIER + end + + describe "#migrate!" do + before do + uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/doc_sample.txt'))) + stub_uploads_object_storage + end + + it_behaves_like "migrates", to_store: described_class::Store::REMOTE + it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL + end end diff --git a/spec/uploaders/object_storage_spec.rb b/spec/uploaders/object_storage_spec.rb new file mode 100644 index 00000000000..59e02fecbce --- /dev/null +++ b/spec/uploaders/object_storage_spec.rb @@ -0,0 +1,654 @@ +require 'rails_helper' +require 'carrierwave/storage/fog' + +class Implementation < GitlabUploader + include ObjectStorage::Concern + include ::RecordsUploads::Concern + prepend ::ObjectStorage::Extension::RecordsUploads + + storage_options Gitlab.config.uploads + + private + + # user/:id + def dynamic_segment + File.join(model.class.to_s.underscore, model.id.to_s) + end +end + +describe ObjectStorage do + let(:uploader_class) { Implementation } + let(:object) { build_stubbed(:user) } + let(:uploader) { uploader_class.new(object, :file) } + + describe '#object_store=' do + before do + allow(uploader_class).to receive(:object_store_enabled?).and_return(true) + end + + it "reloads the local storage" do + uploader.object_store = described_class::Store::LOCAL + expect(uploader.file_storage?).to be_truthy + end + + it "reloads the REMOTE storage" do + uploader.object_store = described_class::Store::REMOTE + expect(uploader.file_storage?).to be_falsey + end + + context 'object_store is Store::LOCAL' do + before do + uploader.object_store = described_class::Store::LOCAL + end + + describe '#store_dir' do + it 'is the composition of (base_dir, dynamic_segment)' do + expect(uploader.store_dir).to start_with("uploads/-/system/user/") + end + end + end + + context 'object_store is Store::REMOTE' do + before do + uploader.object_store = described_class::Store::REMOTE + end + + describe '#store_dir' do + it 'is the composition of (dynamic_segment)' do + expect(uploader.store_dir).to start_with("user/") + end + end + end + end + + describe '#object_store' do + it "delegates to <mount>_store on model" do + expect(object).to receive(:file_store) + + uploader.object_store + end + + context 'when store is nil' do + before do + expect(object).to receive(:file_store).and_return(nil) + end + + it "returns Store::LOCAL" do + expect(uploader.object_store).to eq(described_class::Store::LOCAL) + end + end + + context 'when value is set' do + before do + expect(object).to receive(:file_store).and_return(described_class::Store::REMOTE) + end + + it "returns the given value" do + expect(uploader.object_store).to eq(described_class::Store::REMOTE) + end + end + end + + describe '#file_cache_storage?'
do + context 'when file storage is used' do + before do + expect(uploader_class).to receive(:cache_storage) { CarrierWave::Storage::File } + end + + it { expect(uploader).to be_file_cache_storage } + end + + context 'when remote storage is used' do + before do + expect(uploader_class).to receive(:cache_storage) { CarrierWave::Storage::Fog } + end + + it { expect(uploader).not_to be_file_cache_storage } + end + end + + # this means the model shall include + # include RecordsUpload::Concern + # prepend ObjectStorage::Extension::RecordsUploads + # the object_store persistence is delegated to the `Upload` model. + # + context 'when persist_object_store? is false' do + let(:object) { create(:project, :with_avatar) } + let(:uploader) { object.avatar } + + it { expect(object).to be_a(Avatarable) } + it { expect(uploader.persist_object_store?).to be_falsey } + + describe 'delegates the object_store logic to the `Upload` model' do + it 'sets @upload to the found `upload`' do + expect(uploader.upload).to eq(uploader.upload) + end + + it 'sets @object_store to the `Upload` value' do + expect(uploader.object_store).to eq(uploader.upload.store) + end + end + + describe '#migrate!' do + let(:new_store) { ObjectStorage::Store::REMOTE } + + before do + stub_uploads_object_storage(uploader: AvatarUploader) + end + + subject { uploader.migrate!(new_store) } + + it 'persists @object_store to the recorded upload' do + subject + + expect(uploader.upload.store).to eq(new_store) + end + + describe 'fails' do + it 'is handled gracefully' do + store = uploader.object_store + expect_any_instance_of(Upload).to receive(:save!).and_raise("An error") + + expect { subject }.to raise_error("An error") + expect(uploader.exists?).to be_truthy + expect(uploader.upload.store).to eq(store) + end + end + end + end + + # this means the model holds an <mounted_as>_store attribute directly + # and does not delegate the object_store persistence to the `Upload` model. + # + context 'persist_object_store? is true' do + context 'when using JobArtifactsUploader' do + let(:store) { described_class::Store::LOCAL } + let(:object) { create(:ci_job_artifact, :archive, file_store: store) } + let(:uploader) { object.file } + + context 'checking described_class' do + it "includes described_class::Concern" do + expect(uploader).to be_a(described_class::Concern) + end + end + + describe '#use_file' do + context 'when file is stored locally' do + it "calls a regular path" do + expect { |b| uploader.use_file(&b) }.not_to yield_with_args(%r[tmp/cache]) + end + end + + context 'when file is stored remotely' do + let(:store) { described_class::Store::REMOTE } + + before do + stub_artifacts_object_storage + end + + it "calls a cache path" do + expect { |b| uploader.use_file(&b) }.to yield_with_args(%r[tmp/cache]) + end + end + end + + describe '#migrate!' do + subject { uploader.migrate!(new_store) } + + shared_examples "updates the underlying <mounted>_store" do + it do + subject + + expect(object.file_store).to eq(new_store) + end + end + + context 'when using the same storage' do + let(:new_store) { store } + + it "does not migrate the storage" do + expect(uploader).not_to receive(:store!) + + subject
+ expect(uploader.object_store).to eq(store) + end + end + + context 'when migrating to local storage' do + let(:store) { described_class::Store::REMOTE } + let(:new_store) { described_class::Store::LOCAL } + + before do + stub_artifacts_object_storage + end + + include_examples "updates the underlying <mounted>_store" + + it "the local file does not exist" do + expect(File.exist?(uploader.path)).to eq(false) + end + + it "the remote file exists" do + expect(uploader.file.exists?).to be_truthy + end + + it "migrates the file" do + subject + + expect(uploader.object_store).to eq(new_store) + expect(File.exist?(uploader.path)).to eq(true) + end + end + + context 'when migrating to remote storage' do + let(:new_store) { described_class::Store::REMOTE } + let!(:current_path) { uploader.path } + + it "the file exists" do + expect(File.exist?(current_path)).to eq(true) + end + + context 'when storage is disabled' do + before do + stub_artifacts_object_storage(enabled: false) + end + + it "raises an error" do + expect { subject }.to raise_error(/Object Storage is not enabled/) + end + end + + context 'when credentials are set' do + before do + stub_artifacts_object_storage + end + + include_examples "updates the underlying <mounted>_store" + + it "migrates the file" do + subject + + expect(uploader.object_store).to eq(new_store) + end + + it "deletes the original file" do + subject + + expect(File.exist?(current_path)).to eq(false) + end + + context 'when subject save fails' do + before do + expect(uploader).to receive(:persist_object_store!).and_raise(RuntimeError, "exception") + end + + it "original file is not removed" do + expect { subject }.to raise_error(/exception/) + + expect(File.exist?(current_path)).to eq(true) + end + end + end + end + end + end + end + + describe '#fog_directory' do + let(:remote_directory) { 'directory' } + + before do + allow(uploader_class).to receive(:options) do + double(object_store: double(remote_directory: remote_directory)) + end + end + + subject { uploader.fog_directory } + + it { is_expected.to eq(remote_directory) } + end + + context 'when file is in use' do + def when_file_is_in_use + uploader.use_file do + yield + end + end + + it 'cannot migrate' do + when_file_is_in_use do + expect(uploader).not_to receive(:unsafe_migrate!) + + expect { uploader.migrate!(described_class::Store::REMOTE) }.to raise_error('exclusive lease already taken') + end + end + + it 'cannot use_file' do + when_file_is_in_use do + expect(uploader).not_to receive(:unsafe_use_file) + + expect { uploader.use_file }.to raise_error('exclusive lease already taken') + end + end + end + + describe '#fog_credentials' do + let(:connection) { Settingslogic.new("provider" => "AWS") } + + before do + allow(uploader_class).to receive(:options) do + double(object_store: double(connection: connection)) + end + end + + subject { uploader.fog_credentials } + + it { is_expected.to eq(provider: 'AWS') } + end + + describe '#fog_public' do + subject { uploader.fog_public } + + it { is_expected.to eq(false) } + end + + describe '.workhorse_authorize' do + subject { uploader_class.workhorse_authorize } + + before do + # ensure that we use the regular Fog libraries; + # other tests might call `Fog.mock!`, and + # that would make these tests fail + Fog.unmock!
+ end + + shared_examples 'uses local storage' do + it "returns a temporary path" do + is_expected.to have_key(:TempPath) + + expect(subject[:TempPath]).to start_with(uploader_class.root) + expect(subject[:TempPath]).to include(described_class::TMP_UPLOAD_PATH) + end + + it "does not return remote store" do + is_expected.not_to have_key(:RemoteObject) + end + end + + shared_examples 'uses remote storage' do + it "returns remote store" do + is_expected.to have_key(:RemoteObject) + + expect(subject[:RemoteObject]).to have_key(:ID) + expect(subject[:RemoteObject]).to have_key(:GetURL) + expect(subject[:RemoteObject]).to have_key(:DeleteURL) + expect(subject[:RemoteObject]).to have_key(:StoreURL) + expect(subject[:RemoteObject][:GetURL]).to include(described_class::TMP_UPLOAD_PATH) + expect(subject[:RemoteObject][:DeleteURL]).to include(described_class::TMP_UPLOAD_PATH) + expect(subject[:RemoteObject][:StoreURL]).to include(described_class::TMP_UPLOAD_PATH) + end + + it "does not return local store" do + is_expected.not_to have_key(:TempPath) + end + end + + context 'when object storage is disabled' do + before do + allow(Gitlab.config.uploads.object_store).to receive(:enabled) { false } + end + + it_behaves_like 'uses local storage' + end + + context 'when object storage is enabled' do + before do + allow(Gitlab.config.uploads.object_store).to receive(:enabled) { true } + end + + context 'when direct upload is enabled' do + before do + allow(Gitlab.config.uploads.object_store).to receive(:direct_upload) { true } + end + + context 'uses AWS' do + before do + expect(uploader_class).to receive(:object_store_credentials) do + { provider: "AWS", + aws_access_key_id: "AWS_ACCESS_KEY_ID", + aws_secret_access_key: "AWS_SECRET_ACCESS_KEY", + region: "eu-central-1" } + end + end + + it_behaves_like 'uses remote storage' do + let(:storage_url) { "https://uploads.s3-eu-central-1.amazonaws.com/" } + + it 'returns links for S3' do + expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url) + expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url) + expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url) + end + end + end + + context 'uses Google' do + before do + expect(uploader_class).to receive(:object_store_credentials) do + { provider: "Google", + google_storage_access_key_id: 'ACCESS_KEY_ID', + google_storage_secret_access_key: 'SECRET_ACCESS_KEY' } + end + end + + it_behaves_like 'uses remote storage' do + let(:storage_url) { "https://storage.googleapis.com/uploads/" } + + it 'returns links for Google Cloud' do + expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url) + expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url) + expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url) + end + end + end + + context 'uses GDK/minio' do + before do + expect(uploader_class).to receive(:object_store_credentials) do + { provider: "AWS", + aws_access_key_id: "AWS_ACCESS_KEY_ID", + aws_secret_access_key: "AWS_SECRET_ACCESS_KEY", + endpoint: 'http://127.0.0.1:9000', + path_style: true, + region: "gdk" } + end + end + + it_behaves_like 'uses remote storage' do + let(:storage_url) { "http://127.0.0.1:9000/uploads/" } + + it 'returns links for S3' do + expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url) + expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url) + expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url) + end + end + end + end + + context 'when direct upload is disabled' do + before do +
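# with direct upload off, Workhorse is expected to fall back to a local TempPath +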
allow(Gitlab.config.uploads.object_store).to receive(:direct_upload) { false } + end + + it_behaves_like 'uses local storage' + end + end + end + + describe '#store_workhorse_file!' do + subject do + uploader.store_workhorse_file!(params, :file) + end + + context 'when local file is used' do + context 'when valid file is used' do + let(:target_path) do + File.join(uploader_class.root, uploader_class::TMP_UPLOAD_PATH) + end + + before do + FileUtils.mkdir_p(target_path) + end + + context 'when no filename is specified' do + let(:params) do + { "file.path" => "test/file" } + end + + it 'raises an error' do + expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Missing filename/) + end + end + + context 'when invalid file is specified' do + let(:file_path) do + File.join(target_path, "..", "test.file") + end + + before do + FileUtils.touch(file_path) + end + + let(:params) do + { "file.path" => file_path, + "file.name" => "my_file.txt" } + end + + it 'raises an error' do + expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Bad file path/) + end + end + + context 'when filename is specified' do + let(:params) do + { "file.path" => tmp_file, + "file.name" => "my_file.txt" } + end + + let(:tmp_file) { Tempfile.new('filename', target_path) } + + before do + FileUtils.touch(tmp_file) + end + + after do + FileUtils.rm_f(tmp_file) + end + + it 'succeeds' do + expect { subject }.not_to raise_error + + expect(uploader).to be_exists + end + + it 'uses the proper path' do + subject + + expect(uploader.path).to start_with(uploader_class.root) + expect(uploader.path).to end_with("my_file.txt") + end + + it 'removes the source file' do + subject + + expect(File.exist?(tmp_file.path)).to be_falsey + end + end + end + end + + context 'when remote file is used' do + let!(:fog_connection) do + stub_uploads_object_storage(uploader_class) + end + + context 'when valid file is used' do + context 'when no filename is specified' do + let(:params) do + { "file.remote_id" => "test/123123" } + end + + it 'raises an error' do + expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Missing filename/) + end + end + + context 'when invalid file is specified' do + let(:params) do + { "file.remote_id" => "../test/123123", + "file.name" => "my_file.txt" } + end + + it 'raises an error' do + expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Bad file path/) + end + end + + context 'when a non-existing file is specified' do + let(:params) do + { "file.remote_id" => "test/12312300", + "file.name" => "my_file.txt" } + end + + it 'raises an error' do + expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Missing file/) + end + end + + context 'when filename is specified' do + let(:params) do + { "file.remote_id" => "test/123123", + "file.name" => "my_file.txt" } + end + + let!(:fog_file) do + fog_connection.directories.get('uploads').files.create( + key: 'tmp/upload/test/123123', + body: 'content' + ) + end + + it 'succeeds' do + expect { subject }.not_to raise_error + + expect(uploader).to be_exists + end + + it 'does not use a temporary path' do + subject + + expect(uploader.path).not_to be_nil + expect(uploader.path).not_to include('tmp/upload') + expect(uploader.url).to include('/my_file.txt') + end + + it 'sets the url' do + subject + + expect(uploader.url).not_to be_nil + expect(uploader.url).to include('/my_file.txt') + end + end + end + end + + context 'when no file is used' do + let(:params) { {} } + + it 'raises an error' do + expect {
subject }.to raise_error(uploader_class::RemoteStoreError, /Bad file/) + end + end + end +end diff --git a/spec/uploaders/personal_file_uploader_spec.rb b/spec/uploaders/personal_file_uploader_spec.rb index ed1fba6edda..c70521d90dc 100644 --- a/spec/uploaders/personal_file_uploader_spec.rb +++ b/spec/uploaders/personal_file_uploader_spec.rb @@ -14,6 +14,18 @@ describe PersonalFileUploader do upload_path: IDENTIFIER, absolute_path: %r[#{CarrierWave.root}/uploads/-/system/personal_snippet/\d+/#{IDENTIFIER}] + context "object_store is REMOTE" do + before do + stub_uploads_object_storage + end + + include_context 'with storage', described_class::Store::REMOTE + + it_behaves_like 'builds correct paths', + store_dir: %r[\d+/\h+], + upload_path: IDENTIFIER + end + describe '#to_h' do before do subject.instance_variable_set(:@secret, 'secret') @@ -30,4 +42,14 @@ describe PersonalFileUploader do ) end end + + describe "#migrate!" do + before do + uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/doc_sample.txt'))) + stub_uploads_object_storage + end + + it_behaves_like "migrates", to_store: described_class::Store::REMOTE + it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL + end end diff --git a/spec/views/ci/lints/show.html.haml_spec.rb b/spec/views/projects/ci/lints/show.html.haml_spec.rb index 7724d54c569..2f0cd38c14a 100644 --- a/spec/views/ci/lints/show.html.haml_spec.rb +++ b/spec/views/projects/ci/lints/show.html.haml_spec.rb @@ -1,11 +1,13 @@ require 'spec_helper' -describe 'ci/lints/show' do +describe 'projects/ci/lints/show' do include Devise::Test::ControllerHelpers + let(:project) { create(:project, :repository) } + let(:config_processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(content)) } describe 'XSS protection' do - let(:config_processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(content)) } before do + assign(:project, project) assign(:status, true) assign(:builds, config_processor.builds) assign(:stages, config_processor.stages) @@ -47,22 +49,21 @@ describe 'ci/lints/show' do end end - let(:content) do - { - build_template: { - script: './build.sh', - tags: ['dotnet'], - only: ['test@dude/repo'], - except: ['deploy'], - environment: 'testing' + context 'when the content is valid' do + let(:content) do + { + build_template: { + script: './build.sh', + tags: ['dotnet'], + only: ['test@dude/repo'], + except: ['deploy'], + environment: 'testing' + } } - } - end - - let(:config_processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(content)) } + end - context 'when the content is valid' do before do + assign(:project, project) assign(:status, true) assign(:builds, config_processor.builds) assign(:stages, config_processor.stages) @@ -82,6 +83,7 @@ describe 'ci/lints/show' do context 'when the content is invalid' do before do + assign(:project, project) assign(:status, false) assign(:error, 'Undefined error') end diff --git a/spec/workers/object_storage_upload_worker_spec.rb b/spec/workers/object_storage_upload_worker_spec.rb new file mode 100644 index 00000000000..32ddcbe9757 --- /dev/null +++ b/spec/workers/object_storage_upload_worker_spec.rb @@ -0,0 +1,108 @@ +require 'spec_helper' + +describe ObjectStorageUploadWorker do + let(:local) { ObjectStorage::Store::LOCAL } + let(:remote) { ObjectStorage::Store::REMOTE } + + def perform + described_class.perform_async(uploader_class.name, subject_class, file_field, subject_id) + end + + context 'for LFS' do + let!(:lfs_object) { create(:lfs_object, :with_file, 
file_store: local) } + let(:uploader_class) { LfsObjectUploader } + let(:subject_class) { LfsObject } + let(:file_field) { :file } + let(:subject_id) { lfs_object.id } + + context 'when object storage is enabled' do + before do + stub_lfs_object_storage(background_upload: true) + end + + it 'uploads object to storage' do + expect { perform }.to change { lfs_object.reload.file_store }.from(local).to(remote) + end + + context 'when background upload is disabled' do + before do + allow(Gitlab.config.lfs.object_store).to receive(:background_upload) { false } + end + + it 'is skipped' do + expect { perform }.not_to change { lfs_object.reload.file_store } + end + end + end + + context 'when object storage is disabled' do + before do + stub_lfs_object_storage(enabled: false) + end + + it "doesn't migrate files" do + perform + + expect(lfs_object.reload.file_store).to eq(local) + end + end + end + + context 'for legacy artifacts' do + let(:build) { create(:ci_build, :legacy_artifacts) } + let(:uploader_class) { LegacyArtifactUploader } + let(:subject_class) { Ci::Build } + let(:file_field) { :artifacts_file } + let(:subject_id) { build.id } + + context 'when local storage is used' do + let(:store) { local } + + context 'and remote storage is defined' do + before do + stub_artifacts_object_storage(background_upload: true) + end + + it "migrates file to remote storage" do + perform + + expect(build.reload.artifacts_file_store).to eq(remote) + end + + context 'for artifacts_metadata' do + let(:file_field) { :artifacts_metadata } + + it 'migrates metadata to remote storage' do + perform + + expect(build.reload.artifacts_metadata_store).to eq(remote) + end + end + end + end + end + + context 'for job artifacts' do + let(:artifact) { create(:ci_job_artifact, :archive) } + let(:uploader_class) { JobArtifactUploader } + let(:subject_class) { Ci::JobArtifact } + let(:file_field) { :file } + let(:subject_id) { artifact.id } + + context 'when local storage is used' do + let(:store) { local } + + context 'and remote storage is defined' do + before do + stub_artifacts_object_storage(background_upload: true) + end + + it "migrates file to remote storage" do + perform + + expect(artifact.reload.file_store).to eq(remote) + end + end + end + end +end