diff options
Diffstat (limited to 'spec/lib')
253 files changed, 9570 insertions, 3273 deletions
diff --git a/spec/lib/api/entities/merge_request_basic_spec.rb b/spec/lib/api/entities/merge_request_basic_spec.rb index 8572b067984..b9d6ab7a652 100644 --- a/spec/lib/api/entities/merge_request_basic_spec.rb +++ b/spec/lib/api/entities/merge_request_basic_spec.rb @@ -9,11 +9,22 @@ RSpec.describe ::API::Entities::MergeRequestBasic do let_it_be(:labels) { create_list(:label, 3) } let_it_be(:merge_requests) { create_list(:labeled_merge_request, 10, :unique_branches, labels: labels) } + let_it_be(:entity) { described_class.new(merge_request) } + # This mimics the behavior of the `Grape::Entity` serializer def present(obj) described_class.new(obj).presented end + subject { entity.as_json } + + it 'includes basic fields' do + is_expected.to include( + draft: merge_request.draft?, + work_in_progress: merge_request.draft? + ) + end + context "with :with_api_entity_associations scope" do let(:scope) { MergeRequest.with_api_entity_associations } diff --git a/spec/lib/api/helpers/runner_helpers_spec.rb b/spec/lib/api/helpers/runner_helpers_spec.rb new file mode 100644 index 00000000000..65b35845aab --- /dev/null +++ b/spec/lib/api/helpers/runner_helpers_spec.rb @@ -0,0 +1,71 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe API::Helpers::Runner do + let(:ip_address) { '1.2.3.4' } + let(:runner_class) do + Class.new do + include API::Helpers + include API::Helpers::Runner + + attr_accessor :params + + def initialize(params) + @params = params + end + + def ip_address + '1.2.3.4' + end + end + end + + let(:runner_helper) { runner_class.new(runner_params) } + + describe '#get_runner_details_from_request' do + context 'when no runner info is present' do + let(:runner_params) { {} } + + it 'returns the runner IP' do + expect(runner_helper.get_runner_details_from_request).to eq({ ip_address: ip_address }) + end + end + + context 'when runner info is present' do + let(:name) { 'runner' } + let(:version) { '1.2.3' } + let(:revision) { '10.0' } + 
let(:platform) { 'test' } + let(:architecture) { 'arm' } + let(:config) { { 'gpus' => 'all' } } + let(:runner_params) do + { + 'info' => + { + 'name' => name, + 'version' => version, + 'revision' => revision, + 'platform' => platform, + 'architecture' => architecture, + 'config' => config, + 'ignored' => 1 + } + } + end + + subject(:details) { runner_helper.get_runner_details_from_request } + + it 'extracts the runner details', :aggregate_failures do + expect(details.keys).to match_array(%w(name version revision platform architecture config ip_address)) + expect(details['name']).to eq(name) + expect(details['version']).to eq(version) + expect(details['revision']).to eq(revision) + expect(details['platform']).to eq(platform) + expect(details['architecture']).to eq(architecture) + expect(details['config']).to eq(config) + expect(details['ip_address']).to eq(ip_address) + end + end + end +end diff --git a/spec/lib/api/helpers/runner_spec.rb b/spec/lib/api/helpers/runner_spec.rb new file mode 100644 index 00000000000..e55c20b7ab6 --- /dev/null +++ b/spec/lib/api/helpers/runner_spec.rb @@ -0,0 +1,69 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe API::Helpers::Runner do + let(:helper) { Class.new { include API::Helpers::Runner }.new } + + before do + allow(helper).to receive(:env).and_return({}) + end + + describe '#current_job' do + let(:build) { create(:ci_build, :running) } + + it 'handles sticking of a build when a build ID is specified' do + allow(helper).to receive(:params).and_return(id: build.id) + + expect(Gitlab::Database::LoadBalancing::RackMiddleware) + .to receive(:stick_or_unstick) + .with({}, :build, build.id) + + helper.current_job + end + + it 'does not handle sticking if no build ID was specified' do + allow(helper).to receive(:params).and_return({}) + + expect(Gitlab::Database::LoadBalancing::RackMiddleware) + .not_to receive(:stick_or_unstick) + + helper.current_job + end + + it 'returns the build if one could be found' 
do + allow(helper).to receive(:params).and_return(id: build.id) + + expect(helper.current_job).to eq(build) + end + end + + describe '#current_runner' do + let(:runner) { create(:ci_runner, token: 'foo') } + + it 'handles sticking of a runner if a token is specified' do + allow(helper).to receive(:params).and_return(token: runner.token) + + expect(Gitlab::Database::LoadBalancing::RackMiddleware) + .to receive(:stick_or_unstick) + .with({}, :runner, runner.token) + + helper.current_runner + end + + it 'does not handle sticking if no token was specified' do + allow(helper).to receive(:params).and_return({}) + + expect(Gitlab::Database::LoadBalancing::RackMiddleware) + .not_to receive(:stick_or_unstick) + + helper.current_runner + end + + it 'returns the runner if one could be found' do + allow(helper).to receive(:params).and_return(token: runner.token) + + expect(helper.current_runner).to eq(runner) + end + end +end diff --git a/spec/lib/api/helpers_spec.rb b/spec/lib/api/helpers_spec.rb index 87cd0d4388c..6e48ee4c315 100644 --- a/spec/lib/api/helpers_spec.rb +++ b/spec/lib/api/helpers_spec.rb @@ -7,6 +7,66 @@ RSpec.describe API::Helpers do subject { Class.new.include(described_class).new } + describe '#current_user' do + include Rack::Test::Methods + + let(:user) { build(:user, id: 42) } + + let(:helper) do + Class.new(Grape::API::Instance) do + helpers API::APIGuard::HelperMethods + helpers API::Helpers + format :json + + get 'user' do + current_user ? { id: current_user.id } : { found: false } + end + + get 'protected' do + authenticate_by_gitlab_geo_node_token! 
+ end + end + end + + def app + helper + end + + before do + allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true) + end + + it 'handles sticking when a user could be found' do + allow_any_instance_of(API::Helpers).to receive(:initial_current_user).and_return(user) + + expect(Gitlab::Database::LoadBalancing::RackMiddleware) + .to receive(:stick_or_unstick).with(any_args, :user, 42) + + get 'user' + + expect(Gitlab::Json.parse(last_response.body)).to eq({ 'id' => user.id }) + end + + it 'does not handle sticking if no user could be found' do + allow_any_instance_of(API::Helpers).to receive(:initial_current_user).and_return(nil) + + expect(Gitlab::Database::LoadBalancing::RackMiddleware) + .not_to receive(:stick_or_unstick) + + get 'user' + + expect(Gitlab::Json.parse(last_response.body)).to eq({ 'found' => false }) + end + + it 'returns the user if one could be found' do + allow_any_instance_of(API::Helpers).to receive(:initial_current_user).and_return(user) + + get 'user' + + expect(Gitlab::Json.parse(last_response.body)).to eq({ 'id' => user.id }) + end + end + describe '#find_project' do let(:project) { create(:project) } diff --git a/spec/lib/backup/gitaly_backup_spec.rb b/spec/lib/backup/gitaly_backup_spec.rb new file mode 100644 index 00000000000..13567ead842 --- /dev/null +++ b/spec/lib/backup/gitaly_backup_spec.rb @@ -0,0 +1,110 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Backup::GitalyBackup do + let(:progress) do + Tempfile.new('progress').tap do |progress| + progress.unlink + end + end + + after do + progress.close + end + + subject { described_class.new(progress) } + + context 'unknown' do + it 'fails to start unknown' do + expect { subject.start(:unknown) }.to raise_error(::Backup::Error, 'unknown backup type: unknown') + end + end + + context 'create' do + RSpec.shared_examples 'creates a repository backup' do + it 'creates repository bundles', :aggregate_failures do + # Add data to the wiki, 
design repositories, and snippets, so they will be included in the dump. + create(:wiki_page, container: project) + create(:design, :with_file, issue: create(:issue, project: project)) + project_snippet = create(:project_snippet, :repository, project: project) + personal_snippet = create(:personal_snippet, :repository, author: project.owner) + + subject.start(:create) + subject.enqueue(project, Gitlab::GlRepository::PROJECT) + subject.enqueue(project, Gitlab::GlRepository::WIKI) + subject.enqueue(project, Gitlab::GlRepository::DESIGN) + subject.enqueue(personal_snippet, Gitlab::GlRepository::SNIPPET) + subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET) + subject.wait + + expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.bundle')) + expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.wiki.bundle')) + expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.design.bundle')) + expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', personal_snippet.disk_path + '.bundle')) + expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project_snippet.disk_path + '.bundle')) + end + + it 'raises when the exit code not zero' do + expect(subject).to receive(:bin_path).and_return(Gitlab::Utils.which('false')) + + subject.start(:create) + expect { subject.wait }.to raise_error(::Backup::Error, 'gitaly-backup exit status 1') + end + end + + context 'hashed storage' do + let_it_be(:project) { create(:project, :repository) } + + it_behaves_like 'creates a repository backup' + end + + context 'legacy storage' do + let_it_be(:project) { create(:project, :repository, :legacy_storage) } + + it_behaves_like 'creates a repository backup' + end + end + + context 'restore' do + let_it_be(:project) { create(:project, :repository) } + let_it_be(:personal_snippet) { create(:personal_snippet, author: 
project.owner) } + let_it_be(:project_snippet) { create(:project_snippet, project: project, author: project.owner) } + + def copy_bundle_to_backup_path(bundle_name, destination) + FileUtils.mkdir_p(File.join(Gitlab.config.backup.path, 'repositories', File.dirname(destination))) + FileUtils.cp(Rails.root.join('spec/fixtures/lib/backup', bundle_name), File.join(Gitlab.config.backup.path, 'repositories', destination)) + end + + it 'restores from repository bundles', :aggregate_failures do + copy_bundle_to_backup_path('project_repo.bundle', project.disk_path + '.bundle') + copy_bundle_to_backup_path('wiki_repo.bundle', project.disk_path + '.wiki.bundle') + copy_bundle_to_backup_path('design_repo.bundle', project.disk_path + '.design.bundle') + copy_bundle_to_backup_path('personal_snippet_repo.bundle', personal_snippet.disk_path + '.bundle') + copy_bundle_to_backup_path('project_snippet_repo.bundle', project_snippet.disk_path + '.bundle') + + subject.start(:restore) + subject.enqueue(project, Gitlab::GlRepository::PROJECT) + subject.enqueue(project, Gitlab::GlRepository::WIKI) + subject.enqueue(project, Gitlab::GlRepository::DESIGN) + subject.enqueue(personal_snippet, Gitlab::GlRepository::SNIPPET) + subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET) + subject.wait + + collect_commit_shas = -> (repo) { repo.commits('master', limit: 10).map(&:sha) } + + expect(collect_commit_shas.call(project.repository)).to eq(['393a7d860a5a4c3cc736d7eb00604e3472bb95ec']) + expect(collect_commit_shas.call(project.wiki.repository)).to eq(['c74b9948d0088d703ee1fafeddd9ed9add2901ea']) + expect(collect_commit_shas.call(project.design_repository)).to eq(['c3cd4d7bd73a51a0f22045c3a4c871c435dc959d']) + expect(collect_commit_shas.call(personal_snippet.repository)).to eq(['3b3c067a3bc1d1b695b51e2be30c0f8cf698a06e']) + expect(collect_commit_shas.call(project_snippet.repository)).to eq(['6e44ba56a4748be361a841e759c20e421a1651a1']) + end + + it 'raises when the exit code not zero' do + 
expect(subject).to receive(:bin_path).and_return(Gitlab::Utils.which('false')) + + subject.start(:restore) + expect { subject.wait }.to raise_error(::Backup::Error, 'gitaly-backup exit status 1') + end + end +end diff --git a/spec/lib/backup/gitaly_rpc_backup_spec.rb b/spec/lib/backup/gitaly_rpc_backup_spec.rb new file mode 100644 index 00000000000..fb442f4a86f --- /dev/null +++ b/spec/lib/backup/gitaly_rpc_backup_spec.rb @@ -0,0 +1,153 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Backup::GitalyRpcBackup do + let(:progress) { spy(:stdout) } + + subject { described_class.new(progress) } + + after do + # make sure we do not leave behind any backup files + FileUtils.rm_rf(File.join(Gitlab.config.backup.path, 'repositories')) + end + + context 'unknown' do + it 'fails to start unknown' do + expect { subject.start(:unknown) }.to raise_error(::Backup::Error, 'unknown backup type: unknown') + end + end + + context 'create' do + RSpec.shared_examples 'creates a repository backup' do + it 'creates repository bundles', :aggregate_failures do + # Add data to the wiki, design repositories, and snippets, so they will be included in the dump. 
+ create(:wiki_page, container: project) + create(:design, :with_file, issue: create(:issue, project: project)) + project_snippet = create(:project_snippet, :repository, project: project) + personal_snippet = create(:personal_snippet, :repository, author: project.owner) + + subject.start(:create) + subject.enqueue(project, Gitlab::GlRepository::PROJECT) + subject.enqueue(project, Gitlab::GlRepository::WIKI) + subject.enqueue(project, Gitlab::GlRepository::DESIGN) + subject.enqueue(personal_snippet, Gitlab::GlRepository::SNIPPET) + subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET) + subject.wait + + expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.bundle')) + expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.wiki.bundle')) + expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.design.bundle')) + expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', personal_snippet.disk_path + '.bundle')) + expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project_snippet.disk_path + '.bundle')) + end + + context 'failure' do + before do + allow_next_instance_of(Repository) do |repository| + allow(repository).to receive(:bundle_to_disk) { raise 'Fail in tests' } + end + end + + it 'logs an appropriate message', :aggregate_failures do + subject.start(:create) + subject.enqueue(project, Gitlab::GlRepository::PROJECT) + subject.wait + + expect(progress).to have_received(:puts).with("[Failed] backing up #{project.full_path} (#{project.disk_path})") + expect(progress).to have_received(:puts).with("Error Fail in tests") + end + end + end + + context 'hashed storage' do + let_it_be(:project) { create(:project, :repository) } + + it_behaves_like 'creates a repository backup' + end + + context 'legacy storage' do + let_it_be(:project) { create(:project, :repository, :legacy_storage) } + + 
it_behaves_like 'creates a repository backup' + end + end + + context 'restore' do + let_it_be(:project) { create(:project, :repository) } + let_it_be(:personal_snippet) { create(:personal_snippet, author: project.owner) } + let_it_be(:project_snippet) { create(:project_snippet, project: project, author: project.owner) } + + def copy_bundle_to_backup_path(bundle_name, destination) + FileUtils.mkdir_p(File.join(Gitlab.config.backup.path, 'repositories', File.dirname(destination))) + FileUtils.cp(Rails.root.join('spec/fixtures/lib/backup', bundle_name), File.join(Gitlab.config.backup.path, 'repositories', destination)) + end + + it 'restores from repository bundles', :aggregate_failures do + copy_bundle_to_backup_path('project_repo.bundle', project.disk_path + '.bundle') + copy_bundle_to_backup_path('wiki_repo.bundle', project.disk_path + '.wiki.bundle') + copy_bundle_to_backup_path('design_repo.bundle', project.disk_path + '.design.bundle') + copy_bundle_to_backup_path('personal_snippet_repo.bundle', personal_snippet.disk_path + '.bundle') + copy_bundle_to_backup_path('project_snippet_repo.bundle', project_snippet.disk_path + '.bundle') + + subject.start(:restore) + subject.enqueue(project, Gitlab::GlRepository::PROJECT) + subject.enqueue(project, Gitlab::GlRepository::WIKI) + subject.enqueue(project, Gitlab::GlRepository::DESIGN) + subject.enqueue(personal_snippet, Gitlab::GlRepository::SNIPPET) + subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET) + subject.wait + + collect_commit_shas = -> (repo) { repo.commits('master', limit: 10).map(&:sha) } + + expect(collect_commit_shas.call(project.repository)).to eq(['393a7d860a5a4c3cc736d7eb00604e3472bb95ec']) + expect(collect_commit_shas.call(project.wiki.repository)).to eq(['c74b9948d0088d703ee1fafeddd9ed9add2901ea']) + expect(collect_commit_shas.call(project.design_repository)).to eq(['c3cd4d7bd73a51a0f22045c3a4c871c435dc959d']) + expect(collect_commit_shas.call(personal_snippet.repository)).to 
eq(['3b3c067a3bc1d1b695b51e2be30c0f8cf698a06e']) + expect(collect_commit_shas.call(project_snippet.repository)).to eq(['6e44ba56a4748be361a841e759c20e421a1651a1']) + end + + it 'cleans existing repositories', :aggregate_failures do + expect_next_instance_of(DesignManagement::Repository) do |repository| + expect(repository).to receive(:remove) + end + + # 4 times = project repo + wiki repo + project_snippet repo + personal_snippet repo + expect(Repository).to receive(:new).exactly(4).times.and_wrap_original do |method, *original_args| + full_path, container, kwargs = original_args + + repository = method.call(full_path, container, **kwargs) + + expect(repository).to receive(:remove) + + repository + end + + subject.start(:restore) + subject.enqueue(project, Gitlab::GlRepository::PROJECT) + subject.enqueue(project, Gitlab::GlRepository::WIKI) + subject.enqueue(project, Gitlab::GlRepository::DESIGN) + subject.enqueue(personal_snippet, Gitlab::GlRepository::SNIPPET) + subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET) + subject.wait + end + + context 'failure' do + before do + allow_next_instance_of(Repository) do |repository| + allow(repository).to receive(:create_repository) { raise 'Fail in tests' } + allow(repository).to receive(:create_from_bundle) { raise 'Fail in tests' } + end + end + + it 'logs an appropriate message', :aggregate_failures do + subject.start(:restore) + subject.enqueue(project, Gitlab::GlRepository::PROJECT) + subject.wait + + expect(progress).to have_received(:puts).with("[Failed] restoring #{project.full_path} (#{project.disk_path})") + expect(progress).to have_received(:puts).with("Error Fail in tests") + end + end + end +end diff --git a/spec/lib/backup/repositories_spec.rb b/spec/lib/backup/repositories_spec.rb index 7a8cc713e4f..d77b1e0f276 100644 --- a/spec/lib/backup/repositories_spec.rb +++ b/spec/lib/backup/repositories_spec.rb @@ -3,37 +3,28 @@ require 'spec_helper' RSpec.describe Backup::Repositories do - let(:progress) 
{ StringIO.new } + let(:progress) { spy(:stdout) } + let(:strategy) { spy(:strategy) } - subject { described_class.new(progress) } - - before do - allow(progress).to receive(:puts) - allow(progress).to receive(:print) - - allow_next_instance_of(described_class) do |instance| - allow(instance).to receive(:progress).and_return(progress) - end - end + subject { described_class.new(progress, strategy: strategy) } describe '#dump' do let_it_be(:projects) { create_list(:project, 5, :repository) } RSpec.shared_examples 'creates repository bundles' do - specify :aggregate_failures do - # Add data to the wiki, design repositories, and snippets, so they will be included in the dump. - create(:wiki_page, container: project) - create(:design, :with_file, issue: create(:issue, project: project)) + it 'calls enqueue for each repository type', :aggregate_failures do project_snippet = create(:project_snippet, :repository, project: project) personal_snippet = create(:personal_snippet, :repository, author: project.owner) subject.dump(max_concurrency: 1, max_storage_concurrency: 1) - expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.bundle')) - expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.wiki' + '.bundle')) - expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.design' + '.bundle')) - expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', personal_snippet.disk_path + '.bundle')) - expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project_snippet.disk_path + '.bundle')) + expect(strategy).to have_received(:start).with(:create) + expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT) + expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI) + expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN) + 
expect(strategy).to have_received(:enqueue).with(project_snippet, Gitlab::GlRepository::SNIPPET) + expect(strategy).to have_received(:enqueue).with(personal_snippet, Gitlab::GlRepository::SNIPPET) + expect(strategy).to have_received(:wait) end end @@ -53,16 +44,18 @@ RSpec.describe Backup::Repositories do it 'creates the expected number of threads' do expect(Thread).not_to receive(:new) + expect(strategy).to receive(:start).with(:create) projects.each do |project| - expect(subject).to receive(:dump_project).with(project).and_call_original + expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT) end + expect(strategy).to receive(:wait) subject.dump(max_concurrency: 1, max_storage_concurrency: 1) end describe 'command failure' do - it 'dump_project raises an error' do - allow(subject).to receive(:dump_project).and_raise(IOError) + it 'enqueue_project raises an error' do + allow(strategy).to receive(:enqueue).with(anything, Gitlab::GlRepository::PROJECT).and_raise(IOError) expect { subject.dump(max_concurrency: 1, max_storage_concurrency: 1) }.to raise_error(IOError) end @@ -100,9 +93,11 @@ RSpec.describe Backup::Repositories do .exactly(storage_keys.length * (max_storage_concurrency + 1)).times .and_call_original + expect(strategy).to receive(:start).with(:create) projects.each do |project| - expect(subject).to receive(:dump_project).with(project).and_call_original + expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT) end + expect(strategy).to receive(:wait) subject.dump(max_concurrency: 1, max_storage_concurrency: max_storage_concurrency) end @@ -112,17 +107,18 @@ RSpec.describe Backup::Repositories do .exactly(storage_keys.length * (max_storage_concurrency + 1)).times .and_call_original + expect(strategy).to receive(:start).with(:create) projects.each do |project| - expect(subject).to receive(:dump_project).with(project).and_call_original + expect(strategy).to receive(:enqueue).with(project, 
Gitlab::GlRepository::PROJECT) end + expect(strategy).to receive(:wait) subject.dump(max_concurrency: 3, max_storage_concurrency: max_storage_concurrency) end describe 'command failure' do - it 'dump_project raises an error' do - allow(subject).to receive(:dump_project) - .and_raise(IOError) + it 'enqueue_project raises an error' do + allow(strategy).to receive(:enqueue).and_raise(IOError) expect { subject.dump(max_concurrency: 1, max_storage_concurrency: max_storage_concurrency) }.to raise_error(IOError) end @@ -162,61 +158,16 @@ RSpec.describe Backup::Repositories do let_it_be(:personal_snippet) { create(:personal_snippet, author: project.owner) } let_it_be(:project_snippet) { create(:project_snippet, project: project, author: project.owner) } - let(:next_path_to_bundle) do - [ - Rails.root.join('spec/fixtures/lib/backup/project_repo.bundle'), - Rails.root.join('spec/fixtures/lib/backup/wiki_repo.bundle'), - Rails.root.join('spec/fixtures/lib/backup/design_repo.bundle'), - Rails.root.join('spec/fixtures/lib/backup/personal_snippet_repo.bundle'), - Rails.root.join('spec/fixtures/lib/backup/project_snippet_repo.bundle') - ].to_enum - end - - it 'restores repositories from bundles', :aggregate_failures do - allow_next_instance_of(described_class::BackupRestore) do |backup_restore| - allow(backup_restore).to receive(:path_to_bundle).and_return(next_path_to_bundle.next) - end - + it 'calls enqueue for each repository type', :aggregate_failures do subject.restore - collect_commit_shas = -> (repo) { repo.commits('master', limit: 10).map(&:sha) } - - expect(collect_commit_shas.call(project.repository)).to eq(['393a7d860a5a4c3cc736d7eb00604e3472bb95ec']) - expect(collect_commit_shas.call(project.wiki.repository)).to eq(['c74b9948d0088d703ee1fafeddd9ed9add2901ea']) - expect(collect_commit_shas.call(project.design_repository)).to eq(['c3cd4d7bd73a51a0f22045c3a4c871c435dc959d']) - expect(collect_commit_shas.call(personal_snippet.repository)).to 
eq(['3b3c067a3bc1d1b695b51e2be30c0f8cf698a06e']) - expect(collect_commit_shas.call(project_snippet.repository)).to eq(['6e44ba56a4748be361a841e759c20e421a1651a1']) - end - - describe 'command failure' do - before do - expect(Project).to receive(:find_each).and_yield(project) - - allow_next_instance_of(DesignManagement::Repository) do |repository| - allow(repository).to receive(:create_repository) { raise 'Fail in tests' } - end - allow_next_instance_of(Repository) do |repository| - allow(repository).to receive(:create_repository) { raise 'Fail in tests' } - end - end - - context 'hashed storage' do - it 'shows the appropriate error' do - subject.restore - - expect(progress).to have_received(:puts).with("[Failed] restoring #{project.full_path} (#{project.disk_path})") - end - end - - context 'legacy storage' do - let_it_be(:project) { create(:project, :legacy_storage) } - - it 'shows the appropriate error' do - subject.restore - - expect(progress).to have_received(:puts).with("[Failed] restoring #{project.full_path} (#{project.disk_path})") - end - end + expect(strategy).to have_received(:start).with(:restore) + expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT) + expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI) + expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN) + expect(strategy).to have_received(:enqueue).with(project_snippet, Gitlab::GlRepository::SNIPPET) + expect(strategy).to have_received(:enqueue).with(personal_snippet, Gitlab::GlRepository::SNIPPET) + expect(strategy).to have_received(:wait) end context 'restoring object pools' do @@ -242,78 +193,36 @@ RSpec.describe Backup::Repositories do end end - it 'cleans existing repositories' do - success_response = ServiceResponse.success(message: "Valid Snippet Repo") - allow(Snippets::RepositoryValidationService).to receive_message_chain(:new, :execute).and_return(success_response) - - 
expect_next_instance_of(DesignManagement::Repository) do |repository| - expect(repository).to receive(:remove) - end - - # 4 times = project repo + wiki repo + project_snippet repo + personal_snippet repo - expect(Repository).to receive(:new).exactly(4).times.and_wrap_original do |method, *original_args| - full_path, container, kwargs = original_args - - repository = method.call(full_path, container, **kwargs) - - expect(repository).to receive(:remove) - - repository - end - - subject.restore - end - - context 'restoring snippets' do + context 'cleanup snippets' do before do create(:snippet_repository, snippet: personal_snippet) create(:snippet_repository, snippet: project_snippet) - allow_next_instance_of(described_class::BackupRestore) do |backup_restore| - allow(backup_restore).to receive(:path_to_bundle).and_return(next_path_to_bundle.next) - end + error_response = ServiceResponse.error(message: "Repository has more than one branch") + allow(Snippets::RepositoryValidationService).to receive_message_chain(:new, :execute).and_return(error_response) end - context 'when the repository is valid' do - it 'restores the snippet repositories' do - subject.restore - - expect(personal_snippet.snippet_repository.persisted?).to be true - expect(personal_snippet.repository).to exist + it 'shows the appropriate error' do + subject.restore - expect(project_snippet.snippet_repository.persisted?).to be true - expect(project_snippet.repository).to exist - end + expect(progress).to have_received(:puts).with("Snippet #{personal_snippet.full_path} can't be restored: Repository has more than one branch") + expect(progress).to have_received(:puts).with("Snippet #{project_snippet.full_path} can't be restored: Repository has more than one branch") end - context 'when repository is invalid' do - before do - error_response = ServiceResponse.error(message: "Repository has more than one branch") - allow(Snippets::RepositoryValidationService).to receive_message_chain(:new, 
:execute).and_return(error_response) - end - - it 'shows the appropriate error' do - subject.restore - - expect(progress).to have_received(:puts).with("Snippet #{personal_snippet.full_path} can't be restored: Repository has more than one branch") - expect(progress).to have_received(:puts).with("Snippet #{project_snippet.full_path} can't be restored: Repository has more than one branch") - end - - it 'removes the snippets from the DB' do - expect { subject.restore }.to change(PersonalSnippet, :count).by(-1) - .and change(ProjectSnippet, :count).by(-1) - .and change(SnippetRepository, :count).by(-2) - end + it 'removes the snippets from the DB' do + expect { subject.restore }.to change(PersonalSnippet, :count).by(-1) + .and change(ProjectSnippet, :count).by(-1) + .and change(SnippetRepository, :count).by(-2) + end - it 'removes the repository from disk' do - gitlab_shell = Gitlab::Shell.new - shard_name = personal_snippet.repository.shard - path = personal_snippet.disk_path + '.git' + it 'removes the repository from disk' do + gitlab_shell = Gitlab::Shell.new + shard_name = personal_snippet.repository.shard + path = personal_snippet.disk_path + '.git' - subject.restore + subject.restore - expect(gitlab_shell.repository_exists?(shard_name, path)).to eq false - end + expect(gitlab_shell.repository_exists?(shard_name, path)).to eq false end end end diff --git a/spec/lib/banzai/filter/references/label_reference_filter_spec.rb b/spec/lib/banzai/filter/references/label_reference_filter_spec.rb index db7dda96cad..b18d68c8dd4 100644 --- a/spec/lib/banzai/filter/references/label_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/references/label_reference_filter_spec.rb @@ -702,4 +702,72 @@ RSpec.describe Banzai::Filter::References::LabelReferenceFilter do expect(result.css('a').first.text).to eq "#{label.name} in #{project.full_name}" end end + + context 'checking N+1' do + let_it_be(:group) { create(:group) } + let_it_be(:group2) { create(:group) } + let_it_be(:project) 
{ create(:project, :public, namespace: group) } + let_it_be(:project2) { create(:project, :public, namespace: group2) } + let_it_be(:project3) { create(:project, :public) } + let_it_be(:project_label) { create(:label, project: project) } + let_it_be(:project_label2) { create(:label, project: project) } + let_it_be(:project2_label) { create(:label, project: project2) } + let_it_be(:group2_label) { create(:group_label, group: group2, color: '#00ff00') } + let_it_be(:project_reference) { "#{project_label.to_reference}" } + let_it_be(:project_reference2) { "#{project_label2.to_reference}" } + let_it_be(:project2_reference) { "#{project2_label.to_reference}" } + let_it_be(:group2_reference) { "#{project2.full_path}~#{group2_label.name}" } + + it 'does not have N+1 per multiple references per project', :use_sql_query_cache do + markdown = "#{project_reference}" + control_count = 1 + + expect do + reference_filter(markdown) + end.not_to exceed_all_query_limit(control_count) + + markdown = "#{project_reference} ~qwert ~werty ~ertyu ~rtyui #{project_reference2}" + + expect do + reference_filter(markdown) + end.not_to exceed_all_query_limit(control_count) + end + + it 'has N+1 for multiple unique project/group references', :use_sql_query_cache do + # reference to already loaded project, only one query + markdown = "#{project_reference}" + control_count = 1 + + expect do + reference_filter(markdown, project: project) + end.not_to exceed_all_query_limit(control_count) + + # Since we're not batching label queries across projects/groups, + # queries increase when a new project/group is added. 
+ # TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/330359 + # first reference to already loaded project (1), + # second reference requires project and namespace (2), and label (1) + markdown = "#{project_reference} #{group2_reference}" + max_count = control_count + 3 + + expect do + reference_filter(markdown) + end.not_to exceed_all_query_limit(max_count) + + # third reference to already queried project/namespace, nothing extra (no N+1 here) + markdown = "#{project_reference} #{group2_reference} #{project2_reference}" + + expect do + reference_filter(markdown) + end.not_to exceed_all_query_limit(max_count) + + # last reference needs another namespace and label query (2) + markdown = "#{project_reference} #{group2_reference} #{project2_reference} #{project3.full_path}~test_label" + max_count += 2 + + expect do + reference_filter(markdown) + end.not_to exceed_all_query_limit(max_count) + end + end end diff --git a/spec/lib/banzai/filter/references/reference_cache_spec.rb b/spec/lib/banzai/filter/references/reference_cache_spec.rb index 9e2a6f35910..c9404c381d3 100644 --- a/spec/lib/banzai/filter/references/reference_cache_spec.rb +++ b/spec/lib/banzai/filter/references/reference_cache_spec.rb @@ -55,11 +55,12 @@ RSpec.describe Banzai::Filter::References::ReferenceCache do cache_single.load_records_per_parent end.count + expect(control_count).to eq 1 + # Since this is an issue filter that is not batching issue queries # across projects, we have to account for that. 
- # 1 for both projects, 1 for issues in each project == 3 - # TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/330359 - max_count = control_count + 1 + # 1 for original issue, 2 for second route/project, 1 for other issue + max_count = control_count + 3 expect do cache.load_references_per_parent(filter.nodes) diff --git a/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb b/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb index 7ab3b24b1c2..2e324669870 100644 --- a/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb @@ -233,13 +233,15 @@ RSpec.describe Banzai::Filter::References::SnippetReferenceFilter do reference_filter(markdown) end.count + expect(control_count).to eq 1 + markdown = "#{reference} $9999990 $9999991 $9999992 $9999993 #{reference2} something/cool$12" # Since we're not batching snippet queries across projects, # we have to account for that. # 1 for both projects, 1 for snippets in each project == 3 # TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/330359 - max_count = control_count + 1 + max_count = control_count + 2 expect do reference_filter(markdown) diff --git a/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb b/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb index 5f31ad0c8f6..4903f624469 100644 --- a/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb +++ b/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb @@ -17,20 +17,6 @@ RSpec.describe Banzai::Pipeline::PlainMarkdownPipeline do result end - context 'when feature flag honor_escaped_markdown is disabled' do - before do - stub_feature_flags(honor_escaped_markdown: false) - end - - it 'does not escape the markdown' do - result = described_class.call(%q(\!), project: project) - output = result[:output].to_html - - expect(output).to eq('<p data-sourcepos="1:1-1:2">!</p>') - expect(result[:escaped_literals]).to be_falsey - end - end - 
describe 'CommonMark tests', :aggregate_failures do it 'converts all reference punctuation to literals' do reference_chars = Banzai::Filter::MarkdownPreEscapeFilter::REFERENCE_CHARACTERS diff --git a/spec/lib/banzai/pipeline/post_process_pipeline_spec.rb b/spec/lib/banzai/pipeline/post_process_pipeline_spec.rb index 55038d58f22..e8df395564a 100644 --- a/spec/lib/banzai/pipeline/post_process_pipeline_spec.rb +++ b/spec/lib/banzai/pipeline/post_process_pipeline_spec.rb @@ -44,18 +44,5 @@ RSpec.describe Banzai::Pipeline::PostProcessPipeline do subject end - - context 'when "optimize_linkable_attributes" is disabled' do - before do - stub_feature_flags(optimize_linkable_attributes: false) - end - - it 'searches for attributes twice' do - expect(doc).to receive(:xpath).exactly(non_related_xpath_calls + 2).times - .and_call_original - - subject - end - end end end diff --git a/spec/lib/banzai/reference_parser/commit_parser_spec.rb b/spec/lib/banzai/reference_parser/commit_parser_spec.rb index 612ce6b93f1..31cece108bf 100644 --- a/spec/lib/banzai/reference_parser/commit_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/commit_parser_spec.rb @@ -130,11 +130,11 @@ RSpec.describe Banzai::ReferenceParser::CommitParser do end context 'when checking commits on another projects' do - let(:control_links) do + let!(:control_links) do [commit_link] end - let(:actual_links) do + let!(:actual_links) do control_links + [commit_link, commit_link] end diff --git a/spec/lib/banzai/reference_parser/issue_parser_spec.rb b/spec/lib/banzai/reference_parser/issue_parser_spec.rb index 76f13e7b3aa..7de78710d34 100644 --- a/spec/lib/banzai/reference_parser/issue_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/issue_parser_spec.rb @@ -5,9 +5,11 @@ require 'spec_helper' RSpec.describe Banzai::ReferenceParser::IssueParser do include ReferenceParserHelpers - let(:project) { create(:project, :public) } - let(:user) { create(:user) } - let(:issue) { create(:issue, project: project) } + 
let_it_be(:group) { create(:group, :public) } + let_it_be(:project) { create(:project, :public, group: group) } + let_it_be(:user) { create(:user) } + let_it_be(:issue) { create(:issue, project: project) } + let(:link) { empty_html_link } subject { described_class.new(Banzai::RenderContext.new(project, user)) } @@ -121,7 +123,7 @@ RSpec.describe Banzai::ReferenceParser::IssueParser do end end - context 'when checking multiple merge requests on another project' do + context 'when checking multiple issues on another project' do let(:other_project) { create(:project, :public) } let(:other_issue) { create(:issue, project: other_project) } diff --git a/spec/lib/banzai/reference_parser/merge_request_parser_spec.rb b/spec/lib/banzai/reference_parser/merge_request_parser_spec.rb index 1820141c898..04c35c8b082 100644 --- a/spec/lib/banzai/reference_parser/merge_request_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/merge_request_parser_spec.rb @@ -5,9 +5,11 @@ require 'spec_helper' RSpec.describe Banzai::ReferenceParser::MergeRequestParser do include ReferenceParserHelpers + let(:group) { create(:group, :public) } + let(:project) { create(:project, :public, group: group) } let(:user) { create(:user) } - let(:project) { create(:project, :public) } let(:merge_request) { create(:merge_request, source_project: project) } + subject(:parser) { described_class.new(Banzai::RenderContext.new(merge_request.target_project, user)) } let(:link) { empty_html_link } @@ -16,10 +18,19 @@ RSpec.describe Banzai::ReferenceParser::MergeRequestParser do context 'when the link has a data-issue attribute' do before do project.update_attribute(:visibility_level, Gitlab::VisibilityLevel::PUBLIC) + link['data-project'] = merge_request.project_id.to_s link['data-merge-request'] = merge_request.id.to_s end it_behaves_like "referenced feature visibility", "merge_requests" + + context 'when optimize_merge_request_parser feature flag is off' do + before do + 
stub_feature_flags(optimize_merge_request_parser: false) + end + + it_behaves_like "referenced feature visibility", "merge_requests" + end end end @@ -27,6 +38,7 @@ RSpec.describe Banzai::ReferenceParser::MergeRequestParser do describe 'when the link has a data-merge-request attribute' do context 'using an existing merge request ID' do it 'returns an Array of merge requests' do + link['data-project'] = merge_request.project_id.to_s link['data-merge-request'] = merge_request.id.to_s expect(subject.referenced_by([link])).to eq([merge_request]) @@ -35,6 +47,7 @@ RSpec.describe Banzai::ReferenceParser::MergeRequestParser do context 'using a non-existing merge request ID' do it 'returns an empty Array' do + link['data-project'] = merge_request.project_id.to_s link['data-merge-request'] = '' expect(subject.referenced_by([link])).to eq([]) @@ -47,16 +60,16 @@ RSpec.describe Banzai::ReferenceParser::MergeRequestParser do let(:other_project) { create(:project, :public) } let(:other_merge_request) { create(:merge_request, source_project: other_project) } - let(:control_links) do + let!(:control_links) do [merge_request_link(other_merge_request)] end - let(:actual_links) do + let!(:actual_links) do control_links + [merge_request_link(create(:merge_request, :conflict, source_project: other_project))] end def merge_request_link(merge_request) - Nokogiri::HTML.fragment(%Q{<a data-merge-request="#{merge_request.id}"></a>}).children[0] + Nokogiri::HTML.fragment(%Q{<a data-project="#{merge_request.project_id}" data-merge-request="#{merge_request.id}"></a>}).children[0] end before do diff --git a/spec/lib/bulk_imports/clients/http_spec.rb b/spec/lib/bulk_imports/clients/http_spec.rb index 213fa23675e..ac42f12a3d4 100644 --- a/spec/lib/bulk_imports/clients/http_spec.rb +++ b/spec/lib/bulk_imports/clients/http_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe BulkImports::Clients::Http do +RSpec.describe BulkImports::Clients::HTTP do include ImportSpecHelper let(:uri) { 
'http://gitlab.example' } @@ -48,6 +48,7 @@ RSpec.describe BulkImports::Clients::Http do [ 'http://gitlab.example:80/api/v4/resource', hash_including( + follow_redirects: false, query: { page: described_class::DEFAULT_PAGE, per_page: described_class::DEFAULT_PER_PAGE @@ -118,6 +119,7 @@ RSpec.describe BulkImports::Clients::Http do 'http://gitlab.example:80/api/v4/resource', hash_including( body: {}, + follow_redirects: false, headers: { 'Content-Type' => 'application/json', 'Authorization' => "Bearer #{token}" @@ -127,4 +129,42 @@ RSpec.describe BulkImports::Clients::Http do end end end + + describe '#head' do + let(:method) { :head } + + include_examples 'performs network request' do + let(:expected_args) do + [ + 'http://gitlab.example:80/api/v4/resource', + hash_including( + follow_redirects: false, + headers: { + 'Content-Type' => 'application/json', + 'Authorization' => "Bearer #{token}" + } + ) + ] + end + end + end + + describe '#stream' do + it 'performs network request with stream_body option' do + expected_args = [ + 'http://gitlab.example:80/api/v4/resource', + hash_including( + stream_body: true, + headers: { + 'Content-Type' => 'application/json', + 'Authorization' => "Bearer #{token}" + } + ) + ] + + expect(Gitlab::HTTP).to receive(:get).with(*expected_args).and_return(response_double) + + subject.stream(resource) + end + end end diff --git a/spec/lib/bulk_imports/common/extractors/ndjson_extractor_spec.rb b/spec/lib/bulk_imports/common/extractors/ndjson_extractor_spec.rb new file mode 100644 index 00000000000..bd306233de8 --- /dev/null +++ b/spec/lib/bulk_imports/common/extractors/ndjson_extractor_spec.rb @@ -0,0 +1,53 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe BulkImports::Common::Extractors::NdjsonExtractor do + let_it_be(:tmpdir) { Dir.mktmpdir } + let_it_be(:filepath) { 'spec/fixtures/bulk_imports/gz/labels.ndjson.gz' } + let_it_be(:import) { create(:bulk_import) } + let_it_be(:config) { 
create(:bulk_import_configuration, bulk_import: import) } + let_it_be(:entity) { create(:bulk_import_entity, bulk_import: import) } + let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) } + let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) } + + subject { described_class.new(relation: 'labels') } + + before do + allow(FileUtils).to receive(:remove_entry).with(any_args).and_call_original + + subject.instance_variable_set(:@tmp_dir, tmpdir) + end + + after(:all) do + FileUtils.remove_entry(tmpdir) if File.directory?(tmpdir) + end + + describe '#extract' do + before do + FileUtils.copy_file(filepath, File.join(tmpdir, 'labels.ndjson.gz')) + + allow_next_instance_of(BulkImports::FileDownloadService) do |service| + allow(service).to receive(:execute) + end + end + + it 'returns ExtractedData' do + extracted_data = subject.extract(context) + label = extracted_data.data.first.first + + expect(extracted_data).to be_instance_of(BulkImports::Pipeline::ExtractedData) + expect(label['title']).to include('Label') + expect(label['description']).to include('Label') + expect(label['type']).to eq('GroupLabel') + end + end + + describe '#remove_tmp_dir' do + it 'removes tmp dir' do + expect(FileUtils).to receive(:remove_entry).with(tmpdir).once + + subject.remove_tmp_dir + end + end +end diff --git a/spec/lib/bulk_imports/common/extractors/rest_extractor_spec.rb b/spec/lib/bulk_imports/common/extractors/rest_extractor_spec.rb index 721dacbe3f4..5ee5cdbe413 100644 --- a/spec/lib/bulk_imports/common/extractors/rest_extractor_spec.rb +++ b/spec/lib/bulk_imports/common/extractors/rest_extractor_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe BulkImports::Common::Extractors::RestExtractor do - let(:http_client) { instance_double(BulkImports::Clients::Http) } + let(:http_client) { instance_double(BulkImports::Clients::HTTP) } let(:options) { { query: double(to_h: { resource: nil, query: nil }) } } let(:response) { double(parsed_response: { 
'data' => { 'foo' => 'bar' } }, headers: { 'x-next-page' => '2' }) } diff --git a/spec/lib/bulk_imports/groups/extractors/subgroups_extractor_spec.rb b/spec/lib/bulk_imports/groups/extractors/subgroups_extractor_spec.rb index ac8786440e9..f7485b188ce 100644 --- a/spec/lib/bulk_imports/groups/extractors/subgroups_extractor_spec.rb +++ b/spec/lib/bulk_imports/groups/extractors/subgroups_extractor_spec.rb @@ -12,7 +12,7 @@ RSpec.describe BulkImports::Groups::Extractors::SubgroupsExtractor do response = [{ 'test' => 'group' }] context = BulkImports::Pipeline::Context.new(tracker) - allow_next_instance_of(BulkImports::Clients::Http) do |client| + allow_next_instance_of(BulkImports::Clients::HTTP) do |client| allow(client).to receive(:each_page).and_return(response) end diff --git a/spec/lib/bulk_imports/groups/graphql/get_labels_query_spec.rb b/spec/lib/bulk_imports/groups/graphql/get_labels_query_spec.rb deleted file mode 100644 index 61db644a372..00000000000 --- a/spec/lib/bulk_imports/groups/graphql/get_labels_query_spec.rb +++ /dev/null @@ -1,35 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe BulkImports::Groups::Graphql::GetLabelsQuery do - it 'has a valid query' do - tracker = create(:bulk_import_tracker) - context = BulkImports::Pipeline::Context.new(tracker) - - query = GraphQL::Query.new( - GitlabSchema, - described_class.to_s, - variables: described_class.variables(context) - ) - result = GitlabSchema.static_validator.validate(query) - - expect(result[:errors]).to be_empty - end - - describe '#data_path' do - it 'returns data path' do - expected = %w[data group labels nodes] - - expect(described_class.data_path).to eq(expected) - end - end - - describe '#page_info_path' do - it 'returns pagination information path' do - expected = %w[data group labels page_info] - - expect(described_class.page_info_path).to eq(expected) - end - end -end diff --git a/spec/lib/bulk_imports/groups/pipelines/boards_pipeline_spec.rb 
b/spec/lib/bulk_imports/groups/pipelines/boards_pipeline_spec.rb new file mode 100644 index 00000000000..8b2f03ca15f --- /dev/null +++ b/spec/lib/bulk_imports/groups/pipelines/boards_pipeline_spec.rb @@ -0,0 +1,49 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe BulkImports::Groups::Pipelines::BoardsPipeline do + let_it_be(:user) { create(:user) } + let_it_be(:group) { create(:group) } + let_it_be(:bulk_import) { create(:bulk_import, user: user) } + let_it_be(:filepath) { 'spec/fixtures/bulk_imports/gz/boards.ndjson.gz' } + let_it_be(:entity) do + create( + :bulk_import_entity, + group: group, + bulk_import: bulk_import, + source_full_path: 'source/full/path', + destination_name: 'My Destination Group', + destination_namespace: group.full_path + ) + end + + let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) } + let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) } + + let(:tmpdir) { Dir.mktmpdir } + + before do + FileUtils.copy_file(filepath, File.join(tmpdir, 'boards.ndjson.gz')) + group.add_owner(user) + end + + subject { described_class.new(context) } + + describe '#run' do + it 'imports group boards into destination group and removes tmpdir' do + allow(Dir).to receive(:mktmpdir).and_return(tmpdir) + allow_next_instance_of(BulkImports::FileDownloadService) do |service| + allow(service).to receive(:execute) + end + + expect { subject.run }.to change(Board, :count).by(1) + + lists = group.boards.find_by(name: 'first board').lists + + expect(lists.count).to eq(3) + expect(lists.first.label.title).to eq('TSL') + expect(lists.second.label.title).to eq('Sosync') + end + end +end diff --git a/spec/lib/bulk_imports/groups/pipelines/entity_finisher_spec.rb b/spec/lib/bulk_imports/groups/pipelines/entity_finisher_spec.rb index 8276349c5f4..b97aeb435b9 100644 --- a/spec/lib/bulk_imports/groups/pipelines/entity_finisher_spec.rb +++ b/spec/lib/bulk_imports/groups/pipelines/entity_finisher_spec.rb @@ -25,13 +25,33 @@ 
RSpec.describe BulkImports::Groups::Pipelines::EntityFinisher do .to change(entity, :status_name).to(:finished) end - it 'does nothing when the entity is already finished' do - entity = create(:bulk_import_entity, :finished) - pipeline_tracker = create(:bulk_import_tracker, entity: entity) - context = BulkImports::Pipeline::Context.new(pipeline_tracker) - subject = described_class.new(context) + context 'when entity is in a final finished or failed state' do + shared_examples 'performs no state update' do |entity_state| + it 'does nothing' do + entity = create(:bulk_import_entity, entity_state) + pipeline_tracker = create(:bulk_import_tracker, entity: entity) + context = BulkImports::Pipeline::Context.new(pipeline_tracker) + subject = described_class.new(context) - expect { subject.run } - .not_to change(entity, :status_name) + expect { subject.run } + .not_to change(entity, :status_name) + end + end + + include_examples 'performs no state update', :finished + include_examples 'performs no state update', :failed + end + + context 'when all entity trackers failed' do + it 'marks entity as failed' do + entity = create(:bulk_import_entity, :started) + create(:bulk_import_tracker, :failed, entity: entity) + pipeline_tracker = create(:bulk_import_tracker, entity: entity, relation: described_class) + context = BulkImports::Pipeline::Context.new(pipeline_tracker) + + described_class.new(context).run + + expect(entity.reload.failed?).to eq(true) + end end end diff --git a/spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb index 8af646d1101..6344dae0fb7 100644 --- a/spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb +++ b/spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb @@ -5,98 +5,74 @@ require 'spec_helper' RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do let_it_be(:user) { create(:user) } let_it_be(:group) { create(:group) } - let_it_be(:timestamp) { 
Time.new(2020, 01, 01).utc } - + let_it_be(:bulk_import) { create(:bulk_import, user: user) } + let_it_be(:filepath) { 'spec/fixtures/bulk_imports/gz/labels.ndjson.gz' } let_it_be(:entity) do create( :bulk_import_entity, + group: group, + bulk_import: bulk_import, source_full_path: 'source/full/path', destination_name: 'My Destination Group', - destination_namespace: group.full_path, - group: group + destination_namespace: group.full_path ) end let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) } let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) } + let(:tmpdir) { Dir.mktmpdir } + + before do + FileUtils.copy_file(filepath, File.join(tmpdir, 'labels.ndjson.gz')) + group.add_owner(user) + end + subject { described_class.new(context) } describe '#run' do - it 'imports a group labels' do - first_page = extracted_data(title: 'label1', has_next_page: true) - last_page = extracted_data(title: 'label2') - - allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor| - allow(extractor) - .to receive(:extract) - .and_return(first_page, last_page) + it 'imports group labels into destination group and removes tmpdir' do + allow(Dir).to receive(:mktmpdir).and_return(tmpdir) + allow_next_instance_of(BulkImports::FileDownloadService) do |service| + allow(service).to receive(:execute) end - expect { subject.run }.to change(Label, :count).by(2) + expect { subject.run }.to change(::GroupLabel, :count).by(1) - label = group.labels.order(:created_at).last + label = group.labels.first - expect(label.title).to eq('label2') - expect(label.description).to eq('desc') - expect(label.color).to eq('#428BCA') - expect(label.created_at).to eq(timestamp) - expect(label.updated_at).to eq(timestamp) + expect(label.title).to eq('Label 1') + expect(label.description).to eq('Label 1') + expect(label.color).to eq('#6699cc') + expect(File.directory?(tmpdir)).to eq(false) end end describe '#load' do - it 'creates the label' do - data = 
label_data('label') + context 'when label is not persisted' do + it 'saves the label' do + label = build(:group_label, group: group) - expect { subject.load(context, data) }.to change(Label, :count).by(1) + expect(label).to receive(:save!) - label = group.labels.first - - data.each do |key, value| - expect(label[key]).to eq(value) + subject.load(context, label) end end - end - describe 'pipeline parts' do - it { expect(described_class).to include_module(BulkImports::Pipeline) } - it { expect(described_class).to include_module(BulkImports::Pipeline::Runner) } - - it 'has extractors' do - expect(described_class.get_extractor) - .to eq( - klass: BulkImports::Common::Extractors::GraphqlExtractor, - options: { - query: BulkImports::Groups::Graphql::GetLabelsQuery - } - ) - end + context 'when label is persisted' do + it 'does not save label' do + label = create(:group_label, group: group) - it 'has transformers' do - expect(described_class.transformers) - .to contain_exactly( - { klass: BulkImports::Common::Transformers::ProhibitedAttributesTransformer, options: nil } - ) - end - end - - def label_data(title) - { - 'title' => title, - 'description' => 'desc', - 'color' => '#428BCA', - 'created_at' => timestamp.to_s, - 'updated_at' => timestamp.to_s - } - end + expect(label).not_to receive(:save!) - def extracted_data(title:, has_next_page: false) - page_info = { - 'has_next_page' => has_next_page, - 'next_page' => has_next_page ? 
'cursor' : nil - } + subject.load(context, label) + end + end - BulkImports::Pipeline::ExtractedData.new(data: [label_data(title)], page_info: page_info) + context 'when label is missing' do + it 'returns' do + expect(subject.load(context, nil)).to be_nil + end + end end end diff --git a/spec/lib/bulk_imports/groups/pipelines/milestones_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/milestones_pipeline_spec.rb index e5cf75c566b..a8354e62459 100644 --- a/spec/lib/bulk_imports/groups/pipelines/milestones_pipeline_spec.rb +++ b/spec/lib/bulk_imports/groups/pipelines/milestones_pipeline_spec.rb @@ -5,119 +5,69 @@ require 'spec_helper' RSpec.describe BulkImports::Groups::Pipelines::MilestonesPipeline do let_it_be(:user) { create(:user) } let_it_be(:group) { create(:group) } - let_it_be(:timestamp) { Time.new(2020, 01, 01).utc } let_it_be(:bulk_import) { create(:bulk_import, user: user) } - + let_it_be(:filepath) { 'spec/fixtures/bulk_imports/gz/milestones.ndjson.gz' } let_it_be(:entity) do create( :bulk_import_entity, + group: group, bulk_import: bulk_import, source_full_path: 'source/full/path', destination_name: 'My Destination Group', - destination_namespace: group.full_path, - group: group + destination_namespace: group.full_path ) end let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) } let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) } - subject { described_class.new(context) } + let(:tmpdir) { Dir.mktmpdir } before do + FileUtils.copy_file(filepath, File.join(tmpdir, 'milestones.ndjson.gz')) group.add_owner(user) end - describe '#run' do - it 'imports group milestones' do - first_page = extracted_data(title: 'milestone1', iid: 1, has_next_page: true) - last_page = extracted_data(title: 'milestone2', iid: 2) + subject { described_class.new(context) } - allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor| - allow(extractor) - .to receive(:extract) - .and_return(first_page, last_page) + 
describe '#run' do + it 'imports group milestones into destination group and removes tmpdir' do + allow(Dir).to receive(:mktmpdir).and_return(tmpdir) + allow_next_instance_of(BulkImports::FileDownloadService) do |service| + allow(service).to receive(:execute) end - expect { subject.run }.to change(Milestone, :count).by(2) - - expect(group.milestones.pluck(:title)).to contain_exactly('milestone1', 'milestone2') - - milestone = group.milestones.last - - expect(milestone.description).to eq('desc') - expect(milestone.state).to eq('closed') - expect(milestone.start_date.to_s).to eq('2020-10-21') - expect(milestone.due_date.to_s).to eq('2020-10-22') - expect(milestone.created_at).to eq(timestamp) - expect(milestone.updated_at).to eq(timestamp) + expect { subject.run }.to change(Milestone, :count).by(5) + expect(group.milestones.pluck(:title)).to contain_exactly('v4.0', 'v3.0', 'v2.0', 'v1.0', 'v0.0') + expect(File.directory?(tmpdir)).to eq(false) end end describe '#load' do - it 'creates the milestone' do - data = milestone_data('milestone') - - expect { subject.load(context, data) }.to change(Milestone, :count).by(1) - end - - context 'when user is not authorized to create the milestone' do - before do - allow(user).to receive(:can?).with(:admin_milestone, group).and_return(false) - end + context 'when milestone is not persisted' do + it 'saves the milestone' do + milestone = build(:milestone, group: group) - it 'raises NotAllowedError' do - data = extracted_data(title: 'milestone') + expect(milestone).to receive(:save!) 
- expect { subject.load(context, data) }.to raise_error(::BulkImports::Pipeline::NotAllowedError) + subject.load(context, milestone) end end - end - describe 'pipeline parts' do - it { expect(described_class).to include_module(BulkImports::Pipeline) } - it { expect(described_class).to include_module(BulkImports::Pipeline::Runner) } + context 'when milestone is persisted' do + it 'does not save milestone' do + milestone = create(:milestone, group: group) - it 'has extractors' do - expect(described_class.get_extractor) - .to eq( - klass: BulkImports::Common::Extractors::GraphqlExtractor, - options: { - query: BulkImports::Groups::Graphql::GetMilestonesQuery - } - ) - end + expect(milestone).not_to receive(:save!) - it 'has transformers' do - expect(described_class.transformers) - .to contain_exactly( - { klass: BulkImports::Common::Transformers::ProhibitedAttributesTransformer, options: nil } - ) + subject.load(context, milestone) + end end - end - def milestone_data(title, iid: 1) - { - 'title' => title, - 'description' => 'desc', - 'iid' => iid, - 'state' => 'closed', - 'start_date' => '2020-10-21', - 'due_date' => '2020-10-22', - 'created_at' => timestamp.to_s, - 'updated_at' => timestamp.to_s - } - end - - def extracted_data(title:, iid: 1, has_next_page: false) - page_info = { - 'has_next_page' => has_next_page, - 'next_page' => has_next_page ? 
'cursor' : nil - } - - BulkImports::Pipeline::ExtractedData.new( - data: milestone_data(title, iid: iid), - page_info: page_info - ) + context 'when milestone is missing' do + it 'returns' do + expect(subject.load(context, nil)).to be_nil + end + end end end diff --git a/spec/lib/bulk_imports/ndjson_pipeline_spec.rb b/spec/lib/bulk_imports/ndjson_pipeline_spec.rb new file mode 100644 index 00000000000..a5d1a5f7fbb --- /dev/null +++ b/spec/lib/bulk_imports/ndjson_pipeline_spec.rb @@ -0,0 +1,186 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe BulkImports::NdjsonPipeline do + let_it_be(:group) { create(:group) } + let_it_be(:project) { create(:project) } + let_it_be(:user) { create(:user) } + + let(:klass) do + Class.new do + include BulkImports::NdjsonPipeline + + relation_name 'test' + + attr_reader :portable, :current_user + + def initialize(portable, user) + @portable = portable + @current_user = user + end + end + end + + before do + stub_const('NdjsonPipelineClass', klass) + end + + subject { NdjsonPipelineClass.new(group, user) } + + it 'marks pipeline as ndjson' do + expect(NdjsonPipelineClass.ndjson_pipeline?).to eq(true) + end + + describe '#deep_transform_relation!' 
do + it 'transforms relation hash' do + transformed = subject.deep_transform_relation!({}, 'test', {}) do |key, hash| + hash.merge(relation_key: key) + end + + expect(transformed[:relation_key]).to eq('test') + end + + context 'when subrelations is an array' do + it 'transforms each element of the array' do + relation_hash = { + 'key' => 'value', + 'labels' => [ + { 'title' => 'label 1' }, + { 'title' => 'label 2' }, + { 'title' => 'label 3' } + ] + } + relation_definition = { 'labels' => {} } + + transformed = subject.deep_transform_relation!(relation_hash, 'test', relation_definition) do |key, hash| + hash.merge(relation_key: key) + end + + transformed['labels'].each do |label| + expect(label[:relation_key]).to eq('group_labels') + end + end + end + + context 'when subrelation is a hash' do + it 'transforms subrelation hash' do + relation_hash = { + 'key' => 'value', + 'label' => { 'title' => 'label' } + } + relation_definition = { 'label' => {} } + + transformed = subject.deep_transform_relation!(relation_hash, 'test', relation_definition) do |key, hash| + hash.merge(relation_key: key) + end + + expect(transformed['label'][:relation_key]).to eq('group_label') + end + end + + context 'when subrelation is nil' do + it 'removes subrelation' do + relation_hash = { + 'key' => 'value', + 'label' => { 'title' => 'label' } + } + relation_definition = { 'label' => {} } + + transformed = subject.deep_transform_relation!(relation_hash, 'test', relation_definition) do |key, hash| + if key == 'group_label' + nil + else + hash + end + end + + expect(transformed['label']).to be_nil + end + end + end + + describe '#transform' do + it 'calls relation factory' do + hash = { key: :value } + data = [hash, 1] + user = double + config = double(relation_excluded_keys: nil, top_relation_tree: []) + context = double(portable: group, current_user: user, import_export_config: config) + allow(subject).to receive(:import_export_config).and_return(config) + + 
expect(Gitlab::ImportExport::Group::RelationFactory) + .to receive(:create) + .with( + relation_index: 1, + relation_sym: :test, + relation_hash: hash, + importable: group, + members_mapper: instance_of(Gitlab::ImportExport::MembersMapper), + object_builder: Gitlab::ImportExport::Group::ObjectBuilder, + user: user, + excluded_keys: nil + ) + + subject.transform(context, data) + end + end + + describe '#load' do + context 'when object is not persisted' do + it 'saves the object' do + object = double(persisted?: false) + + expect(object).to receive(:save!) + + subject.load(nil, object) + end + end + + context 'when object is persisted' do + it 'does not save the object' do + object = double(persisted?: true) + + expect(object).not_to receive(:save!) + + subject.load(nil, object) + end + end + + context 'when object is missing' do + it 'returns' do + expect(subject.load(nil, nil)).to be_nil + end + end + end + + describe '#relation_class' do + context 'when relation name is pluralized' do + it 'returns constantized class' do + expect(subject.relation_class('MergeRequest::Metrics')).to eq(MergeRequest::Metrics) + end + end + + context 'when relation name is singularized' do + it 'returns constantized class' do + expect(subject.relation_class('Badge')).to eq(Badge) + end + end + end + + describe '#relation_key_override' do + context 'when portable is group' do + it 'returns group relation name override' do + expect(subject.relation_key_override('labels')).to eq('group_labels') + end + end + + context 'when portable is project' do + subject { NdjsonPipelineClass.new(project, user) } + + it 'returns group relation name override' do + expect(subject.relation_key_override('labels')).to eq('project_labels') + end + end + end +end diff --git a/spec/lib/bulk_imports/pipeline/context_spec.rb b/spec/lib/bulk_imports/pipeline/context_spec.rb index 5b7711ad5d7..83d6f494d53 100644 --- a/spec/lib/bulk_imports/pipeline/context_spec.rb +++ 
b/spec/lib/bulk_imports/pipeline/context_spec.rb @@ -6,6 +6,9 @@ RSpec.describe BulkImports::Pipeline::Context do let_it_be(:user) { create(:user) } let_it_be(:group) { create(:group) } let_it_be(:bulk_import) { create(:bulk_import, user: user) } + let_it_be(:project) { create(:project) } + let_it_be(:project_entity) { create(:bulk_import_entity, :project_entity, project: project) } + let_it_be(:project_tracker) { create(:bulk_import_tracker, entity: project_entity) } let_it_be(:entity) do create( @@ -51,4 +54,24 @@ RSpec.describe BulkImports::Pipeline::Context do describe '#extra' do it { expect(subject.extra).to eq(extra: :data) } end + + describe '#portable' do + it { expect(subject.portable).to eq(group) } + + context 'when portable is project' do + subject { described_class.new(project_tracker) } + + it { expect(subject.portable).to eq(project) } + end + end + + describe '#import_export_config' do + it { expect(subject.import_export_config).to be_instance_of(BulkImports::FileTransfer::GroupConfig) } + + context 'when portable is project' do + subject { described_class.new(project_tracker) } + + it { expect(subject.import_export_config).to be_instance_of(BulkImports::FileTransfer::ProjectConfig) } + end + end end diff --git a/spec/lib/bulk_imports/pipeline_spec.rb b/spec/lib/bulk_imports/pipeline_spec.rb index dda2e41f06c..48c265d6118 100644 --- a/spec/lib/bulk_imports/pipeline_spec.rb +++ b/spec/lib/bulk_imports/pipeline_spec.rb @@ -63,6 +63,7 @@ RSpec.describe BulkImports::Pipeline do BulkImports::MyPipeline.transformer(klass, options) BulkImports::MyPipeline.loader(klass, options) BulkImports::MyPipeline.abort_on_failure! + BulkImports::MyPipeline.ndjson_pipeline! 
expect(BulkImports::MyPipeline.get_extractor).to eq({ klass: klass, options: options }) @@ -74,6 +75,7 @@ RSpec.describe BulkImports::Pipeline do expect(BulkImports::MyPipeline.get_loader).to eq({ klass: klass, options: options }) expect(BulkImports::MyPipeline.abort_on_failure?).to eq(true) + expect(BulkImports::MyPipeline.ndjson_pipeline?).to eq(true) end end end diff --git a/spec/lib/bulk_imports/stage_spec.rb b/spec/lib/bulk_imports/stage_spec.rb index 713cd3f22ab..d082faa90bc 100644 --- a/spec/lib/bulk_imports/stage_spec.rb +++ b/spec/lib/bulk_imports/stage_spec.rb @@ -10,7 +10,8 @@ RSpec.describe BulkImports::Stage do [1, BulkImports::Groups::Pipelines::MembersPipeline], [1, BulkImports::Groups::Pipelines::LabelsPipeline], [1, BulkImports::Groups::Pipelines::MilestonesPipeline], - [1, BulkImports::Groups::Pipelines::BadgesPipeline] + [1, BulkImports::Groups::Pipelines::BadgesPipeline], + [2, BulkImports::Groups::Pipelines::BoardsPipeline] ] end diff --git a/spec/lib/csv_builder_spec.rb b/spec/lib/csv_builder_spec.rb index 546be3ba6f7..ec065ee6f7d 100644 --- a/spec/lib/csv_builder_spec.rb +++ b/spec/lib/csv_builder_spec.rb @@ -105,5 +105,17 @@ RSpec.describe CsvBuilder do expect(csv_data).not_to include "'*safe_desc" expect(csv_data).not_to include "'*safe_title" end + + context 'when dangerous characters are after a line break' do + it 'does not append single quote to description' do + fake_object = double(title: "Safe title", description: "With task list\n-[x] todo 1") + fake_relation = FakeRelation.new([fake_object]) + builder = described_class.new(fake_relation, 'Title' => 'title', 'Description' => 'description') + + csv_data = builder.render + + expect(csv_data).to eq("Title,Description\nSafe title,\"With task list\n-[x] todo 1\"\n") + end + end end end diff --git a/spec/lib/expand_variables_spec.rb b/spec/lib/expand_variables_spec.rb index 407187ea05f..1108d26b2a9 100644 --- a/spec/lib/expand_variables_spec.rb +++ b/spec/lib/expand_variables_spec.rb @@ 
-1,6 +1,7 @@ # frozen_string_literal: true -require 'spec_helper' +require 'fast_spec_helper' +require 'rspec-parameterized' RSpec.describe ExpandVariables do shared_examples 'common variable expansion' do |expander| @@ -231,41 +232,4 @@ RSpec.describe ExpandVariables do end end end - - describe '#possible_var_reference?' do - context 'table tests' do - using RSpec::Parameterized::TableSyntax - - where do - { - "empty value": { - value: '', - result: false - }, - "normal value": { - value: 'some value', - result: false - }, - "simple expansions": { - value: 'key$variable', - result: true - }, - "complex expansions": { - value: 'key${variable}${variable2}', - result: true - }, - "complex expansions for Windows": { - value: 'key%variable%%variable2%', - result: true - } - } - end - - with_them do - subject { ExpandVariables.possible_var_reference?(value) } - - it { is_expected.to eq(result) } - end - end - end end diff --git a/spec/lib/generators/gitlab/snowplow_event_definition_generator_spec.rb b/spec/lib/generators/gitlab/snowplow_event_definition_generator_spec.rb index 25c4001a192..4e172dd32f0 100644 --- a/spec/lib/generators/gitlab/snowplow_event_definition_generator_spec.rb +++ b/spec/lib/generators/gitlab/snowplow_event_definition_generator_spec.rb @@ -1,8 +1,8 @@ # frozen_string_literal: true -require 'generator_helper' +require 'spec_helper' -RSpec.describe Gitlab::SnowplowEventDefinitionGenerator do +RSpec.describe Gitlab::SnowplowEventDefinitionGenerator, :silence_stdout do let(:ce_temp_dir) { Dir.mktmpdir } let(:ee_temp_dir) { Dir.mktmpdir } let(:generator_options) { { 'category' => 'Groups::EmailCampaignsController', 'action' => 'click' } } diff --git a/spec/lib/generators/gitlab/usage_metric_definition/redis_hll_generator_spec.rb b/spec/lib/generators/gitlab/usage_metric_definition/redis_hll_generator_spec.rb index 95a577e6334..e497551bc3f 100644 --- a/spec/lib/generators/gitlab/usage_metric_definition/redis_hll_generator_spec.rb +++ 
b/spec/lib/generators/gitlab/usage_metric_definition/redis_hll_generator_spec.rb @@ -1,8 +1,8 @@ # frozen_string_literal: true -require 'generator_helper' +require 'spec_helper' -RSpec.describe Gitlab::UsageMetricDefinition::RedisHllGenerator do +RSpec.describe Gitlab::UsageMetricDefinition::RedisHllGenerator, :silence_stdout do include UsageDataHelpers let(:category) { 'test_category' } diff --git a/spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb b/spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb index 74aaf34e82c..05833cf4ec4 100644 --- a/spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb +++ b/spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb @@ -1,8 +1,8 @@ # frozen_string_literal: true -require 'generator_helper' +require 'spec_helper' -RSpec.describe Gitlab::UsageMetricDefinitionGenerator do +RSpec.describe Gitlab::UsageMetricDefinitionGenerator, :silence_stdout do include UsageDataHelpers let(:key_path) { 'counts_weekly.test_metric' } diff --git a/spec/lib/generators/gitlab/usage_metric_generator_spec.rb b/spec/lib/generators/gitlab/usage_metric_generator_spec.rb new file mode 100644 index 00000000000..f38815acca6 --- /dev/null +++ b/spec/lib/generators/gitlab/usage_metric_generator_spec.rb @@ -0,0 +1,70 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::UsageMetricGenerator, :silence_stdout do + let(:ce_temp_dir) { Dir.mktmpdir } + let(:ee_temp_dir) { Dir.mktmpdir } + let(:spec_ce_temp_dir) { Dir.mktmpdir } + let(:spec_ee_temp_dir) { Dir.mktmpdir } + let(:args) { ['CountFoo'] } + let(:options) { { 'type' => 'redis_hll' } } + + before do + stub_const("#{described_class}::CE_DIR", ce_temp_dir) + stub_const("#{described_class}::EE_DIR", ee_temp_dir) + stub_const("#{described_class}::SPEC_CE_DIR", spec_ce_temp_dir) + stub_const("#{described_class}::SPEC_EE_DIR", spec_ee_temp_dir) + end + + after do + FileUtils.rm_rf([ce_temp_dir, ee_temp_dir, 
spec_ce_temp_dir, spec_ee_temp_dir]) + end + + def expect_generated_file(directory, file_name, content) + file_path = File.join(directory, file_name) + file = File.read(file_path) + + expect(file).to eq(content) + end + + describe 'Creating metric instrumentation files' do + let(:sample_metric_dir) { 'lib/generators/gitlab/usage_metric_generator' } + let(:sample_metric) { fixture_file(File.join(sample_metric_dir, 'sample_metric.rb')) } + let(:sample_spec) { fixture_file(File.join(sample_metric_dir, 'sample_metric_test.rb')) } + + it 'creates CE metric instrumentation files using the template' do + described_class.new(args, options).invoke_all + + expect_generated_file(ce_temp_dir, 'count_foo_metric.rb', sample_metric) + expect_generated_file(spec_ce_temp_dir, 'count_foo_metric_spec.rb', sample_spec) + end + + context 'with EE flag true' do + let(:options) { { 'type' => 'redis_hll', 'ee' => true } } + + it 'creates EE metric instrumentation files using the template' do + described_class.new(args, options).invoke_all + + expect_generated_file(ee_temp_dir, 'count_foo_metric.rb', sample_metric) + expect_generated_file(spec_ee_temp_dir, 'count_foo_metric_spec.rb', sample_spec) + end + end + + context 'with type option missing' do + let(:options) { {} } + + it 'raises an ArgumentError' do + expect { described_class.new(args, options).invoke_all }.to raise_error(ArgumentError, /Type is required/) + end + end + + context 'with type option value not included in approved superclasses' do + let(:options) { { 'type' => 'some_other_type' } } + + it 'raises an ArgumentError' do + expect { described_class.new(args, options).invoke_all }.to raise_error(ArgumentError, /Unknown type 'some_other_type'/) + end + end + end +end diff --git a/spec/lib/gitlab/application_context_spec.rb b/spec/lib/gitlab/application_context_spec.rb index c4fe2ebaba9..ecd68caba79 100644 --- a/spec/lib/gitlab/application_context_spec.rb +++ b/spec/lib/gitlab/application_context_spec.rb @@ -68,6 +68,24 @@ 
RSpec.describe Gitlab::ApplicationContext do end end + describe '.current_context_attribute' do + it 'returns the raw attribute value' do + described_class.with_context(caller_id: "Hello") do + expect(described_class.current_context_attribute(:caller_id)).to be('Hello') + end + end + + it 'returns the attribute value with meta prefix' do + described_class.with_context(feature_category: "secure") do + expect(described_class.current_context_attribute('meta.feature_category')).to be('secure') + end + end + + it 'returns nil if the key was not present in the current context' do + expect(described_class.current_context_attribute(:caller_id)).to be(nil) + end + end + describe '#to_lazy_hash' do let_it_be(:user) { create(:user) } let_it_be(:project) { create(:project) } diff --git a/spec/lib/gitlab/auth/auth_finders_spec.rb b/spec/lib/gitlab/auth/auth_finders_spec.rb index cddcaf09b74..7475ed2796f 100644 --- a/spec/lib/gitlab/auth/auth_finders_spec.rb +++ b/spec/lib/gitlab/auth/auth_finders_spec.rb @@ -50,7 +50,7 @@ RSpec.describe Gitlab::Auth::AuthFinders do end shared_examples 'find user from job token' do |without_job_token_allowed| - context 'when route is allowed to be authenticated' do + context 'when route is allowed to be authenticated', :request_store do let(:route_authentication_setting) { { job_token_allowed: true } } context 'for an invalid token' do @@ -68,6 +68,8 @@ RSpec.describe Gitlab::Auth::AuthFinders do it 'return user' do expect(subject).to eq(user) expect(@current_authenticated_job).to eq job + expect(subject).to be_from_ci_job_token + expect(subject.ci_job_token_scope.source_project).to eq(job.project) end end @@ -81,7 +83,7 @@ RSpec.describe Gitlab::Auth::AuthFinders do end end - context 'when route is not allowed to be authenticated' do + context 'when route is not allowed to be authenticated', :request_store do let(:route_authentication_setting) { { job_token_allowed: false } } context 'with a running job' do @@ -96,6 +98,8 @@ RSpec.describe 
Gitlab::Auth::AuthFinders do it 'returns the user' do expect(subject).to eq(user) expect(@current_authenticated_job).to eq job + expect(subject).to be_from_ci_job_token + expect(subject.ci_job_token_scope.source_project).to eq(job.project) end else it 'returns nil' do diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb index 7f06e66ad50..d529d4a96e1 100644 --- a/spec/lib/gitlab/auth_spec.rb +++ b/spec/lib/gitlab/auth_spec.rb @@ -196,8 +196,8 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do end it 'recognizes other ci services' do - project.create_drone_ci_service(active: true) - project.drone_ci_service.update(token: 'token') + project.create_drone_ci_integration(active: true) + project.drone_ci_integration.update(token: 'token') expect(gl_auth.find_for_git_client('drone-ci-token', 'token', project: project, ip: 'ip')).to eq(Gitlab::Auth::Result.new(nil, project, :ci, described_class.build_authentication_abilities)) end @@ -683,6 +683,28 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do end end + describe '#build_access_token_check' do + subject { gl_auth.find_for_git_client('gitlab-ci-token', build.token, project: build.project, ip: '1.2.3.4') } + + let_it_be(:user) { create(:user) } + + context 'for running build' do + let!(:build) { create(:ci_build, :running, user: user) } + + it 'executes query using primary database' do + expect(Ci::Build).to receive(:find_by_token).with(build.token).and_wrap_original do |m, *args| + expect(::Gitlab::Database::LoadBalancing::Session.current.use_primary?).to eq(true) + m.call(*args) + end + + expect(subject).to be_a(Gitlab::Auth::Result) + expect(subject.actor).to eq(user) + expect(subject.project).to eq(build.project) + expect(subject.type).to eq(:build) + end + end + end + describe 'find_with_user_password' do let!(:user) do create(:user, diff --git a/spec/lib/gitlab/background_migration/cleanup_orphaned_lfs_objects_projects_spec.rb 
b/spec/lib/gitlab/background_migration/cleanup_orphaned_lfs_objects_projects_spec.rb new file mode 100644 index 00000000000..8a3671b2e53 --- /dev/null +++ b/spec/lib/gitlab/background_migration/cleanup_orphaned_lfs_objects_projects_spec.rb @@ -0,0 +1,82 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::CleanupOrphanedLfsObjectsProjects, schema: 20210514063252 do + let(:lfs_objects_projects) { table(:lfs_objects_projects) } + let(:lfs_objects) { table(:lfs_objects) } + let(:projects) { table(:projects) } + let(:namespaces) { table(:namespaces) } + + let(:namespace) { namespaces.create!(name: 'namespace', path: 'namespace') } + let(:project) { projects.create!(namespace_id: namespace.id) } + let(:another_project) { projects.create!(namespace_id: namespace.id) } + let(:lfs_object) { lfs_objects.create!(oid: 'abcdef', size: 1) } + let(:another_lfs_object) { lfs_objects.create!(oid: '1abcde', size: 2) } + + let!(:without_object1) { create_object(project_id: project.id) } + let!(:without_object2) { create_object(project_id: another_project.id) } + let!(:without_object3) { create_object(project_id: another_project.id) } + let!(:with_project_and_object1) { create_object(project_id: project.id, lfs_object_id: lfs_object.id) } + let!(:with_project_and_object2) { create_object(project_id: project.id, lfs_object_id: another_lfs_object.id) } + let!(:with_project_and_object3) { create_object(project_id: another_project.id, lfs_object_id: another_lfs_object.id) } + let!(:without_project1) { create_object(lfs_object_id: lfs_object.id) } + let!(:without_project2) { create_object(lfs_object_id: another_lfs_object.id) } + let!(:without_project_and_object) { create_object } + + def create_object(project_id: non_existing_record_id, lfs_object_id: non_existing_record_id) + lfs_objects_project = nil + + ActiveRecord::Base.connection.disable_referential_integrity do + lfs_objects_project = lfs_objects_projects.create!(project_id: 
project_id, lfs_object_id: lfs_object_id) + end + + lfs_objects_project + end + + subject { described_class.new } + + describe '#perform' do + it 'lfs_objects_projects without an existing lfs object or project are removed' do + subject.perform(without_object1.id, without_object3.id) + + expect(lfs_objects_projects.all).to match_array([ + with_project_and_object1, with_project_and_object2, with_project_and_object3, + without_project1, without_project2, without_project_and_object + ]) + + subject.perform(with_project_and_object1.id, with_project_and_object3.id) + + expect(lfs_objects_projects.all).to match_array([ + with_project_and_object1, with_project_and_object2, with_project_and_object3, + without_project1, without_project2, without_project_and_object + ]) + + subject.perform(without_project1.id, without_project_and_object.id) + + expect(lfs_objects_projects.all).to match_array([ + with_project_and_object1, with_project_and_object2, with_project_and_object3 + ]) + + expect(lfs_objects.ids).to contain_exactly(lfs_object.id, another_lfs_object.id) + expect(projects.ids).to contain_exactly(project.id, another_project.id) + end + + it 'cache for affected projects is being reset' do + expect(ProjectCacheWorker).to receive(:bulk_perform_in) do |delay, args| + expect(delay).to eq(1.minute) + expect(args).to match_array([[project.id, [], [:lfs_objects_size]], [another_project.id, [], [:lfs_objects_size]]]) + end + + subject.perform(without_object1.id, with_project_and_object1.id) + + expect(ProjectCacheWorker).not_to receive(:bulk_perform_in) + + subject.perform(with_project_and_object1.id, with_project_and_object3.id) + + expect(ProjectCacheWorker).not_to receive(:bulk_perform_in) + + subject.perform(without_project1.id, without_project_and_object.id) + end + end +end diff --git a/spec/lib/gitlab/background_migration/disable_expiration_policies_linked_to_no_container_images_spec.rb 
b/spec/lib/gitlab/background_migration/disable_expiration_policies_linked_to_no_container_images_spec.rb new file mode 100644 index 00000000000..04eb9ad475f --- /dev/null +++ b/spec/lib/gitlab/background_migration/disable_expiration_policies_linked_to_no_container_images_spec.rb @@ -0,0 +1,142 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::DisableExpirationPoliciesLinkedToNoContainerImages do + let_it_be(:projects) { table(:projects) } + let_it_be(:container_expiration_policies) { table(:container_expiration_policies) } + let_it_be(:container_repositories) { table(:container_repositories) } + let_it_be(:namespaces) { table(:namespaces) } + + let!(:namespace) { namespaces.create!(name: 'test', path: 'test') } + + let!(:policy1) { create_expiration_policy(project_id: 1, enabled: true) } + let!(:policy2) { create_expiration_policy(project_id: 2, enabled: false) } + let!(:policy3) { create_expiration_policy(project_id: 3, enabled: false) } + let!(:policy4) { create_expiration_policy(project_id: 4, enabled: true, with_images: true) } + let!(:policy5) { create_expiration_policy(project_id: 5, enabled: false, with_images: true) } + let!(:policy6) { create_expiration_policy(project_id: 6, enabled: false) } + let!(:policy7) { create_expiration_policy(project_id: 7, enabled: true) } + let!(:policy8) { create_expiration_policy(project_id: 8, enabled: true, with_images: true) } + let!(:policy9) { create_expiration_policy(project_id: 9, enabled: true) } + + describe '#perform' do + subject { described_class.new.perform(from_id, to_id) } + + shared_examples 'disabling policies with no images' do + it 'disables the proper policies' do + subject + + rows = container_expiration_policies.order(:project_id).to_h do |row| + [row.project_id, row.enabled] + end + expect(rows).to eq(expected_rows) + end + end + + context 'the whole range' do + let(:from_id) { 1 } + let(:to_id) { 9 } + + it_behaves_like 'disabling policies with no 
images' do + let(:expected_rows) do + { + 1 => false, + 2 => false, + 3 => false, + 4 => true, + 5 => false, + 6 => false, + 7 => false, + 8 => true, + 9 => false + } + end + end + end + + context 'a range with no policies to disable' do + let(:from_id) { 2 } + let(:to_id) { 6 } + + it_behaves_like 'disabling policies with no images' do + let(:expected_rows) do + { + 1 => true, + 2 => false, + 3 => false, + 4 => true, + 5 => false, + 6 => false, + 7 => true, + 8 => true, + 9 => true + } + end + end + end + + context 'a range with only images' do + let(:from_id) { 4 } + let(:to_id) { 5 } + + it_behaves_like 'disabling policies with no images' do + let(:expected_rows) do + { + 1 => true, + 2 => false, + 3 => false, + 4 => true, + 5 => false, + 6 => false, + 7 => true, + 8 => true, + 9 => true + } + end + end + end + + context 'a range with a single element' do + let(:from_id) { 9 } + let(:to_id) { 9 } + + it_behaves_like 'disabling policies with no images' do + let(:expected_rows) do + { + 1 => true, + 2 => false, + 3 => false, + 4 => true, + 5 => false, + 6 => false, + 7 => true, + 8 => true, + 9 => false + } + end + end + end + end + + def create_expiration_policy(project_id:, enabled:, with_images: false) + projects.create!(id: project_id, namespace_id: namespace.id, name: "gitlab-#{project_id}") + + if with_images + container_repositories.create!(project_id: project_id, name: "image-#{project_id}") + end + + container_expiration_policies.create!( + enabled: enabled, + project_id: project_id + ) + end + + def enabled_policies + container_expiration_policies.where(enabled: true) + end + + def disabled_policies + container_expiration_policies.where(enabled: false) + end +end diff --git a/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb b/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb index 8668216d014..80879c8c6d9 100644 --- 
a/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb +++ b/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb @@ -291,7 +291,7 @@ RSpec.describe Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData, s services.create!(id: 20, type: 'JiraService', properties: jira_properties.to_json, category: 'issue_tracker') end - let!(:bugzilla_service_valid) do + let!(:bugzilla_integration_valid) do services.create!(id: 11, type: 'BugzillaService', title: nil, properties: tracker_properties.to_json, category: 'issue_tracker') end @@ -314,14 +314,14 @@ RSpec.describe Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData, s expect(jira_service_valid.title).to eq(title) expect(jira_service_valid.description).to eq(description) - bugzilla_service_valid.reload - data = IssueTrackerData.find_by(service_id: bugzilla_service_valid.id) + bugzilla_integration_valid.reload + data = IssueTrackerData.find_by(service_id: bugzilla_integration_valid.id) expect(data.project_url).to eq(url) expect(data.issues_url).to eq(issues_url) expect(data.new_issue_url).to eq(new_issue_url) - expect(bugzilla_service_valid.title).to eq(title) - expect(bugzilla_service_valid.description).to eq(description) + expect(bugzilla_integration_valid.title).to eq(title) + expect(bugzilla_integration_valid.description).to eq(description) end end end diff --git a/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb b/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb index 990ef4fbe6a..70906961641 100644 --- a/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb +++ b/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb @@ -73,6 +73,14 @@ RSpec.describe Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrence expect(vulnerabilities_findings.pluck(:uuid)).to eq([desired_uuid_v5]) end 
+ + it 'logs recalculation' do + expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance| + expect(instance).to receive(:info).once + end + + subject + end end context "when finding has a UUIDv5" do @@ -99,6 +107,32 @@ RSpec.describe Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrence end end + context 'when recalculation fails' do + before do + @uuid_v4 = create_finding!( + vulnerability_id: vulnerability_for_uuidv4.id, + project_id: project.id, + scanner_id: different_scanner.id, + primary_identifier_id: different_vulnerability_identifier.id, + report_type: 0, # "sast" + location_fingerprint: "fa18f432f1d56675f4098d318739c3cd5b14eb3e", + uuid: known_uuid_v4 + ) + + allow(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception) + allow(::Gitlab::Database::BulkUpdate).to receive(:execute).and_raise(expected_error) + end + + let(:finding) { @uuid_v4 } + let(:expected_error) { RuntimeError.new } + + it 'captures the errors and does not crash entirely' do + expect { subject }.not_to raise_error + + expect(Gitlab::ErrorTracking).to have_received(:track_and_raise_for_dev_exception).with(expected_error).once + end + end + private def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0) diff --git a/spec/lib/gitlab/background_migration/update_jira_tracker_data_deployment_type_based_on_url_spec.rb b/spec/lib/gitlab/background_migration/update_jira_tracker_data_deployment_type_based_on_url_spec.rb new file mode 100644 index 00000000000..f7466a2ddfd --- /dev/null +++ b/spec/lib/gitlab/background_migration/update_jira_tracker_data_deployment_type_based_on_url_spec.rb @@ -0,0 +1,44 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::UpdateJiraTrackerDataDeploymentTypeBasedOnUrl do + let(:services_table) { table(:services) } + let(:service_jira_cloud) { services_table.create!(id: 1, type: 'JiraService') } + 
let(:service_jira_server) { services_table.create!(id: 2, type: 'JiraService') } + + before do + jira_tracker_data = Class.new(ApplicationRecord) do + self.table_name = 'jira_tracker_data' + + def self.encryption_options + { + key: Settings.attr_encrypted_db_key_base_32, + encode: true, + mode: :per_attribute_iv, + algorithm: 'aes-256-gcm' + } + end + + attr_encrypted :url, encryption_options + attr_encrypted :api_url, encryption_options + attr_encrypted :username, encryption_options + attr_encrypted :password, encryption_options + end + + stub_const('JiraTrackerData', jira_tracker_data) + end + + let!(:tracker_data_cloud) { JiraTrackerData.create!(id: 1, service_id: service_jira_cloud.id, url: "https://test-domain.atlassian.net", deployment_type: 0) } + let!(:tracker_data_server) { JiraTrackerData.create!(id: 2, service_id: service_jira_server.id, url: "http://totally-not-jira-server.company.org", deployment_type: 0) } + + subject { described_class.new.perform(tracker_data_cloud.id, tracker_data_server.id) } + + it "changes unknown deployment_types based on URL" do + expect(JiraTrackerData.pluck(:deployment_type)).to eq([0, 0]) + + subject + + expect(JiraTrackerData.pluck(:deployment_type)).to eq([2, 1]) + end +end diff --git a/spec/lib/gitlab/cache/import/caching_spec.rb b/spec/lib/gitlab/cache/import/caching_spec.rb index d6911dad9d4..8ce12f5d32e 100644 --- a/spec/lib/gitlab/cache/import/caching_spec.rb +++ b/spec/lib/gitlab/cache/import/caching_spec.rb @@ -88,6 +88,18 @@ RSpec.describe Gitlab::Cache::Import::Caching, :clean_gitlab_redis_cache do end end + describe '.values_from_set' do + it 'returns empty list when the set is empty' do + expect(described_class.values_from_set('foo')).to eq([]) + end + + it 'returns the set list of values' do + described_class.set_add('foo', 10) + + expect(described_class.values_from_set('foo')).to eq(['10']) + end + end + describe '.write_multiple' do it 'sets multiple keys when key_prefix not set' do mapping = { 'foo' => 10, 
'bar' => 20 } diff --git a/spec/lib/gitlab/cache_spec.rb b/spec/lib/gitlab/cache_spec.rb index 5b1034a77a3..67c70a77880 100644 --- a/spec/lib/gitlab/cache_spec.rb +++ b/spec/lib/gitlab/cache_spec.rb @@ -26,4 +26,16 @@ RSpec.describe Gitlab::Cache, :request_store do expect(subject.call).to eq("return value") end end + + describe '.delete' do + let(:key) { %w{a cache key} } + + subject(:delete) { described_class.delete(key) } + + it 'calls Rails.cache.delete' do + expect(Rails.cache).to receive(:delete).with(key) + + delete + end + end end diff --git a/spec/lib/gitlab/checks/changes_access_spec.rb b/spec/lib/gitlab/checks/changes_access_spec.rb new file mode 100644 index 00000000000..a46732f8255 --- /dev/null +++ b/spec/lib/gitlab/checks/changes_access_spec.rb @@ -0,0 +1,42 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Checks::ChangesAccess do + describe '#validate!' do + include_context 'changes access checks context' + + before do + allow(project).to receive(:lfs_enabled?).and_return(true) + end + + subject { changes_access } + + context 'without failed checks' do + it "doesn't raise an error" do + expect { subject.validate! }.not_to raise_error + end + + it 'calls lfs checks' do + expect_next_instance_of(Gitlab::Checks::LfsCheck) do |instance| + expect(instance).to receive(:validate!) + end + + subject.validate! + end + end + + context 'when time limit was reached' do + it 'raises a TimeoutError' do + logger = Gitlab::Checks::TimedLogger.new(start_time: timeout.ago, timeout: timeout) + access = described_class.new(changes, + project: project, + user_access: user_access, + protocol: protocol, + logger: logger) + + expect { access.validate! 
}.to raise_error(Gitlab::Checks::TimedLogger::TimeoutError) + end + end + end +end diff --git a/spec/lib/gitlab/checks/lfs_check_spec.rb b/spec/lib/gitlab/checks/lfs_check_spec.rb index 19c1d820dff..331a43b814f 100644 --- a/spec/lib/gitlab/checks/lfs_check_spec.rb +++ b/spec/lib/gitlab/checks/lfs_check_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe Gitlab::Checks::LfsCheck do - include_context 'change access checks context' + include_context 'changes access checks context' let(:blob_object) { project.repository.blob_at_branch('lfs', 'files/lfs/lfs_object.iso') } @@ -15,6 +15,10 @@ RSpec.describe Gitlab::Checks::LfsCheck do describe '#validate!' do context 'with LFS not enabled' do + before do + allow(project).to receive(:lfs_enabled?).and_return(false) + end + it 'skips integrity check' do expect_any_instance_of(Gitlab::Git::LfsChanges).not_to receive(:new_pointers) @@ -51,13 +55,13 @@ RSpec.describe Gitlab::Checks::LfsCheck do context 'with missing newrev' do it_behaves_like 'a skipped integrity check' do - let(:changes) { { oldrev: oldrev, ref: ref } } + let(:changes) { [{ oldrev: oldrev, ref: ref }] } end end context 'with blank newrev' do it_behaves_like 'a skipped integrity check' do - let(:changes) { { oldrev: oldrev, newrev: Gitlab::Git::BLANK_SHA, ref: ref } } + let(:changes) { [{ oldrev: oldrev, newrev: Gitlab::Git::BLANK_SHA, ref: ref }] } end end end diff --git a/spec/lib/gitlab/checks/lfs_integrity_spec.rb b/spec/lib/gitlab/checks/lfs_integrity_spec.rb index 4583cd72cfd..3468094ffa5 100644 --- a/spec/lib/gitlab/checks/lfs_integrity_spec.rb +++ b/spec/lib/gitlab/checks/lfs_integrity_spec.rb @@ -18,12 +18,18 @@ RSpec.describe Gitlab::Checks::LfsIntegrity do operations.commit_tree('8856a329dd38ca86dfb9ce5aa58a16d88cc119bd', "New LFS objects") end - subject { described_class.new(project, newrev, time_left) } + let(:newrevs) { [newrev] } + + subject { described_class.new(project, newrevs, time_left) } describe '#objects_missing?' 
do let(:blob_object) { repository.blob_at_branch('lfs', 'files/lfs/lfs_object.iso') } context 'with LFS not enabled' do + before do + allow(project).to receive(:lfs_enabled?).and_return(false) + end + it 'skips integrity check' do expect_any_instance_of(Gitlab::Git::LfsChanges).not_to receive(:new_pointers) @@ -36,8 +42,28 @@ RSpec.describe Gitlab::Checks::LfsIntegrity do allow(project).to receive(:lfs_enabled?).and_return(true) end + context 'nil rev' do + let(:newrevs) { [nil] } + + it 'skips integrity check' do + expect_any_instance_of(Gitlab::Git::LfsChanges).not_to receive(:new_pointers) + + expect(subject.objects_missing?).to be_falsey + end + end + context 'deletion' do - let(:newrev) { nil } + let(:newrevs) { [Gitlab::Git::BLANK_SHA] } + + it 'skips integrity check' do + expect_any_instance_of(Gitlab::Git::LfsChanges).not_to receive(:new_pointers) + + expect(subject.objects_missing?).to be_falsey + end + end + + context 'no changes' do + let(:newrevs) { [] } it 'skips integrity check' do expect_any_instance_of(Gitlab::Git::LfsChanges).not_to receive(:new_pointers) diff --git a/spec/lib/gitlab/checks/matching_merge_request_spec.rb b/spec/lib/gitlab/checks/matching_merge_request_spec.rb index ca7ee784ee3..feda488a936 100644 --- a/spec/lib/gitlab/checks/matching_merge_request_spec.rb +++ b/spec/lib/gitlab/checks/matching_merge_request_spec.rb @@ -18,6 +18,9 @@ RSpec.describe Gitlab::Checks::MatchingMergeRequest do subject { described_class.new(newrev, target_branch, project) } + let(:total_counter) { subject.send(:total_counter) } + let(:stale_counter) { subject.send(:stale_counter) } + it 'matches a merge request' do expect(subject.match?).to be true end @@ -27,5 +30,70 @@ RSpec.describe Gitlab::Checks::MatchingMergeRequest do expect(matcher.match?).to be false end + + context 'with load balancing disabled', :request_store, :redis do + before do + expect(::Gitlab::Database::LoadBalancing).to receive(:enable?).at_least(:once).and_return(false) + 
expect(::Gitlab::Database::LoadBalancing::Sticking).not_to receive(:unstick_or_continue_sticking) + expect(::Gitlab::Database::LoadBalancing::Sticking).not_to receive(:select_valid_replicas) + end + + it 'does not attempt to stick to primary' do + expect(subject.match?).to be true + end + + it 'increments no counters' do + expect { subject.match? } + .to change { total_counter.get }.by(0) + .and change { stale_counter.get }.by(0) + end + end + + context 'with load balancing enabled', :request_store, :redis do + let(:session) { ::Gitlab::Database::LoadBalancing::Session.current } + let(:all_caught_up) { true } + + before do + expect(::Gitlab::Database::LoadBalancing).to receive(:enable?).at_least(:once).and_return(true) + allow(::Gitlab::Database::LoadBalancing::Sticking).to receive(:all_caught_up?).and_return(all_caught_up) + + expect(::Gitlab::Database::LoadBalancing::Sticking).to receive(:select_valid_host).with(:project, project.id).and_call_original + allow(::Gitlab::Database::LoadBalancing::Sticking).to receive(:select_caught_up_replicas).with(:project, project.id).and_return(all_caught_up) + end + + shared_examples 'secondary that has caught up to a primary' do + it 'continues to use the secondary' do + expect(session.use_primary?).to be false + expect(subject.match?).to be true + end + + it 'only increments total counter' do + expect { subject.match? } + .to change { total_counter.get }.by(1) + .and change { stale_counter.get }.by(0) + end + end + + shared_examples 'secondary that is lagging primary' do + it 'sticks to the primary' do + expect(subject.match?).to be true + expect(session.use_primary?).to be true + end + + it 'increments both total and stale counters' do + expect { subject.match? 
} + .to change { total_counter.get }.by(1) + .and change { stale_counter.get }.by(1) + end + end + + it_behaves_like 'secondary that has caught up to a primary' + + context 'on secondary behind primary' do + let(:all_caught_up) { false } + + it_behaves_like 'secondary that is lagging primary' + end + end end end diff --git a/spec/lib/gitlab/checks/change_access_spec.rb b/spec/lib/gitlab/checks/single_change_access_spec.rb index 6f82dabb285..8b235005b3e 100644 --- a/spec/lib/gitlab/checks/change_access_spec.rb +++ b/spec/lib/gitlab/checks/single_change_access_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Checks::ChangeAccess do +RSpec.describe Gitlab::Checks::SingleChangeAccess do describe '#validate!' do include_context 'change access checks context' @@ -37,14 +37,6 @@ RSpec.describe Gitlab::Checks::ChangeAccess do subject.validate! end - it 'calls lfs checks' do - expect_next_instance_of(Gitlab::Checks::LfsCheck) do |instance| - expect(instance).to receive(:validate!) - end - - subject.validate! - end - it 'calls diff checks' do expect_next_instance_of(Gitlab::Checks::DiffCheck) do |instance| expect(instance).to receive(:validate!) 
diff --git a/spec/lib/gitlab/ci/ansi2json/line_spec.rb b/spec/lib/gitlab/ci/ansi2json/line_spec.rb index d681447a0e8..909c0f1b3ea 100644 --- a/spec/lib/gitlab/ci/ansi2json/line_spec.rb +++ b/spec/lib/gitlab/ci/ansi2json/line_spec.rb @@ -76,10 +76,30 @@ RSpec.describe Gitlab::Ci::Ansi2json::Line do end describe '#set_section_duration' do - it 'sets and formats the section_duration' do - subject.set_section_duration(75) + shared_examples 'set_section_duration' do + it 'sets and formats the section_duration' do + subject.set_section_duration(75) - expect(subject.section_duration).to eq('01:15') + expect(subject.section_duration).to eq('01:15') + end + end + + context 'with default timezone' do + it_behaves_like 'set_section_duration' + end + + context 'with a timezone carrying minutes offset' do + before do + # The actual call by does use Time.at(...).utc that the following + # rubocop rule (Rails/TimeZone) suggests, but for this specific + # test's purposes we needed to mock at the Time.at call point. 
+ + # rubocop:disable Rails/TimeZone + allow(Time).to receive(:at).with(75).and_return(Time.at(75, in: '+05:30')) + # rubocop:enable Rails/TimeZone + end + + it_behaves_like 'set_section_duration' end end diff --git a/spec/lib/gitlab/ci/badge/coverage/template_spec.rb b/spec/lib/gitlab/ci/badge/coverage/template_spec.rb index f010d1bce50..b03ca3c93ca 100644 --- a/spec/lib/gitlab/ci/badge/coverage/template_spec.rb +++ b/spec/lib/gitlab/ci/badge/coverage/template_spec.rb @@ -6,31 +6,7 @@ RSpec.describe Gitlab::Ci::Badge::Coverage::Template do let(:badge) { double(entity: 'coverage', status: 90.00, customization: {}) } let(:template) { described_class.new(badge) } - describe '#key_text' do - it 'says coverage by default' do - expect(template.key_text).to eq 'coverage' - end - - context 'when custom key_text is defined' do - before do - allow(badge).to receive(:customization).and_return({ key_text: "custom text" }) - end - - it 'returns custom value' do - expect(template.key_text).to eq "custom text" - end - - context 'when its size is larger than the max allowed value' do - before do - allow(badge).to receive(:customization).and_return({ key_text: 't' * 65 }) - end - - it 'returns default value' do - expect(template.key_text).to eq 'coverage' - end - end - end - end + it_behaves_like 'a badge template', 'coverage' describe '#value_text' do context 'when coverage is known' do @@ -60,32 +36,6 @@ RSpec.describe Gitlab::Ci::Badge::Coverage::Template do end end - describe '#key_width' do - it 'is fixed by default' do - expect(template.key_width).to eq 62 - end - - context 'when custom key_width is defined' do - before do - allow(badge).to receive(:customization).and_return({ key_width: 101 }) - end - - it 'returns custom value' do - expect(template.key_width).to eq 101 - end - - context 'when it is larger than the max allowed value' do - before do - allow(badge).to receive(:customization).and_return({ key_width: 513 }) - end - - it 'returns default value' do - 
expect(template.key_width).to eq 62 - end - end - end - end - describe '#value_width' do context 'when coverage is known' do it 'is narrower when coverage is known' do diff --git a/spec/lib/gitlab/ci/badge/pipeline/template_spec.rb b/spec/lib/gitlab/ci/badge/pipeline/template_spec.rb index 696bb62b4d6..9392ccef147 100644 --- a/spec/lib/gitlab/ci/badge/pipeline/template_spec.rb +++ b/spec/lib/gitlab/ci/badge/pipeline/template_spec.rb @@ -6,31 +6,7 @@ RSpec.describe Gitlab::Ci::Badge::Pipeline::Template do let(:badge) { double(entity: 'pipeline', status: 'success', customization: {}) } let(:template) { described_class.new(badge) } - describe '#key_text' do - it 'says pipeline by default' do - expect(template.key_text).to eq 'pipeline' - end - - context 'when custom key_text is defined' do - before do - allow(badge).to receive(:customization).and_return({ key_text: 'custom text' }) - end - - it 'returns custom value' do - expect(template.key_text).to eq 'custom text' - end - - context 'when its size is larger than the max allowed value' do - before do - allow(badge).to receive(:customization).and_return({ key_text: 't' * 65 }) - end - - it 'returns default value' do - expect(template.key_text).to eq 'pipeline' - end - end - end - end + it_behaves_like 'a badge template', 'pipeline' describe '#value_text' do it 'is status value' do @@ -38,32 +14,6 @@ RSpec.describe Gitlab::Ci::Badge::Pipeline::Template do end end - describe '#key_width' do - it 'is fixed by default' do - expect(template.key_width).to eq 62 - end - - context 'when custom key_width is defined' do - before do - allow(badge).to receive(:customization).and_return({ key_width: 101 }) - end - - it 'returns custom value' do - expect(template.key_width).to eq 101 - end - - context 'when it is larger than the max allowed value' do - before do - allow(badge).to receive(:customization).and_return({ key_width: 513 }) - end - - it 'returns default value' do - expect(template.key_width).to eq 62 - end - end - end - 
end - describe 'widths and text anchors' do it 'has fixed width and text anchors' do expect(template.width).to eq 116 diff --git a/spec/lib/gitlab/ci/build/auto_retry_spec.rb b/spec/lib/gitlab/ci/build/auto_retry_spec.rb index cfa8c9cd938..b107553bbce 100644 --- a/spec/lib/gitlab/ci/build/auto_retry_spec.rb +++ b/spec/lib/gitlab/ci/build/auto_retry_spec.rb @@ -8,7 +8,7 @@ RSpec.describe Gitlab::Ci::Build::AutoRetry do describe '#allowed?' do using RSpec::Parameterized::TableSyntax - let(:build) { create(:ci_build) } + let(:build) { build_stubbed(:ci_build) } subject { auto_retry.allowed? } @@ -22,6 +22,8 @@ RSpec.describe Gitlab::Ci::Build::AutoRetry do "not matching reason" | 0 | { when: %w[script_error], max: 2 } | :api_failure | false "scheduler failure override" | 1 | { when: %w[scheduler_failure], max: 1 } | :scheduler_failure | false "default for scheduler failure" | 1 | {} | :scheduler_failure | true + "quota is exceeded" | 0 | { max: 2 } | :ci_quota_exceeded | false + "no matching runner" | 0 | { max: 2 } | :no_matching_runner | false end with_them do diff --git a/spec/lib/gitlab/ci/config/entry/need_spec.rb b/spec/lib/gitlab/ci/config/entry/need_spec.rb index a0a5dd52ad4..ab2e8d4db78 100644 --- a/spec/lib/gitlab/ci/config/entry/need_spec.rb +++ b/spec/lib/gitlab/ci/config/entry/need_spec.rb @@ -25,16 +25,6 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Need do it 'returns job needs configuration' do expect(need.value).to eq(name: 'job_name', artifacts: true, optional: false) end - - context 'when the FF ci_needs_optional is disabled' do - before do - stub_feature_flags(ci_needs_optional: false) - end - - it 'returns job needs configuration without `optional`' do - expect(need.value).to eq(name: 'job_name', artifacts: true) - end - end end it_behaves_like 'job type' @@ -134,16 +124,6 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Need do it 'returns job needs configuration' do expect(need.value).to eq(name: 'job_name', artifacts: true, optional: true) end - 
- context 'when the FF ci_needs_optional is disabled' do - before do - stub_feature_flags(ci_needs_optional: false) - end - - it 'returns job needs configuration without `optional`' do - expect(need.value).to eq(name: 'job_name', artifacts: true) - end - end end end diff --git a/spec/lib/gitlab/ci/config/entry/processable_spec.rb b/spec/lib/gitlab/ci/config/entry/processable_spec.rb index 016d59e98b9..f98a6a869d6 100644 --- a/spec/lib/gitlab/ci/config/entry/processable_spec.rb +++ b/spec/lib/gitlab/ci/config/entry/processable_spec.rb @@ -271,10 +271,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable do context 'when workflow rules is not used' do let(:workflow) { double('workflow', 'has_rules?' => false) } - let(:feature_flag_value) { true } before do - stub_feature_flags(ci_raise_job_rules_without_workflow_rules_warning: feature_flag_value) entry.compose!(deps) end @@ -298,12 +296,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable do it 'raises a warning' do expect(entry.warnings).to contain_exactly(/may allow multiple pipelines/) end - - context 'when feature flag is disabled' do - let(:feature_flag_value) { false } - - it_behaves_like 'has no warnings' - end end context 'and its value is `never`' do diff --git a/spec/lib/gitlab/ci/config/entry/reports_spec.rb b/spec/lib/gitlab/ci/config/entry/reports_spec.rb index 98105ebcd55..d8907f7015b 100644 --- a/spec/lib/gitlab/ci/config/entry/reports_spec.rb +++ b/spec/lib/gitlab/ci/config/entry/reports_spec.rb @@ -41,7 +41,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Reports do :dependency_scanning | 'gl-dependency-scanning-report.json' :container_scanning | 'gl-container-scanning-report.json' :dast | 'gl-dast-report.json' - :license_management | 'gl-license-management-report.json' :license_scanning | 'gl-license-scanning-report.json' :performance | 'performance.json' :browser_performance | 'browser-performance.json' diff --git a/spec/lib/gitlab/ci/jwt_spec.rb b/spec/lib/gitlab/ci/jwt_spec.rb index 
480a4a05379..b0d6f5adfb1 100644 --- a/spec/lib/gitlab/ci/jwt_spec.rb +++ b/spec/lib/gitlab/ci/jwt_spec.rb @@ -42,6 +42,7 @@ RSpec.describe Gitlab::Ci::Jwt do expect(payload[:user_email]).to eq(user.email) expect(payload[:user_login]).to eq(user.username) expect(payload[:pipeline_id]).to eq(pipeline.id.to_s) + expect(payload[:pipeline_source]).to eq(pipeline.source.to_s) expect(payload[:job_id]).to eq(build.id.to_s) expect(payload[:ref]).to eq(pipeline.source_ref) expect(payload[:ref_protected]).to eq(build.protected.to_s) diff --git a/spec/lib/gitlab/ci/matching/build_matcher_spec.rb b/spec/lib/gitlab/ci/matching/build_matcher_spec.rb new file mode 100644 index 00000000000..f12e85da9c2 --- /dev/null +++ b/spec/lib/gitlab/ci/matching/build_matcher_spec.rb @@ -0,0 +1,64 @@ +# frozen_string_literal: true + +require 'fast_spec_helper' + +RSpec.describe Gitlab::Ci::Matching::BuildMatcher do + let(:dummy_attributes) do + { + protected: true, + tag_list: %w[tag1 tag2], + build_ids: [1, 2, 3], + project: :my_project + } + end + + subject(:matcher) { described_class.new(attributes) } + + describe '.new' do + context 'when attributes are missing' do + let(:attributes) { {} } + + it { expect { matcher }.to raise_error(KeyError) } + end + + context 'with attributes' do + let(:attributes) { dummy_attributes } + + it { expect(matcher.protected).to eq(true) } + + it { expect(matcher.tag_list).to eq(%w[tag1 tag2]) } + + it { expect(matcher.build_ids).to eq([1, 2, 3]) } + + it { expect(matcher.project).to eq(:my_project) } + end + end + + describe '#protected?' do + context 'when protected is set to true' do + let(:attributes) { dummy_attributes } + + it { expect(matcher.protected?).to be_truthy } + end + + context 'when protected is set to false' do + let(:attributes) { dummy_attributes.merge(protected: false) } + + it { expect(matcher.protected?).to be_falsey } + end + end + + describe '#has_tags?' 
do + context 'when tags are present' do + let(:attributes) { dummy_attributes } + + it { expect(matcher.has_tags?).to be_truthy } + end + + context 'when tags are empty' do + let(:attributes) { dummy_attributes.merge(tag_list: []) } + + it { expect(matcher.has_tags?).to be_falsey } + end + end +end diff --git a/spec/lib/gitlab/ci/matching/runner_matcher_spec.rb b/spec/lib/gitlab/ci/matching/runner_matcher_spec.rb new file mode 100644 index 00000000000..d6492caa31a --- /dev/null +++ b/spec/lib/gitlab/ci/matching/runner_matcher_spec.rb @@ -0,0 +1,113 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Ci::Matching::RunnerMatcher do + let(:dummy_attributes) do + { + runner_type: 'instance_type', + public_projects_minutes_cost_factor: 0, + private_projects_minutes_cost_factor: 1, + run_untagged: false, + access_level: 'ref_protected', + tag_list: %w[tag1 tag2] + } + end + + subject(:matcher) { described_class.new(attributes) } + + describe '.new' do + context 'when attributes are missing' do + let(:attributes) { {} } + + it { expect { matcher }.to raise_error(KeyError) } + end + + context 'with attributes' do + let(:attributes) { dummy_attributes } + + it { expect(matcher.runner_type).to eq('instance_type') } + + it { expect(matcher.public_projects_minutes_cost_factor).to eq(0) } + + it { expect(matcher.private_projects_minutes_cost_factor).to eq(1) } + + it { expect(matcher.run_untagged).to eq(false) } + + it { expect(matcher.access_level).to eq('ref_protected') } + + it { expect(matcher.tag_list).to eq(%w[tag1 tag2]) } + end + end + + describe '#instance_type?' do + let(:attributes) { dummy_attributes } + + it { expect(matcher.instance_type?).to be_truthy } + + context 'context with private runners' do + let(:attributes) { dummy_attributes.merge(runner_type: 'project_type') } + + it { expect(matcher.instance_type?).to be_falsey } + end + end + + describe '#matches?' 
do + let(:build) { build_stubbed(:ci_build, build_attributes) } + let(:runner_matcher) { described_class.new(dummy_attributes.merge(runner_attributes)) } + + subject { runner_matcher.matches?(record) } + + context 'with an instance of BuildMatcher' do + using RSpec::Parameterized::TableSyntax + + where(:ref_protected, :build_protected, :run_untagged, :runner_tags, :build_tags, :result) do + # the `ref_protected? && !build.protected?` part: + true | true | true | [] | [] | true + true | false | true | [] | [] | false + false | true | true | [] | [] | true + false | false | true | [] | [] | true + # `accepting_tags?(build)` bit: + true | true | true | [] | [] | true + true | true | true | [] | ['a'] | false + true | true | true | %w[a b] | ['a'] | true + true | true | true | ['a'] | %w[a b] | false + true | true | true | ['a'] | ['a'] | true + true | true | false | ['a'] | ['a'] | true + true | true | false | ['b'] | ['a'] | false + true | true | false | %w[a b] | ['a'] | true + end + + with_them do + let(:build_attributes) do + { + tag_list: build_tags, + protected: build_protected + } + end + + let(:runner_attributes) do + { + access_level: ref_protected ? 
'ref_protected' : 'not_protected', + run_untagged: run_untagged, + tag_list: runner_tags + } + end + + let(:record) { build.build_matcher } + + it { is_expected.to eq(result) } + end + end + + context 'with an instance of Ci::Build' do + let(:runner_attributes) { {} } + let(:build_attributes) { {} } + let(:record) { build } + + it 'raises ArgumentError' do + expect { subject }.to raise_error ArgumentError, /BuildMatcher are allowed/ + end + end + end +end diff --git a/spec/lib/gitlab/ci/parsers/test/junit_spec.rb b/spec/lib/gitlab/ci/parsers/test/junit_spec.rb index 7da602251a5..4ca8f74e57f 100644 --- a/spec/lib/gitlab/ci/parsers/test/junit_spec.rb +++ b/spec/lib/gitlab/ci/parsers/test/junit_spec.rb @@ -417,6 +417,30 @@ RSpec.describe Gitlab::Ci::Parsers::Test::Junit do end end + context 'when attachment is specified in test case with error' do + let(:junit) do + <<~EOF + <testsuites> + <testsuite> + <testcase classname='Calculator' name='sumTest1' time='0.01'> + <error>Some error</error> + <system-out>[[ATTACHMENT|some/path.png]]</system-out> + </testcase> + </testsuite> + </testsuites> + EOF + end + + it 'assigns correct attributes to the test case' do + expect { subject }.not_to raise_error + + expect(test_cases[0].has_attachment?).to be_truthy + expect(test_cases[0].attachment).to eq("some/path.png") + + expect(test_cases[0].job).to eq(job) + end + end + private def flattened_test_cases(test_suite) diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb index e3061f8095b..16517b39a45 100644 --- a/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External do let_it_be(:project) { create(:project) } - let_it_be(:user) { create(:user) } + let_it_be(:user) { create(:user, :with_sign_ins) } let(:pipeline) { 
build(:ci_empty_pipeline, user: user, project: project) } let!(:step) { described_class.new(pipeline, command) } @@ -43,7 +43,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External do end let(:save_incompleted) { true } - let(:dot_com) { true } let(:command) do Gitlab::Ci::Pipeline::Chain::Command.new( project: project, current_user: user, yaml_processor_result: yaml_processor_result, save_incompleted: save_incompleted @@ -57,7 +56,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External do before do stub_env('EXTERNAL_VALIDATION_SERVICE_URL', validation_service_url) - allow(Gitlab).to receive(:com?).and_return(dot_com) allow(Labkit::Correlation::CorrelationId).to receive(:current_id).and_return('correlation-id') end @@ -199,61 +197,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External do end end - context 'when the feature flag is disabled' do - before do - stub_feature_flags(ci_external_validation_service: false) - stub_request(:post, validation_service_url) - end - - it 'does not drop the pipeline' do - perform! - - expect(pipeline.status).not_to eq('failed') - expect(pipeline.errors).to be_empty - end - - it 'does not break the chain' do - perform! - - expect(step.break?).to be false - end - - it 'does not make requests' do - perform! - - expect(WebMock).not_to have_requested(:post, validation_service_url) - end - end - - context 'when not on .com' do - let(:dot_com) { false } - - before do - stub_feature_flags(ci_external_validation_service: false) - stub_request(:post, validation_service_url).to_return(status: 404, body: "{}") - end - - it 'drops the pipeline' do - perform! - - expect(pipeline.status).to eq('failed') - expect(pipeline).to be_persisted - expect(pipeline.errors.to_a).to include('External validation failed') - end - - it 'breaks the chain' do - perform! 
- - expect(step.break?).to be true - end - - it 'logs the authorization' do - expect(Gitlab::AppLogger).to receive(:info).with(message: 'Pipeline not authorized', project_id: project.id, user_id: user.id) - - perform! - end - end - context 'when validation returns 406 Not Acceptable' do before do stub_request(:post, validation_service_url).to_return(status: 406, body: "{}") diff --git a/spec/lib/gitlab/ci/pipeline/preloader_spec.rb b/spec/lib/gitlab/ci/pipeline/preloader_spec.rb index ae423fa04f9..5b644e42451 100644 --- a/spec/lib/gitlab/ci/pipeline/preloader_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/preloader_spec.rb @@ -5,9 +5,11 @@ require 'spec_helper' RSpec.describe Gitlab::Ci::Pipeline::Preloader do let(:stage) { double(:stage) } let(:commit) { double(:commit) } + let(:scheduled_action) { double(:scheduled_action) } + let(:manual_action) { double(:manual_action) } let(:pipeline) do - double(:pipeline, commit: commit, stages: [stage]) + double(:pipeline, commit: commit, stages: [stage], scheduled_actions: [scheduled_action], manual_actions: [manual_action]) end describe '.preload!' 
do @@ -33,6 +35,8 @@ RSpec.describe Gitlab::Ci::Pipeline::Preloader do expect(pipeline).to receive(:lazy_ref_commit) expect(pipeline).to receive(:number_of_warnings) expect(stage).to receive(:number_of_warnings) + expect(scheduled_action).to receive(:persisted_environment) + expect(manual_action).to receive(:persisted_environment) described_class.preload!([pipeline]) end @@ -42,6 +46,8 @@ RSpec.describe Gitlab::Ci::Pipeline::Preloader do allow(pipeline).to receive(:lazy_ref_commit) allow(pipeline).to receive(:number_of_warnings) allow(stage).to receive(:number_of_warnings) + allow(scheduled_action).to receive(:persisted_environment) + allow(manual_action).to receive(:persisted_environment) pipelines = [pipeline, pipeline] diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb index 058fb25807d..020f957cf70 100644 --- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb @@ -1101,17 +1101,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do it "does not return an error" do expect(subject.errors).to be_empty end - - context 'when the FF ci_needs_optional is disabled' do - before do - stub_feature_flags(ci_needs_optional: false) - end - - it "returns an error" do - expect(subject.errors).to contain_exactly( - "'rspec' job needs 'build' job, but it was not added to the pipeline") - end - end end end diff --git a/spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb index 175b12637e6..ad89f1f5cda 100644 --- a/spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb @@ -128,7 +128,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Environment do context 'when environment has already been created' do before do - create(:environment, :staging, project: project, name: 'customer-portal') + create(:environment, project: project, name: 'customer-portal', tier: 
:staging) end it 'does not overwrite the specified deployment tier' do diff --git a/spec/lib/gitlab/ci/templates/npm_spec.rb b/spec/lib/gitlab/ci/templates/npm_spec.rb index b10e2b0e057..2456c9ae545 100644 --- a/spec/lib/gitlab/ci/templates/npm_spec.rb +++ b/spec/lib/gitlab/ci/templates/npm_spec.rb @@ -2,8 +2,8 @@ require 'spec_helper' -RSpec.describe 'npm.latest.gitlab-ci.yml' do - subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('npm.latest') } +RSpec.describe 'npm.gitlab-ci.yml' do + subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('npm') } describe 'the created pipeline' do let(:repo_files) { { 'package.json' => '{}', 'README.md' => '' } } diff --git a/spec/lib/gitlab/ci/templates/templates_spec.rb b/spec/lib/gitlab/ci/templates/templates_spec.rb index 56443e611e8..2e6df7da232 100644 --- a/spec/lib/gitlab/ci/templates/templates_spec.rb +++ b/spec/lib/gitlab/ci/templates/templates_spec.rb @@ -6,26 +6,105 @@ RSpec.describe 'CI YML Templates' do subject { Gitlab::Ci::YamlProcessor.new(content).execute } let(:all_templates) { Gitlab::Template::GitlabCiYmlTemplate.all.map(&:full_name) } - let(:excluded_templates) do - all_templates.select do |name| + excluded = all_templates.select do |name| Gitlab::Template::GitlabCiYmlTemplate.excluded_patterns.any? 
{ |pattern| pattern.match?(name) } end + excluded + ["Terraform.gitlab-ci.yml"] end - context 'when including available templates in a CI YAML configuration' do - using RSpec::Parameterized::TableSyntax + before do + stub_feature_flags( + redirect_to_latest_template_terraform: false, + redirect_to_latest_template_security_api_fuzzing: false, + redirect_to_latest_template_security_dast: false) + end - where(:template_name) do - all_templates - excluded_templates + shared_examples 'require default stages to be included' do + it 'require default stages to be included' do + expect(subject.stages).to include(*Gitlab::Ci::Config::Entry::Stages.default) end + end + + context 'that support autodevops' do + non_autodevops_templates = [ + 'Security/DAST-API.gitlab-ci.yml', + 'Security/API-Fuzzing.gitlab-ci.yml' + ] + + context 'when including available templates in a CI YAML configuration' do + using RSpec::Parameterized::TableSyntax + + where(:template_name) do + all_templates - excluded_templates - non_autodevops_templates + end + + with_them do + let(:content) do + <<~EOS + include: + - template: #{template_name} + + concrete_build_implemented_by_a_user: + stage: test + script: do something + EOS + end + + it { is_expected.to be_valid } + + include_examples 'require default stages to be included' + end + end + + context 'when including unavailable templates in a CI YAML configuration' do + using RSpec::Parameterized::TableSyntax + + where(:template_name) do + excluded_templates + end + + with_them do + let(:content) do + <<~EOS + include: + - template: #{template_name} + + concrete_build_implemented_by_a_user: + stage: test + script: do something + EOS + end + + it { is_expected.not_to be_valid } + end + end + end + + describe 'that do not support autodevops' do + context 'when DAST API template' do + # The DAST API template purposly excludes a stages + # definition. 
- with_them do - let(:content) do - if template_name == 'Security/DAST-API.gitlab-ci.yml' - # The DAST-API template purposly excludes a stages - # definition. + let(:template_name) { 'Security/DAST-API.gitlab-ci.yml' } + context 'with default stages' do + let(:content) do + <<~EOS + include: + - template: #{template_name} + + concrete_build_implemented_by_a_user: + stage: test + script: do something + EOS + end + + it { is_expected.not_to be_valid } + end + + context 'with defined stages' do + let(:content) do <<~EOS include: - template: #{template_name} @@ -40,7 +119,22 @@ RSpec.describe 'CI YML Templates' do stage: test script: do something EOS - else + end + + it { is_expected.to be_valid } + + include_examples 'require default stages to be included' + end + end + + context 'when API Fuzzing template' do + # The API Fuzzing template purposly excludes a stages + # definition. + + let(:template_name) { 'Security/API-Fuzzing.gitlab-ci.yml' } + + context 'with default stages' do + let(:content) do <<~EOS include: - template: #{template_name} @@ -50,39 +144,31 @@ RSpec.describe 'CI YML Templates' do script: do something EOS end - end - - it 'is valid' do - expect(subject).to be_valid - end - it 'require default stages to be included' do - expect(subject.stages).to include(*Gitlab::Ci::Config::Entry::Stages.default) + it { is_expected.not_to be_valid } end - end - end - context 'when including unavailable templates in a CI YAML configuration' do - using RSpec::Parameterized::TableSyntax + context 'with defined stages' do + let(:content) do + <<~EOS + include: + - template: #{template_name} - where(:template_name) do - excluded_templates - end + stages: + - build + - test + - deploy + - fuzz - with_them do - let(:content) do - <<~EOS - include: - - template: #{template_name} + concrete_build_implemented_by_a_user: + stage: test + script: do something + EOS + end - concrete_build_implemented_by_a_user: - stage: test - script: do something - EOS - end + it { 
is_expected.to be_valid } - it 'is not valid' do - expect(subject).not_to be_valid + include_examples 'require default stages to be included' end end end diff --git a/spec/lib/gitlab/ci/trace/chunked_io_spec.rb b/spec/lib/gitlab/ci/trace/chunked_io_spec.rb index f878d24fe4b..63625244fe8 100644 --- a/spec/lib/gitlab/ci/trace/chunked_io_spec.rb +++ b/spec/lib/gitlab/ci/trace/chunked_io_spec.rb @@ -10,7 +10,7 @@ RSpec.describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do let(:chunked_io) { described_class.new(build) } before do - stub_feature_flags(ci_enable_live_trace: true, gitlab_ci_trace_read_consistency: true) + stub_feature_flags(ci_enable_live_trace: true) end describe "#initialize" do diff --git a/spec/lib/gitlab/ci/variables/collection/item_spec.rb b/spec/lib/gitlab/ci/variables/collection/item_spec.rb index ca9dc95711d..9443bf6d6d5 100644 --- a/spec/lib/gitlab/ci/variables/collection/item_spec.rb +++ b/spec/lib/gitlab/ci/variables/collection/item_spec.rb @@ -70,6 +70,43 @@ RSpec.describe Gitlab::Ci::Variables::Collection::Item do end end + describe '.possible_var_reference?' 
do + context 'table tests' do + using RSpec::Parameterized::TableSyntax + + where do + { + "empty value": { + value: '', + result: false + }, + "normal value": { + value: 'some value', + result: false + }, + "simple expansions": { + value: 'key$variable', + result: true + }, + "complex expansions": { + value: 'key${variable}${variable2}', + result: true + }, + "complex expansions for Windows": { + value: 'key%variable%%variable2%', + result: true + } + } + end + + with_them do + subject { Gitlab::Ci::Variables::Collection::Item.possible_var_reference?(value) } + + it { is_expected.to eq(result) } + end + end + end + describe '#depends_on' do let(:item) { Gitlab::Ci::Variables::Collection::Item.new(**variable) } @@ -128,7 +165,7 @@ RSpec.describe Gitlab::Ci::Variables::Collection::Item do end it 'supports using an active record resource' do - variable = create(:ci_variable, key: 'CI_VAR', value: '123') + variable = build(:ci_variable, key: 'CI_VAR', value: '123') resource = described_class.fabricate(variable) expect(resource).to be_a(described_class) diff --git a/spec/lib/gitlab/ci/variables/collection/sort_spec.rb b/spec/lib/gitlab/ci/variables/collection/sort_spec.rb index 73cf0e19d00..01eef673c35 100644 --- a/spec/lib/gitlab/ci/variables/collection/sort_spec.rb +++ b/spec/lib/gitlab/ci/variables/collection/sort_spec.rb @@ -1,6 +1,7 @@ # frozen_string_literal: true -require 'spec_helper' +require 'fast_spec_helper' +require 'rspec-parameterized' RSpec.describe Gitlab::Ci::Variables::Collection::Sort do describe '#initialize with non-Collection value' do @@ -57,9 +58,9 @@ RSpec.describe Gitlab::Ci::Variables::Collection::Sort do }, "variable containing escaped variable reference": { variables: [ - { key: 'variable_a', value: 'value' }, { key: 'variable_b', value: '$$variable_a' }, - { key: 'variable_c', value: '$variable_b' } + { key: 'variable_c', value: '$variable_a' }, + { key: 'variable_a', value: 'value' } ], expected_errors: nil } @@ -144,11 +145,11 @@ 
RSpec.describe Gitlab::Ci::Variables::Collection::Sort do }, "variable containing escaped variable reference": { variables: [ - { key: 'variable_c', value: '$variable_b' }, { key: 'variable_b', value: '$$variable_a' }, + { key: 'variable_c', value: '$variable_a' }, { key: 'variable_a', value: 'value' } ], - result: %w[variable_a variable_b variable_c] + result: %w[variable_b variable_a variable_c] } } end diff --git a/spec/lib/gitlab/ci/variables/collection_spec.rb b/spec/lib/gitlab/ci/variables/collection_spec.rb index 7b77754190a..abda27f0d6e 100644 --- a/spec/lib/gitlab/ci/variables/collection_spec.rb +++ b/spec/lib/gitlab/ci/variables/collection_spec.rb @@ -44,6 +44,30 @@ RSpec.describe Gitlab::Ci::Variables::Collection do end end + describe '#compact' do + subject do + described_class.new + .append(key: 'STRING', value: 'string') + .append(key: 'NIL', value: nil) + .append(key: nil, value: 'string') + end + + it 'returns a new Collection instance', :aggregate_failures do + collection = subject.compact + + expect(collection).to be_an_instance_of(described_class) + expect(collection).not_to eql(subject) + end + + it 'rejects pair that has nil value', :aggregate_failures do + collection = subject.compact + + expect(collection).not_to include(key: 'NIL', value: nil, public: true) + expect(collection).to include(key: 'STRING', value: 'string', public: true) + expect(collection).to include(key: nil, value: 'string', public: true) + end + end + describe '#concat' do it 'appends all elements from an array' do collection = described_class.new([{ key: 'VAR_1', value: '1' }]) @@ -229,6 +253,11 @@ RSpec.describe Gitlab::Ci::Variables::Collection do value: 'key${MISSING_VAR}-${CI_JOB_NAME}', result: 'key${MISSING_VAR}-test-1', keep_undefined: true + }, + "escaped characters are kept intact": { + value: 'key-$TEST1-%%HOME%%-$${HOME}', + result: 'key-test-3-%%HOME%%-$${HOME}', + keep_undefined: false } } end @@ -291,6 +320,14 @@ RSpec.describe 
Gitlab::Ci::Variables::Collection do ], keep_undefined: false }, + "escaped characters in complex expansions are kept intact": { + variables: [ + { key: 'variable3', value: 'key_${variable}_$${HOME}_%%HOME%%' }, + { key: 'variable', value: '$variable2' }, + { key: 'variable2', value: 'value2' } + ], + keep_undefined: false + }, "array with cyclic dependency": { variables: [ { key: 'variable', value: '$variable2' }, @@ -391,6 +428,30 @@ RSpec.describe Gitlab::Ci::Variables::Collection do { key: 'variable3', value: 'keyvalueresult' } ] }, + "escaped characters in complex expansions keeping undefined are kept intact": { + variables: [ + { key: 'variable3', value: 'key_${variable}_$${HOME}_%%HOME%%' }, + { key: 'variable', value: '$variable2' }, + { key: 'variable2', value: 'value' } + ], + keep_undefined: true, + result: [ + { key: 'variable', value: 'value' }, + { key: 'variable2', value: 'value' }, + { key: 'variable3', value: 'key_value_$${HOME}_%%HOME%%' } + ] + }, + "escaped characters in complex expansions discarding undefined are kept intact": { + variables: [ + { key: 'variable2', value: 'key_${variable4}_$${HOME}_%%HOME%%' }, + { key: 'variable', value: 'value_$${HOME}_%%HOME%%' } + ], + keep_undefined: false, + result: [ + { key: 'variable', value: 'value_$${HOME}_%%HOME%%' }, + { key: 'variable2', value: 'key__$${HOME}_%%HOME%%' } + ] + }, "out-of-order expansion": { variables: [ { key: 'variable3', value: 'key$variable2$variable' }, @@ -417,7 +478,7 @@ RSpec.describe Gitlab::Ci::Variables::Collection do { key: 'variable3', value: 'keyresultvalue' } ] }, - "missing variable": { + "missing variable discarding original": { variables: [ { key: 'variable2', value: 'key$variable' } ], @@ -461,6 +522,19 @@ RSpec.describe Gitlab::Ci::Variables::Collection do { key: 'variable3', value: 'key_$variable2_value2' } ] }, + "variable value referencing password with special characters": { + variables: [ + { key: 'VAR', value: '$PASSWORD' }, + { key: 'PASSWORD', value: 
'my_password$$_%%_$A' }, + { key: 'A', value: 'value' } + ], + keep_undefined: false, + result: [ + { key: 'VAR', value: 'my_password$$_%%_value' }, + { key: 'PASSWORD', value: 'my_password$$_%%_value' }, + { key: 'A', value: 'value' } + ] + }, "cyclic dependency causes original array to be returned": { variables: [ { key: 'variable', value: '$variable2' }, diff --git a/spec/lib/gitlab/ci/yaml_processor/result_spec.rb b/spec/lib/gitlab/ci/yaml_processor/result_spec.rb index e345cd4de9b..25705fd4260 100644 --- a/spec/lib/gitlab/ci/yaml_processor/result_spec.rb +++ b/spec/lib/gitlab/ci/yaml_processor/result_spec.rb @@ -39,6 +39,59 @@ module Gitlab expect(expanded_config).to include(*included_config.keys) end end + + describe '#yaml_variables_for' do + let(:config_content) do + <<~YAML + variables: + VAR1: value 1 + VAR2: value 2 + + job: + script: echo 'hello' + variables: + VAR1: value 11 + YAML + end + + let(:job_name) { :job } + + subject(:yaml_variables_for) { result.yaml_variables_for(job_name) } + + it 'returns calculated variables with root and job variables' do + is_expected.to match_array([ + { key: 'VAR1', value: 'value 11', public: true }, + { key: 'VAR2', value: 'value 2', public: true } + ]) + end + + context 'when an absent job is sent' do + let(:job_name) { :invalid_job } + + it { is_expected.to eq([]) } + end + end + + describe '#stage_for' do + let(:config_content) do + <<~YAML + job: + script: echo 'hello' + YAML + end + + let(:job_name) { :job } + + subject(:stage_for) { result.stage_for(job_name) } + + it { is_expected.to eq('test') } + + context 'when an absent job is sent' do + let(:job_name) { :invalid_job } + + it { is_expected.to be_nil } + end + end end end end diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb index 94ab4819361..e8e44f884cf 100644 --- a/spec/lib/gitlab/ci/yaml_processor_spec.rb +++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb @@ -485,10 +485,6 @@ module Gitlab end describe 
'#warnings' do - before do - stub_feature_flags(ci_raise_job_rules_without_workflow_rules_warning: true) - end - context 'when a warning is raised in a given entry' do let(:config) do <<-EOYML @@ -602,27 +598,6 @@ module Gitlab it_behaves_like 'has warnings and expected error', /build job: need test is not defined in prior stages/ end end - - context 'when feature flag is disabled' do - before do - stub_feature_flags(ci_raise_job_rules_without_workflow_rules_warning: false) - end - - context 'job rules used without workflow rules' do - let(:config) do - <<-EOYML - rspec: - script: rspec - rules: - - when: always - EOYML - end - - it 'does not raise the warning' do - expect(subject.warnings).to be_empty - end - end - end end describe 'only / except policies validations' do diff --git a/spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb b/spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb index 6b568320953..b0f7703462a 100644 --- a/spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb +++ b/spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb @@ -53,7 +53,7 @@ RSpec.describe Gitlab::Cleanup::OrphanLfsFileReferences do expect(null_logger).to receive(:info).with(/Looking for orphan LFS files/) expect(null_logger).to receive(:info).with(/Nothing to do/) - project.lfs_objects_projects.delete_all + LfsObjectsProject.where(project: project).delete_all expect(service).not_to receive(:remove_orphan_references) diff --git a/spec/lib/gitlab/cluster/mixins/unicorn_http_server_spec.rb b/spec/lib/gitlab/cluster/mixins/unicorn_http_server_spec.rb deleted file mode 100644 index 7f7c95b2527..00000000000 --- a/spec/lib/gitlab/cluster/mixins/unicorn_http_server_spec.rb +++ /dev/null @@ -1,117 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -# For easier debugging set `UNICORN_DEBUG=1` - -RSpec.describe Gitlab::Cluster::Mixins::UnicornHttpServer do - before do - stub_const('UNICORN_STARTUP_TIMEOUT', 30) - end - - context 'when running 
Unicorn' do - using RSpec::Parameterized::TableSyntax - - where(:signal, :exitstatus, :termsig) do - # executes phased restart block - :USR2 | 140 | nil - :QUIT | 140 | nil - - # does not execute phased restart block - :INT | 0 | nil - :TERM | 0 | nil - end - - with_them do - it 'properly handles process lifecycle' do - with_unicorn(workers: 1) do |pid| - Process.kill(signal, pid) - - child_pid, child_status = Process.wait2(pid) - expect(child_pid).to eq(pid) - expect(child_status.exitstatus).to eq(exitstatus) - expect(child_status.termsig).to eq(termsig) - end - end - end - end - - private - - def with_unicorn(workers:, timeout: UNICORN_STARTUP_TIMEOUT) - with_unicorn_configs(workers: workers) do |unicorn_rb, config_ru| - cmdline = [ - "bundle", "exec", "unicorn", - "-I", Rails.root.to_s, - "-c", unicorn_rb, - config_ru - ] - - IO.popen(cmdline) do |process| - # wait for process to start: - # I, [2019-10-15T13:21:27.565225 #3089] INFO -- : master process ready - wait_for_output(process, /master process ready/, timeout: timeout) - consume_output(process) - - yield(process.pid) - ensure - begin - Process.kill(:KILL, process.pid) - rescue Errno::ESRCH - end - end - end - end - - def with_unicorn_configs(workers:) - Dir.mktmpdir do |dir| - File.write "#{dir}/unicorn.rb", <<-EOF - require './lib/gitlab/cluster/lifecycle_events' - require './lib/gitlab/cluster/mixins/unicorn_http_server' - - worker_processes #{workers} - listen "127.0.0.1:0" - preload_app true - - Unicorn::HttpServer.prepend(#{described_class}) - - mutex = Mutex.new - - Gitlab::Cluster::LifecycleEvents.on_before_blackout_period do - mutex.synchronize do - exit(140) - end - end - - # redirect stderr to stdout - $stderr.reopen($stdout) - EOF - - File.write "#{dir}/config.ru", <<-EOF - run -> (env) { [404, {}, ['']] } - EOF - - yield("#{dir}/unicorn.rb", "#{dir}/config.ru") - end - end - - def wait_for_output(process, output, timeout:) - Timeout.timeout(timeout) do - loop do - line = process.readline - 
puts "UNICORN_DEBUG: #{line}" if ENV['UNICORN_DEBUG'] - break if line =~ output - end - end - end - - def consume_output(process) - Thread.new do - loop do - line = process.readline - puts "UNICORN_DEBUG: #{line}" if ENV['UNICORN_DEBUG'] - end - rescue StandardError - end - end -end diff --git a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb index 19e52d2cf4a..d08057fb10a 100644 --- a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb +++ b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb @@ -58,6 +58,7 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do expect(directives['script_src']).to eq("'strict-dynamic' 'self' 'unsafe-inline' 'unsafe-eval' https://www.google.com/recaptcha/ https://www.recaptcha.net https://apis.google.com https://example.com") expect(directives['style_src']).to eq("'self' 'unsafe-inline' https://example.com") + expect(directives['font_src']).to eq("'self' https://example.com") end end end diff --git a/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb b/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb index 2cdf95ea101..b9e0132badb 100644 --- a/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb +++ b/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb @@ -4,14 +4,15 @@ require 'spec_helper' RSpec.describe Gitlab::CycleAnalytics::StageSummary do let(:project) { create(:project, :repository) } - let(:options) { { from: 1.day.ago, current_user: user } } + let(:options) { { from: 1.day.ago } } + let(:args) { { options: options, current_user: user } } let(:user) { create(:user, :admin) } before do project.add_maintainer(user) end - let(:stage_summary) { described_class.new(project, **options).data } + let(:stage_summary) { described_class.new(project, **args).data } describe "#new_issues" do subject { stage_summary.first } @@ -117,11 +118,11 @@ RSpec.describe Gitlab::CycleAnalytics::StageSummary do before do 
project.add_guest(guest_user) - options.merge!({ current_user: guest_user }) + args.merge!({ current_user: guest_user }) end it 'does not include commit stats' do - data = described_class.new(project, **options).data + data = described_class.new(project, **args).data expect(includes_commits?(data)).to be_falsy end diff --git a/spec/lib/gitlab/data_builder/build_spec.rb b/spec/lib/gitlab/data_builder/build_spec.rb index a31e5a1d1e2..325fdb90929 100644 --- a/spec/lib/gitlab/data_builder/build_spec.rb +++ b/spec/lib/gitlab/data_builder/build_spec.rb @@ -47,6 +47,8 @@ RSpec.describe Gitlab::DataBuilder::Build do it { expect(data[:runner][:id]).to eq(build.runner.id) } it { expect(data[:runner][:tags]).to match_array(tag_names) } it { expect(data[:runner][:description]).to eq(build.runner.description) } + it { expect(data[:runner][:runner_type]).to eq(build.runner.runner_type) } + it { expect(data[:runner][:is_shared]).to eq(build.runner.instance_type?) } it { expect(data[:environment]).to be_nil } context 'commit author_url' do diff --git a/spec/lib/gitlab/data_builder/pipeline_spec.rb b/spec/lib/gitlab/data_builder/pipeline_spec.rb index bec1e612c02..c05a044f0de 100644 --- a/spec/lib/gitlab/data_builder/pipeline_spec.rb +++ b/spec/lib/gitlab/data_builder/pipeline_spec.rb @@ -58,8 +58,10 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do it 'has runner attributes', :aggregate_failures do expect(runner_data[:id]).to eq(ci_runner.id) expect(runner_data[:description]).to eq(ci_runner.description) + expect(runner_data[:runner_type]).to eq(ci_runner.runner_type) expect(runner_data[:active]).to eq(ci_runner.active) expect(runner_data[:tags]).to match_array(tag_names) + expect(runner_data[:is_shared]).to eq(ci_runner.instance_type?) 
end end diff --git a/spec/lib/gitlab/data_builder/wiki_page_spec.rb b/spec/lib/gitlab/data_builder/wiki_page_spec.rb index ec768cf9719..276ddb1e0af 100644 --- a/spec/lib/gitlab/data_builder/wiki_page_spec.rb +++ b/spec/lib/gitlab/data_builder/wiki_page_spec.rb @@ -4,8 +4,8 @@ require 'spec_helper' RSpec.describe Gitlab::DataBuilder::WikiPage do let_it_be(:project) { create(:project, :repository, :wiki_repo) } - let(:wiki_page) { create(:wiki_page, wiki: project.wiki) } - let(:user) { create(:user) } + let_it_be(:wiki_page) { create(:wiki_page, wiki: project.wiki) } + let_it_be(:user) { create(:user) } describe '.build' do let(:data) { described_class.build(wiki_page, user, 'create') } @@ -19,5 +19,6 @@ RSpec.describe Gitlab::DataBuilder::WikiPage do it { expect(data[:object_attributes]).to include(wiki_page.hook_attrs) } it { expect(data[:object_attributes]).to include(url: Gitlab::UrlBuilder.build(wiki_page)) } it { expect(data[:object_attributes]).to include(action: 'create') } + it { expect(data[:object_attributes]).to include(diff_url: Gitlab::UrlBuilder.build(wiki_page, action: :diff, version_id: wiki_page.version.id)) } end end diff --git a/spec/lib/gitlab/database/background_migration/batched_job_spec.rb b/spec/lib/gitlab/database/background_migration/batched_job_spec.rb index 78e0b7627e9..2de784d3e16 100644 --- a/spec/lib/gitlab/database/background_migration/batched_job_spec.rb +++ b/spec/lib/gitlab/database/background_migration/batched_job_spec.rb @@ -49,16 +49,6 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d let(:batched_job) { build(:batched_background_migration_job) } let(:batched_migration) { batched_job.batched_migration } - describe '#migration_aborted?' do - before do - batched_migration.status = :aborted - end - - it 'returns the migration aborted?' do - expect(batched_job.migration_aborted?).to eq(batched_migration.aborted?) 
- end - end - describe '#migration_job_class' do it 'returns the migration job_class' do expect(batched_job.migration_job_class).to eq(batched_migration.job_class) diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb index 43e34325419..d881390cd52 100644 --- a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb +++ b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb @@ -19,6 +19,12 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m end end + describe 'validations' do + subject { build(:batched_background_migration) } + + it { is_expected.to validate_uniqueness_of(:job_arguments).scoped_to(:job_class_name, :table_name, :column_name) } + end + describe '.queue_order' do let!(:migration1) { create(:batched_background_migration) } let!(:migration2) { create(:batched_background_migration) } @@ -36,6 +42,38 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m it 'returns the first active migration according to queue order' do expect(described_class.active_migration).to eq(migration2) + create(:batched_background_migration_job, batched_migration: migration1, batch_size: 1000, status: :succeeded) + end + end + + describe '.queued' do + let!(:migration1) { create(:batched_background_migration, :finished) } + let!(:migration2) { create(:batched_background_migration, :paused) } + let!(:migration3) { create(:batched_background_migration, :active) } + + it 'returns active and paused migrations' do + expect(described_class.queued).to contain_exactly(migration2, migration3) + end + end + + describe '.successful_rows_counts' do + let!(:migration1) { create(:batched_background_migration) } + let!(:migration2) { create(:batched_background_migration) } + let!(:migration_without_jobs) { create(:batched_background_migration) } + + before do + 
create(:batched_background_migration_job, batched_migration: migration1, batch_size: 1000, status: :succeeded) + create(:batched_background_migration_job, batched_migration: migration1, batch_size: 200, status: :failed) + create(:batched_background_migration_job, batched_migration: migration2, batch_size: 500, status: :succeeded) + create(:batched_background_migration_job, batched_migration: migration2, batch_size: 200, status: :running) + end + + it 'returns totals from successful jobs' do + results = described_class.successful_rows_counts([migration1, migration2, migration_without_jobs]) + + expect(results[migration1.id]).to eq(1000) + expect(results[migration2.id]).to eq(500) + expect(results[migration_without_jobs.id]).to eq(nil) end end @@ -324,4 +362,29 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m subject end end + + describe '.for_configuration' do + let!(:migration) do + create( + :batched_background_migration, + job_class_name: 'MyJobClass', + table_name: :projects, + column_name: :id, + job_arguments: [[:id], [:id_convert_to_bigint]] + ) + end + + before do + create(:batched_background_migration, job_class_name: 'OtherClass') + create(:batched_background_migration, table_name: 'other_table') + create(:batched_background_migration, column_name: 'other_column') + create(:batched_background_migration, job_arguments: %w[other arguments]) + end + + it 'finds the migration matching the given configuration parameters' do + actual = described_class.for_configuration('MyJobClass', :projects, :id, [[:id], [:id_convert_to_bigint]]) + + expect(actual).to contain_exactly(migration) + end + end end diff --git a/spec/lib/gitlab/database/consistency_spec.rb b/spec/lib/gitlab/database/consistency_spec.rb new file mode 100644 index 00000000000..35fa65512ae --- /dev/null +++ b/spec/lib/gitlab/database/consistency_spec.rb @@ -0,0 +1,25 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 
Gitlab::Database::Consistency do + let(:session) do + Gitlab::Database::LoadBalancing::Session.current + end + + describe '.with_read_consistency' do + it 'sticks to primary database' do + expect(session).not_to be_using_primary + + block = -> (&control) do + described_class.with_read_consistency do + expect(session).to be_using_primary + + control.call + end + end + + expect { |probe| block.call(&probe) }.to yield_control + end + end +end diff --git a/spec/lib/gitlab/database/count/reltuples_count_strategy_spec.rb b/spec/lib/gitlab/database/count/reltuples_count_strategy_spec.rb index 324ed498abc..cdcc862c376 100644 --- a/spec/lib/gitlab/database/count/reltuples_count_strategy_spec.rb +++ b/spec/lib/gitlab/database/count/reltuples_count_strategy_spec.rb @@ -27,7 +27,7 @@ RSpec.describe Gitlab::Database::Count::ReltuplesCountStrategy do end context 'when models using single-type inheritance are used' do - let(:models) { [Group, CiService, Namespace] } + let(:models) { [Group, Integrations::BaseCi, Namespace] } before do models.each do |model| diff --git a/spec/lib/gitlab/database/dynamic_model_helpers_spec.rb b/spec/lib/gitlab/database/dynamic_model_helpers_spec.rb index 23ad621d0ee..0844616ee1c 100644 --- a/spec/lib/gitlab/database/dynamic_model_helpers_spec.rb +++ b/spec/lib/gitlab/database/dynamic_model_helpers_spec.rb @@ -3,12 +3,12 @@ require 'spec_helper' RSpec.describe Gitlab::Database::DynamicModelHelpers do + let(:including_class) { Class.new.include(described_class) } + let(:table_name) { 'projects' } + describe '#define_batchable_model' do subject { including_class.new.define_batchable_model(table_name) } - let(:including_class) { Class.new.include(described_class) } - let(:table_name) { 'projects' } - it 'is an ActiveRecord model' do expect(subject.ancestors).to include(ActiveRecord::Base) end @@ -25,4 +25,86 @@ RSpec.describe Gitlab::Database::DynamicModelHelpers do expect(subject.inheritance_column).to eq('_type_disabled') end end + + describe 
'#each_batch' do + subject { including_class.new } + + before do + create_list(:project, 2) + end + + context 'when no transaction is open' do + before do + allow(subject).to receive(:transaction_open?).and_return(false) + end + + it 'iterates table in batches' do + each_batch_size = ->(&block) do + subject.each_batch(table_name, of: 1) do |batch| + block.call(batch.size) + end + end + + expect { |b| each_batch_size.call(&b) } + .to yield_successive_args(1, 1) + end + end + + context 'when transaction is open' do + before do + allow(subject).to receive(:transaction_open?).and_return(true) + end + + it 'raises an error' do + expect { subject.each_batch(table_name, of: 1) { |batch| batch.size } } + .to raise_error(RuntimeError, /each_batch should not run inside a transaction/) + end + end + end + + describe '#each_batch_range' do + subject { including_class.new } + + let(:first_project) { create(:project) } + let(:second_project) { create(:project) } + + context 'when no transaction is open' do + before do + allow(subject).to receive(:transaction_open?).and_return(false) + end + + it 'iterates table in batch ranges' do + expect { |b| subject.each_batch_range(table_name, of: 1, &b) } + .to yield_successive_args( + [first_project.id, first_project.id], + [second_project.id, second_project.id] + ) + end + + it 'yields only one batch if bigger than the table size' do + expect { |b| subject.each_batch_range(table_name, of: 2, &b) } + .to yield_successive_args([first_project.id, second_project.id]) + end + + it 'makes it possible to apply a scope' do + each_batch_limited = ->(&b) do + subject.each_batch_range(table_name, scope: ->(table) { table.limit(1) }, of: 1, &b) + end + + expect { |b| each_batch_limited.call(&b) } + .to yield_successive_args([first_project.id, first_project.id]) + end + end + + context 'when transaction is open' do + before do + allow(subject).to receive(:transaction_open?).and_return(true) + end + + it 'raises an error' do + expect { 
subject.each_batch_range(table_name, of: 1) { 1 } } + .to raise_error(RuntimeError, /each_batch should not run inside a transaction/) + end + end + end end diff --git a/spec/lib/gitlab/database/load_balancing/active_record_proxy_spec.rb b/spec/lib/gitlab/database/load_balancing/active_record_proxy_spec.rb new file mode 100644 index 00000000000..8886ce9756d --- /dev/null +++ b/spec/lib/gitlab/database/load_balancing/active_record_proxy_spec.rb @@ -0,0 +1,20 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::LoadBalancing::ActiveRecordProxy do + describe '#connection' do + it 'returns a connection proxy' do + dummy = Class.new do + include Gitlab::Database::LoadBalancing::ActiveRecordProxy + end + + proxy = double(:proxy) + + expect(Gitlab::Database::LoadBalancing).to receive(:proxy) + .and_return(proxy) + + expect(dummy.new.connection).to eq(proxy) + end + end +end diff --git a/spec/lib/gitlab/database/load_balancing/connection_proxy_spec.rb b/spec/lib/gitlab/database/load_balancing/connection_proxy_spec.rb new file mode 100644 index 00000000000..015dd2ba8d2 --- /dev/null +++ b/spec/lib/gitlab/database/load_balancing/connection_proxy_spec.rb @@ -0,0 +1,316 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do + let(:proxy) { described_class.new } + + describe '#select' do + it 'performs a read' do + expect(proxy).to receive(:read_using_load_balancer).with(:select, ['foo']) + + proxy.select('foo') + end + end + + describe '#select_all' do + let(:override_proxy) { ActiveRecord::Base.connection.class } + + # We can't use :Gitlab::Utils::Override because this method is dynamically prepended + it 'method signatures match' do + expect(proxy.method(:select_all).parameters).to eq(override_proxy.instance_method(:select_all).parameters) + end + + describe 'using a SELECT query' do + it 'runs the query on a secondary' do + arel = double(:arel) + + 
expect(proxy).to receive(:read_using_load_balancer) + .with(:select_all, [arel, 'foo', []]) + + proxy.select_all(arel, 'foo') + end + end + + describe 'using a SELECT FOR UPDATE query' do + it 'runs the query on the primary and sticks to it' do + arel = double(:arel, locked: true) + + expect(proxy).to receive(:write_using_load_balancer) + .with(:select_all, [arel, 'foo', []], sticky: true) + + proxy.select_all(arel, 'foo') + end + end + end + + Gitlab::Database::LoadBalancing::ConnectionProxy::NON_STICKY_READS.each do |name| + describe "#{name}" do + it 'runs the query on the replica' do + expect(proxy).to receive(:read_using_load_balancer) + .with(name, ['foo']) + + proxy.send(name, 'foo') + end + end + end + + Gitlab::Database::LoadBalancing::ConnectionProxy::STICKY_WRITES.each do |name| + describe "#{name}" do + it 'runs the query on the primary and sticks to it' do + expect(proxy).to receive(:write_using_load_balancer) + .with(name, ['foo'], sticky: true) + + proxy.send(name, 'foo') + end + end + end + + describe '.insert_all!' do + before do + ActiveRecord::Schema.define do + create_table :connection_proxy_bulk_insert, force: true do |t| + t.string :name, null: true + end + end + end + + after do + ActiveRecord::Schema.define do + drop_table :connection_proxy_bulk_insert, force: true + end + end + + let(:model_class) do + Class.new(ApplicationRecord) do + self.table_name = "connection_proxy_bulk_insert" + end + end + + it 'inserts data in bulk' do + expect(model_class).to receive(:connection) + .at_least(:once) + .and_return(proxy) + + expect(proxy).to receive(:write_using_load_balancer) + .at_least(:once) + .and_call_original + + expect do + model_class.insert_all! [ + { name: "item1" }, + { name: "item2" } + ] + end.to change { model_class.count }.by(2) + end + end + + # We have an extra test for #transaction here to make sure that nested queries + # are also sent to a primary. 
+ describe '#transaction' do + let(:session) { double(:session) } + + before do + allow(Gitlab::Database::LoadBalancing::Session).to receive(:current) + .and_return(session) + end + + context 'session fallbacks ambiguous queries to replicas' do + let(:replica) { double(:connection) } + + before do + allow(session).to receive(:fallback_to_replicas_for_ambiguous_queries?).and_return(true) + allow(session).to receive(:use_primary?).and_return(false) + allow(replica).to receive(:transaction).and_yield + allow(replica).to receive(:select) + end + + context 'with a read query' do + it 'runs the transaction and any nested queries on the replica' do + expect(proxy.load_balancer).to receive(:read) + .twice.and_yield(replica) + expect(proxy.load_balancer).not_to receive(:read_write) + expect(session).not_to receive(:write!) + + proxy.transaction { proxy.select('true') } + end + end + + context 'with a write query' do + it 'raises an exception' do + allow(proxy.load_balancer).to receive(:read).and_yield(replica) + allow(proxy.load_balancer).to receive(:read_write).and_yield(replica) + + expect do + proxy.transaction { proxy.insert('something') } + end.to raise_error(Gitlab::Database::LoadBalancing::ConnectionProxy::WriteInsideReadOnlyTransactionError) + end + end + end + + context 'session does not fallback to replicas for ambiguous queries' do + let(:primary) { double(:connection) } + + before do + allow(session).to receive(:fallback_to_replicas_for_ambiguous_queries?).and_return(false) + allow(session).to receive(:use_replicas_for_read_queries?).and_return(false) + allow(session).to receive(:use_primary?).and_return(true) + allow(primary).to receive(:transaction).and_yield + allow(primary).to receive(:select) + allow(primary).to receive(:insert) + end + + context 'with a read query' do + it 'runs the transaction and any nested queries on the primary and stick to it' do + expect(proxy.load_balancer).to receive(:read_write) + .twice.and_yield(primary) + 
expect(proxy.load_balancer).not_to receive(:read) + expect(session).to receive(:write!) + + proxy.transaction { proxy.select('true') } + end + end + + context 'with a write query' do + it 'runs the transaction and any nested queries on the primary and stick to it' do + expect(proxy.load_balancer).to receive(:read_write) + .twice.and_yield(primary) + expect(proxy.load_balancer).not_to receive(:read) + expect(session).to receive(:write!).twice + + proxy.transaction { proxy.insert('something') } + end + end + end + end + + describe '#method_missing' do + it 'runs the query on the primary without sticking to it' do + expect(proxy).to receive(:write_using_load_balancer) + .with(:foo, ['foo']) + + proxy.foo('foo') + end + + it 'properly forwards trailing hash arguments' do + allow(proxy.load_balancer).to receive(:read_write) + + expect(proxy).to receive(:write_using_load_balancer).and_call_original + + expect { proxy.case_sensitive_comparison(:table, :attribute, :column, { value: :value, format: :format }) } + .not_to raise_error + end + + context 'current session prefers to fallback ambiguous queries to replicas' do + let(:session) { double(:session) } + + before do + allow(Gitlab::Database::LoadBalancing::Session).to receive(:current) + .and_return(session) + allow(session).to receive(:fallback_to_replicas_for_ambiguous_queries?).and_return(true) + allow(session).to receive(:use_primary?).and_return(false) + end + + it 'runs the query on the replica' do + expect(proxy).to receive(:read_using_load_balancer).with(:foo, ['foo']) + + proxy.foo('foo') + end + + it 'properly forwards trailing hash arguments' do + allow(proxy.load_balancer).to receive(:read) + + expect(proxy).to receive(:read_using_load_balancer).and_call_original + + expect { proxy.case_sensitive_comparison(:table, :attribute, :column, { value: :value, format: :format }) } + .not_to raise_error + end + end + end + + describe '#read_using_load_balancer' do + let(:session) { double(:session) } + 
let(:connection) { double(:connection) } + + before do + allow(Gitlab::Database::LoadBalancing::Session).to receive(:current) + .and_return(session) + end + + context 'with a regular session' do + it 'uses a secondary' do + allow(session).to receive(:use_primary?).and_return(false) + allow(session).to receive(:use_replicas_for_read_queries?).and_return(false) + + expect(connection).to receive(:foo).with('foo') + expect(proxy.load_balancer).to receive(:read).and_yield(connection) + + proxy.read_using_load_balancer(:foo, ['foo']) + end + end + + context 'with a regular session and forcing all reads to replicas' do + it 'uses a secondary' do + allow(session).to receive(:use_primary?).and_return(false) + allow(session).to receive(:use_replicas_for_read_queries?).and_return(true) + + expect(connection).to receive(:foo).with('foo') + expect(proxy.load_balancer).to receive(:read).and_yield(connection) + + proxy.read_using_load_balancer(:foo, ['foo']) + end + end + + context 'with a session using the primary but forcing all reads to replicas' do + it 'uses a secondary' do + allow(session).to receive(:use_primary?).and_return(true) + allow(session).to receive(:use_replicas_for_read_queries?).and_return(true) + + expect(connection).to receive(:foo).with('foo') + expect(proxy.load_balancer).to receive(:read).and_yield(connection) + + proxy.read_using_load_balancer(:foo, ['foo']) + end + end + + describe 'with a session using the primary' do + it 'uses the primary' do + allow(session).to receive(:use_primary?).and_return(true) + allow(session).to receive(:use_replicas_for_read_queries?).and_return(false) + + expect(connection).to receive(:foo).with('foo') + + expect(proxy.load_balancer).to receive(:read_write) + .and_yield(connection) + + proxy.read_using_load_balancer(:foo, ['foo']) + end + end + end + + describe '#write_using_load_balancer' do + let(:session) { double(:session) } + let(:connection) { double(:connection) } + + before do + 
allow(Gitlab::Database::LoadBalancing::Session).to receive(:current) + .and_return(session) + end + + it 'uses but does not stick to the primary when sticking is disabled' do + expect(proxy.load_balancer).to receive(:read_write).and_yield(connection) + expect(connection).to receive(:foo).with('foo') + expect(session).not_to receive(:write!) + + proxy.write_using_load_balancer(:foo, ['foo']) + end + + it 'sticks to the primary when sticking is enabled' do + expect(proxy.load_balancer).to receive(:read_write).and_yield(connection) + expect(connection).to receive(:foo).with('foo') + expect(session).to receive(:write!) + + proxy.write_using_load_balancer(:foo, ['foo'], sticky: true) + end + end +end diff --git a/spec/lib/gitlab/database/load_balancing/host_list_spec.rb b/spec/lib/gitlab/database/load_balancing/host_list_spec.rb new file mode 100644 index 00000000000..873b599f84d --- /dev/null +++ b/spec/lib/gitlab/database/load_balancing/host_list_spec.rb @@ -0,0 +1,188 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::LoadBalancing::HostList do + def expect_metrics(hosts) + expect(Gitlab::Metrics.registry.get(:db_load_balancing_hosts).get({})).to eq(hosts) + end + + before do + allow(Gitlab::Database) + .to receive(:create_connection_pool) + .and_return(ActiveRecord::Base.connection_pool) + end + + let(:load_balancer) { double(:load_balancer) } + let(:host_count) { 2 } + + let(:host_list) do + hosts = Array.new(host_count) do + Gitlab::Database::LoadBalancing::Host.new('localhost', load_balancer, port: 5432) + end + + described_class.new(hosts) + end + + describe '#initialize' do + it 'sets metrics for current number of hosts and current index' do + host_list + + expect_metrics(2) + end + end + + describe '#length' do + it 'returns the number of hosts in the list' do + expect(host_list.length).to eq(2) + end + end + + describe '#host_names_and_ports' do + context 'with ports' do + it 'returns the host names of all hosts' do 
+ hosts = [ + ['localhost', 5432], + ['localhost', 5432] + ] + + expect(host_list.host_names_and_ports).to eq(hosts) + end + end + + context 'without ports' do + let(:host_list) do + hosts = Array.new(2) do + Gitlab::Database::LoadBalancing::Host.new('localhost', load_balancer) + end + + described_class.new(hosts) + end + + it 'returns the host names of all hosts' do + hosts = [ + ['localhost', nil], + ['localhost', nil] + ] + + expect(host_list.host_names_and_ports).to eq(hosts) + end + end + end + + describe '#manage_pool?' do + before do + allow(Gitlab::Database).to receive(:create_connection_pool) { double(:connection) } + end + + context 'when the testing pool belongs to one host of the host list' do + it 'returns true' do + pool = host_list.hosts.first.pool + + expect(host_list.manage_pool?(pool)).to be(true) + end + end + + context 'when the testing pool belongs to a former host of the host list' do + it 'returns false' do + pool = host_list.hosts.first.pool + host_list.hosts = [ + Gitlab::Database::LoadBalancing::Host.new('foo', load_balancer) + ] + + expect(host_list.manage_pool?(pool)).to be(false) + end + end + + context 'when the testing pool belongs to a new host of the host list' do + it 'returns true' do + host = Gitlab::Database::LoadBalancing::Host.new('foo', load_balancer) + host_list.hosts = [host] + + expect(host_list.manage_pool?(host.pool)).to be(true) + end + end + + context 'when the testing pool does not have any relation with the host list' do + it 'returns false' do + host = Gitlab::Database::LoadBalancing::Host.new('foo', load_balancer) + + expect(host_list.manage_pool?(host.pool)).to be(false) + end + end + end + + describe '#hosts' do + it 'returns a copy of the host' do + first = host_list.hosts + + expect(host_list.hosts).to eq(first) + expect(host_list.hosts.object_id).not_to eq(first.object_id) + end + end + + describe '#hosts=' do + it 'updates the list of hosts to use' do + host_list.hosts = [ + 
Gitlab::Database::LoadBalancing::Host.new('foo', load_balancer) + ] + + expect(host_list.length).to eq(1) + expect(host_list.hosts[0].host).to eq('foo') + expect_metrics(1) + end + end + + describe '#next' do + it 'returns a host' do + expect(host_list.next) + .to be_an_instance_of(Gitlab::Database::LoadBalancing::Host) + end + + it 'cycles through all available hosts' do + expect(host_list.next).to eq(host_list.hosts[0]) + expect_metrics(2) + + expect(host_list.next).to eq(host_list.hosts[1]) + expect_metrics(2) + + expect(host_list.next).to eq(host_list.hosts[0]) + expect_metrics(2) + end + + it 'skips hosts that are offline' do + allow(host_list.hosts[0]).to receive(:online?).and_return(false) + + expect(host_list.next).to eq(host_list.hosts[1]) + expect_metrics(2) + end + + it 'returns nil if no hosts are online' do + host_list.hosts.each do |host| + allow(host).to receive(:online?).and_return(false) + end + + expect(host_list.next).to be_nil + expect_metrics(2) + end + + it 'returns nil if no hosts are available' do + expect(described_class.new.next).to be_nil + end + end + + describe '#shuffle' do + let(:host_count) { 3 } + + it 'randomizes the list' do + 2.times do + all_hosts = host_list.hosts + + host_list.shuffle + + expect(host_list.length).to eq(host_count) + expect(host_list.hosts).to contain_exactly(*all_hosts) + end + end + end +end diff --git a/spec/lib/gitlab/database/load_balancing/host_spec.rb b/spec/lib/gitlab/database/load_balancing/host_spec.rb new file mode 100644 index 00000000000..4dfddef68c8 --- /dev/null +++ b/spec/lib/gitlab/database/load_balancing/host_spec.rb @@ -0,0 +1,445 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::LoadBalancing::Host do + let(:load_balancer) do + Gitlab::Database::LoadBalancing::LoadBalancer.new(%w[localhost]) + end + + let(:host) { load_balancer.host_list.hosts.first } + + before do + allow(Gitlab::Database).to receive(:create_connection_pool) + 
.and_return(ActiveRecord::Base.connection_pool) + end + + def raise_and_wrap(wrapper, original) + raise original + rescue original.class + raise wrapper, 'boom' + end + + def wrapped_exception(wrapper, original) + raise_and_wrap(wrapper, original.new) + rescue wrapper => error + error + end + + describe '#connection' do + it 'returns a connection from the pool' do + expect(host.pool).to receive(:connection) + + host.connection + end + end + + describe '#disconnect!' do + it 'disconnects the pool' do + connection = double(:connection, in_use?: false) + pool = double(:pool, connections: [connection]) + + allow(host) + .to receive(:pool) + .and_return(pool) + + expect(host) + .not_to receive(:sleep) + + expect(host.pool) + .to receive(:disconnect!) + + host.disconnect! + end + + it 'disconnects the pool when waiting for connections takes too long' do + connection = double(:connection, in_use?: true) + pool = double(:pool, connections: [connection]) + + allow(host) + .to receive(:pool) + .and_return(pool) + + expect(host.pool) + .to receive(:disconnect!) + + host.disconnect!(1) + end + end + + describe '#release_connection' do + it 'releases the current connection from the pool' do + expect(host.pool).to receive(:release_connection) + + host.release_connection + end + end + + describe '#offline!' do + it 'marks the host as offline' do + expect(host.pool).to receive(:disconnect!) + + expect(Gitlab::Database::LoadBalancing::Logger).to receive(:warn) + .with(hash_including(event: :host_offline)) + .and_call_original + + host.offline! + end + end + + describe '#online?' 
do + context 'when the replica status is recent enough' do + before do + expect(host).to receive(:check_replica_status?).and_return(false) + end + + it 'returns the latest status' do + expect(host).not_to receive(:refresh_status) + expect(Gitlab::Database::LoadBalancing::Logger).not_to receive(:info) + expect(Gitlab::Database::LoadBalancing::Logger).not_to receive(:warn) + + expect(host).to be_online + end + + it 'returns an offline status' do + host.offline! + + expect(host).not_to receive(:refresh_status) + expect(Gitlab::Database::LoadBalancing::Logger).not_to receive(:info) + expect(Gitlab::Database::LoadBalancing::Logger).not_to receive(:warn) + + expect(host).not_to be_online + end + end + + context 'when the replica status is outdated' do + before do + expect(host) + .to receive(:check_replica_status?) + .and_return(true) + end + + it 'refreshes the status' do + expect(Gitlab::Database::LoadBalancing::Logger).to receive(:info) + .with(hash_including(event: :host_online)) + .and_call_original + + expect(host).to be_online + end + + context 'and replica is not up to date' do + before do + expect(host).to receive(:replica_is_up_to_date?).and_return(false) + end + + it 'marks the host offline' do + expect(Gitlab::Database::LoadBalancing::Logger).to receive(:warn) + .with(hash_including(event: :host_offline)) + .and_call_original + + expect(host).not_to be_online + end + end + end + + context 'when the replica is not online' do + it 'returns false when ActionView::Template::Error is raised' do + wrapped_error = wrapped_exception(ActionView::Template::Error, StandardError) + + allow(host) + .to receive(:check_replica_status?) + .and_raise(wrapped_error) + + expect(host).not_to be_online + end + + it 'returns false when ActiveRecord::StatementInvalid is raised' do + allow(host) + .to receive(:check_replica_status?) 
+ .and_raise(ActiveRecord::StatementInvalid.new('foo')) + + expect(host).not_to be_online + end + + it 'returns false when PG::Error is raised' do + allow(host) + .to receive(:check_replica_status?) + .and_raise(PG::Error) + + expect(host).not_to be_online + end + end + end + + describe '#refresh_status' do + it 'refreshes the status' do + host.offline! + + expect(host) + .to receive(:replica_is_up_to_date?) + .and_call_original + + host.refresh_status + + expect(host).to be_online + end + end + + describe '#check_replica_status?' do + it 'returns true when we need to check the replica status' do + allow(host) + .to receive(:last_checked_at) + .and_return(1.year.ago) + + expect(host.check_replica_status?).to eq(true) + end + + it 'returns false when we do not need to check the replica status' do + freeze_time do + allow(host) + .to receive(:last_checked_at) + .and_return(Time.zone.now) + + expect(host.check_replica_status?).to eq(false) + end + end + end + + describe '#replica_is_up_to_date?' do + context 'when the lag time is below the threshold' do + it 'returns true' do + expect(host) + .to receive(:replication_lag_below_threshold?) + .and_return(true) + + expect(host.replica_is_up_to_date?).to eq(true) + end + end + + context 'when the lag time exceeds the threshold' do + before do + allow(host) + .to receive(:replication_lag_below_threshold?) + .and_return(false) + end + + it 'returns true if the data is recent enough' do + expect(host) + .to receive(:data_is_recent_enough?) + .and_return(true) + + expect(host.replica_is_up_to_date?).to eq(true) + end + + it 'returns false when the data is not recent enough' do + expect(host) + .to receive(:data_is_recent_enough?) 
+ .and_return(false) + + expect(host.replica_is_up_to_date?).to eq(false) + end + end + end + + describe '#replication_lag_below_threshold' do + it 'returns true when the lag time is below the threshold' do + expect(host) + .to receive(:replication_lag_time) + .and_return(1) + + expect(host.replication_lag_below_threshold?).to eq(true) + end + + it 'returns false when the lag time exceeds the threshold' do + expect(host) + .to receive(:replication_lag_time) + .and_return(9000) + + expect(host.replication_lag_below_threshold?).to eq(false) + end + + it 'returns false when no lag time could be calculated' do + expect(host) + .to receive(:replication_lag_time) + .and_return(nil) + + expect(host.replication_lag_below_threshold?).to eq(false) + end + end + + describe '#data_is_recent_enough?' do + it 'returns true when the data is recent enough' do + expect(host.data_is_recent_enough?).to eq(true) + end + + it 'returns false when the data is not recent enough' do + diff = Gitlab::Database::LoadBalancing.max_replication_difference * 2 + + expect(host) + .to receive(:query_and_release) + .and_return({ 'diff' => diff }) + + expect(host.data_is_recent_enough?).to eq(false) + end + + it 'returns false when no lag size could be calculated' do + expect(host) + .to receive(:replication_lag_size) + .and_return(nil) + + expect(host.data_is_recent_enough?).to eq(false) + end + end + + describe '#replication_lag_time' do + it 'returns the lag time as a Float' do + expect(host.replication_lag_time).to be_an_instance_of(Float) + end + + it 'returns nil when the database query returned no rows' do + expect(host) + .to receive(:query_and_release) + .and_return({}) + + expect(host.replication_lag_time).to be_nil + end + end + + describe '#replication_lag_size' do + it 'returns the lag size as an Integer' do + expect(host.replication_lag_size).to be_an_instance_of(Integer) + end + + it 'returns nil when the database query returned no rows' do + expect(host) + .to 
receive(:query_and_release) + .and_return({}) + + expect(host.replication_lag_size).to be_nil + end + + it 'returns nil when the database connection fails' do + wrapped_error = wrapped_exception(ActionView::Template::Error, StandardError) + + allow(host) + .to receive(:connection) + .and_raise(wrapped_error) + + expect(host.replication_lag_size).to be_nil + end + end + + describe '#primary_write_location' do + it 'returns the write location of the primary' do + expect(host.primary_write_location).to be_an_instance_of(String) + expect(host.primary_write_location).not_to be_empty + end + end + + describe '#caught_up?' do + let(:connection) { double(:connection) } + + before do + allow(connection).to receive(:quote).and_return('foo') + end + + it 'returns true when a host has caught up' do + allow(host).to receive(:connection).and_return(connection) + expect(connection).to receive(:select_all).and_return([{ 'result' => 't' }]) + + expect(host.caught_up?('foo')).to eq(true) + end + + it 'returns true when a host has caught up' do + allow(host).to receive(:connection).and_return(connection) + expect(connection).to receive(:select_all).and_return([{ 'result' => true }]) + + expect(host.caught_up?('foo')).to eq(true) + end + + it 'returns false when a host has not caught up' do + allow(host).to receive(:connection).and_return(connection) + expect(connection).to receive(:select_all).and_return([{ 'result' => 'f' }]) + + expect(host.caught_up?('foo')).to eq(false) + end + + it 'returns false when a host has not caught up' do + allow(host).to receive(:connection).and_return(connection) + expect(connection).to receive(:select_all).and_return([{ 'result' => false }]) + + expect(host.caught_up?('foo')).to eq(false) + end + + it 'returns false when the connection fails' do + wrapped_error = wrapped_exception(ActionView::Template::Error, StandardError) + + allow(host) + .to receive(:connection) + .and_raise(wrapped_error) + + expect(host.caught_up?('foo')).to eq(false) + end + 
end + + describe '#database_replica_location' do + let(:connection) { double(:connection) } + + it 'returns the write ahead location of the replica', :aggregate_failures do + expect(host) + .to receive(:query_and_release) + .and_return({ 'location' => '0/D525E3A8' }) + + expect(host.database_replica_location).to be_an_instance_of(String) + end + + it 'returns nil when the database query returned no rows' do + expect(host) + .to receive(:query_and_release) + .and_return({}) + + expect(host.database_replica_location).to be_nil + end + + it 'returns nil when the database connection fails' do + wrapped_error = wrapped_exception(ActionView::Template::Error, StandardError) + + allow(host) + .to receive(:connection) + .and_raise(wrapped_error) + + expect(host.database_replica_location).to be_nil + end + end + + describe '#query_and_release' do + it 'executes a SQL query' do + results = host.query_and_release('SELECT 10 AS number') + + expect(results).to be_an_instance_of(Hash) + expect(results['number'].to_i).to eq(10) + end + + it 'releases the connection after running the query' do + expect(host) + .to receive(:release_connection) + .once + + host.query_and_release('SELECT 10 AS number') + end + + it 'returns an empty Hash in the event of an error' do + expect(host.connection) + .to receive(:select_all) + .and_raise(RuntimeError, 'kittens') + + expect(host.query_and_release('SELECT 10 AS number')).to eq({}) + end + end + + describe '#host' do + it 'returns the hostname' do + expect(host.host).to eq('localhost') + end + end +end diff --git a/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb new file mode 100644 index 00000000000..4705bb23885 --- /dev/null +++ b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb @@ -0,0 +1,522 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do + let(:pool) { 
Gitlab::Database.create_connection_pool(2) } + let(:conflict_error) { Class.new(RuntimeError) } + + let(:lb) { described_class.new(%w(localhost localhost)) } + + before do + allow(Gitlab::Database).to receive(:create_connection_pool) + .and_return(pool) + stub_const( + 'Gitlab::Database::LoadBalancing::LoadBalancer::PG::TRSerializationFailure', + conflict_error + ) + end + + def raise_and_wrap(wrapper, original) + raise original + rescue original.class + raise wrapper, 'boop' + end + + def wrapped_exception(wrapper, original) + raise_and_wrap(wrapper, original.new) + rescue wrapper => error + error + end + + def twice_wrapped_exception(top, middle, original) + begin + raise_and_wrap(middle, original.new) + rescue middle => middle_error + raise_and_wrap(top, middle_error) + end + rescue top => top_error + top_error + end + + describe '#read' do + it 'yields a connection for a read' do + connection = double(:connection) + host = double(:host) + + allow(lb).to receive(:host).and_return(host) + allow(host).to receive(:query_cache_enabled).and_return(true) + + expect(host).to receive(:connection).and_return(connection) + + expect { |b| lb.read(&b) }.to yield_with_args(connection) + end + + it 'ensures that query cache is enabled' do + connection = double(:connection) + host = double(:host) + + allow(lb).to receive(:host).and_return(host) + allow(host).to receive(:query_cache_enabled).and_return(false) + allow(host).to receive(:connection).and_return(connection) + + expect(host).to receive(:enable_query_cache!).once + + lb.read { 10 } + end + + it 'marks hosts that are offline' do + allow(lb).to receive(:connection_error?).and_return(true) + + expect(lb.host_list.hosts[0]).to receive(:offline!) 
+ expect(lb).to receive(:release_host) + + raised = false + + returned = lb.read do + unless raised + raised = true + raise + end + + 10 + end + + expect(returned).to eq(10) + end + + it 'retries a query in the event of a serialization failure' do + raised = false + + expect(lb).to receive(:release_host) + + returned = lb.read do + unless raised + raised = true + raise conflict_error + end + + 10 + end + + expect(returned).to eq(10) + end + + it 'retries every host at most 3 times when a query conflict is raised' do + expect(lb).to receive(:release_host).exactly(6).times + expect(lb).to receive(:read_write) + + lb.read { raise conflict_error } + end + + it 'uses the primary if no secondaries are available' do + allow(lb).to receive(:connection_error?).and_return(true) + + expect(lb.host_list.hosts).to all(receive(:online?).and_return(false)) + + expect(lb).to receive(:read_write).and_call_original + + expect { |b| lb.read(&b) } + .to yield_with_args(ActiveRecord::Base.retrieve_connection) + end + end + + describe '#read_write' do + it 'yields a connection for a write' do + expect { |b| lb.read_write(&b) } + .to yield_with_args(ActiveRecord::Base.retrieve_connection) + end + + it 'uses a retry with exponential backoffs' do + expect(lb).to receive(:retry_with_backoff).and_yield + + lb.read_write { 10 } + end + end + + describe '#db_role_for_connection' do + context 'when the load balancer creates the connection with #read' do + it 'returns :replica' do + role = nil + lb.read do |connection| + role = lb.db_role_for_connection(connection) + end + + expect(role).to be(:replica) + end + end + + context 'when the load balancer uses nested #read' do + it 'returns :replica' do + roles = [] + lb.read do |connection_1| + lb.read do |connection_2| + roles << lb.db_role_for_connection(connection_2) + end + roles << lb.db_role_for_connection(connection_1) + end + + expect(roles).to eq([:replica, :replica]) + end + end + + context 'when the load balancer creates the connection 
with #read_write' do + it 'returns :primary' do + role = nil + lb.read_write do |connection| + role = lb.db_role_for_connection(connection) + end + + expect(role).to be(:primary) + end + end + + context 'when the load balancer uses nested #read_write' do + it 'returns :primary' do + roles = [] + lb.read_write do |connection_1| + lb.read_write do |connection_2| + roles << lb.db_role_for_connection(connection_2) + end + roles << lb.db_role_for_connection(connection_1) + end + + expect(roles).to eq([:primary, :primary]) + end + end + + context 'when the load balancer falls back the connection creation to primary' do + it 'returns :primary' do + allow(lb).to receive(:serialization_failure?).and_return(true) + + role = nil + raised = 7 # 2 hosts = 6 retries + + lb.read do |connection| + if raised > 0 + raised -= 1 + raise + end + + role = lb.db_role_for_connection(connection) + end + + expect(role).to be(:primary) + end + end + + context 'when the load balancer uses replica after recovery from a failure' do + it 'returns :replica' do + allow(lb).to receive(:connection_error?).and_return(true) + + role = nil + raised = false + + lb.read do |connection| + unless raised + raised = true + raise + end + + role = lb.db_role_for_connection(connection) + end + + expect(role).to be(:replica) + end + end + + context 'when the connection comes from a pool managed by the host list' do + it 'returns :replica' do + connection = double(:connection) + allow(connection).to receive(:pool).and_return(lb.host_list.hosts.first.pool) + + expect(lb.db_role_for_connection(connection)).to be(:replica) + end + end + + context 'when the connection comes from the primary pool' do + it 'returns :primary' do + connection = double(:connection) + allow(connection).to receive(:pool).and_return(ActiveRecord::Base.connection_pool) + + expect(lb.db_role_for_connection(connection)).to be(:primary) + end + end + + context 'when the connection does not come from any known pool' do + it 'returns nil' do + 
connection = double(:connection) + pool = double(:connection_pool) + allow(connection).to receive(:pool).and_return(pool) + + expect(lb.db_role_for_connection(connection)).to be(nil) + end + end + end + + describe '#host' do + it 'returns the secondary host to use' do + expect(lb.host).to be_an_instance_of(Gitlab::Database::LoadBalancing::Host) + end + + it 'stores the host in a thread-local variable' do + RequestStore.delete(described_class::CACHE_KEY) + RequestStore.delete(described_class::VALID_HOSTS_CACHE_KEY) + + expect(lb.host_list).to receive(:next).once.and_call_original + + lb.host + lb.host + end + end + + describe '#release_host' do + it 'releases the host and its connection' do + host = lb.host + + expect(host).to receive(:disable_query_cache!) + + lb.release_host + + expect(RequestStore[described_class::CACHE_KEY]).to be_nil + expect(RequestStore[described_class::VALID_HOSTS_CACHE_KEY]).to be_nil + end + end + + describe '#release_primary_connection' do + it 'releases the connection to the primary' do + expect(ActiveRecord::Base.connection_pool).to receive(:release_connection) + + lb.release_primary_connection + end + end + + describe '#primary_write_location' do + it 'returns a String in the right format' do + expect(lb.primary_write_location).to match(%r{[A-F0-9]{1,8}/[A-F0-9]{1,8}}) + end + + it 'raises an error if the write location could not be retrieved' do + connection = double(:connection) + + allow(lb).to receive(:read_write).and_yield(connection) + allow(connection).to receive(:select_all).and_return([]) + + expect { lb.primary_write_location }.to raise_error(RuntimeError) + end + end + + describe '#all_caught_up?' do + it 'returns true if all hosts caught up to the write location' do + expect(lb.host_list.hosts).to all(receive(:caught_up?).with('foo').and_return(true)) + + expect(lb.all_caught_up?('foo')).to eq(true) + end + + it 'returns false if a host has not yet caught up' do + expect(lb.host_list.hosts[0]).to receive(:caught_up?) 
+ .with('foo') + .and_return(true) + + expect(lb.host_list.hosts[1]).to receive(:caught_up?) + .with('foo') + .and_return(false) + + expect(lb.all_caught_up?('foo')).to eq(false) + end + end + + describe '#retry_with_backoff' do + it 'returns the value returned by the block' do + value = lb.retry_with_backoff { 10 } + + expect(value).to eq(10) + end + + it 're-raises errors not related to database connections' do + expect(lb).not_to receive(:sleep) # to make sure we're not retrying + + expect { lb.retry_with_backoff { raise 'boop' } } + .to raise_error(RuntimeError) + end + + it 'retries the block when a connection error is raised' do + allow(lb).to receive(:connection_error?).and_return(true) + expect(lb).to receive(:sleep).with(2) + expect(lb).to receive(:release_primary_connection) + + raised = false + returned = lb.retry_with_backoff do + unless raised + raised = true + raise + end + + 10 + end + + expect(returned).to eq(10) + end + + it 're-raises the connection error if the retries did not succeed' do + allow(lb).to receive(:connection_error?).and_return(true) + expect(lb).to receive(:sleep).with(2).ordered + expect(lb).to receive(:sleep).with(4).ordered + expect(lb).to receive(:sleep).with(16).ordered + + expect(lb).to receive(:release_primary_connection).exactly(3).times + + expect { lb.retry_with_backoff { raise } }.to raise_error(RuntimeError) + end + end + + describe '#connection_error?' 
do + before do + stub_const('Gitlab::Database::LoadBalancing::LoadBalancer::CONNECTION_ERRORS', + [NotImplementedError]) + end + + it 'returns true for a connection error' do + error = NotImplementedError.new + + expect(lb.connection_error?(error)).to eq(true) + end + + it 'returns true for a wrapped connection error' do + wrapped = wrapped_exception(ActiveRecord::StatementInvalid, NotImplementedError) + + expect(lb.connection_error?(wrapped)).to eq(true) + end + + it 'returns true for a wrapped connection error from a view' do + wrapped = wrapped_exception(ActionView::Template::Error, NotImplementedError) + + expect(lb.connection_error?(wrapped)).to eq(true) + end + + it 'returns true for deeply wrapped/nested errors' do + top = twice_wrapped_exception(ActionView::Template::Error, ActiveRecord::StatementInvalid, NotImplementedError) + + expect(lb.connection_error?(top)).to eq(true) + end + + it 'returns true for an invalid encoding error' do + error = RuntimeError.new('invalid encoding name: unicode') + + expect(lb.connection_error?(error)).to eq(true) + end + + it 'returns false for errors not related to database connections' do + error = RuntimeError.new + + expect(lb.connection_error?(error)).to eq(false) + end + end + + describe '#serialization_failure?' 
do + let(:conflict_error) { Class.new(RuntimeError) } + + before do + stub_const( + 'Gitlab::Database::LoadBalancing::LoadBalancer::PG::TRSerializationFailure', + conflict_error + ) + end + + it 'returns for a serialization error' do + expect(lb.serialization_failure?(conflict_error.new)).to eq(true) + end + + it 'returns true for a wrapped error' do + wrapped = wrapped_exception(ActionView::Template::Error, conflict_error) + + expect(lb.serialization_failure?(wrapped)).to eq(true) + end + end + + describe '#select_caught_up_hosts' do + let(:location) { 'AB/12345'} + let(:hosts) { lb.host_list.hosts } + let(:valid_host_list) { RequestStore[described_class::VALID_HOSTS_CACHE_KEY] } + let(:valid_hosts) { valid_host_list.hosts } + + subject { lb.select_caught_up_hosts(location) } + + context 'when all replicas are caught up' do + before do + expect(hosts).to all(receive(:caught_up?).with(location).and_return(true)) + end + + it 'returns true and sets all hosts to valid' do + expect(subject).to be true + expect(valid_host_list).to be_a(Gitlab::Database::LoadBalancing::HostList) + expect(valid_hosts).to contain_exactly(*hosts) + end + end + + context 'when none of the replicas are caught up' do + before do + expect(hosts).to all(receive(:caught_up?).with(location).and_return(false)) + end + + it 'returns false and does not set the valid hosts' do + expect(subject).to be false + expect(valid_host_list).to be_nil + end + end + + context 'when one of the replicas is caught up' do + before do + expect(hosts[0]).to receive(:caught_up?).with(location).and_return(false) + expect(hosts[1]).to receive(:caught_up?).with(location).and_return(true) + end + + it 'returns true and sets one host to valid' do + expect(subject).to be true + expect(valid_host_list).to be_a(Gitlab::Database::LoadBalancing::HostList) + expect(valid_hosts).to contain_exactly(hosts[1]) + end + + it 'host always returns the caught-up replica' do + subject + + 3.times do + expect(lb.host).to eq(hosts[1]) + 
RequestStore.delete(described_class::CACHE_KEY) + end + end + end + end + + describe '#select_caught_up_hosts' do + let(:location) { 'AB/12345'} + let(:hosts) { lb.host_list.hosts } + let(:set_host) { RequestStore[described_class::CACHE_KEY] } + + subject { lb.select_up_to_date_host(location) } + + context 'when none of the replicas are caught up' do + before do + expect(hosts).to all(receive(:caught_up?).with(location).and_return(false)) + end + + it 'returns false and does not update the host thread-local variable' do + expect(subject).to be false + expect(set_host).to be_nil + end + end + + context 'when any of the replicas is caught up' do + before do + # `allow` for non-caught up host, because we may not even check it, if will find the caught up one earlier + allow(hosts[0]).to receive(:caught_up?).with(location).and_return(false) + expect(hosts[1]).to receive(:caught_up?).with(location).and_return(true) + end + + it 'returns true and sets host thread-local variable' do + expect(subject).to be true + expect(set_host).to eq(hosts[1]) + end + end + end +end diff --git a/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb new file mode 100644 index 00000000000..01367716518 --- /dev/null +++ b/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb @@ -0,0 +1,243 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::LoadBalancing::RackMiddleware, :redis do + let(:app) { double(:app) } + let(:middleware) { described_class.new(app) } + let(:warden_user) { double(:warden, user: double(:user, id: 42)) } + let(:single_sticking_object) { Set.new([[:user, 42]]) } + let(:multiple_sticking_objects) do + Set.new([ + [:user, 42], + [:runner, '123456789'], + [:runner, '1234'] + ]) + end + + after do + Gitlab::Database::LoadBalancing::Session.clear_session + end + + describe '.stick_or_unstick' do + before do + allow(Gitlab::Database::LoadBalancing).to 
receive(:enable?) + .and_return(true) + end + + it 'sticks or unsticks a single object and updates the Rack environment' do + expect(Gitlab::Database::LoadBalancing::Sticking) + .to receive(:unstick_or_continue_sticking) + .with(:user, 42) + + env = {} + + described_class.stick_or_unstick(env, :user, 42) + + expect(env[described_class::STICK_OBJECT].to_a).to eq([[:user, 42]]) + end + + it 'sticks or unsticks multiple objects and updates the Rack environment' do + expect(Gitlab::Database::LoadBalancing::Sticking) + .to receive(:unstick_or_continue_sticking) + .with(:user, 42) + .ordered + + expect(Gitlab::Database::LoadBalancing::Sticking) + .to receive(:unstick_or_continue_sticking) + .with(:runner, '123456789') + .ordered + + env = {} + + described_class.stick_or_unstick(env, :user, 42) + described_class.stick_or_unstick(env, :runner, '123456789') + + expect(env[described_class::STICK_OBJECT].to_a).to eq([ + [:user, 42], + [:runner, '123456789'] + ]) + end + end + + describe '#call' do + it 'handles a request' do + env = {} + + expect(middleware).to receive(:clear).twice + + expect(middleware).to receive(:unstick_or_continue_sticking).with(env) + expect(middleware).to receive(:stick_if_necessary).with(env) + + expect(app).to receive(:call).with(env).and_return(10) + + expect(middleware.call(env)).to eq(10) + end + end + + describe '#unstick_or_continue_sticking' do + it 'does not stick if no namespace and identifier could be found' do + expect(Gitlab::Database::LoadBalancing::Sticking) + .not_to receive(:unstick_or_continue_sticking) + + middleware.unstick_or_continue_sticking({}) + end + + it 'sticks to the primary if a warden user is found' do + env = { 'warden' => warden_user } + + expect(Gitlab::Database::LoadBalancing::Sticking) + .to receive(:unstick_or_continue_sticking) + .with(:user, 42) + + middleware.unstick_or_continue_sticking(env) + end + + it 'sticks to the primary if a sticking namespace and identifier is found' do + env = { 
described_class::STICK_OBJECT => single_sticking_object } + + expect(Gitlab::Database::LoadBalancing::Sticking) + .to receive(:unstick_or_continue_sticking) + .with(:user, 42) + + middleware.unstick_or_continue_sticking(env) + end + + it 'sticks to the primary if multiple sticking namespaces and identifiers were found' do + env = { described_class::STICK_OBJECT => multiple_sticking_objects } + + expect(Gitlab::Database::LoadBalancing::Sticking) + .to receive(:unstick_or_continue_sticking) + .with(:user, 42) + .ordered + + expect(Gitlab::Database::LoadBalancing::Sticking) + .to receive(:unstick_or_continue_sticking) + .with(:runner, '123456789') + .ordered + + expect(Gitlab::Database::LoadBalancing::Sticking) + .to receive(:unstick_or_continue_sticking) + .with(:runner, '1234') + .ordered + + middleware.unstick_or_continue_sticking(env) + end + end + + describe '#stick_if_necessary' do + it 'does not stick to the primary if not necessary' do + expect(Gitlab::Database::LoadBalancing::Sticking) + .not_to receive(:stick_if_necessary) + + middleware.stick_if_necessary({}) + end + + it 'sticks to the primary if a warden user is found' do + env = { 'warden' => warden_user } + + expect(Gitlab::Database::LoadBalancing::Sticking) + .to receive(:stick_if_necessary) + .with(:user, 42) + + middleware.stick_if_necessary(env) + end + + it 'sticks to the primary if a a single sticking object is found' do + env = { described_class::STICK_OBJECT => single_sticking_object } + + expect(Gitlab::Database::LoadBalancing::Sticking) + .to receive(:stick_if_necessary) + .with(:user, 42) + + middleware.stick_if_necessary(env) + end + + it 'sticks to the primary if multiple sticking namespaces and identifiers were found' do + env = { described_class::STICK_OBJECT => multiple_sticking_objects } + + expect(Gitlab::Database::LoadBalancing::Sticking) + .to receive(:stick_if_necessary) + .with(:user, 42) + .ordered + + expect(Gitlab::Database::LoadBalancing::Sticking) + .to 
receive(:stick_if_necessary) + .with(:runner, '123456789') + .ordered + + expect(Gitlab::Database::LoadBalancing::Sticking) + .to receive(:stick_if_necessary) + .with(:runner, '1234') + .ordered + + middleware.stick_if_necessary(env) + end + end + + describe '#clear' do + it 'clears the currently used host and session' do + lb = double(:lb) + session = double(:session) + + allow(middleware).to receive(:load_balancer).and_return(lb) + + expect(lb).to receive(:release_host) + + stub_const('Gitlab::Database::LoadBalancing::RackMiddleware::Session', + session) + + expect(session).to receive(:clear_session) + + middleware.clear + end + end + + describe '.load_balancer' do + it 'returns a the load balancer' do + proxy = double(:proxy) + + expect(Gitlab::Database::LoadBalancing).to receive(:proxy) + .and_return(proxy) + + expect(proxy).to receive(:load_balancer) + + middleware.load_balancer + end + end + + describe '#sticking_namespaces_and_ids' do + context 'using a Warden request' do + it 'returns the warden user if present' do + env = { 'warden' => warden_user } + + expect(middleware.sticking_namespaces_and_ids(env)).to eq([[:user, 42]]) + end + + it 'returns an empty Array if no user was present' do + warden = double(:warden, user: nil) + env = { 'warden' => warden } + + expect(middleware.sticking_namespaces_and_ids(env)).to eq([]) + end + end + + context 'using a request with a manually set sticking object' do + it 'returns the sticking object' do + env = { described_class::STICK_OBJECT => multiple_sticking_objects } + + expect(middleware.sticking_namespaces_and_ids(env)).to eq([ + [:user, 42], + [:runner, '123456789'], + [:runner, '1234'] + ]) + end + end + + context 'using a regular request' do + it 'returns an empty Array' do + expect(middleware.sticking_namespaces_and_ids({})).to eq([]) + end + end + end +end diff --git a/spec/lib/gitlab/database/load_balancing/resolver_spec.rb b/spec/lib/gitlab/database/load_balancing/resolver_spec.rb new file mode 100644 index 
00000000000..0051cf50255 --- /dev/null +++ b/spec/lib/gitlab/database/load_balancing/resolver_spec.rb @@ -0,0 +1,80 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::LoadBalancing::Resolver do + describe '#resolve' do + let(:ip_addr) { IPAddr.new('127.0.0.2') } + + context 'when nameserver is an IP' do + it 'returns an IPAddr object' do + service = described_class.new('127.0.0.2') + + expect(service.resolve).to eq(ip_addr) + end + end + + context 'when nameserver is not an IP' do + subject { described_class.new('localhost').resolve } + + it 'looks the nameserver up in the hosts file' do + allow_next_instance_of(Resolv::Hosts) do |instance| + allow(instance).to receive(:getaddress).with('localhost').and_return('127.0.0.2') + end + + expect(subject).to eq(ip_addr) + end + + context 'when nameserver is not in the hosts file' do + it 'looks the nameserver up in DNS' do + resource = double(:resource, address: ip_addr) + packet = double(:packet, answer: [resource]) + + allow_next_instance_of(Resolv::Hosts) do |instance| + allow(instance).to receive(:getaddress).with('localhost').and_raise(Resolv::ResolvError) + end + + allow(Net::DNS::Resolver).to receive(:start) + .with('localhost', Net::DNS::A) + .and_return(packet) + + expect(subject).to eq(ip_addr) + end + + context 'when nameserver is not in DNS' do + it 'raises an exception' do + allow_next_instance_of(Resolv::Hosts) do |instance| + allow(instance).to receive(:getaddress).with('localhost').and_raise(Resolv::ResolvError) + end + + allow(Net::DNS::Resolver).to receive(:start) + .with('localhost', Net::DNS::A) + .and_return(double(:packet, answer: [])) + + expect { subject }.to raise_exception( + described_class::UnresolvableNameserverError, + 'could not resolve localhost' + ) + end + end + + context 'when DNS does not respond' do + it 'raises an exception' do + allow_next_instance_of(Resolv::Hosts) do |instance| + allow(instance).to 
receive(:getaddress).with('localhost').and_raise(Resolv::ResolvError) + end + + allow(Net::DNS::Resolver).to receive(:start) + .with('localhost', Net::DNS::A) + .and_raise(Net::DNS::Resolver::NoResponseError) + + expect { subject }.to raise_exception( + described_class::UnresolvableNameserverError, + 'no response from DNS server(s)' + ) + end + end + end + end + end +end diff --git a/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb b/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb new file mode 100644 index 00000000000..7fc7b5e8d11 --- /dev/null +++ b/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb @@ -0,0 +1,252 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery do + let(:service) do + described_class.new(nameserver: 'localhost', port: 8600, record: 'foo') + end + + before do + resource = double(:resource, address: IPAddr.new('127.0.0.1')) + packet = double(:packet, answer: [resource]) + + allow(Net::DNS::Resolver).to receive(:start) + .with('localhost', Net::DNS::A) + .and_return(packet) + end + + describe '#initialize' do + describe ':record_type' do + subject { described_class.new(nameserver: 'localhost', port: 8600, record: 'foo', record_type: record_type) } + + context 'with a supported type' do + let(:record_type) { 'SRV' } + + it { expect(subject.record_type).to eq Net::DNS::SRV } + end + + context 'with an unsupported type' do + let(:record_type) { 'AAAA' } + + it 'raises an argument error' do + expect { subject }.to raise_error(ArgumentError, 'Unsupported record type: AAAA') + end + end + end + end + + describe '#start' do + before do + allow(service) + .to receive(:loop) + .and_yield + end + + it 'starts service discovery in a new thread' do + expect(service) + .to receive(:refresh_if_necessary) + .and_return(5) + + expect(service) + .to receive(:rand) + .and_return(2) + + expect(service) + .to receive(:sleep) + .with(7) + + 
service.start.join + end + + it 'reports exceptions to Sentry' do + error = StandardError.new + + expect(service) + .to receive(:refresh_if_necessary) + .and_raise(error) + + expect(Gitlab::ErrorTracking) + .to receive(:track_exception) + .with(error) + + expect(service) + .to receive(:rand) + .and_return(2) + + expect(service) + .to receive(:sleep) + .with(62) + + service.start.join + end + end + + describe '#refresh_if_necessary' do + let(:address_foo) { described_class::Address.new('foo') } + let(:address_bar) { described_class::Address.new('bar') } + + context 'when a refresh is necessary' do + before do + allow(service) + .to receive(:addresses_from_load_balancer) + .and_return(%w[localhost]) + + allow(service) + .to receive(:addresses_from_dns) + .and_return([10, [address_foo, address_bar]]) + end + + it 'refreshes the load balancer hosts' do + expect(service) + .to receive(:replace_hosts) + .with([address_foo, address_bar]) + + expect(service.refresh_if_necessary).to eq(10) + end + end + + context 'when a refresh is not necessary' do + before do + allow(service) + .to receive(:addresses_from_load_balancer) + .and_return(%w[localhost]) + + allow(service) + .to receive(:addresses_from_dns) + .and_return([10, %w[localhost]]) + end + + it 'does not refresh the load balancer hosts' do + expect(service) + .not_to receive(:replace_hosts) + + expect(service.refresh_if_necessary).to eq(10) + end + end + end + + describe '#replace_hosts' do + let(:address_foo) { described_class::Address.new('foo') } + let(:address_bar) { described_class::Address.new('bar') } + + let(:load_balancer) do + Gitlab::Database::LoadBalancing::LoadBalancer.new([address_foo]) + end + + before do + allow(service) + .to receive(:load_balancer) + .and_return(load_balancer) + end + + it 'replaces the hosts of the load balancer' do + service.replace_hosts([address_bar]) + + expect(load_balancer.host_list.host_names_and_ports).to eq([['bar', nil]]) + end + + it 'disconnects the old connections' do + 
host = load_balancer.host_list.hosts.first + + allow(service) + .to receive(:disconnect_timeout) + .and_return(2) + + expect(host) + .to receive(:disconnect!) + .with(2) + + service.replace_hosts([address_bar]) + end + end + + describe '#addresses_from_dns' do + let(:service) { described_class.new(nameserver: 'localhost', port: 8600, record: 'foo', record_type: record_type) } + let(:packet) { double(:packet, answer: [res1, res2]) } + + before do + allow(service.resolver) + .to receive(:search) + .with('foo', described_class::RECORD_TYPES[record_type]) + .and_return(packet) + end + + context 'with an A record' do + let(:record_type) { 'A' } + + let(:res1) { double(:resource, address: IPAddr.new('255.255.255.0'), ttl: 90) } + let(:res2) { double(:resource, address: IPAddr.new('127.0.0.1'), ttl: 90) } + + it 'returns a TTL and ordered list of IP addresses' do + addresses = [ + described_class::Address.new('127.0.0.1'), + described_class::Address.new('255.255.255.0') + ] + + expect(service.addresses_from_dns).to eq([90, addresses]) + end + end + + context 'with an SRV record' do + let(:record_type) { 'SRV' } + + let(:res1) { double(:resource, host: 'foo1.service.consul.', port: 5432, weight: 1, priority: 1, ttl: 90) } + let(:res2) { double(:resource, host: 'foo2.service.consul.', port: 5433, weight: 1, priority: 1, ttl: 90) } + let(:res3) { double(:resource, host: 'foo3.service.consul.', port: 5434, weight: 1, priority: 1, ttl: 90) } + let(:packet) { double(:packet, answer: [res1, res2, res3], additional: []) } + + before do + expect_next_instance_of(Gitlab::Database::LoadBalancing::SrvResolver) do |resolver| + allow(resolver).to receive(:address_for).with('foo1.service.consul.').and_return(IPAddr.new('255.255.255.0')) + allow(resolver).to receive(:address_for).with('foo2.service.consul.').and_return(IPAddr.new('127.0.0.1')) + allow(resolver).to receive(:address_for).with('foo3.service.consul.').and_return(nil) + end + end + + it 'returns a TTL and ordered list of 
hosts' do + addresses = [ + described_class::Address.new('127.0.0.1', 5433), + described_class::Address.new('255.255.255.0', 5432) + ] + + expect(service.addresses_from_dns).to eq([90, addresses]) + end + end + end + + describe '#new_wait_time_for' do + it 'returns the DNS TTL if greater than the default interval' do + res = double(:resource, ttl: 90) + + expect(service.new_wait_time_for([res])).to eq(90) + end + + it 'returns the default interval if greater than the DNS TTL' do + res = double(:resource, ttl: 10) + + expect(service.new_wait_time_for([res])).to eq(60) + end + + it 'returns the default interval if no resources are given' do + expect(service.new_wait_time_for([])).to eq(60) + end + end + + describe '#addresses_from_load_balancer' do + it 'returns the ordered host names of the load balancer' do + load_balancer = Gitlab::Database::LoadBalancing::LoadBalancer.new(%w[b a]) + + allow(service) + .to receive(:load_balancer) + .and_return(load_balancer) + + addresses = [ + described_class::Address.new('a'), + described_class::Address.new('b') + ] + + expect(service.addresses_from_load_balancer).to eq(addresses) + end + end +end diff --git a/spec/lib/gitlab/database/load_balancing/session_spec.rb b/spec/lib/gitlab/database/load_balancing/session_spec.rb new file mode 100644 index 00000000000..74512f76fd4 --- /dev/null +++ b/spec/lib/gitlab/database/load_balancing/session_spec.rb @@ -0,0 +1,353 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::LoadBalancing::Session do + after do + described_class.clear_session + end + + describe '.current' do + it 'returns the current session' do + expect(described_class.current).to be_an_instance_of(described_class) + end + end + + describe '.clear_session' do + it 'clears the current session' do + described_class.current + described_class.clear_session + + expect(RequestStore[described_class::CACHE_KEY]).to be_nil + end + end + + describe '.without_sticky_writes' do + it 'ignores 
sticky write events sent by a connection proxy' do + described_class.without_sticky_writes do + described_class.current.write! + end + + session = described_class.current + + expect(session).not_to be_using_primary + end + + it 'still is aware of write that happened' do + described_class.without_sticky_writes do + described_class.current.write! + end + + session = described_class.current + + expect(session.performed_write?).to be true + end + end + + describe '#use_primary?' do + it 'returns true when the primary should be used' do + instance = described_class.new + + instance.use_primary! + + expect(instance.use_primary?).to eq(true) + end + + it 'returns false when a secondary should be used' do + expect(described_class.new.use_primary?).to eq(false) + end + + it 'returns true when a write was performed' do + instance = described_class.new + + instance.write! + + expect(instance.use_primary?).to eq(true) + end + end + + describe '#use_primary' do + let(:instance) { described_class.new } + + context 'when primary was used before' do + before do + instance.write! + end + + it 'restores state after use' do + expect { |blk| instance.use_primary(&blk) }.to yield_with_no_args + + expect(instance.use_primary?).to eq(true) + end + end + + context 'when primary was not used' do + it 'restores state after use' do + expect { |blk| instance.use_primary(&blk) }.to yield_with_no_args + + expect(instance.use_primary?).to eq(false) + end + end + + it 'uses primary during block' do + expect do |blk| + instance.use_primary do + expect(instance.use_primary?).to eq(true) + + # call yield probe + blk.to_proc.call + end + end.to yield_control + end + + it 'continues using primary when write was performed' do + instance.use_primary do + instance.write! + end + + expect(instance.use_primary?).to eq(true) + end + end + + describe '#performed_write?' do + it 'returns true if a write was performed' do + instance = described_class.new + + instance.write! 
+ + expect(instance.performed_write?).to eq(true) + end + end + + describe '#ignore_writes' do + it 'ignores write events' do + instance = described_class.new + + instance.ignore_writes { instance.write! } + + expect(instance).not_to be_using_primary + expect(instance.performed_write?).to eq true + end + + it 'does not prevent using primary if an exception is raised' do + instance = described_class.new + + instance.ignore_writes { raise ArgumentError } rescue ArgumentError + instance.write! + + expect(instance).to be_using_primary + end + end + + describe '#use_replicas_for_read_queries' do + let(:instance) { described_class.new } + + it 'sets the flag inside the block' do + expect do |blk| + instance.use_replicas_for_read_queries do + expect(instance.use_replicas_for_read_queries?).to eq(true) + + # call yield probe + blk.to_proc.call + end + end.to yield_control + + expect(instance.use_replicas_for_read_queries?).to eq(false) + end + + it 'restores state after use' do + expect do |blk| + instance.use_replicas_for_read_queries do + instance.use_replicas_for_read_queries do + expect(instance.use_replicas_for_read_queries?).to eq(true) + + # call yield probe + blk.to_proc.call + end + + expect(instance.use_replicas_for_read_queries?).to eq(true) + end + end.to yield_control + + expect(instance.use_replicas_for_read_queries?).to eq(false) + end + + context 'when primary was used before' do + before do + instance.use_primary! + end + + it 'sets the flag inside the block' do + expect do |blk| + instance.use_replicas_for_read_queries do + expect(instance.use_replicas_for_read_queries?).to eq(true) + + # call yield probe + blk.to_proc.call + end + end.to yield_control + + expect(instance.use_replicas_for_read_queries?).to eq(false) + end + end + + context 'when a write query is performed before' do + before do + instance.write! 
+ end + + it 'sets the flag inside the block' do + expect do |blk| + instance.use_replicas_for_read_queries do + expect(instance.use_replicas_for_read_queries?).to eq(true) + + # call yield probe + blk.to_proc.call + end + end.to yield_control + + expect(instance.use_replicas_for_read_queries?).to eq(false) + end + end + end + + describe '#fallback_to_replicas_for_ambiguous_queries' do + let(:instance) { described_class.new } + + it 'sets the flag inside the block' do + expect do |blk| + instance.fallback_to_replicas_for_ambiguous_queries do + expect(instance.fallback_to_replicas_for_ambiguous_queries?).to eq(true) + + # call yield probe + blk.to_proc.call + end + end.to yield_control + + expect(instance.fallback_to_replicas_for_ambiguous_queries?).to eq(false) + end + + it 'restores state after use' do + expect do |blk| + instance.fallback_to_replicas_for_ambiguous_queries do + instance.fallback_to_replicas_for_ambiguous_queries do + expect(instance.fallback_to_replicas_for_ambiguous_queries?).to eq(true) + + # call yield probe + blk.to_proc.call + end + + expect(instance.fallback_to_replicas_for_ambiguous_queries?).to eq(true) + end + end.to yield_control + + expect(instance.fallback_to_replicas_for_ambiguous_queries?).to eq(false) + end + + context 'when primary was used before' do + before do + instance.use_primary! + end + + it 'uses primary during block' do + expect(instance.fallback_to_replicas_for_ambiguous_queries?).to eq(false) + + expect do |blk| + instance.fallback_to_replicas_for_ambiguous_queries do + expect(instance.fallback_to_replicas_for_ambiguous_queries?).to eq(false) + + # call yield probe + blk.to_proc.call + end + end.to yield_control + + expect(instance.fallback_to_replicas_for_ambiguous_queries?).to eq(false) + end + end + + context 'when a write was performed before' do + before do + instance.write! 
+ end + + it 'uses primary during block' do + expect(instance.fallback_to_replicas_for_ambiguous_queries?).to eq(false) + + expect do |blk| + instance.fallback_to_replicas_for_ambiguous_queries do + expect(instance.fallback_to_replicas_for_ambiguous_queries?).to eq(false) + + # call yield probe + blk.to_proc.call + end + end.to yield_control + + expect(instance.fallback_to_replicas_for_ambiguous_queries?).to eq(false) + end + end + + context 'when primary was used inside the block' do + it 'uses primary aterward' do + expect(instance.fallback_to_replicas_for_ambiguous_queries?).to eq(false) + + instance.fallback_to_replicas_for_ambiguous_queries do + expect(instance.fallback_to_replicas_for_ambiguous_queries?).to eq(true) + + instance.use_primary! + + expect(instance.fallback_to_replicas_for_ambiguous_queries?).to eq(false) + end + + expect(instance.fallback_to_replicas_for_ambiguous_queries?).to eq(false) + end + + it 'restores state after use' do + instance.fallback_to_replicas_for_ambiguous_queries do + instance.fallback_to_replicas_for_ambiguous_queries do + expect(instance.fallback_to_replicas_for_ambiguous_queries?).to eq(true) + + instance.use_primary! + + expect(instance.fallback_to_replicas_for_ambiguous_queries?).to eq(false) + end + + expect(instance.fallback_to_replicas_for_ambiguous_queries?).to eq(false) + end + + expect(instance.fallback_to_replicas_for_ambiguous_queries?).to eq(false) + end + end + + context 'when a write was performed inside the block' do + it 'uses primary aterward' do + expect(instance.fallback_to_replicas_for_ambiguous_queries?).to eq(false) + + instance.fallback_to_replicas_for_ambiguous_queries do + expect(instance.fallback_to_replicas_for_ambiguous_queries?).to eq(true) + + instance.write! 
+ + expect(instance.fallback_to_replicas_for_ambiguous_queries?).to eq(false) + end + + expect(instance.fallback_to_replicas_for_ambiguous_queries?).to eq(false) + end + + it 'restores state after use' do + instance.fallback_to_replicas_for_ambiguous_queries do + instance.fallback_to_replicas_for_ambiguous_queries do + expect(instance.fallback_to_replicas_for_ambiguous_queries?).to eq(true) + + instance.write! + + expect(instance.fallback_to_replicas_for_ambiguous_queries?).to eq(false) + end + + expect(instance.fallback_to_replicas_for_ambiguous_queries?).to eq(false) + end + + expect(instance.fallback_to_replicas_for_ambiguous_queries?).to eq(false) + end + end + end +end diff --git a/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb new file mode 100644 index 00000000000..90051172fca --- /dev/null +++ b/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb @@ -0,0 +1,161 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do + let(:middleware) { described_class.new } + + after do + Gitlab::Database::LoadBalancing::Session.clear_session + end + + describe '#call' do + shared_context 'data consistency worker class' do |data_consistency, feature_flag| + let(:worker_class) do + Class.new do + def self.name + 'TestDataConsistencyWorker' + end + + include ApplicationWorker + + data_consistency data_consistency, feature_flag: feature_flag + + def perform(*args) + end + end + end + + before do + stub_const('TestDataConsistencyWorker', worker_class) + end + end + + shared_examples_for 'does not pass database locations' do + it 'does not pass database locations', :aggregate_failures do + middleware.call(worker_class, job, double(:queue), redis_pool) { 10 } + + expect(job['database_replica_location']).to be_nil + expect(job['database_write_location']).to be_nil + end + end + 
+ shared_examples_for 'mark data consistency location' do |data_consistency| + include_context 'data consistency worker class', data_consistency, :load_balancing_for_test_data_consistency_worker + + let(:location) { '0/D525E3A8' } + + context 'when feature flag load_balancing_for_sidekiq is disabled' do + before do + stub_feature_flags(load_balancing_for_test_data_consistency_worker: false) + end + + include_examples 'does not pass database locations' + end + + context 'when write was not performed' do + before do + allow(Gitlab::Database::LoadBalancing::Session.current).to receive(:use_primary?).and_return(false) + end + + it 'passes database_replica_location' do + expect(middleware).to receive_message_chain(:load_balancer, :host, "database_replica_location").and_return(location) + + middleware.call(worker_class, job, double(:queue), redis_pool) { 10 } + + expect(job['database_replica_location']).to eq(location) + end + end + + context 'when write was performed' do + before do + allow(Gitlab::Database::LoadBalancing::Session.current).to receive(:use_primary?).and_return(true) + end + + it 'passes primary write location', :aggregate_failures do + expect(middleware).to receive_message_chain(:load_balancer, :primary_write_location).and_return(location) + + middleware.call(worker_class, job, double(:queue), redis_pool) { 10 } + + expect(job['database_write_location']).to eq(location) + end + end + end + + shared_examples_for 'database location was already provided' do |provided_database_location, other_location| + shared_examples_for 'does not set database location again' do |use_primary| + before do + allow(Gitlab::Database::LoadBalancing::Session.current).to receive(:use_primary?).and_return(use_primary) + end + + it 'does not set database locations again' do + middleware.call(worker_class, job, double(:queue), redis_pool) { 10 } + + expect(job[provided_database_location]).to eq(old_location) + expect(job[other_location]).to be_nil + end + end + + let(:old_location) 
{ '0/D525E3A8' } + let(:new_location) { 'AB/12345' } + let(:job) { { "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", provided_database_location => old_location } } + + before do + allow(middleware).to receive_message_chain(:load_balancer, :primary_write_location).and_return(new_location) + allow(middleware).to receive_message_chain(:load_balancer, :database_replica_location).and_return(new_location) + end + + context "when write was performed" do + include_examples 'does not set database location again', true + end + + context "when write was not performed" do + include_examples 'does not set database location again', false + end + end + + let(:queue) { 'default' } + let(:redis_pool) { Sidekiq.redis_pool } + let(:worker_class) { 'TestDataConsistencyWorker' } + let(:job) { { "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e" } } + + before do + skip_feature_flags_yaml_validation + skip_default_enabled_yaml_check + end + + context 'when worker cannot be constantized' do + let(:worker_class) { 'ActionMailer::MailDeliveryJob' } + + include_examples 'does not pass database locations' + end + + context 'when worker class does not include ApplicationWorker' do + let(:worker_class) { ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper } + + include_examples 'does not pass database locations' + end + + context 'database write location was already provided' do + include_examples 'database location was already provided', 'database_write_location', 'database_replica_location' + end + + context 'database replica location was already provided' do + include_examples 'database location was already provided', 'database_replica_location', 'database_write_location' + end + + context 'when worker data consistency is :always' do + include_context 'data consistency worker class', :always, :load_balancing_for_test_data_consistency_worker + + include_examples 'does not pass database locations' + end + + context 'when worker data consistency is :delayed' do + include_examples 'mark 
data consistency location', :delayed + end + + context 'when worker data consistency is :sticky' do + include_examples 'mark data consistency location', :sticky + end + end +end diff --git a/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb new file mode 100644 index 00000000000..b7cd0caa922 --- /dev/null +++ b/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb @@ -0,0 +1,201 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware do + let(:middleware) { described_class.new } + + after do + Gitlab::Database::LoadBalancing::Session.clear_session + end + + describe '#call' do + shared_context 'data consistency worker class' do |data_consistency, feature_flag| + let(:worker_class) do + Class.new do + def self.name + 'TestDataConsistencyWorker' + end + + include ApplicationWorker + + data_consistency data_consistency, feature_flag: feature_flag + + def perform(*args) + end + end + end + + before do + stub_const('TestDataConsistencyWorker', worker_class) + end + end + + shared_examples_for 'stick to the primary' do + it 'sticks to the primary' do + middleware.call(worker, job, double(:queue)) do + expect(Gitlab::Database::LoadBalancing::Session.current.use_primary?).to be_truthy + end + end + end + + shared_examples_for 'replica is up to date' do |location, data_consistency| + it 'does not stick to the primary', :aggregate_failures do + expect(middleware).to receive(:replica_caught_up?).with(location).and_return(true) + + middleware.call(worker, job, double(:queue)) do + expect(Gitlab::Database::LoadBalancing::Session.current.use_primary?).not_to be_truthy + end + + expect(job[:database_chosen]).to eq('replica') + end + + it "updates job hash with data_consistency :#{data_consistency}" do + middleware.call(worker, job, double(:queue)) do + expect(job).to 
include(data_consistency: data_consistency.to_s) + end + end + end + + shared_examples_for 'sticks based on data consistency' do |data_consistency| + include_context 'data consistency worker class', data_consistency, :load_balancing_for_test_data_consistency_worker + + context 'when load_balancing_for_test_data_consistency_worker is disabled' do + before do + stub_feature_flags(load_balancing_for_test_data_consistency_worker: false) + end + + include_examples 'stick to the primary' + end + + context 'when database replica location is set' do + let(:job) { { 'job_id' => 'a180b47c-3fd6-41b8-81e9-34da61c3400e', 'database_replica_location' => '0/D525E3A8' } } + + before do + allow(middleware).to receive(:replica_caught_up?).and_return(true) + end + + it_behaves_like 'replica is up to date', '0/D525E3A8', data_consistency + end + + context 'when database primary location is set' do + let(:job) { { 'job_id' => 'a180b47c-3fd6-41b8-81e9-34da61c3400e', 'database_write_location' => '0/D525E3A8' } } + + before do + allow(middleware).to receive(:replica_caught_up?).and_return(true) + end + + it_behaves_like 'replica is up to date', '0/D525E3A8', data_consistency + end + + context 'when database location is not set' do + let(:job) { { 'job_id' => 'a180b47c-3fd6-41b8-81e9-34da61c3400e' } } + + it_behaves_like 'stick to the primary', nil + end + end + + let(:queue) { 'default' } + let(:redis_pool) { Sidekiq.redis_pool } + let(:worker) { worker_class.new } + let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", 'database_replica_location' => '0/D525E3A8' } } + let(:block) { 10 } + + before do + skip_feature_flags_yaml_validation + skip_default_enabled_yaml_check + allow(middleware).to receive(:clear) + allow(Gitlab::Database::LoadBalancing::Session.current).to receive(:performed_write?).and_return(true) + end + + context 'when worker class does not include ApplicationWorker' do + let(:worker) { ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper.new } + 
+ include_examples 'stick to the primary' + end + + context 'when worker data consistency is :always' do + include_context 'data consistency worker class', :always, :load_balancing_for_test_data_consistency_worker + + include_examples 'stick to the primary' + end + + context 'when worker data consistency is :delayed' do + include_examples 'sticks based on data consistency', :delayed + + context 'when replica is not up to date' do + before do + allow(::Gitlab::Database::LoadBalancing).to receive_message_chain(:proxy, :load_balancer, :release_host) + allow(::Gitlab::Database::LoadBalancing).to receive_message_chain(:proxy, :load_balancer, :select_up_to_date_host).and_return(false) + end + + around do |example| + with_sidekiq_server_middleware do |chain| + chain.add described_class + Sidekiq::Testing.disable! { example.run } + end + end + + context 'when job is executed first' do + it 'raise an error and retries', :aggregate_failures do + expect do + process_job(job) + end.to raise_error(Sidekiq::JobRetry::Skip) + + expect(job['error_class']).to eq('Gitlab::Database::LoadBalancing::SidekiqServerMiddleware::JobReplicaNotUpToDate') + expect(job[:database_chosen]).to eq('retry') + end + end + + context 'when job is retried' do + it 'stick to the primary', :aggregate_failures do + expect do + process_job(job) + end.to raise_error(Sidekiq::JobRetry::Skip) + + process_job(job) + expect(job[:database_chosen]).to eq('primary') + end + end + + context 'replica selection mechanism feature flag rollout' do + before do + stub_feature_flags(sidekiq_load_balancing_rotate_up_to_date_replica: false) + end + + it 'uses different implmentation' do + expect(::Gitlab::Database::LoadBalancing).to receive_message_chain(:proxy, :load_balancer, :host, :caught_up?).and_return(false) + + expect do + process_job(job) + end.to raise_error(Sidekiq::JobRetry::Skip) + end + end + end + end + + context 'when worker data consistency is :sticky' do + include_examples 'sticks based on data 
consistency', :sticky + + context 'when replica is not up to date' do + before do + allow(middleware).to receive(:replica_caught_up?).and_return(false) + end + + include_examples 'stick to the primary' + + it 'updates job hash with primary database chosen', :aggregate_failures do + expect { |b| middleware.call(worker, job, double(:queue), &b) }.to yield_control + + expect(job[:database_chosen]).to eq('primary') + end + end + end + end + + def process_job(job) + Sidekiq::JobRetry.new.local(worker_class, job, queue) do + worker_class.process_job(job) + end + end +end diff --git a/spec/lib/gitlab/database/load_balancing/srv_resolver_spec.rb b/spec/lib/gitlab/database/load_balancing/srv_resolver_spec.rb new file mode 100644 index 00000000000..6ac0608d485 --- /dev/null +++ b/spec/lib/gitlab/database/load_balancing/srv_resolver_spec.rb @@ -0,0 +1,61 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::LoadBalancing::SrvResolver do + let(:resolver) { Net::DNS::Resolver.new(nameservers: '127.0.0.1', port: 8600, use_tcp: true) } + let(:additional) { dns_response_packet_from_fixture('srv_with_a_rr_in_additional_section').additional } + + describe '#address_for' do + let(:host) { 'patroni-02-db-gstg.node.east-us-2.consul.' } + + subject { described_class.new(resolver, additional).address_for(host) } + + context 'when additional section contains an A record' do + it 'returns an IP4 address' do + expect(subject).to eq(IPAddr.new('10.224.29.102')) + end + end + + context 'when additional section contains an AAAA record' do + let(:host) { 'a.gtld-servers.net.' 
} + let(:additional) { dns_response_packet_from_fixture('a_with_aaaa_rr_in_additional_section').additional } + + it 'returns an IP6 address' do + expect(subject).to eq(IPAddr.new('2001:503:a83e::2:30')) + end + end + + context 'when additional section does not contain A nor AAAA records' do + let(:additional) { [] } + + context 'when host resolves to an A record' do + before do + allow(resolver).to receive(:search).with(host, Net::DNS::ANY).and_return(dns_response_packet_from_fixture('a_rr')) + end + + it 'returns an IP4 address' do + expect(subject).to eq(IPAddr.new('10.224.29.102')) + end + end + + context 'when host does resolves to an AAAA record' do + before do + allow(resolver).to receive(:search).with(host, Net::DNS::ANY).and_return(dns_response_packet_from_fixture('aaaa_rr')) + end + + it 'returns an IP6 address' do + expect(subject).to eq(IPAddr.new('2a00:1450:400e:80a::200e')) + end + end + end + end + + def dns_response_packet_from_fixture(fixture_name) + fixture = File.read(Rails.root + "spec/fixtures/dns/#{fixture_name}.json") + encoded_payload = Gitlab::Json.parse(fixture)['payload'] + payload = Base64.decode64(encoded_payload) + + Net::DNS::Packet.parse(payload) + end +end diff --git a/spec/lib/gitlab/database/load_balancing/sticking_spec.rb b/spec/lib/gitlab/database/load_balancing/sticking_spec.rb new file mode 100644 index 00000000000..bf4e3756e0e --- /dev/null +++ b/spec/lib/gitlab/database/load_balancing/sticking_spec.rb @@ -0,0 +1,307 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do + after do + Gitlab::Database::LoadBalancing::Session.clear_session + end + + describe '.stick_if_necessary' do + context 'when sticking is disabled' do + it 'does not perform any sticking' do + expect(described_class).not_to receive(:stick) + + described_class.stick_if_necessary(:user, 42) + end + end + + context 'when sticking is enabled' do + before do + 
allow(Gitlab::Database::LoadBalancing).to receive(:enable?) + .and_return(true) + end + + it 'does not stick if no write was performed' do + allow(Gitlab::Database::LoadBalancing::Session.current) + .to receive(:performed_write?) + .and_return(false) + + expect(described_class).not_to receive(:stick) + + described_class.stick_if_necessary(:user, 42) + end + + it 'sticks to the primary if a write was performed' do + allow(Gitlab::Database::LoadBalancing::Session.current) + .to receive(:performed_write?) + .and_return(true) + + expect(described_class).to receive(:stick).with(:user, 42) + + described_class.stick_if_necessary(:user, 42) + end + end + end + + describe '.all_caught_up?' do + let(:lb) { double(:lb) } + + before do + allow(described_class).to receive(:load_balancer).and_return(lb) + end + + it 'returns true if no write location could be found' do + allow(described_class).to receive(:last_write_location_for) + .with(:user, 42) + .and_return(nil) + + expect(lb).not_to receive(:all_caught_up?) 
+ + expect(described_class.all_caught_up?(:user, 42)).to eq(true) + end + + it 'returns true, and unsticks if all secondaries have caught up' do + allow(described_class).to receive(:last_write_location_for) + .with(:user, 42) + .and_return('foo') + + allow(lb).to receive(:all_caught_up?).with('foo').and_return(true) + + expect(described_class).to receive(:unstick).with(:user, 42) + + expect(described_class.all_caught_up?(:user, 42)).to eq(true) + end + + it 'return false if the secondaries have not yet caught up' do + allow(described_class).to receive(:last_write_location_for) + .with(:user, 42) + .and_return('foo') + + allow(lb).to receive(:all_caught_up?).with('foo').and_return(false) + + expect(described_class.all_caught_up?(:user, 42)).to eq(false) + end + end + + describe '.unstick_or_continue_sticking' do + let(:lb) { double(:lb) } + + before do + allow(described_class).to receive(:load_balancer).and_return(lb) + end + + it 'simply returns if no write location could be found' do + allow(described_class).to receive(:last_write_location_for) + .with(:user, 42) + .and_return(nil) + + expect(lb).not_to receive(:all_caught_up?) + + described_class.unstick_or_continue_sticking(:user, 42) + end + + it 'unsticks if all secondaries have caught up' do + allow(described_class).to receive(:last_write_location_for) + .with(:user, 42) + .and_return('foo') + + allow(lb).to receive(:all_caught_up?).with('foo').and_return(true) + + expect(described_class).to receive(:unstick).with(:user, 42) + + described_class.unstick_or_continue_sticking(:user, 42) + end + + it 'continues using the primary if the secondaries have not yet caught up' do + allow(described_class).to receive(:last_write_location_for) + .with(:user, 42) + .and_return('foo') + + allow(lb).to receive(:all_caught_up?).with('foo').and_return(false) + + expect(Gitlab::Database::LoadBalancing::Session.current) + .to receive(:use_primary!) 
+ + described_class.unstick_or_continue_sticking(:user, 42) + end + end + + RSpec.shared_examples 'sticking' do + context 'when sticking is disabled' do + it 'does not perform any sticking', :aggregate_failures do + expect(described_class).not_to receive(:set_write_location_for) + expect(Gitlab::Database::LoadBalancing::Session.current).not_to receive(:use_primary!) + + described_class.bulk_stick(:user, ids) + end + end + + context 'when sticking is enabled' do + before do + allow(Gitlab::Database::LoadBalancing).to receive(:configured?).and_return(true) + + lb = double(:lb, primary_write_location: 'foo') + + allow(described_class).to receive(:load_balancer).and_return(lb) + end + + it 'sticks an entity to the primary', :aggregate_failures do + ids.each do |id| + expect(described_class).to receive(:set_write_location_for) + .with(:user, id, 'foo') + end + + expect(Gitlab::Database::LoadBalancing::Session.current) + .to receive(:use_primary!) + + subject + end + end + end + + describe '.stick' do + it_behaves_like 'sticking' do + let(:ids) { [42] } + subject { described_class.stick(:user, ids.first) } + end + end + + describe '.bulk_stick' do + it_behaves_like 'sticking' do + let(:ids) { [42, 43] } + subject { described_class.bulk_stick(:user, ids) } + end + end + + describe '.mark_primary_write_location' do + context 'when enabled' do + before do + allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true) + allow(Gitlab::Database::LoadBalancing).to receive(:configured?).and_return(true) + end + + it 'updates the write location with the load balancer' do + lb = double(:lb, primary_write_location: 'foo') + + allow(described_class).to receive(:load_balancer).and_return(lb) + + expect(described_class).to receive(:set_write_location_for) + .with(:user, 42, 'foo') + + described_class.mark_primary_write_location(:user, 42) + end + end + + context 'when load balancing is configured but not enabled' do + before do + 
allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(false) + allow(Gitlab::Database::LoadBalancing).to receive(:configured?).and_return(true) + end + + it 'updates the write location with the main ActiveRecord connection' do + allow(described_class).to receive(:load_balancer).and_return(nil) + expect(ActiveRecord::Base).to receive(:connection).and_call_original + expect(described_class).to receive(:set_write_location_for) + .with(:user, 42, anything) + + described_class.mark_primary_write_location(:user, 42) + end + + context 'when write location is nil' do + before do + allow(Gitlab::Database).to receive(:get_write_location).and_return(nil) + end + + it 'does not update the write location' do + expect(described_class).not_to receive(:set_write_location_for) + + described_class.mark_primary_write_location(:user, 42) + end + end + end + + context 'when load balancing is disabled' do + before do + allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(false) + allow(Gitlab::Database::LoadBalancing).to receive(:configured?).and_return(false) + end + + it 'updates the write location with the main ActiveRecord connection' do + expect(described_class).not_to receive(:set_write_location_for) + + described_class.mark_primary_write_location(:user, 42) + end + end + end + + describe '.unstick' do + it 'removes the sticking data from Redis' do + described_class.set_write_location_for(:user, 4, 'foo') + described_class.unstick(:user, 4) + + expect(described_class.last_write_location_for(:user, 4)).to be_nil + end + end + + describe '.last_write_location_for' do + it 'returns the last WAL write location for a user' do + described_class.set_write_location_for(:user, 4, 'foo') + + expect(described_class.last_write_location_for(:user, 4)).to eq('foo') + end + end + + describe '.redis_key_for' do + it 'returns a String' do + expect(described_class.redis_key_for(:user, 42)) + .to eq('database-load-balancing/write-location/user/42') + end + end + + 
describe '.load_balancer' do + it 'returns a the load balancer' do + proxy = double(:proxy) + + expect(Gitlab::Database::LoadBalancing).to receive(:proxy) + .and_return(proxy) + + expect(proxy).to receive(:load_balancer) + + described_class.load_balancer + end + end + + describe '.select_caught_up_replicas' do + let(:lb) { double(:lb) } + + before do + allow(described_class).to receive(:load_balancer).and_return(lb) + end + + context 'with no write location' do + before do + allow(described_class).to receive(:last_write_location_for) + .with(:project, 42).and_return(nil) + end + + it 'returns false and does not try to find caught up hosts' do + expect(described_class).not_to receive(:select_caught_up_hosts) + expect(described_class.select_caught_up_replicas(:project, 42)).to be false + end + end + + context 'with write location' do + before do + allow(described_class).to receive(:last_write_location_for) + .with(:project, 42).and_return('foo') + end + + it 'returns true, selects hosts, and unsticks if any secondary has caught up' do + expect(lb).to receive(:select_caught_up_hosts).and_return(true) + expect(described_class).to receive(:unstick).with(:project, 42) + expect(described_class.select_caught_up_replicas(:project, 42)).to be true + end + end + end +end diff --git a/spec/lib/gitlab/database/load_balancing_spec.rb b/spec/lib/gitlab/database/load_balancing_spec.rb new file mode 100644 index 00000000000..e7de7f2b43b --- /dev/null +++ b/spec/lib/gitlab/database/load_balancing_spec.rb @@ -0,0 +1,834 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::LoadBalancing do + include_context 'clear DB Load Balancing configuration' + + before do + stub_env('ENABLE_LOAD_BALANCING_FOR_FOSS', 'true') + end + + describe '.proxy' do + context 'when configured' do + before do + allow(ActiveRecord::Base.singleton_class).to receive(:prepend) + subject.configure_proxy + end + + it 'returns the connection proxy' do + 
expect(subject.proxy).to be_an_instance_of(subject::ConnectionProxy) + end + end + + context 'when not configured' do + it 'returns nil' do + expect(subject.proxy).to be_nil + end + + it 'tracks an error to sentry' do + expect(Gitlab::ErrorTracking).to receive(:track_exception).with( + an_instance_of(subject::ProxyNotConfiguredError) + ) + + subject.proxy + end + end + end + + describe '.configuration' do + it 'returns a Hash' do + lb_config = { 'hosts' => %w(foo) } + + original_db_config = Gitlab::Database.config + modified_db_config = original_db_config.merge(load_balancing: lb_config) + expect(Gitlab::Database).to receive(:config).and_return(modified_db_config) + + expect(described_class.configuration).to eq(lb_config) + end + end + + describe '.max_replication_difference' do + context 'without an explicitly configured value' do + it 'returns the default value' do + allow(described_class) + .to receive(:configuration) + .and_return({}) + + expect(described_class.max_replication_difference).to eq(8.megabytes) + end + end + + context 'with an explicitly configured value' do + it 'returns the configured value' do + allow(described_class) + .to receive(:configuration) + .and_return({ 'max_replication_difference' => 4 }) + + expect(described_class.max_replication_difference).to eq(4) + end + end + end + + describe '.max_replication_lag_time' do + context 'without an explicitly configured value' do + it 'returns the default value' do + allow(described_class) + .to receive(:configuration) + .and_return({}) + + expect(described_class.max_replication_lag_time).to eq(60) + end + end + + context 'with an explicitly configured value' do + it 'returns the configured value' do + allow(described_class) + .to receive(:configuration) + .and_return({ 'max_replication_lag_time' => 4 }) + + expect(described_class.max_replication_lag_time).to eq(4) + end + end + end + + describe '.replica_check_interval' do + context 'without an explicitly configured value' do + it 'returns the 
default value' do + allow(described_class) + .to receive(:configuration) + .and_return({}) + + expect(described_class.replica_check_interval).to eq(60) + end + end + + context 'with an explicitly configured value' do + it 'returns the configured value' do + allow(described_class) + .to receive(:configuration) + .and_return({ 'replica_check_interval' => 4 }) + + expect(described_class.replica_check_interval).to eq(4) + end + end + end + + describe '.hosts' do + it 'returns a list of hosts' do + allow(described_class) + .to receive(:configuration) + .and_return({ 'hosts' => %w(foo bar baz) }) + + expect(described_class.hosts).to eq(%w(foo bar baz)) + end + end + + describe '.pool_size' do + it 'returns a Fixnum' do + expect(described_class.pool_size).to be_a_kind_of(Integer) + end + end + + describe '.enable?' do + before do + clear_load_balancing_configuration + allow(described_class).to receive(:hosts).and_return(%w(foo)) + end + + it 'returns false when no hosts are specified' do + allow(described_class).to receive(:hosts).and_return([]) + + expect(described_class.enable?).to eq(false) + end + + it 'returns false when Sidekiq is being used' do + allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true) + + expect(described_class.enable?).to eq(false) + end + + it 'returns false when running inside a Rake task' do + allow(Gitlab::Runtime).to receive(:rake?).and_return(true) + + expect(described_class.enable?).to eq(false) + end + + it 'returns true when load balancing should be enabled' do + allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(false) + + expect(described_class.enable?).to eq(true) + end + + it 'returns true when service discovery is enabled' do + allow(described_class).to receive(:hosts).and_return([]) + allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(false) + + allow(described_class) + .to receive(:service_discovery_enabled?) 
+ .and_return(true) + + expect(described_class.enable?).to eq(true) + end + + context 'when ENABLE_LOAD_BALANCING_FOR_SIDEKIQ environment variable is set' do + before do + stub_env('ENABLE_LOAD_BALANCING_FOR_SIDEKIQ', 'true') + end + + it 'returns true when Sidekiq is being used' do + allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true) + + expect(described_class.enable?).to eq(true) + end + end + end + + describe '.configured?' do + before do + clear_load_balancing_configuration + end + + it 'returns true when Sidekiq is being used' do + allow(described_class).to receive(:hosts).and_return(%w(foo)) + allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true) + expect(described_class.configured?).to eq(true) + end + + it 'returns true when service discovery is enabled in Sidekiq' do + allow(described_class).to receive(:hosts).and_return([]) + allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true) + + allow(described_class) + .to receive(:service_discovery_enabled?) + .and_return(true) + + expect(described_class.configured?).to eq(true) + end + + it 'returns false when neither service discovery nor hosts are configured' do + allow(described_class).to receive(:hosts).and_return([]) + + allow(described_class) + .to receive(:service_discovery_enabled?) + .and_return(false) + + expect(described_class.configured?).to eq(false) + end + end + + describe '.configure_proxy' do + it 'configures the connection proxy' do + allow(ActiveRecord::Base.singleton_class).to receive(:prepend) + + described_class.configure_proxy + + expect(ActiveRecord::Base.singleton_class).to have_received(:prepend) + .with(Gitlab::Database::LoadBalancing::ActiveRecordProxy) + end + end + + describe '.active_record_models' do + it 'returns an Array' do + expect(described_class.active_record_models).to be_an_instance_of(Array) + end + end + + describe '.service_discovery_enabled?' 
do + it 'returns true if service discovery is enabled' do + allow(described_class) + .to receive(:configuration) + .and_return('discover' => { 'record' => 'foo' }) + + expect(described_class.service_discovery_enabled?).to eq(true) + end + + it 'returns false if service discovery is disabled' do + expect(described_class.service_discovery_enabled?).to eq(false) + end + end + + describe '.service_discovery_configuration' do + context 'when no configuration is provided' do + it 'returns a default configuration Hash' do + expect(described_class.service_discovery_configuration).to eq( + nameserver: 'localhost', + port: 8600, + record: nil, + record_type: 'A', + interval: 60, + disconnect_timeout: 120, + use_tcp: false + ) + end + end + + context 'when configuration is provided' do + it 'returns a Hash including the custom configuration' do + allow(described_class) + .to receive(:configuration) + .and_return('discover' => { 'record' => 'foo', 'record_type' => 'SRV' }) + + expect(described_class.service_discovery_configuration).to eq( + nameserver: 'localhost', + port: 8600, + record: 'foo', + record_type: 'SRV', + interval: 60, + disconnect_timeout: 120, + use_tcp: false + ) + end + end + end + + describe '.start_service_discovery' do + it 'does not start if service discovery is disabled' do + expect(Gitlab::Database::LoadBalancing::ServiceDiscovery) + .not_to receive(:new) + + described_class.start_service_discovery + end + + it 'starts service discovery if enabled' do + allow(described_class) + .to receive(:service_discovery_enabled?) 
+ .and_return(true) + + instance = double(:instance) + + expect(Gitlab::Database::LoadBalancing::ServiceDiscovery) + .to receive(:new) + .with(an_instance_of(Hash)) + .and_return(instance) + + expect(instance) + .to receive(:start) + + described_class.start_service_discovery + end + end + + describe '.db_role_for_connection' do + let(:connection) { double(:conneciton) } + + context 'when the load balancing is not configured' do + before do + allow(described_class).to receive(:enable?).and_return(false) + end + + it 'returns primary' do + expect(described_class.db_role_for_connection(connection)).to be(:primary) + end + end + + context 'when the load balancing is configured' do + let(:proxy) { described_class::ConnectionProxy.new(%w(foo)) } + let(:load_balancer) { described_class::LoadBalancer.new(%w(foo)) } + + before do + allow(ActiveRecord::Base.singleton_class).to receive(:prepend) + + allow(described_class).to receive(:enable?).and_return(true) + allow(described_class).to receive(:proxy).and_return(proxy) + allow(proxy).to receive(:load_balancer).and_return(load_balancer) + + subject.configure_proxy(proxy) + end + + context 'when the load balancer returns :replica' do + it 'returns :replica' do + allow(load_balancer).to receive(:db_role_for_connection).and_return(:replica) + + expect(described_class.db_role_for_connection(connection)).to be(:replica) + + expect(load_balancer).to have_received(:db_role_for_connection).with(connection) + end + end + + context 'when the load balancer returns :primary' do + it 'returns :primary' do + allow(load_balancer).to receive(:db_role_for_connection).and_return(:primary) + + expect(described_class.db_role_for_connection(connection)).to be(:primary) + + expect(load_balancer).to have_received(:db_role_for_connection).with(connection) + end + end + + context 'when the load balancer returns nil' do + it 'returns nil' do + allow(load_balancer).to receive(:db_role_for_connection).and_return(nil) + + 
expect(described_class.db_role_for_connection(connection)).to be(nil) + + expect(load_balancer).to have_received(:db_role_for_connection).with(connection) + end + end + end + end + + # For such an important module like LoadBalancing, full mocking is not + # enough. This section implements some integration tests to test a full flow + # of the load balancer. + # - A real model with a table backed behind is defined + # - The load balancing module is set up for this module only, as to prevent + # breaking other tests. The replica configuration is cloned from the test + # configuration. + # - In each test, we listen to the SQL queries (via sql.active_record + # instrumentation) while triggering real queries from the defined model. + # - We assert the destinations (replica/primary) of the queries in order. + describe 'LoadBalancing integration tests', :delete do + before(:all) do + ActiveRecord::Schema.define do + create_table :load_balancing_test, force: true do |t| + t.string :name, null: true + end + end + end + + after(:all) do + ActiveRecord::Schema.define do + drop_table :load_balancing_test, force: true + end + end + + shared_context 'LoadBalancing setup' do + let(:development_db_config) { ActiveRecord::Base.configurations.configs_for(env_name: 'development').first.configuration_hash } + let(:hosts) { [development_db_config[:host]] } + let(:model) do + Class.new(ApplicationRecord) do + self.table_name = "load_balancing_test" + end + end + + before do + # Preloading testing class + model.singleton_class.prepend ::Gitlab::Database::LoadBalancing::ActiveRecordProxy + + # Setup load balancing + clear_load_balancing_configuration + allow(ActiveRecord::Base.singleton_class).to receive(:prepend) + subject.configure_proxy(::Gitlab::Database::LoadBalancing::ConnectionProxy.new(hosts)) + + original_db_config = Gitlab::Database.config + modified_db_config = original_db_config.merge(load_balancing: { hosts: hosts }) + allow(Gitlab::Database).to 
receive(:config).and_return(modified_db_config) + + ::Gitlab::Database::LoadBalancing::Session.clear_session + end + end + + where(:queries, :include_transaction, :expected_results) do + [ + # Read methods + [-> { model.first }, false, [:replica]], + [-> { model.find_by(id: 123) }, false, [:replica]], + [-> { model.where(name: 'hello').to_a }, false, [:replica]], + + # Write methods + [-> { model.create!(name: 'test1') }, false, [:primary]], + [ + -> { + instance = model.create!(name: 'test1') + instance.update!(name: 'test2') + }, + false, [:primary, :primary] + ], + [-> { model.update_all(name: 'test2') }, false, [:primary]], + [ + -> { + instance = model.create!(name: 'test1') + instance.destroy! + }, + false, [:primary, :primary] + ], + [-> { model.delete_all }, false, [:primary]], + + # Custom query + [-> { model.connection.exec_query('SELECT 1').to_a }, false, [:primary]], + + # Reads after a write + [ + -> { + model.first + model.create!(name: 'test1') + model.first + model.find_by(name: 'test1') + }, + false, [:replica, :primary, :primary, :primary] + ], + + # Inside a transaction + [ + -> { + model.transaction do + model.find_by(name: 'test1') + model.create!(name: 'test1') + instance = model.find_by(name: 'test1') + instance.update!(name: 'test2') + end + model.find_by(name: 'test1') + }, + true, [:primary, :primary, :primary, :primary, :primary, :primary, :primary] + ], + + # Nested transaction + [ + -> { + model.transaction do + model.transaction do + model.create!(name: 'test1') + end + model.update_all(name: 'test2') + end + model.find_by(name: 'test1') + }, + true, [:primary, :primary, :primary, :primary, :primary] + ], + + # Read-only transaction + [ + -> { + model.transaction do + model.first + model.where(name: 'test1').to_a + end + }, + true, [:primary, :primary, :primary, :primary] + ], + + # use_primary + [ + -> { + ::Gitlab::Database::LoadBalancing::Session.current.use_primary do + model.first + model.where(name: 'test1').to_a + end + 
model.first + }, + false, [:primary, :primary, :replica] + ], + + # use_primary! + [ + -> { + model.first + ::Gitlab::Database::LoadBalancing::Session.current.use_primary! + model.where(name: 'test1').to_a + }, + false, [:replica, :primary] + ], + + # use_replicas_for_read_queries does not affect read queries + [ + -> { + ::Gitlab::Database::LoadBalancing::Session.current.use_replicas_for_read_queries do + model.where(name: 'test1').to_a + end + }, + false, [:replica] + ], + + # use_replicas_for_read_queries does not affect write queries + [ + -> { + ::Gitlab::Database::LoadBalancing::Session.current.use_replicas_for_read_queries do + model.create!(name: 'test1') + end + }, + false, [:primary] + ], + + # use_replicas_for_read_queries does not affect ambiguous queries + [ + -> { + ::Gitlab::Database::LoadBalancing::Session.current.use_replicas_for_read_queries do + model.connection.exec_query("SELECT 1") + end + }, + false, [:primary] + ], + + # use_replicas_for_read_queries ignores use_primary! for read queries + [ + -> { + ::Gitlab::Database::LoadBalancing::Session.current.use_primary! + ::Gitlab::Database::LoadBalancing::Session.current.use_replicas_for_read_queries do + model.where(name: 'test1').to_a + end + }, + false, [:replica] + ], + + # use_replicas_for_read_queries adheres use_primary! for write queries + [ + -> { + ::Gitlab::Database::LoadBalancing::Session.current.use_primary! + ::Gitlab::Database::LoadBalancing::Session.current.use_replicas_for_read_queries do + model.create!(name: 'test1') + end + }, + false, [:primary] + ], + + # use_replicas_for_read_queries adheres use_primary! for ambiguous queries + [ + -> { + ::Gitlab::Database::LoadBalancing::Session.current.use_primary! 
+ ::Gitlab::Database::LoadBalancing::Session.current.use_replicas_for_read_queries do + model.connection.exec_query('SELECT 1') + end + }, + false, [:primary] + ], + + # use_replicas_for_read_queries ignores use_primary blocks + [ + -> { + ::Gitlab::Database::LoadBalancing::Session.current.use_primary do + ::Gitlab::Database::LoadBalancing::Session.current.use_replicas_for_read_queries do + model.where(name: 'test1').to_a + end + end + }, + false, [:replica] + ], + + # use_replicas_for_read_queries ignores a session already performed write + [ + -> { + ::Gitlab::Database::LoadBalancing::Session.current.write! + ::Gitlab::Database::LoadBalancing::Session.current.use_replicas_for_read_queries do + model.where(name: 'test1').to_a + end + }, + false, [:replica] + ], + + # fallback_to_replicas_for_ambiguous_queries + [ + -> { + ::Gitlab::Database::LoadBalancing::Session.current.fallback_to_replicas_for_ambiguous_queries do + model.first + model.where(name: 'test1').to_a + end + }, + false, [:replica, :replica] + ], + + # fallback_to_replicas_for_ambiguous_queries for read-only transaction + [ + -> { + ::Gitlab::Database::LoadBalancing::Session.current.fallback_to_replicas_for_ambiguous_queries do + model.transaction do + model.first + model.where(name: 'test1').to_a + end + end + }, + false, [:replica, :replica] + ], + + # A custom read query inside fallback_to_replicas_for_ambiguous_queries + [ + -> { + ::Gitlab::Database::LoadBalancing::Session.current.fallback_to_replicas_for_ambiguous_queries do + model.connection.exec_query("SELECT 1") + end + }, + false, [:replica] + ], + + # A custom read query inside a transaction fallback_to_replicas_for_ambiguous_queries + [ + -> { + ::Gitlab::Database::LoadBalancing::Session.current.fallback_to_replicas_for_ambiguous_queries do + model.transaction do + model.connection.exec_query("SET LOCAL statement_timeout = 5000") + model.count + end + end + }, + true, [:replica, :replica, :replica, :replica] + ], + + # 
fallback_to_replicas_for_ambiguous_queries after a write + [ + -> { + model.create!(name: 'Test1') + ::Gitlab::Database::LoadBalancing::Session.current.fallback_to_replicas_for_ambiguous_queries do + model.connection.exec_query("SELECT 1") + end + }, + false, [:primary, :primary] + ], + + # fallback_to_replicas_for_ambiguous_queries after use_primary! + [ + -> { + ::Gitlab::Database::LoadBalancing::Session.current.use_primary! + ::Gitlab::Database::LoadBalancing::Session.current.fallback_to_replicas_for_ambiguous_queries do + model.connection.exec_query("SELECT 1") + end + }, + false, [:primary] + ], + + # fallback_to_replicas_for_ambiguous_queries inside use_primary + [ + -> { + ::Gitlab::Database::LoadBalancing::Session.current.use_primary do + ::Gitlab::Database::LoadBalancing::Session.current.fallback_to_replicas_for_ambiguous_queries do + model.connection.exec_query("SELECT 1") + end + end + }, + false, [:primary] + ], + + # use_primary inside fallback_to_replicas_for_ambiguous_queries + [ + -> { + ::Gitlab::Database::LoadBalancing::Session.current.fallback_to_replicas_for_ambiguous_queries do + ::Gitlab::Database::LoadBalancing::Session.current.use_primary do + model.connection.exec_query("SELECT 1") + end + end + }, + false, [:primary] + ], + + # A write query inside fallback_to_replicas_for_ambiguous_queries + [ + -> { + ::Gitlab::Database::LoadBalancing::Session.current.fallback_to_replicas_for_ambiguous_queries do + model.connection.exec_query("SELECT 1") + model.delete_all + model.connection.exec_query("SELECT 1") + end + }, + false, [:replica, :primary, :primary] + ], + + # use_replicas_for_read_queries incorporates with fallback_to_replicas_for_ambiguous_queries + [ + -> { + ::Gitlab::Database::LoadBalancing::Session.current.use_replicas_for_read_queries do + ::Gitlab::Database::LoadBalancing::Session.current.fallback_to_replicas_for_ambiguous_queries do + model.connection.exec_query('SELECT 1') + model.where(name: 'test1').to_a + end + end + }, + 
false, [:replica, :replica] + ] + ] + end + + with_them do + include_context 'LoadBalancing setup' + + it 'redirects queries to the right roles' do + roles = [] + + subscriber = ActiveSupport::Notifications.subscribe('sql.active_record') do |event| + payload = event.payload + + assert = + if payload[:name] == 'SCHEMA' + false + elsif payload[:name] == 'SQL' # Custom query + true + else + keywords = %w[load_balancing_test] + keywords += %w[begin commit] if include_transaction + keywords.any? { |keyword| payload[:sql].downcase.include?(keyword) } + end + + if assert + db_role = ::Gitlab::Database::LoadBalancing.db_role_for_connection(payload[:connection]) + roles << db_role + end + end + + self.instance_exec(&queries) + + expect(roles).to eql(expected_results) + ensure + ActiveSupport::Notifications.unsubscribe(subscriber) if subscriber + end + end + + context 'custom connection handling' do + where(:queries, :expected_role) do + [ + # Reload cache. The schema loading queries should be handled by + # primary. + [ + -> { + model.connection.clear_cache! + model.connection.schema_cache.add('users') + model.connection.pool.release_connection + }, + :primary + ], + + # Call model's connection method + [ + -> { + connection = model.connection + connection.select_one('SELECT 1') + connection.pool.release_connection + }, + :replica + ], + + # Retrieve connection via #retrieve_connection + [ + -> { + connection = model.retrieve_connection + connection.select_one('SELECT 1') + connection.pool.release_connection + }, + :primary + ] + ] + end + + with_them do + include_context 'LoadBalancing setup' + + it 'redirects queries to the right roles' do + roles = [] + + subscriber = ActiveSupport::Notifications.subscribe('sql.active_record') do |event| + role = ::Gitlab::Database::LoadBalancing.db_role_for_connection(event.payload[:connection]) + roles << role if role.present? 
+ end + + self.instance_exec(&queries) + + expect(roles).to all(eql(expected_role)) + ensure + ActiveSupport::Notifications.unsubscribe(subscriber) if subscriber + end + end + end + + context 'a write inside a transaction inside fallback_to_replicas_for_ambiguous_queries block' do + include_context 'LoadBalancing setup' + + it 'raises an exception' do + expect do + ::Gitlab::Database::LoadBalancing::Session.current.fallback_to_replicas_for_ambiguous_queries do + model.transaction do + model.first + model.create!(name: 'hello') + end + end + end.to raise_error(Gitlab::Database::LoadBalancing::ConnectionProxy::WriteInsideReadOnlyTransactionError) + end + end + end +end diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb index 40720628a89..f0ea07646fb 100644 --- a/spec/lib/gitlab/database/migration_helpers_spec.rb +++ b/spec/lib/gitlab/database/migration_helpers_spec.rb @@ -2001,6 +2001,41 @@ RSpec.describe Gitlab::Database::MigrationHelpers do end end + describe '#ensure_batched_background_migration_is_finished' do + let(:configuration) do + { + job_class_name: 'CopyColumnUsingBackgroundMigrationJob', + table_name: :events, + column_name: :id, + job_arguments: [[:id], [:id_convert_to_bigint]] + } + end + + subject(:ensure_batched_background_migration_is_finished) { model.ensure_batched_background_migration_is_finished(**configuration) } + + it 'raises an error when migration exists and is not marked as finished' do + create(:batched_background_migration, configuration.merge(status: :active)) + + expect { ensure_batched_background_migration_is_finished } + .to raise_error "Expected batched background migration for the given configuration to be marked as 'finished', but it is 'active': #{configuration}" + end + + it 'does not raise error when migration exists and is marked as finished' do + create(:batched_background_migration, configuration.merge(status: :finished)) + + expect { 
ensure_batched_background_migration_is_finished } + .not_to raise_error + end + + it 'logs a warning when migration does not exist' do + expect(Gitlab::AppLogger).to receive(:warn) + .with("Could not find batched background migration for the given configuration: #{configuration}") + + expect { ensure_batched_background_migration_is_finished } + .not_to raise_error + end + end + describe '#index_exists_by_name?' do it 'returns true if an index exists' do ActiveRecord::Base.connection.execute( diff --git a/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb b/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb index c6d456964cf..e096e7f6e91 100644 --- a/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb +++ b/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb @@ -242,6 +242,98 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do end end + describe '#requeue_background_migration_jobs_by_range_at_intervals' do + let!(:job_class_name) { 'TestJob' } + let!(:pending_job_1) { create(:background_migration_job, class_name: job_class_name, status: :pending, arguments: [1, 2]) } + let!(:pending_job_2) { create(:background_migration_job, class_name: job_class_name, status: :pending, arguments: [3, 4]) } + let!(:successful_job_1) { create(:background_migration_job, class_name: job_class_name, status: :succeeded, arguments: [5, 6]) } + let!(:successful_job_2) { create(:background_migration_job, class_name: job_class_name, status: :succeeded, arguments: [7, 8]) } + + around do |example| + freeze_time do + Sidekiq::Testing.fake! 
do + example.run + end + end + end + + subject { model.requeue_background_migration_jobs_by_range_at_intervals(job_class_name, 10.minutes) } + + it 'returns the expected duration' do + expect(subject).to eq(20.minutes) + end + + context 'when nothing is queued' do + subject { model.requeue_background_migration_jobs_by_range_at_intervals('FakeJob', 10.minutes) } + + it 'returns expected duration of zero when nothing gets queued' do + expect(subject).to eq(0) + end + end + + it 'queues pending jobs' do + subject + + expect(BackgroundMigrationWorker.jobs[0]['args']).to eq([job_class_name, [1, 2]]) + expect(BackgroundMigrationWorker.jobs[0]['at']).to be_nil + expect(BackgroundMigrationWorker.jobs[1]['args']).to eq([job_class_name, [3, 4]]) + expect(BackgroundMigrationWorker.jobs[1]['at']).to eq(10.minutes.from_now.to_f) + end + + context 'with batch_size option' do + subject { model.requeue_background_migration_jobs_by_range_at_intervals(job_class_name, 10.minutes, batch_size: 1) } + + it 'returns the expected duration' do + expect(subject).to eq(20.minutes) + end + + it 'queues pending jobs' do + subject + + expect(BackgroundMigrationWorker.jobs[0]['args']).to eq([job_class_name, [1, 2]]) + expect(BackgroundMigrationWorker.jobs[0]['at']).to be_nil + expect(BackgroundMigrationWorker.jobs[1]['args']).to eq([job_class_name, [3, 4]]) + expect(BackgroundMigrationWorker.jobs[1]['at']).to eq(10.minutes.from_now.to_f) + end + + it 'retrieve jobs in batches' do + jobs = double('jobs') + expect(Gitlab::Database::BackgroundMigrationJob).to receive(:pending) { jobs } + allow(jobs).to receive(:where).with(class_name: job_class_name) { jobs } + expect(jobs).to receive(:each_batch).with(of: 1) + + subject + end + end + + context 'with initial_delay option' do + let_it_be(:initial_delay) { 3.minutes } + + subject { model.requeue_background_migration_jobs_by_range_at_intervals(job_class_name, 10.minutes, initial_delay: initial_delay) } + + it 'returns the expected duration' do + 
expect(subject).to eq(23.minutes) + end + + it 'queues pending jobs' do + subject + + expect(BackgroundMigrationWorker.jobs[0]['args']).to eq([job_class_name, [1, 2]]) + expect(BackgroundMigrationWorker.jobs[0]['at']).to eq(3.minutes.from_now.to_f) + expect(BackgroundMigrationWorker.jobs[1]['args']).to eq([job_class_name, [3, 4]]) + expect(BackgroundMigrationWorker.jobs[1]['at']).to eq(13.minutes.from_now.to_f) + end + + context 'when nothing is queued' do + subject { model.requeue_background_migration_jobs_by_range_at_intervals('FakeJob', 10.minutes) } + + it 'returns expected duration of zero when nothing gets queued' do + expect(subject).to eq(0) + end + end + end + end + describe '#perform_background_migration_inline?' do it 'returns true in a test environment' do stub_rails_env('test') @@ -269,6 +361,38 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do allow(Gitlab::Database::PgClass).to receive(:for_table).and_call_original end + context 'when such migration already exists' do + it 'does not create duplicate migration' do + create( + :batched_background_migration, + job_class_name: 'MyJobClass', + table_name: :projects, + column_name: :id, + interval: 10.minutes, + min_value: 5, + max_value: 1005, + batch_class_name: 'MyBatchClass', + batch_size: 200, + sub_batch_size: 20, + job_arguments: [[:id], [:id_convert_to_bigint]] + ) + + expect do + model.queue_batched_background_migration( + 'MyJobClass', + :projects, + :id, + [:id], [:id_convert_to_bigint], + job_interval: 5.minutes, + batch_min_value: 5, + batch_max_value: 1000, + batch_class_name: 'MyBatchClass', + batch_size: 100, + sub_batch_size: 10) + end.not_to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count } + end + end + it 'creates the database record for the migration' do expect(Gitlab::Database::PgClass).to receive(:for_table).with(:projects).and_return(pgclass_info) diff --git 
a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb index 79ddb450d7a..4f1d6302331 100644 --- a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb +++ b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb @@ -580,7 +580,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe it 'idempotently cleans up after failed background migrations' do expect(partitioned_model.count).to eq(0) - partitioned_model.insert!(record2.attributes) + partitioned_model.insert(record2.attributes, unique_by: [:id, :created_at]) expect_next_instance_of(Gitlab::Database::PartitioningMigrationHelpers::BackfillPartitionedTable) do |backfill| allow(backfill).to receive(:transaction_open?).and_return(false) diff --git a/spec/lib/gitlab/database/postgresql_adapter/type_map_cache_spec.rb b/spec/lib/gitlab/database/postgresql_adapter/type_map_cache_spec.rb new file mode 100644 index 00000000000..e9c512f94bb --- /dev/null +++ b/spec/lib/gitlab/database/postgresql_adapter/type_map_cache_spec.rb @@ -0,0 +1,68 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::PostgresqlAdapter::TypeMapCache do + let(:db_config) { ActiveRecord::Base.configurations.configs_for(env_name: 'test', name: 'primary').configuration_hash } + let(:adapter_class) { ActiveRecord::ConnectionAdapters::PostgreSQLAdapter } + + before do + adapter_class.type_map_cache.clear + end + + describe '#initialize_type_map' do + it 'caches loading of types in memory' do + recorder_without_cache = ActiveRecord::QueryRecorder.new(skip_schema_queries: false) { initialize_connection.disconnect! 
} + expect(recorder_without_cache.log).to include(a_string_matching(/FROM pg_type/)).twice + + recorder_with_cache = ActiveRecord::QueryRecorder.new(skip_schema_queries: false) { initialize_connection.disconnect! } + + expect(recorder_with_cache.count).to be < recorder_without_cache.count + + # There's still one pg_type query left here because `#add_pg_decoders` executes another pg_type query + # in https://github.com/rails/rails/blob/v6.1.3.2/activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb#L912. + # This query is much cheaper because it only returns very few records. + expect(recorder_with_cache.log).to include(a_string_matching(/FROM pg_type/)).once + end + + it 'only reuses the cache if the connection parameters are exactly the same' do + initialize_connection.disconnect! + + other_config = db_config.dup + other_config[:connect_timeout] = db_config[:connect_timeout].to_i + 10 + + recorder = ActiveRecord::QueryRecorder.new(skip_schema_queries: false) { initialize_connection(other_config).disconnect! } + + expect(recorder.log).to include(a_string_matching(/FROM pg_type/)).twice + end + end + + describe '#reload_type_map' do + it 'clears the cache and executes the type map query again' do + initialize_connection.disconnect! 
+ + connection = initialize_connection + recorder = ActiveRecord::QueryRecorder.new(skip_schema_queries: false) { connection.reload_type_map } + + expect(recorder.log).to include(a_string_matching(/FROM pg_type/)).once + end + end + + # Based on https://github.com/rails/rails/blob/v6.1.3.2/activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb#L36-L41 + def initialize_connection(config = db_config) + conn_params = config.symbolize_keys.compact + + conn_params[:user] = conn_params.delete(:username) if conn_params[:username] + conn_params[:dbname] = conn_params.delete(:database) if conn_params[:database] + + valid_conn_param_keys = PG::Connection.conndefaults_hash.keys + [:requiressl] + conn_params.slice!(*valid_conn_param_keys) + + adapter_class.new( + adapter_class.new_client(conn_params), + ActiveRecord::Base.logger, + conn_params, + config + ) + end +end diff --git a/spec/lib/gitlab/database/with_lock_retries_spec.rb b/spec/lib/gitlab/database/with_lock_retries_spec.rb index b08f39fc92a..df2c506e163 100644 --- a/spec/lib/gitlab/database/with_lock_retries_spec.rb +++ b/spec/lib/gitlab/database/with_lock_retries_spec.rb @@ -242,10 +242,10 @@ RSpec.describe Gitlab::Database::WithLockRetries do let(:timing_configuration) { [[0.015.seconds, 0.025.seconds], [0.015.seconds, 0.025.seconds]] } # 15ms, 25ms it 'executes `SET LOCAL lock_timeout` using the configured timeout value in milliseconds' do - expect(ActiveRecord::Base.connection).to receive(:execute).with("SAVEPOINT active_record_1").and_call_original - expect(ActiveRecord::Base.connection).to receive(:execute).with('RESET idle_in_transaction_session_timeout; RESET lock_timeout').and_call_original + expect(ActiveRecord::Base.connection).to receive(:execute).with("RESET idle_in_transaction_session_timeout; RESET lock_timeout").and_call_original + expect(ActiveRecord::Base.connection).to receive(:execute).with("SAVEPOINT active_record_1", "TRANSACTION").and_call_original 
expect(ActiveRecord::Base.connection).to receive(:execute).with("SET LOCAL lock_timeout TO '15ms'").and_call_original - expect(ActiveRecord::Base.connection).to receive(:execute).with("RELEASE SAVEPOINT active_record_1").and_call_original + expect(ActiveRecord::Base.connection).to receive(:execute).with("RELEASE SAVEPOINT active_record_1", "TRANSACTION").and_call_original subject.run { } end diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb index 663c8d69328..847f7ec2d74 100644 --- a/spec/lib/gitlab/database_spec.rb +++ b/spec/lib/gitlab/database_spec.rb @@ -65,6 +65,28 @@ RSpec.describe Gitlab::Database do end end + describe '.disable_prepared_statements' do + around do |example| + original_config = ::Gitlab::Database.config + + example.run + + ActiveRecord::Base.establish_connection(original_config) + end + + it 'disables prepared statements' do + ActiveRecord::Base.establish_connection(::Gitlab::Database.config.merge(prepared_statements: true)) + expect(ActiveRecord::Base.connection.prepared_statements).to eq(true) + + expect(ActiveRecord::Base).to receive(:establish_connection) + .with(a_hash_including({ 'prepared_statements' => false })).and_call_original + + described_class.disable_prepared_statements + + expect(ActiveRecord::Base.connection.prepared_statements).to eq(false) + end + end + describe '.postgresql?' do subject { described_class.postgresql? 
} @@ -103,10 +125,10 @@ RSpec.describe Gitlab::Database do expect(described_class.postgresql_minimum_supported_version?).to eq(false) end - it 'returns true when using PostgreSQL 11' do + it 'returns false when using PostgreSQL 11' do allow(described_class).to receive(:version).and_return('11') - expect(described_class.postgresql_minimum_supported_version?).to eq(true) + expect(described_class.postgresql_minimum_supported_version?).to eq(false) end it 'returns true when using PostgreSQL 12' do @@ -307,7 +329,7 @@ RSpec.describe Gitlab::Database do expect(pool) .to be_kind_of(ActiveRecord::ConnectionAdapters::ConnectionPool) - expect(pool.spec.config[:pool]).to eq(5) + expect(pool.db_config.pool).to eq(5) ensure pool.disconnect! end @@ -317,7 +339,7 @@ RSpec.describe Gitlab::Database do pool = described_class.create_connection_pool(5, '127.0.0.1') begin - expect(pool.spec.config[:host]).to eq('127.0.0.1') + expect(pool.db_config.host).to eq('127.0.0.1') ensure pool.disconnect! end @@ -327,8 +349,8 @@ RSpec.describe Gitlab::Database do pool = described_class.create_connection_pool(5, '127.0.0.1', 5432) begin - expect(pool.spec.config[:host]).to eq('127.0.0.1') - expect(pool.spec.config[:port]).to eq(5432) + expect(pool.db_config.host).to eq('127.0.0.1') + expect(pool.db_config.configuration_hash[:port]).to eq(5432) ensure pool.disconnect! 
end diff --git a/spec/lib/gitlab/diff/highlight_cache_spec.rb b/spec/lib/gitlab/diff/highlight_cache_spec.rb index 4c56911e665..9e94a63ea4b 100644 --- a/spec/lib/gitlab/diff/highlight_cache_spec.rb +++ b/spec/lib/gitlab/diff/highlight_cache_spec.rb @@ -238,17 +238,7 @@ RSpec.describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache do subject { cache.key } it 'returns cache key' do - is_expected.to eq("highlighted-diff-files:#{cache.diffable.cache_key}:2:#{cache.diff_options}:true:true:true") - end - - context 'when the `introduce_marker_ranges` feature flag is disabled' do - before do - stub_feature_flags(introduce_marker_ranges: false) - end - - it 'returns the original version of the cache' do - is_expected.to eq("highlighted-diff-files:#{cache.diffable.cache_key}:2:#{cache.diff_options}:false:true:true") - end + is_expected.to eq("highlighted-diff-files:#{cache.diffable.cache_key}:2:#{cache.diff_options}:true:true") end context 'when the `use_marker_ranges` feature flag is disabled' do @@ -257,7 +247,7 @@ RSpec.describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache do end it 'returns the original version of the cache' do - is_expected.to eq("highlighted-diff-files:#{cache.diffable.cache_key}:2:#{cache.diff_options}:true:false:true") + is_expected.to eq("highlighted-diff-files:#{cache.diffable.cache_key}:2:#{cache.diff_options}:false:true") end end @@ -267,7 +257,7 @@ RSpec.describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache do end it 'returns the original version of the cache' do - is_expected.to eq("highlighted-diff-files:#{cache.diffable.cache_key}:2:#{cache.diff_options}:true:true:false") + is_expected.to eq("highlighted-diff-files:#{cache.diffable.cache_key}:2:#{cache.diff_options}:true:false") end end end diff --git a/spec/lib/gitlab/diff/highlight_spec.rb b/spec/lib/gitlab/diff/highlight_spec.rb index 94d3b2ad0b3..94b28c38fa2 100644 --- a/spec/lib/gitlab/diff/highlight_spec.rb +++ b/spec/lib/gitlab/diff/highlight_spec.rb @@ 
-56,26 +56,6 @@ RSpec.describe Gitlab::Diff::Highlight do expect(subject[5].rich_text).to eq(code) end - context 'when introduce_marker_ranges is false' do - before do - stub_feature_flags(introduce_marker_ranges: false) - end - - it 'keeps the old bevavior (without mode classes)' do - code = %Q{+<span id="LC9" class="line" lang="ruby"> <span class="k">raise</span> <span class="no"><span class="idiff left">RuntimeError</span></span><span class="p"><span class="idiff">,</span></span><span class="idiff right"> </span><span class="s2">"System commands must be given as an array of strings"</span></span>\n} - - expect(subject[5].rich_text).to eq(code) - end - - context 'when use_marker_ranges feature flag is false too' do - it 'does not affect the result' do - code = %Q{+<span id="LC9" class="line" lang="ruby"> <span class="k">raise</span> <span class="no"><span class="idiff left">RuntimeError</span></span><span class="p"><span class="idiff">,</span></span><span class="idiff right"> </span><span class="s2">"System commands must be given as an array of strings"</span></span>\n} - - expect(subject[5].rich_text).to eq(code) - end - end - end - context 'when no diff_refs' do before do allow(diff_file).to receive(:diff_refs).and_return(nil) diff --git a/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb b/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb index 1a7d837af73..dd230140b30 100644 --- a/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb +++ b/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb @@ -128,6 +128,14 @@ RSpec.describe Gitlab::Email::Handler::CreateIssueHandler do expect { receiver.execute }.to raise_error(Gitlab::Email::ProjectNotFound) end end + + context 'when project ID is invalid' do + it 'raises a ProjectNotFound' do + handler = described_class.new(email_raw, "gitlabhq-gitlabhq-#{Gitlab::Database::MAX_INT_VALUE}-#{user.incoming_email_token}-issue") + + expect { handler.execute }.to 
raise_error(Gitlab::Email::ProjectNotFound) + end + end end def email_fixture(path) diff --git a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb index 6d26b3e1064..3a60564d8d2 100644 --- a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb +++ b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb @@ -168,7 +168,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do end context 'when using service desk key' do - let_it_be(:service_desk_settings) { create(:service_desk_setting, project: project, project_key: 'mykey') } + let_it_be(:service_desk_key) { 'mykey' } let(:email_raw) { service_desk_fixture('emails/service_desk_custom_address.eml') } let(:receiver) { Gitlab::Email::ServiceDeskReceiver.new(email_raw) } @@ -176,6 +176,10 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do stub_service_desk_email_setting(enabled: true, address: 'support+%{key}@example.com') end + before_all do + create(:service_desk_setting, project: project, project_key: service_desk_key) + end + it_behaves_like 'a new issue request' context 'when there is no project with the key' do @@ -193,6 +197,20 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do expect { receiver.execute }.to raise_error(Gitlab::Email::ProjectNotFound) end end + + context 'when there are multiple projects with same key' do + let_it_be(:project_with_same_key) { create(:project, group: group, service_desk_enabled: true) } + let(:email_raw) { service_desk_fixture('emails/service_desk_custom_address.eml', slug: project_with_same_key.full_path_slug.to_s) } + + before do + create(:service_desk_setting, project: project_with_same_key, project_key: service_desk_key) + end + + it 'process email for project with matching slug' do + expect { receiver.execute }.to change { Issue.count }.by(1) + expect(Issue.last.project).to eq(project_with_same_key) + end + end end end diff --git 
a/spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb index 42d84b3e4de..277f1158f8b 100644 --- a/spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb +++ b/spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb @@ -4,12 +4,13 @@ require 'spec_helper' RSpec.describe Gitlab::Email::Message::InProductMarketing::Base do let_it_be(:group) { build(:group) } + let_it_be(:user) { build(:user) } let(:series) { 0 } let(:test_class) { Gitlab::Email::Message::InProductMarketing::Create } describe 'initialize' do - subject { test_class.new(group: group, series: series) } + subject { test_class.new(group: group, user: user, series: series) } context 'when series does not exist' do let(:series) { 3 } @@ -29,13 +30,13 @@ RSpec.describe Gitlab::Email::Message::InProductMarketing::Base do end describe '#logo_path' do - subject { test_class.new(group: group, series: series).logo_path } + subject { test_class.new(group: group, user: user, series: series).logo_path } it { is_expected.to eq('mailers/in_product_marketing/create-0.png') } end describe '#unsubscribe' do - subject { test_class.new(group: group, series: series).unsubscribe } + subject { test_class.new(group: group, user: user, series: series).unsubscribe } before do allow(Gitlab).to receive(:com?).and_return(is_gitlab_com) @@ -55,7 +56,7 @@ RSpec.describe Gitlab::Email::Message::InProductMarketing::Base do end describe '#cta_link' do - subject(:cta_link) { test_class.new(group: group, series: series).cta_link } + subject(:cta_link) { test_class.new(group: group, user: user, series: series).cta_link } it 'renders link' do expect(CGI.unescapeHTML(cta_link)).to include(Gitlab::Routing.url_helpers.group_email_campaigns_url(group, track: :create, series: series)) @@ -63,7 +64,7 @@ RSpec.describe Gitlab::Email::Message::InProductMarketing::Base do end describe '#progress' do - subject { test_class.new(group: group, series: 
series).progress } + subject { test_class.new(group: group, user: user, series: series).progress } before do allow(Gitlab).to receive(:com?).and_return(is_gitlab_com) diff --git a/spec/lib/gitlab/email/message/in_product_marketing/create_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/create_spec.rb index be8a33b18bd..35470ef3555 100644 --- a/spec/lib/gitlab/email/message/in_product_marketing/create_spec.rb +++ b/spec/lib/gitlab/email/message/in_product_marketing/create_spec.rb @@ -6,8 +6,9 @@ RSpec.describe Gitlab::Email::Message::InProductMarketing::Create do using RSpec::Parameterized::TableSyntax let_it_be(:group) { build(:group) } + let_it_be(:user) { build(:user) } - subject(:message) { described_class.new(group: group, series: series)} + subject(:message) { described_class.new(group: group, user: user, series: series)} describe "public methods" do where(series: [0, 1, 2]) diff --git a/spec/lib/gitlab/email/message/in_product_marketing/experience_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/experience_spec.rb new file mode 100644 index 00000000000..b742eff3f56 --- /dev/null +++ b/spec/lib/gitlab/email/message/in_product_marketing/experience_spec.rb @@ -0,0 +1,64 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Email::Message::InProductMarketing::Experience do + let_it_be(:group) { build(:group) } + let_it_be(:user) { build(:user) } + + subject(:message) { described_class.new(group: group, user: user, series: series)} + + describe 'public methods' do + context 'with series 0' do + let(:series) { 0 } + + it 'returns value for series', :aggregate_failures do + expect(message.subject_line).to be_present + expect(message.tagline).to be_nil + expect(message.title).to be_present + expect(message.subtitle).to be_present + expect(message.body_line1).to be_present + expect(message.body_line2).to be_present + expect(message.cta_text).to be_nil + end + + describe '#feedback_link' do + let(:member_count) { 2 
} + let(:user_access) { GroupMember::DEVELOPER } + let(:preferred_language) { 'en' } + + before do + allow(message).to receive(:onboarding_progress).and_return(1) + allow(group).to receive(:member_count).and_return(member_count) + allow(group).to receive(:max_member_access_for_user).and_return(user_access) + allow(user).to receive(:preferred_language).and_return(preferred_language) + end + + subject do + uri = URI.parse(message.feedback_link(1)) + Rack::Utils.parse_query(uri.query).with_indifferent_access[:show_invite_link] + end + + it { is_expected.to eq('true') } + + context 'with only one member' do + let(:member_count) { 1 } + + it { is_expected.to eq('false') } + end + + context 'with less than developer access' do + let(:user_access) { GroupMember::GUEST } + + it { is_expected.to eq('false') } + end + + context 'with preferred language other than English' do + let(:preferred_language) { 'nl' } + + it { is_expected.to eq('false') } + end + end + end + end +end diff --git a/spec/lib/gitlab/email/message/in_product_marketing/team_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/team_spec.rb index 6251128f560..f72994fcce1 100644 --- a/spec/lib/gitlab/email/message/in_product_marketing/team_spec.rb +++ b/spec/lib/gitlab/email/message/in_product_marketing/team_spec.rb @@ -6,8 +6,9 @@ RSpec.describe Gitlab::Email::Message::InProductMarketing::Team do using RSpec::Parameterized::TableSyntax let_it_be(:group) { build(:group) } + let_it_be(:user) { build(:user) } - subject(:message) { described_class.new(group: group, series: series)} + subject(:message) { described_class.new(group: group, user: user, series: series)} describe "public methods" do where(series: [0, 1]) diff --git a/spec/lib/gitlab/email/message/in_product_marketing/trial_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/trial_spec.rb index 2c435490765..5f7639a9ed6 100644 --- a/spec/lib/gitlab/email/message/in_product_marketing/trial_spec.rb +++ 
b/spec/lib/gitlab/email/message/in_product_marketing/trial_spec.rb @@ -6,8 +6,9 @@ RSpec.describe Gitlab::Email::Message::InProductMarketing::Trial do using RSpec::Parameterized::TableSyntax let_it_be(:group) { build(:group) } + let_it_be(:user) { build(:user) } - subject(:message) { described_class.new(group: group, series: series)} + subject(:message) { described_class.new(group: group, user: user, series: series)} describe "public methods" do where(series: [0, 1, 2]) diff --git a/spec/lib/gitlab/email/message/in_product_marketing/verify_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/verify_spec.rb index 73252c0dbdf..a7da2e9553d 100644 --- a/spec/lib/gitlab/email/message/in_product_marketing/verify_spec.rb +++ b/spec/lib/gitlab/email/message/in_product_marketing/verify_spec.rb @@ -4,8 +4,9 @@ require 'spec_helper' RSpec.describe Gitlab::Email::Message::InProductMarketing::Verify do let_it_be(:group) { build(:group) } + let_it_be(:user) { build(:user) } - subject(:message) { described_class.new(group: group, series: series)} + subject(:message) { described_class.new(group: group, user: user, series: series)} describe "public methods" do context 'with series 0' do diff --git a/spec/lib/gitlab/email/receiver_spec.rb b/spec/lib/gitlab/email/receiver_spec.rb index 9b05c12ef57..2c1fe529a5d 100644 --- a/spec/lib/gitlab/email/receiver_spec.rb +++ b/spec/lib/gitlab/email/receiver_spec.rb @@ -5,9 +5,13 @@ require 'spec_helper' RSpec.describe Gitlab::Email::Receiver do include_context :email_shared_context - shared_examples 'correctly finds the mail key' do - specify do + shared_examples 'correctly finds the mail key and adds metric event' do + let(:metric_transaction) { double('Gitlab::Metrics::WebTransaction') } + + specify :aggregate_failures do expect(Gitlab::Email::Handler).to receive(:for).with(an_instance_of(Mail::Message), 'gitlabhq/gitlabhq+auth_token').and_return(handler) + expect(::Gitlab::Metrics::BackgroundTransaction).to 
receive(:current).and_return(metric_transaction) + expect(metric_transaction).to receive(:add_event).with(handler.metrics_event, handler.metrics_params) receiver.execute end @@ -30,7 +34,7 @@ RSpec.describe Gitlab::Email::Receiver do context 'when in a Delivered-To header' do let(:email_raw) { fixture_file('emails/forwarded_new_issue.eml') } - it_behaves_like 'correctly finds the mail key' + it_behaves_like 'correctly finds the mail key and adds metric event' it 'parses the metadata' do expect(metadata[:delivered_to]). to eq(["incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com", "support@example.com"]) @@ -40,7 +44,7 @@ RSpec.describe Gitlab::Email::Receiver do context 'when in an Envelope-To header' do let(:email_raw) { fixture_file('emails/envelope_to_header.eml') } - it_behaves_like 'correctly finds the mail key' + it_behaves_like 'correctly finds the mail key and adds metric event' it 'parses the metadata' do expect(metadata[:envelope_to]). to eq(["incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com"]) @@ -50,7 +54,7 @@ RSpec.describe Gitlab::Email::Receiver do context 'when in an X-Envelope-To header' do let(:email_raw) { fixture_file('emails/x_envelope_to_header.eml') } - it_behaves_like 'correctly finds the mail key' + it_behaves_like 'correctly finds the mail key and adds metric event' it 'parses the metadata' do expect(metadata[:x_envelope_to]). 
to eq(["incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com"]) @@ -60,7 +64,7 @@ RSpec.describe Gitlab::Email::Receiver do context 'when enclosed with angle brackets in an Envelope-To header' do let(:email_raw) { fixture_file('emails/envelope_to_header_with_angle_brackets.eml') } - it_behaves_like 'correctly finds the mail key' + it_behaves_like 'correctly finds the mail key and adds metric event' end end diff --git a/spec/lib/gitlab/emoji_spec.rb b/spec/lib/gitlab/emoji_spec.rb index ada37f25d1e..8f855489c12 100644 --- a/spec/lib/gitlab/emoji_spec.rb +++ b/spec/lib/gitlab/emoji_spec.rb @@ -91,7 +91,16 @@ RSpec.describe Gitlab::Emoji do it 'returns emoji image tag' do emoji_image = described_class.emoji_image_tag('emoji_one', 'src_url') - expect(emoji_image).to eq( "<img class='emoji' title=':emoji_one:' alt=':emoji_one:' src='src_url' height='20' width='20' align='absmiddle' />") + expect(emoji_image).to eq("<img class=\"emoji\" src=\"src_url\" title=\":emoji_one:\" alt=\":emoji_one:\" height=\"20\" width=\"20\" align=\"absmiddle\" />") + end + + it 'escapes emoji image attrs to prevent XSS' do + xss_payload = "<script>alert(1)</script>" + escaped_xss_payload = html_escape(xss_payload) + + emoji_image = described_class.emoji_image_tag(xss_payload, 'http://aaa#' + xss_payload) + + expect(emoji_image).to eq("<img class=\"emoji\" src=\"http://aaa##{escaped_xss_payload}\" title=\":#{escaped_xss_payload}:\" alt=\":#{escaped_xss_payload}:\" height=\"20\" width=\"20\" align=\"absmiddle\" />") end end diff --git a/spec/lib/gitlab/error_tracking/processor/context_payload_processor_spec.rb b/spec/lib/gitlab/error_tracking/processor/context_payload_processor_spec.rb index 584eadb24a7..210829056c8 100644 --- a/spec/lib/gitlab/error_tracking/processor/context_payload_processor_spec.rb +++ b/spec/lib/gitlab/error_tracking/processor/context_payload_processor_spec.rb @@ -4,7 +4,15 @@ require 'spec_helper' RSpec.describe 
Gitlab::ErrorTracking::Processor::ContextPayloadProcessor do describe '.call' do - let(:event) { Raven::Event.new(payload) } + let(:required_options) do + { + configuration: Raven.configuration, + context: Raven.context, + breadcrumbs: Raven.breadcrumbs + } + end + + let(:event) { Raven::Event.new(required_options.merge(payload)) } let(:result_hash) { described_class.call(event).to_hash } before do diff --git a/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb b/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb index 727b603feda..6076e525f06 100644 --- a/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb +++ b/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb @@ -4,7 +4,15 @@ require 'spec_helper' RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor do describe '.call' do - let(:event) { Raven::Event.from_exception(exception, data) } + let(:required_options) do + { + configuration: Raven.configuration, + context: Raven.context, + breadcrumbs: Raven.breadcrumbs + } + end + + let(:event) { Raven::Event.from_exception(exception, required_options.merge(data)) } let(:result_hash) { described_class.call(event).to_hash } context 'when there is no GRPC exception' do diff --git a/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb b/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb index c8a362fcf05..af5f11c9362 100644 --- a/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb +++ b/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb @@ -95,7 +95,15 @@ RSpec.describe Gitlab::ErrorTracking::Processor::SidekiqProcessor do end describe '.call' do - let(:event) { Raven::Event.new(wrapped_value) } + let(:required_options) do + { + configuration: Raven.configuration, + context: Raven.context, + breadcrumbs: Raven.breadcrumbs + } + end + + let(:event) { Raven::Event.new(required_options.merge(wrapped_value)) } let(:result_hash) 
{ described_class.call(event).to_hash } context 'when there is Sidekiq data' do diff --git a/spec/lib/gitlab/error_tracking_spec.rb b/spec/lib/gitlab/error_tracking_spec.rb index b3293e6473c..7ad1f52780a 100644 --- a/spec/lib/gitlab/error_tracking_spec.rb +++ b/spec/lib/gitlab/error_tracking_spec.rb @@ -204,23 +204,6 @@ RSpec.describe Gitlab::ErrorTracking do expect(sentry_event.dig('extra', 'sql')).to eq('SELECT "users".* FROM "users" WHERE "users"."id" = $2 AND "users"."foo" = $1') end - - context 'when SQL cannot be parsed' do - let(:pg12_query) do - <<-SQL - CREATE INDEX CONCURRENTLY my_index ON merge_requests - USING btree (target_project_id) INCLUDE (id, latest_merge_request_diff_id) - SQL - end - - let(:exception) { ActiveRecord::StatementInvalid.new(sql: pg12_query) } - - it 'injects the raw sql query into extra' do - track_exception - - expect(sentry_event.dig('extra', 'sql')).to eq(pg12_query) - end - end end context 'when the `ActiveRecord::StatementInvalid` is wrapped in another exception' do diff --git a/spec/lib/gitlab/etag_caching/middleware_spec.rb b/spec/lib/gitlab/etag_caching/middleware_spec.rb index 3122a3b1c07..c4da89e5f5c 100644 --- a/spec/lib/gitlab/etag_caching/middleware_spec.rb +++ b/spec/lib/gitlab/etag_caching/middleware_spec.rb @@ -33,7 +33,6 @@ RSpec.describe Gitlab::EtagCaching::Middleware, :clean_gitlab_redis_shared_state expect(headers['ETag']).to be_nil expect(headers['X-Gitlab-From-Cache']).to be_nil - expect(headers[::Gitlab::Metrics::RequestsRackMiddleware::FEATURE_CATEGORY_HEADER]).to be_nil end it 'passes status code from app' do @@ -41,6 +40,12 @@ RSpec.describe Gitlab::EtagCaching::Middleware, :clean_gitlab_redis_shared_state expect(status).to eq app_status_code end + + it 'does not set feature category attribute' do + expect(Gitlab::ApplicationContext).not_to receive(:push) + + _, _, _ = middleware.call(build_request(path, if_none_match)) + end end context 'when there is no ETag in store for given resource' do @@ -164,8 
+169,15 @@ RSpec.describe Gitlab::EtagCaching::Middleware, :clean_gitlab_redis_shared_state it 'sets correct headers' do _, headers, _ = middleware.call(build_request(path, if_none_match)) - expect(headers).to include('X-Gitlab-From-Cache' => 'true', - ::Gitlab::Metrics::RequestsRackMiddleware::FEATURE_CATEGORY_HEADER => 'issue_tracking') + expect(headers).to include('X-Gitlab-From-Cache' => 'true') + end + + it "pushes route's feature category to the context" do + expect(Gitlab::ApplicationContext).to receive(:push).with( + feature_category: 'issue_tracking' + ) + + _, _, _ = middleware.call(build_request(path, if_none_match)) end it_behaves_like 'sends a process_action.action_controller notification', 304 diff --git a/spec/lib/gitlab/experimentation/controller_concern_spec.rb b/spec/lib/gitlab/experimentation/controller_concern_spec.rb index 5419a01ea3e..7a619c9f155 100644 --- a/spec/lib/gitlab/experimentation/controller_concern_spec.rb +++ b/spec/lib/gitlab/experimentation/controller_concern_spec.rb @@ -196,9 +196,12 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do end describe '#track_experiment_event', :snowplow do + let(:user) { build(:user) } + context 'when the experiment is enabled' do before do stub_experiment(test_experiment: true) + allow(controller).to receive(:current_user).and_return(user) end context 'the user is part of the experimental group' do @@ -213,7 +216,8 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do category: 'Team', action: 'start', property: 'experimental_group', - value: 1 + value: 1, + user: user ) end end @@ -230,7 +234,8 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do category: 'Team', action: 'start', property: 'control_group', - value: 1 + value: 1, + user: user ) end end @@ -247,7 +252,8 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do category: 'Team', action: 'start', property: 
'control_group', - value: 1 + value: 1, + user: user ) end end @@ -280,7 +286,8 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do action: 'start', property: 'control_group', value: 1, - label: Digest::MD5.hexdigest('abc') + label: Digest::MD5.hexdigest('abc'), + user: user ) end @@ -294,7 +301,8 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do action: 'start', property: 'control_group', value: 1, - label: Digest::MD5.hexdigest('somestring') + label: Digest::MD5.hexdigest('somestring'), + user: user ) end end @@ -313,7 +321,8 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do action: 'start', property: 'control_group', value: 1, - label: cookies.permanent.signed[:experimentation_subject_id] + label: cookies.permanent.signed[:experimentation_subject_id], + user: user ) end end diff --git a/spec/lib/gitlab/file_hook_spec.rb b/spec/lib/gitlab/file_hook_spec.rb index 7f40d9ae772..4fc55f7ad7e 100644 --- a/spec/lib/gitlab/file_hook_spec.rb +++ b/spec/lib/gitlab/file_hook_spec.rb @@ -9,7 +9,7 @@ RSpec.describe Gitlab::FileHook do let(:file_hook_source) do <<~EOS #!/usr/bin/env ruby - x = STDIN.read + x = $stdin.read File.write('#{tmp_file.path}', x) EOS end diff --git a/spec/lib/gitlab/git/conflict/resolver_spec.rb b/spec/lib/gitlab/git/conflict/resolver_spec.rb new file mode 100644 index 00000000000..2783e955c33 --- /dev/null +++ b/spec/lib/gitlab/git/conflict/resolver_spec.rb @@ -0,0 +1,32 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Git::Conflict::Resolver do + let(:repository) { instance_double(Gitlab::Git::Repository) } + let(:our_commit_oid) { 'our-commit-oid' } + let(:their_commit_oid) { 'their-commit-oid' } + let(:gitaly_conflicts_client) { instance_double(Gitlab::GitalyClient::ConflictsService) } + + subject(:resolver) { described_class.new(repository, our_commit_oid, their_commit_oid) } + + describe '#conflicts' do + before do 
+ allow(repository).to receive(:gitaly_conflicts_client).and_return(gitaly_conflicts_client) + end + + it 'returns list of conflicts' do + conflicts = [double] + + expect(gitaly_conflicts_client).to receive(:list_conflict_files).and_return(conflicts) + expect(resolver.conflicts).to eq(conflicts) + end + + context 'when GRPC::FailedPrecondition is raised' do + it 'rescues and raises Gitlab::Git::Conflict::Resolver::ConflictSideMissing' do + expect(gitaly_conflicts_client).to receive(:list_conflict_files).and_raise(GRPC::FailedPrecondition) + expect { resolver.conflicts }.to raise_error(Gitlab::Git::Conflict::Resolver::ConflictSideMissing) + end + end + end +end diff --git a/spec/lib/gitlab/git/remote_repository_spec.rb b/spec/lib/gitlab/git/remote_repository_spec.rb index 84c17234ae4..c7bc81573a6 100644 --- a/spec/lib/gitlab/git/remote_repository_spec.rb +++ b/spec/lib/gitlab/git/remote_repository_spec.rb @@ -58,45 +58,4 @@ RSpec.describe Gitlab::Git::RemoteRepository, :seed_helper do it { expect(subject.same_repository?(other_repository)).to eq(result) } end end - - describe '#fetch_env' do - let(:remote_repository) { described_class.new(repository) } - - let(:gitaly_client) { double(:gitaly_client) } - let(:address) { 'fake-address' } - let(:token) { 'fake-token' } - - subject { remote_repository.fetch_env } - - before do - allow(remote_repository).to receive(:gitaly_client).and_return(gitaly_client) - - expect(gitaly_client).to receive(:address).with(repository.storage).and_return(address) - expect(gitaly_client).to receive(:token).with(repository.storage).and_return(token) - end - - it { expect(subject).to be_a(Hash) } - it { expect(subject['GITALY_ADDRESS']).to eq(address) } - it { expect(subject['GITALY_TOKEN']).to eq(token) } - it { expect(subject['GITALY_WD']).to eq(Dir.pwd) } - - it 'creates a plausible GIT_SSH_COMMAND' do - git_ssh_command = subject['GIT_SSH_COMMAND'] - - expect(git_ssh_command).to start_with('/') - expect(git_ssh_command).to 
end_with('/gitaly-ssh upload-pack') - end - - it 'creates a plausible GITALY_PAYLOAD' do - req = Gitaly::SSHUploadPackRequest.decode_json(subject['GITALY_PAYLOAD']) - - expect(remote_repository.gitaly_repository).to eq(req.repository) - end - - context 'when the token is blank' do - let(:token) { '' } - - it { expect(subject.keys).not_to include('GITALY_TOKEN') } - end - end end diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb index 1ddbdda12b5..336bf20d59c 100644 --- a/spec/lib/gitlab/git/repository_spec.rb +++ b/spec/lib/gitlab/git/repository_spec.rb @@ -133,32 +133,10 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do expect(metadata['ArchivePrefix']).to eq(expected_prefix) end - context 'when :include_lfs_blobs_in_archive feature flag is disabled' do - let(:expected_path) { File.join(storage_path, cache_key, expected_filename) } + it 'sets ArchivePath to the expected globally-unique path' do + expect(expected_path).to include(File.join(repository.gl_repository, SeedRepo::LastCommit::ID)) - before do - stub_feature_flags(include_lfs_blobs_in_archive: false) - end - - it 'sets ArchivePath to the expected globally-unique path' do - # This is really important from a security perspective. 
Think carefully - # before changing it: https://gitlab.com/gitlab-org/gitlab-foss/issues/45689 - expect(expected_path).to include(File.join(repository.gl_repository, SeedRepo::LastCommit::ID)) - - expect(metadata['ArchivePath']).to eq(expected_path) - end - end - - context 'when :include_lfs_blobs_in_archive feature flag is enabled' do - before do - stub_feature_flags(include_lfs_blobs_in_archive: true) - end - - it 'sets ArchivePath to the expected globally-unique path' do - expect(expected_path).to include(File.join(repository.gl_repository, SeedRepo::LastCommit::ID)) - - expect(metadata['ArchivePath']).to eq(expected_path) - end + expect(metadata['ArchivePath']).to eq(expected_path) end context 'path is set' do @@ -521,7 +499,9 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do no_tags: true, timeout: described_class::GITLAB_PROJECTS_TIMEOUT, prune: false, - check_tags_changed: false + check_tags_changed: false, + url: nil, + refmap: nil } expect(repository.gitaly_repository_client).to receive(:fetch_remote).with('remote-name', expected_opts) diff --git a/spec/lib/gitlab/git_access_spec.rb b/spec/lib/gitlab/git_access_spec.rb index 3d6c04fd484..96a44575e24 100644 --- a/spec/lib/gitlab/git_access_spec.rb +++ b/spec/lib/gitlab/git_access_spec.rb @@ -731,6 +731,8 @@ RSpec.describe Gitlab::GitAccess do context 'when LFS is not enabled' do it 'does not run LFSIntegrity check' do + allow(project).to receive(:lfs_enabled?).and_return(false) + expect(Gitlab::Checks::LfsIntegrity).not_to receive(:new) push_access_check @@ -1004,10 +1006,10 @@ RSpec.describe Gitlab::GitAccess do expect { access.check('git-receive-pack', changes) }.not_to exceed_query_limit(control_count).with_threshold(2) end - it 'raises TimeoutError when #check_single_change_access raises a timeout error' do + it 'raises TimeoutError when #check_access! 
raises a timeout error' do message = "Push operation timed out\n\nTiming information for debugging purposes:\nRunning checks for ref: wow" - expect_next_instance_of(Gitlab::Checks::ChangeAccess) do |check| + expect_next_instance_of(Gitlab::Checks::SingleChangeAccess) do |check| expect(check).to receive(:validate!).and_raise(Gitlab::Checks::TimedLogger::TimeoutError) end diff --git a/spec/lib/gitlab/gitaly_client/remote_service_spec.rb b/spec/lib/gitlab/gitaly_client/remote_service_spec.rb index 70fc4fe4416..df9dde324a5 100644 --- a/spec/lib/gitlab/gitaly_client/remote_service_spec.rb +++ b/spec/lib/gitlab/gitaly_client/remote_service_spec.rb @@ -38,47 +38,26 @@ RSpec.describe Gitlab::GitalyClient::RemoteService do let(:remote) { 'origin' } let(:url) { 'http://git.example.com/my-repo.git' } let(:auth) { 'Basic secret' } + let(:expected_params) { { remote_url: url, http_authorization_header: auth } } - shared_examples 'a find_remote_root_ref call' do - it 'sends an find_remote_root_ref message and returns the root ref' do - expect_any_instance_of(Gitaly::RemoteService::Stub) - .to receive(:find_remote_root_ref) - .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash)) - .with(gitaly_request_with_params(expected_params), kind_of(Hash)) - .and_return(double(ref: 'master')) - - expect(client.find_remote_root_ref(remote, url, auth)).to eq 'master' - end - - it 'ensure ref is a valid UTF-8 string' do - expect_any_instance_of(Gitaly::RemoteService::Stub) - .to receive(:find_remote_root_ref) - .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash)) - .with(gitaly_request_with_params(expected_params), kind_of(Hash)) - .and_return(double(ref: "an_invalid_ref_\xE5")) - - expect(client.find_remote_root_ref(remote, url, auth)).to eq "an_invalid_ref_å" - end - end - - context 'with inmemory feature enabled' do - before do - stub_feature_flags(find_remote_root_refs_inmemory: true) - end + it 'sends an find_remote_root_ref message and returns 
the root ref' do + expect_any_instance_of(Gitaly::RemoteService::Stub) + .to receive(:find_remote_root_ref) + .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash)) + .with(gitaly_request_with_params(expected_params), kind_of(Hash)) + .and_return(double(ref: 'master')) - it_behaves_like 'a find_remote_root_ref call' do - let(:expected_params) { { remote_url: url, http_authorization_header: auth } } - end + expect(client.find_remote_root_ref(remote, url, auth)).to eq 'master' end - context 'with inmemory feature disabled' do - before do - stub_feature_flags(find_remote_root_refs_inmemory: false) - end + it 'ensure ref is a valid UTF-8 string' do + expect_any_instance_of(Gitaly::RemoteService::Stub) + .to receive(:find_remote_root_ref) + .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash)) + .with(gitaly_request_with_params(expected_params), kind_of(Hash)) + .and_return(double(ref: "an_invalid_ref_\xE5")) - it_behaves_like 'a find_remote_root_ref call' do - let(:expected_params) { { remote: remote } } - end + expect(client.find_remote_root_ref(remote, url, auth)).to eq "an_invalid_ref_å" end end diff --git a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb index 26ec194a2e7..56c8fe20eca 100644 --- a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb +++ b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb @@ -122,67 +122,89 @@ RSpec.describe Gitlab::GitalyClient::RepositoryService do end describe '#fetch_remote' do - let(:remote) { 'remote-name' } - - it 'sends a fetch_remote_request message' do - expected_request = gitaly_request_with_params( - remote: remote, - ssh_key: '', - known_hosts: '', - force: false, - no_tags: false, - no_prune: false, - check_tags_changed: false - ) + shared_examples 'a fetch' do + it 'sends a fetch_remote_request message' do + expected_remote_params = Gitaly::Remote.new( + url: url, http_authorization_header: "", 
mirror_refmaps: []) + + expected_request = gitaly_request_with_params( + remote: remote, + remote_params: url ? expected_remote_params : nil, + ssh_key: '', + known_hosts: '', + force: false, + no_tags: false, + no_prune: false, + check_tags_changed: false + ) + + expect_any_instance_of(Gitaly::RepositoryService::Stub) + .to receive(:fetch_remote) + .with(expected_request, kind_of(Hash)) + .and_return(double(value: true)) + + client.fetch_remote(remote, url: url, refmap: nil, ssh_auth: nil, forced: false, no_tags: false, timeout: 1, check_tags_changed: false) + end - expect_any_instance_of(Gitaly::RepositoryService::Stub) - .to receive(:fetch_remote) - .with(expected_request, kind_of(Hash)) - .and_return(double(value: true)) + context 'SSH auth' do + where(:ssh_mirror_url, :ssh_key_auth, :ssh_private_key, :ssh_known_hosts, :expected_params) do + false | false | 'key' | 'known_hosts' | {} + false | true | 'key' | 'known_hosts' | {} + true | false | 'key' | 'known_hosts' | { known_hosts: 'known_hosts' } + true | true | 'key' | 'known_hosts' | { ssh_key: 'key', known_hosts: 'known_hosts' } + true | true | 'key' | nil | { ssh_key: 'key' } + true | true | nil | 'known_hosts' | { known_hosts: 'known_hosts' } + true | true | nil | nil | {} + true | true | '' | '' | {} + end - client.fetch_remote(remote, ssh_auth: nil, forced: false, no_tags: false, timeout: 1, check_tags_changed: false) + with_them do + let(:ssh_auth) do + double( + :ssh_auth, + ssh_mirror_url?: ssh_mirror_url, + ssh_key_auth?: ssh_key_auth, + ssh_private_key: ssh_private_key, + ssh_known_hosts: ssh_known_hosts + ) + end + + it do + expected_remote_params = Gitaly::Remote.new( + url: url, http_authorization_header: "", mirror_refmaps: []) + + expected_request = gitaly_request_with_params({ + remote: remote, + remote_params: url ? 
expected_remote_params : nil, + ssh_key: '', + known_hosts: '', + force: false, + no_tags: false, + no_prune: false + }.update(expected_params)) + + expect_any_instance_of(Gitaly::RepositoryService::Stub) + .to receive(:fetch_remote) + .with(expected_request, kind_of(Hash)) + .and_return(double(value: true)) + + client.fetch_remote(remote, url: url, refmap: nil, ssh_auth: ssh_auth, forced: false, no_tags: false, timeout: 1) + end + end + end end - context 'SSH auth' do - where(:ssh_mirror_url, :ssh_key_auth, :ssh_private_key, :ssh_known_hosts, :expected_params) do - false | false | 'key' | 'known_hosts' | {} - false | true | 'key' | 'known_hosts' | {} - true | false | 'key' | 'known_hosts' | { known_hosts: 'known_hosts' } - true | true | 'key' | 'known_hosts' | { ssh_key: 'key', known_hosts: 'known_hosts' } - true | true | 'key' | nil | { ssh_key: 'key' } - true | true | nil | 'known_hosts' | { known_hosts: 'known_hosts' } - true | true | nil | nil | {} - true | true | '' | '' | {} + context 'with remote' do + it_behaves_like 'a fetch' do + let(:remote) { 'remote-name' } + let(:url) { nil } end + end - with_them do - let(:ssh_auth) do - double( - :ssh_auth, - ssh_mirror_url?: ssh_mirror_url, - ssh_key_auth?: ssh_key_auth, - ssh_private_key: ssh_private_key, - ssh_known_hosts: ssh_known_hosts - ) - end - - it do - expected_request = gitaly_request_with_params({ - remote: remote, - ssh_key: '', - known_hosts: '', - force: false, - no_tags: false, - no_prune: false - }.update(expected_params)) - - expect_any_instance_of(Gitaly::RepositoryService::Stub) - .to receive(:fetch_remote) - .with(expected_request, kind_of(Hash)) - .and_return(double(value: true)) - - client.fetch_remote(remote, ssh_auth: ssh_auth, forced: false, no_tags: false, timeout: 1) - end + context 'with URL' do + it_behaves_like 'a fetch' do + let(:remote) { "" } + let(:url) { 'https://example.com/git/repo.git' } end end end diff --git a/spec/lib/gitlab/gitaly_client_spec.rb 
b/spec/lib/gitlab/gitaly_client_spec.rb index a8d42f4bccf..16f75691288 100644 --- a/spec/lib/gitlab/gitaly_client_spec.rb +++ b/spec/lib/gitlab/gitaly_client_spec.rb @@ -33,14 +33,6 @@ RSpec.describe Gitlab::GitalyClient do it { expect(subject.long_timeout).to eq(6.hours) } end - context 'running in Unicorn' do - before do - allow(Gitlab::Runtime).to receive(:unicorn?).and_return(true) - end - - it { expect(subject.long_timeout).to eq(55) } - end - context 'running in Puma' do before do allow(Gitlab::Runtime).to receive(:puma?).and_return(true) diff --git a/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb index 8a7867f3841..133d515246a 100644 --- a/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb @@ -3,7 +3,8 @@ require 'spec_helper' RSpec.describe Gitlab::GithubImport::Importer::PullRequestsImporter do - let(:project) { create(:project, import_source: 'foo/bar') } + let(:url) { 'https://github.com/foo/bar.git' } + let(:project) { create(:project, import_source: 'foo/bar', import_url: url) } let(:client) { double(:client) } let(:pull_request) do @@ -147,14 +148,10 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsImporter do end end - describe '#update_repository' do + shared_examples '#update_repository' do it 'updates the repository' do importer = described_class.new(project, client) - expect(project.repository) - .to receive(:fetch_remote) - .with('github', forced: false) - expect_next_instance_of(Gitlab::Import::Logger) do |logger| expect(logger) .to receive(:info) @@ -173,6 +170,28 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsImporter do end end + describe '#update_repository with :fetch_remote_params enabled' do + before do + stub_feature_flags(fetch_remote_params: true) + expect(project.repository) + .to receive(:fetch_remote) + 
.with('github', forced: false, url: url, refmap: Gitlab::GithubImport.refmap) + end + + it_behaves_like '#update_repository' + end + + describe '#update_repository with :fetch_remote_params disabled' do + before do + stub_feature_flags(fetch_remote_params: false) + expect(project.repository) + .to receive(:fetch_remote) + .with('github', forced: false) + end + + it_behaves_like '#update_repository' + end + describe '#update_repository?' do let(:importer) { described_class.new(project, client) } diff --git a/spec/lib/gitlab/github_import/importer/pull_requests_reviews_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests_reviews_importer_spec.rb index f18064f10aa..08be350f0f9 100644 --- a/spec/lib/gitlab/github_import/importer/pull_requests_reviews_importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer/pull_requests_reviews_importer_spec.rb @@ -27,30 +27,100 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsReviewsImporter do end describe '#each_object_to_import', :clean_gitlab_redis_cache do - it 'fetchs the merged pull requests data' do - merge_request = create( - :merged_merge_request, - iid: 999, - source_project: project, - target_project: project - ) - - review = double - - expect(review) - .to receive(:merge_request_id=) - .with(merge_request.id) - - allow(client) - .to receive(:pull_request_reviews) - .exactly(:once) # ensure to be cached on the second call - .with('github/repo', merge_request.iid) - .and_return([review]) - - expect { |b| subject.each_object_to_import(&b) } - .to yield_with_args(review) - - subject.each_object_to_import {} + context 'when github_review_importer_query_only_unimported_merge_requests is enabled' do + before do + stub_feature_flags(github_review_importer_query_only_unimported_merge_requests: true) + end + + let(:merge_request) do + create( + :merged_merge_request, + iid: 999, + source_project: project, + target_project: project + ) + end + + let(:review) { double(id: 1) } + + it 'fetches the 
pull requests reviews data' do + page = double(objects: [review], number: 1) + + expect(review) + .to receive(:merge_request_id=) + .with(merge_request.id) + + expect(client) + .to receive(:each_page) + .exactly(:once) # ensure to be cached on the second call + .with(:pull_request_reviews, 'github/repo', merge_request.iid, page: 1) + .and_yield(page) + + expect { |b| subject.each_object_to_import(&b) } + .to yield_with_args(review) + + subject.each_object_to_import {} + end + + it 'skips cached pages' do + Gitlab::GithubImport::PageCounter + .new(project, "merge_request/#{merge_request.id}/pull_request_reviews") + .set(2) + + expect(review).not_to receive(:merge_request_id=) + + expect(client) + .to receive(:each_page) + .exactly(:once) # ensure to be cached on the second call + .with(:pull_request_reviews, 'github/repo', merge_request.iid, page: 2) + + subject.each_object_to_import {} + end + + it 'skips cached merge requests' do + Gitlab::Cache::Import::Caching.set_add( + "github-importer/merge_request/already-imported/#{project.id}", + merge_request.id + ) + + expect(review).not_to receive(:merge_request_id=) + + expect(client).not_to receive(:each_page) + + subject.each_object_to_import {} + end + end + + context 'when github_review_importer_query_only_unimported_merge_requests is disabled' do + before do + stub_feature_flags(github_review_importer_query_only_unimported_merge_requests: false) + end + + it 'fetchs the merged pull requests data' do + merge_request = create( + :merged_merge_request, + iid: 999, + source_project: project, + target_project: project + ) + + review = double + + expect(review) + .to receive(:merge_request_id=) + .with(merge_request.id) + + allow(client) + .to receive(:pull_request_reviews) + .exactly(:once) # ensure to be cached on the second call + .with('github/repo', merge_request.iid) + .and_return([review]) + + expect { |b| subject.each_object_to_import(&b) } + .to yield_with_args(review) + + subject.each_object_to_import {} + end 
end end end diff --git a/spec/lib/gitlab/github_import/page_counter_spec.rb b/spec/lib/gitlab/github_import/page_counter_spec.rb index a1305b714b5..568bc8cbbef 100644 --- a/spec/lib/gitlab/github_import/page_counter_spec.rb +++ b/spec/lib/gitlab/github_import/page_counter_spec.rb @@ -31,4 +31,15 @@ RSpec.describe Gitlab::GithubImport::PageCounter, :clean_gitlab_redis_cache do expect(counter.current).to eq(2) end end + + describe '#expire!' do + it 'expires the current page counter' do + counter.set(2) + + counter.expire! + + expect(Gitlab::Cache::Import::Caching.read_integer(counter.cache_key)).to be_nil + expect(counter.current).to eq(1) + end + end end diff --git a/spec/lib/gitlab/global_id/deprecations_spec.rb b/spec/lib/gitlab/global_id/deprecations_spec.rb new file mode 100644 index 00000000000..22a4766c0a0 --- /dev/null +++ b/spec/lib/gitlab/global_id/deprecations_spec.rb @@ -0,0 +1,46 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::GlobalId::Deprecations do + include GlobalIDDeprecationHelpers + + let_it_be(:deprecation_1) { described_class::Deprecation.new(old_model_name: 'Foo::Model', new_model_name: 'Bar', milestone: '9.0') } + let_it_be(:deprecation_2) { described_class::Deprecation.new(old_model_name: 'Baz', new_model_name: 'Qux::Model', milestone: '10.0') } + + before do + stub_global_id_deprecations(deprecation_1, deprecation_2) + end + + describe '.deprecated?' 
do + it 'returns a boolean to signal if model name has a deprecation', :aggregate_failures do + expect(described_class.deprecated?('Foo::Model')).to eq(true) + expect(described_class.deprecated?('Qux::Model')).to eq(false) + end + end + + describe '.deprecation_for' do + it 'returns the deprecation for the model if it exists', :aggregate_failures do + expect(described_class.deprecation_for('Foo::Model')).to eq(deprecation_1) + expect(described_class.deprecation_for('Qux::Model')).to be_nil + end + end + + describe '.deprecation_by' do + it 'returns the deprecation by the model if it exists', :aggregate_failures do + expect(described_class.deprecation_by('Foo::Model')).to be_nil + expect(described_class.deprecation_by('Qux::Model')).to eq(deprecation_2) + end + end + + describe '.apply_to_graphql_name' do + it 'returns the corresponding graphql_name of the GID for the new model', :aggregate_failures do + expect(described_class.apply_to_graphql_name('FooModelID')).to eq('BarID') + expect(described_class.apply_to_graphql_name('BazID')).to eq('QuxModelID') + end + + it 'returns the same value if there is no deprecation' do + expect(described_class.apply_to_graphql_name('ProjectID')).to eq('ProjectID') + end + end +end diff --git a/spec/lib/gitlab/graphql/docs/renderer_spec.rb b/spec/lib/gitlab/graphql/docs/renderer_spec.rb deleted file mode 100644 index 14db51deb88..00000000000 --- a/spec/lib/gitlab/graphql/docs/renderer_spec.rb +++ /dev/null @@ -1,639 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Graphql::Docs::Renderer do - describe '#contents' do - shared_examples 'renders correctly as GraphQL documentation' do - it 'contains the expected section' do - # duplicative - but much better error messages! 
- section.lines.each { |line| expect(contents).to include(line) } - expect(contents).to include(section) - end - end - - let(:template) { Rails.root.join('lib/gitlab/graphql/docs/templates/default.md.haml') } - let(:field_description) { 'List of objects.' } - let(:type) { ::GraphQL::INT_TYPE } - - let(:query_type) do - Class.new(Types::BaseObject) { graphql_name 'Query' }.tap do |t| - # this keeps type and field_description in scope. - t.field :foo, type, null: true, description: field_description do - argument :id, GraphQL::ID_TYPE, required: false, description: 'ID of the object.' - end - end - end - - let(:mutation_root) do - Class.new(::Types::BaseObject) do - include ::Gitlab::Graphql::MountMutation - graphql_name 'Mutation' - end - end - - let(:mock_schema) do - Class.new(GraphQL::Schema) do - def resolve_type(obj, ctx) - raise 'Not a real schema' - end - end - end - - subject(:contents) do - mock_schema.query(query_type) - mock_schema.mutation(mutation_root) if mutation_root.fields.any? - - described_class.new( - mock_schema, - output_dir: nil, - template: template - ).contents - end - - describe 'headings' do - it 'contains the expected sections' do - expect(contents.lines.map(&:chomp)).to include( - '## `Query` type', - '## `Mutation` type', - '## Connections', - '## Object types', - '## Enumeration types', - '## Scalar types', - '## Abstract types', - '### Unions', - '### Interfaces', - '## Input types' - ) - end - end - - context 'when a field has a list type' do - let(:type) do - Class.new(Types::BaseObject) do - graphql_name 'ArrayTest' - - field :foo, [GraphQL::STRING_TYPE], null: false, description: 'A description.' - end - end - - specify do - type_name = '[String!]!' - inner_type = 'string' - expectation = <<~DOC - ### `ArrayTest` - - #### Fields - - | Name | Type | Description | - | ---- | ---- | ----------- | - | <a id="arraytestfoo"></a>`foo` | [`#{type_name}`](##{inner_type}) | A description. 
| - DOC - - is_expected.to include(expectation) - end - - describe 'a top level query field' do - let(:expectation) do - <<~DOC - ### `Query.foo` - - List of objects. - - Returns [`ArrayTest`](#arraytest). - - #### Arguments - - | Name | Type | Description | - | ---- | ---- | ----------- | - | <a id="queryfooid"></a>`id` | [`ID`](#id) | ID of the object. | - DOC - end - - it 'generates the query with arguments' do - expect(subject).to include(expectation) - end - - context 'when description does not end with `.`' do - let(:field_description) { 'List of objects' } - - it 'adds the `.` to the end' do - expect(subject).to include(expectation) - end - end - end - end - - describe 'when fields are not defined in alphabetical order' do - let(:type) do - Class.new(Types::BaseObject) do - graphql_name 'OrderingTest' - - field :foo, GraphQL::STRING_TYPE, null: false, description: 'A description of foo field.' - field :bar, GraphQL::STRING_TYPE, null: false, description: 'A description of bar field.' - end - end - - it 'lists the fields in alphabetical order' do - expectation = <<~DOC - ### `OrderingTest` - - #### Fields - - | Name | Type | Description | - | ---- | ---- | ----------- | - | <a id="orderingtestbar"></a>`bar` | [`String!`](#string) | A description of bar field. | - | <a id="orderingtestfoo"></a>`foo` | [`String!`](#string) | A description of foo field. 
| - DOC - - is_expected.to include(expectation) - end - end - - context 'when a field has a documentation reference' do - let(:type) do - wibble = Class.new(::Types::BaseObject) do - graphql_name 'Wibble' - field :x, ::GraphQL::INT_TYPE, null: false - end - - Class.new(Types::BaseObject) do - graphql_name 'DocRefSpec' - description 'Testing doc refs' - - field :foo, - type: GraphQL::STRING_TYPE, - null: false, - description: 'The foo.', - see: { 'A list of foos' => 'https://example.com/foos' } - field :bar, - type: GraphQL::STRING_TYPE, - null: false, - description: 'The bar.', - see: { 'A list of bars' => 'https://example.com/bars' } do - argument :barity, ::GraphQL::INT_TYPE, required: false, description: '?' - end - field :wibbles, - type: wibble.connection_type, - null: true, - description: 'The wibbles', - see: { 'wibblance' => 'https://example.com/wibbles' } - end - end - - let(:section) do - <<~DOC - ### `DocRefSpec` - - Testing doc refs. - - #### Fields - - | Name | Type | Description | - | ---- | ---- | ----------- | - | <a id="docrefspecfoo"></a>`foo` | [`String!`](#string) | The foo. See [A list of foos](https://example.com/foos). | - | <a id="docrefspecwibbles"></a>`wibbles` | [`WibbleConnection`](#wibbleconnection) | The wibbles. See [wibblance](https://example.com/wibbles). (see [Connections](#connections)) | - - #### Fields with arguments - - ##### `DocRefSpec.bar` - - The bar. See [A list of bars](https://example.com/bars). - - Returns [`String!`](#string). - - ###### Arguments - - | Name | Type | Description | - | ---- | ---- | ----------- | - | <a id="docrefspecbarbarity"></a>`barity` | [`Int`](#int) | ?. 
| - DOC - end - - it_behaves_like 'renders correctly as GraphQL documentation' - end - - context 'when an argument is deprecated' do - let(:type) do - Class.new(Types::BaseObject) do - graphql_name 'DeprecatedTest' - description 'A thing we used to use, but no longer support' - - field :foo, - type: GraphQL::STRING_TYPE, - null: false, - description: 'A description.' do - argument :foo_arg, GraphQL::STRING_TYPE, - required: false, - description: 'The argument.', - deprecated: { reason: 'Bad argument', milestone: '101.2' } - end - end - end - - let(:section) do - <<~DOC - ##### `DeprecatedTest.foo` - - A description. - - Returns [`String!`](#string). - - ###### Arguments - - | Name | Type | Description | - | ---- | ---- | ----------- | - | <a id="deprecatedtestfoofooarg"></a>`fooArg` **{warning-solid}** | [`String`](#string) | **Deprecated** in 101.2. Bad argument. | - DOC - end - - it_behaves_like 'renders correctly as GraphQL documentation' - end - - context 'when a field is deprecated' do - let(:type) do - Class.new(Types::BaseObject) do - graphql_name 'DeprecatedTest' - description 'A thing we used to use, but no longer support' - - field :foo, - type: GraphQL::STRING_TYPE, - null: false, - deprecated: { reason: 'This is deprecated', milestone: '1.10' }, - description: 'A description.' - field :foo_with_args, - type: GraphQL::STRING_TYPE, - null: false, - deprecated: { reason: 'Do not use', milestone: '1.10', replacement: 'X.y' }, - description: 'A description.' do - argument :arg, GraphQL::INT_TYPE, required: false, description: 'Argity' - end - field :bar, - type: GraphQL::STRING_TYPE, - null: false, - description: 'A description.', - deprecated: { - reason: :renamed, - milestone: '1.10', - replacement: 'Query.boom' - } - end - end - - let(:section) do - <<~DOC - ### `DeprecatedTest` - - A thing we used to use, but no longer support. 
- - #### Fields - - | Name | Type | Description | - | ---- | ---- | ----------- | - | <a id="deprecatedtestbar"></a>`bar` **{warning-solid}** | [`String!`](#string) | **Deprecated** in 1.10. This was renamed. Use: [`Query.boom`](#queryboom). | - | <a id="deprecatedtestfoo"></a>`foo` **{warning-solid}** | [`String!`](#string) | **Deprecated** in 1.10. This is deprecated. | - - #### Fields with arguments - - ##### `DeprecatedTest.fooWithArgs` - - A description. - - WARNING: - **Deprecated** in 1.10. - Do not use. - Use: [`X.y`](#xy). - - Returns [`String!`](#string). - - ###### Arguments - - | Name | Type | Description | - | ---- | ---- | ----------- | - | <a id="deprecatedtestfoowithargsarg"></a>`arg` | [`Int`](#int) | Argity. | - DOC - end - - it_behaves_like 'renders correctly as GraphQL documentation' - end - - context 'when a Query.field is deprecated' do - before do - query_type.field( - name: :bar, - type: type, - null: true, - description: 'A bar', - deprecated: { reason: :renamed, milestone: '10.11', replacement: 'Query.foo' } - ) - end - - let(:type) { ::GraphQL::INT_TYPE } - let(:section) do - <<~DOC - ### `Query.bar` - - A bar. - - WARNING: - **Deprecated** in 10.11. - This was renamed. - Use: [`Query.foo`](#queryfoo). - - Returns [`Int`](#int). - DOC - end - - it_behaves_like 'renders correctly as GraphQL documentation' - end - - context 'when a field has an Enumeration type' do - let(:type) do - enum_type = Class.new(Types::BaseEnum) do - graphql_name 'MyEnum' - description 'A test of an enum.' - - value 'BAZ', - description: 'A description of BAZ.' - value 'BAR', - description: 'A description of BAR.', - deprecated: { reason: 'This is deprecated', milestone: '1.10' } - end - - Class.new(Types::BaseObject) do - graphql_name 'EnumTest' - - field :foo, enum_type, null: false, description: 'A description of foo field.' - end - end - - let(:section) do - <<~DOC - ### `MyEnum` - - A test of an enum. 
- - | Value | Description | - | ----- | ----------- | - | <a id="myenumbar"></a>`BAR` **{warning-solid}** | **Deprecated:** This is deprecated. Deprecated in 1.10. | - | <a id="myenumbaz"></a>`BAZ` | A description of BAZ. | - DOC - end - - it_behaves_like 'renders correctly as GraphQL documentation' - end - - context 'when a field has a global ID type' do - let(:type) do - Class.new(Types::BaseObject) do - graphql_name 'IDTest' - description 'A test for rendering IDs.' - - field :foo, ::Types::GlobalIDType[::User], null: true, description: 'A user foo.' - end - end - - describe 'section for IDTest' do - let(:section) do - <<~DOC - ### `IDTest` - - A test for rendering IDs. - - #### Fields - - | Name | Type | Description | - | ---- | ---- | ----------- | - | <a id="idtestfoo"></a>`foo` | [`UserID`](#userid) | A user foo. | - DOC - end - - it_behaves_like 'renders correctly as GraphQL documentation' - end - - describe 'section for UserID' do - let(:section) do - <<~DOC - ### `UserID` - - A `UserID` is a global ID. It is encoded as a string. - - An example `UserID` is: `"gid://gitlab/User/1"`. - DOC - end - - it_behaves_like 'renders correctly as GraphQL documentation' - end - end - - context 'when there is a mutation' do - let(:mutation) do - mutation = Class.new(::Mutations::BaseMutation) - - mutation.graphql_name 'MakeItPretty' - mutation.description 'Make everything very pretty.' - - mutation.argument :prettiness_factor, - type: GraphQL::FLOAT_TYPE, - required: true, - description: 'How much prettier?' - - mutation.argument :pulchritude, - type: GraphQL::FLOAT_TYPE, - required: false, - description: 'How much prettier?', - deprecated: { - reason: :renamed, - replacement: 'prettinessFactor', - milestone: '72.34' - } - - mutation.field :everything, - type: GraphQL::STRING_TYPE, - null: true, - description: 'What we made prettier.' 
- - mutation.field :omnis, - type: GraphQL::STRING_TYPE, - null: true, - description: 'What we made prettier.', - deprecated: { - reason: :renamed, - replacement: 'everything', - milestone: '72.34' - } - - mutation - end - - before do - mutation_root.mount_mutation mutation - end - - it_behaves_like 'renders correctly as GraphQL documentation' do - let(:section) do - <<~DOC - ### `Mutation.makeItPretty` - - Make everything very pretty. - - Input type: `MakeItPrettyInput` - - #### Arguments - - | Name | Type | Description | - | ---- | ---- | ----------- | - | <a id="mutationmakeitprettyclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. | - | <a id="mutationmakeitprettyprettinessfactor"></a>`prettinessFactor` | [`Float!`](#float) | How much prettier?. | - | <a id="mutationmakeitprettypulchritude"></a>`pulchritude` **{warning-solid}** | [`Float`](#float) | **Deprecated:** This was renamed. Please use `prettinessFactor`. Deprecated in 72.34. | - - #### Fields - - | Name | Type | Description | - | ---- | ---- | ----------- | - | <a id="mutationmakeitprettyclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. | - | <a id="mutationmakeitprettyerrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. | - | <a id="mutationmakeitprettyeverything"></a>`everything` | [`String`](#string) | What we made prettier. | - | <a id="mutationmakeitprettyomnis"></a>`omnis` **{warning-solid}** | [`String`](#string) | **Deprecated:** This was renamed. Please use `everything`. Deprecated in 72.34. 
| - DOC - end - end - - it 'does not render the automatically generated payload type' do - expect(contents).not_to include('MakeItPrettyPayload') - end - - it 'does not render the automatically generated input type as its own section' do - expect(contents).not_to include('# `MakeItPrettyInput`') - end - end - - context 'when there is an input type' do - let(:type) do - Class.new(::Types::BaseObject) do - graphql_name 'Foo' - field :wibble, type: ::GraphQL::INT_TYPE, null: true do - argument :date_range, - type: ::Types::TimeframeInputType, - required: true, - description: 'When the foo happened.' - end - end - end - - let(:section) do - <<~DOC - ### `Timeframe` - - A time-frame defined as a closed inclusive range of two dates. - - #### Arguments - - | Name | Type | Description | - | ---- | ---- | ----------- | - | <a id="timeframeend"></a>`end` | [`Date!`](#date) | The end of the range. | - | <a id="timeframestart"></a>`start` | [`Date!`](#date) | The start of the range. | - DOC - end - - it_behaves_like 'renders correctly as GraphQL documentation' - end - - context 'when there is an interface and a union' do - let(:type) do - user = Class.new(::Types::BaseObject) - user.graphql_name 'User' - user.field :user_field, ::GraphQL::STRING_TYPE, null: true - group = Class.new(::Types::BaseObject) - group.graphql_name 'Group' - group.field :group_field, ::GraphQL::STRING_TYPE, null: true - - union = Class.new(::Types::BaseUnion) - union.graphql_name 'UserOrGroup' - union.description 'Either a user or a group.' - union.possible_types user, group - - interface = Module.new - interface.include(::Types::BaseInterface) - interface.graphql_name 'Flying' - interface.description 'Something that can fly.' - interface.field :flight_speed, GraphQL::INT_TYPE, null: true, description: 'Speed in mph.' - - african_swallow = Class.new(::Types::BaseObject) - african_swallow.graphql_name 'AfricanSwallow' - african_swallow.description 'A swallow from Africa.' 
- african_swallow.implements interface - interface.orphan_types african_swallow - - Class.new(::Types::BaseObject) do - graphql_name 'AbstractTypeTest' - description 'A test for abstract types.' - - field :foo, union, null: true, description: 'The foo.' - field :flying, interface, null: true, description: 'A flying thing.' - end - end - - it 'lists the fields correctly, and includes descriptions of all the types' do - type_section = <<~DOC - ### `AbstractTypeTest` - - A test for abstract types. - - #### Fields - - | Name | Type | Description | - | ---- | ---- | ----------- | - | <a id="abstracttypetestflying"></a>`flying` | [`Flying`](#flying) | A flying thing. | - | <a id="abstracttypetestfoo"></a>`foo` | [`UserOrGroup`](#userorgroup) | The foo. | - DOC - - union_section = <<~DOC - #### `UserOrGroup` - - Either a user or a group. - - One of: - - - [`Group`](#group) - - [`User`](#user) - DOC - - interface_section = <<~DOC - #### `Flying` - - Something that can fly. - - Implementations: - - - [`AfricanSwallow`](#africanswallow) - - ##### Fields - - | Name | Type | Description | - | ---- | ---- | ----------- | - | <a id="flyingflightspeed"></a>`flightSpeed` | [`Int`](#int) | Speed in mph. | - DOC - - implementation_section = <<~DOC - ### `AfricanSwallow` - - A swallow from Africa. - - #### Fields - - | Name | Type | Description | - | ---- | ---- | ----------- | - | <a id="africanswallowflightspeed"></a>`flightSpeed` | [`Int`](#int) | Speed in mph. 
| - DOC - - is_expected.to include( - type_section, - union_section, - interface_section, - implementation_section - ) - end - end - end -end diff --git a/spec/lib/gitlab/health_checks/redis/trace_chunks_check_spec.rb b/spec/lib/gitlab/health_checks/redis/trace_chunks_check_spec.rb new file mode 100644 index 00000000000..5fb5232a4dd --- /dev/null +++ b/spec/lib/gitlab/health_checks/redis/trace_chunks_check_spec.rb @@ -0,0 +1,8 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_relative '../simple_check_shared' + +RSpec.describe Gitlab::HealthChecks::Redis::TraceChunksCheck do + include_examples 'simple_check', 'redis_trace_chunks_ping', 'RedisTraceChunks', 'PONG' +end diff --git a/spec/lib/gitlab/health_checks/unicorn_check_spec.rb b/spec/lib/gitlab/health_checks/unicorn_check_spec.rb deleted file mode 100644 index 1cc44016002..00000000000 --- a/spec/lib/gitlab/health_checks/unicorn_check_spec.rb +++ /dev/null @@ -1,67 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::HealthChecks::UnicornCheck do - let(:result_class) { Gitlab::HealthChecks::Result } - let(:readiness) { described_class.readiness } - let(:metrics) { described_class.metrics } - - before do - described_class.clear_memoization(:http_servers) - end - - shared_examples 'with state' do |(state, message)| - it "does provide readiness" do - expect(readiness).to eq(result_class.new('unicorn_check', state, message)) - end - - it "does provide metrics" do - expect(metrics).to include( - an_object_having_attributes(name: 'unicorn_check_success', value: state ? 
1 : 0)) - expect(metrics).to include( - an_object_having_attributes(name: 'unicorn_check_latency_seconds', value: be >= 0)) - end - end - - context 'when Unicorn is not loaded' do - before do - allow(Gitlab::Runtime).to receive(:unicorn?).and_return(false) - hide_const('Unicorn') - end - - it "does not provide readiness and metrics" do - expect(readiness).to be_nil - expect(metrics).to be_nil - end - end - - context 'when Unicorn is loaded' do - let(:http_server_class) { Struct.new(:worker_processes) } - - before do - allow(Gitlab::Runtime).to receive(:unicorn?).and_return(true) - stub_const('Unicorn::HttpServer', http_server_class) - end - - context 'when no servers are running' do - it_behaves_like 'with state', [false, 'unexpected Unicorn check result: 0'] - end - - context 'when servers without workers are running' do - before do - http_server_class.new(0) - end - - it_behaves_like 'with state', [false, 'unexpected Unicorn check result: 0'] - end - - context 'when servers with workers are running' do - before do - http_server_class.new(1) - end - - it_behaves_like 'with state', true - end - end -end diff --git a/spec/lib/gitlab/highlight_spec.rb b/spec/lib/gitlab/highlight_spec.rb index a5e4d37d306..1f06019c929 100644 --- a/spec/lib/gitlab/highlight_spec.rb +++ b/spec/lib/gitlab/highlight_spec.rb @@ -54,7 +54,7 @@ RSpec.describe Gitlab::Highlight do end it 'increments the metric for oversized files' do - expect { result }.to change { over_highlight_size_limit('text highlighter') }.by(1) + expect { result }.to change { over_highlight_size_limit('file size: 0.0001') }.by(1) end it 'returns plain version for long content' do @@ -143,9 +143,21 @@ RSpec.describe Gitlab::Highlight do end describe 'highlight timeouts' do - context 'when there is a timeout error while highlighting' do - let(:result) { described_class.highlight(file_name, content) } + let(:result) { described_class.highlight(file_name, content, language: "ruby") } + + context 'when there is an attempt' 
do + it "increments the attempt counter with a defined language" do + expect { result }.to change { highlight_attempt_total("ruby") } + end + + it "increments the attempt counter with an undefined language" do + expect do + described_class.highlight(file_name, content) + end.to change { highlight_attempt_total("undefined") } + end + end + context 'when there is a timeout error while highlighting' do before do allow(Timeout).to receive(:timeout).twice.and_raise(Timeout::Error) # This is done twice because it's rescued first and then @@ -177,6 +189,12 @@ RSpec.describe Gitlab::Highlight do .get(source: source) end + def highlight_attempt_total(source) + Gitlab::Metrics + .counter(:file_highlighting_attempt, 'Counts the times highlighting has been attempted on a file') + .get(source: source) + end + def over_highlight_size_limit(source) Gitlab::Metrics .counter(:over_highlight_size_limit, diff --git a/spec/lib/gitlab/hook_data/issue_builder_spec.rb b/spec/lib/gitlab/hook_data/issue_builder_spec.rb index 8f898d898de..8f976bcf09d 100644 --- a/spec/lib/gitlab/hook_data/issue_builder_spec.rb +++ b/spec/lib/gitlab/hook_data/issue_builder_spec.rb @@ -42,8 +42,10 @@ RSpec.describe Gitlab::HookData::IssueBuilder do it 'includes additional attrs' do expect(data).to include(:total_time_spent) + expect(data).to include(:time_change) expect(data).to include(:human_time_estimate) expect(data).to include(:human_total_time_spent) + expect(data).to include(:human_time_change) expect(data).to include(:assignee_ids) expect(data).to include(:state) expect(data).to include('labels' => [label.hook_attrs]) diff --git a/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb b/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb index 0339faa9fcf..9e6ad35861f 100644 --- a/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb +++ b/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb @@ -57,8 +57,10 @@ RSpec.describe Gitlab::HookData::MergeRequestBuilder do expect(data).to 
include(:last_commit) expect(data).to include(:work_in_progress) expect(data).to include(:total_time_spent) + expect(data).to include(:time_change) expect(data).to include(:human_time_estimate) expect(data).to include(:human_total_time_spent) + expect(data).to include(:human_time_change) end context 'when the MR has an image in the description' do diff --git a/spec/lib/gitlab/import_export/after_export_strategies/web_upload_strategy_spec.rb b/spec/lib/gitlab/import_export/after_export_strategies/web_upload_strategy_spec.rb index 38f1d48798b..451fd6c6f46 100644 --- a/spec/lib/gitlab/import_export/after_export_strategies/web_upload_strategy_spec.rb +++ b/spec/lib/gitlab/import_export/after_export_strategies/web_upload_strategy_spec.rb @@ -27,20 +27,30 @@ RSpec.describe Gitlab::ImportExport::AfterExportStrategies::WebUploadStrategy do expect(subject.new(url: example_url, http_method: 'whatever')).not_to be_valid end - it 'onyl allow urls as upload urls' do + it 'only allow urls as upload urls' do expect(subject.new(url: example_url)).to be_valid expect(subject.new(url: 'whatever')).not_to be_valid end end describe '#execute' do - it 'removes the exported project file after the upload' do - allow(strategy).to receive(:send_file) - allow(strategy).to receive(:handle_response_error) + context 'when upload succeeds' do + before do + allow(strategy).to receive(:send_file) + allow(strategy).to receive(:handle_response_error) + end + + it 'does not remove the exported project file after the upload' do + expect(project).not_to receive(:remove_exports) - expect(project).to receive(:remove_exports) + strategy.execute(user, project) + end - strategy.execute(user, project) + it 'has finished export status' do + strategy.execute(user, project) + + expect(project.export_status).to eq(:finished) + end end context 'when upload fails' do diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml index f81db1413c2..781c55f8d9b 100644 --- 
a/spec/lib/gitlab/import_export/all_models.yml +++ b/spec/lib/gitlab/import_export/all_models.yml @@ -342,10 +342,9 @@ container_repositories: - project - name project: -- external_approval_rules +- external_status_checks - taggings - base_tags -- tags - topic_taggings - topics - chat_services @@ -362,11 +361,11 @@ project: - boards - last_event - integrations -- campfire_service -- confluence_service -- datadog_service -- discord_service -- drone_ci_service +- campfire_integration +- confluence_integration +- datadog_integration +- discord_integration +- drone_ci_integration - emails_on_push_service - pipelines_email_service - mattermost_slash_commands_service @@ -376,26 +375,25 @@ project: - pivotaltracker_service - prometheus_service - flowdock_service -- assembla_service -- asana_service +- assembla_integration +- asana_integration - slack_service - microsoft_teams_service - mattermost_service - hangouts_chat_service - unify_circuit_service -- buildkite_service -- bamboo_service +- buildkite_integration +- bamboo_integration - teamcity_service - pushover_service - jira_service - redmine_service - youtrack_service -- custom_issue_tracker_service -- bugzilla_service +- custom_issue_tracker_integration +- bugzilla_integration - ewm_service - external_wiki_service - mock_ci_service -- mock_deployment_service - mock_monitoring_service - forked_to_members - forked_from_project @@ -571,6 +569,7 @@ project: - exported_protected_branches - incident_management_oncall_schedules - incident_management_oncall_rotations +- incident_management_escalation_policies - debian_distributions - merge_request_metrics - security_orchestration_policy_configuration @@ -624,6 +623,7 @@ metrics_setting: - project protected_environments: - project +- group - deploy_access_levels deploy_access_levels: - protected_environment diff --git a/spec/lib/gitlab/import_export/base/relation_factory_spec.rb b/spec/lib/gitlab/import_export/base/relation_factory_spec.rb index df33b4896a4..6a7ff33465d 
100644 --- a/spec/lib/gitlab/import_export/base/relation_factory_spec.rb +++ b/spec/lib/gitlab/import_export/base/relation_factory_spec.rb @@ -43,6 +43,15 @@ RSpec.describe Gitlab::ImportExport::Base::RelationFactory do end end + context 'when author relation' do + let(:relation_sym) { :author } + let(:relation_hash) { { 'name' => 'User', 'project_id' => project.id } } + + it 'returns author hash unchanged' do + expect(subject).to eq(relation_hash) + end + end + context 'when #setup_models is not implemented' do it 'raises NotImplementedError' do expect { subject }.to raise_error(NotImplementedError) diff --git a/spec/lib/gitlab/import_export/command_line_util_spec.rb b/spec/lib/gitlab/import_export/command_line_util_spec.rb index 4000e303816..39a10f87083 100644 --- a/spec/lib/gitlab/import_export/command_line_util_spec.rb +++ b/spec/lib/gitlab/import_export/command_line_util_spec.rb @@ -42,6 +42,8 @@ RSpec.describe Gitlab::ImportExport::CommandLineUtil do filename = File.basename(tempfile.path) subject.gzip(dir: path, filename: filename) + + expect(File.exist?("#{tempfile.path}.gz")).to eq(true) end context 'when exception occurs' do @@ -50,4 +52,25 @@ RSpec.describe Gitlab::ImportExport::CommandLineUtil do end end end + + describe '#gunzip' do + it 'decompresses specified file' do + tmpdir = Dir.mktmpdir + filename = 'labels.ndjson.gz' + gz_filepath = "spec/fixtures/bulk_imports/gz/#{filename}" + FileUtils.copy_file(gz_filepath, File.join(tmpdir, filename)) + + subject.gunzip(dir: tmpdir, filename: filename) + + expect(File.exist?(File.join(tmpdir, 'labels.ndjson'))).to eq(true) + + FileUtils.remove_entry(tmpdir) + end + + context 'when exception occurs' do + it 'raises an exception' do + expect { subject.gunzip(dir: path, filename: 'test') }.to raise_error(Gitlab::ImportExport::Error) + end + end + end end diff --git a/spec/lib/gitlab/import_export/decompressed_archive_size_validator_spec.rb 
b/spec/lib/gitlab/import_export/decompressed_archive_size_validator_spec.rb index 96c467e78d6..fe3b638d20f 100644 --- a/spec/lib/gitlab/import_export/decompressed_archive_size_validator_spec.rb +++ b/spec/lib/gitlab/import_export/decompressed_archive_size_validator_spec.rb @@ -24,6 +24,14 @@ RSpec.describe Gitlab::ImportExport::DecompressedArchiveSizeValidator do it 'returns true' do expect(subject.valid?).to eq(true) end + + context 'when waiter thread no longer exists' do + it 'does not raise exception' do + allow(Process).to receive(:getpgid).and_raise(Errno::ESRCH) + + expect(subject.valid?).to eq(true) + end + end end context 'when file exceeds allowed decompressed size' do diff --git a/spec/lib/gitlab/import_export/group/tree_saver_spec.rb b/spec/lib/gitlab/import_export/group/tree_saver_spec.rb index 908896e4891..c52daa8ccfd 100644 --- a/spec/lib/gitlab/import_export/group/tree_saver_spec.rb +++ b/spec/lib/gitlab/import_export/group/tree_saver_spec.rb @@ -31,7 +31,7 @@ RSpec.describe Gitlab::ImportExport::Group::TreeSaver do end it 'fails to export a group' do - allow_next_instance_of(Gitlab::ImportExport::JSON::NdjsonWriter) do |ndjson_writer| + allow_next_instance_of(Gitlab::ImportExport::Json::NdjsonWriter) do |ndjson_writer| allow(ndjson_writer).to receive(:write_relation_array).and_raise(RuntimeError, 'exception') end diff --git a/spec/lib/gitlab/import_export/import_failure_service_spec.rb b/spec/lib/gitlab/import_export/import_failure_service_spec.rb index c8bb067d40c..51f1fc9c6a2 100644 --- a/spec/lib/gitlab/import_export/import_failure_service_spec.rb +++ b/spec/lib/gitlab/import_export/import_failure_service_spec.rb @@ -43,7 +43,7 @@ RSpec.describe Gitlab::ImportExport::ImportFailureService do let(:importable) { create(:merge_request) } it 'raise exception' do - expect { subject }.to raise_exception(ActiveRecord::AssociationNotFoundError, "Association named 'import_failures' was not found on MergeRequest; perhaps you misspelled it?") + expect { 
subject }.to raise_exception(ActiveRecord::AssociationNotFoundError, /Association named 'import_failures' was not found on MergeRequest/) end end end diff --git a/spec/lib/gitlab/import_export/json/legacy_reader/file_spec.rb b/spec/lib/gitlab/import_export/json/legacy_reader/file_spec.rb index e092891f236..793b3ebfb9e 100644 --- a/spec/lib/gitlab/import_export/json/legacy_reader/file_spec.rb +++ b/spec/lib/gitlab/import_export/json/legacy_reader/file_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' require_relative 'shared_example' -RSpec.describe Gitlab::ImportExport::JSON::LegacyReader::File do +RSpec.describe Gitlab::ImportExport::Json::LegacyReader::File do it_behaves_like 'import/export json legacy reader' do let(:valid_path) { 'spec/fixtures/lib/gitlab/import_export/light/project.json' } let(:data) { valid_path } diff --git a/spec/lib/gitlab/import_export/json/legacy_reader/hash_spec.rb b/spec/lib/gitlab/import_export/json/legacy_reader/hash_spec.rb index e47122b6151..57d66dc0f50 100644 --- a/spec/lib/gitlab/import_export/json/legacy_reader/hash_spec.rb +++ b/spec/lib/gitlab/import_export/json/legacy_reader/hash_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' require_relative 'shared_example' -RSpec.describe Gitlab::ImportExport::JSON::LegacyReader::Hash do +RSpec.describe Gitlab::ImportExport::Json::LegacyReader::Hash do it_behaves_like 'import/export json legacy reader' do let(:path) { 'spec/fixtures/lib/gitlab/import_export/light/project.json' } diff --git a/spec/lib/gitlab/import_export/json/legacy_writer_spec.rb b/spec/lib/gitlab/import_export/json/legacy_writer_spec.rb index eb7a2d4aa8b..ab2c4cc2059 100644 --- a/spec/lib/gitlab/import_export/json/legacy_writer_spec.rb +++ b/spec/lib/gitlab/import_export/json/legacy_writer_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::ImportExport::JSON::LegacyWriter do +RSpec.describe Gitlab::ImportExport::Json::LegacyWriter do let(:path) { "#{Dir.tmpdir}/legacy_writer_spec/test.json" } subject do 
diff --git a/spec/lib/gitlab/import_export/json/ndjson_reader_spec.rb b/spec/lib/gitlab/import_export/json/ndjson_reader_spec.rb index b477ac45577..0ca4c4ccc87 100644 --- a/spec/lib/gitlab/import_export/json/ndjson_reader_spec.rb +++ b/spec/lib/gitlab/import_export/json/ndjson_reader_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::ImportExport::JSON::NdjsonReader do +RSpec.describe Gitlab::ImportExport::Json::NdjsonReader do include ImportExport::CommonUtil let(:fixture) { 'spec/fixtures/lib/gitlab/import_export/light/tree' } diff --git a/spec/lib/gitlab/import_export/json/ndjson_writer_spec.rb b/spec/lib/gitlab/import_export/json/ndjson_writer_spec.rb index 2a5e802bdc5..9be95591ae9 100644 --- a/spec/lib/gitlab/import_export/json/ndjson_writer_spec.rb +++ b/spec/lib/gitlab/import_export/json/ndjson_writer_spec.rb @@ -2,7 +2,7 @@ require "spec_helper" -RSpec.describe Gitlab::ImportExport::JSON::NdjsonWriter do +RSpec.describe Gitlab::ImportExport::Json::NdjsonWriter do include ImportExport::CommonUtil let(:path) { "#{Dir.tmpdir}/ndjson_writer_spec/tree" } diff --git a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb index a0b2faaecfe..deb22de9160 100644 --- a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb +++ b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::ImportExport::JSON::StreamingSerializer do +RSpec.describe Gitlab::ImportExport::Json::StreamingSerializer do let_it_be(:user) { create(:user) } let_it_be(:release) { create(:release) } let_it_be(:group) { create(:group) } @@ -27,7 +27,7 @@ RSpec.describe Gitlab::ImportExport::JSON::StreamingSerializer do end let(:exportable_path) { 'project' } - let(:json_writer) { instance_double('Gitlab::ImportExport::JSON::LegacyWriter') } + let(:json_writer) { instance_double('Gitlab::ImportExport::Json::LegacyWriter') } 
let(:hash) { { name: exportable.name, description: exportable.description }.stringify_keys } let(:include) { [] } let(:custom_orderer) { nil } diff --git a/spec/lib/gitlab/import_export/legacy_relation_tree_saver_spec.rb b/spec/lib/gitlab/import_export/legacy_relation_tree_saver_spec.rb index 454cc74b9d4..3b7ed7cb32b 100644 --- a/spec/lib/gitlab/import_export/legacy_relation_tree_saver_spec.rb +++ b/spec/lib/gitlab/import_export/legacy_relation_tree_saver_spec.rb @@ -28,7 +28,7 @@ RSpec.describe Gitlab::ImportExport::LegacyRelationTreeSaver do stub_feature_flags(export_reduce_relation_batch_size: true) end - include_examples 'FastHashSerializer with batch size', Gitlab::ImportExport::JSON::StreamingSerializer::SMALLER_BATCH_SIZE + include_examples 'FastHashSerializer with batch size', Gitlab::ImportExport::Json::StreamingSerializer::SMALLER_BATCH_SIZE end context 'when export_reduce_relation_batch_size feature flag is disabled' do @@ -36,7 +36,7 @@ RSpec.describe Gitlab::ImportExport::LegacyRelationTreeSaver do stub_feature_flags(export_reduce_relation_batch_size: false) end - include_examples 'FastHashSerializer with batch size', Gitlab::ImportExport::JSON::StreamingSerializer::BATCH_SIZE + include_examples 'FastHashSerializer with batch size', Gitlab::ImportExport::Json::StreamingSerializer::BATCH_SIZE end end end diff --git a/spec/lib/gitlab/import_export/project/export_task_spec.rb b/spec/lib/gitlab/import_export/project/export_task_spec.rb index 7fcd2187a90..3dd1e9257cc 100644 --- a/spec/lib/gitlab/import_export/project/export_task_spec.rb +++ b/spec/lib/gitlab/import_export/project/export_task_spec.rb @@ -2,7 +2,7 @@ require 'rake_helper' -RSpec.describe Gitlab::ImportExport::Project::ExportTask do +RSpec.describe Gitlab::ImportExport::Project::ExportTask, :silence_stdout do let_it_be(:username) { 'root' } let(:namespace_path) { username } let_it_be(:user) { create(:user, username: username) } diff --git 
a/spec/lib/gitlab/import_export/project/import_task_spec.rb b/spec/lib/gitlab/import_export/project/import_task_spec.rb index 90f4501acdc..c847224cb9b 100644 --- a/spec/lib/gitlab/import_export/project/import_task_spec.rb +++ b/spec/lib/gitlab/import_export/project/import_task_spec.rb @@ -2,7 +2,7 @@ require 'rake_helper' -RSpec.describe Gitlab::ImportExport::Project::ImportTask, :request_store do +RSpec.describe Gitlab::ImportExport::Project::ImportTask, :request_store, :silence_stdout do let(:username) { 'root' } let(:namespace_path) { username } let!(:user) { create(:user, username: username) } diff --git a/spec/lib/gitlab/import_export/project/sample/relation_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/sample/relation_tree_restorer_spec.rb index f87f79d4462..f6a028383f2 100644 --- a/spec/lib/gitlab/import_export/project/sample/relation_tree_restorer_spec.rb +++ b/spec/lib/gitlab/import_export/project/sample/relation_tree_restorer_spec.rb @@ -77,7 +77,7 @@ RSpec.describe Gitlab::ImportExport::Project::Sample::RelationTreeRestorer do let(:relation_factory) { Gitlab::ImportExport::Project::Sample::RelationFactory } let(:reader) { Gitlab::ImportExport::Reader.new(shared: shared) } let(:path) { 'spec/fixtures/lib/gitlab/import_export/sample_data/tree' } - let(:relation_reader) { Gitlab::ImportExport::JSON::NdjsonReader.new(path) } + let(:relation_reader) { Gitlab::ImportExport::Json::NdjsonReader.new(path) } it 'initializes relation_factory with date_calculator as parameter' do expect(Gitlab::ImportExport::Project::Sample::RelationFactory).to receive(:create).with(hash_including(:date_calculator)).at_least(:once).times diff --git a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb index bc5e6ea7bb3..1b5fba85020 100644 --- a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb +++ b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb @@ -600,9 +600,8 @@ 
RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do setup_import_export_config('light') setup_reader(reader) - expect(project) - .to receive(:merge_requests) - .and_raise(exception) + expect(project).to receive(:merge_requests).and_call_original + expect(project).to receive(:merge_requests).and_raise(exception) end it 'report post import error' do @@ -618,12 +617,9 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do setup_import_export_config('light') setup_reader(reader) - expect(project) - .to receive(:merge_requests) - .and_raise(exception) - expect(project) - .to receive(:merge_requests) - .and_call_original + expect(project).to receive(:merge_requests).and_call_original + expect(project).to receive(:merge_requests).and_raise(exception) + expect(project).to receive(:merge_requests).and_call_original expect(restored_project_json).to eq(true) end @@ -824,9 +820,9 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do end before do - allow_any_instance_of(Gitlab::ImportExport::JSON::LegacyReader::File).to receive(:exist?).and_return(true) - allow_any_instance_of(Gitlab::ImportExport::JSON::NdjsonReader).to receive(:exist?).and_return(false) - allow_any_instance_of(Gitlab::ImportExport::JSON::LegacyReader::File).to receive(:tree_hash) { tree_hash } + allow_any_instance_of(Gitlab::ImportExport::Json::LegacyReader::File).to receive(:exist?).and_return(true) + allow_any_instance_of(Gitlab::ImportExport::Json::NdjsonReader).to receive(:exist?).and_return(false) + allow_any_instance_of(Gitlab::ImportExport::Json::LegacyReader::File).to receive(:tree_hash) { tree_hash } end context 'no group visibility' do diff --git a/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb index d3c14b1f8fe..9325cdac9ed 100644 --- a/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb +++ b/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb @@ -96,7 +96,7 @@ 
RSpec.describe Gitlab::ImportExport::RelationTreeRestorer do context 'using legacy reader' do let(:path) { 'spec/fixtures/lib/gitlab/import_export/complex/project.json' } let(:relation_reader) do - Gitlab::ImportExport::JSON::LegacyReader::File.new( + Gitlab::ImportExport::Json::LegacyReader::File.new( path, relation_names: reader.project_relation_names, allowed_path: 'project' @@ -117,14 +117,14 @@ RSpec.describe Gitlab::ImportExport::RelationTreeRestorer do context 'using ndjson reader' do let(:path) { 'spec/fixtures/lib/gitlab/import_export/complex/tree' } - let(:relation_reader) { Gitlab::ImportExport::JSON::NdjsonReader.new(path) } + let(:relation_reader) { Gitlab::ImportExport::Json::NdjsonReader.new(path) } it_behaves_like 'import project successfully' end context 'with invalid relations' do let(:path) { 'spec/fixtures/lib/gitlab/import_export/project_with_invalid_relations/tree' } - let(:relation_reader) { Gitlab::ImportExport::JSON::NdjsonReader.new(path) } + let(:relation_reader) { Gitlab::ImportExport::Json::NdjsonReader.new(path) } it 'logs the invalid relation and its errors' do expect(relation_tree_restorer.shared.logger) @@ -151,7 +151,7 @@ RSpec.describe Gitlab::ImportExport::RelationTreeRestorer do let(:object_builder) { Gitlab::ImportExport::Group::ObjectBuilder } let(:relation_factory) { Gitlab::ImportExport::Group::RelationFactory } let(:relation_reader) do - Gitlab::ImportExport::JSON::LegacyReader::File.new( + Gitlab::ImportExport::Json::LegacyReader::File.new( path, relation_names: reader.group_relation_names) end diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml index 70ebff2a54e..2173bee6b4b 100644 --- a/spec/lib/gitlab/import_export/safe_model_attributes.yml +++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml @@ -102,7 +102,6 @@ ProjectLabel: - template - description - priority -- remove_on_close Milestone: - id - title @@ -702,6 +701,7 @@ 
ProjectSetting: ProtectedEnvironment: - id - project_id +- group_id - name - created_at - updated_at diff --git a/spec/lib/gitlab/import_export/shared_spec.rb b/spec/lib/gitlab/import_export/shared_spec.rb index 22f2d4c5077..feeb88397eb 100644 --- a/spec/lib/gitlab/import_export/shared_spec.rb +++ b/spec/lib/gitlab/import_export/shared_spec.rb @@ -37,6 +37,28 @@ RSpec.describe Gitlab::ImportExport::Shared do end end + context 'with a group on disk' do + describe '#base_path' do + it 'uses hashed storage path' do + group = create(:group) + subject = described_class.new(group) + base_path = %(/tmp/gitlab_exports/@groups/) + + expect(subject.base_path).to match(/#{base_path}\h{2}\/\h{2}\/\h{64}/) + end + end + end + + context 'when exportable type is unsupported' do + describe '#base_path' do + it 'raises' do + subject = described_class.new('test') + + expect { subject.base_path }.to raise_error(Gitlab::ImportExport::Error, 'Unsupported Exportable Type String') + end + end + end + describe '#error' do let(:error) { StandardError.new('Error importing into /my/folder Permission denied @ unlink_internal - /var/opt/gitlab/gitlab-rails/shared/a/b/c/uploads/file') } diff --git a/spec/lib/gitlab/instrumentation/redis_base_spec.rb b/spec/lib/gitlab/instrumentation/redis_base_spec.rb index 07be0ccf6e9..a7e08b5a9bd 100644 --- a/spec/lib/gitlab/instrumentation/redis_base_spec.rb +++ b/spec/lib/gitlab/instrumentation/redis_base_spec.rb @@ -18,24 +18,6 @@ RSpec.describe Gitlab::Instrumentation::RedisBase, :request_store do end end - describe '.known_payload_keys' do - it 'returns generated payload keys' do - expect(instrumentation_class_a.known_payload_keys).to eq([:redis_instance_a_calls, - :redis_instance_a_duration_s, - :redis_instance_a_read_bytes, - :redis_instance_a_write_bytes]) - end - - it 'does not call calculation methods' do - expect(instrumentation_class_a).not_to receive(:get_request_count) - expect(instrumentation_class_a).not_to receive(:query_time) - 
expect(instrumentation_class_a).not_to receive(:read_bytes) - expect(instrumentation_class_a).not_to receive(:write_bytes) - - instrumentation_class_a.known_payload_keys - end - end - describe '.payload' do it 'returns values that are higher than 0' do allow(instrumentation_class_a).to receive(:get_request_count) { 1 } diff --git a/spec/lib/gitlab/instrumentation/redis_spec.rb b/spec/lib/gitlab/instrumentation/redis_spec.rb index e927f39cae2..6cddf958f2a 100644 --- a/spec/lib/gitlab/instrumentation/redis_spec.rb +++ b/spec/lib/gitlab/instrumentation/redis_spec.rb @@ -26,46 +26,6 @@ RSpec.describe Gitlab::Instrumentation::Redis do it_behaves_like 'aggregation of redis storage data', :read_bytes it_behaves_like 'aggregation of redis storage data', :write_bytes - describe '.known_payload_keys' do - it 'returns all known payload keys' do - expected_keys = [ - :redis_calls, - :redis_duration_s, - :redis_read_bytes, - :redis_write_bytes, - :redis_action_cable_calls, - :redis_action_cable_duration_s, - :redis_action_cable_read_bytes, - :redis_action_cable_write_bytes, - :redis_cache_calls, - :redis_cache_duration_s, - :redis_cache_read_bytes, - :redis_cache_write_bytes, - :redis_queues_calls, - :redis_queues_duration_s, - :redis_queues_read_bytes, - :redis_queues_write_bytes, - :redis_shared_state_calls, - :redis_shared_state_duration_s, - :redis_shared_state_read_bytes, - :redis_shared_state_write_bytes - ] - - expect(described_class.known_payload_keys).to eq(expected_keys) - end - - it 'does not call storage calculation methods' do - described_class::STORAGES.each do |storage| - expect(storage).not_to receive(:get_request_count) - expect(storage).not_to receive(:query_time) - expect(storage).not_to receive(:read_bytes) - expect(storage).not_to receive(:write_bytes) - end - - described_class.known_payload_keys - end - end - describe '.payload', :request_store do before do Gitlab::Redis::Cache.with { |redis| redis.set('cache-test', 321) } @@ -108,7 +68,8 @@ RSpec.describe 
Gitlab::Instrumentation::Redis do .to contain_exactly(details_row.merge(storage: 'ActionCable'), details_row.merge(storage: 'Cache'), details_row.merge(storage: 'Queues'), - details_row.merge(storage: 'SharedState')) + details_row.merge(storage: 'SharedState'), + details_row.merge(storage: 'TraceChunks')) end end end diff --git a/spec/lib/gitlab/instrumentation_helper_spec.rb b/spec/lib/gitlab/instrumentation_helper_spec.rb index 488324ccddc..28ae90d4947 100644 --- a/spec/lib/gitlab/instrumentation_helper_spec.rb +++ b/spec/lib/gitlab/instrumentation_helper_spec.rb @@ -117,6 +117,42 @@ RSpec.describe Gitlab::InstrumentationHelper do end end end + + context 'when load balancing is enabled' do + include_context 'clear DB Load Balancing configuration' + + before do + allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true) + end + + it 'includes DB counts' do + subject + + expect(payload).to include(db_replica_count: 0, + db_replica_cached_count: 0, + db_primary_count: 0, + db_primary_cached_count: 0, + db_primary_wal_count: 0, + db_replica_wal_count: 0) + end + end + + context 'when load balancing is disabled' do + before do + allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(false) + end + + it 'does not include DB counts' do + subject + + expect(payload).not_to include(db_replica_count: 0, + db_replica_cached_count: 0, + db_primary_count: 0, + db_primary_cached_count: 0, + db_primary_wal_count: 0, + db_replica_wal_count: 0) + end + end end describe '.queue_duration_for_job' do diff --git a/spec/lib/gitlab/json_spec.rb b/spec/lib/gitlab/json_spec.rb index 42c4b315edf..f9f57752b0a 100644 --- a/spec/lib/gitlab/json_spec.rb +++ b/spec/lib/gitlab/json_spec.rb @@ -411,7 +411,7 @@ RSpec.describe Gitlab::Json do end describe Gitlab::Json::LimitedEncoder do - subject { described_class.encode(obj, limit: 8.kilobytes) } + subject { described_class.encode(obj, limit: 10.kilobytes) } context 'when object size is acceptable' do let(:obj) 
{ { test: true } } @@ -431,6 +431,16 @@ RSpec.describe Gitlab::Json do end end + context 'when object contains ASCII-8BIT encoding' do + let(:obj) { [{ a: "\x8F" }] * 1000 } + + it 'does not raise encoding error' do + expect { subject }.not_to raise_error + expect(subject).to be_a(String) + expect(subject.size).to eq(10001) + end + end + context 'when json_limited_encoder is disabled' do let(:obj) { [{ test: true }] * 1000 } diff --git a/spec/lib/gitlab/kas/client_spec.rb b/spec/lib/gitlab/kas/client_spec.rb new file mode 100644 index 00000000000..7bf2d30ca48 --- /dev/null +++ b/spec/lib/gitlab/kas/client_spec.rb @@ -0,0 +1,84 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Kas::Client do + let_it_be(:project) { create(:project) } + + describe '#initialize' do + context 'kas is not enabled' do + before do + allow(Gitlab::Kas).to receive(:enabled?).and_return(false) + end + + it 'raises a configuration error' do + expect { described_class.new }.to raise_error(described_class::ConfigurationError, 'GitLab KAS is not enabled') + end + end + + context 'internal url is not set' do + before do + allow(Gitlab::Kas).to receive(:enabled?).and_return(true) + allow(Gitlab::Kas).to receive(:internal_url).and_return(nil) + end + + it 'raises a configuration error' do + expect { described_class.new }.to raise_error(described_class::ConfigurationError, 'KAS internal URL is not configured') + end + end + end + + describe 'gRPC calls' do + let(:token) { instance_double(JSONWebToken::HMACToken, encoded: 'test-token') } + + before do + allow(Gitlab::Kas).to receive(:enabled?).and_return(true) + allow(Gitlab::Kas).to receive(:internal_url).and_return('grpc://example.kas.internal') + + expect(JSONWebToken::HMACToken).to receive(:new) + .with(Gitlab::Kas.secret) + .and_return(token) + + expect(token).to receive(:issuer=).with(Settings.gitlab.host) + expect(token).to receive(:audience=).with(described_class::JWT_AUDIENCE) + end + + describe 
'#list_agent_config_files' do + let(:stub) { instance_double(Gitlab::Agent::ConfigurationProject::Rpc::ConfigurationProject::Stub) } + + let(:request) { instance_double(Gitlab::Agent::ConfigurationProject::Rpc::ListAgentConfigFilesRequest) } + let(:response) { double(Gitlab::Agent::ConfigurationProject::Rpc::ListAgentConfigFilesResponse, config_files: agent_configurations) } + + let(:repository) { instance_double(Gitlab::Agent::Modserver::Repository) } + let(:gitaly_address) { instance_double(Gitlab::Agent::Modserver::GitalyAddress) } + + let(:agent_configurations) { [double] } + + subject { described_class.new.list_agent_config_files(project: project) } + + before do + expect(Gitlab::Agent::ConfigurationProject::Rpc::ConfigurationProject::Stub).to receive(:new) + .with('example.kas.internal', :this_channel_is_insecure, timeout: described_class::TIMEOUT) + .and_return(stub) + + expect(Gitlab::Agent::Modserver::Repository).to receive(:new) + .with(project.repository.gitaly_repository.to_h) + .and_return(repository) + + expect(Gitlab::Agent::Modserver::GitalyAddress).to receive(:new) + .with(Gitlab::GitalyClient.connection_data(project.repository_storage)) + .and_return(gitaly_address) + + expect(Gitlab::Agent::ConfigurationProject::Rpc::ListAgentConfigFilesRequest).to receive(:new) + .with(repository: repository, gitaly_address: gitaly_address) + .and_return(request) + + expect(stub).to receive(:list_agent_config_files) + .with(request, metadata: { 'authorization' => 'bearer test-token' }) + .and_return(response) + end + + it { expect(subject).to eq(agent_configurations) } + end + end +end diff --git a/spec/lib/gitlab/kas_spec.rb b/spec/lib/gitlab/kas_spec.rb index e323f76b42e..c9d40f785b8 100644 --- a/spec/lib/gitlab/kas_spec.rb +++ b/spec/lib/gitlab/kas_spec.rb @@ -65,6 +65,12 @@ RSpec.describe Gitlab::Kas do end end + describe '.internal_url' do + it 'returns gitlab_kas internal_url config' do + expect(described_class.internal_url).to 
eq(Gitlab.config.gitlab_kas.internal_url) + end + end + describe '.version' do it 'returns gitlab_kas version config' do version_file = Rails.root.join(described_class::VERSION_FILE) diff --git a/spec/lib/gitlab/kubernetes/helm/parsers/list_v2_spec.rb b/spec/lib/gitlab/kubernetes/helm/parsers/list_v2_spec.rb deleted file mode 100644 index 435c296d5f1..00000000000 --- a/spec/lib/gitlab/kubernetes/helm/parsers/list_v2_spec.rb +++ /dev/null @@ -1,100 +0,0 @@ -# frozen_string_literal: true - -require 'fast_spec_helper' - -RSpec.describe Gitlab::Kubernetes::Helm::Parsers::ListV2 do - let(:valid_file_contents) do - <<~EOF - { - "Next": "", - "Releases": [ - { - "Name": "certmanager", - "Revision": 2, - "Updated": "Sun Mar 29 06:55:42 2020", - "Status": "DEPLOYED", - "Chart": "cert-manager-v0.10.1", - "AppVersion": "v0.10.1", - "Namespace": "gitlab-managed-apps" - }, - { - "Name": "certmanager-crds", - "Revision": 2, - "Updated": "Sun Mar 29 06:55:32 2020", - "Status": "DEPLOYED", - "Chart": "cert-manager-crds-v0.2.0", - "AppVersion": "release-0.10", - "Namespace": "gitlab-managed-apps" - }, - { - "Name": "certmanager-issuer", - "Revision": 1, - "Updated": "Tue Feb 18 10:04:04 2020", - "Status": "FAILED", - "Chart": "cert-manager-issuer-v0.1.0", - "AppVersion": "", - "Namespace": "gitlab-managed-apps" - }, - { - "Name": "runner", - "Revision": 2, - "Updated": "Sun Mar 29 07:01:01 2020", - "Status": "DEPLOYED", - "Chart": "gitlab-runner-0.14.0", - "AppVersion": "12.8.0", - "Namespace": "gitlab-managed-apps" - } - ] - } - EOF - end - - describe '#initialize' do - it 'initializes without error' do - expect do - described_class.new(valid_file_contents) - end.not_to raise_error - end - - it 'raises an error on invalid JSON' do - expect do - described_class.new('') - end.to raise_error(described_class::ParserError) - end - end - - describe '#releases' do - subject(:list_v2) { described_class.new(valid_file_contents) } - - it 'returns list of releases' do - 
expect(list_v2.releases).to match([ - a_hash_including('Name' => 'certmanager', 'Status' => 'DEPLOYED'), - a_hash_including('Name' => 'certmanager-crds', 'Status' => 'DEPLOYED'), - a_hash_including('Name' => 'certmanager-issuer', 'Status' => 'FAILED'), - a_hash_including('Name' => 'runner', 'Status' => 'DEPLOYED') - ]) - end - - context 'empty Releases' do - let(:valid_file_contents) { '{}' } - - it 'returns an empty array' do - expect(list_v2.releases).to eq([]) - end - end - - context 'invalid Releases' do - let(:invalid_file_contents) do - '{ "Releases" : ["a", "b"] }' - end - - subject(:list_v2) { described_class.new(invalid_file_contents) } - - it 'raises an error' do - expect do - list_v2.releases - end.to raise_error(described_class::ParserError, 'Invalid format for Releases') - end - end - end -end diff --git a/spec/lib/gitlab/mail_room/mail_room_spec.rb b/spec/lib/gitlab/mail_room/mail_room_spec.rb index ab9a9a035f1..a42da4ad3e0 100644 --- a/spec/lib/gitlab/mail_room/mail_room_spec.rb +++ b/spec/lib/gitlab/mail_room/mail_room_spec.rb @@ -33,6 +33,10 @@ RSpec.describe Gitlab::MailRoom do described_class.instance_variable_set(:@enabled_configs, nil) end + after do + described_class.instance_variable_set(:@enabled_configs, nil) + end + describe '#enabled_configs' do before do allow(described_class).to receive(:load_yaml).and_return(configs) diff --git a/spec/lib/gitlab/markdown_cache/field_data_spec.rb b/spec/lib/gitlab/markdown_cache/field_data_spec.rb index 76d8cbe6b7d..6d4b57254f2 100644 --- a/spec/lib/gitlab/markdown_cache/field_data_spec.rb +++ b/spec/lib/gitlab/markdown_cache/field_data_spec.rb @@ -12,4 +12,11 @@ RSpec.describe Gitlab::MarkdownCache::FieldData do it 'translates a markdown field name into a html field name' do expect(field_data.html_field(:description)).to eq("description_html") end + + describe '#key?' 
do + specify do + expect(field_data.key?(:description)).to be_truthy + expect(field_data.key?(:something_else)).to be_falsy + end + end end diff --git a/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb b/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb index 1f7daaa308d..9d5c4bdf9e2 100644 --- a/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb +++ b/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb @@ -7,8 +7,16 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware, :aggregate_failures do subject { described_class.new(app) } + around do |example| + # Simulate application context middleware + # In fact, this middleware cleans up the contexts after a request lifecycle + ::Gitlab::ApplicationContext.with_context({}) do + example.run + end + end + describe '#call' do - let(:status) { 100 } + let(:status) { 200 } let(:env) { { 'REQUEST_METHOD' => 'GET' } } let(:stack_result) { [status, {}, 'body'] } @@ -71,6 +79,17 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware, :aggregate_failures do end end + context '@app.call returns an error code' do + let(:status) { '500' } + + it 'tracks count but not duration' do + expect(described_class).to receive_message_chain(:http_requests_total, :increment).with(method: 'get', status: '500', feature_category: 'unknown') + expect(described_class).not_to receive(:http_request_duration_seconds) + + subject.call(env) + end + end + context '@app.call throws exception' do let(:http_request_duration_seconds) { double('http_request_duration_seconds') } let(:http_requests_total) { double('http_requests_total') } @@ -91,9 +110,9 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware, :aggregate_failures do end context 'feature category header' do - context 'when a feature category header is present' do + context 'when a feature category context is present' do before do - allow(app).to receive(:call).and_return([200, { described_class::FEATURE_CATEGORY_HEADER => 'issue_tracking' }, nil]) + 
::Gitlab::ApplicationContext.push(feature_category: 'issue_tracking') end it 'adds the feature category to the labels for http_requests_total' do @@ -113,11 +132,20 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware, :aggregate_failures do end end - context 'when the feature category header is an empty string' do + context 'when application raises an exception when the feature category context is present' do before do - allow(app).to receive(:call).and_return([200, { described_class::FEATURE_CATEGORY_HEADER => '' }, nil]) + ::Gitlab::ApplicationContext.push(feature_category: 'issue_tracking') + allow(app).to receive(:call).and_raise(StandardError) end + it 'adds the feature category to the labels for http_requests_total' do + expect(described_class).to receive_message_chain(:http_requests_total, :increment).with(method: 'get', status: 'undefined', feature_category: 'issue_tracking') + + expect { subject.call(env) }.to raise_error(StandardError) + end + end + + context 'when the feature category context is not available' do it 'sets the feature category to unknown' do expect(described_class).to receive_message_chain(:http_requests_total, :increment).with(method: 'get', status: '200', feature_category: 'unknown') expect(described_class).not_to receive(:http_health_requests_total) diff --git a/spec/lib/gitlab/metrics/samplers/unicorn_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/unicorn_sampler_spec.rb deleted file mode 100644 index 7971a7cabd5..00000000000 --- a/spec/lib/gitlab/metrics/samplers/unicorn_sampler_spec.rb +++ /dev/null @@ -1,141 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Metrics::Samplers::UnicornSampler do - subject { described_class.new(1.second) } - - it_behaves_like 'metrics sampler', 'UNICORN_SAMPLER' - - describe '#sample' do - let(:unicorn) { Module.new } - let(:raindrops) { double('raindrops') } - let(:stats) { double('stats') } - - before do - stub_const('Unicorn', unicorn) - 
stub_const('Raindrops::Linux', raindrops) - allow(raindrops).to receive(:unix_listener_stats).and_return({}) - allow(raindrops).to receive(:tcp_listener_stats).and_return({}) - end - - context 'unicorn listens on unix sockets' do - let(:socket_address) { '/some/sock' } - let(:sockets) { [socket_address] } - - before do - allow(unicorn).to receive(:listener_names).and_return(sockets) - end - - it 'samples socket data' do - expect(raindrops).to receive(:unix_listener_stats).with(sockets) - - subject.sample - end - - context 'stats collected' do - before do - allow(stats).to receive(:active).and_return('active') - allow(stats).to receive(:queued).and_return('queued') - allow(raindrops).to receive(:unix_listener_stats).and_return({ socket_address => stats }) - end - - it 'updates metrics type unix and with addr' do - labels = { socket_type: 'unix', socket_address: socket_address } - - expect(subject.metrics[:unicorn_active_connections]).to receive(:set).with(labels, 'active') - expect(subject.metrics[:unicorn_queued_connections]).to receive(:set).with(labels, 'queued') - - subject.sample - end - end - end - - context 'unicorn listens on tcp sockets' do - let(:tcp_socket_address) { '0.0.0.0:8080' } - let(:tcp_sockets) { [tcp_socket_address] } - - before do - allow(unicorn).to receive(:listener_names).and_return(tcp_sockets) - end - - it 'samples socket data' do - expect(raindrops).to receive(:tcp_listener_stats).with(tcp_sockets) - - subject.sample - end - - context 'stats collected' do - before do - allow(stats).to receive(:active).and_return('active') - allow(stats).to receive(:queued).and_return('queued') - allow(raindrops).to receive(:tcp_listener_stats).and_return({ tcp_socket_address => stats }) - end - - it 'updates metrics type unix and with addr' do - labels = { socket_type: 'tcp', socket_address: tcp_socket_address } - - expect(subject.metrics[:unicorn_active_connections]).to receive(:set).with(labels, 'active') - 
expect(subject.metrics[:unicorn_queued_connections]).to receive(:set).with(labels, 'queued') - - subject.sample - end - end - end - - context 'unicorn workers' do - before do - allow(unicorn).to receive(:listener_names).and_return([]) - end - - context 'without http server' do - it "does set unicorn_workers to 0" do - expect(subject.metrics[:unicorn_workers]).to receive(:set).with({}, 0) - - subject.sample - end - end - - context 'with http server' do - let(:http_server_class) { Struct.new(:worker_processes) } - let!(:http_server) { http_server_class.new(5) } - - before do - stub_const('Unicorn::HttpServer', http_server_class) - end - - it "sets additional metrics" do - expect(subject.metrics[:unicorn_workers]).to receive(:set).with({}, 5) - - subject.sample - end - end - end - end - - describe '#start' do - context 'when enabled' do - before do - allow(subject).to receive(:enabled?).and_return(true) - end - - it 'creates new thread' do - expect(Thread).to receive(:new) - - subject.start - end - end - - context 'when disabled' do - before do - allow(subject).to receive(:enabled?).and_return(false) - end - - it "doesn't create new thread" do - expect(Thread).not_to receive(:new) - - subject.start - end - end - end -end diff --git a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb index 6bfcfa21289..cffa62c3a52 100644 --- a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb +++ b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb @@ -150,4 +150,140 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do it_behaves_like 'track generic sql events' end end + + context 'Database Load Balancing enabled' do + let(:payload) { { sql: 'SELECT * FROM users WHERE id = 10', connection: connection } } + + let(:event) do + double( + :event, + name: 'sql.active_record', + duration: 2, + payload: payload + ) + end + + # Emulate Marginalia pre-pending comments + def sql(query, comments: true) + 
if comments && !%w[BEGIN COMMIT].include?(query) + "/*application:web,controller:badges,action:pipeline,correlation_id:01EYN39K9VMJC56Z7808N7RSRH*/ #{query}" + else + query + end + end + + shared_examples 'track sql events for each role' do + where(:name, :sql_query, :record_query, :record_write_query, :record_cached_query, :record_wal_query) do + 'SQL' | 'SELECT * FROM users WHERE id = 10' | true | false | false | false + 'SQL' | 'WITH active_milestones AS (SELECT COUNT(*), state FROM milestones GROUP BY state) SELECT * FROM active_milestones' | true | false | false | false + 'SQL' | 'SELECT * FROM users WHERE id = 10 FOR UPDATE' | true | true | false | false + 'SQL' | 'WITH archived_rows AS (SELECT * FROM users WHERE archived = true) INSERT INTO products_log SELECT * FROM archived_rows' | true | true | false | false + 'SQL' | 'DELETE FROM users where id = 10' | true | true | false | false + 'SQL' | 'INSERT INTO project_ci_cd_settings (project_id) SELECT id FROM projects' | true | true | false | false + 'SQL' | 'UPDATE users SET admin = true WHERE id = 10' | true | true | false | false + 'SQL' | 'SELECT pg_current_wal_insert_lsn()::text AS location' | true | false | false | true + 'SQL' | 'SELECT pg_last_wal_replay_lsn()::text AS location' | true | false | false | true + 'CACHE' | 'SELECT * FROM users WHERE id = 10' | true | false | true | false + 'SCHEMA' | "SELECT attr.attname FROM pg_attribute attr INNER JOIN pg_constraint cons ON attr.attrelid = cons.conrelid AND attr.attnum = any(cons.conkey) WHERE cons.contype = 'p' AND cons.conrelid = '\"projects\"'::regclass" | false | false | false | false + nil | 'BEGIN' | false | false | false | false + nil | 'COMMIT' | false | false | false | false + end + + with_them do + let(:payload) { { name: name, sql: sql(sql_query, comments: comments), connection: connection } } + + before do + allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true) + end + + context 'query using a connection to a replica' 
do + before do + allow(Gitlab::Database::LoadBalancing).to receive(:db_role_for_connection).and_return(:replica) + end + + it 'queries connection db role' do + subscriber.sql(event) + + if record_query + expect(Gitlab::Database::LoadBalancing).to have_received(:db_role_for_connection).with(connection) + end + end + + it_behaves_like 'record ActiveRecord metrics', :replica + it_behaves_like 'store ActiveRecord info in RequestStore', :replica + end + + context 'query using a connection to a primary' do + before do + allow(Gitlab::Database::LoadBalancing).to receive(:db_role_for_connection).and_return(:primary) + end + + it 'queries connection db role' do + subscriber.sql(event) + + if record_query + expect(Gitlab::Database::LoadBalancing).to have_received(:db_role_for_connection).with(connection) + end + end + + it_behaves_like 'record ActiveRecord metrics', :primary + it_behaves_like 'store ActiveRecord info in RequestStore', :primary + end + + context 'query using a connection to an unknown source' do + let(:transaction) { double('Gitlab::Metrics::WebTransaction') } + + before do + allow(Gitlab::Database::LoadBalancing).to receive(:db_role_for_connection).and_return(nil) + + allow(::Gitlab::Metrics::WebTransaction).to receive(:current).and_return(transaction) + allow(::Gitlab::Metrics::BackgroundTransaction).to receive(:current).and_return(nil) + + allow(transaction).to receive(:increment) + allow(transaction).to receive(:observe) + end + + it 'does not record DB role metrics' do + expect(transaction).not_to receive(:increment).with("gitlab_transaction_db_primary_count_total".to_sym, any_args) + expect(transaction).not_to receive(:increment).with("gitlab_transaction_db_replica_count_total".to_sym, any_args) + + expect(transaction).not_to receive(:increment).with("gitlab_transaction_db_primary_cached_count_total".to_sym, any_args) + expect(transaction).not_to receive(:increment).with("gitlab_transaction_db_replica_cached_count_total".to_sym, any_args) + + 
expect(transaction).not_to receive(:observe).with("gitlab_sql_primary_duration_seconds".to_sym, any_args) + expect(transaction).not_to receive(:observe).with("gitlab_sql_replica_duration_seconds".to_sym, any_args) + + subscriber.sql(event) + end + + it 'does not store DB roles into into RequestStore' do + Gitlab::WithRequestStore.with_request_store do + subscriber.sql(event) + + expect(described_class.db_counter_payload).to include( + db_primary_cached_count: 0, + db_primary_count: 0, + db_primary_duration_s: 0, + db_replica_cached_count: 0, + db_replica_count: 0, + db_replica_duration_s: 0 + ) + end + end + end + end + end + + context 'without Marginalia comments' do + let(:comments) { false } + + it_behaves_like 'track sql events for each role' + end + + context 'with Marginalia comments' do + let(:comments) { true } + + it_behaves_like 'track sql events for each role' + end + end end diff --git a/spec/lib/gitlab/metrics/transaction_spec.rb b/spec/lib/gitlab/metrics/transaction_spec.rb index d4e5a1a94f2..2ff8efcd7cb 100644 --- a/spec/lib/gitlab/metrics/transaction_spec.rb +++ b/spec/lib/gitlab/metrics/transaction_spec.rb @@ -12,32 +12,6 @@ RSpec.describe Gitlab::Metrics::Transaction do } end - describe '#duration' do - it 'returns the duration of a transaction in seconds' do - transaction.run { } - - expect(transaction.duration).to be > 0 - end - end - - describe '#run' do - it 'yields the supplied block' do - expect { |b| transaction.run(&b) }.to yield_control - end - - it 'stores the transaction in the current thread' do - transaction.run do - expect(described_class.current).to eq(transaction) - end - end - - it 'removes the transaction from the current thread upon completion' do - transaction.run { } - - expect(described_class.current).to be_nil - end - end - describe '#method_call_for' do it 'returns a MethodCall' do method = transaction.method_call_for('Foo#bar', :Foo, '#bar') @@ -46,6 +20,10 @@ RSpec.describe Gitlab::Metrics::Transaction do end end + 
describe '#run' do + specify { expect { transaction.run }.to raise_error(NotImplementedError) } + end + describe '#add_event' do let(:prometheus_metric) { instance_double(Prometheus::Client::Counter, increment: nil, base_labels: {}) } diff --git a/spec/lib/gitlab/metrics/web_transaction_spec.rb b/spec/lib/gitlab/metrics/web_transaction_spec.rb index 6ee9564ef75..5261d04c879 100644 --- a/spec/lib/gitlab/metrics/web_transaction_spec.rb +++ b/spec/lib/gitlab/metrics/web_transaction_spec.rb @@ -38,16 +38,6 @@ RSpec.describe Gitlab::Metrics::WebTransaction do end end - describe '#duration' do - include_context 'transaction observe metrics' - - it 'returns the duration of a transaction in seconds' do - transaction.run { sleep(0.5) } - - expect(transaction.duration).to be >= 0.5 - end - end - describe '#run' do include_context 'transaction observe metrics' @@ -58,6 +48,9 @@ RSpec.describe Gitlab::Metrics::WebTransaction do it 'stores the transaction in the current thread' do transaction.run do expect(Thread.current[described_class::THREAD_KEY]).to eq(transaction) + expect(described_class.current).to eq(transaction) + + ['200', {}, ''] end end @@ -65,6 +58,33 @@ RSpec.describe Gitlab::Metrics::WebTransaction do transaction.run { } expect(Thread.current[described_class::THREAD_KEY]).to be_nil + expect(described_class.current).to be_nil + end + + it 'records the duration of the transaction if the request was successful' do + expect(transaction).to receive(:observe).with(:gitlab_transaction_duration_seconds, instance_of(Float)) + + transaction.run { ['200', {}, ''] } + end + + it 'does not record the duration of the transaction if the request failed' do + expect(transaction).not_to receive(:observe).with(:gitlab_transaction_duration_seconds, instance_of(Float)) + + transaction.run { ['500', {}, ''] } + end + + it 'does not record the duration of the transaction if it raised' do + expect(transaction).not_to receive(:observe).with(:gitlab_transaction_duration_seconds, 
instance_of(Float)) + + expect do + transaction.run { raise 'broken' } + end.to raise_error('broken') + end + + it 'returns the rack response' do + response = ['500', {}, ''] + + expect(transaction.run { response }).to eq(response) end end diff --git a/spec/lib/gitlab/metrics_spec.rb b/spec/lib/gitlab/metrics_spec.rb index db5a23e2328..366843a4c03 100644 --- a/spec/lib/gitlab/metrics_spec.rb +++ b/spec/lib/gitlab/metrics_spec.rb @@ -92,6 +92,26 @@ RSpec.describe Gitlab::Metrics do end end + describe '.record_status_for_duration?' do + using RSpec::Parameterized::TableSyntax + + where(:status, :should_record) do + 100 | false + 200 | true + 401 | true + nil | false + 500 | false + 503 | false + '100' | false + '201' | true + 'nothing' | false + end + + with_them do + specify { expect(described_class.record_duration_for_status?(status)).to be(should_record) } + end + end + describe '.add_event' do context 'without a transaction' do it 'does nothing' do diff --git a/spec/lib/gitlab/nav/top_nav_menu_item_spec.rb b/spec/lib/gitlab/nav/top_nav_menu_item_spec.rb index 26f9ea3a637..966b23bf51a 100644 --- a/spec/lib/gitlab/nav/top_nav_menu_item_spec.rb +++ b/spec/lib/gitlab/nav/top_nav_menu_item_spec.rb @@ -11,10 +11,10 @@ RSpec.describe ::Gitlab::Nav::TopNavMenuItem do active: true, icon: 'icon', href: 'href', - method: 'method', view: 'view', css_class: 'css_class', - data: {} + data: {}, + emoji: 'smile' } expect(described_class.build(**item)).to eq(item) diff --git a/spec/lib/gitlab/pagination/gitaly_keyset_pager_spec.rb b/spec/lib/gitlab/pagination/gitaly_keyset_pager_spec.rb index 132a0e9ca78..8a26e153385 100644 --- a/spec/lib/gitlab/pagination/gitaly_keyset_pager_spec.rb +++ b/spec/lib/gitlab/pagination/gitaly_keyset_pager_spec.rb @@ -108,7 +108,6 @@ RSpec.describe Gitlab::Pagination::GitalyKeysetPager do let(:expected_next_page_link) { %Q(<#{incoming_api_projects_url}?#{query.merge(page_token: branch2.name).to_query}>; rel="next") } it 'uses keyset pagination and 
adds link headers' do - expect(request_context).to receive(:header).with('Links', expected_next_page_link) expect(request_context).to receive(:header).with('Link', expected_next_page_link) pager.paginate(finder) @@ -119,7 +118,6 @@ RSpec.describe Gitlab::Pagination::GitalyKeysetPager do let(:branches) { [branch1] } it 'uses keyset pagination without link headers' do - expect(request_context).not_to receive(:header).with('Links', anything) expect(request_context).not_to receive(:header).with('Link', anything) pager.paginate(finder) diff --git a/spec/lib/gitlab/pagination/keyset/paginator_spec.rb b/spec/lib/gitlab/pagination/keyset/paginator_spec.rb new file mode 100644 index 00000000000..3c9a8913876 --- /dev/null +++ b/spec/lib/gitlab/pagination/keyset/paginator_spec.rb @@ -0,0 +1,120 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Pagination::Keyset::Paginator do + let_it_be(:project_1) { create(:project, created_at: 10.weeks.ago) } + let_it_be(:project_2) { create(:project, created_at: 2.weeks.ago) } + let_it_be(:project_3) { create(:project, created_at: 3.weeks.ago) } + let_it_be(:project_4) { create(:project, created_at: 5.weeks.ago) } + let_it_be(:project_5) { create(:project, created_at: 2.weeks.ago) } + + describe 'pagination' do + let(:per_page) { 10 } + let(:cursor) { nil } + let(:scope) { Project.order(created_at: :asc, id: :asc) } + let(:expected_order) { [project_1, project_4, project_3, project_2, project_5] } + + subject(:paginator) { scope.keyset_paginate(cursor: cursor, per_page: per_page) } + + context 'when per_page is greater than the record count' do + it { expect(paginator.records).to eq(expected_order) } + it { is_expected.not_to have_next_page } + it { is_expected.not_to have_previous_page } + + it 'has no next and previous cursor values' do + expect(paginator.cursor_for_next_page).to be_nil + expect(paginator.cursor_for_previous_page).to be_nil + end + end + + context 'when 0 records are returned' do + 
let(:scope) { Project.where(id: non_existing_record_id).order(created_at: :asc, id: :asc) } + + it { expect(paginator.records).to be_empty } + it { is_expected.not_to have_next_page } + it { is_expected.not_to have_previous_page } + end + + context 'when page size is smaller than the record count' do + let(:per_page) { 2 } + + it { expect(paginator.records).to eq(expected_order.first(2)) } + it { is_expected.to have_next_page } + it { is_expected.not_to have_previous_page } + + it 'has next page cursor' do + expect(paginator.cursor_for_next_page).not_to be_nil + end + + it 'does not have previous page cursor' do + expect(paginator.cursor_for_previous_page).to be_nil + end + + context 'when on the second page' do + let(:cursor) { scope.keyset_paginate(per_page: per_page).cursor_for_next_page } + + it { expect(paginator.records).to eq(expected_order[2...4]) } + it { is_expected.to have_next_page } + it { is_expected.to have_previous_page } + + context 'and then going back to the first page' do + let(:previous_page_cursor) { scope.keyset_paginate(cursor: cursor, per_page: per_page).cursor_for_previous_page } + + subject(:paginator) { scope.keyset_paginate(cursor: previous_page_cursor, per_page: per_page) } + + it { expect(paginator.records).to eq(expected_order.first(2)) } + it { is_expected.to have_next_page } + it { is_expected.not_to have_previous_page } + end + end + + context 'when jumping to the last page' do + let(:cursor) { scope.keyset_paginate(per_page: per_page).cursor_for_last_page } + + it { expect(paginator.records).to eq(expected_order.last(2)) } + it { is_expected.not_to have_next_page } + it { is_expected.to have_previous_page } + + context 'when paginating backwards' do + let(:previous_page_cursor) { scope.keyset_paginate(cursor: cursor, per_page: per_page).cursor_for_previous_page } + + subject(:paginator) { scope.keyset_paginate(cursor: previous_page_cursor, per_page: per_page) } + + it { expect(paginator.records).to eq(expected_order[-4...-2]) } + 
it { is_expected.to have_next_page } + it { is_expected.to have_previous_page } + end + + context 'when jumping to the first page' do + let(:first_page_cursor) { scope.keyset_paginate(cursor: cursor, per_page: per_page).cursor_for_first_page } + + subject(:paginator) { scope.keyset_paginate(cursor: first_page_cursor, per_page: per_page) } + + it { expect(paginator.records).to eq(expected_order.first(2)) } + it { is_expected.to have_next_page } + it { is_expected.not_to have_previous_page } + end + end + end + + describe 'default keyset direction parameter' do + let(:cursor_converter_class) { Gitlab::Pagination::Keyset::Paginator::Base64CursorConverter } + let(:per_page) { 2 } + + it 'exposes the direction parameter in the cursor' do + cursor = paginator.cursor_for_next_page + + expect(cursor_converter_class.parse(cursor)[:_kd]).to eq(described_class::FORWARD_DIRECTION) + end + end + end + + context 'when unsupported order is given' do + it 'raises error' do + scope = Project.order(path: :asc, name: :asc, id: :desc) # Cannot build 3 column order automatically + + expect { scope.keyset_paginate }.to raise_error(/does not support keyset pagination/) + end + end +end diff --git a/spec/lib/gitlab/pagination/keyset/request_context_spec.rb b/spec/lib/gitlab/pagination/keyset/request_context_spec.rb index d4255176a4e..619d8cca28c 100644 --- a/spec/lib/gitlab/pagination/keyset/request_context_spec.rb +++ b/spec/lib/gitlab/pagination/keyset/request_context_spec.rb @@ -57,14 +57,15 @@ RSpec.describe Gitlab::Pagination::Keyset::RequestContext do subject { described_class.new(request_context).apply_headers(next_page) } - it 'sets Links header with same host/path as the original request' do + it 'sets Link header with same host/path as the original request' do orig_uri = URI.parse(request_context.request.url) - expect(request_context).to receive(:header).twice do |name, header| + expect(request_context).to receive(:header).once do |name, header| first_link, _ = /<([^>]+)>; 
rel="next"/.match(header).captures uri = URI.parse(first_link) + expect(name).to eq('Link') expect(uri.host).to eq(orig_uri.host) expect(uri.path).to eq(orig_uri.path) end @@ -72,14 +73,15 @@ RSpec.describe Gitlab::Pagination::Keyset::RequestContext do subject end - it 'sets Links header with a link to the next page' do + it 'sets Link header with a link to the next page' do orig_uri = URI.parse(request_context.request.url) - expect(request_context).to receive(:header).twice do |name, header| + expect(request_context).to receive(:header).once do |name, header| first_link, _ = /<([^>]+)>; rel="next"/.match(header).captures query = CGI.parse(URI.parse(first_link).query) + expect(name).to eq('Link') expect(query.except('id_after')).to eq(CGI.parse(orig_uri.query).except('id_after')) expect(query['id_after']).to eq(['42']) end @@ -90,14 +92,15 @@ RSpec.describe Gitlab::Pagination::Keyset::RequestContext do context 'with descending order' do let(:next_page) { double('next page', order_by: { id: :desc }, lower_bounds: { id: 42 }) } - it 'sets Links header with a link to the next page' do + it 'sets Link header with a link to the next page' do orig_uri = URI.parse(request_context.request.url) - expect(request_context).to receive(:header).twice do |name, header| + expect(request_context).to receive(:header).once do |name, header| first_link, _ = /<([^>]+)>; rel="next"/.match(header).captures query = CGI.parse(URI.parse(first_link).query) + expect(name).to eq('Link') expect(query.except('id_before')).to eq(CGI.parse(orig_uri.query).except('id_before')) expect(query['id_before']).to eq(['42']) end diff --git a/spec/lib/gitlab/patch/action_dispatch_journey_formatter_spec.rb b/spec/lib/gitlab/patch/action_dispatch_journey_formatter_spec.rb deleted file mode 100644 index ca74f7573f3..00000000000 --- a/spec/lib/gitlab/patch/action_dispatch_journey_formatter_spec.rb +++ /dev/null @@ -1,33 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe 
Gitlab::Patch::ActionDispatchJourneyFormatter do - let(:group) { create(:group) } - let(:project) { create(:project, namespace: group) } - let(:pipeline) { create(:ci_empty_pipeline, project: project) } - let(:url) { Gitlab::Routing.url_helpers.project_pipeline_url(project, pipeline) } - let(:expected_path) { "#{project.full_path}/-/pipelines/#{pipeline.id}" } - - context 'custom implementation of #missing_keys' do - before do - expect_any_instance_of(Gitlab::Patch::ActionDispatchJourneyFormatter).to receive(:missing_keys) - end - - it 'generates correct url' do - expect(url).to end_with(expected_path) - end - end - - context 'original implementation of #missing_keys' do - before do - allow_any_instance_of(Gitlab::Patch::ActionDispatchJourneyFormatter).to receive(:missing_keys) do |instance, route, parts| - instance.send(:old_missing_keys, route, parts) # test the old implementation - end - end - - it 'generates correct url' do - expect(url).to end_with(expected_path) - end - end -end diff --git a/spec/lib/gitlab/path_regex_spec.rb b/spec/lib/gitlab/path_regex_spec.rb index cd89674af0f..54c866b4a6d 100644 --- a/spec/lib/gitlab/path_regex_spec.rb +++ b/spec/lib/gitlab/path_regex_spec.rb @@ -113,7 +113,7 @@ RSpec.describe Gitlab::PathRegex do let(:deprecated_routes) do # profile was deprecated in https://gitlab.com/gitlab-org/gitlab/-/merge_requests/51646 - %w(profile) + %w(profile s) end let(:ee_top_level_words) do @@ -183,7 +183,7 @@ RSpec.describe Gitlab::PathRegex do # We ban new items in this list, see https://gitlab.com/gitlab-org/gitlab/-/issues/215362 it 'does not allow expansion' do - expect(described_class::TOP_LEVEL_ROUTES.size).to eq(44) + expect(described_class::TOP_LEVEL_ROUTES.size).to eq(40) end end diff --git a/spec/lib/gitlab/profiler_spec.rb b/spec/lib/gitlab/profiler_spec.rb index 48e2a2e9794..5187c96b511 100644 --- a/spec/lib/gitlab/profiler_spec.rb +++ b/spec/lib/gitlab/profiler_spec.rb @@ -206,8 +206,12 @@ RSpec.describe Gitlab::Profiler do end 
end - before do - stub_const('STDOUT', stdout) + around do |example| + original_stdout = $stdout + + $stdout = stdout # rubocop: disable RSpec/ExpectOutput + example.run + $stdout = original_stdout # rubocop: disable RSpec/ExpectOutput end it 'prints a profile result sorted by total time' do diff --git a/spec/lib/gitlab/project_search_results_spec.rb b/spec/lib/gitlab/project_search_results_spec.rb index a76ad1f6f4c..2f28b8dfce0 100644 --- a/spec/lib/gitlab/project_search_results_spec.rb +++ b/spec/lib/gitlab/project_search_results_spec.rb @@ -549,30 +549,39 @@ RSpec.describe Gitlab::ProjectSearchResults do describe 'user search' do let(:query) { 'gob' } - let(:group) { create(:group) } - let(:project) { create(:project, namespace: group) } + + let_it_be(:user_1) { create(:user, username: 'gob_bluth') } + let_it_be(:user_2) { create(:user, username: 'michael_bluth') } + let_it_be(:user_3) { create(:user, username: 'gob_2018') } + let_it_be(:group) { create(:group) } + let_it_be(:project) { create(:project, namespace: group) } subject(:objects) { results.objects('users') } it 'returns the user belonging to the project matching the search query' do - user1 = create(:user, username: 'gob_bluth') - create(:project_member, :developer, user: user1, project: project) + create(:project_member, :developer, user: user_1, project: project) + create(:project_member, :developer, user: user_2, project: project) - user2 = create(:user, username: 'michael_bluth') - create(:project_member, :developer, user: user2, project: project) + expect(objects).to contain_exactly(user_1) + end - create(:user, username: 'gob_2018') + it 'returns the user belonging to the group matching the search query' do + create(:group_member, :developer, user: user_1, group: group) - expect(objects).to contain_exactly(user1) + expect(objects).to contain_exactly(user_1) end - it 'returns the user belonging to the group matching the search query' do - user1 = create(:user, username: 'gob_bluth') - 
create(:group_member, :developer, user: user1, group: group) + context 'when multiple projects provided' do + let_it_be(:project_2) { create(:project, namespace: group) } + + subject(:results) { described_class.new(user, query, project: [project, project_2], repository_ref: repository_ref, filters: filters) } - create(:user, username: 'gob_2018') + it 'returns users belonging to projects matching the search query' do + create(:project_member, :developer, user: user_1, project: project) + create(:project_member, :developer, user: user_3, project: project_2) - expect(objects).to contain_exactly(user1) + expect(objects).to contain_exactly(user_1, user_3) + end end end end diff --git a/spec/lib/gitlab/prometheus/adapter_spec.rb b/spec/lib/gitlab/prometheus/adapter_spec.rb index 259202178a2..1eaed65c805 100644 --- a/spec/lib/gitlab/prometheus/adapter_spec.rb +++ b/spec/lib/gitlab/prometheus/adapter_spec.rb @@ -21,7 +21,7 @@ RSpec.describe Gitlab::Prometheus::Adapter do end context 'with cluster with prometheus available' do - let!(:prometheus) { create(:clusters_applications_prometheus, :installed, cluster: cluster) } + let!(:prometheus) { create(:clusters_integrations_prometheus, cluster: cluster) } it 'returns prometheus service' do expect(subject.prometheus_adapter).to eq(prometheus_service) @@ -32,16 +32,12 @@ RSpec.describe Gitlab::Prometheus::Adapter do context "prometheus service can't execute queries" do let(:prometheus_service) { double(:prometheus_service, can_query?: false) } - context 'with cluster with prometheus integration' do - let!(:prometheus_integration) { create(:clusters_integrations_prometheus, cluster: cluster) } - - it 'returns the integration' do - expect(subject.prometheus_adapter).to eq(prometheus_integration) - end + before do + allow(project).to receive(:find_or_initialize_service).with('prometheus').and_return prometheus_service end - context 'with cluster with prometheus not available' do - let!(:prometheus) { 
create(:clusters_applications_prometheus, :installable, cluster: cluster) } + context 'with cluster with prometheus disabled' do + let!(:prometheus) { create(:clusters_integrations_prometheus, enabled: false, cluster: cluster) } it 'returns nil' do expect(subject.prometheus_adapter).to be_nil @@ -49,19 +45,11 @@ RSpec.describe Gitlab::Prometheus::Adapter do end context 'with cluster with prometheus available' do - let!(:prometheus) { create(:clusters_applications_prometheus, :installed, cluster: cluster) } + let!(:prometheus) { create(:clusters_integrations_prometheus, cluster: cluster) } it 'returns application handling all environments' do expect(subject.prometheus_adapter).to eq(prometheus) end - - context 'with cluster with prometheus integration' do - let!(:prometheus_integration) { create(:clusters_integrations_prometheus, cluster: cluster) } - - it 'returns the application' do - expect(subject.prometheus_adapter).to eq(prometheus) - end - end end context 'with cluster without prometheus installed' do diff --git a/spec/lib/gitlab/redis/cache_spec.rb b/spec/lib/gitlab/redis/cache_spec.rb index 5f73b84288d..31141ac1139 100644 --- a/spec/lib/gitlab/redis/cache_spec.rb +++ b/spec/lib/gitlab/redis/cache_spec.rb @@ -3,20 +3,16 @@ require 'spec_helper' RSpec.describe Gitlab::Redis::Cache do - let(:config_file_name) { "config/redis.cache.yml" } + let(:instance_specific_config_file) { "config/redis.cache.yml" } let(:environment_config_file_name) { "GITLAB_REDIS_CACHE_CONFIG_FILE" } - let(:config_old_format_socket) { "spec/fixtures/config/redis_cache_old_format_socket.yml" } - let(:config_new_format_socket) { "spec/fixtures/config/redis_cache_new_format_socket.yml" } - let(:old_socket_path) {"/path/to/old/redis.cache.sock" } - let(:new_socket_path) {"/path/to/redis.cache.sock" } - let(:config_old_format_host) { "spec/fixtures/config/redis_cache_old_format_host.yml" } - let(:config_new_format_host) { "spec/fixtures/config/redis_cache_new_format_host.yml" } - 
let(:redis_port) { 6380 } - let(:redis_database) { 10 } - let(:sentinel_port) { redis_port + 20000 } - let(:config_with_environment_variable_inside) { "spec/fixtures/config/redis_cache_config_with_env.yml"} - let(:config_env_variable_url) {"TEST_GITLAB_REDIS_CACHE_URL"} - let(:class_redis_url) { Gitlab::Redis::Cache::DEFAULT_REDIS_CACHE_URL } include_examples "redis_shared_examples" + + describe '#raw_config_hash' do + it 'has a legacy default URL' do + expect(subject).to receive(:fetch_config) { false } + + expect(subject.send(:raw_config_hash)).to eq(url: 'redis://localhost:6380' ) + end + end end diff --git a/spec/lib/gitlab/redis/queues_spec.rb b/spec/lib/gitlab/redis/queues_spec.rb index 8a32c991943..2e396cde3bf 100644 --- a/spec/lib/gitlab/redis/queues_spec.rb +++ b/spec/lib/gitlab/redis/queues_spec.rb @@ -3,20 +3,16 @@ require 'spec_helper' RSpec.describe Gitlab::Redis::Queues do - let(:config_file_name) { "config/redis.queues.yml" } + let(:instance_specific_config_file) { "config/redis.queues.yml" } let(:environment_config_file_name) { "GITLAB_REDIS_QUEUES_CONFIG_FILE" } - let(:config_old_format_socket) { "spec/fixtures/config/redis_queues_old_format_socket.yml" } - let(:config_new_format_socket) { "spec/fixtures/config/redis_queues_new_format_socket.yml" } - let(:old_socket_path) {"/path/to/old/redis.queues.sock" } - let(:new_socket_path) {"/path/to/redis.queues.sock" } - let(:config_old_format_host) { "spec/fixtures/config/redis_queues_old_format_host.yml" } - let(:config_new_format_host) { "spec/fixtures/config/redis_queues_new_format_host.yml" } - let(:redis_port) { 6381 } - let(:redis_database) { 11 } - let(:sentinel_port) { redis_port + 20000 } - let(:config_with_environment_variable_inside) { "spec/fixtures/config/redis_queues_config_with_env.yml"} - let(:config_env_variable_url) {"TEST_GITLAB_REDIS_QUEUES_URL"} - let(:class_redis_url) { Gitlab::Redis::Queues::DEFAULT_REDIS_QUEUES_URL } include_examples "redis_shared_examples" + + describe 
'#raw_config_hash' do + it 'has a legacy default URL' do + expect(subject).to receive(:fetch_config) { false } + + expect(subject.send(:raw_config_hash)).to eq(url: 'redis://localhost:6381' ) + end + end end diff --git a/spec/lib/gitlab/redis/shared_state_spec.rb b/spec/lib/gitlab/redis/shared_state_spec.rb index bd90e4c750d..d240abfbf5b 100644 --- a/spec/lib/gitlab/redis/shared_state_spec.rb +++ b/spec/lib/gitlab/redis/shared_state_spec.rb @@ -3,20 +3,16 @@ require 'spec_helper' RSpec.describe Gitlab::Redis::SharedState do - let(:config_file_name) { "config/redis.shared_state.yml" } + let(:instance_specific_config_file) { "config/redis.shared_state.yml" } let(:environment_config_file_name) { "GITLAB_REDIS_SHARED_STATE_CONFIG_FILE" } - let(:config_old_format_socket) { "spec/fixtures/config/redis_shared_state_old_format_socket.yml" } - let(:config_new_format_socket) { "spec/fixtures/config/redis_shared_state_new_format_socket.yml" } - let(:old_socket_path) {"/path/to/old/redis.shared_state.sock" } - let(:new_socket_path) {"/path/to/redis.shared_state.sock" } - let(:config_old_format_host) { "spec/fixtures/config/redis_shared_state_old_format_host.yml" } - let(:config_new_format_host) { "spec/fixtures/config/redis_shared_state_new_format_host.yml" } - let(:redis_port) { 6382 } - let(:redis_database) { 12 } - let(:sentinel_port) { redis_port + 20000 } - let(:config_with_environment_variable_inside) { "spec/fixtures/config/redis_shared_state_config_with_env.yml"} - let(:config_env_variable_url) {"TEST_GITLAB_REDIS_SHARED_STATE_URL"} - let(:class_redis_url) { Gitlab::Redis::SharedState::DEFAULT_REDIS_SHARED_STATE_URL } include_examples "redis_shared_examples" + + describe '#raw_config_hash' do + it 'has a legacy default URL' do + expect(subject).to receive(:fetch_config) { false } + + expect(subject.send(:raw_config_hash)).to eq(url: 'redis://localhost:6382' ) + end + end end diff --git a/spec/lib/gitlab/redis/trace_chunks_spec.rb 
b/spec/lib/gitlab/redis/trace_chunks_spec.rb new file mode 100644 index 00000000000..e974dc519d6 --- /dev/null +++ b/spec/lib/gitlab/redis/trace_chunks_spec.rb @@ -0,0 +1,55 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Redis::TraceChunks do + let(:instance_specific_config_file) { "config/redis.trace_chunks.yml" } + let(:environment_config_file_name) { "GITLAB_REDIS_TRACE_CHUNKS_CONFIG_FILE" } + let(:shared_state_config_file) { nil } + + before do + allow(Gitlab::Redis::SharedState).to receive(:config_file_name).and_return(shared_state_config_file) + end + + include_examples "redis_shared_examples" + + describe '.config_file_name' do + subject { described_class.config_file_name } + + let(:rails_root) { Dir.mktmpdir('redis_shared_examples') } + + before do + # Undo top-level stub of config_file_name because we are testing that method now. + allow(described_class).to receive(:config_file_name).and_call_original + + allow(described_class).to receive(:rails_root).and_return(rails_root) + FileUtils.mkdir_p(File.join(rails_root, 'config')) + end + + after do + FileUtils.rm_rf(rails_root) + end + + context 'when there is only a resque.yml' do + before do + FileUtils.touch(File.join(rails_root, 'config/resque.yml')) + end + + it { expect(subject).to eq("#{rails_root}/config/resque.yml") } + + context 'and there is a global env override' do + before do + stub_env('GITLAB_REDIS_CONFIG_FILE', 'global override') + end + + it { expect(subject).to eq('global override') } + + context 'and SharedState has a different config file' do + let(:shared_state_config_file) { 'shared state config file' } + + it { expect(subject).to eq('shared state config file') } + end + end + end + end +end diff --git a/spec/lib/gitlab/redis/wrapper_spec.rb b/spec/lib/gitlab/redis/wrapper_spec.rb index ec233c022ee..dd1f0d8b414 100644 --- a/spec/lib/gitlab/redis/wrapper_spec.rb +++ b/spec/lib/gitlab/redis/wrapper_spec.rb @@ -3,47 +3,9 @@ require 'spec_helper' 
RSpec.describe Gitlab::Redis::Wrapper do - let(:config_file_name) { "config/resque.yml" } - let(:environment_config_file_name) { "GITLAB_REDIS_CONFIG_FILE" } - let(:config_old_format_socket) { "spec/fixtures/config/redis_old_format_socket.yml" } - let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" } - let(:old_socket_path) {"/path/to/old/redis.sock" } - let(:new_socket_path) {"/path/to/redis.sock" } - let(:config_old_format_host) { "spec/fixtures/config/redis_old_format_host.yml" } - let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" } - let(:redis_port) { 6379 } - let(:redis_database) { 99 } - let(:sentinel_port) { redis_port + 20000 } - let(:config_with_environment_variable_inside) { "spec/fixtures/config/redis_config_with_env.yml"} - let(:config_env_variable_url) {"TEST_GITLAB_REDIS_URL"} - let(:class_redis_url) { Gitlab::Redis::Wrapper::DEFAULT_REDIS_URL } - - include_examples "redis_shared_examples" do - before do - allow(described_class).to receive(:instrumentation_class) do - ::Gitlab::Instrumentation::Redis::Cache - end - end - end - - describe '.version' do - it 'returns a version' do - expect(described_class.version).to be_present - end - end - describe '.instrumentation_class' do - it 'raises a NotImplementedError' do - expect(described_class).to receive(:instrumentation_class).and_call_original - - expect { described_class.instrumentation_class }.to raise_error(NotImplementedError) - end - end - - describe '.config_file_path' do - it 'returns the absolute path to the configuration file' do - expect(described_class.config_file_path('foo.yml')) - .to eq Rails.root.join('config', 'foo.yml').to_s + it 'raises a NameError' do + expect { described_class.instrumentation_class }.to raise_error(NameError) end end end diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb index 28447d5c2a9..c1c97e87a4c 100644 --- a/spec/lib/gitlab/regex_spec.rb +++ b/spec/lib/gitlab/regex_spec.rb @@ 
-675,9 +675,20 @@ RSpec.describe Gitlab::Regex do describe '.helm_version_regex' do subject { described_class.helm_version_regex } + it { is_expected.to match('1.2.3') } + it { is_expected.to match('1.2.3-beta') } + it { is_expected.to match('1.2.3-alpha.3') } + it { is_expected.to match('v1.2.3') } it { is_expected.to match('v1.2.3-beta') } it { is_expected.to match('v1.2.3-alpha.3') } + + it { is_expected.not_to match('1') } + it { is_expected.not_to match('1.2') } + it { is_expected.not_to match('1./2.3') } + it { is_expected.not_to match('../../../../../1.2.3') } + it { is_expected.not_to match('%2e%2e%2f1.2.3') } + it { is_expected.not_to match('v1') } it { is_expected.not_to match('v1.2') } it { is_expected.not_to match('v1./2.3') } diff --git a/spec/lib/gitlab/repository_set_cache_spec.rb b/spec/lib/gitlab/repository_set_cache_spec.rb index 881591ae805..9aeb9f11bac 100644 --- a/spec/lib/gitlab/repository_set_cache_spec.rb +++ b/spec/lib/gitlab/repository_set_cache_spec.rb @@ -7,6 +7,7 @@ RSpec.describe Gitlab::RepositorySetCache, :clean_gitlab_redis_cache do let(:repository) { project.repository } let(:namespace) { "#{repository.full_path}:#{project.id}" } + let(:gitlab_cache_namespace) { Gitlab::Redis::Cache::CACHE_NAMESPACE } let(:cache) { described_class.new(repository) } describe '#cache_key' do @@ -52,6 +53,24 @@ RSpec.describe Gitlab::RepositorySetCache, :clean_gitlab_redis_cache do end end + describe '#write' do + subject(:write_cache) { cache.write('branch_names', ['main']) } + + it 'writes the value to the cache' do + write_cache + + redis_keys = Gitlab::Redis::Cache.with { |redis| redis.scan(0, match: "*") }.last + expect(redis_keys).to include("branch_names:#{namespace}:set") + expect(cache.fetch('branch_names')).to contain_exactly('main') + end + + it 'sets the expiry of the set' do + write_cache + + expect(cache.ttl('branch_names')).to be_within(1).of(cache.expires_in.seconds) + end + end + describe '#expire' do subject { cache.expire(*keys) } 
@@ -75,6 +94,12 @@ RSpec.describe Gitlab::RepositorySetCache, :clean_gitlab_redis_cache do expect(cache.read(:foo)).to be_empty end + + it 'expires the new key format' do + expect_any_instance_of(Redis).to receive(:unlink).with(cache.cache_key(:foo), cache.new_cache_key(:foo)) # rubocop:disable RSpec/AnyInstanceOf + + subject + end end context 'multiple keys' do diff --git a/spec/lib/gitlab/runtime_spec.rb b/spec/lib/gitlab/runtime_spec.rb index 0fcb7db7d5f..f51c5dd3d20 100644 --- a/spec/lib/gitlab/runtime_spec.rb +++ b/spec/lib/gitlab/runtime_spec.rb @@ -99,25 +99,6 @@ RSpec.describe Gitlab::Runtime do end end - context "unicorn" do - before do - stub_const('::Unicorn', Module.new) - stub_const('::Unicorn::HttpServer', Class.new) - stub_env('ACTION_CABLE_IN_APP', 'false') - end - - it_behaves_like "valid runtime", :unicorn, 1 - - context "when ActionCable in-app mode is enabled" do - before do - stub_env('ACTION_CABLE_IN_APP', 'true') - stub_env('ACTION_CABLE_WORKER_POOL_SIZE', '3') - end - - it_behaves_like "valid runtime", :unicorn, 4 - end - end - context "sidekiq" do let(:sidekiq_type) { double('::Sidekiq') } diff --git a/spec/lib/gitlab/sidekiq_cluster/cli_spec.rb b/spec/lib/gitlab/sidekiq_cluster/cli_spec.rb index 43cbe71dd6b..5347680b253 100644 --- a/spec/lib/gitlab/sidekiq_cluster/cli_spec.rb +++ b/spec/lib/gitlab/sidekiq_cluster/cli_spec.rb @@ -108,114 +108,101 @@ RSpec.describe Gitlab::SidekiqCluster::CLI do end end - # Remove with https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/646 - context 'with --queue-selector and --experimental-queue-selector' do - it 'errors' do - expect(Gitlab::SidekiqCluster).not_to receive(:start) - - expect { cli.run(%w(--queue-selector name=foo --experimental-queue-selector name=bar)) } - .to raise_error(described_class::CommandError) - end - end - - # Simplify with https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/646 - ['--queue-selector', '--experimental-queue-selector'].each do |flag| - context "with 
#{flag}" do - where do - { - 'memory-bound queues' => { - query: 'resource_boundary=memory', - included_queues: %w(project_export), - excluded_queues: %w(merge) - }, - 'memory- or CPU-bound queues' => { - query: 'resource_boundary=memory,cpu', - included_queues: %w(auto_merge:auto_merge_process project_export), - excluded_queues: %w(merge) - }, - 'high urgency CI queues' => { - query: 'feature_category=continuous_integration&urgency=high', - included_queues: %w(pipeline_cache:expire_job_cache pipeline_cache:expire_pipeline_cache), - excluded_queues: %w(merge) - }, - 'CPU-bound high urgency CI queues' => { - query: 'feature_category=continuous_integration&urgency=high&resource_boundary=cpu', - included_queues: %w(pipeline_cache:expire_pipeline_cache), - excluded_queues: %w(pipeline_cache:expire_job_cache merge) - }, - 'CPU-bound high urgency non-CI queues' => { - query: 'feature_category!=continuous_integration&urgency=high&resource_boundary=cpu', - included_queues: %w(new_issue), - excluded_queues: %w(pipeline_cache:expire_pipeline_cache) - }, - 'CI and SCM queues' => { - query: 'feature_category=continuous_integration|feature_category=source_code_management', - included_queues: %w(pipeline_cache:expire_job_cache merge), - excluded_queues: %w(mailers) - } + context "with --queue-selector" do + where do + { + 'memory-bound queues' => { + query: 'resource_boundary=memory', + included_queues: %w(project_export), + excluded_queues: %w(merge) + }, + 'memory- or CPU-bound queues' => { + query: 'resource_boundary=memory,cpu', + included_queues: %w(auto_merge:auto_merge_process project_export), + excluded_queues: %w(merge) + }, + 'high urgency CI queues' => { + query: 'feature_category=continuous_integration&urgency=high', + included_queues: %w(pipeline_cache:expire_job_cache pipeline_cache:expire_pipeline_cache), + excluded_queues: %w(merge) + }, + 'CPU-bound high urgency CI queues' => { + query: 
'feature_category=continuous_integration&urgency=high&resource_boundary=cpu', + included_queues: %w(pipeline_cache:expire_pipeline_cache), + excluded_queues: %w(pipeline_cache:expire_job_cache merge) + }, + 'CPU-bound high urgency non-CI queues' => { + query: 'feature_category!=continuous_integration&urgency=high&resource_boundary=cpu', + included_queues: %w(new_issue), + excluded_queues: %w(pipeline_cache:expire_pipeline_cache) + }, + 'CI and SCM queues' => { + query: 'feature_category=continuous_integration|feature_category=source_code_management', + included_queues: %w(pipeline_cache:expire_job_cache merge), + excluded_queues: %w(mailers) } - end - - with_them do - it 'expands queues by attributes' do - expect(Gitlab::SidekiqCluster).to receive(:start) do |queues, opts| - expect(opts).to eq(default_options) - expect(queues.first).to include(*included_queues) - expect(queues.first).not_to include(*excluded_queues) + } + end - [] - end + with_them do + it 'expands queues by attributes' do + expect(Gitlab::SidekiqCluster).to receive(:start) do |queues, opts| + expect(opts).to eq(default_options) + expect(queues.first).to include(*included_queues) + expect(queues.first).not_to include(*excluded_queues) - cli.run(%W(#{flag} #{query})) + [] end - it 'works when negated' do - expect(Gitlab::SidekiqCluster).to receive(:start) do |queues, opts| - expect(opts).to eq(default_options) - expect(queues.first).not_to include(*included_queues) - expect(queues.first).to include(*excluded_queues) + cli.run(%W(--queue-selector #{query})) + end - [] - end + it 'works when negated' do + expect(Gitlab::SidekiqCluster).to receive(:start) do |queues, opts| + expect(opts).to eq(default_options) + expect(queues.first).not_to include(*included_queues) + expect(queues.first).to include(*excluded_queues) - cli.run(%W(--negate #{flag} #{query})) + [] end + + cli.run(%W(--negate --queue-selector #{query})) end + end - it 'expands multiple queue groups correctly' do - 
expect(Gitlab::SidekiqCluster) - .to receive(:start) - .with([['chat_notification'], ['project_export']], default_options) - .and_return([]) + it 'expands multiple queue groups correctly' do + expect(Gitlab::SidekiqCluster) + .to receive(:start) + .with([['chat_notification'], ['project_export']], default_options) + .and_return([]) - cli.run(%W(#{flag} feature_category=chatops&has_external_dependencies=true resource_boundary=memory&feature_category=importers)) - end + cli.run(%w(--queue-selector feature_category=chatops&has_external_dependencies=true resource_boundary=memory&feature_category=importers)) + end - it 'allows the special * selector' do - worker_queues = %w(foo bar baz) + it 'allows the special * selector' do + worker_queues = %w(foo bar baz) - expect(Gitlab::SidekiqConfig::CliMethods) - .to receive(:worker_queues).and_return(worker_queues) + expect(Gitlab::SidekiqConfig::CliMethods) + .to receive(:worker_queues).and_return(worker_queues) - expect(Gitlab::SidekiqCluster) - .to receive(:start).with([worker_queues], default_options) + expect(Gitlab::SidekiqCluster) + .to receive(:start).with([worker_queues], default_options) - cli.run(%W(#{flag} *)) - end + cli.run(%w(--queue-selector *)) + end - it 'errors when the selector matches no queues' do - expect(Gitlab::SidekiqCluster).not_to receive(:start) + it 'errors when the selector matches no queues' do + expect(Gitlab::SidekiqCluster).not_to receive(:start) - expect { cli.run(%W(#{flag} has_external_dependencies=true&has_external_dependencies=false)) } - .to raise_error(described_class::CommandError) - end + expect { cli.run(%w(--queue-selector has_external_dependencies=true&has_external_dependencies=false)) } + .to raise_error(described_class::CommandError) + end - it 'errors on an invalid query multiple queue groups correctly' do - expect(Gitlab::SidekiqCluster).not_to receive(:start) + it 'errors on an invalid query multiple queue groups correctly' do + expect(Gitlab::SidekiqCluster).not_to 
receive(:start) - expect { cli.run(%W(#{flag} unknown_field=chatops)) } - .to raise_error(Gitlab::SidekiqConfig::WorkerMatcher::QueryError) - end + expect { cli.run(%w(--queue-selector unknown_field=chatops)) } + .to raise_error(Gitlab::SidekiqConfig::WorkerMatcher::QueryError) end end end diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb index 731c509e221..dfdc1420eac 100644 --- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb +++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb @@ -228,6 +228,8 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do end context 'when the job performs database queries' do + include_context 'clear DB Load Balancing configuration' + before do allow(Time).to receive(:now).and_return(timestamp) allow(Process).to receive(:clock_gettime).and_call_original @@ -248,28 +250,112 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do ) end - it 'logs the database time' do - expect(logger).to receive(:info).with(expected_start_payload).ordered - expect(logger).to receive(:info).with(expected_end_payload_with_db).ordered + shared_examples 'performs database queries' do + it 'logs the database time', :aggregate_errors do + expect(logger).to receive(:info).with(expected_start_payload).ordered + expect(logger).to receive(:info).with(expected_end_payload_with_db).ordered - call_subject(job, 'test_queue') do - ActiveRecord::Base.connection.execute('SELECT pg_sleep(0.1);') + call_subject(job, 'test_queue') do + ActiveRecord::Base.connection.execute('SELECT pg_sleep(0.1);') + end + end + + it 'prevents database time from leaking to the next job', :aggregate_errors do + expect(logger).to receive(:info).with(expected_start_payload).ordered + expect(logger).to receive(:info).with(expected_end_payload_with_db).ordered + expect(logger).to receive(:info).with(expected_start_payload).ordered + expect(logger).to 
receive(:info).with(expected_end_payload).ordered + + call_subject(job.dup, 'test_queue') do + ActiveRecord::Base.connection.execute('SELECT pg_sleep(0.1);') + end + + Gitlab::SafeRequestStore.clear! + + call_subject(job.dup, 'test_queue') { } end end - it 'prevents database time from leaking to the next job' do - expect(logger).to receive(:info).with(expected_start_payload).ordered - expect(logger).to receive(:info).with(expected_end_payload_with_db).ordered - expect(logger).to receive(:info).with(expected_start_payload).ordered - expect(logger).to receive(:info).with(expected_end_payload).ordered + context 'when load balancing is disabled' do + before do + allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(false) + end + + let(:expected_end_payload_with_db) do + expected_end_payload.merge( + 'db_duration_s' => a_value >= 0.1, + 'db_count' => a_value >= 1, + 'db_cached_count' => 0, + 'db_write_count' => 0 + ) + end + + include_examples 'performs database queries' + end + + context 'when load balancing is enabled' do + before do + allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true) + end - call_subject(job.dup, 'test_queue') do - ActiveRecord::Base.connection.execute('SELECT pg_sleep(0.1);') + let(:expected_end_payload_with_db) do + expected_end_payload.merge( + 'db_duration_s' => a_value >= 0.1, + 'db_count' => a_value >= 1, + 'db_cached_count' => 0, + 'db_write_count' => 0, + 'db_replica_count' => 0, + 'db_replica_cached_count' => 0, + 'db_replica_wal_count' => 0, + 'db_replica_duration_s' => a_value >= 0, + 'db_primary_count' => a_value >= 1, + 'db_primary_cached_count' => 0, + 'db_primary_wal_count' => 0, + 'db_primary_duration_s' => a_value > 0 + ) end - Gitlab::SafeRequestStore.clear! 
+ let(:end_payload) do + start_payload.merge( + 'message' => 'TestWorker JID-da883554ee4fe414012f5f42: done: 0.0 sec', + 'job_status' => 'done', + 'duration_s' => 0.0, + 'completed_at' => timestamp.to_f, + 'cpu_s' => 1.111112, + 'db_duration_s' => 0.0, + 'db_cached_count' => 0, + 'db_count' => 0, + 'db_write_count' => 0, + 'db_replica_count' => 0, + 'db_replica_cached_count' => 0, + 'db_replica_wal_count' => 0, + 'db_replica_duration_s' => 0, + 'db_primary_count' => 0, + 'db_primary_cached_count' => 0, + 'db_primary_wal_count' => 0, + 'db_primary_duration_s' => 0 + ) + end + + include_examples 'performs database queries' + end + end + + context 'when the job uses load balancing capabilities' do + let(:expected_payload) { { 'database_chosen' => 'retry' } } + + before do + allow(Time).to receive(:now).and_return(timestamp) + allow(Process).to receive(:clock_gettime).and_call_original + end + + it 'logs the database chosen' do + expect(logger).to receive(:info).with(start_payload).ordered + expect(logger).to receive(:info).with(include(expected_payload)).ordered - call_subject(job.dup, 'test_queue') { } + call_subject(job, 'test_queue') do + job[:database_chosen] = 'retry' + end end end @@ -303,6 +389,39 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do expect { subject.call(job.dup, 'test_queue') {} }.not_to raise_error end end + + context 'when the job payload is compressed' do + let(:compressed_args) { "eJyLVspIzcnJV4oFAA88AxE=" } + let(:expected_start_payload) do + start_payload.merge( + 'args' => ['[COMPRESSED]'], + 'job_size_bytes' => Sidekiq.dump_json([compressed_args]).bytesize, + 'compressed' => true + ) + end + + let(:expected_end_payload) do + end_payload.merge( + 'args' => ['[COMPRESSED]'], + 'job_size_bytes' => Sidekiq.dump_json([compressed_args]).bytesize, + 'compressed' => true + ) + end + + it 'logs it in the done log' do + Timecop.freeze(timestamp) do + expect(logger).to receive(:info).with(expected_start_payload).ordered + 
expect(logger).to receive(:info).with(expected_end_payload).ordered + + job['args'] = [compressed_args] + job['compressed'] = true + + call_subject(job, 'test_queue') do + ::Gitlab::SidekiqMiddleware::SizeLimiter::Compressor.decompress(job) + end + end + end + end end describe '#add_time_keys!' do diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb index 0285467ecab..a10a8883591 100644 --- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb +++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb @@ -18,14 +18,43 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi end describe '#schedule' do - it 'calls schedule on the strategy' do - expect do |block| - expect_next_instance_of(Gitlab::SidekiqMiddleware::DuplicateJobs::Strategies::UntilExecuting) do |strategy| - expect(strategy).to receive(:schedule).with(job, &block) + shared_examples 'scheduling with deduplication class' do |strategy_class| + it 'calls schedule on the strategy' do + expect do |block| + expect_next_instance_of("Gitlab::SidekiqMiddleware::DuplicateJobs::Strategies::#{strategy_class}".constantize) do |strategy| + expect(strategy).to receive(:schedule).with(job, &block) + end + + duplicate_job.schedule(&block) + end.to yield_control + end + end + + it_behaves_like 'scheduling with deduplication class', 'UntilExecuting' + + context 'when the deduplication depends on a FF' do + before do + skip_feature_flags_yaml_validation + skip_default_enabled_yaml_check + + allow(AuthorizedProjectsWorker).to receive(:get_deduplication_options).and_return(feature_flag: :my_feature_flag) + end + + context 'when the feature flag is enabled' do + before do + stub_feature_flags(my_feature_flag: true) end - duplicate_job.schedule(&block) - end.to yield_control + it_behaves_like 'scheduling with deduplication class', 'UntilExecuting' + end + + 
context 'when the feature flag is disabled' do + before do + stub_feature_flags(my_feature_flag: false) + end + + it_behaves_like 'scheduling with deduplication class', 'None' + end end end @@ -51,6 +80,10 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi .from([nil, -2]) .to(['123', be_within(1).of(described_class::DUPLICATE_KEY_TTL)]) end + + it "adds the idempotency key to the jobs payload" do + expect { duplicate_job.check! }.to change { job['idempotency_key'] }.from(nil).to(idempotency_key) + end end context 'when there was already a job with same arguments in the same queue' do @@ -81,14 +114,39 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi context 'when the key exists in redis' do before do - set_idempotency_key(idempotency_key, 'existing-key') + set_idempotency_key(idempotency_key, 'existing-jid') end - it 'removes the key from redis' do - expect { duplicate_job.delete! } - .to change { read_idempotency_key_with_ttl(idempotency_key) } - .from(['existing-key', -1]) - .to([nil, -2]) + shared_examples 'deleting the duplicate job' do + it 'removes the key from redis' do + expect { duplicate_job.delete! } + .to change { read_idempotency_key_with_ttl(idempotency_key) } + .from(['existing-jid', -1]) + .to([nil, -2]) + end + end + + context 'when the idempotency key is not part of the job' do + it_behaves_like 'deleting the duplicate job' + + it 'recalculates the idempotency hash' do + expect(duplicate_job).to receive(:idempotency_hash).and_call_original + + duplicate_job.delete! + end + end + + context 'when the idempotency key is part of the job' do + let(:idempotency_key) { 'not the same as what we calculate' } + let(:job) { super().merge('idempotency_key' => idempotency_key) } + + it_behaves_like 'deleting the duplicate job' + + it 'does not recalculate the idempotency hash' do + expect(duplicate_job).not_to receive(:idempotency_hash) + + duplicate_job.delete! 
+ end end end end diff --git a/spec/lib/gitlab/sidekiq_middleware/instrumentation_logger_spec.rb b/spec/lib/gitlab/sidekiq_middleware/instrumentation_logger_spec.rb index eb9ba50cdcd..8cf65e1be5b 100644 --- a/spec/lib/gitlab/sidekiq_middleware/instrumentation_logger_spec.rb +++ b/spec/lib/gitlab/sidekiq_middleware/instrumentation_logger_spec.rb @@ -24,58 +24,10 @@ RSpec.describe Gitlab::SidekiqMiddleware::InstrumentationLogger do stub_const('TestWorker', worker) end - describe '.keys' do - it 'returns all available payload keys' do - expected_keys = [ - :cpu_s, - :gitaly_calls, - :gitaly_duration_s, - :rugged_calls, - :rugged_duration_s, - :elasticsearch_calls, - :elasticsearch_duration_s, - :elasticsearch_timed_out_count, - :mem_objects, - :mem_bytes, - :mem_mallocs, - :redis_calls, - :redis_duration_s, - :redis_read_bytes, - :redis_write_bytes, - :redis_action_cable_calls, - :redis_action_cable_duration_s, - :redis_action_cable_read_bytes, - :redis_action_cable_write_bytes, - :redis_cache_calls, - :redis_cache_duration_s, - :redis_cache_read_bytes, - :redis_cache_write_bytes, - :redis_queues_calls, - :redis_queues_duration_s, - :redis_queues_read_bytes, - :redis_queues_write_bytes, - :redis_shared_state_calls, - :redis_shared_state_duration_s, - :redis_shared_state_read_bytes, - :redis_shared_state_write_bytes, - :db_count, - :db_write_count, - :db_cached_count, - :external_http_count, - :external_http_duration_s, - :rack_attack_redis_count, - :rack_attack_redis_duration_s - ] - - expect(described_class.keys).to include(*expected_keys) - end - end - describe '#call', :request_store do let(:instrumentation_values) do { cpu_s: 10, - unknown_attribute: 123, db_count: 0, db_cached_count: 0, db_write_count: 0, @@ -90,12 +42,10 @@ RSpec.describe Gitlab::SidekiqMiddleware::InstrumentationLogger do end end - it 'merges correct instrumentation data in the job' do + it 'merges all instrumentation data in the job' do expect { |b| subject.call(worker, job, queue, &b) }.to 
yield_control - expected_values = instrumentation_values.except(:unknown_attribute) - - expect(job[:instrumentation]).to eq(expected_values) + expect(job[:instrumentation]).to eq(instrumentation_values) end end end diff --git a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb index 95be76ce351..34b4541f339 100644 --- a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb +++ b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb @@ -107,5 +107,110 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do let(:job_status) { :done } let(:labels_with_job_status) { labels.merge(job_status: job_status.to_s) } end + + context 'DB load balancing' do + using RSpec::Parameterized::TableSyntax + + subject { described_class.new } + + let(:queue) { :test } + let(:worker_class) { worker.class } + let(:job) { {} } + let(:job_status) { :done } + let(:labels_with_job_status) { default_labels.merge(job_status: job_status.to_s) } + let(:default_labels) do + { queue: queue.to_s, + worker: worker_class.to_s, + boundary: "", + external_dependencies: "no", + feature_category: "", + urgency: "low" } + end + + before do + stub_const('TestWorker', Class.new) + TestWorker.class_eval do + include Sidekiq::Worker + include WorkerAttributes + end + end + + let(:worker) { TestWorker.new } + + include_context 'server metrics with mocked prometheus' + + context 'when load_balancing is enabled' do + let(:load_balancing_metric) { double('load balancing metric') } + + include_context 'clear DB Load Balancing configuration' + + before do + allow(::Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true) + allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_load_balancing_count, anything).and_return(load_balancing_metric) + end + + describe '#initialize' do + it 'sets load_balancing metrics' do + expect(Gitlab::Metrics).to receive(:counter).with(:sidekiq_load_balancing_count, 
anything).and_return(load_balancing_metric) + + subject + end + end + + describe '#call' do + include_context 'server metrics call' + + context 'when :database_chosen is provided' do + where(:database_chosen) do + %w[primary retry replica] + end + + with_them do + context "when #{params[:database_chosen]} is used" do + let(:labels_with_load_balancing) do + labels_with_job_status.merge(database_chosen: database_chosen, data_consistency: 'delayed') + end + + before do + job[:database_chosen] = database_chosen + job[:data_consistency] = 'delayed' + allow(load_balancing_metric).to receive(:increment) + end + + it 'increment sidekiq_load_balancing_count' do + expect(load_balancing_metric).to receive(:increment).with(labels_with_load_balancing, 1) + + described_class.new.call(worker, job, :test) { nil } + end + end + end + end + + context 'when :database_chosen is not provided' do + it 'does not increment sidekiq_load_balancing_count' do + expect(load_balancing_metric).not_to receive(:increment) + + described_class.new.call(worker, job, :test) { nil } + end + end + end + end + + context 'when load_balancing is disabled' do + include_context 'clear DB Load Balancing configuration' + + before do + allow(::Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(false) + end + + describe '#initialize' do + it 'doesnt set load_balancing metrics' do + expect(Gitlab::Metrics).not_to receive(:counter).with(:sidekiq_load_balancing_count, anything) + + subject + end + end + end + end end # rubocop: enable RSpec/MultipleMemoizedHelpers diff --git a/spec/lib/gitlab/sidekiq_middleware/size_limiter/compressor_spec.rb b/spec/lib/gitlab/sidekiq_middleware/size_limiter/compressor_spec.rb new file mode 100644 index 00000000000..b9b58683459 --- /dev/null +++ b/spec/lib/gitlab/sidekiq_middleware/size_limiter/compressor_spec.rb @@ -0,0 +1,200 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Compressor do + using 
RSpec::Parameterized::TableSyntax + + let(:base_payload) do + { + "class" => "ARandomWorker", + "queue" => "a_worker", + "retry" => true, + "jid" => "d774900367dc8b2962b2479c", + "created_at" => 1234567890, + "enqueued_at" => 1234567890 + } + end + + describe '.compressed?' do + where(:job, :result) do + {} | false + base_payload.merge("args" => [123, 'hello', ['world']]) | false + base_payload.merge("args" => ['eJzLSM3JyQcABiwCFQ=='], 'compressed' => true) | true + end + + with_them do + it 'returns whether the job payload is compressed' do + expect(described_class.compressed?(job)).to eql(result) + end + end + end + + describe '.compress' do + where(:args) do + [ + nil, + [], + ['hello'], + [ + { + "job_class" => "SomeWorker", + "job_id" => "b4a577edbccf1d805744efa9", + "provider_job_id" => nil, + "queue_name" => "default", + "arguments" => ["some", ["argument"]], + "executions" => 0, + "locale" => "en", + "attempt_number" => 1 + }, + nil, + 'hello', + 12345678901234567890, + ['nice'] + ], + [ + '2021-05-13_09:59:37.57483 [35mrails-background-jobs : [0m{"severity":"ERROR","time":"2021-05-13T09:59:37.574Z"', + 'bonne journée - ขอให้มีความสุขในวันนี้ - một ngày mới tốt lành - 좋은 하루 되세요 - ごきげんよう', + '🤝 - 🦊' + ] + ] + end + + with_them do + let(:payload) { base_payload.merge("args" => args) } + + it 'injects compressed data' do + serialized_args = Sidekiq.dump_json(args) + described_class.compress(payload, serialized_args) + + expect(payload['args'].length).to be(1) + expect(payload['args'].first).to be_a(String) + expect(payload['compressed']).to be(true) + expect(payload['original_job_size_bytes']).to eql(serialized_args.bytesize) + expect do + Sidekiq.dump_json(payload) + end.not_to raise_error + end + + it 'can decompress the payload' do + original_payload = payload.deep_dup + + described_class.compress(payload, Sidekiq.dump_json(args)) + described_class.decompress(payload) + + expect(payload).to eql(original_payload) + end + end + end + + describe '.decompress' 
do + context 'job payload is not compressed' do + let(:payload) { base_payload.merge("args" => ['hello']) } + + it 'preserves the payload after decompression' do + original_payload = payload.deep_dup + + described_class.decompress(payload) + + expect(payload).to eql(original_payload) + end + end + + context 'job payload is compressed with a default level' do + let(:payload) do + base_payload.merge( + 'args' => ['eF6LVspIzcnJV9JRKs8vyklRigUAMq0FqQ=='], + 'compressed' => true + ) + end + + it 'decompresses and clean up the job payload' do + described_class.decompress(payload) + + expect(payload['args']).to eql(%w[hello world]) + expect(payload).not_to have_key('compressed') + end + end + + context 'job payload is compressed with a different level' do + let(:payload) do + base_payload.merge( + 'args' => [Base64.strict_encode64(Zlib::Deflate.deflate(Sidekiq.dump_json(%w[hello world]), 9))], + 'compressed' => true + ) + end + + it 'decompresses and clean up the job payload' do + described_class.decompress(payload) + + expect(payload['args']).to eql(%w[hello world]) + expect(payload).not_to have_key('compressed') + end + end + + context 'job payload argument list is malformed' do + let(:payload) do + base_payload.merge( + 'args' => ['eNqLVspIzcnJV9JRKs8vyklRigUAMq0FqQ==', 'something else'], + 'compressed' => true + ) + end + + it 'tracks the conflicting exception' do + expect(::Gitlab::ErrorTracking).to receive(:track_and_raise_exception).with( + be_a(::Gitlab::SidekiqMiddleware::SizeLimiter::Compressor::PayloadDecompressionConflictError) + ) + + described_class.decompress(payload) + + expect(payload['args']).to eql(%w[hello world]) + expect(payload).not_to have_key('compressed') + end + end + + context 'job payload is not a valid base64 string' do + let(:payload) do + base_payload.merge( + 'args' => ['hello123'], + 'compressed' => true + ) + end + + it 'raises an exception' do + expect do + described_class.decompress(payload) + end.to 
raise_error(::Gitlab::SidekiqMiddleware::SizeLimiter::Compressor::PayloadDecompressionError) + end + end + + context 'job payload compression does not contain a valid Gzip header' do + let(:payload) do + base_payload.merge( + 'args' => ['aGVsbG8='], + 'compressed' => true + ) + end + + it 'raises an exception' do + expect do + described_class.decompress(payload) + end.to raise_error(::Gitlab::SidekiqMiddleware::SizeLimiter::Compressor::PayloadDecompressionError) + end + end + + context 'job payload compression does not contain a valid Gzip body' do + let(:payload) do + base_payload.merge( + 'args' => ["eNqLVspIzcnJVw=="], + 'compressed' => true + ) + end + + it 'raises an exception' do + expect do + described_class.decompress(payload) + end.to raise_error(::Gitlab::SidekiqMiddleware::SizeLimiter::Compressor::PayloadDecompressionError) + end + end + end +end diff --git a/spec/lib/gitlab/sidekiq_middleware/size_limiter/server_spec.rb b/spec/lib/gitlab/sidekiq_middleware/size_limiter/server_spec.rb new file mode 100644 index 00000000000..91b8ef97ab4 --- /dev/null +++ b/spec/lib/gitlab/sidekiq_middleware/size_limiter/server_spec.rb @@ -0,0 +1,33 @@ +# frozen_string_literal: true + +require 'spec_helper' + +# rubocop: disable RSpec/MultipleMemoizedHelpers +RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Server, :clean_gitlab_redis_queues do + subject(:middleware) { described_class.new } + + let(:worker) { Class.new } + let(:job) do + { + "class" => "ARandomWorker", + "queue" => "a_worker", + "args" => %w[Hello World], + "created_at" => 1234567890, + "enqueued_at" => 1234567890 + } + end + + before do + allow(::Gitlab::SidekiqMiddleware::SizeLimiter::Compressor).to receive(:compress) + end + + it 'yields block' do + expect { |b| subject.call(worker, job, :test, &b) }.to yield_control.once + end + + it 'calls the Compressor' do + expect(::Gitlab::SidekiqMiddleware::SizeLimiter::Compressor).to receive(:decompress).with(job) + + subject.call(worker, job, :test) {} + 
end +end diff --git a/spec/lib/gitlab/sidekiq_middleware/size_limiter/validator_spec.rb b/spec/lib/gitlab/sidekiq_middleware/size_limiter/validator_spec.rb index 3140686c908..4fbe59c3c27 100644 --- a/spec/lib/gitlab/sidekiq_middleware/size_limiter/validator_spec.rb +++ b/spec/lib/gitlab/sidekiq_middleware/size_limiter/validator_spec.rb @@ -3,6 +3,21 @@ require 'spec_helper' RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator do + let(:base_payload) do + { + "class" => "ARandomWorker", + "queue" => "a_worker", + "retry" => true, + "jid" => "d774900367dc8b2962b2479c", + "created_at" => 1234567890, + "enqueued_at" => 1234567890 + } + end + + def job_payload(args = {}) + base_payload.merge('args' => args) + end + let(:worker_class) do Class.new do def self.name @@ -24,8 +39,8 @@ RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator do it 'does not log a warning message' do expect(::Sidekiq.logger).not_to receive(:warn) - described_class.new(TestSizeLimiterWorker, {}, mode: 'track') - described_class.new(TestSizeLimiterWorker, {}, mode: 'raise') + described_class.new(TestSizeLimiterWorker, job_payload, mode: 'track') + described_class.new(TestSizeLimiterWorker, job_payload, mode: 'compress') end end @@ -33,7 +48,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator do it 'defaults to track mode and logs a warning message' do expect(::Sidekiq.logger).to receive(:warn).with('Invalid Sidekiq size limiter mode: invalid. 
Fallback to track mode.') - validator = described_class.new(TestSizeLimiterWorker, {}, mode: 'invalid') + validator = described_class.new(TestSizeLimiterWorker, job_payload, mode: 'invalid') expect(validator.mode).to eql('track') end @@ -43,7 +58,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator do it 'defaults to track mode' do expect(::Sidekiq.logger).not_to receive(:warn) - validator = described_class.new(TestSizeLimiterWorker, {}) + validator = described_class.new(TestSizeLimiterWorker, job_payload) expect(validator.mode).to eql('track') end @@ -53,8 +68,8 @@ RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator do it 'does not log a warning message' do expect(::Sidekiq.logger).not_to receive(:warn) - described_class.new(TestSizeLimiterWorker, {}, size_limit: 300) - described_class.new(TestSizeLimiterWorker, {}, size_limit: 0) + described_class.new(TestSizeLimiterWorker, job_payload, size_limit: 300) + described_class.new(TestSizeLimiterWorker, job_payload, size_limit: 0) end end @@ -62,7 +77,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator do it 'defaults to 0 and logs a warning message' do expect(::Sidekiq.logger).to receive(:warn).with('Invalid Sidekiq size limiter limit: -1') - described_class.new(TestSizeLimiterWorker, {}, size_limit: -1) + described_class.new(TestSizeLimiterWorker, job_payload, size_limit: -1) end end @@ -70,15 +85,63 @@ RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator do it 'defaults to 0' do expect(::Sidekiq.logger).not_to receive(:warn) - validator = described_class.new(TestSizeLimiterWorker, {}) + validator = described_class.new(TestSizeLimiterWorker, job_payload) expect(validator.size_limit).to be(0) end end + + context 'when the compression threshold is valid' do + it 'does not log a warning message' do + expect(::Sidekiq.logger).not_to receive(:warn) + + described_class.new(TestSizeLimiterWorker, job_payload, compression_threshold: 300) + 
described_class.new(TestSizeLimiterWorker, job_payload, compression_threshold: 1) + end + end + + context 'when the compression threshold is negative' do + it 'logs a warning message' do + expect(::Sidekiq.logger).to receive(:warn).with('Invalid Sidekiq size limiter compression threshold: -1') + + described_class.new(TestSizeLimiterWorker, job_payload, compression_threshold: -1) + end + + it 'falls back to the default' do + validator = described_class.new(TestSizeLimiterWorker, job_payload, compression_threshold: -1) + + expect(validator.compression_threshold).to be(100_000) + end + end + + context 'when the compression threshold is zero' do + it 'logs a warning message' do + expect(::Sidekiq.logger).to receive(:warn).with('Invalid Sidekiq size limiter compression threshold: 0') + + described_class.new(TestSizeLimiterWorker, job_payload, compression_threshold: 0) + end + + it 'falls back to the default' do + validator = described_class.new(TestSizeLimiterWorker, job_payload, compression_threshold: 0) + + expect(validator.compression_threshold).to be(100_000) + end + end + + context 'when the compression threshold is empty' do + it 'defaults to 100_000' do + expect(::Sidekiq.logger).not_to receive(:warn) + + validator = described_class.new(TestSizeLimiterWorker, job_payload) + + expect(validator.compression_threshold).to be(100_000) + end + end end shared_examples 'validate limit job payload size' do context 'in track mode' do + let(:compression_threshold) { nil } let(:mode) { 'track' } context 'when size limit negative' do @@ -87,11 +150,11 @@ RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator do it 'does not track jobs' do expect(Gitlab::ErrorTracking).not_to receive(:track_exception) - validate.call(TestSizeLimiterWorker, { a: 'a' * 300 }) + validate.call(TestSizeLimiterWorker, job_payload(a: 'a' * 300)) end it 'does not raise exception' do - expect { validate.call(TestSizeLimiterWorker, { a: 'a' * 300 }) }.not_to raise_error + expect { 
validate.call(TestSizeLimiterWorker, job_payload(a: 'a' * 300)) }.not_to raise_error end end @@ -101,11 +164,13 @@ RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator do it 'does not track jobs' do expect(Gitlab::ErrorTracking).not_to receive(:track_exception) - validate.call(TestSizeLimiterWorker, { a: 'a' * 300 }) + validate.call(TestSizeLimiterWorker, job_payload(a: 'a' * 300)) end it 'does not raise exception' do - expect { validate.call(TestSizeLimiterWorker, { a: 'a' * 300 }) }.not_to raise_error + expect do + validate.call(TestSizeLimiterWorker, job_payload(a: 'a' * 300)) + end.not_to raise_error end end @@ -117,11 +182,13 @@ RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator do be_a(Gitlab::SidekiqMiddleware::SizeLimiter::ExceedLimitError) ) - validate.call(TestSizeLimiterWorker, { a: 'a' * 100 }) + validate.call(TestSizeLimiterWorker, job_payload(a: 'a' * 100)) end it 'does not raise an exception' do - expect { validate.call(TestSizeLimiterWorker, { a: 'a' * 300 }) }.not_to raise_error + expect do + validate.call(TestSizeLimiterWorker, job_payload(a: 'a' * 300)) + end.not_to raise_error end context 'when the worker has big_payload attribute' do @@ -132,13 +199,17 @@ RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator do it 'does not track jobs' do expect(Gitlab::ErrorTracking).not_to receive(:track_exception) - validate.call(TestSizeLimiterWorker, { a: 'a' * 300 }) - validate.call('TestSizeLimiterWorker', { a: 'a' * 300 }) + validate.call(TestSizeLimiterWorker, job_payload(a: 'a' * 300)) + validate.call('TestSizeLimiterWorker', job_payload(a: 'a' * 300)) end it 'does not raise an exception' do - expect { validate.call(TestSizeLimiterWorker, { a: 'a' * 300 }) }.not_to raise_error - expect { validate.call('TestSizeLimiterWorker', { a: 'a' * 300 }) }.not_to raise_error + expect do + validate.call(TestSizeLimiterWorker, job_payload(a: 'a' * 300)) + end.not_to raise_error + expect do + 
validate.call('TestSizeLimiterWorker', job_payload(a: 'a' * 300)) + end.not_to raise_error end end end @@ -149,63 +220,60 @@ RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator do it 'does not track job' do expect(Gitlab::ErrorTracking).not_to receive(:track_exception) - validate.call(TestSizeLimiterWorker, { a: 'a' }) + validate.call(TestSizeLimiterWorker, job_payload(a: 'a')) end it 'does not raise an exception' do - expect { validate.call(TestSizeLimiterWorker, { a: 'a' }) }.not_to raise_error + expect { validate.call(TestSizeLimiterWorker, job_payload(a: 'a')) }.not_to raise_error end end end - context 'in raise mode' do - let(:mode) { 'raise' } - - context 'when size limit is negative' do - let(:size_limit) { -1 } - - it 'does not raise exception' do - expect { validate.call(TestSizeLimiterWorker, { a: 'a' * 300 }) }.not_to raise_error - end - end + context 'in compress mode' do + let(:mode) { 'compress' } - context 'when size limit is 0' do - let(:size_limit) { 0 } + context 'when job size is less than compression threshold' do + let(:size_limit) { 50 } + let(:compression_threshold) { 30 } + let(:job) { job_payload(a: 'a' * 10) } - it 'does not raise exception' do - expect { validate.call(TestSizeLimiterWorker, { a: 'a' * 300 }) }.not_to raise_error + it 'does not raise an exception' do + expect(::Gitlab::SidekiqMiddleware::SizeLimiter::Compressor).not_to receive(:compress) + expect { validate.call(TestSizeLimiterWorker, job_payload(a: 'a')) }.not_to raise_error end end - context 'when job size is bigger than size limit' do + context 'when job size is bigger than compression threshold and less than size limit after compressed' do let(:size_limit) { 50 } + let(:compression_threshold) { 30 } + let(:args) { { a: 'a' * 300 } } + let(:job) { job_payload(args) } - it 'raises an exception' do - expect do - validate.call(TestSizeLimiterWorker, { a: 'a' * 300 }) - end.to raise_error( - Gitlab::SidekiqMiddleware::SizeLimiter::ExceedLimitError, - 
/TestSizeLimiterWorker job exceeds payload size limit/i - ) - end - - context 'when the worker has big_payload attribute' do - before do - worker_class.big_payload! - end + it 'does not raise an exception' do + expect(::Gitlab::SidekiqMiddleware::SizeLimiter::Compressor).to receive(:compress).with( + job, Sidekiq.dump_json(args) + ).and_return('a' * 40) - it 'does not raise an exception' do - expect { validate.call(TestSizeLimiterWorker, { a: 'a' * 300 }) }.not_to raise_error - expect { validate.call('TestSizeLimiterWorker', { a: 'a' * 300 }) }.not_to raise_error - end + expect do + validate.call(TestSizeLimiterWorker, job) + end.not_to raise_error end end - context 'when job size is less than size limit' do + context 'when job size is bigger than compression threshold and bigger than size limit after compressed' do let(:size_limit) { 50 } + let(:compression_threshold) { 30 } + let(:args) { { a: 'a' * 3000 } } + let(:job) { job_payload(args) } it 'does not raise an exception' do - expect { validate.call(TestSizeLimiterWorker, { a: 'a' }) }.not_to raise_error + expect(::Gitlab::SidekiqMiddleware::SizeLimiter::Compressor).to receive(:compress).with( + job, Sidekiq.dump_json(args) + ).and_return('a' * 60) + + expect do + validate.call(TestSizeLimiterWorker, job) + end.to raise_error(Gitlab::SidekiqMiddleware::SizeLimiter::ExceedLimitError) end end end @@ -218,6 +286,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator do before do stub_env('GITLAB_SIDEKIQ_SIZE_LIMITER_MODE', mode) stub_env('GITLAB_SIDEKIQ_SIZE_LIMITER_LIMIT_BYTES', size_limit) + stub_env('GITLAB_SIDEKIQ_SIZE_LIMITER_COMPRESSION_THRESHOLD_BYTES', compression_threshold) end it_behaves_like 'validate limit job payload size' @@ -226,14 +295,14 @@ RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator do context 'when creating an instance with the related ENV variables' do let(:validate) do ->(worker_clas, job) do - validator = described_class.new(worker_class, job, mode: mode, 
size_limit: size_limit) - validator.validate! + described_class.new(worker_class, job).validate! end end before do stub_env('GITLAB_SIDEKIQ_SIZE_LIMITER_MODE', mode) stub_env('GITLAB_SIDEKIQ_SIZE_LIMITER_LIMIT_BYTES', size_limit) + stub_env('GITLAB_SIDEKIQ_SIZE_LIMITER_COMPRESSION_THRESHOLD_BYTES', compression_threshold) end it_behaves_like 'validate limit job payload size' @@ -242,7 +311,10 @@ RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator do context 'when creating an instance with mode and size limit' do let(:validate) do ->(worker_clas, job) do - validator = described_class.new(worker_class, job, mode: mode, size_limit: size_limit) + validator = described_class.new( + worker_class, job, + mode: mode, size_limit: size_limit, compression_threshold: compression_threshold + ) validator.validate! end end diff --git a/spec/lib/gitlab/sidekiq_middleware_spec.rb b/spec/lib/gitlab/sidekiq_middleware_spec.rb index 0efdef0c999..5e4e79e818e 100644 --- a/spec/lib/gitlab/sidekiq_middleware_spec.rb +++ b/spec/lib/gitlab/sidekiq_middleware_spec.rb @@ -4,215 +4,212 @@ require 'spec_helper' require 'sidekiq/testing' RSpec.describe Gitlab::SidekiqMiddleware do - before do - stub_const('TestWorker', Class.new) + let(:job_args) { [0.01] } + let(:disabled_sidekiq_middlewares) { [] } + let(:chain) { Sidekiq::Middleware::Chain.new } + let(:queue) { 'test' } + let(:enabled_sidekiq_middlewares) { all_sidekiq_middlewares - disabled_sidekiq_middlewares } + let(:worker_class) do + Class.new do + def self.name + 'TestWorker' + end - TestWorker.class_eval do - include Sidekiq::Worker include ApplicationWorker - def perform(_arg) + def perform(*args) Gitlab::SafeRequestStore['gitaly_call_actual'] = 1 Gitlab::SafeRequestStore[:gitaly_query_time] = 5 end end end - around do |example| - Sidekiq::Testing.inline! 
{ example.run } + before do + stub_const('TestWorker', worker_class) end - let(:worker_class) { TestWorker } - let(:job_args) { [0.01] } - - # The test sets up a new server middleware stack, ensuring that the - # appropriate middlewares, as passed into server_configurator, - # are invoked. - # Additionally the test ensure that each middleware is - # 1) not failing - # 2) yielding exactly once - describe '.server_configurator' do - around do |example| - with_sidekiq_server_middleware do |chain| - described_class.server_configurator( - metrics: metrics, - arguments_logger: arguments_logger, - memory_killer: memory_killer - ).call(chain) + shared_examples "a middleware chain" do |load_balancing_enabled| + before do + allow(::Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(load_balancing_enabled) + configurator.call(chain) + end - example.run + it "passes through the right middlewares", :aggregate_failures do + enabled_sidekiq_middlewares.each do |middleware| + expect_next_instances_of(middleware, 1, true) do |middleware_instance| + expect(middleware_instance).to receive(:call).with(*middleware_expected_args).once.and_call_original + end end + + expect { |b| chain.invoke(*worker_args, &b) }.to yield_control.once end + end + + shared_examples "a middleware chain for mailer" do |load_balancing_enabled| + let(:worker_class) { ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper } - let(:middleware_expected_args) { [a_kind_of(worker_class), hash_including({ 'args' => job_args }), anything] } + it_behaves_like "a middleware chain", load_balancing_enabled + end + + describe '.server_configurator' do + let(:configurator) { described_class.server_configurator } + let(:worker_args) { [worker_class.new, { 'args' => job_args }, queue] } + let(:middleware_expected_args) { [a_kind_of(worker_class), hash_including({ 'args' => job_args }), queue] } let(:all_sidekiq_middlewares) do [ - Gitlab::SidekiqMiddleware::Monitor, - Gitlab::SidekiqMiddleware::BatchLoader, - 
Labkit::Middleware::Sidekiq::Server, - Gitlab::SidekiqMiddleware::InstrumentationLogger, - Gitlab::SidekiqVersioning::Middleware, - Gitlab::SidekiqStatus::ServerMiddleware, - Gitlab::SidekiqMiddleware::ServerMetrics, - Gitlab::SidekiqMiddleware::ArgumentsLogger, - Gitlab::SidekiqMiddleware::MemoryKiller, - Gitlab::SidekiqMiddleware::RequestStoreMiddleware, - Gitlab::SidekiqMiddleware::ExtraDoneLogMetadata, - Gitlab::SidekiqMiddleware::WorkerContext::Server, - Gitlab::SidekiqMiddleware::AdminMode::Server, - Gitlab::SidekiqMiddleware::DuplicateJobs::Server + ::Gitlab::SidekiqMiddleware::Monitor, + ::Gitlab::SidekiqMiddleware::ServerMetrics, + ::Gitlab::SidekiqMiddleware::ArgumentsLogger, + ::Gitlab::SidekiqMiddleware::MemoryKiller, + ::Gitlab::SidekiqMiddleware::RequestStoreMiddleware, + ::Gitlab::SidekiqMiddleware::ExtraDoneLogMetadata, + ::Gitlab::SidekiqMiddleware::BatchLoader, + ::Labkit::Middleware::Sidekiq::Server, + ::Gitlab::SidekiqMiddleware::InstrumentationLogger, + ::Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, + ::Gitlab::SidekiqMiddleware::AdminMode::Server, + ::Gitlab::SidekiqVersioning::Middleware, + ::Gitlab::SidekiqStatus::ServerMiddleware, + ::Gitlab::SidekiqMiddleware::WorkerContext::Server, + ::Gitlab::SidekiqMiddleware::DuplicateJobs::Server ] end - let(:enabled_sidekiq_middlewares) { all_sidekiq_middlewares - disabled_sidekiq_middlewares } + describe "server metrics" do + around do |example| + with_sidekiq_server_middleware do |chain| + described_class.server_configurator( + metrics: true, + arguments_logger: true, + memory_killer: true + ).call(chain) - shared_examples "a server middleware chain" do - it "passes through the right server middlewares" do - enabled_sidekiq_middlewares.each do |middleware| - expect_next_instance_of(middleware) do |middleware_instance| - expect(middleware_instance).to receive(:call).with(*middleware_expected_args).once.and_call_original - end + Sidekiq::Testing.inline! 
{ example.run } end + end + let(:gitaly_histogram) { double(:gitaly_histogram) } - disabled_sidekiq_middlewares.each do |middleware| - expect(middleware).not_to receive(:new) - end + before do + allow(Gitlab::Metrics).to receive(:histogram).and_call_original + + allow(Gitlab::Metrics).to receive(:histogram) + .with(:sidekiq_jobs_gitaly_seconds, anything, anything, anything) + .and_return(gitaly_histogram) + end + + it "records correct Gitaly duration" do + expect(gitaly_histogram).to receive(:observe).with(anything, 5.0) worker_class.perform_async(*job_args) end end - shared_examples "a server middleware chain for mailer" do - let(:worker_class) { ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper } - let(:job_args) do - [ - { - "job_class" => "ActionMailer::MailDeliveryJob", - "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", - "provider_job_id" => nil, - "queue_name" => "mailers", - "priority" => nil, - "arguments" => [ - "Notify", - "test_email", - "deliver_now", - { - "args" => [ - "test@example.com", - "subject", - "body" - ], - ActiveJob::Arguments.const_get('RUBY2_KEYWORDS_KEY', false) => ["args"] - } - ], - "executions" => 0, - "exception_executions" => {}, - "locale" => "en", - "timezone" => "UTC", - "enqueued_at" => "2020-07-27T07:43:31Z" - } - ] + context "all optional middlewares on" do + context "when load balancing is enabled" do + before do + allow(::Gitlab::Database::LoadBalancing).to receive_message_chain(:proxy, :load_balancer, :release_host) + end + + it_behaves_like "a middleware chain", true + it_behaves_like "a middleware chain for mailer", true end - it_behaves_like "a server middleware chain" - end + context "when load balancing is disabled" do + let(:disabled_sidekiq_middlewares) do + [ + Gitlab::Database::LoadBalancing::SidekiqServerMiddleware + ] + end - context "all optional middlewares off" do - let(:metrics) { false } - let(:arguments_logger) { false } - let(:memory_killer) { false } - let(:disabled_sidekiq_middlewares) do - [ - 
Gitlab::SidekiqMiddleware::ServerMetrics, - Gitlab::SidekiqMiddleware::ArgumentsLogger, - Gitlab::SidekiqMiddleware::MemoryKiller - ] + it_behaves_like "a middleware chain", false + it_behaves_like "a middleware chain for mailer", false end - - it_behaves_like "a server middleware chain" - it_behaves_like "a server middleware chain for mailer" end - context "all optional middlewares on" do - let(:metrics) { true } - let(:arguments_logger) { true } - let(:memory_killer) { true } - let(:disabled_sidekiq_middlewares) { [] } - - it_behaves_like "a server middleware chain" - it_behaves_like "a server middleware chain for mailer" + context "all optional middlewares off" do + let(:configurator) do + described_class.server_configurator( + metrics: false, + arguments_logger: false, + memory_killer: false + ) + end - context "server metrics" do - let(:gitaly_histogram) { double(:gitaly_histogram) } + context "when load balancing is enabled" do + let(:disabled_sidekiq_middlewares) do + [ + Gitlab::SidekiqMiddleware::ServerMetrics, + Gitlab::SidekiqMiddleware::ArgumentsLogger, + Gitlab::SidekiqMiddleware::MemoryKiller + ] + end before do - allow(Gitlab::Metrics).to receive(:histogram).and_call_original - - allow(Gitlab::Metrics).to receive(:histogram) - .with(:sidekiq_jobs_gitaly_seconds, anything, anything, anything) - .and_return(gitaly_histogram) + allow(::Gitlab::Database::LoadBalancing).to receive_message_chain(:proxy, :load_balancer, :release_host) end - it "records correct Gitaly duration" do - expect(gitaly_histogram).to receive(:observe).with(anything, 5.0) + it_behaves_like "a middleware chain", true + it_behaves_like "a middleware chain for mailer", true + end - worker_class.perform_async(*job_args) + context "when load balancing is disabled" do + let(:disabled_sidekiq_middlewares) do + [ + Gitlab::SidekiqMiddleware::ServerMetrics, + Gitlab::SidekiqMiddleware::ArgumentsLogger, + Gitlab::SidekiqMiddleware::MemoryKiller, + 
Gitlab::Database::LoadBalancing::SidekiqServerMiddleware + ] end + + it_behaves_like "a middleware chain", false + it_behaves_like "a middleware chain for mailer", false end end end - # The test sets up a new client middleware stack. The test ensures - # that each middleware is: - # 1) not failing - # 2) yielding exactly once describe '.client_configurator' do - let(:chain) { Sidekiq::Middleware::Chain.new } - let(:job) { { 'args' => job_args } } - let(:queue) { 'default' } + let(:configurator) { described_class.client_configurator } let(:redis_pool) { Sidekiq.redis_pool } - let(:middleware_expected_args) { [worker_class_arg, job, queue, redis_pool] } - let(:expected_middlewares) do + let(:middleware_expected_args) { [worker_class, hash_including({ 'args' => job_args }), queue, redis_pool] } + let(:worker_args) { [worker_class, { 'args' => job_args }, queue, redis_pool] } + let(:all_sidekiq_middlewares) do [ - ::Gitlab::SidekiqMiddleware::WorkerContext::Client, - ::Labkit::Middleware::Sidekiq::Client, - ::Gitlab::SidekiqMiddleware::DuplicateJobs::Client, - ::Gitlab::SidekiqStatus::ClientMiddleware, - ::Gitlab::SidekiqMiddleware::AdminMode::Client, - ::Gitlab::SidekiqMiddleware::SizeLimiter::Client, - ::Gitlab::SidekiqMiddleware::ClientMetrics + ::Gitlab::SidekiqMiddleware::WorkerContext::Client, + ::Labkit::Middleware::Sidekiq::Client, + ::Gitlab::SidekiqMiddleware::DuplicateJobs::Client, + ::Gitlab::SidekiqStatus::ClientMiddleware, + ::Gitlab::SidekiqMiddleware::AdminMode::Client, + ::Gitlab::SidekiqMiddleware::SizeLimiter::Client, + ::Gitlab::SidekiqMiddleware::ClientMetrics, + ::Gitlab::Database::LoadBalancing::SidekiqClientMiddleware ] end - before do - described_class.client_configurator.call(chain) - end - - shared_examples "a client middleware chain" do - # Its possible that a middleware could accidentally omit a yield call - # this will prevent the full middleware chain from being executed. 
- # This test ensures that this does not happen - it "invokes the chain" do - expected_middlewares do |middleware| - expect_any_instance_of(middleware).to receive(:call).with(*middleware_expected_args).once.ordered.and_call_original - end - - expect { |b| chain.invoke(worker_class_arg, job, queue, redis_pool, &b) }.to yield_control.once + context "when load balancing is disabled" do + let(:disabled_sidekiq_middlewares) do + [ + Gitlab::Database::LoadBalancing::SidekiqClientMiddleware + ] end - end - # Sidekiq documentation states that the worker class could be a string - # or a class reference. We should test for both - context "handles string worker_class values" do - let(:worker_class_arg) { worker_class.to_s } + it_behaves_like "a middleware chain", false + it_behaves_like "a middleware chain for mailer", false - it_behaves_like "a client middleware chain" - end + # Sidekiq documentation states that the worker class could be a string + # or a class reference. We should test for both + context "worker_class as string value" do + let(:worker_args) { [worker_class.to_s, { 'args' => job_args }, queue, redis_pool] } + let(:middleware_expected_args) { [worker_class.to_s, hash_including({ 'args' => job_args }), queue, redis_pool] } - context "handles string worker_class values" do - let(:worker_class_arg) { worker_class } + it_behaves_like "a middleware chain", false + it_behaves_like "a middleware chain for mailer", false + end + end - it_behaves_like "a client middleware chain" + context "when load balancing is enabled" do + it_behaves_like "a middleware chain", true + it_behaves_like "a middleware chain for mailer", true end end end diff --git a/spec/lib/gitlab/template/gitlab_ci_yml_template_spec.rb b/spec/lib/gitlab/template/gitlab_ci_yml_template_spec.rb index 26c83ed6793..226fdb9c948 100644 --- a/spec/lib/gitlab/template/gitlab_ci_yml_template_spec.rb +++ b/spec/lib/gitlab/template/gitlab_ci_yml_template_spec.rb @@ -21,6 +21,55 @@ RSpec.describe 
Gitlab::Template::GitlabCiYmlTemplate do end end + describe '.find' do + let_it_be(:project) { create(:project) } + let_it_be(:other_project) { create(:project) } + + described_class::TEMPLATES_WITH_LATEST_VERSION.keys.each do |key| + it "finds the latest template for #{key}" do + result = described_class.find(key, project) + expect(result.full_name).to eq("#{key}.latest.gitlab-ci.yml") + expect(result.content).to be_present + end + + context 'when `redirect_to_latest_template` feature flag is disabled' do + before do + stub_feature_flags("redirect_to_latest_template_#{key.underscore.tr('/', '_')}".to_sym => false) + end + + it "finds the stable template for #{key}" do + result = described_class.find(key, project) + expect(result.full_name).to eq("#{key}.gitlab-ci.yml") + expect(result.content).to be_present + end + end + + context 'when `redirect_to_latest_template` feature flag is enabled on the project' do + before do + stub_feature_flags("redirect_to_latest_template_#{key.underscore.tr('/', '_')}".to_sym => project) + end + + it "finds the latest template for #{key}" do + result = described_class.find(key, project) + expect(result.full_name).to eq("#{key}.latest.gitlab-ci.yml") + expect(result.content).to be_present + end + end + + context 'when `redirect_to_latest_template` feature flag is enabled on the other project' do + before do + stub_feature_flags("redirect_to_latest_template_#{key.underscore.tr('/', '_')}".to_sym => other_project) + end + + it "finds the stable template for #{key}" do + result = described_class.find(key, project) + expect(result.full_name).to eq("#{key}.gitlab-ci.yml") + expect(result.content).to be_present + end + end + end + end + describe '#content' do it 'loads the full file' do gitignore = subject.new(Rails.root.join('lib/gitlab/ci/templates/Ruby.gitlab-ci.yml')) diff --git a/spec/lib/gitlab/time_tracking_formatter_spec.rb b/spec/lib/gitlab/time_tracking_formatter_spec.rb index 8bbd1263057..ab0611e6b6a 100644 --- 
a/spec/lib/gitlab/time_tracking_formatter_spec.rb +++ b/spec/lib/gitlab/time_tracking_formatter_spec.rb @@ -47,5 +47,11 @@ RSpec.describe Gitlab::TimeTrackingFormatter do it { expect(subject).to eq('1w 1d 1h 40m') } end + + context 'handles negative time input' do + let(:num_seconds) { -178_800 } + + it { expect(subject).to eq('-1w 1d 1h 40m') } + end end end diff --git a/spec/lib/gitlab/tracking/standard_context_spec.rb b/spec/lib/gitlab/tracking/standard_context_spec.rb index 289818266bd..a0fb6a270a5 100644 --- a/spec/lib/gitlab/tracking/standard_context_spec.rb +++ b/spec/lib/gitlab/tracking/standard_context_spec.rb @@ -22,7 +22,7 @@ RSpec.describe Gitlab::Tracking::StandardContext do context 'staging' do before do - stub_config_setting(url: 'https://staging.gitlab.com') + stub_config_setting(url: Gitlab::Saas.staging_com_url) end include_examples 'contains environment', 'staging' @@ -30,7 +30,7 @@ RSpec.describe Gitlab::Tracking::StandardContext do context 'production' do before do - stub_config_setting(url: 'https://gitlab.com') + stub_config_setting(url: Gitlab::Saas.com_url) end include_examples 'contains environment', 'production' @@ -38,7 +38,7 @@ RSpec.describe Gitlab::Tracking::StandardContext do context 'org' do before do - stub_config_setting(url: 'https://dev.gitlab.org') + stub_config_setting(url: Gitlab::Saas.dev_url) end include_examples 'contains environment', 'org' diff --git a/spec/lib/gitlab/usage/metric_definition_spec.rb b/spec/lib/gitlab/usage/metric_definition_spec.rb index 92e51b8ea23..1ed639b2f7d 100644 --- a/spec/lib/gitlab/usage/metric_definition_spec.rb +++ b/spec/lib/gitlab/usage/metric_definition_spec.rb @@ -44,7 +44,7 @@ RSpec.describe Gitlab::Usage::MetricDefinition do end it 'has all definitons valid' do - expect { described_class.definitions }.not_to raise_error(Gitlab::Usage::Metric::InvalidMetricError) + expect { described_class.definitions }.not_to raise_error end describe '#key' do @@ -73,6 +73,7 @@ RSpec.describe 
Gitlab::Usage::MetricDefinition do :distribution | 'test' :tier | %w(test ee) :name | 'count_<adjective_describing>_boards' + :repair_issue_url | nil :instrumentation_class | 'Metric_Class' :instrumentation_class | 'metricClass' @@ -103,6 +104,19 @@ RSpec.describe Gitlab::Usage::MetricDefinition do end end end + + context 'conditional validations' do + context 'when metric has broken status' do + it 'has to have repair issue url provided' do + attributes[:status] = 'broken' + attributes.delete(:repair_issue_url) + + expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Usage::Metric::InvalidMetricError)) + + described_class.new(path, attributes).validate! + end + end + end end describe 'statuses' do @@ -153,7 +167,7 @@ RSpec.describe Gitlab::Usage::MetricDefinition do is_expected.to be_one end - it 'when the same meric is defined multiple times raises exception' do + it 'when the same metric is defined multiple times raises exception' do write_metric(metric1, path, yaml_content) write_metric(metric2, path, yaml_content) diff --git a/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb b/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb index 0fb3a69df05..8e02f4f562c 100644 --- a/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb +++ b/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb @@ -25,34 +25,6 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Aggregate, :clean_gitlab_redi end context 'aggregated_metrics_data' do - shared_examples 'db sourced aggregated metrics without database_sourced_aggregated_metrics feature' do - before do - allow_next_instance_of(described_class) do |instance| - allow(instance).to receive(:aggregated_metrics).and_return(aggregated_metrics) - end - end - - context 'with disabled database_sourced_aggregated_metrics feature flag' do - before do - stub_feature_flags(database_sourced_aggregated_metrics: false) - end - - let(:aggregated_metrics) 
do - [ - aggregated_metric(name: "gmau_2", source: "database", time_frame: time_frame) - ] - end - - it 'skips database sourced metrics', :aggregate_failures do - results = {} - params = { start_date: start_date, end_date: end_date, recorded_at: recorded_at } - - expect(sources::PostgresHll).not_to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event1 event2 event3])) - expect(aggregated_metrics_data).to eq(results) - end - end - end - shared_examples 'aggregated_metrics_data' do context 'no aggregated metric is defined' do it 'returns empty hash' do @@ -237,7 +209,6 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Aggregate, :clean_gitlab_redi let(:time_frame) { ['all'] } it_behaves_like 'database_sourced_aggregated_metrics' - it_behaves_like 'db sourced aggregated metrics without database_sourced_aggregated_metrics feature' context 'redis sourced aggregated metrics' do let(:aggregated_metrics) { [aggregated_metric(name: 'gmau_1', time_frame: time_frame)] } @@ -278,7 +249,6 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Aggregate, :clean_gitlab_redi it_behaves_like 'database_sourced_aggregated_metrics' it_behaves_like 'redis_sourced_aggregated_metrics' - it_behaves_like 'db sourced aggregated metrics without database_sourced_aggregated_metrics feature' end describe '.aggregated_metrics_monthly_data' do @@ -289,7 +259,6 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Aggregate, :clean_gitlab_redi it_behaves_like 'database_sourced_aggregated_metrics' it_behaves_like 'redis_sourced_aggregated_metrics' - it_behaves_like 'db sourced aggregated metrics without database_sourced_aggregated_metrics feature' end end end diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_boards_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_boards_metric_spec.rb index 52c1ccdcd47..6f03c5a9ae3 100644 --- a/spec/lib/gitlab/usage/metrics/instrumentations/count_boards_metric_spec.rb +++ 
b/spec/lib/gitlab/usage/metrics/instrumentations/count_boards_metric_spec.rb @@ -5,5 +5,8 @@ require 'spec_helper' RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountBoardsMetric do let_it_be(:board) { create(:board) } - it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', data_source: 'database' }, 1 + let(:expected_value) { 1 } + let(:expected_query) { 'SELECT COUNT("boards"."id") FROM "boards"' } + + it_behaves_like 'a correct instrumented metric value and query', { time_frame: 'all' } end diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_issues_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_issues_metric_spec.rb index c3b59904f41..183aa03dd8a 100644 --- a/spec/lib/gitlab/usage/metrics/instrumentations/count_issues_metric_spec.rb +++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_issues_metric_spec.rb @@ -5,5 +5,8 @@ require 'spec_helper' RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountIssuesMetric do let_it_be(:issue) { create(:issue) } - it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', data_source: 'database' }, 1 + let(:expected_value) { 1 } + let(:expected_query) { 'SELECT COUNT("issues"."id") FROM "issues"' } + + it_behaves_like 'a correct instrumented metric value and query', { time_frame: 'all' } end diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_users_creating_issues_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_users_creating_issues_metric_spec.rb index 9f4686ab6cd..3fb4c3a4e3f 100644 --- a/spec/lib/gitlab/usage/metrics/instrumentations/count_users_creating_issues_metric_spec.rb +++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_users_creating_issues_metric_spec.rb @@ -8,10 +8,18 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountUsersCreatingIssue let_it_be(:old_issue) { create(:issue, author: author, created_at: 2.months.ago) } context 'with all time frame' do - 
it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', data_source: 'database' }, 1 + let(:expected_value) { 1 } + let(:expected_query) { 'SELECT COUNT(DISTINCT "issues"."author_id") FROM "issues"' } + + it_behaves_like 'a correct instrumented metric value and query', { time_frame: 'all' } end context 'for 28d time frame' do - it_behaves_like 'a correct instrumented metric value', { time_frame: '28d', data_source: 'database' }, 1 + let(:expected_value) { 1 } + let(:start) { 30.days.ago.to_s(:db) } + let(:finish) { 2.days.ago.to_s(:db) } + let(:expected_query) { "SELECT COUNT(DISTINCT \"issues\".\"author_id\") FROM \"issues\" WHERE \"issues\".\"created_at\" BETWEEN '#{start}' AND '#{finish}'" } + + it_behaves_like 'a correct instrumented metric value and query', { time_frame: '28d' } end end diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_users_using_approve_quick_action_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_users_using_approve_quick_action_metric_spec.rb deleted file mode 100644 index 7adba825a13..00000000000 --- a/spec/lib/gitlab/usage/metrics/instrumentations/count_users_using_approve_quick_action_metric_spec.rb +++ /dev/null @@ -1,15 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountUsersUsingApproveQuickActionMetric, :clean_gitlab_redis_shared_state do - before do - Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:i_quickactions_approve, values: 1, time: 1.week.ago) - Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:i_quickactions_approve, values: 1, time: 2.weeks.ago) - Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:i_quickactions_approve, values: 2, time: 2.weeks.ago) - Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:i_quickactions_approve, values: 2, time: 2.months.ago) - end - - it_behaves_like 'a correct instrumented metric value', { time_frame: '28d', data_source: 'redis_hll' 
}, 2 - it_behaves_like 'a correct instrumented metric value', { time_frame: '7d', data_source: 'redis_hll' }, 1 -end diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/hostname_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/hostname_metric_spec.rb index 83e07200025..95e159a5bf7 100644 --- a/spec/lib/gitlab/usage/metrics/instrumentations/hostname_metric_spec.rb +++ b/spec/lib/gitlab/usage/metrics/instrumentations/hostname_metric_spec.rb @@ -3,5 +3,7 @@ require 'spec_helper' RSpec.describe Gitlab::Usage::Metrics::Instrumentations::HostnameMetric do - it_behaves_like 'a correct instrumented metric value', { time_frame: 'none', data_source: 'ruby' }, Gitlab.config.gitlab.host + let(:expected_value) { Gitlab.config.gitlab.host } + + it_behaves_like 'a correct instrumented metric value', { time_frame: 'none' } end diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/redis_hll_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/redis_hll_metric_spec.rb new file mode 100644 index 00000000000..347a2c779cb --- /dev/null +++ b/spec/lib/gitlab/usage/metrics/instrumentations/redis_hll_metric_spec.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Usage::Metrics::Instrumentations::RedisHLLMetric, :clean_gitlab_redis_shared_state do + before do + Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:i_quickactions_approve, values: 1, time: 1.week.ago) + Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:i_quickactions_approve, values: 1, time: 2.weeks.ago) + Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:i_quickactions_approve, values: 2, time: 2.weeks.ago) + Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:i_quickactions_approve, values: 2, time: 2.months.ago) + end + + context 'for 28d' do + let(:expected_value) { 2 } + + it_behaves_like 'a correct instrumented metric value', { time_frame: '28d', options: { events: ['i_quickactions_approve'] } } + end + + 
context 'for 7d' do + let(:expected_value) { 1 } + + it_behaves_like 'a correct instrumented metric value', { time_frame: '7d', options: { events: ['i_quickactions_approve'] } } + end + + it 'raise exception if events options is not present' do + expect { described_class.new(time_frame: '28d') }.to raise_error(ArgumentError) + end +end diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/uuid_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/uuid_metric_spec.rb index 212dd3dc851..33e7e85611d 100644 --- a/spec/lib/gitlab/usage/metrics/instrumentations/uuid_metric_spec.rb +++ b/spec/lib/gitlab/usage/metrics/instrumentations/uuid_metric_spec.rb @@ -3,5 +3,7 @@ require 'spec_helper' RSpec.describe Gitlab::Usage::Metrics::Instrumentations::UuidMetric do - it_behaves_like 'a correct instrumented metric value', { time_frame: 'none' }, Gitlab::CurrentSettings.uuid + let(:expected_value) { Gitlab::CurrentSettings.uuid } + + it_behaves_like 'a correct instrumented metric value', { time_frame: 'none' } end diff --git a/spec/lib/gitlab/usage/metrics/name_suggestion_spec.rb b/spec/lib/gitlab/usage/metrics/name_suggestion_spec.rb new file mode 100644 index 00000000000..6955fbcaf5a --- /dev/null +++ b/spec/lib/gitlab/usage/metrics/name_suggestion_spec.rb @@ -0,0 +1,94 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Usage::Metrics::NameSuggestion do + describe '#for' do + shared_examples 'name suggestion' do + it 'return correct name' do + expect(described_class.for(operation, relation: relation, column: column)).to match name_suggestion + end + end + + context 'for count with nil column' do + it_behaves_like 'name suggestion' do + let(:operation) { :count } + let(:relation) { Board } + let(:column) { nil } + let(:name_suggestion) { /count_boards/ } + end + end + + context 'for count with column :id' do + it_behaves_like 'name suggestion' do + let(:operation) { :count } + let(:relation) { Board } + let(:column) { :id } + 
let(:name_suggestion) { /count_boards/ } + end + end + + context 'for count distinct with column defined metrics' do + it_behaves_like 'name suggestion' do + let(:operation) { :distinct_count } + let(:relation) { ZoomMeeting } + let(:column) { :issue_id } + let(:name_suggestion) { /count_distinct_issue_id_from_zoom_meetings/ } + end + end + + context 'joined relations' do + context 'counted attribute comes from source relation' do + it_behaves_like 'name suggestion' do + # corresponding metric is collected with count(Issue.with_alert_management_alerts.not_authored_by(::User.alert_bot), start: issue_minimum_id, finish: issue_maximum_id) + let(:operation) { :count } + let(:relation) { Issue.with_alert_management_alerts.not_authored_by(::User.alert_bot) } + let(:column) { nil } + let(:name_suggestion) { /count_<adjective describing\: '\(issues\.author_id != \d+\)'>_issues_<with>_alert_management_alerts/ } + end + end + end + + context 'strips off time period constraint' do + it_behaves_like 'name suggestion' do + # corresponding metric is collected with distinct_count(::Clusters::Cluster.aws_installed.enabled.where(time_period), :user_id) + let(:operation) { :distinct_count } + let(:relation) { ::Clusters::Cluster.aws_installed.enabled.where(created_at: 30.days.ago..2.days.ago ) } + let(:column) { :user_id } + let(:constraints) { /<adjective describing\: '\(clusters.provider_type = \d+ AND \(cluster_providers_aws\.status IN \(\d+\)\) AND clusters\.enabled = TRUE\)'>/ } + let(:name_suggestion) { /count_distinct_user_id_from_#{constraints}_clusters_<with>_#{constraints}_cluster_providers_aws/ } + end + end + + context 'for sum metrics' do + it_behaves_like 'name suggestion' do + # corresponding metric is collected with sum(JiraImportState.finished, :imported_issues_count) + let(:key_path) { 'counts.jira_imports_total_imported_issues_count' } + let(:operation) { :sum } + let(:relation) { JiraImportState.finished } + let(:column) { :imported_issues_count} + 
let(:name_suggestion) { /sum_imported_issues_count_from_<adjective describing\: '\(jira_imports\.status = \d+\)'>_jira_imports/ } + end + end + + context 'for redis metrics' do + it_behaves_like 'name suggestion' do + # corresponding metric is collected with redis_usage_data { unique_visit_service.unique_visits_for(targets: :analytics) } + let(:operation) { :redis } + let(:column) { nil } + let(:relation) { nil } + let(:name_suggestion) { /<please fill metric name, suggested format is: {subject}_{verb}{ing|ed}_{object} eg: users_creating_epics or merge_requests_viewed_in_single_file_mode>/ } + end + end + + context 'for alt_usage_data metrics' do + it_behaves_like 'name suggestion' do + # corresponding metric is collected with alt_usage_data(fallback: nil) { operating_system } + let(:operation) { :alt } + let(:column) { nil } + let(:relation) { nil } + let(:name_suggestion) { /<please fill metric name>/ } + end + end + end +end diff --git a/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb b/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb index 34b073b4729..b4ab9d4861b 100644 --- a/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb +++ b/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb @@ -33,24 +33,6 @@ RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::Generator do end context 'joined relations' do - context 'counted attribute comes from joined relation' do - it_behaves_like 'name suggestion' do - # corresponding metric is collected with: - # distinct_count( - # ::Clusters::Applications::Ingress.modsecurity_enabled.logging - # .joins(cluster: :deployments) - # .merge(::Clusters::Cluster.enabled) - # .merge(Deployment.success), - # ::Deployment.arel_table[:environment_id] - # ) - let(:key_path) { 'counts.ingress_modsecurity_logging' } - let(:name_suggestion) do - constrains = /'\(clusters_applications_ingress\.modsecurity_enabled = TRUE AND clusters_applications_ingress\.modsecurity_mode = \d+ 
AND clusters.enabled = TRUE AND deployments.status = \d+\)'/ - /count_distinct_environment_id_from_<adjective describing\: #{constrains}>_deployments_<with>_<adjective describing\: #{constrains}>_clusters_<having>_<adjective describing\: #{constrains}>_clusters_applications_ingress/ - end - end - end - context 'counted attribute comes from source relation' do it_behaves_like 'name suggestion' do # corresponding metric is collected with count(Issue.with_alert_management_alerts.not_authored_by(::User.alert_bot), start: issue_minimum_id, finish: issue_maximum_id) diff --git a/spec/lib/gitlab/usage/metrics/query_spec.rb b/spec/lib/gitlab/usage/metrics/query_spec.rb new file mode 100644 index 00000000000..60c8d044a64 --- /dev/null +++ b/spec/lib/gitlab/usage/metrics/query_spec.rb @@ -0,0 +1,51 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Usage::Metrics::Query do + describe '.count' do + it 'returns the raw SQL' do + expect(described_class.for(:count, User)).to eq('SELECT COUNT("users"."id") FROM "users"') + end + + it 'does not mix a nil column with keyword arguments' do + expect(described_class.for(:count, User, nil)).to eq('SELECT COUNT("users"."id") FROM "users"') + end + end + + describe '.distinct_count' do + it 'returns the raw SQL' do + expect(described_class.for(:distinct_count, Issue, :author_id)).to eq('SELECT COUNT(DISTINCT "issues"."author_id") FROM "issues"') + end + + it 'does not mix a nil column with keyword arguments' do + expect(described_class.for(:distinct_count, Issue, nil)).to eq('SELECT COUNT(DISTINCT "issues"."id") FROM "issues"') + end + end + + describe '.sum' do + it 'returns the raw SQL' do + expect(described_class.for(:sum, Issue, :weight)).to eq('SELECT SUM("issues"."weight") FROM "issues"') + end + end + + describe 'estimate_batch_distinct_count' do + it 'returns the raw SQL' do + expect(described_class.for(:estimate_batch_distinct_count, Issue, :author_id)).to eq('SELECT COUNT(DISTINCT 
"issues"."author_id") FROM "issues"') + end + end + + describe '.histogram' do + it 'returns the histogram sql' do + expect(described_class.for(:histogram, AlertManagement::HttpIntegration.active, + :project_id, buckets: 1..2, bucket_size: 101)) + .to match(/^WITH "count_cte" AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported}/) + end + end + + describe 'other' do + it 'raise ArgumentError error' do + expect { described_class.for(:other, nil) }.to raise_error(ArgumentError, 'other operation not supported') + end + end +end diff --git a/spec/lib/gitlab/usage_data/topology_spec.rb b/spec/lib/gitlab/usage_data/topology_spec.rb index b8462e0290c..737580e3493 100644 --- a/spec/lib/gitlab/usage_data/topology_spec.rb +++ b/spec/lib/gitlab/usage_data/topology_spec.rb @@ -95,7 +95,7 @@ RSpec.describe Gitlab::UsageData::Topology do }, { name: 'web', - server: 'unicorn' + server: 'puma' } ] } @@ -724,7 +724,7 @@ RSpec.describe Gitlab::UsageData::Topology do }, # instance 2 { - 'metric' => { 'instance' => 'instance2:8080', 'job' => 'gitlab-rails', 'server' => 'unicorn' }, + 'metric' => { 'instance' => 'instance2:8080', 'job' => 'gitlab-rails', 'server' => 'puma' }, 'value' => [1000, '1'] } ]) diff --git a/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb index 6486a5a22ba..041fc2f20a8 100644 --- a/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb +++ b/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb @@ -352,14 +352,6 @@ RSpec.describe Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter, :cl it_behaves_like 'a tracked merge request unique event' do let(:action) { described_class::MR_INCLUDING_CI_CONFIG_ACTION } end - - context 'when FF usage_data_o_pipeline_authoring_unique_users_pushing_mr_ciconfigfile is disabled' do - before do - 
stub_feature_flags(usage_data_o_pipeline_authoring_unique_users_pushing_mr_ciconfigfile: false) - end - - it_behaves_like 'not tracked merge request unique event' - end end context 'when merge request does not include any ci config change' do @@ -386,4 +378,20 @@ RSpec.describe Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter, :cl let(:action) { described_class::MR_LABELS_CHANGED_ACTION } end end + + describe '.track_loading_conflict_ui_action' do + subject { described_class.track_loading_conflict_ui_action(user: user) } + + it_behaves_like 'a tracked merge request unique event' do + let(:action) { described_class::MR_LOAD_CONFLICT_UI_ACTION } + end + end + + describe '.track_resolve_conflict_action' do + subject { described_class.track_resolve_conflict_action(user: user) } + + it_behaves_like 'a tracked merge request unique event' do + let(:action) { described_class::MR_RESOLVE_CONFLICT_ACTION } + end + end end diff --git a/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb index c484595ee71..78cc27c8569 100644 --- a/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb +++ b/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb @@ -14,7 +14,7 @@ RSpec.describe Gitlab::UsageDataCounters::PackageEventCounter, :clean_gitlab_red end it 'includes the right events' do - expect(described_class::KNOWN_EVENTS.size).to eq 51 + expect(described_class::KNOWN_EVENTS.size).to eq 52 end described_class::KNOWN_EVENTS.each do |event| diff --git a/spec/lib/gitlab/usage_data_queries_spec.rb b/spec/lib/gitlab/usage_data_queries_spec.rb index 695859c8e6e..438ae3efd11 100644 --- a/spec/lib/gitlab/usage_data_queries_spec.rb +++ b/spec/lib/gitlab/usage_data_queries_spec.rb @@ -13,9 +13,7 @@ RSpec.describe Gitlab::UsageDataQueries do end it 'does not mix a nil column with keyword arguments' do - expect(described_class).to receive(:raw_sql).with(User, nil) - - 
described_class.count(User, start: 1, finish: 2) + expect(described_class.count(User, nil)).to eq('SELECT COUNT("users"."id") FROM "users"') end end @@ -25,9 +23,7 @@ RSpec.describe Gitlab::UsageDataQueries do end it 'does not mix a nil column with keyword arguments' do - expect(described_class).to receive(:raw_sql).with(Issue, nil, :distinct) - - described_class.distinct_count(Issue, nil, start: 1, finish: 2) + expect(described_class.distinct_count(Issue, nil, start: 1, finish: 2)).to eq('SELECT COUNT(DISTINCT "issues"."id") FROM "issues"') end end diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb index d4b6ac09261..ea82de186f5 100644 --- a/spec/lib/gitlab/usage_data_spec.rb +++ b/spec/lib/gitlab/usage_data_spec.rb @@ -91,7 +91,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do expect(described_class.usage_activity_by_stage_package({})).to eq( projects_with_packages: 2 ) - expect(described_class.usage_activity_by_stage_package(described_class.last_28_days_time_period)).to eq( + expect(described_class.usage_activity_by_stage_package(described_class.monthly_time_range_db_params)).to eq( projects_with_packages: 1 ) end @@ -135,7 +135,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do project_clusters_disabled: 2, project_clusters_enabled: 10 ) - expect(described_class.usage_activity_by_stage_configure(described_class.last_28_days_time_period)).to include( + expect(described_class.usage_activity_by_stage_configure(described_class.monthly_time_range_db_params)).to include( clusters_applications_cert_managers: 1, clusters_applications_helm: 1, clusters_applications_ingress: 1, @@ -185,7 +185,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do snippets: 2, suggestions: 2 ) - expect(described_class.usage_activity_by_stage_create(described_class.last_28_days_time_period)).to include( + expect(described_class.usage_activity_by_stage_create(described_class.monthly_time_range_db_params)).to include( 
deploy_keys: 1, keys: 1, merge_requests: 1, @@ -225,7 +225,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do omniauth_providers: ['google_oauth2'], user_auth_by_provider: { 'group_saml' => 2, 'ldap' => 4, 'standard' => 0, 'two-factor' => 0, 'two-factor-via-u2f-device' => 0, "two-factor-via-webauthn-device" => 0 } ) - expect(described_class.usage_activity_by_stage_manage(described_class.last_28_days_time_period)).to include( + expect(described_class.usage_activity_by_stage_manage(described_class.monthly_time_range_db_params)).to include( events: 1, groups: 1, users_created: 3, @@ -252,7 +252,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do unique_users_all_imports: 10 ) - expect(described_class.usage_activity_by_stage_manage(described_class.last_28_days_time_period)).to include( + expect(described_class.usage_activity_by_stage_manage(described_class.monthly_time_range_db_params)).to include( unique_users_all_imports: 5 ) end @@ -294,7 +294,8 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do gitlab: 2, gitlab_migration: 2, gitlab_project: 2, - manifest: 2 + manifest: 2, + total: 18 }, issue_imports: { jira: 2, @@ -326,7 +327,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do groups_imported: Gitlab::UsageData::DEPRECATED_VALUE } ) - expect(described_class.usage_activity_by_stage_manage(described_class.last_28_days_time_period)).to include( + expect(described_class.usage_activity_by_stage_manage(described_class.monthly_time_range_db_params)).to include( { bulk_imports: { gitlab_v1: 1, @@ -341,7 +342,8 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do gitlab: 1, gitlab_migration: 1, gitlab_project: 1, - manifest: 1 + manifest: 1, + total: 9 }, issue_imports: { jira: 1, @@ -371,7 +373,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do csv: Gitlab::UsageData::DEPRECATED_VALUE }, groups_imported: Gitlab::UsageData::DEPRECATED_VALUE - } ) end @@ -410,7 +411,7 @@ RSpec.describe Gitlab::UsageData, 
:aggregate_failures do projects_with_enabled_alert_integrations_histogram: { '1' => 2 } ) - data_28_days = described_class.usage_activity_by_stage_monitor(described_class.last_28_days_time_period) + data_28_days = described_class.usage_activity_by_stage_monitor(described_class.monthly_time_range_db_params) expect(data_28_days).to include( clusters: 1, clusters_applications_prometheus: 1, @@ -449,7 +450,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do projects_jira_dvcs_cloud_active: 2, projects_jira_dvcs_server_active: 2 ) - expect(described_class.usage_activity_by_stage_plan(described_class.last_28_days_time_period)).to include( + expect(described_class.usage_activity_by_stage_plan(described_class.monthly_time_range_db_params)).to include( issues: 2, notes: 1, projects: 1, @@ -478,7 +479,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do releases: 2, successful_deployments: 2 ) - expect(described_class.usage_activity_by_stage_release(described_class.last_28_days_time_period)).to include( + expect(described_class.usage_activity_by_stage_release(described_class.monthly_time_range_db_params)).to include( deployments: 1, failed_deployments: 1, releases: 1, @@ -512,7 +513,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do ci_triggers: 2, clusters_applications_runner: 2 ) - expect(described_class.usage_activity_by_stage_verify(described_class.last_28_days_time_period)).to include( + expect(described_class.usage_activity_by_stage_verify(described_class.monthly_time_range_db_params)).to include( ci_builds: 1, ci_external_pipelines: 1, ci_internal_pipelines: 1, @@ -575,7 +576,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do expect(count_data[:projects_with_error_tracking_enabled]).to eq(1) expect(count_data[:projects_with_tracing_enabled]).to eq(1) expect(count_data[:projects_with_enabled_alert_integrations]).to eq(1) - expect(count_data[:projects_with_prometheus_alerts]).to eq(2) 
expect(count_data[:projects_with_terraform_reports]).to eq(2) expect(count_data[:projects_with_terraform_states]).to eq(2) expect(count_data[:projects_with_alerts_created]).to eq(1) @@ -706,10 +706,9 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do end describe '.system_usage_data_monthly' do - let_it_be(:project) { create(:project) } + let_it_be(:project) { create(:project, created_at: 3.days.ago) } before do - project = create(:project) env = create(:environment) create(:package, project: project, created_at: 3.days.ago) create(:package, created_at: 2.months.ago, project: project) @@ -742,6 +741,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do expect(counts_monthly[:personal_snippets]).to eq(1) expect(counts_monthly[:project_snippets]).to eq(1) expect(counts_monthly[:projects_with_alerts_created]).to eq(1) + expect(counts_monthly[:projects]).to eq(1) expect(counts_monthly[:packages]).to eq(1) expect(counts_monthly[:promoted_issues]).to eq(1) end @@ -966,138 +966,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do end end - describe '.ingress_modsecurity_usage' do - subject { described_class.ingress_modsecurity_usage } - - let(:environment) { create(:environment) } - let(:project) { environment.project } - let(:environment_scope) { '*' } - let(:deployment) { create(:deployment, :success, environment: environment, project: project, cluster: cluster) } - let(:cluster) { create(:cluster, environment_scope: environment_scope, projects: [project]) } - let(:ingress_mode) { :modsecurity_blocking } - let!(:ingress) { create(:clusters_applications_ingress, ingress_mode, cluster: cluster) } - - context 'when cluster is disabled' do - let(:cluster) { create(:cluster, :disabled, projects: [project]) } - - it 'gathers ingress data' do - expect(subject[:ingress_modsecurity_logging]).to eq(0) - expect(subject[:ingress_modsecurity_blocking]).to eq(0) - expect(subject[:ingress_modsecurity_disabled]).to eq(0) - 
expect(subject[:ingress_modsecurity_not_installed]).to eq(0) - end - end - - context 'when deployment is unsuccessful' do - let!(:deployment) { create(:deployment, :failed, environment: environment, project: project, cluster: cluster) } - - it 'gathers ingress data' do - expect(subject[:ingress_modsecurity_logging]).to eq(0) - expect(subject[:ingress_modsecurity_blocking]).to eq(0) - expect(subject[:ingress_modsecurity_disabled]).to eq(0) - expect(subject[:ingress_modsecurity_not_installed]).to eq(0) - end - end - - context 'when deployment is successful' do - let!(:deployment) { create(:deployment, :success, environment: environment, project: project, cluster: cluster) } - - context 'when modsecurity is in blocking mode' do - it 'gathers ingress data' do - expect(subject[:ingress_modsecurity_logging]).to eq(0) - expect(subject[:ingress_modsecurity_blocking]).to eq(1) - expect(subject[:ingress_modsecurity_disabled]).to eq(0) - expect(subject[:ingress_modsecurity_not_installed]).to eq(0) - end - end - - context 'when modsecurity is in logging mode' do - let(:ingress_mode) { :modsecurity_logging } - - it 'gathers ingress data' do - expect(subject[:ingress_modsecurity_logging]).to eq(1) - expect(subject[:ingress_modsecurity_blocking]).to eq(0) - expect(subject[:ingress_modsecurity_disabled]).to eq(0) - expect(subject[:ingress_modsecurity_not_installed]).to eq(0) - end - end - - context 'when modsecurity is disabled' do - let(:ingress_mode) { :modsecurity_disabled } - - it 'gathers ingress data' do - expect(subject[:ingress_modsecurity_logging]).to eq(0) - expect(subject[:ingress_modsecurity_blocking]).to eq(0) - expect(subject[:ingress_modsecurity_disabled]).to eq(1) - expect(subject[:ingress_modsecurity_not_installed]).to eq(0) - end - end - - context 'when modsecurity is not installed' do - let(:ingress_mode) { :modsecurity_not_installed } - - it 'gathers ingress data' do - expect(subject[:ingress_modsecurity_logging]).to eq(0) - 
expect(subject[:ingress_modsecurity_blocking]).to eq(0) - expect(subject[:ingress_modsecurity_disabled]).to eq(0) - expect(subject[:ingress_modsecurity_not_installed]).to eq(1) - end - end - - context 'with multiple projects' do - let(:environment_2) { create(:environment) } - let(:project_2) { environment_2.project } - let(:cluster_2) { create(:cluster, environment_scope: environment_scope, projects: [project_2]) } - let!(:ingress_2) { create(:clusters_applications_ingress, :modsecurity_logging, cluster: cluster_2) } - let!(:deployment_2) { create(:deployment, :success, environment: environment_2, project: project_2, cluster: cluster_2) } - - it 'gathers non-duplicated ingress data' do - expect(subject[:ingress_modsecurity_logging]).to eq(1) - expect(subject[:ingress_modsecurity_blocking]).to eq(1) - expect(subject[:ingress_modsecurity_disabled]).to eq(0) - expect(subject[:ingress_modsecurity_not_installed]).to eq(0) - end - end - - context 'with multiple deployments' do - let!(:deployment_2) { create(:deployment, :success, environment: environment, project: project, cluster: cluster) } - - it 'gathers non-duplicated ingress data' do - expect(subject[:ingress_modsecurity_logging]).to eq(0) - expect(subject[:ingress_modsecurity_blocking]).to eq(1) - expect(subject[:ingress_modsecurity_disabled]).to eq(0) - expect(subject[:ingress_modsecurity_not_installed]).to eq(0) - end - end - - context 'with multiple projects' do - let(:environment_2) { create(:environment) } - let(:project_2) { environment_2.project } - let!(:deployment_2) { create(:deployment, :success, environment: environment_2, project: project_2, cluster: cluster) } - let(:cluster) { create(:cluster, environment_scope: environment_scope, projects: [project, project_2]) } - - it 'gathers ingress data' do - expect(subject[:ingress_modsecurity_logging]).to eq(0) - expect(subject[:ingress_modsecurity_blocking]).to eq(2) - expect(subject[:ingress_modsecurity_disabled]).to eq(0) - 
expect(subject[:ingress_modsecurity_not_installed]).to eq(0) - end - end - - context 'with multiple environments' do - let!(:environment_2) { create(:environment, project: project) } - let!(:deployment_2) { create(:deployment, :success, environment: environment_2, project: project, cluster: cluster) } - - it 'gathers ingress data' do - expect(subject[:ingress_modsecurity_logging]).to eq(0) - expect(subject[:ingress_modsecurity_blocking]).to eq(2) - expect(subject[:ingress_modsecurity_disabled]).to eq(0) - expect(subject[:ingress_modsecurity_not_installed]).to eq(0) - end - end - end - end - describe '.grafana_embed_usage_data' do subject { described_class.grafana_embed_usage_data } @@ -1499,7 +1367,8 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do "in_product_marketing_email_team_1_sent" => -1, "in_product_marketing_email_team_1_cta_clicked" => -1, "in_product_marketing_email_team_2_sent" => -1, - "in_product_marketing_email_team_2_cta_clicked" => -1 + "in_product_marketing_email_team_2_cta_clicked" => -1, + "in_product_marketing_email_experience_0_sent" => -1 } expect(subject).to eq(expected_data) @@ -1537,7 +1406,8 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do "in_product_marketing_email_team_1_sent" => 0, "in_product_marketing_email_team_1_cta_clicked" => 0, "in_product_marketing_email_team_2_sent" => 0, - "in_product_marketing_email_team_2_cta_clicked" => 0 + "in_product_marketing_email_team_2_cta_clicked" => 0, + "in_product_marketing_email_experience_0_sent" => 0 } expect(subject).to eq(expected_data) diff --git a/spec/lib/gitlab/utils/measuring_spec.rb b/spec/lib/gitlab/utils/measuring_spec.rb index 4931ebf26f0..5dad79b1c5f 100644 --- a/spec/lib/gitlab/utils/measuring_spec.rb +++ b/spec/lib/gitlab/utils/measuring_spec.rb @@ -8,7 +8,7 @@ RSpec.describe Gitlab::Utils::Measuring do let(:result) { "result" } before do - allow(ActiveSupport::Logger).to receive(:logger_outputs_to?).with(Gitlab::Utils::Measuring.logger, 
STDOUT).and_return(false) + allow(ActiveSupport::Logger).to receive(:logger_outputs_to?).with(Gitlab::Utils::Measuring.logger, $stdout).and_return(false) end let(:measurement) { described_class.new(base_log_data) } diff --git a/spec/lib/gitlab/workhorse_spec.rb b/spec/lib/gitlab/workhorse_spec.rb index d40ecc7e04e..09f90a3e5b6 100644 --- a/spec/lib/gitlab/workhorse_spec.rb +++ b/spec/lib/gitlab/workhorse_spec.rb @@ -60,37 +60,6 @@ RSpec.describe Gitlab::Workhorse do }.deep_stringify_keys) end - context 'when include_lfs_blobs_in_archive is disabled' do - before do - stub_feature_flags(include_lfs_blobs_in_archive: false) - end - - it 'sets include_lfs_blobs to false' do - key, command, params = decode_workhorse_header(subject) - - expect(key).to eq('Gitlab-Workhorse-Send-Data') - expect(command).to eq('git-archive') - expect(params).to eq({ - 'GitalyServer' => { - features: { 'gitaly-feature-enforce-requests-limits' => 'true' }, - address: Gitlab::GitalyClient.address(project.repository_storage), - token: Gitlab::GitalyClient.token(project.repository_storage) - }, - 'ArchivePath' => metadata['ArchivePath'], - 'GetArchiveRequest' => Base64.encode64( - Gitaly::GetArchiveRequest.new( - repository: repository.gitaly_repository, - commit_id: metadata['CommitId'], - prefix: metadata['ArchivePrefix'], - format: Gitaly::GetArchiveRequest::Format::ZIP, - path: path, - include_lfs_blobs: false - ).to_proto - ) - }.deep_stringify_keys) - end - end - context 'when archive caching is disabled' do let(:cache_disabled) { true } diff --git a/spec/lib/gitlab_spec.rb b/spec/lib/gitlab_spec.rb index 4df00eaa439..869eaf26772 100644 --- a/spec/lib/gitlab_spec.rb +++ b/spec/lib/gitlab_spec.rb @@ -70,20 +70,21 @@ RSpec.describe Gitlab do end describe '.com?' 
do - it 'is true when on GitLab.com' do - stub_config_setting(url: 'https://gitlab.com') + it "is true when on #{Gitlab::Saas.com_url}" do + stub_config_setting(url: Gitlab::Saas.com_url) expect(described_class.com?).to eq true end - it 'is true when on staging' do - stub_config_setting(url: 'https://staging.gitlab.com') + it "is true when on #{Gitlab::Saas.staging_com_url}" do + stub_config_setting(url: Gitlab::Saas.staging_com_url) expect(described_class.com?).to eq true end it 'is true when on other gitlab subdomain' do - stub_config_setting(url: 'https://example.gitlab.com') + url_with_subdomain = Gitlab::Saas.com_url.gsub('https://', 'https://example.') + stub_config_setting(url: url_with_subdomain) expect(described_class.com?).to eq true end @@ -118,14 +119,14 @@ RSpec.describe Gitlab do describe '.staging?' do subject { described_class.staging? } - it 'is false when on GitLab.com' do - stub_config_setting(url: 'https://gitlab.com') + it "is false when on #{Gitlab::Saas.com_url}" do + stub_config_setting(url: Gitlab::Saas.com_url) expect(subject).to eq false end - it 'is true when on staging' do - stub_config_setting(url: 'https://staging.gitlab.com') + it "is true when on #{Gitlab::Saas.staging_com_url}" do + stub_config_setting(url: Gitlab::Saas.staging_com_url) expect(subject).to eq true end diff --git a/spec/lib/google_api/cloud_platform/client_spec.rb b/spec/lib/google_api/cloud_platform/client_spec.rb index b674ae0218f..3dd8f7c413e 100644 --- a/spec/lib/google_api/cloud_platform/client_spec.rb +++ b/spec/lib/google_api/cloud_platform/client_spec.rb @@ -91,7 +91,6 @@ RSpec.describe GoogleApi::CloudPlatform::Client do cluster: { name: cluster_name, initial_node_count: cluster_size, - initial_cluster_version: '1.18', node_config: { machine_type: machine_type, oauth_scopes: [ @@ -101,7 +100,6 @@ RSpec.describe GoogleApi::CloudPlatform::Client do ] }, master_auth: { - username: 'admin', client_certificate_config: { issue_client_certificate: true } diff --git 
a/spec/lib/mattermost/client_spec.rb b/spec/lib/mattermost/client_spec.rb index 32755d1103c..5d57a226baf 100644 --- a/spec/lib/mattermost/client_spec.rb +++ b/spec/lib/mattermost/client_spec.rb @@ -14,13 +14,13 @@ RSpec.describe Mattermost::Client do it 'yields an error on malformed JSON' do bad_json = Struct::Request.new("I'm not json", true) - expect { subject.send(:json_response, bad_json) }.to raise_error(Mattermost::ClientError) + expect { subject.send(:json_response, bad_json) }.to raise_error(::Mattermost::ClientError) end it 'shows a client error if the request was unsuccessful' do bad_request = Struct::Request.new("true", false) - expect { subject.send(:json_response, bad_request) }.to raise_error(Mattermost::ClientError) + expect { subject.send(:json_response, bad_request) }.to raise_error(::Mattermost::ClientError) end end end diff --git a/spec/lib/mattermost/command_spec.rb b/spec/lib/mattermost/command_spec.rb index 0f2711e0b11..18cd1ff97a6 100644 --- a/spec/lib/mattermost/command_spec.rb +++ b/spec/lib/mattermost/command_spec.rb @@ -6,10 +6,10 @@ RSpec.describe Mattermost::Command do let(:params) { { 'token' => 'token', team_id: 'abc' } } before do - session = Mattermost::Session.new(nil) + session = ::Mattermost::Session.new(nil) session.base_uri = 'http://mattermost.example.com' - allow_any_instance_of(Mattermost::Client).to receive(:with_session) + allow_any_instance_of(::Mattermost::Client).to receive(:with_session) .and_yield(session) end @@ -57,7 +57,7 @@ RSpec.describe Mattermost::Command do end it 'raises an error with message' do - expect { subject }.to raise_error(Mattermost::Error, 'This trigger word is already in use. Please choose another word.') + expect { subject }.to raise_error(::Mattermost::Error, 'This trigger word is already in use. 
Please choose another word.') end end end diff --git a/spec/lib/mattermost/session_spec.rb b/spec/lib/mattermost/session_spec.rb index 67ccb48e3a7..e2e1b4c28c7 100644 --- a/spec/lib/mattermost/session_spec.rb +++ b/spec/lib/mattermost/session_spec.rb @@ -33,7 +33,7 @@ RSpec.describe Mattermost::Session, type: :request do context 'without oauth uri' do it 'makes a request to the oauth uri' do - expect { subject.with_session }.to raise_error(Mattermost::NoSessionError) + expect { subject.with_session }.to raise_error(::Mattermost::NoSessionError) end end @@ -49,7 +49,7 @@ RSpec.describe Mattermost::Session, type: :request do it 'can not create a session' do expect do subject.with_session - end.to raise_error(Mattermost::NoSessionError) + end.to raise_error(::Mattermost::NoSessionError) end end @@ -113,13 +113,13 @@ RSpec.describe Mattermost::Session, type: :request do expect_to_cancel_exclusive_lease(lease_key, 'uuid') # Cannot set up a session, but we should still cancel the lease - expect { subject.with_session }.to raise_error(Mattermost::NoSessionError) + expect { subject.with_session }.to raise_error(::Mattermost::NoSessionError) end it 'returns a NoSessionError error without lease' do stub_exclusive_lease_taken(lease_key) - expect { subject.with_session }.to raise_error(Mattermost::NoSessionError) + expect { subject.with_session }.to raise_error(::Mattermost::NoSessionError) end end end diff --git a/spec/lib/mattermost/team_spec.rb b/spec/lib/mattermost/team_spec.rb index e3ef5ff5377..b2db770c9b9 100644 --- a/spec/lib/mattermost/team_spec.rb +++ b/spec/lib/mattermost/team_spec.rb @@ -7,7 +7,7 @@ RSpec.describe Mattermost::Team do session = Mattermost::Session.new(nil) session.base_uri = 'http://mattermost.example.com' - allow_any_instance_of(Mattermost::Client).to receive(:with_session) + allow_any_instance_of(::Mattermost::Client).to receive(:with_session) .and_yield(session) end @@ -65,7 +65,7 @@ RSpec.describe Mattermost::Team do end it 'raises an error with 
message' do - expect { subject }.to raise_error(Mattermost::Error, 'Cannot list teams.') + expect { subject }.to raise_error(::Mattermost::Error, 'Cannot list teams.') end end end @@ -123,7 +123,7 @@ RSpec.describe Mattermost::Team do end it 'raises an error with message' do - expect { subject }.to raise_error(Mattermost::Error, 'A team with that name already exists') + expect { subject }.to raise_error(::Mattermost::Error, 'A team with that name already exists') end end end @@ -169,7 +169,7 @@ RSpec.describe Mattermost::Team do end it 'raises an error with message' do - expect { subject }.to raise_error(Mattermost::Error, "We couldn't find the existing team") + expect { subject }.to raise_error(::Mattermost::Error, "We couldn't find the existing team") end end end diff --git a/spec/lib/peek/views/active_record_spec.rb b/spec/lib/peek/views/active_record_spec.rb index 9eeeca4de61..e5aae2822ed 100644 --- a/spec/lib/peek/views/active_record_spec.rb +++ b/spec/lib/peek/views/active_record_spec.rb @@ -5,16 +5,17 @@ require 'spec_helper' RSpec.describe Peek::Views::ActiveRecord, :request_store do subject { Peek.views.find { |v| v.instance_of?(Peek::Views::ActiveRecord) } } - let(:connection_1) { double(:connection) } - let(:connection_2) { double(:connection) } - let(:connection_3) { double(:connection) } + let(:connection_replica) { double(:connection_replica) } + let(:connection_primary_1) { double(:connection_primary) } + let(:connection_primary_2) { double(:connection_primary) } + let(:connection_unknown) { double(:connection_unknown) } let(:event_1) do { name: 'SQL', sql: 'SELECT * FROM users WHERE id = 10', cached: false, - connection: connection_1 + connection: connection_primary_1 } end @@ -23,7 +24,7 @@ RSpec.describe Peek::Views::ActiveRecord, :request_store do name: 'SQL', sql: 'SELECT * FROM users WHERE id = 10', cached: true, - connection: connection_2 + connection: connection_replica } end @@ -32,55 +33,141 @@ RSpec.describe Peek::Views::ActiveRecord, 
:request_store do name: 'SQL', sql: 'UPDATE users SET admin = true WHERE id = 10', cached: false, - connection: connection_3 + connection: connection_primary_2 + } + end + + let(:event_4) do + { + name: 'SCHEMA', + sql: 'SELECT VERSION()', + cached: false, + connection: connection_unknown } end before do allow(Gitlab::PerformanceBar).to receive(:enabled_for_request?).and_return(true) - allow(connection_1).to receive(:transaction_open?).and_return(false) - allow(connection_2).to receive(:transaction_open?).and_return(false) - allow(connection_3).to receive(:transaction_open?).and_return(true) + allow(connection_replica).to receive(:transaction_open?).and_return(false) + allow(connection_primary_1).to receive(:transaction_open?).and_return(false) + allow(connection_primary_2).to receive(:transaction_open?).and_return(true) + allow(connection_unknown).to receive(:transaction_open?).and_return(false) end - it 'subscribes and store data into peek views' do - Timecop.freeze(2021, 2, 23, 10, 0) do - ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 1.second, '1', event_1) - ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 2.seconds, '2', event_2) - ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 3.seconds, '3', event_3) + context 'when database load balancing is not enabled' do + it 'subscribes and store data into peek views' do + Timecop.freeze(2021, 2, 23, 10, 0) do + ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 1.second, '1', event_1) + ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 2.seconds, '2', event_2) + ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 3.seconds, '3', event_3) + ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 4.seconds, '4', event_4) + end + + expect(subject.results).to match( + 
calls: 4, + summary: { + "Cached" => 1, + "In a transaction" => 1 + }, + duration: '10000.00ms', + warnings: ["active-record duration: 10000.0 over 3000"], + details: contain_exactly( + a_hash_including( + start: be_a(Time), + cached: '', + transaction: '', + duration: 1000.0, + sql: 'SELECT * FROM users WHERE id = 10' + ), + a_hash_including( + start: be_a(Time), + cached: 'Cached', + transaction: '', + duration: 2000.0, + sql: 'SELECT * FROM users WHERE id = 10' + ), + a_hash_including( + start: be_a(Time), + cached: '', + transaction: 'In a transaction', + duration: 3000.0, + sql: 'UPDATE users SET admin = true WHERE id = 10' + ), + a_hash_including( + start: be_a(Time), + cached: '', + transaction: '', + duration: 4000.0, + sql: 'SELECT VERSION()' + ) + ) + ) + end + end + + context 'when database load balancing is enabled' do + before do + allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true) + allow(Gitlab::Database::LoadBalancing).to receive(:db_role_for_connection).with(connection_replica).and_return(:replica) + allow(Gitlab::Database::LoadBalancing).to receive(:db_role_for_connection).with(connection_primary_1).and_return(:primary) + allow(Gitlab::Database::LoadBalancing).to receive(:db_role_for_connection).with(connection_primary_2).and_return(:primary) + allow(Gitlab::Database::LoadBalancing).to receive(:db_role_for_connection).with(connection_unknown).and_return(nil) end - expect(subject.results).to match( - calls: 3, - summary: { - "Cached" => 1, - "In a transaction" => 1 - }, - duration: '6000.00ms', - warnings: ["active-record duration: 6000.0 over 3000"], - details: contain_exactly( - a_hash_including( - start: be_a(Time), - cached: '', - transaction: '', - duration: 1000.0, - sql: 'SELECT * FROM users WHERE id = 10' - ), - a_hash_including( - start: be_a(Time), - cached: 'Cached', - transaction: '', - duration: 2000.0, - sql: 'SELECT * FROM users WHERE id = 10' - ), - a_hash_including( - start: be_a(Time), - cached: '', - 
transaction: 'In a transaction', - duration: 3000.0, - sql: 'UPDATE users SET admin = true WHERE id = 10' + it 'includes db role data' do + Timecop.freeze(2021, 2, 23, 10, 0) do + ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 1.second, '1', event_1) + ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 2.seconds, '2', event_2) + ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 3.seconds, '3', event_3) + ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 4.seconds, '4', event_4) + end + + expect(subject.results).to match( + calls: 4, + summary: { + "Cached" => 1, + "In a transaction" => 1, + "Primary" => 2, + "Replica" => 1, + "Unknown" => 1 + }, + duration: '10000.00ms', + warnings: ["active-record duration: 10000.0 over 3000"], + details: contain_exactly( + a_hash_including( + start: be_a(Time), + cached: '', + transaction: '', + duration: 1000.0, + sql: 'SELECT * FROM users WHERE id = 10', + db_role: 'Primary' + ), + a_hash_including( + start: be_a(Time), + cached: 'Cached', + transaction: '', + duration: 2000.0, + sql: 'SELECT * FROM users WHERE id = 10', + db_role: 'Replica' + ), + a_hash_including( + start: be_a(Time), + cached: '', + transaction: 'In a transaction', + duration: 3000.0, + sql: 'UPDATE users SET admin = true WHERE id = 10', + db_role: 'Primary' + ), + a_hash_including( + start: be_a(Time), + cached: '', + transaction: '', + duration: 4000.0, + sql: 'SELECT VERSION()', + db_role: 'Unknown' + ) ) ) - ) + end end end diff --git a/spec/lib/peek/views/memory_spec.rb b/spec/lib/peek/views/memory_spec.rb new file mode 100644 index 00000000000..1f88aadfc54 --- /dev/null +++ b/spec/lib/peek/views/memory_spec.rb @@ -0,0 +1,53 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Peek::Views::Memory, :request_store do + subject! 
{ described_class.new } + + before do + stub_memory_instrumentation + end + + context 'with process_action.action_controller notification' do + it 'returns empty results when it has not yet fired' do + expect(subject.results).to eq({}) + end + + it 'returns memory instrumentation data when it has fired' do + publish_notification + + expect(subject.results[:calls]).to eq('2 MB') + expect(subject.results[:details]).to all(have_key(:item_header)) + expect(subject.results[:details]).to all(have_key(:item_content)) + expect(subject.results[:summary]).to include('Objects allocated' => '200 k') + expect(subject.results[:summary]).to include('Allocator calls' => '500') + expect(subject.results[:summary]).to include('Large allocations' => '1 KB') + end + end + + def stub_memory_instrumentation + start_memory = { + total_malloc_bytes: 1, + total_mallocs: 2, + total_allocated_objects: 3 + } + allow(Gitlab::Memory::Instrumentation).to receive(:start_thread_memory_allocations).and_return(start_memory) + allow(Gitlab::Memory::Instrumentation).to receive(:measure_thread_memory_allocations).with(start_memory).and_return({ + mem_total_bytes: 2_097_152, + mem_bytes: 1024, + mem_mallocs: 500, + mem_objects: 200_000 + }) + Gitlab::InstrumentationHelper.init_instrumentation_data + end + + def publish_notification + headers = double + allow(headers).to receive(:env).and_return('action_dispatch.request_id': 'req-42') + + ActiveSupport::Notifications.publish( + 'process_action.action_controller', Time.current - 1.second, Time.current, 'id', headers: headers + ) + end +end diff --git a/spec/lib/prometheus/pid_provider_spec.rb b/spec/lib/prometheus/pid_provider_spec.rb index f1d7f2ffff5..9fdca2662e7 100644 --- a/spec/lib/prometheus/pid_provider_spec.rb +++ b/spec/lib/prometheus/pid_provider_spec.rb @@ -28,66 +28,6 @@ RSpec.describe Prometheus::PidProvider do end end - context 'when running in Unicorn mode' do - before do - allow(Gitlab::Runtime).to receive(:unicorn?).and_return(true) - - 
expect(described_class).to receive(:process_name) - .at_least(:once) - .and_return(process_name) - end - - context 'when unicorn master is specified in process name' do - context 'when running in Omnibus' do - context 'before the process was renamed' do - let(:process_name) { "/opt/gitlab/embedded/bin/unicorn"} - - it { is_expected.to eq 'unicorn_master' } - end - - context 'after the process was renamed' do - let(:process_name) { "unicorn master -D -E production -c /var/opt/gitlab/gitlab-rails/etc/unicorn.rb /opt/gitlab/embedded/service/gitlab-rails/config.ru" } - - it { is_expected.to eq 'unicorn_master' } - end - end - - context 'when in development env' do - context 'before the process was renamed' do - let(:process_name) { "path_to_bindir/bin/unicorn_rails"} - - it { is_expected.to eq 'unicorn_master' } - end - - context 'after the process was renamed' do - let(:process_name) { "unicorn_rails master -c /gitlab_dir/config/unicorn.rb -E development" } - - it { is_expected.to eq 'unicorn_master' } - end - end - end - - context 'when unicorn worker id is specified in process name' do - context 'when running in Omnibus' do - let(:process_name) { "unicorn worker[1] -D -E production -c /var/opt/gitlab/gitlab-rails/etc/unicorn.rb /opt/gitlab/embedded/service/gitlab-rails/config.ru" } - - it { is_expected.to eq 'unicorn_1' } - end - - context 'when in development env' do - let(:process_name) { "unicorn_rails worker[1] -c gitlab_dir/config/unicorn.rb -E development" } - - it { is_expected.to eq 'unicorn_1' } - end - end - - context 'when no specified unicorn master or worker id in process name' do - let(:process_name) { "bin/unknown_process"} - - it { is_expected.to eq "process_#{Process.pid}" } - end - end - context 'when running in Puma mode' do before do allow(Gitlab::Runtime).to receive(:puma?).and_return(true) diff --git a/spec/lib/security/ci_configuration/sast_build_action_spec.rb b/spec/lib/security/ci_configuration/sast_build_action_spec.rb index 
6a1ea68fdd6..5337e8d9c39 100644 --- a/spec/lib/security/ci_configuration/sast_build_action_spec.rb +++ b/spec/lib/security/ci_configuration/sast_build_action_spec.rb @@ -12,7 +12,6 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do [ { 'field' => 'stage', 'defaultValue' => 'test', 'value' => 'test' }, { 'field' => 'SEARCH_MAX_DEPTH', 'defaultValue' => 4, 'value' => 4 }, - { 'field' => 'SAST_ANALYZER_IMAGE_TAG', 'defaultValue' => 2, 'value' => 2 }, { 'field' => 'SAST_EXCLUDED_PATHS', 'defaultValue' => 'spec, test, tests, tmp', 'value' => 'spec, test, tests, tmp' } ] } end @@ -26,7 +25,6 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do [ { 'field' => 'stage', 'defaultValue' => 'test', 'value' => 'security' }, { 'field' => 'SEARCH_MAX_DEPTH', 'defaultValue' => 4, 'value' => 1 }, - { 'field' => 'SAST_ANALYZER_IMAGE_TAG', 'defaultValue' => 2, 'value' => 2 }, { 'field' => 'SAST_EXCLUDED_PATHS', 'defaultValue' => 'spec, test, tests, tmp', 'value' => 'spec,docs' } ] } end @@ -146,8 +144,6 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do subject(:result) { described_class.new(auto_devops_enabled, params_with_analyzer_info, gitlab_ci_content).generate } it 'writes SAST_EXCLUDED_ANALYZERS' do - stub_const('Security::CiConfiguration::SastBuildAction::SAST_DEFAULT_ANALYZERS', 'bandit, brakeman, flawfinder') - expect(result[:content]).to eq(sast_yaml_with_no_variables_set_but_analyzers) end end @@ -157,9 +153,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do subject(:result) { described_class.new(auto_devops_enabled, params_with_all_analyzers_enabled, gitlab_ci_content).generate } - it 'does not write SAST_DEFAULT_ANALYZERS or SAST_EXCLUDED_ANALYZERS' do - stub_const('Security::CiConfiguration::SastBuildAction::SAST_DEFAULT_ANALYZERS', 'brakeman, flawfinder') - + it 'does not write SAST_EXCLUDED_ANALYZERS' do expect(result[:content]).to eq(sast_yaml_with_no_variables_set) end end @@ -176,7 +170,6 @@ RSpec.describe 
Security::CiConfiguration::SastBuildAction do [ { 'field' => 'stage', 'defaultValue' => 'test', 'value' => 'brand_new_stage' }, { 'field' => 'SEARCH_MAX_DEPTH', 'defaultValue' => 4, 'value' => 5 }, - { 'field' => 'SAST_ANALYZER_IMAGE_TAG', 'defaultValue' => 2, 'value' => 2 }, { 'field' => 'SAST_EXCLUDED_PATHS', 'defaultValue' => 'spec, test, tests, tmp', 'value' => 'spec,docs' } ] } end @@ -227,27 +220,27 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do def existing_gitlab_ci_and_template_array_without_sast { "stages" => %w(test security), "variables" => { "RANDOM" => "make sure this persists", "SECURE_ANALYZERS_PREFIX" => "localhost:5000/analyzers" }, - "sast" => { "variables" => { "SAST_ANALYZER_IMAGE_TAG" => 2, "SEARCH_MAX_DEPTH" => 1 }, "stage" => "security" }, + "sast" => { "variables" => { "SEARCH_MAX_DEPTH" => 1 }, "stage" => "security" }, "include" => [{ "template" => "existing.yml" }] } end def existing_gitlab_ci_and_single_template_with_sast_and_default_stage { "stages" => %w(test), "variables" => { "SECURE_ANALYZERS_PREFIX" => "localhost:5000/analyzers" }, - "sast" => { "variables" => { "SAST_ANALYZER_IMAGE_TAG" => 2, "SEARCH_MAX_DEPTH" => 1 }, "stage" => "test" }, + "sast" => { "variables" => { "SEARCH_MAX_DEPTH" => 1 }, "stage" => "test" }, "include" => { "template" => "Security/SAST.gitlab-ci.yml" } } end def existing_gitlab_ci_and_single_template_without_sast { "stages" => %w(test security), "variables" => { "RANDOM" => "make sure this persists", "SECURE_ANALYZERS_PREFIX" => "localhost:5000/analyzers" }, - "sast" => { "variables" => { "SAST_ANALYZER_IMAGE_TAG" => 2, "SEARCH_MAX_DEPTH" => 1 }, "stage" => "security" }, + "sast" => { "variables" => { "SEARCH_MAX_DEPTH" => 1 }, "stage" => "security" }, "include" => { "template" => "existing.yml" } } end def existing_gitlab_ci_with_no_variables { "stages" => %w(test security), - "sast" => { "variables" => { "SAST_ANALYZER_IMAGE_TAG" => 2, "SEARCH_MAX_DEPTH" => 1 }, "stage" => "security" }, 
+ "sast" => { "variables" => { "SEARCH_MAX_DEPTH" => 1 }, "stage" => "security" }, "include" => [{ "template" => "Security/SAST.gitlab-ci.yml" }] } end @@ -267,7 +260,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do def existing_gitlab_ci { "stages" => %w(test security), "variables" => { "RANDOM" => "make sure this persists", "SECURE_ANALYZERS_PREFIX" => "bad_prefix" }, - "sast" => { "variables" => { "SAST_ANALYZER_IMAGE_TAG" => 2, "SEARCH_MAX_DEPTH" => 1 }, "stage" => "security" }, + "sast" => { "variables" => { "SEARCH_MAX_DEPTH" => 1 }, "stage" => "security" }, "include" => [{ "template" => "Security/SAST.gitlab-ci.yml" }] } end end @@ -319,20 +312,6 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do end end - describe 'Security::CiConfiguration::SastBuildAction::SAST_DEFAULT_ANALYZERS' do - subject(:variable) {Security::CiConfiguration::SastBuildAction::SAST_DEFAULT_ANALYZERS} - - it 'is sorted alphabetically' do - sorted_variable = Security::CiConfiguration::SastBuildAction::SAST_DEFAULT_ANALYZERS - .split(',') - .map(&:strip) - .sort - .join(', ') - - expect(variable).to eq(sorted_variable) - end - end - # stubbing this method allows this spec file to use fast_spec_helper def fast_auto_devops_stages auto_devops_template = YAML.safe_load( File.read('lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml') ) @@ -345,7 +324,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do # SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings # Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings # Note that environment variables can be set in several places - # See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables + # See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence stages: - test sast: @@ -364,7 +343,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do 
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings # Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings # Note that environment variables can be set in several places - # See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables + # See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence stages: - test sast: @@ -380,7 +359,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do # SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings # Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings # Note that environment variables can be set in several places - # See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables + # See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence stages: - test - security @@ -402,7 +381,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do # SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings # Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings # Note that environment variables can be set in several places - # See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables + # See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence stages: - build - test @@ -437,7 +416,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do # SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings # Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings # Note that environment variables can be set in several places - # See 
https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables + # See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence stages: - test - security @@ -461,7 +440,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do # SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings # Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings # Note that environment variables can be set in several places - # See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables + # See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence stages: - test - security @@ -483,7 +462,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do # SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings # Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings # Note that environment variables can be set in several places - # See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables + # See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence stages: - test - security @@ -506,7 +485,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do # SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings # Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings # Note that environment variables can be set in several places - # See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables + # See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence stages: - test - security @@ -529,7 +508,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do # SAST customization: 
https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings # Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings # Note that environment variables can be set in several places - # See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables + # See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence stages: - test - security diff --git a/spec/lib/security/ci_configuration/secret_detection_build_action_spec.rb b/spec/lib/security/ci_configuration/secret_detection_build_action_spec.rb index 31854fcf3a7..f6181c6ef7a 100644 --- a/spec/lib/security/ci_configuration/secret_detection_build_action_spec.rb +++ b/spec/lib/security/ci_configuration/secret_detection_build_action_spec.rb @@ -17,7 +17,7 @@ RSpec.describe Security::CiConfiguration::SecretDetectionBuildAction do # SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings # Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings # Note that environment variables can be set in several places - # See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables + # See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence stages: - test - security @@ -63,7 +63,7 @@ RSpec.describe Security::CiConfiguration::SecretDetectionBuildAction do # SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings # Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings # Note that environment variables can be set in several places - # See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables + # See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence stages: - test variables: @@ -112,7 +112,7 @@ RSpec.describe 
Security::CiConfiguration::SecretDetectionBuildAction do # SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings # Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings # Note that environment variables can be set in several places - # See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables + # See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence include: - template: Security/Secret-Detection.gitlab-ci.yml CI_YML @@ -132,7 +132,7 @@ RSpec.describe Security::CiConfiguration::SecretDetectionBuildAction do # SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings # Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings # Note that environment variables can be set in several places - # See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables + # See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence include: - template: Auto-DevOps.gitlab-ci.yml CI_YML diff --git a/spec/lib/serializers/json_spec.rb b/spec/lib/serializers/json_spec.rb index 7054f98a719..0c1801b34f9 100644 --- a/spec/lib/serializers/json_spec.rb +++ b/spec/lib/serializers/json_spec.rb @@ -2,7 +2,7 @@ require 'fast_spec_helper' -RSpec.describe Serializers::JSON do +RSpec.describe Serializers::Json do describe '.dump' do let(:obj) { { key: "value" } } diff --git a/spec/lib/sidebars/menu_spec.rb b/spec/lib/sidebars/menu_spec.rb index 7dcf1940442..95009aa063f 100644 --- a/spec/lib/sidebars/menu_spec.rb +++ b/spec/lib/sidebars/menu_spec.rb @@ -144,4 +144,50 @@ RSpec.describe Sidebars::Menu do end end end + + describe '#container_html_options' do + before do + allow(menu).to receive(:title).and_return('Foo Menu') + end + + context 'when menu can be rendered' do + before do + allow(menu).to 
receive(:render?).and_return(true) + end + + context 'when menu has renderable items' do + before do + menu.add_item(Sidebars::MenuItem.new(title: 'foo1', link: 'foo1', active_routes: { path: 'bar' })) + end + + it 'contains the special class' do + expect(menu.container_html_options[:class]).to eq 'has-sub-items' + end + + context 'when menu already has other classes' do + it 'appends special class' do + allow(menu).to receive(:extra_container_html_options).and_return(class: 'foo') + + expect(menu.container_html_options[:class]).to eq 'foo has-sub-items' + end + end + end + + context 'when menu does not have renderable items' do + it 'does not contain the special class' do + expect(menu.container_html_options[:class]).to be_nil + end + end + end + + context 'when menu cannot be rendered' do + before do + allow(menu).to receive(:render?).and_return(false) + end + + it 'does not contain special class' do + expect(menu.container_html_options[:class]).to be_nil + end + end + end end diff --git a/spec/lib/sidebars/projects/menus/confluence_menu_spec.rb b/spec/lib/sidebars/projects/menus/confluence_menu_spec.rb index 0ecb328efd1..e3ae3add4fd 100644 --- a/spec/lib/sidebars/projects/menus/confluence_menu_spec.rb +++ b/spec/lib/sidebars/projects/menus/confluence_menu_spec.rb @@ -18,7 +18,7 @@ RSpec.describe Sidebars::Projects::Menus::ConfluenceMenu do end context 'when Confluence integration is present' do - let!(:confluence) { create(:confluence_service, project: project, active: active) } + let!(:confluence) { create(:confluence_integration, project: project, active: active) } context 'when integration is disabled' do let(:active) { false } diff --git a/spec/lib/sidebars/projects/menus/external_issue_tracker_menu_spec.rb b/spec/lib/sidebars/projects/menus/external_issue_tracker_menu_spec.rb index 5d62eebca1c..0585eb2254c 100644 --- a/spec/lib/sidebars/projects/menus/external_issue_tracker_menu_spec.rb +++ 
b/spec/lib/sidebars/projects/menus/external_issue_tracker_menu_spec.rb @@ -20,7 +20,7 @@ RSpec.describe Sidebars::Projects::Menus::ExternalIssueTrackerMenu do end context 'when active external issue tracker' do - let(:external_issue_tracker) { build(:custom_issue_tracker_service, project: project) } + let(:external_issue_tracker) { build(:custom_issue_tracker_integration, project: project) } context 'is present' do it 'returns true' do diff --git a/spec/lib/sidebars/projects/menus/learn_gitlab_menu_spec.rb b/spec/lib/sidebars/projects/menus/learn_gitlab_menu_spec.rb index ef5ae550551..231e5a850c2 100644 --- a/spec/lib/sidebars/projects/menus/learn_gitlab_menu_spec.rb +++ b/spec/lib/sidebars/projects/menus/learn_gitlab_menu_spec.rb @@ -27,7 +27,6 @@ RSpec.describe Sidebars::Projects::Menus::LearnGitlabMenu do { class: 'home', data: { - track_action: 'click_menu', track_property: tracking_category, track_label: 'learn_gitlab' } diff --git a/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb b/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb index 731dd5eca23..cc4760e69e5 100644 --- a/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb +++ b/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb @@ -3,7 +3,8 @@ require 'spec_helper' RSpec.describe Sidebars::Projects::Menus::PackagesRegistriesMenu do - let(:project) { build(:project) } + let_it_be(:project) { create(:project) } + let(:user) { project.owner } let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project) } diff --git a/spec/lib/sidebars/projects/menus/project_information_menu_spec.rb b/spec/lib/sidebars/projects/menus/project_information_menu_spec.rb index b50bf0f4bf1..748796bc7ee 100644 --- a/spec/lib/sidebars/projects/menus/project_information_menu_spec.rb +++ b/spec/lib/sidebars/projects/menus/project_information_menu_spec.rb @@ -8,6 +8,20 @@ RSpec.describe Sidebars::Projects::Menus::ProjectInformationMenu do let(:user) 
{ project.owner } let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project) } + describe '#container_html_options' do + subject { described_class.new(context).container_html_options } + + specify { is_expected.to match(hash_including(class: 'shortcuts-project-information has-sub-items')) } + + context 'when feature flag :sidebar_refactor is disabled' do + before do + stub_feature_flags(sidebar_refactor: false) + end + + specify { is_expected.to match(hash_including(class: 'shortcuts-project rspec-project-link has-sub-items')) } + end + end + describe 'Menu Items' do subject { described_class.new(context).renderable_items.index { |e| e.item_id == item_id } } diff --git a/spec/lib/sidebars/projects/menus/scope_menu_spec.rb b/spec/lib/sidebars/projects/menus/scope_menu_spec.rb new file mode 100644 index 00000000000..f84d458a2e1 --- /dev/null +++ b/spec/lib/sidebars/projects/menus/scope_menu_spec.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Sidebars::Projects::Menus::ScopeMenu do + let(:project) { build(:project) } + let(:user) { project.owner } + let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project) } + + describe '#container_html_options' do + subject { described_class.new(context).container_html_options } + + specify { is_expected.to match(hash_including(class: 'shortcuts-project rspec-project-link')) } + + context 'when feature flag :sidebar_refactor is disabled' do + before do + stub_feature_flags(sidebar_refactor: false) + end + + specify { is_expected.to eq(aria: { label: project.name }) } + end + end +end diff --git a/spec/lib/sidebars/projects/menus/security_compliance_menu_spec.rb b/spec/lib/sidebars/projects/menus/security_compliance_menu_spec.rb new file mode 100644 index 00000000000..6e84beeb274 --- /dev/null +++ b/spec/lib/sidebars/projects/menus/security_compliance_menu_spec.rb @@ -0,0 +1,36 @@ +# frozen_string_literal: true + +require 
'spec_helper' + +RSpec.describe Sidebars::Projects::Menus::SecurityComplianceMenu do + let_it_be(:project) { create(:project) } + + let(:user) { project.owner } + let(:show_promotions) { true } + let(:show_discover_project_security) { true } + let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project, show_promotions: show_promotions, show_discover_project_security: show_discover_project_security) } + + describe 'render?' do + subject { described_class.new(context).render? } + + context 'when user is not authenticated' do + let(:user) { nil } + + it { is_expected.to be_falsey } + end + + context 'when user is authenticated' do + context 'when the Security & Compliance is disabled' do + before do + allow(Ability).to receive(:allowed?).with(user, :access_security_and_compliance, project).and_return(false) + end + + it { is_expected.to be_falsey } + end + + context 'when the Security & Compliance is not disabled' do + it { is_expected.to be_truthy } + end + end + end +end diff --git a/spec/lib/sidebars/projects/menus/settings_menu_spec.rb b/spec/lib/sidebars/projects/menus/settings_menu_spec.rb index 88f2df6cd84..6817f0e6ed6 100644 --- a/spec/lib/sidebars/projects/menus/settings_menu_spec.rb +++ b/spec/lib/sidebars/projects/menus/settings_menu_spec.rb @@ -3,7 +3,8 @@ require 'spec_helper' RSpec.describe Sidebars::Projects::Menus::SettingsMenu do - let(:project) { build(:project) } + let_it_be(:project) { create(:project) } + let(:user) { project.owner } let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project) } diff --git a/spec/lib/sidebars/projects/panel_spec.rb b/spec/lib/sidebars/projects/panel_spec.rb index 51d37bf69ea..2e79ced7039 100644 --- a/spec/lib/sidebars/projects/panel_spec.rb +++ b/spec/lib/sidebars/projects/panel_spec.rb @@ -16,7 +16,7 @@ RSpec.describe Sidebars::Projects::Panel do subject { described_class.new(context).instance_variable_get(:@menus) } context 'when integration is present and 
active' do - let_it_be(:confluence) { create(:confluence_service, active: true) } + let_it_be(:confluence) { create(:confluence_integration, active: true) } let(:project) { confluence.project } diff --git a/spec/lib/system_check/app/hashed_storage_all_projects_check_spec.rb b/spec/lib/system_check/app/hashed_storage_all_projects_check_spec.rb index 14bf9d61ab5..4536618ba61 100644 --- a/spec/lib/system_check/app/hashed_storage_all_projects_check_spec.rb +++ b/spec/lib/system_check/app/hashed_storage_all_projects_check_spec.rb @@ -1,13 +1,8 @@ # frozen_string_literal: true require 'spec_helper' -require 'rake_helper' - -RSpec.describe SystemCheck::App::HashedStorageAllProjectsCheck do - before do - silence_output - end +RSpec.describe SystemCheck::App::HashedStorageAllProjectsCheck, :silence_stdout do describe '#check?' do it 'fails when at least one project is in legacy storage' do create(:project, :legacy_storage) diff --git a/spec/lib/system_check/app/hashed_storage_enabled_check_spec.rb b/spec/lib/system_check/app/hashed_storage_enabled_check_spec.rb index 32a2f409858..36c6f0a2be1 100644 --- a/spec/lib/system_check/app/hashed_storage_enabled_check_spec.rb +++ b/spec/lib/system_check/app/hashed_storage_enabled_check_spec.rb @@ -1,13 +1,8 @@ # frozen_string_literal: true require 'spec_helper' -require 'rake_helper' - -RSpec.describe SystemCheck::App::HashedStorageEnabledCheck do - before do - silence_output - end +RSpec.describe SystemCheck::App::HashedStorageEnabledCheck, :silence_stdout do describe '#check?' 
do it 'fails when hashed storage is disabled' do stub_application_setting(hashed_storage_enabled: false) diff --git a/spec/lib/system_check/incoming_email/imap_authentication_check_spec.rb b/spec/lib/system_check/incoming_email/imap_authentication_check_spec.rb new file mode 100644 index 00000000000..d7a77a84472 --- /dev/null +++ b/spec/lib/system_check/incoming_email/imap_authentication_check_spec.rb @@ -0,0 +1,31 @@ +# frozen_string_literal: true + +require 'fast_spec_helper' + +MAIL_ROOM_CONFIG_ENABLED_SAMPLE = + ":mailboxes:\n"\ + " \n"\ + " -\n"\ + " :host: \"gitlab.example.com\"\n"\ + " :port: 143\n"\ + "" + +RSpec.describe SystemCheck::IncomingEmail::ImapAuthenticationCheck do + subject(:system_check) { described_class.new } + + describe '#load_config' do + subject { system_check.send(:load_config) } + + context 'returns no mailbox configurations with mailroom default configuration' do + it { is_expected.to be_nil } + end + + context 'returns an array of mailbox configurations with mailroom configured' do + before do + allow(File).to receive(:read).and_return(MAIL_ROOM_CONFIG_ENABLED_SAMPLE) + end + + it { is_expected.to eq([{ host: "gitlab.example.com", port: 143 }]) } + end + end +end diff --git a/spec/lib/system_check/orphans/namespace_check_spec.rb b/spec/lib/system_check/orphans/namespace_check_spec.rb index 795dfde9029..e764c2313cd 100644 --- a/spec/lib/system_check/orphans/namespace_check_spec.rb +++ b/spec/lib/system_check/orphans/namespace_check_spec.rb @@ -1,15 +1,13 @@ # frozen_string_literal: true require 'spec_helper' -require 'rake_helper' -RSpec.describe SystemCheck::Orphans::NamespaceCheck do +RSpec.describe SystemCheck::Orphans::NamespaceCheck, :silence_stdout do let(:storages) { Gitlab.config.repositories.storages.reject { |key, _| key.eql? 
'broken' } } before do allow(Gitlab.config.repositories).to receive(:storages).and_return(storages) allow(subject).to receive(:fetch_disk_namespaces).and_return(disk_namespaces) - silence_output end describe '#multi_check' do diff --git a/spec/lib/system_check/orphans/repository_check_spec.rb b/spec/lib/system_check/orphans/repository_check_spec.rb index 2ab30f4802d..91b48969cc1 100644 --- a/spec/lib/system_check/orphans/repository_check_spec.rb +++ b/spec/lib/system_check/orphans/repository_check_spec.rb @@ -1,16 +1,14 @@ # frozen_string_literal: true require 'spec_helper' -require 'rake_helper' -RSpec.describe SystemCheck::Orphans::RepositoryCheck do +RSpec.describe SystemCheck::Orphans::RepositoryCheck, :silence_stdout do let(:storages) { Gitlab.config.repositories.storages.reject { |key, _| key.eql? 'broken' } } before do allow(Gitlab.config.repositories).to receive(:storages).and_return(storages) allow(subject).to receive(:fetch_disk_namespaces).and_return(disk_namespaces) allow(subject).to receive(:fetch_disk_repositories).and_return(disk_repositories) - # silence_output end describe '#multi_check' do diff --git a/spec/lib/system_check/simple_executor_spec.rb b/spec/lib/system_check/simple_executor_spec.rb index c9a09d86e32..96d93146d5e 100644 --- a/spec/lib/system_check/simple_executor_spec.rb +++ b/spec/lib/system_check/simple_executor_spec.rb @@ -1,9 +1,8 @@ # frozen_string_literal: true require 'spec_helper' -require 'rake_helper' -RSpec.describe SystemCheck::SimpleExecutor do +RSpec.describe SystemCheck::SimpleExecutor, :silence_stdout do before do stub_const('SimpleCheck', Class.new(SystemCheck::BaseCheck)) stub_const('OtherCheck', Class.new(SystemCheck::BaseCheck)) @@ -154,8 +153,6 @@ RSpec.describe SystemCheck::SimpleExecutor do describe '#execute' do before do - silence_output - subject << SimpleCheck subject << OtherCheck end diff --git a/spec/lib/system_check_spec.rb b/spec/lib/system_check_spec.rb index 5ddb2741c4a..8b32eccead6 100644 --- 
a/spec/lib/system_check_spec.rb +++ b/spec/lib/system_check_spec.rb @@ -1,9 +1,8 @@ # frozen_string_literal: true require 'spec_helper' -require 'rake_helper' -RSpec.describe SystemCheck do +RSpec.describe SystemCheck, :silence_stdout do before do stub_const('SimpleCheck', Class.new(SystemCheck::BaseCheck)) stub_const('OtherCheck', Class.new(SystemCheck::BaseCheck)) @@ -19,8 +18,6 @@ RSpec.describe SystemCheck do false end end - - silence_output end describe '.run' do |