Diffstat (limited to 'spec/lib')
175 files changed, 5901 insertions, 1946 deletions
diff --git a/spec/lib/api/entities/application_setting_spec.rb b/spec/lib/api/entities/application_setting_spec.rb
new file mode 100644
index 00000000000..5adb825672c
--- /dev/null
+++ b/spec/lib/api/entities/application_setting_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::ApplicationSetting do
+  let_it_be(:application_setting, reload: true) { create(:application_setting) }
+
+  subject(:output) { described_class.new(application_setting).as_json }
+
+  context 'housekeeping_bitmaps_enabled usage is deprecated and always enabled' do
+    before do
+      application_setting.housekeeping_bitmaps_enabled = housekeeping_bitmaps_enabled
+    end
+
+    context 'when housekeeping_bitmaps_enabled db column is false' do
+      let(:housekeeping_bitmaps_enabled) { false }
+
+      it 'returns true' do
+        expect(subject[:housekeeping_bitmaps_enabled]).to eq(true)
+      end
+    end
+
+    context 'when housekeeping_bitmaps_enabled db column is true' do
+      let(:housekeeping_bitmaps_enabled) { true }
+
+      it 'returns true' do
+        expect(subject[:housekeeping_bitmaps_enabled]).to eq(true)
+      end
+    end
+  end
+end
diff --git a/spec/lib/api/validations/validators/limit_spec.rb b/spec/lib/api/validations/validators/limit_spec.rb
index d71dde470cc..0c10e2f74d2 100644
--- a/spec/lib/api/validations/validators/limit_spec.rb
+++ b/spec/lib/api/validations/validators/limit_spec.rb
@@ -22,4 +22,10 @@ RSpec.describe API::Validations::Validators::Limit do
       expect_validation_error('test' => "#{'a' * 256}")
     end
   end
+
+  context 'value is nil' do
+    it 'does not raise a validation error' do
+      expect_no_validation_error('test' => nil)
+    end
+  end
 end
diff --git a/spec/lib/backup/artifacts_spec.rb b/spec/lib/backup/artifacts_spec.rb
deleted file mode 100644
index d830692d96b..00000000000
--- a/spec/lib/backup/artifacts_spec.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Backup::Artifacts do
-  let(:progress) { StringIO.new }
-
-  subject(:backup) { described_class.new(progress) }
-
-  describe '#dump' do
-    before do
-      allow(File).to receive(:realpath).with('/var/gitlab-artifacts').and_return('/var/gitlab-artifacts')
-      allow(File).to receive(:realpath).with('/var/gitlab-artifacts/..').and_return('/var')
-      allow(JobArtifactUploader).to receive(:root) { '/var/gitlab-artifacts' }
-    end
-
-    it 'excludes tmp from backup tar' do
-      expect(backup).to receive(:tar).and_return('blabla-tar')
-      expect(backup).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found --exclude=./tmp -C /var/gitlab-artifacts -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], ''])
-      expect(backup).to receive(:pipeline_succeeded?).and_return(true)
-      backup.dump('artifacts.tar.gz')
-    end
-  end
-end
diff --git a/spec/lib/backup/files_spec.rb b/spec/lib/backup/files_spec.rb
index bbc465a26c9..f98b5e1414f 100644
--- a/spec/lib/backup/files_spec.rb
+++ b/spec/lib/backup/files_spec.rb
@@ -39,7 +39,7 @@ RSpec.describe Backup::Files do
   end

   describe '#restore' do
-    subject { described_class.new(progress, 'registry', '/var/gitlab-registry') }
+    subject { described_class.new(progress, '/var/gitlab-registry') }

     let(:timestamp) { Time.utc(2017, 3, 22) }

@@ -110,7 +110,7 @@ RSpec.describe Backup::Files do
   end

   describe '#dump' do
-    subject { described_class.new(progress, 'pages', '/var/gitlab-pages', excludes: ['@pages.tmp']) }
+    subject { described_class.new(progress, '/var/gitlab-pages', excludes: ['@pages.tmp']) }

     before do
       allow(subject).to
receive(:run_pipeline!).and_return([[true, true], '']) @@ -118,14 +118,14 @@ RSpec.describe Backup::Files do end it 'raises no errors' do - expect { subject.dump('registry.tar.gz') }.not_to raise_error + expect { subject.dump('registry.tar.gz', 'backup_id') }.not_to raise_error end it 'excludes tmp dirs from archive' do expect(subject).to receive(:tar).and_return('blabla-tar') expect(subject).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found --exclude=./@pages.tmp -C /var/gitlab-pages -cf - .), 'gzip -c -1'], any_args) - subject.dump('registry.tar.gz') + subject.dump('registry.tar.gz', 'backup_id') end it 'raises an error on failure' do @@ -133,7 +133,7 @@ RSpec.describe Backup::Files do expect(subject).to receive(:pipeline_succeeded?).and_return(false) expect do - subject.dump('registry.tar.gz') + subject.dump('registry.tar.gz', 'backup_id') end.to raise_error(/Failed to create compressed file/) end @@ -149,7 +149,7 @@ RSpec.describe Backup::Files do .with(%w(rsync -a --delete --exclude=lost+found --exclude=/gitlab-pages/@pages.tmp /var/gitlab-pages /var/gitlab-backup)) .and_return(['', 0]) - subject.dump('registry.tar.gz') + subject.dump('registry.tar.gz', 'backup_id') end it 'retries if rsync fails due to vanishing files' do @@ -158,7 +158,7 @@ RSpec.describe Backup::Files do .and_return(['rsync failed', 24], ['', 0]) expect do - subject.dump('registry.tar.gz') + subject.dump('registry.tar.gz', 'backup_id') end.to output(/files vanished during rsync, retrying/).to_stdout end @@ -168,7 +168,7 @@ RSpec.describe Backup::Files do .and_return(['rsync failed', 1]) expect do - subject.dump('registry.tar.gz') + subject.dump('registry.tar.gz', 'backup_id') end.to output(/rsync failed/).to_stdout .and raise_error(/Failed to create compressed file/) end @@ -176,7 +176,7 @@ RSpec.describe Backup::Files do end describe '#exclude_dirs' do - subject { described_class.new(progress, 'pages', '/var/gitlab-pages', excludes: ['@pages.tmp']) } + subject { described_class.new(progress, '/var/gitlab-pages', excludes: ['@pages.tmp']) } it 'prepends a leading dot slash to tar excludes' do expect(subject.exclude_dirs(:tar)).to eq(['--exclude=lost+found', '--exclude=./@pages.tmp']) @@ -188,7 +188,7 @@ RSpec.describe Backup::Files do end describe '#run_pipeline!' do - subject { described_class.new(progress, 'registry', '/var/gitlab-registry') } + subject { described_class.new(progress, '/var/gitlab-registry') } it 'executes an Open3.pipeline for cmd_list' do expect(Open3).to receive(:pipeline).with(%w[whew command], %w[another cmd], any_args) @@ -222,7 +222,7 @@ RSpec.describe Backup::Files do end describe '#pipeline_succeeded?' do - subject { described_class.new(progress, 'registry', '/var/gitlab-registry') } + subject { described_class.new(progress, '/var/gitlab-registry') } it 'returns true if both tar and gzip succeeeded' do expect( @@ -262,7 +262,7 @@ RSpec.describe Backup::Files do end describe '#tar_ignore_non_success?' do - subject { described_class.new(progress, 'registry', '/var/gitlab-registry') } + subject { described_class.new(progress, '/var/gitlab-registry') } context 'if `tar` command exits with 1 exitstatus' do it 'returns true' do @@ -310,7 +310,7 @@ RSpec.describe Backup::Files do end describe '#noncritical_warning?' 
do - subject { described_class.new(progress, 'registry', '/var/gitlab-registry') } + subject { described_class.new(progress, '/var/gitlab-registry') } it 'returns true if given text matches noncritical warnings list' do expect( diff --git a/spec/lib/backup/gitaly_backup_spec.rb b/spec/lib/backup/gitaly_backup_spec.rb index f5295c2b04c..399e4ffa72b 100644 --- a/spec/lib/backup/gitaly_backup_spec.rb +++ b/spec/lib/backup/gitaly_backup_spec.rb @@ -25,11 +25,11 @@ RSpec.describe Backup::GitalyBackup do progress.close end - subject { described_class.new(progress, max_parallelism: max_parallelism, storage_parallelism: storage_parallelism, backup_id: backup_id) } + subject { described_class.new(progress, max_parallelism: max_parallelism, storage_parallelism: storage_parallelism) } context 'unknown' do it 'fails to start unknown' do - expect { subject.start(:unknown, destination) }.to raise_error(::Backup::Error, 'unknown backup type: unknown') + expect { subject.start(:unknown, destination, backup_id: backup_id) }.to raise_error(::Backup::Error, 'unknown backup type: unknown') end end @@ -44,7 +44,7 @@ RSpec.describe Backup::GitalyBackup do expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything, '-layout', 'pointer', '-id', backup_id).and_call_original - subject.start(:create, destination) + subject.start(:create, destination, backup_id: backup_id) subject.enqueue(project, Gitlab::GlRepository::PROJECT) subject.enqueue(project, Gitlab::GlRepository::WIKI) subject.enqueue(project, Gitlab::GlRepository::DESIGN) @@ -65,7 +65,7 @@ RSpec.describe Backup::GitalyBackup do it 'passes parallel option through' do expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything, '-parallel', '3', '-layout', 'pointer', '-id', backup_id).and_call_original - subject.start(:create, destination) + subject.start(:create, destination, backup_id: backup_id) subject.finish! end end @@ -76,7 +76,7 @@ RSpec.describe Backup::GitalyBackup do it 'passes parallel option through' do expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything, '-parallel-storage', '3', '-layout', 'pointer', '-id', backup_id).and_call_original - subject.start(:create, destination) + subject.start(:create, destination, backup_id: backup_id) subject.finish! end end @@ -84,10 +84,16 @@ RSpec.describe Backup::GitalyBackup do it 'raises when the exit code not zero' do expect(subject).to receive(:bin_path).and_return(Gitlab::Utils.which('false')) - subject.start(:create, destination) + subject.start(:create, destination, backup_id: backup_id) expect { subject.finish! 
}.to raise_error(::Backup::Error, 'gitaly-backup exit status 1') end + it 'raises when gitaly_backup_path is not set' do + stub_backup_setting(gitaly_backup_path: nil) + + expect { subject.start(:create, destination, backup_id: backup_id) }.to raise_error(::Backup::Error, 'gitaly-backup binary not found and gitaly_backup_path is not configured') + end + context 'feature flag incremental_repository_backup disabled' do before do stub_feature_flags(incremental_repository_backup: false) @@ -102,7 +108,7 @@ RSpec.describe Backup::GitalyBackup do expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything).and_call_original - subject.start(:create, destination) + subject.start(:create, destination, backup_id: backup_id) subject.enqueue(project, Gitlab::GlRepository::PROJECT) subject.enqueue(project, Gitlab::GlRepository::WIKI) subject.enqueue(project, Gitlab::GlRepository::DESIGN) @@ -146,7 +152,7 @@ RSpec.describe Backup::GitalyBackup do it 'passes through SSL envs' do expect(Open3).to receive(:popen2).with(ssl_env, anything, 'create', '-path', anything, '-layout', 'pointer', '-id', backup_id).and_call_original - subject.start(:create, destination) + subject.start(:create, destination, backup_id: backup_id) subject.finish! end end @@ -171,7 +177,7 @@ RSpec.describe Backup::GitalyBackup do expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything, '-layout', 'pointer').and_call_original - subject.start(:restore, destination) + subject.start(:restore, destination, backup_id: backup_id) subject.enqueue(project, Gitlab::GlRepository::PROJECT) subject.enqueue(project, Gitlab::GlRepository::WIKI) subject.enqueue(project, Gitlab::GlRepository::DESIGN) @@ -194,7 +200,7 @@ RSpec.describe Backup::GitalyBackup do it 'passes parallel option through' do expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything, '-parallel', '3', '-layout', 'pointer').and_call_original - subject.start(:restore, destination) + subject.start(:restore, destination, backup_id: backup_id) subject.finish! end end @@ -205,7 +211,7 @@ RSpec.describe Backup::GitalyBackup do it 'passes parallel option through' do expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything, '-parallel-storage', '3', '-layout', 'pointer').and_call_original - subject.start(:restore, destination) + subject.start(:restore, destination, backup_id: backup_id) subject.finish! end end @@ -224,7 +230,7 @@ RSpec.describe Backup::GitalyBackup do expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything).and_call_original - subject.start(:restore, destination) + subject.start(:restore, destination, backup_id: backup_id) subject.enqueue(project, Gitlab::GlRepository::PROJECT) subject.enqueue(project, Gitlab::GlRepository::WIKI) subject.enqueue(project, Gitlab::GlRepository::DESIGN) @@ -245,8 +251,14 @@ RSpec.describe Backup::GitalyBackup do it 'raises when the exit code not zero' do expect(subject).to receive(:bin_path).and_return(Gitlab::Utils.which('false')) - subject.start(:restore, destination) + subject.start(:restore, destination, backup_id: backup_id) expect { subject.finish! 
}.to raise_error(::Backup::Error, 'gitaly-backup exit status 1') end + + it 'raises when gitaly_backup_path is not set' do + stub_backup_setting(gitaly_backup_path: nil) + + expect { subject.start(:restore, destination, backup_id: backup_id) }.to raise_error(::Backup::Error, 'gitaly-backup binary not found and gitaly_backup_path is not configured') + end end end diff --git a/spec/lib/backup/gitaly_rpc_backup_spec.rb b/spec/lib/backup/gitaly_rpc_backup_spec.rb deleted file mode 100644 index 6cba8c5c9b1..00000000000 --- a/spec/lib/backup/gitaly_rpc_backup_spec.rb +++ /dev/null @@ -1,154 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Backup::GitalyRpcBackup do - let(:progress) { spy(:stdout) } - let(:destination) { File.join(Gitlab.config.backup.path, 'repositories') } - - subject { described_class.new(progress) } - - after do - # make sure we do not leave behind any backup files - FileUtils.rm_rf(File.join(Gitlab.config.backup.path, 'repositories')) - end - - context 'unknown' do - it 'fails to start unknown' do - expect { subject.start(:unknown, destination) }.to raise_error(::Backup::Error, 'unknown backup type: unknown') - end - end - - context 'create' do - RSpec.shared_examples 'creates a repository backup' do - it 'creates repository bundles', :aggregate_failures do - # Add data to the wiki, design repositories, and snippets, so they will be included in the dump. - create(:wiki_page, container: project) - create(:design, :with_file, issue: create(:issue, project: project)) - project_snippet = create(:project_snippet, :repository, project: project) - personal_snippet = create(:personal_snippet, :repository, author: project.first_owner) - - subject.start(:create, destination) - subject.enqueue(project, Gitlab::GlRepository::PROJECT) - subject.enqueue(project, Gitlab::GlRepository::WIKI) - subject.enqueue(project, Gitlab::GlRepository::DESIGN) - subject.enqueue(personal_snippet, Gitlab::GlRepository::SNIPPET) - subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET) - subject.finish! - - expect(File).to exist(File.join(destination, project.disk_path + '.bundle')) - expect(File).to exist(File.join(destination, project.disk_path + '.wiki.bundle')) - expect(File).to exist(File.join(destination, project.disk_path + '.design.bundle')) - expect(File).to exist(File.join(destination, personal_snippet.disk_path + '.bundle')) - expect(File).to exist(File.join(destination, project_snippet.disk_path + '.bundle')) - end - - context 'failure' do - before do - allow_next_instance_of(Repository) do |repository| - allow(repository).to receive(:bundle_to_disk) { raise 'Fail in tests' } - end - end - - it 'logs an appropriate message', :aggregate_failures do - subject.start(:create, destination) - subject.enqueue(project, Gitlab::GlRepository::PROJECT) - subject.finish! 
- - expect(progress).to have_received(:puts).with("[Failed] backing up #{project.full_path} (#{project.disk_path})") - expect(progress).to have_received(:puts).with("Error Fail in tests") - end - end - end - - context 'hashed storage' do - let_it_be(:project) { create(:project, :repository) } - - it_behaves_like 'creates a repository backup' - end - - context 'legacy storage' do - let_it_be(:project) { create(:project, :repository, :legacy_storage) } - - it_behaves_like 'creates a repository backup' - end - end - - context 'restore' do - let_it_be(:project) { create(:project, :repository) } - let_it_be(:personal_snippet) { create(:personal_snippet, author: project.first_owner) } - let_it_be(:project_snippet) { create(:project_snippet, project: project, author: project.first_owner) } - - def copy_bundle_to_backup_path(bundle_name, destination) - FileUtils.mkdir_p(File.join(Gitlab.config.backup.path, 'repositories', File.dirname(destination))) - FileUtils.cp(Rails.root.join('spec/fixtures/lib/backup', bundle_name), File.join(Gitlab.config.backup.path, 'repositories', destination)) - end - - it 'restores from repository bundles', :aggregate_failures do - copy_bundle_to_backup_path('project_repo.bundle', project.disk_path + '.bundle') - copy_bundle_to_backup_path('wiki_repo.bundle', project.disk_path + '.wiki.bundle') - copy_bundle_to_backup_path('design_repo.bundle', project.disk_path + '.design.bundle') - copy_bundle_to_backup_path('personal_snippet_repo.bundle', personal_snippet.disk_path + '.bundle') - copy_bundle_to_backup_path('project_snippet_repo.bundle', project_snippet.disk_path + '.bundle') - - subject.start(:restore, destination) - subject.enqueue(project, Gitlab::GlRepository::PROJECT) - subject.enqueue(project, Gitlab::GlRepository::WIKI) - subject.enqueue(project, Gitlab::GlRepository::DESIGN) - subject.enqueue(personal_snippet, Gitlab::GlRepository::SNIPPET) - subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET) - subject.finish! - - collect_commit_shas = -> (repo) { repo.commits('master', limit: 10).map(&:sha) } - - expect(collect_commit_shas.call(project.repository)).to eq(['393a7d860a5a4c3cc736d7eb00604e3472bb95ec']) - expect(collect_commit_shas.call(project.wiki.repository)).to eq(['c74b9948d0088d703ee1fafeddd9ed9add2901ea']) - expect(collect_commit_shas.call(project.design_repository)).to eq(['c3cd4d7bd73a51a0f22045c3a4c871c435dc959d']) - expect(collect_commit_shas.call(personal_snippet.repository)).to eq(['3b3c067a3bc1d1b695b51e2be30c0f8cf698a06e']) - expect(collect_commit_shas.call(project_snippet.repository)).to eq(['6e44ba56a4748be361a841e759c20e421a1651a1']) - end - - it 'cleans existing repositories', :aggregate_failures do - expect_next_instance_of(DesignManagement::Repository) do |repository| - expect(repository).to receive(:remove) - end - - # 4 times = project repo + wiki repo + project_snippet repo + personal_snippet repo - expect(Repository).to receive(:new).exactly(4).times.and_wrap_original do |method, *original_args| - full_path, container, kwargs = original_args - - repository = method.call(full_path, container, **kwargs) - - expect(repository).to receive(:remove) - - repository - end - - subject.start(:restore, destination) - subject.enqueue(project, Gitlab::GlRepository::PROJECT) - subject.enqueue(project, Gitlab::GlRepository::WIKI) - subject.enqueue(project, Gitlab::GlRepository::DESIGN) - subject.enqueue(personal_snippet, Gitlab::GlRepository::SNIPPET) - subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET) - subject.finish! 
- end - - context 'failure' do - before do - allow_next_instance_of(Repository) do |repository| - allow(repository).to receive(:create_repository) { raise 'Fail in tests' } - allow(repository).to receive(:create_from_bundle) { raise 'Fail in tests' } - end - end - - it 'logs an appropriate message', :aggregate_failures do - subject.start(:restore, destination) - subject.enqueue(project, Gitlab::GlRepository::PROJECT) - subject.finish! - - expect(progress).to have_received(:puts).with("[Failed] restoring #{project.full_path} (#{project.disk_path})") - expect(progress).to have_received(:puts).with("Error Fail in tests") - end - end - end -end diff --git a/spec/lib/backup/lfs_spec.rb b/spec/lib/backup/lfs_spec.rb deleted file mode 100644 index a27f60f20d0..00000000000 --- a/spec/lib/backup/lfs_spec.rb +++ /dev/null @@ -1,26 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Backup::Lfs do - let(:progress) { StringIO.new } - - subject(:backup) { described_class.new(progress) } - - describe '#dump' do - before do - allow(File).to receive(:realpath).and_call_original - allow(File).to receive(:realpath).with('/var/lfs-objects').and_return('/var/lfs-objects') - allow(File).to receive(:realpath).with('/var/lfs-objects/..').and_return('/var') - allow(Settings.lfs).to receive(:storage_path).and_return('/var/lfs-objects') - end - - it 'uses the correct lfs dir in tar command', :aggregate_failures do - expect(backup).to receive(:tar).and_return('blabla-tar') - expect(backup).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found -C /var/lfs-objects -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], '']) - expect(backup).to receive(:pipeline_succeeded?).and_return(true) - - backup.dump('lfs.tar.gz') - end - end -end diff --git a/spec/lib/backup/manager_spec.rb b/spec/lib/backup/manager_spec.rb index 9cf78a11bc7..192739d05a7 100644 --- a/spec/lib/backup/manager_spec.rb +++ b/spec/lib/backup/manager_spec.rb @@ -22,13 +22,13 @@ RSpec.describe Backup::Manager do describe '#run_create_task' do let(:enabled) { true } - let(:task) { instance_double(Backup::Task, human_name: 'my task', enabled: enabled) } - let(:definitions) { { 'my_task' => Backup::Manager::TaskDefinition.new(task: task, destination_path: 'my_task.tar.gz') } } + let(:task) { instance_double(Backup::Task) } + let(:definitions) { { 'my_task' => Backup::Manager::TaskDefinition.new(task: task, enabled: enabled, destination_path: 'my_task.tar.gz', human_name: 'my task') } } it 'calls the named task' do expect(task).to receive(:dump) expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Dumping my task ... ') - expect(Gitlab::BackupLogger).to receive(:info).with(message: 'done') + expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Dumping my task ... done') subject.run_create_task('my_task') end @@ -37,8 +37,7 @@ RSpec.describe Backup::Manager do let(:enabled) { false } it 'informs the user' do - expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Dumping my task ... ') - expect(Gitlab::BackupLogger).to receive(:info).with(message: '[DISABLED]') + expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Dumping my task ... [DISABLED]') subject.run_create_task('my_task') end @@ -48,8 +47,7 @@ RSpec.describe Backup::Manager do it 'informs the user' do stub_env('SKIP', 'my_task') - expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Dumping my task ... 
') - expect(Gitlab::BackupLogger).to receive(:info).with(message: '[SKIPPED]') + expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Dumping my task ... [SKIPPED]') subject.run_create_task('my_task') end @@ -60,12 +58,10 @@ RSpec.describe Backup::Manager do let(:enabled) { true } let(:pre_restore_warning) { nil } let(:post_restore_warning) { nil } - let(:definitions) { { 'my_task' => Backup::Manager::TaskDefinition.new(task: task, destination_path: 'my_task.tar.gz') } } + let(:definitions) { { 'my_task' => Backup::Manager::TaskDefinition.new(task: task, enabled: enabled, human_name: 'my task', destination_path: 'my_task.tar.gz') } } let(:backup_information) { {} } let(:task) do instance_double(Backup::Task, - human_name: 'my task', - enabled: enabled, pre_restore_warning: pre_restore_warning, post_restore_warning: post_restore_warning) end @@ -78,7 +74,7 @@ RSpec.describe Backup::Manager do it 'calls the named task' do expect(task).to receive(:restore) expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... ').ordered - expect(Gitlab::BackupLogger).to receive(:info).with(message: 'done').ordered + expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... done').ordered subject.run_restore_task('my_task') end @@ -87,8 +83,7 @@ RSpec.describe Backup::Manager do let(:enabled) { false } it 'informs the user' do - expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... ').ordered - expect(Gitlab::BackupLogger).to receive(:info).with(message: '[DISABLED]').ordered + expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... [DISABLED]').ordered subject.run_restore_task('my_task') end @@ -100,7 +95,7 @@ RSpec.describe Backup::Manager do it 'displays and waits for the user' do expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... ').ordered expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Watch out!').ordered - expect(Gitlab::BackupLogger).to receive(:info).with(message: 'done').ordered + expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... done').ordered expect(Gitlab::TaskHelpers).to receive(:ask_to_continue) expect(task).to receive(:restore) @@ -124,7 +119,7 @@ RSpec.describe Backup::Manager do it 'displays and waits for the user' do expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... ').ordered - expect(Gitlab::BackupLogger).to receive(:info).with(message: 'done').ordered + expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... done').ordered expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Watch out!').ordered expect(Gitlab::TaskHelpers).to receive(:ask_to_continue) expect(task).to receive(:restore) @@ -134,7 +129,7 @@ RSpec.describe Backup::Manager do it 'does not continue when the user quits' do expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... ').ordered - expect(Gitlab::BackupLogger).to receive(:info).with(message: 'done').ordered + expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... 
done').ordered expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Watch out!').ordered expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Quitting...').ordered expect(task).to receive(:restore) @@ -148,8 +143,10 @@ RSpec.describe Backup::Manager do end describe '#create' do + let(:incremental_env) { 'false' } let(:expected_backup_contents) { %w{backup_information.yml task1.tar.gz task2.tar.gz} } - let(:tar_file) { '1546300800_2019_01_01_12.3_gitlab_backup.tar' } + let(:backup_id) { '1546300800_2019_01_01_12.3' } + let(:tar_file) { "#{backup_id}_gitlab_backup.tar" } let(:tar_system_options) { { out: [tar_file, 'w', Gitlab.config.backup.archive_permissions] } } let(:tar_cmdline) { ['tar', '-cf', '-', *expected_backup_contents, tar_system_options] } let(:backup_information) do @@ -159,24 +156,27 @@ RSpec.describe Backup::Manager do } end - let(:task1) { instance_double(Backup::Task, human_name: 'task 1', enabled: true) } - let(:task2) { instance_double(Backup::Task, human_name: 'task 2', enabled: true) } + let(:task1) { instance_double(Backup::Task) } + let(:task2) { instance_double(Backup::Task) } let(:definitions) do { - 'task1' => Backup::Manager::TaskDefinition.new(task: task1, destination_path: 'task1.tar.gz'), - 'task2' => Backup::Manager::TaskDefinition.new(task: task2, destination_path: 'task2.tar.gz') + 'task1' => Backup::Manager::TaskDefinition.new(task: task1, human_name: 'task 1', destination_path: 'task1.tar.gz'), + 'task2' => Backup::Manager::TaskDefinition.new(task: task2, human_name: 'task 2', destination_path: 'task2.tar.gz') } end before do + stub_env('INCREMENTAL', incremental_env) allow(ActiveRecord::Base.connection).to receive(:reconnect!) + allow(Gitlab::BackupLogger).to receive(:info) allow(Kernel).to receive(:system).and_return(true) + allow(YAML).to receive(:load_file).and_call_original allow(YAML).to receive(:load_file).with(File.join(Gitlab.config.backup.path, 'backup_information.yml')) .and_return(backup_information) allow(subject).to receive(:backup_information).and_return(backup_information) - allow(task1).to receive(:dump).with(File.join(Gitlab.config.backup.path, 'task1.tar.gz')) - allow(task2).to receive(:dump).with(File.join(Gitlab.config.backup.path, 'task2.tar.gz')) + allow(task1).to receive(:dump).with(File.join(Gitlab.config.backup.path, 'task1.tar.gz'), backup_id) + allow(task2).to receive(:dump).with(File.join(Gitlab.config.backup.path, 'task2.tar.gz'), backup_id) end it 'executes tar' do @@ -185,8 +185,22 @@ RSpec.describe Backup::Manager do expect(Kernel).to have_received(:system).with(*tar_cmdline) end + context 'tar fails' do + before do + expect(Kernel).to receive(:system).with(*tar_cmdline).and_return(false) + end + + it 'logs a failure' do + expect do + subject.create # rubocop:disable Rails/SaveBang + end.to raise_error(Backup::Error, 'Backup failed') + + expect(Gitlab::BackupLogger).to have_received(:info).with(message: "Creating archive #{tar_file} failed") + end + end + context 'when BACKUP is set' do - let(:tar_file) { 'custom_gitlab_backup.tar' } + let(:backup_id) { 'custom' } it 'uses the given value as tar file name' do stub_env('BACKUP', '/ignored/path/custom') @@ -213,6 +227,20 @@ RSpec.describe Backup::Manager do end end + context 'when SKIP env is set' do + let(:expected_backup_contents) { %w{backup_information.yml task1.tar.gz} } + + before do + stub_env('SKIP', 'task2') + end + + it 'executes tar' do + subject.create # rubocop:disable Rails/SaveBang + + expect(Kernel).to 
have_received(:system).with(*tar_cmdline) + end + end + context 'when the destination is optional' do let(:expected_backup_contents) { %w{backup_information.yml task1.tar.gz} } let(:definitions) do @@ -248,6 +276,7 @@ RSpec.describe Backup::Manager do end before do + allow(Gitlab::BackupLogger).to receive(:info) allow(Dir).to receive(:chdir).and_yield allow(Dir).to receive(:glob).and_return(files) allow(FileUtils).to receive(:rm) @@ -266,7 +295,7 @@ RSpec.describe Backup::Manager do end it 'prints a skipped message' do - expect(progress).to have_received(:puts).with('skipping') + expect(Gitlab::BackupLogger).to have_received(:info).with(message: 'Deleting old backups ... [SKIPPED]') end end @@ -290,7 +319,7 @@ RSpec.describe Backup::Manager do end it 'prints a done message' do - expect(progress).to have_received(:puts).with('done. (0 removed)') + expect(Gitlab::BackupLogger).to have_received(:info).with(message: 'Deleting old backups ... done. (0 removed)') end end @@ -307,7 +336,7 @@ RSpec.describe Backup::Manager do end it 'prints a done message' do - expect(progress).to have_received(:puts).with('done. (0 removed)') + expect(Gitlab::BackupLogger).to have_received(:info).with(message: 'Deleting old backups ... done. (0 removed)') end end @@ -348,7 +377,7 @@ RSpec.describe Backup::Manager do end it 'prints a done message' do - expect(progress).to have_received(:puts).with('done. (8 removed)') + expect(Gitlab::BackupLogger).to have_received(:info).with(message: 'Deleting old backups ... done. (8 removed)') end end @@ -372,11 +401,11 @@ RSpec.describe Backup::Manager do end it 'sets the correct removed count' do - expect(progress).to have_received(:puts).with('done. (7 removed)') + expect(Gitlab::BackupLogger).to have_received(:info).with(message: 'Deleting old backups ... done. (7 removed)') end it 'prints the error from file that could not be removed' do - expect(progress).to have_received(:puts).with(a_string_matching(message)) + expect(Gitlab::BackupLogger).to have_received(:info).with(message: a_string_matching(message)) end end end @@ -386,6 +415,7 @@ RSpec.describe Backup::Manager do let(:backup_filename) { File.basename(backup_file.path) } before do + allow(Gitlab::BackupLogger).to receive(:info) allow(subject).to receive(:tar_file).and_return(backup_filename) stub_backup_setting( @@ -410,6 +440,23 @@ RSpec.describe Backup::Manager do connection.directories.create(key: Gitlab.config.backup.upload.remote_directory) # rubocop:disable Rails/SaveBang end + context 'skipped upload' do + let(:backup_information) do + { + backup_created_at: Time.zone.parse('2019-01-01'), + gitlab_version: '12.3', + skipped: ['remote'] + } + end + + it 'informs the user' do + stub_env('SKIP', 'remote') + subject.create # rubocop:disable Rails/SaveBang + + expect(Gitlab::BackupLogger).to have_received(:info).with(message: 'Uploading backup archive to remote storage directory ... [SKIPPED]') + end + end + context 'target path' do it 'uses the tar filename by default' do expect_any_instance_of(Fog::Collection).to receive(:create) @@ -462,7 +509,7 @@ RSpec.describe Backup::Manager do it 'sets encryption attributes' do subject.create # rubocop:disable Rails/SaveBang - expect(progress).to have_received(:puts).with("done (encrypted with AES256)") + expect(Gitlab::BackupLogger).to have_received(:info).with(message: 'Uploading backup archive to remote storage directory ... 
done (encrypted with AES256)') end end @@ -473,7 +520,7 @@ RSpec.describe Backup::Manager do it 'sets encryption attributes' do subject.create # rubocop:disable Rails/SaveBang - expect(progress).to have_received(:puts).with("done (encrypted with AES256)") + expect(Gitlab::BackupLogger).to have_received(:info).with(message: 'Uploading backup archive to remote storage directory ... done (encrypted with AES256)') end end @@ -488,7 +535,7 @@ RSpec.describe Backup::Manager do it 'sets encryption attributes' do subject.create # rubocop:disable Rails/SaveBang - expect(progress).to have_received(:puts).with("done (encrypted with aws:kms)") + expect(Gitlab::BackupLogger).to have_received(:info).with(message: 'Uploading backup archive to remote storage directory ... done (encrypted with aws:kms)') end end end @@ -546,15 +593,169 @@ RSpec.describe Backup::Manager do end end end + + context 'incremental' do + let(:incremental_env) { 'true' } + let(:gitlab_version) { Gitlab::VERSION } + let(:backup_id) { "1546300800_2019_01_01_#{gitlab_version}" } + let(:tar_file) { "#{backup_id}_gitlab_backup.tar" } + let(:backup_information) do + { + backup_created_at: Time.zone.parse('2019-01-01'), + gitlab_version: gitlab_version + } + end + + context 'when there are no backup files in the directory' do + before do + allow(Dir).to receive(:glob).and_return([]) + end + + it 'fails the operation and prints an error' do + expect { subject.create }.to raise_error SystemExit # rubocop:disable Rails/SaveBang + expect(progress).to have_received(:puts) + .with(a_string_matching('No backups found')) + end + end + + context 'when there are two backup files in the directory and BACKUP variable is not set' do + before do + allow(Dir).to receive(:glob).and_return( + [ + '1451606400_2016_01_01_1.2.3_gitlab_backup.tar', + '1451520000_2015_12_31_gitlab_backup.tar' + ] + ) + end + + it 'prints the list of available backups' do + expect { subject.create }.to raise_error SystemExit # rubocop:disable Rails/SaveBang + expect(progress).to have_received(:puts) + .with(a_string_matching('1451606400_2016_01_01_1.2.3\n 1451520000_2015_12_31')) + end + + it 'fails the operation and prints an error' do + expect { subject.create }.to raise_error SystemExit # rubocop:disable Rails/SaveBang + expect(progress).to have_received(:puts) + .with(a_string_matching('Found more than one backup')) + end + end + + context 'when BACKUP variable is set to a non-existing file' do + before do + allow(Dir).to receive(:glob).and_return( + [ + '1451606400_2016_01_01_gitlab_backup.tar' + ] + ) + allow(File).to receive(:exist?).and_return(false) + + stub_env('BACKUP', 'wrong') + end + + it 'fails the operation and prints an error' do + expect { subject.create }.to raise_error SystemExit # rubocop:disable Rails/SaveBang + expect(File).to have_received(:exist?).with('wrong_gitlab_backup.tar') + expect(progress).to have_received(:puts) + .with(a_string_matching('The backup file wrong_gitlab_backup.tar does not exist')) + end + end + + context 'when BACKUP variable is set to a correct file' do + let(:backup_id) { '1451606400_2016_01_01_1.2.3' } + let(:tar_cmdline) { %w{tar -xf 1451606400_2016_01_01_1.2.3_gitlab_backup.tar} } + + before do + allow(Gitlab::BackupLogger).to receive(:info) + allow(Dir).to receive(:glob).and_return( + [ + '1451606400_2016_01_01_1.2.3_gitlab_backup.tar' + ] + ) + allow(File).to receive(:exist?).and_return(true) + allow(Kernel).to receive(:system).and_return(true) + + stub_env('BACKUP', '/ignored/path/1451606400_2016_01_01_1.2.3') + end + + 
it 'unpacks the file' do + subject.create # rubocop:disable Rails/SaveBang + + expect(Kernel).to have_received(:system).with(*tar_cmdline) + end + + context 'tar fails' do + before do + expect(Kernel).to receive(:system).with(*tar_cmdline).and_return(false) + end + + it 'logs a failure' do + expect do + subject.create # rubocop:disable Rails/SaveBang + end.to raise_error(SystemExit) + + expect(Gitlab::BackupLogger).to have_received(:info).with(message: 'Unpacking backup failed') + end + end + + context 'on version mismatch' do + let(:backup_information) do + { + backup_created_at: Time.zone.parse('2019-01-01'), + gitlab_version: "not #{gitlab_version}" + } + end + + it 'stops the process' do + expect { subject.create }.to raise_error SystemExit # rubocop:disable Rails/SaveBang + expect(progress).to have_received(:puts) + .with(a_string_matching('GitLab version mismatch')) + end + end + end + + context 'when there is a non-tarred backup in the directory' do + before do + allow(Dir).to receive(:glob).and_return( + [ + 'backup_information.yml' + ] + ) + allow(File).to receive(:exist?).and_return(true) + end + + it 'selects the non-tarred backup to restore from' do + subject.create # rubocop:disable Rails/SaveBang + + expect(progress).to have_received(:puts) + .with(a_string_matching('Non tarred backup found ')) + end + + context 'on version mismatch' do + let(:backup_information) do + { + backup_created_at: Time.zone.parse('2019-01-01'), + gitlab_version: "not #{gitlab_version}" + } + end + + it 'stops the process' do + expect { subject.create }.to raise_error SystemExit # rubocop:disable Rails/SaveBang + expect(progress).to have_received(:puts) + .with(a_string_matching('GitLab version mismatch')) + end + end + end + end end describe '#restore' do - let(:task1) { instance_double(Backup::Task, human_name: 'task 1', enabled: true, pre_restore_warning: nil, post_restore_warning: nil) } - let(:task2) { instance_double(Backup::Task, human_name: 'task 2', enabled: true, pre_restore_warning: nil, post_restore_warning: nil) } + let(:task1) { instance_double(Backup::Task, pre_restore_warning: nil, post_restore_warning: nil) } + let(:task2) { instance_double(Backup::Task, pre_restore_warning: nil, post_restore_warning: nil) } let(:definitions) do { - 'task1' => Backup::Manager::TaskDefinition.new(task: task1, destination_path: 'task1.tar.gz'), - 'task2' => Backup::Manager::TaskDefinition.new(task: task2, destination_path: 'task2.tar.gz') + 'task1' => Backup::Manager::TaskDefinition.new(task: task1, human_name: 'task 1', destination_path: 'task1.tar.gz'), + 'task2' => Backup::Manager::TaskDefinition.new(task: task2, human_name: 'task 2', destination_path: 'task2.tar.gz') } end @@ -570,6 +771,7 @@ RSpec.describe Backup::Manager do Rake.application.rake_require 'tasks/gitlab/shell' Rake.application.rake_require 'tasks/cache' + allow(Gitlab::BackupLogger).to receive(:info) allow(task1).to receive(:restore).with(File.join(Gitlab.config.backup.path, 'task1.tar.gz')) allow(task2).to receive(:restore).with(File.join(Gitlab.config.backup.path, 'task2.tar.gz')) allow(YAML).to receive(:load_file).with(File.join(Gitlab.config.backup.path, 'backup_information.yml')) @@ -634,7 +836,10 @@ RSpec.describe Backup::Manager do end context 'when BACKUP variable is set to a correct file' do + let(:tar_cmdline) { %w{tar -xf 1451606400_2016_01_01_1.2.3_gitlab_backup.tar} } + before do + allow(Gitlab::BackupLogger).to receive(:info) allow(Dir).to receive(:glob).and_return( [ '1451606400_2016_01_01_1.2.3_gitlab_backup.tar' @@ 
-649,8 +854,21 @@ RSpec.describe Backup::Manager do it 'unpacks the file' do subject.restore - expect(Kernel).to have_received(:system) - .with("tar", "-xf", "1451606400_2016_01_01_1.2.3_gitlab_backup.tar") + expect(Kernel).to have_received(:system).with(*tar_cmdline) + end + + context 'tar fails' do + before do + expect(Kernel).to receive(:system).with(*tar_cmdline).and_return(false) + end + + it 'logs a failure' do + expect do + subject.restore + end.to raise_error(SystemExit) + + expect(Gitlab::BackupLogger).to have_received(:info).with(message: 'Unpacking backup failed') + end end context 'on version mismatch' do @@ -680,7 +898,7 @@ RSpec.describe Backup::Manager do subject.restore - expect(progress).to have_received(:print).with('Deleting backups/tmp ... ') + expect(Gitlab::BackupLogger).to have_received(:info).with(message: 'Deleting backups/tmp ... ') end end end @@ -731,7 +949,7 @@ RSpec.describe Backup::Manager do subject.restore - expect(progress).to have_received(:print).with('Deleting backups/tmp ... ') + expect(Gitlab::BackupLogger).to have_received(:info).with(message: 'Deleting backups/tmp ... ') end end end diff --git a/spec/lib/backup/object_backup_spec.rb b/spec/lib/backup/object_backup_spec.rb deleted file mode 100644 index 85658173b0e..00000000000 --- a/spec/lib/backup/object_backup_spec.rb +++ /dev/null @@ -1,35 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.shared_examples 'backup object' do |setting| - let(:progress) { StringIO.new } - let(:backup_path) { "/var/#{setting}" } - - subject(:backup) { described_class.new(progress) } - - describe '#dump' do - before do - allow(File).to receive(:realpath).and_call_original - allow(File).to receive(:realpath).with(backup_path).and_return(backup_path) - allow(File).to receive(:realpath).with("#{backup_path}/..").and_return('/var') - allow(Settings.send(setting)).to receive(:storage_path).and_return(backup_path) - end - - it 'uses the correct storage dir in tar command and excludes tmp', :aggregate_failures do - expect(backup).to receive(:tar).and_return('blabla-tar') - expect(backup).to receive(:run_pipeline!).with([%W(blabla-tar --exclude=lost+found --exclude=./tmp -C #{backup_path} -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], '']) - expect(backup).to receive(:pipeline_succeeded?).and_return(true) - - backup.dump('backup_object.tar.gz') - end - end -end - -RSpec.describe Backup::Packages do - it_behaves_like 'backup object', 'packages' -end - -RSpec.describe Backup::TerraformState do - it_behaves_like 'backup object', 'terraform_state' -end diff --git a/spec/lib/backup/pages_spec.rb b/spec/lib/backup/pages_spec.rb deleted file mode 100644 index 095dda61cf4..00000000000 --- a/spec/lib/backup/pages_spec.rb +++ /dev/null @@ -1,25 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Backup::Pages do - let(:progress) { StringIO.new } - - subject { described_class.new(progress) } - - before do - allow(File).to receive(:realpath).with("/var/gitlab-pages").and_return("/var/gitlab-pages") - allow(File).to receive(:realpath).with("/var/gitlab-pages/..").and_return("/var") - end - - describe '#dump' do - it 'excludes tmp from backup tar' do - allow(Gitlab.config.pages).to receive(:path) { '/var/gitlab-pages' } - - expect(subject).to receive(:tar).and_return('blabla-tar') - expect(subject).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found --exclude=./@pages.tmp -C /var/gitlab-pages -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], '']) - 
expect(subject).to receive(:pipeline_succeeded?).and_return(true) - subject.dump('pages.tar.gz') - end - end -end diff --git a/spec/lib/backup/repositories_spec.rb b/spec/lib/backup/repositories_spec.rb index db3e507596f..c6f611e727c 100644 --- a/spec/lib/backup/repositories_spec.rb +++ b/spec/lib/backup/repositories_spec.rb @@ -4,18 +4,14 @@ require 'spec_helper' RSpec.describe Backup::Repositories do let(:progress) { spy(:stdout) } - let(:parallel_enqueue) { true } - let(:strategy) { spy(:strategy, parallel_enqueue?: parallel_enqueue) } - let(:max_concurrency) { 1 } - let(:max_storage_concurrency) { 1 } + let(:strategy) { spy(:strategy) } let(:destination) { 'repositories' } + let(:backup_id) { 'backup_id' } subject do described_class.new( progress, - strategy: strategy, - max_concurrency: max_concurrency, - max_storage_concurrency: max_storage_concurrency + strategy: strategy ) end @@ -27,9 +23,9 @@ RSpec.describe Backup::Repositories do project_snippet = create(:project_snippet, :repository, project: project) personal_snippet = create(:personal_snippet, :repository, author: project.first_owner) - subject.dump(destination) + subject.dump(destination, backup_id) - expect(strategy).to have_received(:start).with(:create, destination) + expect(strategy).to have_received(:start).with(:create, destination, backup_id: backup_id) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN) @@ -51,139 +47,30 @@ RSpec.describe Backup::Repositories do it_behaves_like 'creates repository bundles' end - context 'no concurrency' do - it 'creates the expected number of threads' do - expect(Thread).not_to receive(:new) + describe 'command failure' do + it 'enqueue_project raises an error' do + allow(strategy).to receive(:enqueue).with(anything, Gitlab::GlRepository::PROJECT).and_raise(IOError) - expect(strategy).to receive(:start).with(:create, destination) - projects.each do |project| - expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT) - end - expect(strategy).to receive(:finish!) 
- - subject.dump(destination) - end - - describe 'command failure' do - it 'enqueue_project raises an error' do - allow(strategy).to receive(:enqueue).with(anything, Gitlab::GlRepository::PROJECT).and_raise(IOError) - - expect { subject.dump(destination) }.to raise_error(IOError) - end - - it 'project query raises an error' do - allow(Project).to receive_message_chain(:includes, :find_each).and_raise(ActiveRecord::StatementTimeout) - - expect { subject.dump(destination) }.to raise_error(ActiveRecord::StatementTimeout) - end + expect { subject.dump(destination, backup_id) }.to raise_error(IOError) end - it 'avoids N+1 database queries' do - control_count = ActiveRecord::QueryRecorder.new do - subject.dump(destination) - end.count + it 'project query raises an error' do + allow(Project).to receive_message_chain(:includes, :find_each).and_raise(ActiveRecord::StatementTimeout) - create_list(:project, 2, :repository) - - expect do - subject.dump(destination) - end.not_to exceed_query_limit(control_count) + expect { subject.dump(destination, backup_id) }.to raise_error(ActiveRecord::StatementTimeout) end end - context 'concurrency with a strategy without parallel enqueueing support' do - let(:parallel_enqueue) { false } - let(:max_concurrency) { 2 } - let(:max_storage_concurrency) { 2 } - - it 'enqueues all projects sequentially' do - expect(Thread).not_to receive(:new) - - expect(strategy).to receive(:start).with(:create, destination) - projects.each do |project| - expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT) - end - expect(strategy).to receive(:finish!) - - subject.dump(destination) - end - end - - [4, 10].each do |max_storage_concurrency| - context "max_storage_concurrency #{max_storage_concurrency}", quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/241701' do - let(:storage_keys) { %w[default test_second_storage] } - let(:max_storage_concurrency) { max_storage_concurrency } - - before do - allow(Gitlab.config.repositories.storages).to receive(:keys).and_return(storage_keys) - end - - it 'creates the expected number of threads' do - expect(Thread).to receive(:new) - .exactly(storage_keys.length * (max_storage_concurrency + 1)).times - .and_call_original + it 'avoids N+1 database queries' do + control_count = ActiveRecord::QueryRecorder.new do + subject.dump(destination, backup_id) + end.count - expect(strategy).to receive(:start).with(:create, destination) - projects.each do |project| - expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT) - end - expect(strategy).to receive(:finish!) + create_list(:project, 2, :repository) - subject.dump(destination) - end - - context 'with extra max concurrency' do - let(:max_concurrency) { 3 } - - it 'creates the expected number of threads' do - expect(Thread).to receive(:new) - .exactly(storage_keys.length * (max_storage_concurrency + 1)).times - .and_call_original - - expect(strategy).to receive(:start).with(:create, destination) - projects.each do |project| - expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT) - end - expect(strategy).to receive(:finish!) 
- - subject.dump(destination) - end - end - - describe 'command failure' do - it 'enqueue_project raises an error' do - allow(strategy).to receive(:enqueue).and_raise(IOError) - - expect { subject.dump(destination) }.to raise_error(IOError) - end - - it 'project query raises an error' do - allow(Project).to receive_message_chain(:for_repository_storage, :includes, :find_each).and_raise(ActiveRecord::StatementTimeout) - - expect { subject.dump(destination) }.to raise_error(ActiveRecord::StatementTimeout) - end - - context 'misconfigured storages' do - let(:storage_keys) { %w[test_second_storage] } - - it 'raises an error' do - expect { subject.dump(destination) }.to raise_error(Backup::Error, 'repositories.storages in gitlab.yml is misconfigured') - end - end - end - - it 'avoids N+1 database queries' do - control_count = ActiveRecord::QueryRecorder.new do - subject.dump(destination) - end.count - - create_list(:project, 2, :repository) - - expect do - subject.dump(destination) - end.not_to exceed_query_limit(control_count) - end - end + expect do + subject.dump(destination, backup_id) + end.not_to exceed_query_limit(control_count) end end diff --git a/spec/lib/backup/task_spec.rb b/spec/lib/backup/task_spec.rb index b0eb885d3f4..80f1fe01b78 100644 --- a/spec/lib/backup/task_spec.rb +++ b/spec/lib/backup/task_spec.rb @@ -7,15 +7,9 @@ RSpec.describe Backup::Task do subject { described_class.new(progress) } - describe '#human_name' do - it 'must be implemented by the subclass' do - expect { subject.human_name }.to raise_error(NotImplementedError) - end - end - describe '#dump' do it 'must be implemented by the subclass' do - expect { subject.dump('some/path') }.to raise_error(NotImplementedError) + expect { subject.dump('some/path', 'backup_id') }.to raise_error(NotImplementedError) end end diff --git a/spec/lib/backup/uploads_spec.rb b/spec/lib/backup/uploads_spec.rb deleted file mode 100644 index 0cfc80a9cb9..00000000000 --- a/spec/lib/backup/uploads_spec.rb +++ /dev/null @@ -1,25 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Backup::Uploads do - let(:progress) { StringIO.new } - - subject(:backup) { described_class.new(progress) } - - describe '#dump' do - before do - allow(File).to receive(:realpath).and_call_original - allow(File).to receive(:realpath).with('/var/uploads').and_return('/var/uploads') - allow(File).to receive(:realpath).with('/var/uploads/..').and_return('/var') - allow(Gitlab.config.uploads).to receive(:storage_path) { '/var' } - end - - it 'excludes tmp from backup tar' do - expect(backup).to receive(:tar).and_return('blabla-tar') - expect(backup).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found --exclude=./tmp -C /var/uploads -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], '']) - expect(backup).to receive(:pipeline_succeeded?).and_return(true) - backup.dump('uploads.tar.gz') - end - end -end diff --git a/spec/lib/banzai/filter/custom_emoji_filter_spec.rb b/spec/lib/banzai/filter/custom_emoji_filter_spec.rb index 94e77663d0f..6e29b910a6c 100644 --- a/spec/lib/banzai/filter/custom_emoji_filter_spec.rb +++ b/spec/lib/banzai/filter/custom_emoji_filter_spec.rb @@ -18,31 +18,30 @@ RSpec.describe Banzai::Filter::CustomEmojiFilter do doc = filter('<p>:tanuki:</p>', project: project) expect(doc.css('gl-emoji').first.attributes['title'].value).to eq('tanuki') - expect(doc.css('gl-emoji img').size).to eq 1 end it 'correctly uses the custom emoji URL' do doc = filter('<p>:tanuki:</p>') - 
expect(doc.css('img').first.attributes['src'].value).to eq(custom_emoji.file) + expect(doc.css('gl-emoji').first.attributes['data-fallback-src'].value).to eq(custom_emoji.file) end it 'matches multiple same custom emoji' do doc = filter(':tanuki: :tanuki:') - expect(doc.css('img').size).to eq 2 + expect(doc.css('gl-emoji').size).to eq 2 end it 'matches multiple custom emoji' do doc = filter(':tanuki: (:happy_tanuki:)') - expect(doc.css('img').size).to eq 2 + expect(doc.css('gl-emoji').size).to eq 2 end it 'does not match enclosed colons' do doc = filter('tanuki:tanuki:') - expect(doc.css('img').size).to be 0 + expect(doc.css('gl-emoji').size).to be 0 end it 'does not do N+1 query' do diff --git a/spec/lib/banzai/filter/image_link_filter_spec.rb b/spec/lib/banzai/filter/image_link_filter_spec.rb index 238c3cdb9c1..6326d894b08 100644 --- a/spec/lib/banzai/filter/image_link_filter_spec.rb +++ b/spec/lib/banzai/filter/image_link_filter_spec.rb @@ -46,6 +46,16 @@ RSpec.describe Banzai::Filter::ImageLinkFilter do expect(doc.at_css('img')['data-canonical-src']).to eq doc.at_css('a')['data-canonical-src'] end + it 'moves the data-diagram* attributes' do + doc = filter(%q(<img class="plantuml" src="http://localhost:8080/png/U9npoazIqBLJ24uiIbImKl18pSd91m0rkGMq" data-diagram="plantuml" data-diagram-src="data:text/plain;base64,Qm9iIC0+IFNhcmEgOiBIZWxsbw==">), context) + + expect(doc.at_css('a')['data-diagram']).to eq "plantuml" + expect(doc.at_css('a')['data-diagram-src']).to eq "data:text/plain;base64,Qm9iIC0+IFNhcmEgOiBIZWxsbw==" + + expect(doc.at_css('a img')['data-diagram']).to be_nil + expect(doc.at_css('a img')['data-diagram-src']).to be_nil + end + it 'adds no-attachment icon class to the link' do doc = filter(image(path), context) diff --git a/spec/lib/banzai/filter/kroki_filter_spec.rb b/spec/lib/banzai/filter/kroki_filter_spec.rb index c9594ac702d..1fb61ad1991 100644 --- a/spec/lib/banzai/filter/kroki_filter_spec.rb +++ b/spec/lib/banzai/filter/kroki_filter_spec.rb @@ -9,7 +9,7 @@ RSpec.describe Banzai::Filter::KrokiFilter do stub_application_setting(kroki_enabled: true, kroki_url: "http://localhost:8000") doc = filter("<pre lang='nomnoml'><code>[Pirate|eyeCount: Int|raid();pillage()|\n [beard]--[parrot]\n [beard]-:>[foul mouth]\n]</code></pre>") - expect(doc.to_s).to eq '<img class="js-render-kroki" src="http://localhost:8000/nomnoml/svg/eNqLDsgsSixJrUmtTHXOL80rsVLwzCupKUrMTNHQtC7IzMlJTE_V0KzhUlCITkpNLEqJ1dWNLkgsKsoviUUSs7KLTssvzVHIzS8tyYjligUAMhEd0g==">' + expect(doc.to_s).to eq '<img src="http://localhost:8000/nomnoml/svg/eNqLDsgsSixJrUmtTHXOL80rsVLwzCupKUrMTNHQtC7IzMlJTE_V0KzhUlCITkpNLEqJ1dWNLkgsKsoviUUSs7KLTssvzVHIzS8tyYjligUAMhEd0g==" class="js-render-kroki" data-diagram="nomnoml" data-diagram-src="data:text/plain;base64,W1BpcmF0ZXxleWVDb3VudDogSW50fHJhaWQoKTtwaWxsYWdlKCl8CiAgW2JlYXJkXS0tW3BhcnJvdF0KICBbYmVhcmRdLTo+W2ZvdWwgbW91dGhdCl0=">' end it 'replaces nomnoml pre tag with img tag if both kroki and plantuml are enabled' do @@ -19,7 +19,7 @@ RSpec.describe Banzai::Filter::KrokiFilter do plantuml_url: "http://localhost:8080") doc = filter("<pre lang='nomnoml'><code>[Pirate|eyeCount: Int|raid();pillage()|\n [beard]--[parrot]\n [beard]-:>[foul mouth]\n]</code></pre>") - expect(doc.to_s).to eq '<img class="js-render-kroki" src="http://localhost:8000/nomnoml/svg/eNqLDsgsSixJrUmtTHXOL80rsVLwzCupKUrMTNHQtC7IzMlJTE_V0KzhUlCITkpNLEqJ1dWNLkgsKsoviUUSs7KLTssvzVHIzS8tyYjligUAMhEd0g==">' + expect(doc.to_s).to eq '<img 
src="http://localhost:8000/nomnoml/svg/eNqLDsgsSixJrUmtTHXOL80rsVLwzCupKUrMTNHQtC7IzMlJTE_V0KzhUlCITkpNLEqJ1dWNLkgsKsoviUUSs7KLTssvzVHIzS8tyYjligUAMhEd0g==" class="js-render-kroki" data-diagram="nomnoml" data-diagram-src="data:text/plain;base64,W1BpcmF0ZXxleWVDb3VudDogSW50fHJhaWQoKTtwaWxsYWdlKCl8CiAgW2JlYXJkXS0tW3BhcnJvdF0KICBbYmVhcmRdLTo+W2ZvdWwgbW91dGhdCl0=">' end it 'does not replace nomnoml pre tag with img tag if kroki is disabled' do @@ -44,6 +44,6 @@ RSpec.describe Banzai::Filter::KrokiFilter do text = '[Pirate|eyeCount: Int|raid();pillage()|\n [beard]--[parrot]\n [beard]-:>[foul mouth]\n]' * 25 doc = filter("<pre lang='nomnoml'><code>#{text}</code></pre>") - expect(doc.to_s).to eq '<img class="js-render-kroki" src="http://localhost:8000/nomnoml/svg/eNqLDsgsSixJrUmtTHXOL80rsVLwzCupKUrMTNHQtC7IzMlJTE_V0KyJyVNQiE5KTSxKidXVjS5ILCrKL4lFFrSyi07LL81RyM0vLckAysRGjxo8avCowaMGjxo8avCowaMGU8lgAE7mIdc=" hidden>' + expect(doc.to_s).to start_with '<img src="http://localhost:8000/nomnoml/svg/eNqLDsgsSixJrUmtTHXOL80rsVLwzCupKUrMTNHQtC7IzMlJTE_V0KyJyVNQiE5KTSxKidXVjS5ILCrKL4lFFrSyi07LL81RyM0vLckAysRGjxo8avCowaMGjxo8avCowaMGU8lgAE7mIdc=" hidden="" class="js-render-kroki" data-diagram="nomnoml" data-diagram-src="data:text/plain;base64,W1BpcmF0ZXxleWVDb3VudDog' end end diff --git a/spec/lib/banzai/filter/plantuml_filter_spec.rb b/spec/lib/banzai/filter/plantuml_filter_spec.rb index 2d1a01116e0..dcfeb2ce3ba 100644 --- a/spec/lib/banzai/filter/plantuml_filter_spec.rb +++ b/spec/lib/banzai/filter/plantuml_filter_spec.rb @@ -9,7 +9,7 @@ RSpec.describe Banzai::Filter::PlantumlFilter do stub_application_setting(plantuml_enabled: true, plantuml_url: "http://localhost:8080") input = '<pre lang="plantuml"><code>Bob -> Sara : Hello</code></pre>' - output = '<div class="imageblock"><div class="content"><img class="plantuml" src="http://localhost:8080/png/U9npoazIqBLJ24uiIbImKl18pSd91m0rkGMq"></div></div>' + output = '<img class="plantuml" src="http://localhost:8080/png/U9npoazIqBLJ24uiIbImKl18pSd91m0rkGMq" data-diagram="plantuml" data-diagram-src="data:text/plain;base64,Qm9iIC0+IFNhcmEgOiBIZWxsbw==">' doc = filter(input) expect(doc.to_s).to eq output @@ -29,7 +29,7 @@ RSpec.describe Banzai::Filter::PlantumlFilter do stub_application_setting(plantuml_enabled: true, plantuml_url: "invalid") input = '<pre lang="plantuml"><code>Bob -> Sara : Hello</code></pre>' - output = '<div class="listingblock"><div class="content"><pre class="plantuml plantuml-error"> Error: cannot connect to PlantUML server at "invalid"</pre></div></div>' + output = '<pre lang="plantuml"><code>Bob -> Sara : Hello</code></pre>' doc = filter(input) expect(doc.to_s).to eq output diff --git a/spec/lib/bulk_imports/common/pipelines/entity_finisher_spec.rb b/spec/lib/bulk_imports/common/pipelines/entity_finisher_spec.rb index c1a9ea7b7e2..f03a178b993 100644 --- a/spec/lib/bulk_imports/common/pipelines/entity_finisher_spec.rb +++ b/spec/lib/bulk_imports/common/pipelines/entity_finisher_spec.rb @@ -21,6 +21,8 @@ RSpec.describe BulkImports::Common::Pipelines::EntityFinisher do ) end + expect(context.portable).to receive(:try).with(:after_import) + expect { subject.run } .to change(entity, :status_name).to(:finished) end diff --git a/spec/lib/bulk_imports/groups/stage_spec.rb b/spec/lib/bulk_imports/groups/stage_spec.rb index b6bb8a7d195..645dee4a6f1 100644 --- a/spec/lib/bulk_imports/groups/stage_spec.rb +++ b/spec/lib/bulk_imports/groups/stage_spec.rb @@ -3,7 +3,10 @@ require 'spec_helper' RSpec.describe BulkImports::Groups::Stage do + let(:ancestor) 
{ create(:group) } + let(:group) { create(:group, parent: ancestor) } let(:bulk_import) { build(:bulk_import) } + let(:entity) { build(:bulk_import_entity, bulk_import: bulk_import, group: group, destination_namespace: ancestor.full_path) } let(:pipelines) do [ @@ -19,26 +22,46 @@ RSpec.describe BulkImports::Groups::Stage do end it 'raises error when initialized without a BulkImport' do - expect { described_class.new({}) }.to raise_error(ArgumentError, 'Expected an argument of type ::BulkImport') + expect { described_class.new({}) }.to raise_error(ArgumentError, 'Expected an argument of type ::BulkImports::Entity') end describe '.pipelines' do it 'list all the pipelines with their stage number, ordered by stage' do - expect(described_class.new(bulk_import).pipelines & pipelines).to contain_exactly(*pipelines) - expect(described_class.new(bulk_import).pipelines.last.last).to eq(BulkImports::Common::Pipelines::EntityFinisher) + expect(described_class.new(entity).pipelines & pipelines).to contain_exactly(*pipelines) + expect(described_class.new(entity).pipelines.last.last).to eq(BulkImports::Common::Pipelines::EntityFinisher) end - it 'includes project entities pipeline' do - stub_feature_flags(bulk_import_projects: true) + context 'when bulk_import_projects feature flag is enabled' do + it 'includes project entities pipeline' do + stub_feature_flags(bulk_import_projects: true) - expect(described_class.new(bulk_import).pipelines).to include([1, BulkImports::Groups::Pipelines::ProjectEntitiesPipeline]) + expect(described_class.new(entity).pipelines).to include([1, BulkImports::Groups::Pipelines::ProjectEntitiesPipeline]) + end + + context 'when feature flag is enabled on root ancestor level' do + it 'includes project entities pipeline' do + stub_feature_flags(bulk_import_projects: ancestor) + + expect(described_class.new(entity).pipelines).to include([1, BulkImports::Groups::Pipelines::ProjectEntitiesPipeline]) + end + end + + context 'when destination namespace is not present' do + it 'includes project entities pipeline' do + stub_feature_flags(bulk_import_projects: true) + + entity = create(:bulk_import_entity, destination_namespace: '') + + expect(described_class.new(entity).pipelines).to include([1, BulkImports::Groups::Pipelines::ProjectEntitiesPipeline]) + end + end end context 'when bulk_import_projects feature flag is disabled' do it 'does not include project entities pipeline' do stub_feature_flags(bulk_import_projects: false) - expect(described_class.new(bulk_import).pipelines.flatten).not_to include(BulkImports::Groups::Pipelines::ProjectEntitiesPipeline) + expect(described_class.new(entity).pipelines.flatten).not_to include(BulkImports::Groups::Pipelines::ProjectEntitiesPipeline) end end end diff --git a/spec/lib/bulk_imports/projects/stage_spec.rb b/spec/lib/bulk_imports/projects/stage_spec.rb index ef98613dc25..9fce30f3a81 100644 --- a/spec/lib/bulk_imports/projects/stage_spec.rb +++ b/spec/lib/bulk_imports/projects/stage_spec.rb @@ -34,9 +34,9 @@ RSpec.describe BulkImports::Projects::Stage do end subject do - bulk_import = build(:bulk_import) + entity = build(:bulk_import_entity, :project_entity) - described_class.new(bulk_import) + described_class.new(entity) end describe '#pipelines' do diff --git a/spec/lib/container_registry/gitlab_api_client_spec.rb b/spec/lib/container_registry/gitlab_api_client_spec.rb index 4fe229024e5..16d2c42f332 100644 --- a/spec/lib/container_registry/gitlab_api_client_spec.rb +++ b/spec/lib/container_registry/gitlab_api_client_spec.rb @@ -62,6 +62,7 
@@ RSpec.describe ContainerRegistry::GitlabApiClient do where(:status_code, :expected_result) do 200 | :already_imported 202 | :ok + 400 | :bad_request 401 | :unauthorized 404 | :not_found 409 | :already_being_imported @@ -86,6 +87,7 @@ RSpec.describe ContainerRegistry::GitlabApiClient do where(:status_code, :expected_result) do 200 | :already_imported 202 | :ok + 400 | :bad_request 401 | :unauthorized 404 | :not_found 409 | :already_being_imported @@ -104,54 +106,106 @@ RSpec.describe ContainerRegistry::GitlabApiClient do end end - describe '#import_status' do - subject { client.import_status(path) } + describe '#cancel_repository_import' do + let(:force) { false } - before do - stub_import_status(path, status) + subject { client.cancel_repository_import(path, force: force) } + + where(:status_code, :expected_result) do + 200 | :already_imported + 202 | :ok + 400 | :bad_request + 401 | :unauthorized + 404 | :not_found + 409 | :already_being_imported + 418 | :error + 424 | :pre_import_failed + 425 | :already_being_imported + 429 | :too_many_imports end - context 'with a status' do + with_them do + before do + stub_import_cancel(path, status_code, force: force) + end + + it { is_expected.to eq({ status: expected_result, migration_state: nil }) } + end + + context 'bad request' do let(:status) { 'this_is_a_test' } - it { is_expected.to eq(status) } + before do + stub_import_cancel(path, 400, status: status, force: force) + end + + it { is_expected.to eq({ status: :bad_request, migration_state: status }) } end - context 'with no status' do - let(:status) { nil } + context 'force cancel' do + let(:force) { true } - it { is_expected.to eq('error') } + before do + stub_import_cancel(path, 202, force: force) + end + + it { is_expected.to eq({ status: :ok, migration_state: nil }) } end end - describe '#repository_details' do - let(:path) { 'namespace/path/to/repository' } - let(:response) { { foo: :bar, this: :is_a_test } } - let(:with_size) { true } - - subject { client.repository_details(path, with_size: with_size) } + describe '#import_status' do + subject { client.import_status(path) } - context 'with size' do + context 'with successful response' do before do - stub_repository_details(path, with_size: with_size, respond_with: response) + stub_import_status(path, status) end - it { is_expected.to eq(response.stringify_keys.deep_transform_values(&:to_s)) } - end + context 'with a status' do + let(:status) { 'this_is_a_test' } + + it { is_expected.to eq(status) } + end + + context 'with no status' do + let(:status) { nil } - context 'without_size' do - let(:with_size) { false } + it { is_expected.to eq('error') } + end + end + context 'with non successful response' do before do - stub_repository_details(path, with_size: with_size, respond_with: response) + stub_import_status(path, nil, status_code: 404) end - it { is_expected.to eq(response.stringify_keys.deep_transform_values(&:to_s)) } + it { is_expected.to eq('pre_import_failed') } + end + end + + describe '#repository_details' do + let(:path) { 'namespace/path/to/repository' } + let(:response) { { foo: :bar, this: :is_a_test } } + + subject { client.repository_details(path, sizing: sizing) } + + [:self, :self_with_descendants, nil].each do |size_type| + context "with sizing #{size_type}" do + let(:sizing) { size_type } + + before do + stub_repository_details(path, sizing: sizing, respond_with: response) + end + + it { is_expected.to eq(response.stringify_keys.deep_transform_values(&:to_s)) } + end end context 'with non successful response' do 
+ let(:sizing) { nil } + before do - stub_repository_details(path, with_size: with_size, status_code: 404) + stub_repository_details(path, sizing: sizing, status_code: 404) end it { is_expected.to eq({}) } @@ -216,6 +270,54 @@ RSpec.describe ContainerRegistry::GitlabApiClient do end end + describe '.deduplicated_size' do + let(:path) { 'foo/bar' } + let(:response) { { 'size_bytes': 555 } } + let(:registry_enabled) { true } + + subject { described_class.deduplicated_size(path) } + + before do + stub_container_registry_config(enabled: registry_enabled, api_url: registry_api_url, key: 'spec/fixtures/x509_certificate_pk.key') + end + + context 'with successful response' do + before do + expect(Auth::ContainerRegistryAuthenticationService).to receive(:pull_nested_repositories_access_token).with(path).and_return(token) + stub_repository_details(path, sizing: :self_with_descendants, status_code: 200, respond_with: response) + end + + it { is_expected.to eq(555) } + end + + context 'with unsuccessful response' do + before do + expect(Auth::ContainerRegistryAuthenticationService).to receive(:pull_nested_repositories_access_token).with(path).and_return(token) + stub_repository_details(path, sizing: :self_with_descendants, status_code: 404, respond_with: response) + end + + it { is_expected.to eq(nil) } + end + + context 'with the registry disabled' do + let(:registry_enabled) { false } + + it { is_expected.to eq(nil) } + end + + context 'with a nil path' do + let(:path) { nil } + let(:token) { nil } + + before do + expect(Auth::ContainerRegistryAuthenticationService).not_to receive(:pull_nested_repositories_access_token) + stub_repository_details(path, sizing: :self_with_descendants, status_code: 401, respond_with: response) + end + + it { is_expected.to eq(nil) } + end + end + def stub_pre_import(path, status_code, pre:) import_type = pre ? 'pre' : 'final' stub_request(:put, "#{registry_api_url}/gitlab/v1/import/#{path}/?import_type=#{import_type}") @@ -230,21 +332,50 @@ RSpec.describe ContainerRegistry::GitlabApiClient do .to_return(status: status_code, body: '') end - def stub_import_status(path, status) + def stub_import_status(path, status, status_code: 200) stub_request(:get, "#{registry_api_url}/gitlab/v1/import/#{path}/") .with(headers: { 'Accept' => described_class::JSON_TYPE, 'Authorization' => "bearer #{import_token}" }) .to_return( - status: 200, + status: status_code, body: { status: status }.to_json, headers: { content_type: 'application/json' } ) end - def stub_repository_details(path, with_size: true, status_code: 200, respond_with: {}) + def stub_import_cancel(path, http_status, status: nil, force: false) + body = {} + + if http_status == 400 + body = { status: status } + end + + headers = { + 'Accept' => described_class::JSON_TYPE, + 'Authorization' => "bearer #{import_token}", + 'User-Agent' => "GitLab/#{Gitlab::VERSION}", + 'Accept-Encoding' => 'gzip;q=1.0,deflate;q=0.6,identity;q=0.3' + } + + params = force ? 
'?force=true' : '' + + stub_request(:delete, "#{registry_api_url}/gitlab/v1/import/#{path}/#{params}") + .with(headers: headers) + .to_return( + status: http_status, + body: body.to_json, + headers: { content_type: 'application/json' } + ) + end + + def stub_repository_details(path, sizing: nil, status_code: 200, respond_with: {}) url = "#{registry_api_url}/gitlab/v1/repositories/#{path}/" - url += "?size=self" if with_size + url += "?size=#{sizing}" if sizing + + headers = { 'Accept' => described_class::JSON_TYPE } + headers['Authorization'] = "bearer #{token}" if token + stub_request(:get, url) - .with(headers: { 'Accept' => described_class::JSON_TYPE, 'Authorization' => "bearer #{token}" }) + .with(headers: headers) .to_return(status: status_code, body: respond_with.to_json, headers: { 'Content-Type' => described_class::JSON_TYPE }) end end diff --git a/spec/lib/container_registry/migration_spec.rb b/spec/lib/container_registry/migration_spec.rb index ffbbfb249e3..6c0fc94e27f 100644 --- a/spec/lib/container_registry/migration_spec.rb +++ b/spec/lib/container_registry/migration_spec.rb @@ -37,8 +37,8 @@ RSpec.describe ContainerRegistry::Migration do subject { described_class.enqueue_waiting_time } where(:slow_enabled, :fast_enabled, :expected_result) do - false | false | 1.hour - true | false | 6.hours + false | false | 45.minutes + true | false | 165.minutes false | true | 0 true | true | 0 end @@ -154,15 +154,35 @@ RSpec.describe ContainerRegistry::Migration do end end - describe '.target_plan' do - let_it_be(:plan) { create(:plan) } + describe '.target_plans' do + subject { described_class.target_plans } - before do - stub_application_setting(container_registry_import_target_plan: plan.name) + where(:target_plan, :result) do + 'free' | described_class::FREE_TIERS + 'premium' | described_class::PREMIUM_TIERS + 'ultimate' | described_class::ULTIMATE_TIERS end - it 'returns the matching application_setting' do - expect(described_class.target_plan).to eq(plan) + with_them do + before do + stub_application_setting(container_registry_import_target_plan: target_plan) + end + + it { is_expected.to eq(result) } + end + end + + describe '.all_plans?' do + subject { described_class.all_plans? 
} + + it { is_expected.to eq(true) } + + context 'feature flag disabled' do + before do + stub_feature_flags(container_registry_migration_phase2_all_plans: false) + end + + it { is_expected.to eq(false) } end end end diff --git a/spec/lib/error_tracking/sentry_client/issue_spec.rb b/spec/lib/error_tracking/sentry_client/issue_spec.rb index 82db0f70f2e..d7bb0ca5c9a 100644 --- a/spec/lib/error_tracking/sentry_client/issue_spec.rb +++ b/spec/lib/error_tracking/sentry_client/issue_spec.rb @@ -13,7 +13,7 @@ RSpec.describe ErrorTracking::SentryClient::Issue do describe '#list_issues' do shared_examples 'issues have correct return type' do |klass| it "returns objects of type #{klass}" do - expect(subject[:issues]).to all( be_a(klass) ) + expect(subject[:issues]).to all(be_a(klass)) end end @@ -41,10 +41,18 @@ RSpec.describe ErrorTracking::SentryClient::Issue do let(:cursor) { nil } let(:sort) { 'last_seen' } let(:sentry_api_response) { issues_sample_response } - let(:sentry_request_url) { sentry_url + '/issues/?limit=20&query=is:unresolved' } + let(:sentry_request_url) { "#{sentry_url}/issues/?limit=20&query=is:unresolved" } let!(:sentry_api_request) { stub_sentry_request(sentry_request_url, body: sentry_api_response) } - subject { client.list_issues(issue_status: issue_status, limit: limit, search_term: search_term, sort: sort, cursor: cursor) } + subject do + client.list_issues( + issue_status: issue_status, + limit: limit, + search_term: search_term, + sort: sort, + cursor: cursor + ) + end it_behaves_like 'calls sentry api' @@ -52,7 +60,7 @@ RSpec.describe ErrorTracking::SentryClient::Issue do it_behaves_like 'issues have correct length', 3 shared_examples 'has correct external_url' do - context 'external_url' do + describe '#external_url' do it 'is constructed correctly' do expect(subject[:issues][0].external_url).to eq('https://sentrytest.gitlab.com/sentry-org/sentry-project/issues/11') end @@ -62,7 +70,8 @@ RSpec.describe ErrorTracking::SentryClient::Issue do context 'when response has a pagination info' do let(:headers) do { - link: '<https://sentrytest.gitlab.com>; rel="previous"; results="true"; cursor="1573556671000:0:1", <https://sentrytest.gitlab.com>; rel="next"; results="true"; cursor="1572959139000:0:0"' + link: '<https://sentrytest.gitlab.com>; rel="previous"; results="true"; cursor="1573556671000:0:1",' \ + '<https://sentrytest.gitlab.com>; rel="next"; results="true"; cursor="1572959139000:0:0"' } end @@ -76,7 +85,7 @@ RSpec.describe ErrorTracking::SentryClient::Issue do end end - context 'error object created from sentry response' do + context 'when error object created from sentry response' do using RSpec::Parameterized::TableSyntax where(:error_object, :sentry_response) do @@ -104,13 +113,13 @@ RSpec.describe ErrorTracking::SentryClient::Issue do it_behaves_like 'has correct external_url' end - context 'redirects' do - let(:sentry_api_url) { sentry_url + '/issues/?limit=20&query=is:unresolved' } + context 'with redirects' do + let(:sentry_api_url) { "#{sentry_url}/issues/?limit=20&query=is:unresolved" } it_behaves_like 'no Sentry redirects' end - context 'requests with sort parameter in sentry api' do + context 'with sort parameter in sentry api' do let(:sentry_request_url) do 'https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project/' \ 'issues/?limit=20&query=is:unresolved&sort=freq' @@ -140,7 +149,7 @@ RSpec.describe ErrorTracking::SentryClient::Issue do end end - context 'Older sentry versions where keys are not present' do + context 'with older sentry 
versions where keys are not present' do let(:sentry_api_response) do issues_sample_response[0...1].map do |issue| issue[:project].delete(:id) @@ -156,7 +165,7 @@ RSpec.describe ErrorTracking::SentryClient::Issue do it_behaves_like 'has correct external_url' end - context 'essential keys missing in API response' do + context 'when essential keys are missing in API response' do let(:sentry_api_response) do issues_sample_response[0...1].map do |issue| issue.except(:id) @@ -164,16 +173,18 @@ RSpec.describe ErrorTracking::SentryClient::Issue do end it 'raises exception' do - expect { subject }.to raise_error(ErrorTracking::SentryClient::MissingKeysError, 'Sentry API response is missing keys. key not found: "id"') + expect { subject }.to raise_error(ErrorTracking::SentryClient::MissingKeysError, + 'Sentry API response is missing keys. key not found: "id"') end end - context 'sentry api response too large' do + context 'when sentry api response is too large' do it 'raises exception' do - deep_size = double('Gitlab::Utils::DeepSize', valid?: false) + deep_size = instance_double(Gitlab::Utils::DeepSize, valid?: false) allow(Gitlab::Utils::DeepSize).to receive(:new).with(sentry_api_response).and_return(deep_size) - expect { subject }.to raise_error(ErrorTracking::SentryClient::ResponseInvalidSizeError, 'Sentry API response is too big. Limit is 1 MB.') + expect { subject }.to raise_error(ErrorTracking::SentryClient::ResponseInvalidSizeError, + 'Sentry API response is too big. Limit is 1 MB.') end end @@ -212,7 +223,7 @@ RSpec.describe ErrorTracking::SentryClient::Issue do subject { client.issue_details(issue_id: issue_id) } - context 'error object created from sentry response' do + context 'with error object created from sentry response' do using RSpec::Parameterized::TableSyntax where(:error_object, :sentry_response) do @@ -298,17 +309,16 @@ RSpec.describe ErrorTracking::SentryClient::Issue do describe '#update_issue' do let(:sentry_url) { 'https://sentrytest.gitlab.com/api/0' } let(:sentry_request_url) { "#{sentry_url}/issues/#{issue_id}/" } - - before do - stub_sentry_request(sentry_request_url, :put) - end - let(:params) do { status: 'resolved' } end + before do + stub_sentry_request(sentry_request_url, :put) + end + subject { client.update_issue(issue_id: issue_id, params: params) } it_behaves_like 'calls sentry api' do @@ -319,7 +329,7 @@ RSpec.describe ErrorTracking::SentryClient::Issue do expect(subject).to be_truthy end - context 'error encountered' do + context 'when error is encountered' do let(:error) { StandardError.new('error') } before do diff --git a/spec/lib/gitlab/application_context_spec.rb b/spec/lib/gitlab/application_context_spec.rb index 55f5ae7d7dc..f9e18a65af4 100644 --- a/spec/lib/gitlab/application_context_spec.rb +++ b/spec/lib/gitlab/application_context_spec.rb @@ -146,7 +146,8 @@ RSpec.describe Gitlab::ApplicationContext do where(:provided_options, :client) do [:remote_ip] | :remote_ip [:remote_ip, :runner] | :runner - [:remote_ip, :runner, :user] | :user + [:remote_ip, :runner, :user] | :runner + [:remote_ip, :user] | :user end with_them do @@ -195,6 +196,16 @@ RSpec.describe Gitlab::ApplicationContext do expect(result(context)).to include(project: nil) end end + + context 'when using job context' do + let_it_be(:job) { create(:ci_build, :pending, :queued, user: user, project: project) } + + it 'sets expected values' do + context = described_class.new(job: job) + + expect(result(context)).to include(job_id: job.id, project: project.full_path, pipeline_id: 
job.pipeline_id) + end + end end describe '#use' do diff --git a/spec/lib/gitlab/auth/o_auth/user_spec.rb b/spec/lib/gitlab/auth/o_auth/user_spec.rb index 1a9e2f02de6..6cb9085c3ad 100644 --- a/spec/lib/gitlab/auth/o_auth/user_spec.rb +++ b/spec/lib/gitlab/auth/o_auth/user_spec.rb @@ -6,11 +6,15 @@ RSpec.describe Gitlab::Auth::OAuth::User do include LdapHelpers let(:oauth_user) { described_class.new(auth_hash) } + let(:oauth_user_2) { described_class.new(auth_hash_2) } let(:gl_user) { oauth_user.gl_user } + let(:gl_user_2) { oauth_user_2.gl_user } let(:uid) { 'my-uid' } + let(:uid_2) { 'my-uid-2' } let(:dn) { 'uid=user1,ou=people,dc=example' } let(:provider) { 'my-provider' } let(:auth_hash) { OmniAuth::AuthHash.new(uid: uid, provider: provider, info: info_hash) } + let(:auth_hash_2) { OmniAuth::AuthHash.new(uid: uid_2, provider: provider, info: info_hash) } let(:info_hash) do { nickname: '-john+gitlab-ETC%.git@gmail.com', @@ -24,6 +28,7 @@ RSpec.describe Gitlab::Auth::OAuth::User do end let(:ldap_user) { Gitlab::Auth::Ldap::Person.new(Net::LDAP::Entry.new, 'ldapmain') } + let(:ldap_user_2) { Gitlab::Auth::Ldap::Person.new(Net::LDAP::Entry.new, 'ldapmain') } describe '.find_by_uid_and_provider' do let(:dn) { 'CN=John Ã…ström, CN=Users, DC=Example, DC=com' } @@ -46,12 +51,12 @@ RSpec.describe Gitlab::Auth::OAuth::User do let!(:existing_user) { create(:omniauth_user, extern_uid: 'my-uid', provider: 'my-provider') } it "finds an existing user based on uid and provider (facebook)" do - expect( oauth_user.persisted? ).to be_truthy + expect(oauth_user.persisted?).to be_truthy end it 'returns false if user is not found in database' do allow(auth_hash).to receive(:uid).and_return('non-existing') - expect( oauth_user.persisted? ).to be_falsey + expect(oauth_user.persisted?).to be_falsey end end @@ -78,15 +83,27 @@ RSpec.describe Gitlab::Auth::OAuth::User do context 'when signup is disabled' do before do stub_application_setting signup_enabled: false + stub_omniauth_config(allow_single_sign_on: [provider]) end it 'creates the user' do - stub_omniauth_config(allow_single_sign_on: [provider]) - oauth_user.save # rubocop:disable Rails/SaveBang expect(gl_user).to be_persisted end + + it 'does not repeat the default user password' do + oauth_user.save # rubocop:disable Rails/SaveBang + oauth_user_2.save # rubocop:disable Rails/SaveBang + + expect(gl_user.password).not_to eq(gl_user_2.password) + end + + it 'has the password length within specified range' do + oauth_user.save # rubocop:disable Rails/SaveBang + + expect(gl_user.password.length).to be_between(Devise.password_length.min, Devise.password_length.max) + end end context 'when user confirmation email is enabled' do @@ -330,6 +347,12 @@ RSpec.describe Gitlab::Auth::OAuth::User do allow(ldap_user).to receive(:name) { 'John Doe' } allow(ldap_user).to receive(:email) { ['johndoe@example.com', 'john2@example.com'] } allow(ldap_user).to receive(:dn) { dn } + + allow(ldap_user_2).to receive(:uid) { uid_2 } + allow(ldap_user_2).to receive(:username) { uid_2 } + allow(ldap_user_2).to receive(:name) { 'Beck Potter' } + allow(ldap_user_2).to receive(:email) { ['beckpotter@example.com', 'beck2@example.com'] } + allow(ldap_user_2).to receive(:dn) { dn } end context "and no account for the LDAP user" do @@ -340,6 +363,14 @@ RSpec.describe Gitlab::Auth::OAuth::User do oauth_user.save # rubocop:disable Rails/SaveBang end + it 'does not repeat the default user password' do + allow(Gitlab::Auth::Ldap::Person).to receive(:find_by_uid).and_return(ldap_user_2) + + 
oauth_user_2.save # rubocop:disable Rails/SaveBang + + expect(gl_user.password).not_to eq(gl_user_2.password) + end + it "creates a user with dual LDAP and omniauth identities" do expect(gl_user).to be_valid expect(gl_user.username).to eql uid @@ -609,6 +640,7 @@ RSpec.describe Gitlab::Auth::OAuth::User do context 'signup with SAML' do let(:provider) { 'saml' } + let(:block_auto_created_users) { false } before do stub_omniauth_config({ @@ -625,6 +657,13 @@ RSpec.describe Gitlab::Auth::OAuth::User do it_behaves_like 'not being blocked on creation' do let(:block_auto_created_users) { false } end + + it 'does not repeat the default user password' do + oauth_user.save # rubocop:disable Rails/SaveBang + oauth_user_2.save # rubocop:disable Rails/SaveBang + + expect(gl_user.password).not_to eq(gl_user_2.password) + end end context 'signup with omniauth only' do diff --git a/spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_spec.rb b/spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_spec.rb index a7895623d6f..1158eedfe7c 100644 --- a/spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_spec.rb +++ b/spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::BackfillDraftStatusOnMergeRequests do +RSpec.describe Gitlab::BackgroundMigration::BackfillDraftStatusOnMergeRequests, :migration, schema: 20220326161803 do let(:namespaces) { table(:namespaces) } let(:projects) { table(:projects) } let(:merge_requests) { table(:merge_requests) } @@ -50,5 +50,19 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillDraftStatusOnMergeRequests d subject.perform(mr_ids.first, mr_ids.last) end + + it_behaves_like 'marks background migration job records' do + let!(:non_eligible_mrs) do + Array.new(2) do + create_merge_request( + title: "Not a d-r-a-f-t 1", + draft: false, + state_id: 1 + ) + end + end + + let(:arguments) { [non_eligible_mrs.first.id, non_eligible_mrs.last.id] } + end end end diff --git a/spec/lib/gitlab/background_migration/backfill_group_features_spec.rb b/spec/lib/gitlab/background_migration/backfill_group_features_spec.rb new file mode 100644 index 00000000000..4705f0d0ab9 --- /dev/null +++ b/spec/lib/gitlab/background_migration/backfill_group_features_spec.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::BackfillGroupFeatures, :migration, schema: 20220302114046 do + let(:group_features) { table(:group_features) } + let(:namespaces) { table(:namespaces) } + + subject { described_class.new(connection: ActiveRecord::Base.connection) } + + describe '#perform' do + it 'creates settings for all group namespaces in range' do + namespaces.create!(id: 1, name: 'group1', path: 'group1', type: 'Group') + namespaces.create!(id: 2, name: 'user', path: 'user') + namespaces.create!(id: 3, name: 'group2', path: 'group2', type: 'Group') + + # Checking that no error is raised if the group_feature for a group already exists + namespaces.create!(id: 4, name: 'group3', path: 'group3', type: 'Group') + group_features.create!(id: 1, group_id: 4) + expect(group_features.count).to eq 1 + + expect { subject.perform(1, 4, :namespaces, :id, 10, 0, 4) }.to change { group_features.count }.by(2) + + expect(group_features.count).to eq 3 + expect(group_features.all.pluck(:group_id)).to contain_exactly(1, 3, 4) + end + end +end diff --git 
a/spec/lib/gitlab/background_migration/backfill_incident_issue_escalation_statuses_spec.rb b/spec/lib/gitlab/background_migration/backfill_incident_issue_escalation_statuses_spec.rb deleted file mode 100644 index 242da383453..00000000000 --- a/spec/lib/gitlab/background_migration/backfill_incident_issue_escalation_statuses_spec.rb +++ /dev/null @@ -1,27 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::BackfillIncidentIssueEscalationStatuses, schema: 20211214012507 do - let(:namespaces) { table(:namespaces) } - let(:projects) { table(:projects) } - let(:issues) { table(:issues) } - let(:issuable_escalation_statuses) { table(:incident_management_issuable_escalation_statuses) } - - subject(:migration) { described_class.new } - - it 'correctly backfills issuable escalation status records' do - namespace = namespaces.create!(name: 'foo', path: 'foo') - project = projects.create!(namespace_id: namespace.id) - - issues.create!(project_id: project.id, title: 'issue 1', issue_type: 0) # non-incident issue - issues.create!(project_id: project.id, title: 'incident 1', issue_type: 1) - issues.create!(project_id: project.id, title: 'incident 2', issue_type: 1) - incident_issue_existing_status = issues.create!(project_id: project.id, title: 'incident 3', issue_type: 1) - issuable_escalation_statuses.create!(issue_id: incident_issue_existing_status.id) - - migration.perform(1, incident_issue_existing_status.id) - - expect(issuable_escalation_statuses.count).to eq(3) - end -end diff --git a/spec/lib/gitlab/background_migration/backfill_issue_search_data_spec.rb b/spec/lib/gitlab/background_migration/backfill_issue_search_data_spec.rb index b29d4c3583b..f98aea2dda7 100644 --- a/spec/lib/gitlab/background_migration/backfill_issue_search_data_spec.rb +++ b/spec/lib/gitlab/background_migration/backfill_issue_search_data_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::BackfillIssueSearchData do +RSpec.describe Gitlab::BackgroundMigration::BackfillIssueSearchData, :migration, schema: 20220326161803 do let(:namespaces_table) { table(:namespaces) } let(:projects_table) { table(:projects) } let(:issue_search_data_table) { table(:issue_search_data) } diff --git a/spec/lib/gitlab/background_migration/backfill_namespace_id_for_project_route_spec.rb b/spec/lib/gitlab/background_migration/backfill_namespace_id_for_project_route_spec.rb new file mode 100644 index 00000000000..2dcd4645c84 --- /dev/null +++ b/spec/lib/gitlab/background_migration/backfill_namespace_id_for_project_route_spec.rb @@ -0,0 +1,53 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceIdForProjectRoute do + let(:migration) { described_class.new } + let(:namespaces) { table(:namespaces) } + let(:projects) { table(:projects) } + let(:routes) { table(:routes) } + + let(:namespace1) { namespaces.create!(name: 'batchtest1', type: 'Group', path: 'space1') } + let(:namespace2) { namespaces.create!(name: 'batchtest2', type: 'Group', parent_id: namespace1.id, path: 'space2') } + let(:namespace3) { namespaces.create!(name: 'batchtest3', type: 'Group', parent_id: namespace2.id, path: 'space3') } + + let(:proj_namespace1) { namespaces.create!(name: 'proj1', path: 'proj1', type: 'Project', parent_id: namespace1.id) } + let(:proj_namespace2) { namespaces.create!(name: 'proj2', path: 'proj2', type: 'Project', parent_id: namespace2.id) } + let(:proj_namespace3) { namespaces.create!(name: 
'proj3', path: 'proj3', type: 'Project', parent_id: namespace3.id) } + let(:proj_namespace4) { namespaces.create!(name: 'proj4', path: 'proj4', type: 'Project', parent_id: namespace3.id) } + + # rubocop:disable Layout/LineLength + let(:proj1) { projects.create!(name: 'proj1', path: 'proj1', namespace_id: namespace1.id, project_namespace_id: proj_namespace1.id) } + let(:proj2) { projects.create!(name: 'proj2', path: 'proj2', namespace_id: namespace2.id, project_namespace_id: proj_namespace2.id) } + let(:proj3) { projects.create!(name: 'proj3', path: 'proj3', namespace_id: namespace3.id, project_namespace_id: proj_namespace3.id) } + let(:proj4) { projects.create!(name: 'proj4', path: 'proj4', namespace_id: namespace3.id, project_namespace_id: proj_namespace4.id) } + # rubocop:enable Layout/LineLength + + let!(:namespace_route1) { routes.create!(path: 'space1', source_id: namespace1.id, source_type: 'Namespace') } + let!(:namespace_route2) { routes.create!(path: 'space1/space2', source_id: namespace2.id, source_type: 'Namespace') } + let!(:namespace_route3) { routes.create!(path: 'space1/space3', source_id: namespace3.id, source_type: 'Namespace') } + + let!(:proj_route1) { routes.create!(path: 'space1/proj1', source_id: proj1.id, source_type: 'Project') } + let!(:proj_route2) { routes.create!(path: 'space1/space2/proj2', source_id: proj2.id, source_type: 'Project') } + let!(:proj_route3) { routes.create!(path: 'space1/space3/proj3', source_id: proj3.id, source_type: 'Project') } + let!(:proj_route4) { routes.create!(path: 'space1/space3/proj4', source_id: proj4.id, source_type: 'Project') } + + subject(:perform_migration) { migration.perform(proj_route1.id, proj_route4.id, :routes, :id, 2, 0) } + + it 'backfills namespace_id for the selected records', :aggregate_failures do + perform_migration + + expected_namespaces = [proj_namespace1.id, proj_namespace2.id, proj_namespace3.id, proj_namespace4.id] + + expected_projects = [proj_route1.id, proj_route2.id, proj_route3.id, proj_route4.id] + expect(routes.where.not(namespace_id: nil).pluck(:id)).to match_array(expected_projects) + expect(routes.where.not(namespace_id: nil).pluck(:namespace_id)).to match_array(expected_namespaces) + end + + it 'tracks timings of queries' do + expect(migration.batch_metrics.timings).to be_empty + + expect { perform_migration }.to change { migration.batch_metrics.timings } + end +end diff --git a/spec/lib/gitlab/background_migration/backfill_work_item_type_id_for_issues_spec.rb b/spec/lib/gitlab/background_migration/backfill_work_item_type_id_for_issues_spec.rb new file mode 100644 index 00000000000..8d82c533d20 --- /dev/null +++ b/spec/lib/gitlab/background_migration/backfill_work_item_type_id_for_issues_spec.rb @@ -0,0 +1,67 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::BackfillWorkItemTypeIdForIssues, :migration, schema: 20220326161803 do + subject(:migrate) { migration.perform(start_id, end_id, batch_table, batch_column, sub_batch_size, pause_ms, issue_type_enum[:issue], issue_type.id) } + + let(:migration) { described_class.new } + + let(:batch_table) { 'issues' } + let(:batch_column) { 'id' } + let(:sub_batch_size) { 2 } + let(:pause_ms) { 0 } + + # let_it_be can't be used in migration specs because all tables but `work_item_types` are deleted after each spec + let(:issue_type_enum) { { issue: 0, incident: 1, test_case: 2, requirement: 3, task: 4 } } + let(:namespace) { table(:namespaces).create!(name: 'namespace', path: 'namespace') } + 
let(:project) { table(:projects).create!(namespace_id: namespace.id) } + let(:issues_table) { table(:issues) } + let(:issue_type) { table(:work_item_types).find_by!(namespace_id: nil, base_type: issue_type_enum[:issue]) } + + let(:issue1) { issues_table.create!(project_id: project.id, issue_type: issue_type_enum[:issue]) } + let(:issue2) { issues_table.create!(project_id: project.id, issue_type: issue_type_enum[:issue]) } + let(:issue3) { issues_table.create!(project_id: project.id, issue_type: issue_type_enum[:issue]) } + let(:incident1) { issues_table.create!(project_id: project.id, issue_type: issue_type_enum[:incident]) } + # test_case and requirement are EE only, but enum values exist on the FOSS model + let(:test_case1) { issues_table.create!(project_id: project.id, issue_type: issue_type_enum[:test_case]) } + let(:requirement1) { issues_table.create!(project_id: project.id, issue_type: issue_type_enum[:requirement]) } + + let(:start_id) { issue1.id } + let(:end_id) { requirement1.id } + + let(:all_issues) { [issue1, issue2, issue3, incident1, test_case1, requirement1] } + + it 'sets work_item_type_id only for the given type' do + expect(all_issues).to all(have_attributes(work_item_type_id: nil)) + + expect { migrate }.to make_queries_matching(/UPDATE \"issues\" SET "work_item_type_id"/, 2) + all_issues.each(&:reload) + + expect([issue1, issue2, issue3]).to all(have_attributes(work_item_type_id: issue_type.id)) + expect(all_issues - [issue1, issue2, issue3]).to all(have_attributes(work_item_type_id: nil)) + end + + it 'tracks timings of queries' do + expect(migration.batch_metrics.timings).to be_empty + + expect { migrate }.to change { migration.batch_metrics.timings } + end + + context 'when database timeouts' do + using RSpec::Parameterized::TableSyntax + + where(error_class: [ActiveRecord::StatementTimeout, ActiveRecord::QueryCanceled]) + + with_them do + it 'retries on timeout error' do + expect(migration).to receive(:update_batch).exactly(3).times.and_raise(error_class) + expect(migration).to receive(:sleep).with(30).twice + + expect do + migrate + end.to raise_error(error_class) + end + end + end +end diff --git a/spec/lib/gitlab/background_migration/batching_strategies/backfill_issue_work_item_type_batching_strategy_spec.rb b/spec/lib/gitlab/background_migration/batching_strategies/backfill_issue_work_item_type_batching_strategy_spec.rb new file mode 100644 index 00000000000..3cba99bfe51 --- /dev/null +++ b/spec/lib/gitlab/background_migration/batching_strategies/backfill_issue_work_item_type_batching_strategy_spec.rb @@ -0,0 +1,135 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::BatchingStrategies::BackfillIssueWorkItemTypeBatchingStrategy, '#next_batch', schema: 20220326161803 do # rubocop:disable Layout/LineLength + # let! 
can't be used in migration specs because all tables but `work_item_types` are deleted after each spec + let!(:issue_type_enum) { { issue: 0, incident: 1, test_case: 2, requirement: 3, task: 4 } } + let!(:namespace) { table(:namespaces).create!(name: 'namespace', path: 'namespace') } + let!(:project) { table(:projects).create!(namespace_id: namespace.id) } + let!(:issues_table) { table(:issues) } + let!(:task_type) { table(:work_item_types).find_by!(namespace_id: nil, base_type: issue_type_enum[:task]) } + + let!(:issue1) { issues_table.create!(project_id: project.id, issue_type: issue_type_enum[:issue]) } + let!(:task1) { issues_table.create!(project_id: project.id, issue_type: issue_type_enum[:task]) } + let!(:issue2) { issues_table.create!(project_id: project.id, issue_type: issue_type_enum[:issue]) } + let!(:issue3) { issues_table.create!(project_id: project.id, issue_type: issue_type_enum[:issue]) } + let!(:task2) { issues_table.create!(project_id: project.id, issue_type: issue_type_enum[:task]) } + let!(:incident1) { issues_table.create!(project_id: project.id, issue_type: issue_type_enum[:incident]) } + # test_case is EE only, but enum values exist on the FOSS model + let!(:test_case1) { issues_table.create!(project_id: project.id, issue_type: issue_type_enum[:test_case]) } + + let!(:task3) do + issues_table.create!(project_id: project.id, issue_type: issue_type_enum[:task], work_item_type_id: task_type.id) + end + + let!(:task4) { issues_table.create!(project_id: project.id, issue_type: issue_type_enum[:task]) } + + let!(:batching_strategy) { described_class.new(connection: ActiveRecord::Base.connection) } + + context 'when issue_type is issue' do + let(:job_arguments) { [issue_type_enum[:issue], 'irrelevant_work_item_id'] } + + context 'when starting on the first batch' do + it 'returns the bounds of the next batch' do + batch_bounds = next_batch(issue1.id, 2) + + expect(batch_bounds).to match_array([issue1.id, issue2.id]) + end + end + + context 'when additional batches remain' do + it 'returns the bounds of the next batch' do + batch_bounds = next_batch(issue2.id, 2) + + expect(batch_bounds).to match_array([issue2.id, issue3.id]) + end + end + + context 'when on the final batch' do + it 'returns the bounds of the next batch' do + batch_bounds = next_batch(issue3.id, 2) + + expect(batch_bounds).to match_array([issue3.id, issue3.id]) + end + end + + context 'when no additional batches remain' do + it 'returns nil' do + batch_bounds = next_batch(issue3.id + 1, 1) + + expect(batch_bounds).to be_nil + end + end + end + + context 'when issue_type is incident' do + let(:job_arguments) { [issue_type_enum[:incident], 'irrelevant_work_item_id'] } + + context 'when starting on the first batch' do + it 'returns the bounds of the next batch with only one element' do + batch_bounds = next_batch(incident1.id, 2) + + expect(batch_bounds).to match_array([incident1.id, incident1.id]) + end + end + end + + context 'when issue_type is requirement and there are no matching records' do + let(:job_arguments) { [issue_type_enum[:requirement], 'irrelevant_work_item_id'] } + + context 'when starting on the first batch' do + it 'returns nil' do + batch_bounds = next_batch(1, 2) + + expect(batch_bounds).to be_nil + end + end + end + + context 'when issue_type is task' do + let(:job_arguments) { [issue_type_enum[:task], 'irrelevant_work_item_id'] } + + context 'when starting on the first batch' do + it 'returns the bounds of the next batch' do + batch_bounds = next_batch(task1.id, 2) + + 
expect(batch_bounds).to match_array([task1.id, task2.id]) + end + end + + context 'when additional batches remain' do + it 'returns the bounds of the next batch, does not skip records where FK is already set' do + batch_bounds = next_batch(task2.id, 2) + + expect(batch_bounds).to match_array([task2.id, task3.id]) + end + end + + context 'when on the final batch' do + it 'returns the bounds of the next batch' do + batch_bounds = next_batch(task4.id, 2) + + expect(batch_bounds).to match_array([task4.id, task4.id]) + end + end + + context 'when no additional batches remain' do + it 'returns nil' do + batch_bounds = next_batch(task4.id + 1, 1) + + expect(batch_bounds).to be_nil + end + end + end + + def next_batch(min_value, batch_size) + batching_strategy.next_batch( + :issues, + :id, + batch_min_value: min_value, + batch_size: batch_size, + job_arguments: job_arguments + ) + end +end diff --git a/spec/lib/gitlab/background_migration/batching_strategies/backfill_project_namespace_per_group_batching_strategy_spec.rb b/spec/lib/gitlab/background_migration/batching_strategies/backfill_project_namespace_per_group_batching_strategy_spec.rb index b01dd5b410e..dc0935efa94 100644 --- a/spec/lib/gitlab/background_migration/batching_strategies/backfill_project_namespace_per_group_batching_strategy_spec.rb +++ b/spec/lib/gitlab/background_migration/batching_strategies/backfill_project_namespace_per_group_batching_strategy_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::BatchingStrategies::BackfillProjectNamespacePerGroupBatchingStrategy, '#next_batch' do +RSpec.describe Gitlab::BackgroundMigration::BatchingStrategies::BackfillProjectNamespacePerGroupBatchingStrategy, '#next_batch', :migration, schema: 20220326161803 do let!(:namespaces) { table(:namespaces) } let!(:projects) { table(:projects) } let!(:background_migrations) { table(:batched_background_migrations) } diff --git a/spec/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy_spec.rb b/spec/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy_spec.rb index 4e0ebd4b692..521e2067744 100644 --- a/spec/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy_spec.rb +++ b/spec/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy_spec.rb @@ -15,7 +15,7 @@ RSpec.describe Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchi context 'when starting on the first batch' do it 'returns the bounds of the next batch' do - batch_bounds = batching_strategy.next_batch(:namespaces, :id, batch_min_value: namespace1.id, batch_size: 3, job_arguments: nil) + batch_bounds = batching_strategy.next_batch(:namespaces, :id, batch_min_value: namespace1.id, batch_size: 3, job_arguments: []) expect(batch_bounds).to eq([namespace1.id, namespace3.id]) end @@ -23,7 +23,7 @@ RSpec.describe Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchi context 'when additional batches remain' do it 'returns the bounds of the next batch' do - batch_bounds = batching_strategy.next_batch(:namespaces, :id, batch_min_value: namespace2.id, batch_size: 3, job_arguments: nil) + batch_bounds = batching_strategy.next_batch(:namespaces, :id, batch_min_value: namespace2.id, batch_size: 3, job_arguments: []) expect(batch_bounds).to eq([namespace2.id, namespace4.id]) end @@ -31,7 +31,7 @@ RSpec.describe Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchi context 'when on the final batch' do it 'returns the 
bounds of the next batch' do - batch_bounds = batching_strategy.next_batch(:namespaces, :id, batch_min_value: namespace4.id, batch_size: 3, job_arguments: nil) + batch_bounds = batching_strategy.next_batch(:namespaces, :id, batch_min_value: namespace4.id, batch_size: 3, job_arguments: []) expect(batch_bounds).to eq([namespace4.id, namespace4.id]) end @@ -39,9 +39,30 @@ RSpec.describe Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchi context 'when no additional batches remain' do it 'returns nil' do - batch_bounds = batching_strategy.next_batch(:namespaces, :id, batch_min_value: namespace4.id + 1, batch_size: 1, job_arguments: nil) + batch_bounds = batching_strategy.next_batch(:namespaces, :id, batch_min_value: namespace4.id + 1, batch_size: 1, job_arguments: []) expect(batch_bounds).to be_nil end end + + context 'additional filters' do + let(:strategy_with_filters) do + Class.new(described_class) do + def apply_additional_filters(relation, job_arguments:) + min_id = job_arguments.first + + relation.where.not(type: 'Project').where('id >= ?', min_id) + end + end + end + + let(:batching_strategy) { strategy_with_filters.new(connection: ActiveRecord::Base.connection) } + let!(:namespace5) { namespaces.create!(name: 'batchtest5', path: 'batch-test5', type: 'Project') } + + it 'applies additional filters' do + batch_bounds = batching_strategy.next_batch(:namespaces, :id, batch_min_value: namespace4.id, batch_size: 3, job_arguments: [1]) + + expect(batch_bounds).to eq([namespace4.id, namespace4.id]) + end + end end diff --git a/spec/lib/gitlab/background_migration/cleanup_draft_data_from_faulty_regex_spec.rb b/spec/lib/gitlab/background_migration/cleanup_draft_data_from_faulty_regex_spec.rb new file mode 100644 index 00000000000..d1ef7ca2188 --- /dev/null +++ b/spec/lib/gitlab/background_migration/cleanup_draft_data_from_faulty_regex_spec.rb @@ -0,0 +1,54 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::CleanupDraftDataFromFaultyRegex, :migration, schema: 20220326161803 do + let(:namespaces) { table(:namespaces) } + let(:projects) { table(:projects) } + let(:merge_requests) { table(:merge_requests) } + + let(:group) { namespaces.create!(name: 'gitlab', path: 'gitlab') } + let(:project) { projects.create!(namespace_id: group.id) } + + let(:draft_prefixes) { ["[Draft]", "(Draft)", "Draft:", "Draft", "[WIP]", "WIP:", "WIP"] } + + def create_merge_request(params) + common_params = { + target_project_id: project.id, + target_branch: 'feature1', + source_branch: 'master' + } + + merge_requests.create!(common_params.merge(params)) + end + + context "mr.draft == true, and title matches the leaky regex and not the corrected regex" do + let(:mr_ids) { merge_requests.all.collect(&:id) } + + before do + draft_prefixes.each do |prefix| + (1..4).each do |n| + create_merge_request( + title: "#{prefix} This is a title", + draft: true, + state_id: 1 + ) + end + end + + create_merge_request(title: "This has draft in the title", draft: true, state_id: 1) + end + + it "updates all open draft merge request's draft field to true" do + expect { subject.perform(mr_ids.first, mr_ids.last) } + .to change { MergeRequest.where(draft: true).count } + .by(-1) + end + + it "marks successful slices as completed" do + expect(subject).to receive(:mark_job_as_succeeded).with(mr_ids.first, mr_ids.last) + + subject.perform(mr_ids.first, mr_ids.last) + end + end +end diff --git 
a/spec/lib/gitlab/background_migration/disable_expiration_policies_linked_to_no_container_images_spec.rb b/spec/lib/gitlab/background_migration/disable_expiration_policies_linked_to_no_container_images_spec.rb index 04eb9ad475f..8a63673bf38 100644 --- a/spec/lib/gitlab/background_migration/disable_expiration_policies_linked_to_no_container_images_spec.rb +++ b/spec/lib/gitlab/background_migration/disable_expiration_policies_linked_to_no_container_images_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::DisableExpirationPoliciesLinkedToNoContainerImages do +RSpec.describe Gitlab::BackgroundMigration::DisableExpirationPoliciesLinkedToNoContainerImages, :migration, schema: 20220326161803 do # rubocop:disable Layout/LineLength let_it_be(:projects) { table(:projects) } let_it_be(:container_expiration_policies) { table(:container_expiration_policies) } let_it_be(:container_repositories) { table(:container_repositories) } diff --git a/spec/lib/gitlab/background_migration/encrypt_static_object_token_spec.rb b/spec/lib/gitlab/background_migration/encrypt_static_object_token_spec.rb index 94d9f4509a7..4e7b97d33f6 100644 --- a/spec/lib/gitlab/background_migration/encrypt_static_object_token_spec.rb +++ b/spec/lib/gitlab/background_migration/encrypt_static_object_token_spec.rb @@ -39,6 +39,14 @@ RSpec.describe Gitlab::BackgroundMigration::EncryptStaticObjectToken do expect(new_state[user_with_encrypted_token.id]).to match_array([nil, 'encrypted']) end + context 'when id range does not include existing user ids' do + let(:arguments) { [non_existing_record_id, non_existing_record_id.succ] } + + it_behaves_like 'marks background migration job records' do + subject { described_class.new } + end + end + private def create_user!(name:, token: nil, encrypted_token: nil) diff --git a/spec/lib/gitlab/background_migration/fix_duplicate_project_name_and_path_spec.rb b/spec/lib/gitlab/background_migration/fix_duplicate_project_name_and_path_spec.rb new file mode 100644 index 00000000000..65663d26f37 --- /dev/null +++ b/spec/lib/gitlab/background_migration/fix_duplicate_project_name_and_path_spec.rb @@ -0,0 +1,65 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::FixDuplicateProjectNameAndPath, :migration, schema: 20220325155953 do + let(:migration) { described_class.new } + let(:namespaces) { table(:namespaces) } + let(:projects) { table(:projects) } + let(:routes) { table(:routes) } + + let(:namespace1) { namespaces.create!(name: 'batchtest1', type: 'Group', path: 'batch-test1') } + let(:namespace2) { namespaces.create!(name: 'batchtest2', type: 'Group', parent_id: namespace1.id, path: 'batch-test2') } + let(:namespace3) { namespaces.create!(name: 'batchtest3', type: 'Group', parent_id: namespace2.id, path: 'batch-test3') } + + let(:project_namespace2) { namespaces.create!(name: 'project2', path: 'project2', type: 'Project', parent_id: namespace2.id, visibility_level: 20) } + let(:project_namespace3) { namespaces.create!(name: 'project3', path: 'project3', type: 'Project', parent_id: namespace3.id, visibility_level: 20) } + + let(:project1) { projects.create!(name: 'project1', path: 'project1', namespace_id: namespace1.id, visibility_level: 20) } + let(:project2) { projects.create!(name: 'project2', path: 'project2', namespace_id: namespace2.id, project_namespace_id: project_namespace2.id, visibility_level: 20) } + let(:project2_dup) { projects.create!(name: 'project2', path: 'project2', namespace_id: namespace2.id, 
visibility_level: 20) } + let(:project3) { projects.create!(name: 'project3', path: 'project3', namespace_id: namespace3.id, project_namespace_id: project_namespace3.id, visibility_level: 20) } + let(:project3_dup) { projects.create!(name: 'project3', path: 'project3', namespace_id: namespace3.id, visibility_level: 20) } + + let!(:namespace_route1) { routes.create!(path: 'batch-test1', source_id: namespace1.id, source_type: 'Namespace') } + let!(:namespace_route2) { routes.create!(path: 'batch-test1/batch-test2', source_id: namespace2.id, source_type: 'Namespace') } + let!(:namespace_route3) { routes.create!(path: 'batch-test1/batch-test3', source_id: namespace3.id, source_type: 'Namespace') } + + let!(:proj_route1) { routes.create!(path: 'batch-test1/project1', source_id: project1.id, source_type: 'Project') } + let!(:proj_route2) { routes.create!(path: 'batch-test1/batch-test2/project2', source_id: project2.id, source_type: 'Project') } + let!(:proj_route2_dup) { routes.create!(path: "batch-test1/batch-test2/project2-route-#{project2_dup.id}", source_id: project2_dup.id, source_type: 'Project') } + let!(:proj_route3) { routes.create!(path: 'batch-test1/batch-test3/project3', source_id: project3.id, source_type: 'Project') } + let!(:proj_route3_dup) { routes.create!(path: "batch-test1/batch-test3/project3-route-#{project3_dup.id}", source_id: project3_dup.id, source_type: 'Project') } + + subject(:perform_migration) { migration.perform(projects.minimum(:id), projects.maximum(:id)) } + + describe '#up' do + it 'creates missing project namespaces and renames duplicate projects', :aggregate_failures do + expect(namespaces.where(type: 'Project').count).to eq(2) + + perform_migration + + expect(namespaces.where(type: 'Project').count).to eq(5) + + expect(project1.reload.name).to eq("project1-#{project1.id}") + expect(project1.path).to eq('project1') + + expect(project2.reload.name).to eq('project2') + expect(project2.path).to eq('project2') + + expect(project2_dup.reload.name).to eq("project2-#{project2_dup.id}") + expect(project2_dup.path).to eq("project2-route-#{project2_dup.id}") + + expect(project3.reload.name).to eq("project3") + expect(project3.path).to eq("project3") + + expect(project3_dup.reload.name).to eq("project3-#{project3_dup.id}") + expect(project3_dup.path).to eq("project3-route-#{project3_dup.id}") + + projects.all.each do |pr| + project_namespace = namespaces.find(pr.project_namespace_id) + expect(project_namespace).to be_in_sync_with_project(pr) + end + end + end +end diff --git a/spec/lib/gitlab/background_migration/merge_topics_with_same_name_spec.rb b/spec/lib/gitlab/background_migration/merge_topics_with_same_name_spec.rb new file mode 100644 index 00000000000..254b4fea698 --- /dev/null +++ b/spec/lib/gitlab/background_migration/merge_topics_with_same_name_spec.rb @@ -0,0 +1,135 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::MergeTopicsWithSameName, schema: 20220223124428 do + def set_avatar(topic_id, avatar) + topic = ::Projects::Topic.find(topic_id) + topic.avatar = avatar + topic.save!
+ topic.avatar.absolute_path + end + + it 'merges project topics with same case insensitive name' do + namespaces = table(:namespaces) + projects = table(:projects) + topics = table(:topics) + project_topics = table(:project_topics) + + group = namespaces.create!(name: 'group', path: 'group') + project_1 = projects.create!(namespace_id: group.id, visibility_level: 20) + project_2 = projects.create!(namespace_id: group.id, visibility_level: 10) + project_3 = projects.create!(namespace_id: group.id, visibility_level: 0) + topic_1_keep = topics.create!( + name: 'topic1', + description: 'description 1 to keep', + total_projects_count: 2, + non_private_projects_count: 2 + ) + topic_1_remove = topics.create!( + name: 'TOPIC1', + description: 'description 1 to remove', + total_projects_count: 2, + non_private_projects_count: 1 + ) + topic_2_remove = topics.create!( + name: 'topic2', + total_projects_count: 0 + ) + topic_2_keep = topics.create!( + name: 'TOPIC2', + description: 'description 2 to keep', + total_projects_count: 1 + ) + topic_3_remove_1 = topics.create!( + name: 'topic3', + total_projects_count: 2, + non_private_projects_count: 1 + ) + topic_3_keep = topics.create!( + name: 'Topic3', + total_projects_count: 2, + non_private_projects_count: 2 + ) + topic_3_remove_2 = topics.create!( + name: 'TOPIC3', + description: 'description 3 to keep', + total_projects_count: 2, + non_private_projects_count: 1 + ) + topic_4_keep = topics.create!( + name: 'topic4' + ) + + project_topics_1 = [] + project_topics_3 = [] + project_topics_removed = [] + + project_topics_1 << project_topics.create!(topic_id: topic_1_keep.id, project_id: project_1.id) + project_topics_1 << project_topics.create!(topic_id: topic_1_keep.id, project_id: project_2.id) + project_topics_removed << project_topics.create!(topic_id: topic_1_remove.id, project_id: project_2.id) + project_topics_1 << project_topics.create!(topic_id: topic_1_remove.id, project_id: project_3.id) + + project_topics_3 << project_topics.create!(topic_id: topic_3_keep.id, project_id: project_1.id) + project_topics_3 << project_topics.create!(topic_id: topic_3_keep.id, project_id: project_2.id) + project_topics_removed << project_topics.create!(topic_id: topic_3_remove_1.id, project_id: project_1.id) + project_topics_3 << project_topics.create!(topic_id: topic_3_remove_1.id, project_id: project_3.id) + project_topics_removed << project_topics.create!(topic_id: topic_3_remove_2.id, project_id: project_1.id) + project_topics_removed << project_topics.create!(topic_id: topic_3_remove_2.id, project_id: project_3.id) + + avatar_paths = { + topic_1_keep: set_avatar(topic_1_keep.id, fixture_file_upload('spec/fixtures/avatars/avatar1.png')), + topic_1_remove: set_avatar(topic_1_remove.id, fixture_file_upload('spec/fixtures/avatars/avatar2.png')), + topic_2_remove: set_avatar(topic_2_remove.id, fixture_file_upload('spec/fixtures/avatars/avatar3.png')), + topic_3_remove_1: set_avatar(topic_3_remove_1.id, fixture_file_upload('spec/fixtures/avatars/avatar4.png')), + topic_3_remove_2: set_avatar(topic_3_remove_2.id, fixture_file_upload('spec/fixtures/avatars/avatar5.png')) + } + + subject.perform(%w[topic1 topic2 topic3 topic4]) + + # Topics + [topic_1_keep, topic_2_keep, topic_3_keep, topic_4_keep].each(&:reload) + expect(topic_1_keep.name).to eq('topic1') + expect(topic_1_keep.description).to eq('description 1 to keep') + expect(topic_1_keep.total_projects_count).to eq(3) + expect(topic_1_keep.non_private_projects_count).to eq(2) + expect(topic_2_keep.name).to 
eq('TOPIC2') + expect(topic_2_keep.description).to eq('description 2 to keep') + expect(topic_2_keep.total_projects_count).to eq(0) + expect(topic_2_keep.non_private_projects_count).to eq(0) + expect(topic_3_keep.name).to eq('Topic3') + expect(topic_3_keep.description).to eq('description 3 to keep') + expect(topic_3_keep.total_projects_count).to eq(3) + expect(topic_3_keep.non_private_projects_count).to eq(2) + expect(topic_4_keep.reload.name).to eq('topic4') + + [topic_1_remove, topic_2_remove, topic_3_remove_1, topic_3_remove_2].each do |topic| + expect { topic.reload }.to raise_error(ActiveRecord::RecordNotFound) + end + + # Topic avatars + expect(topic_1_keep.avatar).to eq('avatar1.png') + expect(File.exist?(::Projects::Topic.find(topic_1_keep.id).avatar.absolute_path)).to be_truthy + expect(topic_2_keep.avatar).to eq('avatar3.png') + expect(File.exist?(::Projects::Topic.find(topic_2_keep.id).avatar.absolute_path)).to be_truthy + expect(topic_3_keep.avatar).to eq('avatar4.png') + expect(File.exist?(::Projects::Topic.find(topic_3_keep.id).avatar.absolute_path)).to be_truthy + + [:topic_1_remove, :topic_2_remove, :topic_3_remove_1, :topic_3_remove_2].each do |topic| + expect(File.exist?(avatar_paths[topic])).to be_falsey + end + + # Project Topic assignments + project_topics_1.each do |project_topic| + expect(project_topic.reload.topic_id).to eq(topic_1_keep.id) + end + + project_topics_3.each do |project_topic| + expect(project_topic.reload.topic_id).to eq(topic_3_keep.id) + end + + project_topics_removed.each do |project_topic| + expect { project_topic.reload }.to raise_error(ActiveRecord::RecordNotFound) + end + end +end diff --git a/spec/lib/gitlab/background_migration/migrate_shimo_confluence_integration_category_spec.rb b/spec/lib/gitlab/background_migration/migrate_shimo_confluence_integration_category_spec.rb new file mode 100644 index 00000000000..8bc6bb8ae0a --- /dev/null +++ b/spec/lib/gitlab/background_migration/migrate_shimo_confluence_integration_category_spec.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::MigrateShimoConfluenceIntegrationCategory, schema: 20220326161803 do + let(:namespaces) { table(:namespaces) } + let(:projects) { table(:projects) } + let(:integrations) { table(:integrations) } + let(:perform) { described_class.new.perform(1, 5) } + + before do + namespace = namespaces.create!(name: 'test', path: 'test') + projects.create!(id: 1, namespace_id: namespace.id, name: 'gitlab', path: 'gitlab') + integrations.create!(id: 1, active: true, type_new: "Integrations::SlackSlashCommands", + category: 'chat', project_id: 1) + integrations.create!(id: 3, active: true, type_new: "Integrations::Confluence", category: 'common', project_id: 1) + integrations.create!(id: 5, active: true, type_new: "Integrations::Shimo", category: 'common', project_id: 1) + end + + describe '#up' do + it 'updates category to third_party_wiki for Shimo and Confluence' do + perform + + expect(integrations.where(category: 'third_party_wiki').count).to eq(2) + expect(integrations.where(category: 'chat').count).to eq(1) + end + end +end diff --git a/spec/lib/gitlab/background_migration/populate_container_repository_migration_plan_spec.rb b/spec/lib/gitlab/background_migration/populate_container_repository_migration_plan_spec.rb new file mode 100644 index 00000000000..0463f5a0c0d --- /dev/null +++ b/spec/lib/gitlab/background_migration/populate_container_repository_migration_plan_spec.rb @@ -0,0 +1,44 @@ +# 
frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::PopulateContainerRepositoryMigrationPlan, schema: 20220316202640 do + let_it_be(:container_repositories) { table(:container_repositories) } + let_it_be(:projects) { table(:projects) } + let_it_be(:namespaces) { table(:namespaces) } + let_it_be(:gitlab_subscriptions) { table(:gitlab_subscriptions) } + let_it_be(:plans) { table(:plans) } + let_it_be(:namespace_statistics) { table(:namespace_statistics) } + + let!(:namespace1) { namespaces.create!(id: 1, type: 'Group', name: 'group1', path: 'group1', traversal_ids: [1]) } + let!(:namespace2) { namespaces.create!(id: 2, type: 'Group', name: 'group2', path: 'group2', traversal_ids: [2]) } + let!(:namespace3) { namespaces.create!(id: 3, type: 'Group', name: 'group3', path: 'group3', traversal_ids: [3]) } + let!(:sub_namespace) { namespaces.create!(id: 4, type: 'Group', name: 'group3', path: 'group3', parent_id: 1, traversal_ids: [1, 4]) } + let!(:plan1) { plans.create!(id: 1, name: 'plan1') } + let!(:plan2) { plans.create!(id: 2, name: 'plan2') } + let!(:gitlab_subscription1) { gitlab_subscriptions.create!(id: 1, namespace_id: 1, hosted_plan_id: 1) } + let!(:gitlab_subscription2) { gitlab_subscriptions.create!(id: 2, namespace_id: 2, hosted_plan_id: 2) } + let!(:project1) { projects.create!(id: 1, name: 'project1', path: 'project1', namespace_id: 4) } + let!(:project2) { projects.create!(id: 2, name: 'project2', path: 'project2', namespace_id: 2) } + let!(:project3) { projects.create!(id: 3, name: 'project3', path: 'project3', namespace_id: 3) } + let!(:container_repository1) { container_repositories.create!(id: 1, name: 'cr1', project_id: 1) } + let!(:container_repository2) { container_repositories.create!(id: 2, name: 'cr2', project_id: 2) } + let!(:container_repository3) { container_repositories.create!(id: 3, name: 'cr3', project_id: 3) } + + let(:migration) { described_class.new } + + subject do + migration.perform(1, 4) + end + + it 'updates the migration_plan to match the actual plan', :aggregate_failures do + expect(Gitlab::Database::BackgroundMigrationJob).to receive(:mark_all_as_succeeded) + .with('PopulateContainerRepositoryMigrationPlan', [1, 4]).and_return(true) + + subject + + expect(container_repository1.reload.migration_plan).to eq('plan1') + expect(container_repository2.reload.migration_plan).to eq('plan2') + expect(container_repository3.reload.migration_plan).to eq(nil) + end +end diff --git a/spec/lib/gitlab/background_migration/populate_namespace_statistics_spec.rb b/spec/lib/gitlab/background_migration/populate_namespace_statistics_spec.rb new file mode 100644 index 00000000000..98b2bc437f3 --- /dev/null +++ b/spec/lib/gitlab/background_migration/populate_namespace_statistics_spec.rb @@ -0,0 +1,71 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::PopulateNamespaceStatistics do + let_it_be(:namespaces) { table(:namespaces) } + let_it_be(:namespace_statistics) { table(:namespace_statistics) } + let_it_be(:dependency_proxy_manifests) { table(:dependency_proxy_manifests) } + let_it_be(:dependency_proxy_blobs) { table(:dependency_proxy_blobs) } + + let!(:group1) { namespaces.create!(id: 10, type: 'Group', name: 'group1', path: 'group1') } + let!(:group2) { namespaces.create!(id: 20, type: 'Group', name: 'group2', path: 'group2') } + + let!(:group1_manifest) do + dependency_proxy_manifests.create!(group_id: 10, size: 20, file_name: 'test-file', file: 'test', digest: 'abc123') + 
end + + let!(:group2_manifest) do + dependency_proxy_manifests.create!(group_id: 20, size: 20, file_name: 'test-file', file: 'test', digest: 'abc123') + end + + let!(:group1_stats) { namespace_statistics.create!(id: 10, namespace_id: 10) } + + let(:ids) { namespaces.pluck(:id) } + let(:statistics) { [] } + + subject(:perform) { described_class.new.perform(ids, statistics) } + + it 'creates/updates all namespace_statistics and updates root storage statistics', :aggregate_failures do + expect(Namespaces::ScheduleAggregationWorker).to receive(:perform_async).with(group1.id) + expect(Namespaces::ScheduleAggregationWorker).to receive(:perform_async).with(group2.id) + + expect { perform }.to change(namespace_statistics, :count).from(1).to(2) + + namespace_statistics.all.each do |stat| + expect(stat.dependency_proxy_size).to eq 20 + expect(stat.storage_size).to eq 20 + end + end + + context 'when just a stat is passed' do + let(:statistics) { [:dependency_proxy_size] } + + it 'calls the statistics update service with just that stat' do + expect(Groups::UpdateStatisticsService) + .to receive(:new) + .with(anything, statistics: [:dependency_proxy_size]) + .twice.and_call_original + + perform + end + end + + context 'when a statistics update fails' do + before do + error_response = instance_double(ServiceResponse, message: 'an error', error?: true) + + allow_next_instance_of(Groups::UpdateStatisticsService) do |instance| + allow(instance).to receive(:execute).and_return(error_response) + end + end + + it 'logs an error' do + expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance| + expect(instance).to receive(:error).twice + end + + perform + end + end +end diff --git a/spec/lib/gitlab/background_migration/populate_vulnerability_reads_spec.rb b/spec/lib/gitlab/background_migration/populate_vulnerability_reads_spec.rb index a265fa95b23..3de84a4e880 100644 --- a/spec/lib/gitlab/background_migration/populate_vulnerability_reads_spec.rb +++ b/spec/lib/gitlab/background_migration/populate_vulnerability_reads_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::PopulateVulnerabilityReads do +RSpec.describe Gitlab::BackgroundMigration::PopulateVulnerabilityReads, :migration, schema: 20220326161803 do let(:vulnerabilities) { table(:vulnerabilities) } let(:vulnerability_reads) { table(:vulnerability_reads) } let(:vulnerabilities_findings) { table(:vulnerability_occurrences) } diff --git a/spec/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces_spec.rb b/spec/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces_spec.rb index 2c5de448fbc..2ad561ead87 100644 --- a/spec/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces_spec.rb +++ b/spec/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::ProjectNamespaces::BackfillProjectNamespaces, :migration do +RSpec.describe Gitlab::BackgroundMigration::ProjectNamespaces::BackfillProjectNamespaces, :migration, schema: 20220326161803 do include MigrationsHelpers context 'when migrating data', :aggregate_failures do diff --git a/spec/lib/gitlab/background_migration/remove_duplicate_vulnerabilities_findings_spec.rb b/spec/lib/gitlab/background_migration/remove_duplicate_vulnerabilities_findings_spec.rb index f6f4a3f6115..8003159f59e 100644 --- 
a/spec/lib/gitlab/background_migration/remove_duplicate_vulnerabilities_findings_spec.rb +++ b/spec/lib/gitlab/background_migration/remove_duplicate_vulnerabilities_findings_spec.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::RemoveDuplicateVulnerabilitiesFindings do +RSpec.describe Gitlab::BackgroundMigration::RemoveDuplicateVulnerabilitiesFindings, :migration, schema: 20220326161803 do let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') } let(:users) { table(:users) } let(:user) { create_user! } diff --git a/spec/lib/gitlab/background_migration/remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb b/spec/lib/gitlab/background_migration/remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb index 28aa9efde4f..07cff32304e 100644 --- a/spec/lib/gitlab/background_migration/remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb +++ b/spec/lib/gitlab/background_migration/remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::RemoveOccurrencePipelinesAndDuplicateVulnerabilitiesFindings do +RSpec.describe Gitlab::BackgroundMigration::RemoveOccurrencePipelinesAndDuplicateVulnerabilitiesFindings, :migration, schema: 20220326161803 do let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') } let(:users) { table(:users) } let(:user) { create_user! } diff --git a/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_encrypted_values_on_projects_spec.rb b/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_encrypted_values_on_projects_spec.rb index 6aea549b136..d02f7245c15 100644 --- a/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_encrypted_values_on_projects_spec.rb +++ b/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_encrypted_values_on_projects_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::ResetDuplicateCiRunnersTokenEncryptedValuesOnProjects do +RSpec.describe Gitlab::BackgroundMigration::ResetDuplicateCiRunnersTokenEncryptedValuesOnProjects, :migration, schema: 20220326161803 do # rubocop:disable Layout/LineLength let(:namespaces) { table(:namespaces) } let(:projects) { table(:projects) } diff --git a/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_values_on_projects_spec.rb b/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_values_on_projects_spec.rb index cbe762c2680..fd61047d851 100644 --- a/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_values_on_projects_spec.rb +++ b/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_values_on_projects_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::ResetDuplicateCiRunnersTokenValuesOnProjects do +RSpec.describe Gitlab::BackgroundMigration::ResetDuplicateCiRunnersTokenValuesOnProjects, :migration, schema: 20220326161803 do # rubocop:disable Layout/LineLength let(:namespaces) { table(:namespaces) } let(:projects) { table(:projects) } diff --git a/spec/lib/gitlab/blame_spec.rb b/spec/lib/gitlab/blame_spec.rb index e22399723ac..f636ce283ae 100644 --- a/spec/lib/gitlab/blame_spec.rb +++ b/spec/lib/gitlab/blame_spec.rb @@ -3,13 +3,31 @@ require 'spec_helper' RSpec.describe Gitlab::Blame do - let(:project) { 
create(:project, :repository) } + let_it_be(:project) { create(:project, :repository) } + let(:path) { 'files/ruby/popen.rb' } let(:commit) { project.commit('master') } let(:blob) { project.repository.blob_at(commit.id, path) } + let(:range) { nil } + + subject(:blame) { described_class.new(blob, commit, range: range) } + + describe '#first_line' do + subject { blame.first_line } + + it { is_expected.to eq(1) } + + context 'with a range' do + let(:range) { 2..3 } + + it { is_expected.to eq(range.first) } + end + end describe "#groups" do - let(:subject) { described_class.new(blob, commit).groups(highlight: false) } + let(:highlighted) { false } + + subject(:groups) { blame.groups(highlight: highlighted) } it 'groups lines properly' do expect(subject.count).to eq(18) @@ -22,5 +40,62 @@ RSpec.describe Gitlab::Blame do expect(subject[-1][:commit].sha).to eq('913c66a37b4a45b9769037c55c2d238bd0942d2e') expect(subject[-1][:lines]).to eq([" end", "end"]) end + + context 'with a range 1..5' do + let(:range) { 1..5 } + + it 'returns the correct lines' do + expect(groups.count).to eq(2) + expect(groups[0][:lines]).to eq(["require 'fileutils'", "require 'open3'", ""]) + expect(groups[1][:lines]).to eq(['module Popen', ' extend self']) + end + + context 'with highlighted lines' do + let(:highlighted) { true } + + it 'returns the correct lines' do + expect(groups.count).to eq(2) + expect(groups[0][:lines][0]).to match(/LC1.*fileutils/) + expect(groups[0][:lines][1]).to match(/LC2.*open3/) + expect(groups[0][:lines][2]).to eq("<span id=\"LC3\" class=\"line\" lang=\"ruby\"></span>\n") + expect(groups[1][:lines][0]).to match(/LC4.*Popen/) + expect(groups[1][:lines][1]).to match(/LC5.*extend/) + end + end + end + + context 'with a range 2..4' do + let(:range) { 2..4 } + + it 'returns the correct lines' do + expect(groups.count).to eq(2) + expect(groups[0][:lines]).to eq(["require 'open3'", ""]) + expect(groups[1][:lines]).to eq(['module Popen']) + end + + context 'with highlighted lines' do + let(:highlighted) { true } + + it 'returns the correct lines' do + expect(groups.count).to eq(2) + expect(groups[0][:lines][0]).to match(/LC2.*open3/) + expect(groups[0][:lines][1]).to eq("<span id=\"LC3\" class=\"line\" lang=\"ruby\"></span>\n") + expect(groups[1][:lines][0]).to match(/LC4.*Popen/) + end + end + end + + context 'renamed file' do + let(:path) { 'files/plain_text/renamed' } + let(:commit) { project.commit('blame-on-renamed') } + + it 'adds previous path' do + expect(subject[0][:previous_path]).to be nil + expect(subject[0][:lines]).to match_array(['Initial commit', 'Initial commit']) + + expect(subject[1][:previous_path]).to eq('files/plain_text/initial-commit') + expect(subject[1][:lines]).to match_array(['Renamed as "filename"']) + end + end end end diff --git a/spec/lib/gitlab/ci/build/image_spec.rb b/spec/lib/gitlab/ci/build/image_spec.rb index 71cd57d317c..630dfcd06bb 100644 --- a/spec/lib/gitlab/ci/build/image_spec.rb +++ b/spec/lib/gitlab/ci/build/image_spec.rb @@ -9,7 +9,7 @@ RSpec.describe Gitlab::Ci::Build::Image do subject { described_class.from_image(job) } context 'when image is defined in job' do - let(:image_name) { 'ruby:2.7' } + let(:image_name) { 'image:1.0' } let(:job) { create(:ci_build, options: { image: image_name } ) } context 'when image is defined as string' do diff --git a/spec/lib/gitlab/ci/config/entry/image_spec.rb b/spec/lib/gitlab/ci/config/entry/image_spec.rb index e810d65d560..e16a9a7a74a 100644 --- a/spec/lib/gitlab/ci/config/entry/image_spec.rb +++ 
b/spec/lib/gitlab/ci/config/entry/image_spec.rb @@ -6,11 +6,11 @@ RSpec.describe Gitlab::Ci::Config::Entry::Image do let(:entry) { described_class.new(config) } context 'when configuration is a string' do - let(:config) { 'ruby:2.7' } + let(:config) { 'image:1.0' } describe '#value' do it 'returns image hash' do - expect(entry.value).to eq({ name: 'ruby:2.7' }) + expect(entry.value).to eq({ name: 'image:1.0' }) end end @@ -28,7 +28,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Image do describe '#image' do it "returns image's name" do - expect(entry.name).to eq 'ruby:2.7' + expect(entry.name).to eq 'image:1.0' end end @@ -46,7 +46,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Image do end context 'when configuration is a hash' do - let(:config) { { name: 'ruby:2.7', entrypoint: %w(/bin/sh run) } } + let(:config) { { name: 'image:1.0', entrypoint: %w(/bin/sh run) } } describe '#value' do it 'returns image hash' do @@ -68,7 +68,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Image do describe '#image' do it "returns image's name" do - expect(entry.name).to eq 'ruby:2.7' + expect(entry.name).to eq 'image:1.0' end end @@ -80,7 +80,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Image do context 'when configuration has ports' do let(:ports) { [{ number: 80, protocol: 'http', name: 'foobar' }] } - let(:config) { { name: 'ruby:2.7', entrypoint: %w(/bin/sh run), ports: ports } } + let(:config) { { name: 'image:1.0', entrypoint: %w(/bin/sh run), ports: ports } } let(:entry) { described_class.new(config, with_image_ports: image_ports) } let(:image_ports) { false } @@ -112,7 +112,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Image do end context 'when entry value is not correct' do - let(:config) { ['ruby:2.7'] } + let(:config) { ['image:1.0'] } describe '#errors' do it 'saves errors' do @@ -129,7 +129,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Image do end context 'when unexpected key is specified' do - let(:config) { { name: 'ruby:2.7', non_existing: 'test' } } + let(:config) { { name: 'image:1.0', non_existing: 'test' } } describe '#errors' do it 'saves errors' do diff --git a/spec/lib/gitlab/ci/config/entry/reports/coverage_report_spec.rb b/spec/lib/gitlab/ci/config/entry/reports/coverage_report_spec.rb index 588f53150ff..0fd9a83a4fa 100644 --- a/spec/lib/gitlab/ci/config/entry/reports/coverage_report_spec.rb +++ b/spec/lib/gitlab/ci/config/entry/reports/coverage_report_spec.rb @@ -1,6 +1,6 @@ # frozen_string_literal: true -require 'fast_spec_helper' +require 'spec_helper' RSpec.describe Gitlab::Ci::Config::Entry::Reports::CoverageReport do let(:entry) { described_class.new(config) } diff --git a/spec/lib/gitlab/ci/config/entry/root_spec.rb b/spec/lib/gitlab/ci/config/entry/root_spec.rb index daf58aff116..b9c32bc51be 100644 --- a/spec/lib/gitlab/ci/config/entry/root_spec.rb +++ b/spec/lib/gitlab/ci/config/entry/root_spec.rb @@ -31,7 +31,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do let(:hash) do { before_script: %w(ls pwd), - image: 'ruby:2.7', + image: 'image:1.0', default: {}, services: ['postgres:9.1', 'mysql:5.5'], variables: { VAR: 'root', VAR2: { value: 'val 2', description: 'this is var 2' } }, @@ -154,7 +154,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do { name: :rspec, script: %w[rspec ls], before_script: %w(ls pwd), - image: { name: 'ruby:2.7' }, + image: { name: 'image:1.0' }, services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }], stage: 'test', cache: [{ key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' }], @@ -169,7 
+169,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do { name: :spinach, before_script: [], script: %w[spinach], - image: { name: 'ruby:2.7' }, + image: { name: 'image:1.0' }, services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }], stage: 'test', cache: [{ key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' }], @@ -186,7 +186,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do before_script: [], script: ["make changelog | tee release_changelog.txt"], release: { name: "Release $CI_TAG_NAME", tag_name: 'v0.06', description: "./release_changelog.txt" }, - image: { name: "ruby:2.7" }, + image: { name: "image:1.0" }, services: [{ name: "postgres:9.1" }, { name: "mysql:5.5" }], cache: [{ key: "k", untracked: true, paths: ["public/"], policy: "pull-push", when: 'on_success' }], only: { refs: %w(branches tags) }, @@ -206,7 +206,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do { before_script: %w(ls pwd), after_script: ['make clean'], default: { - image: 'ruby:2.7', + image: 'image:1.0', services: ['postgres:9.1', 'mysql:5.5'] }, variables: { VAR: 'root' }, @@ -233,7 +233,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do rspec: { name: :rspec, script: %w[rspec ls], before_script: %w(ls pwd), - image: { name: 'ruby:2.7' }, + image: { name: 'image:1.0' }, services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }], stage: 'test', cache: [{ key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' }], @@ -246,7 +246,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do spinach: { name: :spinach, before_script: [], script: %w[spinach], - image: { name: 'ruby:2.7' }, + image: { name: 'image:1.0' }, services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }], stage: 'test', cache: [{ key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' }], diff --git a/spec/lib/gitlab/ci/config/external/file/artifact_spec.rb b/spec/lib/gitlab/ci/config/external/file/artifact_spec.rb index b59fc95a8cc..9da8d106862 100644 --- a/spec/lib/gitlab/ci/config/external/file/artifact_spec.rb +++ b/spec/lib/gitlab/ci/config/external/file/artifact_spec.rb @@ -4,8 +4,9 @@ require 'spec_helper' RSpec.describe Gitlab::Ci::Config::External::File::Artifact do let(:parent_pipeline) { create(:ci_pipeline) } + let(:variables) {} let(:context) do - Gitlab::Ci::Config::External::Context.new(parent_pipeline: parent_pipeline) + Gitlab::Ci::Config::External::Context.new(variables: variables, parent_pipeline: parent_pipeline) end let(:external_file) { described_class.new(params, context) } @@ -29,14 +30,15 @@ RSpec.describe Gitlab::Ci::Config::External::File::Artifact do end describe '#valid?' do - shared_examples 'is invalid' do - it 'is not valid' do - expect(external_file).not_to be_valid - end + subject(:valid?) do + external_file.validate! + external_file.valid? 
+ end + shared_examples 'is invalid' do it 'sets the expected error' do - expect(external_file.errors) - .to contain_exactly(expected_error) + expect(valid?).to be_falsy + expect(external_file.errors).to contain_exactly(expected_error) end end @@ -148,7 +150,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Artifact do context 'when file is not empty' do it 'is valid' do - expect(external_file).to be_valid + expect(valid?).to be_truthy expect(external_file.content).to be_present end @@ -160,6 +162,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Artifact do user: anything } expect(context).to receive(:mutate).with(expected_attrs).and_call_original + external_file.validate! external_file.content end end @@ -168,6 +171,58 @@ RSpec.describe Gitlab::Ci::Config::External::File::Artifact do end end end + + context 'when job is provided as a variable' do + let(:variables) do + Gitlab::Ci::Variables::Collection.new([ + { key: 'VAR1', value: 'a_secret_variable_value', masked: true } + ]) + end + + let(:params) { { artifact: 'generated.yml', job: 'a_secret_variable_value' } } + + context 'when job does not exist in the parent pipeline' do + let(:expected_error) do + 'Job `xxxxxxxxxxxxxxxxxxxxxxx` not found in parent pipeline or does not have artifacts!' + end + + it_behaves_like 'is invalid' + end + end + end + end + + describe '#metadata' do + let(:params) { { artifact: 'generated.yml' } } + + subject(:metadata) { external_file.metadata } + + it { + is_expected.to eq( + context_project: nil, + context_sha: nil, + type: :artifact, + location: 'generated.yml', + extra: { job_name: nil } + ) + } + + context 'when job name includes a masked variable' do + let(:variables) do + Gitlab::Ci::Variables::Collection.new([{ key: 'VAR1', value: 'a_secret_variable_value', masked: true }]) + end + + let(:params) { { artifact: 'generated.yml', job: 'a_secret_variable_value' } } + + it { + is_expected.to eq( + context_project: nil, + context_sha: nil, + type: :artifact, + location: 'generated.yml', + extra: { job_name: 'xxxxxxxxxxxxxxxxxxxxxxx' } + ) + } end end end diff --git a/spec/lib/gitlab/ci/config/external/file/base_spec.rb b/spec/lib/gitlab/ci/config/external/file/base_spec.rb index 536f48ecba6..280bebe1a7c 100644 --- a/spec/lib/gitlab/ci/config/external/file/base_spec.rb +++ b/spec/lib/gitlab/ci/config/external/file/base_spec.rb @@ -17,7 +17,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Base do end end - subject { test_class.new(location, context) } + subject(:file) { test_class.new(location, context) } before do allow_any_instance_of(test_class) @@ -32,7 +32,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Base do let(:location) { 'some-location' } it 'returns true' do - expect(subject).to be_matching + expect(file).to be_matching end end @@ -40,40 +40,45 @@ RSpec.describe Gitlab::Ci::Config::External::File::Base do let(:location) { nil } it 'returns false' do - expect(subject).not_to be_matching + expect(file).not_to be_matching end end end describe '#valid?' do + subject(:valid?) do + file.validate! + file.valid? 
+ end + context 'when location is not a string' do let(:location) { %w(some/file.txt other/file.txt) } - it { is_expected.not_to be_valid } + it { is_expected.to be_falsy } end context 'when location is not a YAML file' do let(:location) { 'some/file.txt' } - it { is_expected.not_to be_valid } + it { is_expected.to be_falsy } end context 'when location has not a valid naming scheme' do let(:location) { 'some/file/.yml' } - it { is_expected.not_to be_valid } + it { is_expected.to be_falsy } end context 'when location is a valid .yml extension' do let(:location) { 'some/file/config.yml' } - it { is_expected.to be_valid } + it { is_expected.to be_truthy } end context 'when location is a valid .yaml extension' do let(:location) { 'some/file/config.yaml' } - it { is_expected.to be_valid } + it { is_expected.to be_truthy } end context 'when there are YAML syntax errors' do @@ -86,8 +91,8 @@ RSpec.describe Gitlab::Ci::Config::External::File::Base do end it 'is not a valid file' do - expect(subject).not_to be_valid - expect(subject.error_message).to eq('Included file `some/file/xxxxxxxxxxxxxxxx.yml` does not have valid YAML syntax!') + expect(valid?).to be_falsy + expect(file.error_message).to eq('Included file `some/file/xxxxxxxxxxxxxxxx.yml` does not have valid YAML syntax!') end end end @@ -103,8 +108,56 @@ RSpec.describe Gitlab::Ci::Config::External::File::Base do end it 'does expand hash to include the template' do - expect(subject.to_hash).to include(:before_script) + expect(file.to_hash).to include(:before_script) end end end + + describe '#metadata' do + let(:location) { 'some/file/config.yml' } + + subject(:metadata) { file.metadata } + + it { + is_expected.to eq( + context_project: nil, + context_sha: 'HEAD' + ) + } + end + + describe '#eql?' do + let(:location) { 'some/file/config.yml' } + + subject(:eql) { file.eql?(other_file) } + + context 'when the other file has the same params' do + let(:other_file) { test_class.new(location, context) } + + it { is_expected.to eq(true) } + end + + context 'when the other file has not the same params' do + let(:other_file) { test_class.new('some/other/file', context) } + + it { is_expected.to eq(false) } + end + end + + describe '#hash' do + let(:location) { 'some/file/config.yml' } + + subject(:filehash) { file.hash } + + context 'with a project' do + let(:project) { create(:project) } + let(:context_params) { { project: project, sha: 'HEAD', variables: variables } } + + it { is_expected.to eq([location, project.full_path, 'HEAD'].hash) } + end + + context 'without a project' do + it { is_expected.to eq([location, nil, 'HEAD'].hash) } + end + end end diff --git a/spec/lib/gitlab/ci/config/external/file/local_spec.rb b/spec/lib/gitlab/ci/config/external/file/local_spec.rb index b9314dfc44e..c0a0b0009ce 100644 --- a/spec/lib/gitlab/ci/config/external/file/local_spec.rb +++ b/spec/lib/gitlab/ci/config/external/file/local_spec.rb @@ -55,6 +55,11 @@ RSpec.describe Gitlab::Ci::Config::External::File::Local do end describe '#valid?' do + subject(:valid?) do + local_file.validate! + local_file.valid? 
+ end + context 'when is a valid local path' do let(:location) { '/lib/gitlab/ci/templates/existent-file.yml' } @@ -62,25 +67,19 @@ RSpec.describe Gitlab::Ci::Config::External::File::Local do allow_any_instance_of(described_class).to receive(:fetch_local_content).and_return("image: 'ruby2:2'") end - it 'returns true' do - expect(local_file.valid?).to be_truthy - end + it { is_expected.to be_truthy } end context 'when it is not a valid local path' do let(:location) { '/lib/gitlab/ci/templates/non-existent-file.yml' } - it 'returns false' do - expect(local_file.valid?).to be_falsy - end + it { is_expected.to be_falsy } end context 'when it is not a yaml file' do let(:location) { '/config/application.rb' } - it 'returns false' do - expect(local_file.valid?).to be_falsy - end + it { is_expected.to be_falsy } end context 'when it is an empty file' do @@ -89,6 +88,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Local do it 'returns false and adds an error message about an empty file' do allow_any_instance_of(described_class).to receive(:fetch_local_content).and_return("") + local_file.validate! expect(local_file.errors).to include("Local file `/lib/gitlab/ci/templates/xxxxxx/existent-file.yml` is empty!") end end @@ -98,7 +98,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Local do let(:sha) { ':' } it 'returns false and adds an error message stating that included file does not exist' do - expect(local_file).not_to be_valid + expect(valid?).to be_falsy expect(local_file.errors).to include("Sha #{sha} is not valid!") end end @@ -140,6 +140,10 @@ RSpec.describe Gitlab::Ci::Config::External::File::Local do let(:location) { '/lib/gitlab/ci/templates/secret_file.yml' } let(:variables) { Gitlab::Ci::Variables::Collection.new([{ 'key' => 'GITLAB_TOKEN', 'value' => 'secret_file', 'masked' => true }]) } + before do + local_file.validate! + end + it 'returns an error message' do expect(local_file.error_message).to eq("Local file `/lib/gitlab/ci/templates/xxxxxxxxxxx.yml` does not exist!") end @@ -174,6 +178,8 @@ RSpec.describe Gitlab::Ci::Config::External::File::Local do allow(project.repository).to receive(:blob_data_at).with(sha, another_location) .and_return(another_content) + + local_file.validate! end it 'does expand hash to include the template' do @@ -181,4 +187,20 @@ RSpec.describe Gitlab::Ci::Config::External::File::Local do end end end + + describe '#metadata' do + let(:location) { '/lib/gitlab/ci/templates/existent-file.yml' } + + subject(:metadata) { local_file.metadata } + + it { + is_expected.to eq( + context_project: project.full_path, + context_sha: '12345', + type: :local, + location: location, + extra: {} + ) + } + end end diff --git a/spec/lib/gitlab/ci/config/external/file/project_spec.rb b/spec/lib/gitlab/ci/config/external/file/project_spec.rb index 74720c0a3ca..5d3412a148b 100644 --- a/spec/lib/gitlab/ci/config/external/file/project_spec.rb +++ b/spec/lib/gitlab/ci/config/external/file/project_spec.rb @@ -66,6 +66,11 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do end describe '#valid?' do + subject(:valid?) do + project_file.validate! + project_file.valid? 
+ end + context 'when a valid path is used' do let(:params) do { project: project.full_path, file: '/file.yml' } @@ -74,18 +79,16 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do let(:root_ref_sha) { project.repository.root_ref_sha } before do - stub_project_blob(root_ref_sha, '/file.yml') { 'image: ruby:2.7' } + stub_project_blob(root_ref_sha, '/file.yml') { 'image: image:1.0' } end - it 'returns true' do - expect(project_file).to be_valid - end + it { is_expected.to be_truthy } context 'when user does not have permission to access file' do let(:context_user) { create(:user) } it 'returns false' do - expect(project_file).not_to be_valid + expect(valid?).to be_falsy expect(project_file.error_message).to include("Project `#{project.full_path}` not found or access denied!") end end @@ -99,12 +102,10 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do let(:ref_sha) { project.commit('master').sha } before do - stub_project_blob(ref_sha, '/file.yml') { 'image: ruby:2.7' } + stub_project_blob(ref_sha, '/file.yml') { 'image: image:1.0' } end - it 'returns true' do - expect(project_file).to be_valid - end + it { is_expected.to be_truthy } end context 'when an empty file is used' do @@ -120,7 +121,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do end it 'returns false' do - expect(project_file).not_to be_valid + expect(valid?).to be_falsy expect(project_file.error_message).to include("Project `#{project.full_path}` file `/xxxxxxxxxxx.yml` is empty!") end end @@ -131,7 +132,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do end it 'returns false' do - expect(project_file).not_to be_valid + expect(valid?).to be_falsy expect(project_file.error_message).to include("Project `#{project.full_path}` reference `I-Do-Not-Exist` does not exist!") end end @@ -144,7 +145,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do end it 'returns false' do - expect(project_file).not_to be_valid + expect(valid?).to be_falsy expect(project_file.error_message).to include("Project `#{project.full_path}` file `/xxxxxxxxxxxxxxxxxxx.yml` does not exist!") end end @@ -155,10 +156,27 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do end it 'returns false' do - expect(project_file).not_to be_valid + expect(valid?).to be_falsy expect(project_file.error_message).to include('Included file `/invalid-file` does not have YAML extension!') end end + + context 'when non-existing project is used with a masked variable' do + let(:variables) do + Gitlab::Ci::Variables::Collection.new([ + { key: 'VAR1', value: 'a_secret_variable_value', masked: true } + ]) + end + + let(:params) do + { project: 'a_secret_variable_value', file: '/file.yml' } + end + + it 'returns false with masked project name' do + expect(valid?).to be_falsy + expect(project_file.error_message).to include("Project `xxxxxxxxxxxxxxxxxxxxxxx` not found or access denied!") + end + end end describe '#expand_context' do @@ -176,6 +194,45 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do end end + describe '#metadata' do + let(:params) do + { project: project.full_path, file: '/file.yml' } + end + + subject(:metadata) { project_file.metadata } + + it { + is_expected.to eq( + context_project: context_project.full_path, + context_sha: '12345', + type: :file, + location: '/file.yml', + extra: { project: project.full_path, ref: 'HEAD' } + ) + } + + context 'when project name and ref include masked variables' do + let(:variables) do + Gitlab::Ci::Variables::Collection.new([ + { 
key: 'VAR1', value: 'a_secret_variable_value1', masked: true }, + { key: 'VAR2', value: 'a_secret_variable_value2', masked: true } + ]) + end + + let(:params) { { project: 'a_secret_variable_value1', ref: 'a_secret_variable_value2', file: '/file.yml' } } + + it { + is_expected.to eq( + context_project: context_project.full_path, + context_sha: '12345', + type: :file, + location: '/file.yml', + extra: { project: 'xxxxxxxxxxxxxxxxxxxxxxxx', ref: 'xxxxxxxxxxxxxxxxxxxxxxxx' } + ) + } + end + end + private def stub_project_blob(ref, path) diff --git a/spec/lib/gitlab/ci/config/external/file/remote_spec.rb b/spec/lib/gitlab/ci/config/external/file/remote_spec.rb index 2613bfbfdcf..5c07c87fd5a 100644 --- a/spec/lib/gitlab/ci/config/external/file/remote_spec.rb +++ b/spec/lib/gitlab/ci/config/external/file/remote_spec.rb @@ -54,22 +54,23 @@ RSpec.describe Gitlab::Ci::Config::External::File::Remote do end describe "#valid?" do + subject(:valid?) do + remote_file.validate! + remote_file.valid? + end + context 'when is a valid remote url' do before do stub_full_request(location).to_return(body: remote_file_content) end - it 'returns true' do - expect(remote_file.valid?).to be_truthy - end + it { is_expected.to be_truthy } end context 'with an irregular url' do let(:location) { 'not-valid://gitlab.com/gitlab-org/gitlab-foss/blob/1234/.gitlab-ci-1.yml' } - it 'returns false' do - expect(remote_file.valid?).to be_falsy - end + it { is_expected.to be_falsy } end context 'with a timeout' do @@ -77,25 +78,19 @@ RSpec.describe Gitlab::Ci::Config::External::File::Remote do allow(Gitlab::HTTP).to receive(:get).and_raise(Timeout::Error) end - it 'is falsy' do - expect(remote_file.valid?).to be_falsy - end + it { is_expected.to be_falsy } end context 'when is not a yaml file' do let(:location) { 'https://asdasdasdaj48ggerexample.com' } - it 'is falsy' do - expect(remote_file.valid?).to be_falsy - end + it { is_expected.to be_falsy } end context 'with an internal url' do let(:location) { 'http://localhost:8080' } - it 'is falsy' do - expect(remote_file.valid?).to be_falsy - end + it { is_expected.to be_falsy } end end @@ -142,7 +137,10 @@ RSpec.describe Gitlab::Ci::Config::External::File::Remote do end describe "#error_message" do - subject { remote_file.error_message } + subject(:error_message) do + remote_file.validate! + remote_file.error_message + end context 'when remote file location is not valid' do let(:location) { 'not-valid://gitlab.com/gitlab-org/gitlab-foss/blob/1234/?secret_file.yml' } @@ -201,4 +199,22 @@ RSpec.describe Gitlab::Ci::Config::External::File::Remote do is_expected.to be_empty end end + + describe '#metadata' do + before do + stub_full_request(location).to_return(body: remote_file_content) + end + + subject(:metadata) { remote_file.metadata } + + it { + is_expected.to eq( + context_project: nil, + context_sha: '12345', + type: :remote, + location: 'https://gitlab.com/gitlab-org/gitlab-foss/blob/1234/.xxxxxxxxxxx.yml', + extra: {} + ) + } + end end diff --git a/spec/lib/gitlab/ci/config/external/file/template_spec.rb b/spec/lib/gitlab/ci/config/external/file/template_spec.rb index 66a06de3d28..4da9a933a9f 100644 --- a/spec/lib/gitlab/ci/config/external/file/template_spec.rb +++ b/spec/lib/gitlab/ci/config/external/file/template_spec.rb @@ -45,12 +45,15 @@ RSpec.describe Gitlab::Ci::Config::External::File::Template do end describe "#valid?" do + subject(:valid?) do + template_file.validate! + template_file.valid? 
+ end + context 'when is a valid template name' do let(:template) { 'Auto-DevOps.gitlab-ci.yml' } - it 'returns true' do - expect(template_file).to be_valid - end + it { is_expected.to be_truthy } end context 'with invalid template name' do @@ -59,7 +62,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Template do let(:context_params) { { project: project, sha: '12345', user: user, variables: variables } } it 'returns false' do - expect(template_file).not_to be_valid + expect(valid?).to be_falsy expect(template_file.error_message).to include('`xxxxxxxxxxxxxx.yml` is not a valid location!') end end @@ -68,7 +71,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Template do let(:template) { 'I-Do-Not-Have-This-Template.gitlab-ci.yml' } it 'returns false' do - expect(template_file).not_to be_valid + expect(valid?).to be_falsy expect(template_file.error_message).to include('Included file `I-Do-Not-Have-This-Template.gitlab-ci.yml` is empty or does not exist!') end end @@ -111,4 +114,18 @@ RSpec.describe Gitlab::Ci::Config::External::File::Template do is_expected.to be_empty end end + + describe '#metadata' do + subject(:metadata) { template_file.metadata } + + it { + is_expected.to eq( + context_project: project.full_path, + context_sha: '12345', + type: :template, + location: template, + extra: {} + ) + } + end end diff --git a/spec/lib/gitlab/ci/config/external/mapper_spec.rb b/spec/lib/gitlab/ci/config/external/mapper_spec.rb index f69feba5e59..2d2adf09a42 100644 --- a/spec/lib/gitlab/ci/config/external/mapper_spec.rb +++ b/spec/lib/gitlab/ci/config/external/mapper_spec.rb @@ -17,10 +17,12 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do let(:file_content) do <<~HEREDOC - image: 'ruby:2.7' + image: 'image:1.0' HEREDOC end + subject(:mapper) { described_class.new(values, context) } + before do stub_full_request(remote_url).to_return(body: file_content) @@ -30,13 +32,13 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do end describe '#process' do - subject { described_class.new(values, context).process } + subject(:process) { mapper.process } context "when single 'include' keyword is defined" do context 'when the string is a local file' do let(:values) do { include: local_file, - image: 'ruby:2.7' } + image: 'image:1.0' } end it 'returns File instances' do @@ -48,7 +50,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do context 'when the key is a local file hash' do let(:values) do { include: { 'local' => local_file }, - image: 'ruby:2.7' } + image: 'image:1.0' } end it 'returns File instances' do @@ -59,7 +61,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do context 'when the string is a remote file' do let(:values) do - { include: remote_url, image: 'ruby:2.7' } + { include: remote_url, image: 'image:1.0' } end it 'returns File instances' do @@ -71,7 +73,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do context 'when the key is a remote file hash' do let(:values) do { include: { 'remote' => remote_url }, - image: 'ruby:2.7' } + image: 'image:1.0' } end it 'returns File instances' do @@ -83,7 +85,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do context 'when the key is a template file hash' do let(:values) do { include: { 'template' => template_file }, - image: 'ruby:2.7' } + image: 'image:1.0' } end it 'returns File instances' do @@ -98,7 +100,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do let(:remote_url) { 'https://gitlab.com/secret-file.yml' } let(:values) do { include: { 'local' => local_file, 'remote' => 
remote_url }, - image: 'ruby:2.7' } + image: 'image:1.0' } end it 'returns ambigious specification error' do @@ -109,7 +111,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do context "when the key is a project's file" do let(:values) do { include: { project: project.full_path, file: local_file }, - image: 'ruby:2.7' } + image: 'image:1.0' } end it 'returns File instances' do @@ -121,7 +123,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do context "when the key is project's files" do let(:values) do { include: { project: project.full_path, file: [local_file, 'another_file_path.yml'] }, - image: 'ruby:2.7' } + image: 'image:1.0' } end it 'returns two File instances' do @@ -135,7 +137,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do context "when 'include' is defined as an array" do let(:values) do { include: [remote_url, local_file], - image: 'ruby:2.7' } + image: 'image:1.0' } end it 'returns Files instances' do @@ -147,7 +149,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do context "when 'include' is defined as an array of hashes" do let(:values) do { include: [{ remote: remote_url }, { local: local_file }], - image: 'ruby:2.7' } + image: 'image:1.0' } end it 'returns Files instances' do @@ -158,7 +160,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do context 'when it has ambigious match' do let(:values) do { include: [{ remote: remote_url, local: local_file }], - image: 'ruby:2.7' } + image: 'image:1.0' } end it 'returns ambigious specification error' do @@ -170,7 +172,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do context "when 'include' is not defined" do let(:values) do { - image: 'ruby:2.7' + image: 'image:1.0' } end @@ -185,11 +187,16 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do { 'local' => local_file }, { 'local' => local_file } ], - image: 'ruby:2.7' } + image: 'image:1.0' } end it 'does not raise an exception' do - expect { subject }.not_to raise_error + expect { process }.not_to raise_error + end + + it 'has expanset with one' do + process + expect(mapper.expandset.size).to eq(1) end end @@ -199,7 +206,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do { 'local' => local_file }, { 'remote' => remote_url } ], - image: 'ruby:2.7' } + image: 'image:1.0' } end before do @@ -217,7 +224,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do { 'local' => local_file }, { 'remote' => remote_url } ], - image: 'ruby:2.7' } + image: 'image:1.0' } end before do @@ -269,7 +276,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do context 'defined as an array' do let(:values) do { include: [full_local_file_path, remote_url], - image: 'ruby:2.7' } + image: 'image:1.0' } end it 'expands the variable' do @@ -281,7 +288,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do context 'defined as an array of hashes' do let(:values) do { include: [{ local: full_local_file_path }, { remote: remote_url }], - image: 'ruby:2.7' } + image: 'image:1.0' } end it 'expands the variable' do @@ -303,7 +310,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do context 'project name' do let(:values) do { include: { project: '$CI_PROJECT_PATH', file: local_file }, - image: 'ruby:2.7' } + image: 'image:1.0' } end it 'expands the variable', :aggregate_failures do @@ -315,7 +322,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do context 'with multiple files' do let(:values) do { include: { project: project.full_path, file: [full_local_file_path, 'another_file_path.yml'] }, - image: 'ruby:2.7' } + image: 
'image:1.0' } end it 'expands the variable' do @@ -327,7 +334,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do context 'when include variable has an unsupported type for variable expansion' do let(:values) do { include: { project: project.id, file: local_file }, - image: 'ruby:2.7' } + image: 'image:1.0' } end it 'does not invoke expansion for the variable', :aggregate_failures do @@ -365,7 +372,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do let(:values) do { include: [{ remote: remote_url }, { local: local_file, rules: [{ if: "$CI_PROJECT_ID == '#{project_id}'" }] }], - image: 'ruby:2.7' } + image: 'image:1.0' } end context 'when the rules matches' do @@ -385,5 +392,27 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do end end end + + context "when locations are same after masking variables" do + let(:variables) do + Gitlab::Ci::Variables::Collection.new([ + { 'key' => 'GITLAB_TOKEN', 'value' => 'secret-file1', 'masked' => true }, + { 'key' => 'GITLAB_TOKEN', 'value' => 'secret-file2', 'masked' => true } + ]) + end + + let(:values) do + { include: [ + { 'local' => 'hello/secret-file1.yml' }, + { 'local' => 'hello/secret-file2.yml' } + ], + image: 'ruby:2.7' } + end + + it 'has expanset with two' do + process + expect(mapper.expandset.size).to eq(2) + end + end end end diff --git a/spec/lib/gitlab/ci/config/external/processor_spec.rb b/spec/lib/gitlab/ci/config/external/processor_spec.rb index 97bd74721f2..56cd006717e 100644 --- a/spec/lib/gitlab/ci/config/external/processor_spec.rb +++ b/spec/lib/gitlab/ci/config/external/processor_spec.rb @@ -22,10 +22,10 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do end describe "#perform" do - subject { processor.perform } + subject(:perform) { processor.perform } context 'when no external files defined' do - let(:values) { { image: 'ruby:2.7' } } + let(:values) { { image: 'image:1.0' } } it 'returns the same values' do expect(processor.perform).to eq(values) @@ -33,7 +33,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do end context 'when an invalid local file is defined' do - let(:values) { { include: '/lib/gitlab/ci/templates/non-existent-file.yml', image: 'ruby:2.7' } } + let(:values) { { include: '/lib/gitlab/ci/templates/non-existent-file.yml', image: 'image:1.0' } } it 'raises an error' do expect { processor.perform }.to raise_error( @@ -45,7 +45,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do context 'when an invalid remote file is defined' do let(:remote_file) { 'http://doesntexist.com/.gitlab-ci-1.yml' } - let(:values) { { include: remote_file, image: 'ruby:2.7' } } + let(:values) { { include: remote_file, image: 'image:1.0' } } before do stub_full_request(remote_file).and_raise(SocketError.new('Some HTTP error')) @@ -61,7 +61,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do context 'with a valid remote external file is defined' do let(:remote_file) { 'https://gitlab.com/gitlab-org/gitlab-foss/blob/1234/.gitlab-ci-1.yml' } - let(:values) { { include: remote_file, image: 'ruby:2.7' } } + let(:values) { { include: remote_file, image: 'image:1.0' } } let(:external_file_content) do <<-HEREDOC before_script: @@ -95,7 +95,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do end context 'with a valid local external file is defined' do - let(:values) { { include: '/lib/gitlab/ci/templates/template.yml', image: 'ruby:2.7' } } + let(:values) { { include: '/lib/gitlab/ci/templates/template.yml', image: 'image:1.0' } } let(:local_file_content) do <<-HEREDOC 
before_script: @@ -133,7 +133,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do let(:values) do { include: external_files, - image: 'ruby:2.7' + image: 'image:1.0' } end @@ -165,7 +165,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do end context 'when external files are defined but not valid' do - let(:values) { { include: '/lib/gitlab/ci/templates/template.yml', image: 'ruby:2.7' } } + let(:values) { { include: '/lib/gitlab/ci/templates/template.yml', image: 'image:1.0' } } let(:local_file_content) { 'invalid content file ////' } @@ -187,7 +187,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do let(:values) do { include: remote_file, - image: 'ruby:2.7' + image: 'image:1.0' } end @@ -200,7 +200,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do it 'takes precedence' do stub_full_request(remote_file).to_return(body: remote_file_content) - expect(processor.perform[:image]).to eq('ruby:2.7') + expect(processor.perform[:image]).to eq('image:1.0') end end @@ -210,7 +210,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do include: [ { local: '/local/file.yml' } ], - image: 'ruby:2.7' + image: 'image:1.0' } end @@ -262,6 +262,18 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do expect(process_obs_count).to eq(3) end + + it 'stores includes' do + perform + + expect(context.includes).to contain_exactly( + { type: :local, location: '/local/file.yml', extra: {}, context_project: project.full_path, context_sha: '12345' }, + { type: :template, location: 'Ruby.gitlab-ci.yml', extra: {}, context_project: project.full_path, context_sha: '12345' }, + { type: :remote, location: 'http://my.domain.com/config.yml', extra: {}, context_project: project.full_path, context_sha: '12345' }, + { type: :file, location: '/templates/my-workflow.yml', extra: { project: another_project.full_path, ref: 'HEAD' }, context_project: project.full_path, context_sha: '12345' }, + { type: :local, location: '/templates/my-build.yml', extra: {}, context_project: another_project.full_path, context_sha: another_project.commit.sha } + ) + end end context 'when user is reporter of another project' do @@ -294,7 +306,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do context 'when config includes an external configuration file via SSL web request' do before do stub_full_request('https://sha256.badssl.com/fake.yml', ip_address: '8.8.8.8') - .to_return(body: 'image: ruby:2.6', status: 200) + .to_return(body: 'image: image:1.0', status: 200) stub_full_request('https://self-signed.badssl.com/fake.yml', ip_address: '8.8.8.9') .to_raise(OpenSSL::SSL::SSLError.new('SSL_connect returned=1 errno=0 state=error: certificate verify failed (self signed certificate)')) @@ -303,7 +315,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do context 'with an acceptable certificate' do let(:values) { { include: 'https://sha256.badssl.com/fake.yml' } } - it { is_expected.to include(image: 'ruby:2.6') } + it { is_expected.to include(image: 'image:1.0') } end context 'with a self-signed certificate' do @@ -319,7 +331,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do let(:values) do { include: { project: another_project.full_path, file: '/templates/my-build.yml' }, - image: 'ruby:2.7' + image: 'image:1.0' } end @@ -349,7 +361,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do project: another_project.full_path, file: ['/templates/my-build.yml', '/templates/my-test.yml'] }, - image: 'ruby:2.7' + image: 'image:1.0' } end @@ -377,13 +389,22 @@ 
RSpec.describe Gitlab::Ci::Config::External::Processor do output = processor.perform expect(output.keys).to match_array([:image, :my_build, :my_test]) end + + it 'stores includes' do + perform + + expect(context.includes).to contain_exactly( + { type: :file, location: '/templates/my-build.yml', extra: { project: another_project.full_path, ref: 'HEAD' }, context_project: project.full_path, context_sha: '12345' }, + { type: :file, location: '/templates/my-test.yml', extra: { project: another_project.full_path, ref: 'HEAD' }, context_project: project.full_path, context_sha: '12345' } + ) + end end context 'when local file path has wildcard' do - let_it_be(:project) { create(:project, :repository) } + let(:project) { create(:project, :repository) } let(:values) do - { include: 'myfolder/*.yml', image: 'ruby:2.7' } + { include: 'myfolder/*.yml', image: 'image:1.0' } end before do @@ -412,6 +433,15 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do output = processor.perform expect(output.keys).to match_array([:image, :my_build, :my_test]) end + + it 'stores includes' do + perform + + expect(context.includes).to contain_exactly( + { type: :local, location: 'myfolder/file1.yml', extra: {}, context_project: project.full_path, context_sha: '12345' }, + { type: :local, location: 'myfolder/file2.yml', extra: {}, context_project: project.full_path, context_sha: '12345' } + ) + end end context 'when rules defined' do diff --git a/spec/lib/gitlab/ci/config_spec.rb b/spec/lib/gitlab/ci/config_spec.rb index 05ff1f3618b..3ba6a9059c6 100644 --- a/spec/lib/gitlab/ci/config_spec.rb +++ b/spec/lib/gitlab/ci/config_spec.rb @@ -20,7 +20,7 @@ RSpec.describe Gitlab::Ci::Config do context 'when config is valid' do let(:yml) do <<-EOS - image: ruby:2.7 + image: image:1.0 rspec: script: @@ -32,7 +32,7 @@ RSpec.describe Gitlab::Ci::Config do describe '#to_hash' do it 'returns hash created from string' do hash = { - image: 'ruby:2.7', + image: 'image:1.0', rspec: { script: ['gem install rspec', 'rspec'] @@ -104,12 +104,32 @@ RSpec.describe Gitlab::Ci::Config do end it { is_expected.to contain_exactly('Jobs/Deploy.gitlab-ci.yml', 'Jobs/Build.gitlab-ci.yml') } + + it 'stores includes' do + expect(config.metadata[:includes]).to contain_exactly( + { type: :template, + location: 'Jobs/Deploy.gitlab-ci.yml', + extra: {}, + context_project: nil, + context_sha: nil }, + { type: :template, + location: 'Jobs/Build.gitlab-ci.yml', + extra: {}, + context_project: nil, + context_sha: nil }, + { type: :remote, + location: 'https://example.com/gitlab-ci.yml', + extra: {}, + context_project: nil, + context_sha: nil } + ) + end end context 'when using extendable hash' do let(:yml) do <<-EOS - image: ruby:2.7 + image: image:1.0 rspec: script: rspec @@ -122,7 +142,7 @@ RSpec.describe Gitlab::Ci::Config do it 'correctly extends the hash' do hash = { - image: 'ruby:2.7', + image: 'image:1.0', rspec: { script: 'rspec' }, test: { extends: 'rspec', @@ -212,7 +232,7 @@ RSpec.describe Gitlab::Ci::Config do let(:yml) do <<-EOS image: - name: ruby:2.7 + name: image:1.0 ports: - 80 EOS @@ -226,12 +246,12 @@ RSpec.describe Gitlab::Ci::Config do context 'in the job image' do let(:yml) do <<-EOS - image: ruby:2.7 + image: image:1.0 test: script: rspec image: - name: ruby:2.7 + name: image:1.0 ports: - 80 EOS @@ -245,11 +265,11 @@ RSpec.describe Gitlab::Ci::Config do context 'in the services' do let(:yml) do <<-EOS - image: ruby:2.7 + image: image:1.0 test: script: rspec - image: ruby:2.7 + image: image:1.0 services: - name: test alias: test 
@@ -325,7 +345,7 @@ RSpec.describe Gitlab::Ci::Config do - project: '$MAIN_PROJECT' ref: '$REF' file: '$FILENAME' - image: ruby:2.7 + image: image:1.0 HEREDOC end @@ -364,7 +384,7 @@ RSpec.describe Gitlab::Ci::Config do it 'returns a composed hash' do composed_hash = { before_script: local_location_hash[:before_script], - image: "ruby:2.7", + image: "image:1.0", rspec: { script: ["bundle exec rspec"] }, variables: remote_file_hash[:variables] } @@ -403,6 +423,26 @@ RSpec.describe Gitlab::Ci::Config do end end end + + it 'stores includes' do + expect(config.metadata[:includes]).to contain_exactly( + { type: :local, + location: local_location, + extra: {}, + context_project: project.full_path, + context_sha: '12345' }, + { type: :remote, + location: remote_location, + extra: {}, + context_project: project.full_path, + context_sha: '12345' }, + { type: :file, + location: '.gitlab-ci.yml', + extra: { project: main_project.full_path, ref: 'HEAD' }, + context_project: project.full_path, + context_sha: '12345' } + ) + end end context "when gitlab_ci.yml has invalid 'include' defined" do @@ -481,7 +521,7 @@ RSpec.describe Gitlab::Ci::Config do include: - #{remote_location} - image: ruby:2.7 + image: image:1.0 HEREDOC end @@ -492,7 +532,7 @@ RSpec.describe Gitlab::Ci::Config do end it 'takes precedence' do - expect(config.to_hash).to eq({ image: 'ruby:2.7' }) + expect(config.to_hash).to eq({ image: 'image:1.0' }) end end @@ -699,7 +739,7 @@ RSpec.describe Gitlab::Ci::Config do - #{local_location} - #{other_file_location} - image: ruby:2.7 + image: image:1.0 HEREDOC end @@ -718,7 +758,7 @@ RSpec.describe Gitlab::Ci::Config do it 'returns a composed hash' do composed_hash = { before_script: local_location_hash[:before_script], - image: "ruby:2.7", + image: "image:1.0", build: { stage: "build", script: "echo hello" }, rspec: { stage: "test", script: "bundle exec rspec" } } @@ -735,7 +775,7 @@ RSpec.describe Gitlab::Ci::Config do - local: #{local_location} rules: - if: $CI_PROJECT_ID == "#{project_id}" - image: ruby:2.7 + image: image:1.0 HEREDOC end @@ -763,7 +803,7 @@ RSpec.describe Gitlab::Ci::Config do - local: #{local_location} rules: - exists: "#{filename}" - image: ruby:2.7 + image: image:1.0 HEREDOC end diff --git a/spec/lib/gitlab/ci/parsers/security/common_spec.rb b/spec/lib/gitlab/ci/parsers/security/common_spec.rb index 1e96c717a4f..dfc5dec1481 100644 --- a/spec/lib/gitlab/ci/parsers/security/common_spec.rb +++ b/spec/lib/gitlab/ci/parsers/security/common_spec.rb @@ -4,6 +4,18 @@ require 'spec_helper' RSpec.describe Gitlab::Ci::Parsers::Security::Common do describe '#parse!' do + let_it_be(:scanner_data) do + { + scan: { + scanner: { + id: "gemnasium", + name: "Gemnasium", + version: "2.1.0" + } + } + } + end + where(vulnerability_finding_signatures_enabled: [true, false]) with_them do let_it_be(:pipeline) { create(:ci_pipeline) } @@ -30,7 +42,9 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common do describe 'schema validation' do let(:validator_class) { Gitlab::Ci::Parsers::Security::Validators::SchemaValidator } - let(:parser) { described_class.new('{}', report, vulnerability_finding_signatures_enabled, validate: validate) } + let(:data) { {}.merge(scanner_data) } + let(:json_data) { data.to_json } + let(:parser) { described_class.new(json_data, report, vulnerability_finding_signatures_enabled, validate: validate) } subject(:parse_report) { parser.parse! 
} @@ -38,172 +52,138 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common do allow(validator_class).to receive(:new).and_call_original end - context 'when show_report_validation_warnings is enabled' do - before do - stub_feature_flags(show_report_validation_warnings: true) - end - - context 'when the validate flag is set to `false`' do - let(:validate) { false } - let(:valid?) { false } - let(:errors) { ['foo'] } - - before do - allow_next_instance_of(validator_class) do |instance| - allow(instance).to receive(:valid?).and_return(valid?) - allow(instance).to receive(:errors).and_return(errors) - end - - allow(parser).to receive_messages(create_scanner: true, create_scan: true) - end - - it 'instantiates the validator with correct params' do - parse_report - - expect(validator_class).to have_received(:new).with(report.type, {}, report.version) - end - - context 'when the report data is not valid according to the schema' do - it 'adds warnings to the report' do - expect { parse_report }.to change { report.warnings }.from([]).to([{ message: 'foo', type: 'Schema' }]) - end - - it 'keeps the execution flow as normal' do - parse_report + context 'when the validate flag is set to `false`' do + let(:validate) { false } + let(:valid?) { false } + let(:errors) { ['foo'] } + let(:warnings) { ['bar'] } - expect(parser).to have_received(:create_scanner) - expect(parser).to have_received(:create_scan) - end + before do + allow_next_instance_of(validator_class) do |instance| + allow(instance).to receive(:valid?).and_return(valid?) + allow(instance).to receive(:errors).and_return(errors) + allow(instance).to receive(:warnings).and_return(warnings) end - context 'when the report data is valid according to the schema' do - let(:valid?) { true } - let(:errors) { [] } - - it 'does not add warnings to the report' do - expect { parse_report }.not_to change { report.errors } - end - - it 'keeps the execution flow as normal' do - parse_report - - expect(parser).to have_received(:create_scanner) - expect(parser).to have_received(:create_scan) - end - end + allow(parser).to receive_messages(create_scanner: true, create_scan: true) end - context 'when the validate flag is set to `true`' do - let(:validate) { true } - let(:valid?) { false } - let(:errors) { ['foo'] } + it 'instantiates the validator with correct params' do + parse_report - before do - allow_next_instance_of(validator_class) do |instance| - allow(instance).to receive(:valid?).and_return(valid?) 
- allow(instance).to receive(:errors).and_return(errors) - end + expect(validator_class).to have_received(:new).with( + report.type, + data.deep_stringify_keys, + report.version, + project: pipeline.project, + scanner: data.dig(:scan, :scanner).deep_stringify_keys + ) + end - allow(parser).to receive_messages(create_scanner: true, create_scan: true) + context 'when the report data is not valid according to the schema' do + it 'adds warnings to the report' do + expect { parse_report }.to change { report.warnings }.from([]).to( + [ + { message: 'foo', type: 'Schema' }, + { message: 'bar', type: 'Schema' } + ] + ) end - it 'instantiates the validator with correct params' do + it 'keeps the execution flow as normal' do parse_report - expect(validator_class).to have_received(:new).with(report.type, {}, report.version) + expect(parser).to have_received(:create_scanner) + expect(parser).to have_received(:create_scan) end + end - context 'when the report data is not valid according to the schema' do - it 'adds errors to the report' do - expect { parse_report }.to change { report.errors }.from([]).to([{ message: 'foo', type: 'Schema' }]) - end - - it 'does not try to create report entities' do - parse_report + context 'when the report data is valid according to the schema' do + let(:valid?) { true } + let(:errors) { [] } + let(:warnings) { [] } - expect(parser).not_to have_received(:create_scanner) - expect(parser).not_to have_received(:create_scan) - end + it 'does not add errors to the report' do + expect { parse_report }.not_to change { report.errors } end - context 'when the report data is valid according to the schema' do - let(:valid?) { true } - let(:errors) { [] } - - it 'does not add errors to the report' do - expect { parse_report }.not_to change { report.errors }.from([]) - end + it 'does not add warnings to the report' do + expect { parse_report }.not_to change { report.warnings } + end - it 'keeps the execution flow as normal' do - parse_report + it 'keeps the execution flow as normal' do + parse_report - expect(parser).to have_received(:create_scanner) - expect(parser).to have_received(:create_scan) - end + expect(parser).to have_received(:create_scanner) + expect(parser).to have_received(:create_scan) end end end - context 'when show_report_validation_warnings is disabled' do - before do - stub_feature_flags(show_report_validation_warnings: false) - end - - context 'when the validate flag is set as `false`' do - let(:validate) { false } + context 'when the validate flag is set to `true`' do + let(:validate) { true } + let(:valid?) { false } + let(:errors) { ['foo'] } + let(:warnings) { ['bar'] } - it 'does not run the validation logic' do - parse_report - - expect(validator_class).not_to have_received(:new) + before do + allow_next_instance_of(validator_class) do |instance| + allow(instance).to receive(:valid?).and_return(valid?) + allow(instance).to receive(:errors).and_return(errors) + allow(instance).to receive(:warnings).and_return(warnings) end + + allow(parser).to receive_messages(create_scanner: true, create_scan: true) end - context 'when the validate flag is set as `true`' do - let(:validate) { true } - let(:valid?) { false } + it 'instantiates the validator with correct params' do + parse_report - before do - allow_next_instance_of(validator_class) do |instance| - allow(instance).to receive(:valid?).and_return(valid?) 
- allow(instance).to receive(:errors).and_return(['foo']) - end + expect(validator_class).to have_received(:new).with( + report.type, + data.deep_stringify_keys, + report.version, + project: pipeline.project, + scanner: data.dig(:scan, :scanner).deep_stringify_keys + ) + end - allow(parser).to receive_messages(create_scanner: true, create_scan: true) + context 'when the report data is not valid according to the schema' do + it 'adds errors to the report' do + expect { parse_report }.to change { report.errors }.from([]).to( + [ + { message: 'foo', type: 'Schema' }, + { message: 'bar', type: 'Schema' } + ] + ) end - it 'instantiates the validator with correct params' do + it 'does not try to create report entities' do parse_report - expect(validator_class).to have_received(:new).with(report.type, {}, report.version) + expect(parser).not_to have_received(:create_scanner) + expect(parser).not_to have_received(:create_scan) end + end - context 'when the report data is not valid according to the schema' do - it 'adds errors to the report' do - expect { parse_report }.to change { report.errors }.from([]).to([{ message: 'foo', type: 'Schema' }]) - end - - it 'does not try to create report entities' do - parse_report + context 'when the report data is valid according to the schema' do + let(:valid?) { true } + let(:errors) { [] } + let(:warnings) { [] } - expect(parser).not_to have_received(:create_scanner) - expect(parser).not_to have_received(:create_scan) - end + it 'does not add errors to the report' do + expect { parse_report }.not_to change { report.errors }.from([]) end - context 'when the report data is valid according to the schema' do - let(:valid?) { true } - - it 'does not add errors to the report' do - expect { parse_report }.not_to change { report.errors }.from([]) - end + it 'does not add warnings to the report' do + expect { parse_report }.not_to change { report.warnings }.from([]) + end - it 'keeps the execution flow as normal' do - parse_report + it 'keeps the execution flow as normal' do + parse_report - expect(parser).to have_received(:create_scanner) - expect(parser).to have_received(:create_scan) - end + expect(parser).to have_received(:create_scanner) + expect(parser).to have_received(:create_scan) end end end diff --git a/spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb b/spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb index c83427b68ef..f6409c8b01f 100644 --- a/spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb +++ b/spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb @@ -3,6 +3,18 @@ require 'spec_helper' RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do + let_it_be(:project) { create(:project) } + + let(:scanner) do + { + 'id' => 'gemnasium', + 'name' => 'Gemnasium', + 'version' => '2.1.0' + } + end + + let(:validator) { described_class.new(report_type, report_data, report_version, project: project, scanner: scanner) } + describe 'SUPPORTED_VERSIONS' do schema_path = Rails.root.join("lib", "gitlab", "ci", "parsers", "security", "validators", "schemas") @@ -47,48 +59,652 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do end end - using RSpec::Parameterized::TableSyntax + describe '#valid?' do + subject { validator.valid? 
} - where(:report_type, :report_version, :expected_errors, :valid_data) do - 'sast' | '10.0.0' | ['root is missing required keys: vulnerabilities'] | { 'version' => '10.0.0', 'vulnerabilities' => [] } - :sast | '10.0.0' | ['root is missing required keys: vulnerabilities'] | { 'version' => '10.0.0', 'vulnerabilities' => [] } - :secret_detection | '10.0.0' | ['root is missing required keys: vulnerabilities'] | { 'version' => '10.0.0', 'vulnerabilities' => [] } - end + context 'when given a supported schema version' do + let(:report_type) { :dast } + let(:report_version) { described_class::SUPPORTED_VERSIONS[report_type].last } - with_them do - let(:validator) { described_class.new(report_type, report_data, report_version) } + context 'and the report is valid' do + let(:report_data) do + { + 'version' => report_version, + 'vulnerabilities' => [] + } + end - describe '#valid?' do - subject { validator.valid? } + it { is_expected.to be_truthy } + end - context 'when given data is invalid according to the schema' do - let(:report_data) { {} } + context 'and the report is invalid' do + let(:report_data) do + { + 'version' => report_version + } + end it { is_expected.to be_falsey } + + it 'logs related information' do + expect(Gitlab::AppLogger).to receive(:info).with( + message: "security report schema validation problem", + security_report_type: report_type, + security_report_version: report_version, + project_id: project.id, + security_report_failure: 'schema_validation_fails', + security_report_scanner_id: 'gemnasium', + security_report_scanner_version: '2.1.0' + ) + + subject + end end + end + + context 'when given a deprecated schema version' do + let(:report_type) { :dast } + let(:report_version) { described_class::DEPRECATED_VERSIONS[report_type].last } - context 'when given data is valid according to the schema' do - let(:report_data) { valid_data } + context 'and the report passes schema validation' do + let(:report_data) do + { + 'version' => '10.0.0', + 'vulnerabilities' => [] + } + end it { is_expected.to be_truthy } + + it 'logs related information' do + expect(Gitlab::AppLogger).to receive(:info).with( + message: "security report schema validation problem", + security_report_type: report_type, + security_report_version: report_version, + project_id: project.id, + security_report_failure: 'using_deprecated_schema_version', + security_report_scanner_id: 'gemnasium', + security_report_scanner_version: '2.1.0' + ) + + subject + end end - context 'when no report_version is provided' do - let(:report_version) { nil } - let(:report_data) { valid_data } + context 'and the report does not pass schema validation' do + context 'and enforce_security_report_validation is enabled' do + before do + stub_feature_flags(enforce_security_report_validation: true) + end + + let(:report_data) do + { + 'version' => 'V2.7.0' + } + end - it 'does not fail' do - expect { subject }.not_to raise_error + it { is_expected.to be_falsey } + end + + context 'and enforce_security_report_validation is disabled' do + before do + stub_feature_flags(enforce_security_report_validation: false) + end + + let(:report_data) do + { + 'version' => 'V2.7.0' + } + end + + it { is_expected.to be_truthy } end end end - describe '#errors' do - let(:report_data) { { 'version' => '10.0.0' } } + context 'when given an unsupported schema version' do + let(:report_type) { :dast } + let(:report_version) { "12.37.0" } + + context 'if enforce_security_report_validation is enabled' do + before do + 
stub_feature_flags(enforce_security_report_validation: true) + end + + context 'and the report is valid' do + let(:report_data) do + { + 'version' => report_version, + 'vulnerabilities' => [] + } + end + + it { is_expected.to be_falsey } + + it 'logs related information' do + expect(Gitlab::AppLogger).to receive(:info).with( + message: "security report schema validation problem", + security_report_type: report_type, + security_report_version: report_version, + project_id: project.id, + security_report_failure: 'using_unsupported_schema_version', + security_report_scanner_id: 'gemnasium', + security_report_scanner_version: '2.1.0' + ) + + subject + end + end + + context 'and the report is invalid' do + let(:report_data) do + { + 'version' => report_version + } + end + + context 'and scanner information is empty' do + let(:scanner) { {} } + + it 'logs related information' do + expect(Gitlab::AppLogger).to receive(:info).with( + message: "security report schema validation problem", + security_report_type: report_type, + security_report_version: report_version, + project_id: project.id, + security_report_failure: 'schema_validation_fails', + security_report_scanner_id: nil, + security_report_scanner_version: nil + ) + + expect(Gitlab::AppLogger).to receive(:info).with( + message: "security report schema validation problem", + security_report_type: report_type, + security_report_version: report_version, + project_id: project.id, + security_report_failure: 'using_unsupported_schema_version', + security_report_scanner_id: nil, + security_report_scanner_version: nil + ) + + subject + end + end + + it { is_expected.to be_falsey } + end + end + + context 'if enforce_security_report_validation is disabled' do + before do + stub_feature_flags(enforce_security_report_validation: false) + end + + context 'and the report is valid' do + let(:report_data) do + { + 'version' => report_version, + 'vulnerabilities' => [] + } + end + + it { is_expected.to be_truthy } + end + + context 'and the report is invalid' do + let(:report_data) do + { + 'version' => report_version + } + end + + it { is_expected.to be_truthy } + end + end + end + end - subject { validator.errors } + describe '#errors' do + subject { validator.errors } - it { is_expected.to eq(expected_errors) } + context 'when given a supported schema version' do + let(:report_type) { :dast } + let(:report_version) { described_class::SUPPORTED_VERSIONS[report_type].last } + + context 'and the report is valid' do + let(:report_data) do + { + 'version' => report_version, + 'vulnerabilities' => [] + } + end + + let(:expected_errors) { [] } + + it { is_expected.to match_array(expected_errors) } + end + + context 'and the report is invalid' do + let(:report_data) do + { + 'version' => report_version + } + end + + context 'if enforce_security_report_validation is enabled' do + before do + stub_feature_flags(enforce_security_report_validation: project) + end + + let(:expected_errors) do + [ + 'root is missing required keys: vulnerabilities' + ] + end + + it { is_expected.to match_array(expected_errors) } + end + + context 'if enforce_security_report_validation is disabled' do + before do + stub_feature_flags(enforce_security_report_validation: false) + end + + let(:expected_errors) { [] } + + it { is_expected.to match_array(expected_errors) } + end + end + end + + context 'when given a deprecated schema version' do + let(:report_type) { :dast } + let(:report_version) { described_class::DEPRECATED_VERSIONS[report_type].last } + + context 'and the report passes 
schema validation' do + let(:report_data) do + { + 'version' => '10.0.0', + 'vulnerabilities' => [] + } + end + + let(:expected_errors) { [] } + + it { is_expected.to match_array(expected_errors) } + end + + context 'and the report does not pass schema validation' do + context 'and enforce_security_report_validation is enabled' do + before do + stub_feature_flags(enforce_security_report_validation: true) + end + + let(:report_data) do + { + 'version' => 'V2.7.0' + } + end + + let(:expected_errors) do + [ + "property '/version' does not match pattern: ^[0-9]+\\.[0-9]+\\.[0-9]+$", + "root is missing required keys: vulnerabilities" + ] + end + + it { is_expected.to match_array(expected_errors) } + end + + context 'and enforce_security_report_validation is disabled' do + before do + stub_feature_flags(enforce_security_report_validation: false) + end + + let(:report_data) do + { + 'version' => 'V2.7.0' + } + end + + let(:expected_errors) { [] } + + it { is_expected.to match_array(expected_errors) } + end + end + end + + context 'when given an unsupported schema version' do + let(:report_type) { :dast } + let(:report_version) { "12.37.0" } + + context 'if enforce_security_report_validation is enabled' do + before do + stub_feature_flags(enforce_security_report_validation: true) + end + + context 'and the report is valid' do + let(:report_data) do + { + 'version' => report_version, + 'vulnerabilities' => [] + } + end + + let(:expected_errors) do + [ + "Version 12.37.0 for report type dast is unsupported, supported versions for this report type are: 14.0.0, 14.0.1, 14.0.2, 14.0.3, 14.0.4, 14.0.5, 14.0.6, 14.1.0, 14.1.1" + ] + end + + it { is_expected.to match_array(expected_errors) } + end + + context 'and the report is invalid' do + let(:report_data) do + { + 'version' => report_version + } + end + + let(:expected_errors) do + [ + "Version 12.37.0 for report type dast is unsupported, supported versions for this report type are: 14.0.0, 14.0.1, 14.0.2, 14.0.3, 14.0.4, 14.0.5, 14.0.6, 14.1.0, 14.1.1", + "root is missing required keys: vulnerabilities" + ] + end + + it { is_expected.to match_array(expected_errors) } + end + end + + context 'if enforce_security_report_validation is disabled' do + before do + stub_feature_flags(enforce_security_report_validation: false) + end + + context 'and the report is valid' do + let(:report_data) do + { + 'version' => report_version, + 'vulnerabilities' => [] + } + end + + let(:expected_errors) { [] } + + it { is_expected.to match_array(expected_errors) } + end + + context 'and the report is invalid' do + let(:report_data) do + { + 'version' => report_version + } + end + + let(:expected_errors) { [] } + + it { is_expected.to match_array(expected_errors) } + end + end + end + end + + describe '#deprecation_warnings' do + subject { validator.deprecation_warnings } + + context 'when given a supported schema version' do + let(:report_type) { :dast } + let(:report_version) { described_class::SUPPORTED_VERSIONS[report_type].last } + + let(:expected_deprecation_warnings) { [] } + + context 'and the report is valid' do + let(:report_data) do + { + 'version' => report_version, + 'vulnerabilities' => [] + } + end + + it { is_expected.to match_array(expected_deprecation_warnings) } + end + + context 'and the report is invalid' do + let(:report_data) do + { + 'version' => report_version + } + end + + it { is_expected.to match_array(expected_deprecation_warnings) } + end + end + + context 'when given a deprecated schema version' do + let(:report_type) { :dast } + 
let(:report_version) { described_class::DEPRECATED_VERSIONS[report_type].last } + let(:expected_deprecation_warnings) do + [ + "Version V2.7.0 for report type dast has been deprecated, supported versions for this report type are: 14.0.0, 14.0.1, 14.0.2, 14.0.3, 14.0.4, 14.0.5, 14.0.6, 14.1.0, 14.1.1" + ] + end + + context 'and the report passes schema validation' do + let(:report_data) do + { + 'version' => report_version, + 'vulnerabilities' => [] + } + end + + it { is_expected.to match_array(expected_deprecation_warnings) } + end + + context 'and the report does not pass schema validation' do + let(:report_data) do + { + 'version' => 'V2.7.0' + } + end + + it { is_expected.to match_array(expected_deprecation_warnings) } + end + end + + context 'when given an unsupported schema version' do + let(:report_type) { :dast } + let(:report_version) { "21.37.0" } + let(:expected_deprecation_warnings) { [] } + let(:report_data) do + { + 'version' => report_version, + 'vulnerabilities' => [] + } + end + + it { is_expected.to match_array(expected_deprecation_warnings) } + end + end + + describe '#warnings' do + subject { validator.warnings } + + context 'when given a supported schema version' do + let(:report_type) { :dast } + let(:report_version) { described_class::SUPPORTED_VERSIONS[report_type].last } + + context 'and the report is valid' do + let(:report_data) do + { + 'version' => report_version, + 'vulnerabilities' => [] + } + end + + let(:expected_warnings) { [] } + + it { is_expected.to match_array(expected_warnings) } + end + + context 'and the report is invalid' do + let(:report_data) do + { + 'version' => report_version + } + end + + context 'if enforce_security_report_validation is enabled' do + before do + stub_feature_flags(enforce_security_report_validation: project) + end + + let(:expected_warnings) { [] } + + it { is_expected.to match_array(expected_warnings) } + end + + context 'if enforce_security_report_validation is disabled' do + before do + stub_feature_flags(enforce_security_report_validation: false) + end + + let(:expected_warnings) do + [ + 'root is missing required keys: vulnerabilities' + ] + end + + it { is_expected.to match_array(expected_warnings) } + end + end + end + + context 'when given a deprecated schema version' do + let(:report_type) { :dast } + let(:report_version) { described_class::DEPRECATED_VERSIONS[report_type].last } + + context 'and the report passes schema validation' do + let(:report_data) do + { + 'vulnerabilities' => [] + } + end + + let(:expected_warnings) { [] } + + it { is_expected.to match_array(expected_warnings) } + end + + context 'and the report does not pass schema validation' do + let(:report_data) do + { + 'version' => 'V2.7.0' + } + end + + context 'and enforce_security_report_validation is enabled' do + before do + stub_feature_flags(enforce_security_report_validation: true) + end + + let(:expected_warnings) { [] } + + it { is_expected.to match_array(expected_warnings) } + end + + context 'and enforce_security_report_validation is disabled' do + before do + stub_feature_flags(enforce_security_report_validation: false) + end + + let(:expected_warnings) do + [ + "property '/version' does not match pattern: ^[0-9]+\\.[0-9]+\\.[0-9]+$", + "root is missing required keys: vulnerabilities" + ] + end + + it { is_expected.to match_array(expected_warnings) } + end + end + end + + context 'when given an unsupported schema version' do + let(:report_type) { :dast } + let(:report_version) { "12.37.0" } + + context 'if 
enforce_security_report_validation is enabled' do + before do + stub_feature_flags(enforce_security_report_validation: true) + end + + context 'and the report is valid' do + let(:report_data) do + { + 'version' => report_version, + 'vulnerabilities' => [] + } + end + + let(:expected_warnings) { [] } + + it { is_expected.to match_array(expected_warnings) } + end + + context 'and the report is invalid' do + let(:report_data) do + { + 'version' => report_version + } + end + + let(:expected_warnings) { [] } + + it { is_expected.to match_array(expected_warnings) } + end + end + + context 'if enforce_security_report_validation is disabled' do + before do + stub_feature_flags(enforce_security_report_validation: false) + end + + context 'and the report is valid' do + let(:report_data) do + { + 'version' => report_version, + 'vulnerabilities' => [] + } + end + + let(:expected_warnings) do + [ + "Version 12.37.0 for report type dast is unsupported, supported versions for this report type are: 14.0.0, 14.0.1, 14.0.2, 14.0.3, 14.0.4, 14.0.5, 14.0.6, 14.1.0, 14.1.1" + ] + end + + it { is_expected.to match_array(expected_warnings) } + end + + context 'and the report is invalid' do + let(:report_data) do + { + 'version' => report_version + } + end + + let(:expected_warnings) do + [ + "Version 12.37.0 for report type dast is unsupported, supported versions for this report type are: 14.0.0, 14.0.1, 14.0.2, 14.0.3, 14.0.4, 14.0.5, 14.0.6, 14.1.0, 14.1.1", + "root is missing required keys: vulnerabilities" + ] + end + + it { is_expected.to match_array(expected_warnings) } + end + end end end end diff --git a/spec/lib/gitlab/ci/pipeline/chain/limit/rate_limit_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/limit/rate_limit_spec.rb new file mode 100644 index 00000000000..aa8aec2af4a --- /dev/null +++ b/spec/lib/gitlab/ci/pipeline/chain/limit/rate_limit_spec.rb @@ -0,0 +1,179 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe ::Gitlab::Ci::Pipeline::Chain::Limit::RateLimit, :freeze_time, :clean_gitlab_redis_rate_limiting do + let_it_be(:user) { create(:user) } + let_it_be(:namespace) { create(:namespace) } + let_it_be(:project, reload: true) { create(:project, namespace: namespace) } + + let(:save_incompleted) { false } + let(:throttle_message) do + 'Too many pipelines created in the last minute. Try again later.' + end + + let(:command) do + Gitlab::Ci::Pipeline::Chain::Command.new( + project: project, + current_user: user, + save_incompleted: save_incompleted + ) + end + + let(:pipeline) { build(:ci_pipeline, project: project, source: source) } + let(:source) { 'push' } + let(:step) { described_class.new(pipeline, command) } + + def perform(count: 2) + count.times { step.perform! 
} + end + + context 'when the limit is exceeded' do + before do + allow(Gitlab::ApplicationRateLimiter).to receive(:rate_limits) + .and_return(pipelines_create: { threshold: 1, interval: 1.minute }) + + stub_feature_flags(ci_throttle_pipelines_creation_dry_run: false) + end + + it 'does not persist the pipeline' do + perform + + expect(pipeline).not_to be_persisted + expect(pipeline.errors.added?(:base, throttle_message)).to be_truthy + end + + it 'breaks the chain' do + perform + + expect(step.break?).to be_truthy + end + + it 'creates a log entry' do + expect(Gitlab::AppJsonLogger).to receive(:info).with( + a_hash_including( + class: described_class.name, + project_id: project.id, + subscription_plan: project.actual_plan_name, + commit_sha: command.sha + ) + ) + + perform + end + + context 'with child pipelines' do + let(:source) { 'parent_pipeline' } + + it 'does not break the chain' do + perform + + expect(step.break?).to be_falsey + end + + it 'does not invalidate the pipeline' do + perform + + expect(pipeline.errors).to be_empty + end + + it 'does not log anything' do + expect(Gitlab::AppJsonLogger).not_to receive(:info) + + perform + end + end + + context 'when saving incompleted pipelines' do + let(:save_incompleted) { true } + + it 'does not persist the pipeline' do + perform + + expect(pipeline).not_to be_persisted + expect(pipeline.errors.added?(:base, throttle_message)).to be_truthy + end + + it 'breaks the chain' do + perform + + expect(step.break?).to be_truthy + end + end + + context 'when ci_throttle_pipelines_creation is disabled' do + before do + stub_feature_flags(ci_throttle_pipelines_creation: false) + end + + it 'does not break the chain' do + perform + + expect(step.break?).to be_falsey + end + + it 'does not invalidate the pipeline' do + perform + + expect(pipeline.errors).to be_empty + end + + it 'does not log anything' do + expect(Gitlab::AppJsonLogger).not_to receive(:info) + + perform + end + end + + context 'when ci_throttle_pipelines_creation_dry_run is enabled' do + before do + stub_feature_flags(ci_throttle_pipelines_creation_dry_run: true) + end + + it 'does not break the chain' do + perform + + expect(step.break?).to be_falsey + end + + it 'does not invalidate the pipeline' do + perform + + expect(pipeline.errors).to be_empty + end + + it 'creates a log entry' do + expect(Gitlab::AppJsonLogger).to receive(:info).with( + a_hash_including( + class: described_class.name, + project_id: project.id, + subscription_plan: project.actual_plan_name, + commit_sha: command.sha + ) + ) + + perform + end + end + end + + context 'when the limit is not exceeded' do + it 'does not break the chain' do + perform + + expect(step.break?).to be_falsey + end + + it 'does not invalidate the pipeline' do + perform + + expect(pipeline.errors).to be_empty + end + + it 'does not log anything' do + expect(Gitlab::AppJsonLogger).not_to receive(:info) + + perform + end + end +end diff --git a/spec/lib/gitlab/ci/pipeline/chain/template_usage_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/template_usage_spec.rb index 8e0b032e68c..ddd0de69d79 100644 --- a/spec/lib/gitlab/ci/pipeline/chain/template_usage_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/chain/template_usage_spec.rb @@ -28,7 +28,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::TemplateUsage do %w(Template-1 Template-2).each do |expected_template| expect(Gitlab::UsageDataCounters::CiTemplateUniqueCounter).to( receive(:track_unique_project_event) - .with(project_id: project.id, template: expected_template, config_source: 
pipeline.config_source) + .with(project: project, template: expected_template, config_source: pipeline.config_source, user: user) ) end diff --git a/spec/lib/gitlab/ci/reports/security/report_spec.rb b/spec/lib/gitlab/ci/reports/security/report_spec.rb index 4dc1eca3859..ab0efb90901 100644 --- a/spec/lib/gitlab/ci/reports/security/report_spec.rb +++ b/spec/lib/gitlab/ci/reports/security/report_spec.rb @@ -184,6 +184,22 @@ RSpec.describe Gitlab::Ci::Reports::Security::Report do end end + describe 'warnings?' do + subject { report.warnings? } + + context 'when the report does not have any errors' do + it { is_expected.to be_falsey } + end + + context 'when the report has warnings' do + before do + report.add_warning('foo', 'bar') + end + + it { is_expected.to be_truthy } + end + end + describe '#primary_scanner_order_to' do let(:scanner_1) { build(:ci_reports_security_scanner) } let(:scanner_2) { build(:ci_reports_security_scanner) } diff --git a/spec/lib/gitlab/ci/reports/security/scanner_spec.rb b/spec/lib/gitlab/ci/reports/security/scanner_spec.rb index 99f5d4723d3..eb406e01b24 100644 --- a/spec/lib/gitlab/ci/reports/security/scanner_spec.rb +++ b/spec/lib/gitlab/ci/reports/security/scanner_spec.rb @@ -109,6 +109,7 @@ RSpec.describe Gitlab::Ci::Reports::Security::Scanner do { external_id: 'gemnasium-maven', name: 'foo', vendor: 'bar' } | { external_id: 'gemnasium-python', name: 'foo', vendor: 'bar' } | -1 { external_id: 'gemnasium-python', name: 'foo', vendor: 'bar' } | { external_id: 'bandit', name: 'foo', vendor: 'bar' } | 1 { external_id: 'bandit', name: 'foo', vendor: 'bar' } | { external_id: 'semgrep', name: 'foo', vendor: 'bar' } | -1 + { external_id: 'spotbugs', name: 'foo', vendor: 'bar' } | { external_id: 'semgrep', name: 'foo', vendor: 'bar' } | -1 { external_id: 'semgrep', name: 'foo', vendor: 'bar' } | { external_id: 'unknown', name: 'foo', vendor: 'bar' } | -1 { external_id: 'gemnasium', name: 'foo', vendor: 'bar' } | { external_id: 'gemnasium', name: 'foo', vendor: nil } | 1 end diff --git a/spec/lib/gitlab/ci/runner_releases_spec.rb b/spec/lib/gitlab/ci/runner_releases_spec.rb new file mode 100644 index 00000000000..9e4a8739c0f --- /dev/null +++ b/spec/lib/gitlab/ci/runner_releases_spec.rb @@ -0,0 +1,114 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Ci::RunnerReleases do + subject { described_class.instance } + + describe '#releases' do + before do + subject.reset! 
+ + stub_application_setting(public_runner_releases_url: 'the release API URL') + allow(Gitlab::HTTP).to receive(:try_get).with('the release API URL').once { mock_http_response(response) } + end + + def releases + subject.releases + end + + shared_examples 'requests that follow cache status' do |validity_period| + context "almost #{validity_period.inspect} later" do + let(:followup_request_interval) { validity_period - 0.001.seconds } + + it 'returns cached releases' do + releases + + travel followup_request_interval do + expect(Gitlab::HTTP).not_to receive(:try_get) + + expect(releases).to eq(expected_result) + end + end + end + + context "after #{validity_period.inspect}" do + let(:followup_request_interval) { validity_period + 1.second } + let(:followup_response) { (response || []) + [{ 'name' => 'v14.9.2' }] } + + it 'checks new releases' do + releases + + travel followup_request_interval do + expect(Gitlab::HTTP).to receive(:try_get).with('the release API URL').once { mock_http_response(followup_response) } + + expect(releases).to eq((expected_result || []) + [Gitlab::VersionInfo.new(14, 9, 2)]) + end + end + end + end + + context 'when response is nil' do + let(:response) { nil } + let(:expected_result) { nil } + + it 'returns nil' do + expect(releases).to be_nil + end + + it_behaves_like 'requests that follow cache status', 5.seconds + + it 'performs exponential backoff on requests', :aggregate_failures do + start_time = Time.now.utc.change(usec: 0) + + http_call_timestamp_offsets = [] + allow(Gitlab::HTTP).to receive(:try_get).with('the release API URL') do + http_call_timestamp_offsets << Time.now.utc - start_time + mock_http_response(response) + end + + # An initial HTTP request fails + travel_to(start_time) + subject.reset! + expect(releases).to be_nil + + # Successive failed requests result in HTTP requests only after specific backoff periods + backoff_periods = [5, 10, 20, 40, 80, 160, 320, 640, 1280, 2560, 3600].map(&:seconds) + backoff_periods.each do |period| + travel(period - 1.second) + expect(releases).to be_nil + + travel 1.second + expect(releases).to be_nil + end + + expect(http_call_timestamp_offsets).to eq([0, 5, 15, 35, 75, 155, 315, 635, 1275, 2555, 5115, 8715]) + + # Finally a successful HTTP request results in releases being returned + allow(Gitlab::HTTP).to receive(:try_get).with('the release API URL').once { mock_http_response([{ 'name' => 'v14.9.1' }]) } + travel 1.hour + expect(releases).not_to be_nil + end + end + + context 'when response is not nil' do + let(:response) { [{ 'name' => 'v14.9.1' }, { 'name' => 'v14.9.0' }] } + let(:expected_result) { [Gitlab::VersionInfo.new(14, 9, 0), Gitlab::VersionInfo.new(14, 9, 1)] } + + it 'returns parsed and sorted Gitlab::VersionInfo objects' do + expect(releases).to eq(expected_result) + end + + it_behaves_like 'requests that follow cache status', 1.day + end + + def mock_http_response(response) + http_response = instance_double(HTTParty::Response) + + allow(http_response).to receive(:success?).and_return(response.present?) 
+ allow(http_response).to receive(:parsed_response).and_return(response) + + http_response + end + end +end diff --git a/spec/lib/gitlab/ci/runner_upgrade_check_spec.rb b/spec/lib/gitlab/ci/runner_upgrade_check_spec.rb new file mode 100644 index 00000000000..b430da376dd --- /dev/null +++ b/spec/lib/gitlab/ci/runner_upgrade_check_spec.rb @@ -0,0 +1,89 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Ci::RunnerUpgradeCheck do + include StubVersion + using RSpec::Parameterized::TableSyntax + + describe '#check_runner_upgrade_status' do + subject(:result) { described_class.instance.check_runner_upgrade_status(runner_version) } + + before do + runner_releases_double = instance_double(Gitlab::Ci::RunnerReleases) + + allow(Gitlab::Ci::RunnerReleases).to receive(:instance).and_return(runner_releases_double) + allow(runner_releases_double).to receive(:releases).and_return(available_runner_releases.map { |v| ::Gitlab::VersionInfo.parse(v) }) + end + + context 'with available_runner_releases configured up to 14.1.1' do + let(:available_runner_releases) { %w[13.9.0 13.9.1 13.9.2 13.10.0 13.10.1 14.0.0 14.0.1 14.0.2 14.1.0 14.1.1 14.1.1-rc3] } + + context 'with nil runner_version' do + let(:runner_version) { nil } + + it 'raises :unknown' do + is_expected.to eq(:unknown) + end + end + + context 'with invalid runner_version' do + let(:runner_version) { 'junk' } + + it 'raises ArgumentError' do + expect { subject }.to raise_error(ArgumentError) + end + end + + context 'with Gitlab::VERSION set to 14.1.123' do + before do + stub_version('14.1.123', 'deadbeef') + + described_class.instance.reset! + end + + context 'with a runner_version that is too recent' do + let(:runner_version) { 'v14.2.0' } + + it 'returns :not_available' do + is_expected.to eq(:not_available) + end + end + end + + context 'with Gitlab::VERSION set to 14.0.123' do + before do + stub_version('14.0.123', 'deadbeef') + + described_class.instance.reset! 
+ end + + context 'with valid params' do + where(:runner_version, :expected_result) do + 'v14.1.0-rc3' | :not_available # not available since the GitLab instance is still on 14.0.x + 'v14.1.0~beta.1574.gf6ea9389' | :not_available # suffixes are correctly handled + 'v14.1.0/1.1.0' | :not_available # suffixes are correctly handled + 'v14.1.0' | :not_available # not available since the GitLab instance is still on 14.0.x + 'v14.0.1' | :recommended # recommended upgrade since 14.0.2 is available + 'v14.0.2' | :not_available # not available since 14.0.2 is the latest 14.0.x release available + 'v13.10.1' | :available # available upgrade: 14.1.1 + 'v13.10.1~beta.1574.gf6ea9389' | :available # suffixes are correctly handled + 'v13.10.1/1.1.0' | :available # suffixes are correctly handled + 'v13.10.0' | :recommended # recommended upgrade since 13.10.1 is available + 'v13.9.2' | :recommended # recommended upgrade since backports are no longer released for this version + 'v13.9.0' | :recommended # recommended upgrade since backports are no longer released for this version + 'v13.8.1' | :recommended # recommended upgrade since build is too old (missing in records) + 'v11.4.1' | :recommended # recommended upgrade since build is too old (missing in records) + end + + with_them do + it 'returns symbol representing expected upgrade status' do + is_expected.to be_a(Symbol) + is_expected.to eq(expected_result) + end + end + end + end + end + end +end diff --git a/spec/lib/gitlab/ci/status/build/manual_spec.rb b/spec/lib/gitlab/ci/status/build/manual_spec.rb index 78193055139..150705c1e36 100644 --- a/spec/lib/gitlab/ci/status/build/manual_spec.rb +++ b/spec/lib/gitlab/ci/status/build/manual_spec.rb @@ -3,15 +3,27 @@ require 'spec_helper' RSpec.describe Gitlab::Ci::Status::Build::Manual do - let(:user) { create(:user) } + let_it_be(:user) { create(:user) } + let_it_be(:job) { create(:ci_build, :manual) } subject do - build = create(:ci_build, :manual) - described_class.new(Gitlab::Ci::Status::Core.new(build, user)) + described_class.new(Gitlab::Ci::Status::Core.new(job, user)) end describe '#illustration' do it { expect(subject.illustration).to include(:image, :size, :title, :content) } + + context 'when the user can trigger the job' do + before do + job.project.add_maintainer(user) + end + + it { expect(subject.illustration[:content]).to match /This job requires manual intervention to start/ } + end + + context 'when the user can not trigger the job' do + it { expect(subject.illustration[:content]).to match /This job does not run automatically and must be started manually/ } + end end describe '.matches?' 
do diff --git a/spec/lib/gitlab/ci/templates/MATLAB_spec.rb b/spec/lib/gitlab/ci/templates/MATLAB_spec.rb new file mode 100644 index 00000000000..a12d69b67a6 --- /dev/null +++ b/spec/lib/gitlab/ci/templates/MATLAB_spec.rb @@ -0,0 +1,26 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'MATLAB.gitlab-ci.yml' do + subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('MATLAB') } + + describe 'the created pipeline' do + let_it_be(:project) { create(:project, :auto_devops, :custom_repo, files: { 'README.md' => '' }) } + + let(:user) { project.first_owner } + let(:default_branch) { 'master' } + let(:pipeline_branch) { default_branch } + let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) } + let(:pipeline) { service.execute!(:push).payload } + let(:build_names) { pipeline.builds.pluck(:name) } + + before do + stub_ci_pipeline_yaml_file(template.content) + end + + it 'creates all jobs' do + expect(build_names).to include('command', 'test', 'test_artifacts_job') + end + end +end diff --git a/spec/lib/gitlab/ci/templates/templates_spec.rb b/spec/lib/gitlab/ci/templates/templates_spec.rb index cdda7e953d0..ca096fcecc4 100644 --- a/spec/lib/gitlab/ci/templates/templates_spec.rb +++ b/spec/lib/gitlab/ci/templates/templates_spec.rb @@ -23,7 +23,8 @@ RSpec.describe 'CI YML Templates' do exceptions = [ 'Security/DAST.gitlab-ci.yml', # DAST stage is defined inside AutoDevops yml 'Security/DAST-API.gitlab-ci.yml', # no auto-devops - 'Security/API-Fuzzing.gitlab-ci.yml' # no auto-devops + 'Security/API-Fuzzing.gitlab-ci.yml', # no auto-devops + 'ThemeKit.gitlab-ci.yml' ] context 'when including available templates in a CI YAML configuration' do diff --git a/spec/lib/gitlab/ci/templates/themekit_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/themekit_gitlab_ci_yaml_spec.rb new file mode 100644 index 00000000000..4708108f404 --- /dev/null +++ b/spec/lib/gitlab/ci/templates/themekit_gitlab_ci_yaml_spec.rb @@ -0,0 +1,60 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'ThemeKit.gitlab-ci.yml' do + before do + allow(Gitlab::Template::GitlabCiYmlTemplate).to receive(:excluded_patterns).and_return([]) + end + + subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('ThemeKit') } + + describe 'the created pipeline' do + let(:pipeline_ref) { project.default_branch_or_main } + let(:project) { create(:project, :custom_repo, files: { 'README.md' => '' }) } + let(:user) { project.first_owner } + let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_ref) } + let(:pipeline) { service.execute!(:push).payload } + let(:build_names) { pipeline.builds.pluck(:name) } + + before do + stub_ci_pipeline_yaml_file(template.content) + end + + context 'on the default branch' do + it 'only creates staging deploy', :aggregate_failures do + expect(pipeline.errors).to be_empty + expect(build_names).to include('staging') + expect(build_names).not_to include('production') + end + end + + context 'on a tag' do + let(:pipeline_ref) { '1.0' } + + before do + project.repository.add_tag(user, pipeline_ref, project.default_branch_or_main) + end + + it 'only creates a production deploy', :aggregate_failures do + expect(pipeline.errors).to be_empty + expect(build_names).to include('production') + expect(build_names).not_to include('staging') + end + end + + context 'outside of the default branch' do + let(:pipeline_ref) { 'patch-1' } + + before do + project.repository.create_branch(pipeline_ref, 
project.default_branch_or_main) + end + + it 'has no jobs' do + expect { pipeline }.to raise_error( + Ci::CreatePipelineService::CreateError, 'No stages / jobs for this pipeline.' + ) + end + end + end +end diff --git a/spec/lib/gitlab/ci/variables/builder_spec.rb b/spec/lib/gitlab/ci/variables/builder_spec.rb index 8552a06eab3..b9aa5f7c431 100644 --- a/spec/lib/gitlab/ci/variables/builder_spec.rb +++ b/spec/lib/gitlab/ci/variables/builder_spec.rb @@ -199,6 +199,20 @@ RSpec.describe Gitlab::Ci::Variables::Builder do 'O' => '15', 'P' => '15') end end + + context 'with schedule variables' do + let_it_be(:schedule) { create(:ci_pipeline_schedule, project: project) } + let_it_be(:schedule_variable) { create(:ci_pipeline_schedule_variable, pipeline_schedule: schedule) } + + before do + pipeline.update!(pipeline_schedule_id: schedule.id) + end + + it 'includes schedule variables' do + expect(subject.to_runner_variables) + .to include(a_hash_including(key: schedule_variable.key, value: schedule_variable.value)) + end + end end describe '#user_variables' do @@ -278,6 +292,14 @@ RSpec.describe Gitlab::Ci::Variables::Builder do end shared_examples "secret CI variables" do + let(:protected_variable_item) do + Gitlab::Ci::Variables::Collection::Item.fabricate(protected_variable) + end + + let(:unprotected_variable_item) do + Gitlab::Ci::Variables::Collection::Item.fabricate(unprotected_variable) + end + context 'when ref is branch' do context 'when ref is protected' do before do @@ -338,189 +360,255 @@ RSpec.describe Gitlab::Ci::Variables::Builder do let_it_be(:protected_variable) { create(:ci_instance_variable, protected: true) } let_it_be(:unprotected_variable) { create(:ci_instance_variable, protected: false) } - let(:protected_variable_item) { Gitlab::Ci::Variables::Collection::Item.fabricate(protected_variable) } - let(:unprotected_variable_item) { Gitlab::Ci::Variables::Collection::Item.fabricate(unprotected_variable) } - include_examples "secret CI variables" end describe '#secret_group_variables' do - subject { builder.secret_group_variables(ref: job.git_ref, environment: job.expanded_environment_name) } + subject { builder.secret_group_variables(environment: job.expanded_environment_name) } let_it_be(:protected_variable) { create(:ci_group_variable, protected: true, group: group) } let_it_be(:unprotected_variable) { create(:ci_group_variable, protected: false, group: group) } - context 'with ci_variables_builder_memoize_secret_variables disabled' do - before do - stub_feature_flags(ci_variables_builder_memoize_secret_variables: false) + include_examples "secret CI variables" + + context 'variables memoization' do + let_it_be(:scoped_variable) { create(:ci_group_variable, group: group, environment_scope: 'scoped') } + + let(:environment) { job.expanded_environment_name } + let(:scoped_variable_item) { Gitlab::Ci::Variables::Collection::Item.fabricate(scoped_variable) } + + context 'with protected environments' do + it 'memoizes the result by environment' do + expect(pipeline.project) + .to receive(:protected_for?) 
+ .with(pipeline.jobs_git_ref) + .once.and_return(true) + + expect_next_instance_of(described_class::Group) do |group_variables_builder| + expect(group_variables_builder) + .to receive(:secret_variables) + .with(environment: 'production', protected_ref: true) + .once + .and_call_original + end + + 2.times do + expect(builder.secret_group_variables(environment: 'production')) + .to contain_exactly(unprotected_variable_item, protected_variable_item) + end + end end - let(:protected_variable_item) { protected_variable } - let(:unprotected_variable_item) { unprotected_variable } + context 'with unprotected environments' do + it 'memoizes the result by environment' do + expect(pipeline.project) + .to receive(:protected_for?) + .with(pipeline.jobs_git_ref) + .once.and_return(false) + + expect_next_instance_of(described_class::Group) do |group_variables_builder| + expect(group_variables_builder) + .to receive(:secret_variables) + .with(environment: nil, protected_ref: false) + .once + .and_call_original + + expect(group_variables_builder) + .to receive(:secret_variables) + .with(environment: 'scoped', protected_ref: false) + .once + .and_call_original + end + + 2.times do + expect(builder.secret_group_variables(environment: nil)) + .to contain_exactly(unprotected_variable_item) - include_examples "secret CI variables" + expect(builder.secret_group_variables(environment: 'scoped')) + .to contain_exactly(unprotected_variable_item, scoped_variable_item) + end + end + end end + end - context 'with ci_variables_builder_memoize_secret_variables enabled' do - before do - stub_feature_flags(ci_variables_builder_memoize_secret_variables: true) - end + describe '#secret_project_variables' do + let_it_be(:protected_variable) { create(:ci_variable, protected: true, project: project) } + let_it_be(:unprotected_variable) { create(:ci_variable, protected: false, project: project) } - let(:protected_variable_item) { Gitlab::Ci::Variables::Collection::Item.fabricate(protected_variable) } - let(:unprotected_variable_item) { Gitlab::Ci::Variables::Collection::Item.fabricate(unprotected_variable) } + let(:environment) { job.expanded_environment_name } - include_examples "secret CI variables" + subject { builder.secret_project_variables(environment: environment) } - context 'variables memoization' do - let_it_be(:scoped_variable) { create(:ci_group_variable, group: group, environment_scope: 'scoped') } + include_examples "secret CI variables" - let(:ref) { job.git_ref } - let(:environment) { job.expanded_environment_name } - let(:scoped_variable_item) { Gitlab::Ci::Variables::Collection::Item.fabricate(scoped_variable) } + context 'variables memoization' do + let_it_be(:scoped_variable) { create(:ci_variable, project: project, environment_scope: 'scoped') } - context 'with protected environments' do - it 'memoizes the result by environment' do - expect(pipeline.project) - .to receive(:protected_for?) - .with(pipeline.jobs_git_ref) - .once.and_return(true) + let(:scoped_variable_item) { Gitlab::Ci::Variables::Collection::Item.fabricate(scoped_variable) } - expect_next_instance_of(described_class::Group) do |group_variables_builder| - expect(group_variables_builder) - .to receive(:secret_variables) - .with(environment: 'production', protected_ref: true) - .once - .and_call_original - end + context 'with protected environments' do + it 'memoizes the result by environment' do + expect(pipeline.project) + .to receive(:protected_for?) 
+ .with(pipeline.jobs_git_ref) + .once.and_return(true) - 2.times do - expect(builder.secret_group_variables(ref: ref, environment: 'production')) - .to contain_exactly(unprotected_variable_item, protected_variable_item) - end + expect_next_instance_of(described_class::Project) do |project_variables_builder| + expect(project_variables_builder) + .to receive(:secret_variables) + .with(environment: 'production', protected_ref: true) + .once + .and_call_original + end + + 2.times do + expect(builder.secret_project_variables(environment: 'production')) + .to contain_exactly(unprotected_variable_item, protected_variable_item) end end + end - context 'with unprotected environments' do - it 'memoizes the result by environment' do - expect(pipeline.project) - .to receive(:protected_for?) - .with(pipeline.jobs_git_ref) - .once.and_return(false) - - expect_next_instance_of(described_class::Group) do |group_variables_builder| - expect(group_variables_builder) - .to receive(:secret_variables) - .with(environment: nil, protected_ref: false) - .once - .and_call_original - - expect(group_variables_builder) - .to receive(:secret_variables) - .with(environment: 'scoped', protected_ref: false) - .once - .and_call_original - end - - 2.times do - expect(builder.secret_group_variables(ref: 'other', environment: nil)) - .to contain_exactly(unprotected_variable_item) - - expect(builder.secret_group_variables(ref: 'other', environment: 'scoped')) - .to contain_exactly(unprotected_variable_item, scoped_variable_item) - end + context 'with unprotected environments' do + it 'memoizes the result by environment' do + expect(pipeline.project) + .to receive(:protected_for?) + .with(pipeline.jobs_git_ref) + .once.and_return(false) + + expect_next_instance_of(described_class::Project) do |project_variables_builder| + expect(project_variables_builder) + .to receive(:secret_variables) + .with(environment: nil, protected_ref: false) + .once + .and_call_original + + expect(project_variables_builder) + .to receive(:secret_variables) + .with(environment: 'scoped', protected_ref: false) + .once + .and_call_original + end + + 2.times do + expect(builder.secret_project_variables(environment: nil)) + .to contain_exactly(unprotected_variable_item) + + expect(builder.secret_project_variables(environment: 'scoped')) + .to contain_exactly(unprotected_variable_item, scoped_variable_item) end end end end end - describe '#secret_project_variables' do - let_it_be(:protected_variable) { create(:ci_variable, protected: true, project: project) } - let_it_be(:unprotected_variable) { create(:ci_variable, protected: false, project: project) } + describe '#config_variables' do + subject(:config_variables) { builder.config_variables } - let(:ref) { job.git_ref } - let(:environment) { job.expanded_environment_name } + context 'without project' do + before do + pipeline.update!(project_id: nil) + end + + it { expect(config_variables.size).to eq(0) } + end - subject { builder.secret_project_variables(ref: ref, environment: environment) } + context 'without repository' do + let(:project) { create(:project) } + let(:pipeline) { build(:ci_pipeline, ref: nil, sha: nil, project: project) } - context 'with ci_variables_builder_memoize_secret_variables disabled' do - before do - stub_feature_flags(ci_variables_builder_memoize_secret_variables: false) + it { expect(config_variables['CI_COMMIT_SHA']).to be_nil } + end + + context 'with protected variables' do + let_it_be(:instance_variable) do + create(:ci_instance_variable, :protected, key: 
'instance_variable') + end + + let_it_be(:group_variable) do + create(:ci_group_variable, :protected, group: group, key: 'group_variable') end - let(:protected_variable_item) { protected_variable } - let(:unprotected_variable_item) { unprotected_variable } + let_it_be(:project_variable) do + create(:ci_variable, :protected, project: project, key: 'project_variable') + end - include_examples "secret CI variables" + it 'does not include protected variables' do + expect(config_variables[instance_variable.key]).to be_nil + expect(config_variables[group_variable.key]).to be_nil + expect(config_variables[project_variable.key]).to be_nil + end end - context 'with ci_variables_builder_memoize_secret_variables enabled' do - before do - stub_feature_flags(ci_variables_builder_memoize_secret_variables: true) + context 'with scoped variables' do + let_it_be(:scoped_group_variable) do + create(:ci_group_variable, + group: group, + key: 'group_variable', + value: 'scoped', + environment_scope: 'scoped') end - let(:protected_variable_item) { Gitlab::Ci::Variables::Collection::Item.fabricate(protected_variable) } - let(:unprotected_variable_item) { Gitlab::Ci::Variables::Collection::Item.fabricate(unprotected_variable) } + let_it_be(:group_variable) do + create(:ci_group_variable, + group: group, + key: 'group_variable', + value: 'unscoped') + end - include_examples "secret CI variables" + let_it_be(:scoped_project_variable) do + create(:ci_variable, + project: project, + key: 'project_variable', + value: 'scoped', + environment_scope: 'scoped') + end - context 'variables memoization' do - let_it_be(:scoped_variable) { create(:ci_variable, project: project, environment_scope: 'scoped') } + let_it_be(:project_variable) do + create(:ci_variable, + project: project, + key: 'project_variable', + value: 'unscoped') + end - let(:scoped_variable_item) { Gitlab::Ci::Variables::Collection::Item.fabricate(scoped_variable) } + it 'does not include scoped variables' do + expect(config_variables.to_hash[group_variable.key]).to eq('unscoped') + expect(config_variables.to_hash[project_variable.key]).to eq('unscoped') + end + end - context 'with protected environments' do - it 'memoizes the result by environment' do - expect(pipeline.project) - .to receive(:protected_for?) 
- .with(pipeline.jobs_git_ref) - .once.and_return(true) + context 'variables ordering' do + def var(name, value) + { key: name, value: value.to_s, public: true, masked: false } + end - expect_next_instance_of(described_class::Project) do |project_variables_builder| - expect(project_variables_builder) - .to receive(:secret_variables) - .with(environment: 'production', protected_ref: true) - .once - .and_call_original - end + before do + allow(pipeline.project).to receive(:predefined_variables) { [var('A', 1), var('B', 1)] } + allow(pipeline).to receive(:predefined_variables) { [var('B', 2), var('C', 2)] } + allow(builder).to receive(:secret_instance_variables) { [var('C', 3), var('D', 3)] } + allow(builder).to receive(:secret_group_variables) { [var('D', 4), var('E', 4)] } + allow(builder).to receive(:secret_project_variables) { [var('E', 5), var('F', 5)] } + allow(pipeline).to receive(:variables) { [var('F', 6), var('G', 6)] } + allow(pipeline).to receive(:pipeline_schedule) { double(job_variables: [var('G', 7), var('H', 7)]) } + end - 2.times do - expect(builder.secret_project_variables(ref: ref, environment: 'production')) - .to contain_exactly(unprotected_variable_item, protected_variable_item) - end - end - end + it 'returns variables in order depending on resource hierarchy' do + expect(config_variables.to_runner_variables).to eq( + [var('A', 1), var('B', 1), + var('B', 2), var('C', 2), + var('C', 3), var('D', 3), + var('D', 4), var('E', 4), + var('E', 5), var('F', 5), + var('F', 6), var('G', 6), + var('G', 7), var('H', 7)]) + end - context 'with unprotected environments' do - it 'memoizes the result by environment' do - expect(pipeline.project) - .to receive(:protected_for?) - .with(pipeline.jobs_git_ref) - .once.and_return(false) - - expect_next_instance_of(described_class::Project) do |project_variables_builder| - expect(project_variables_builder) - .to receive(:secret_variables) - .with(environment: nil, protected_ref: false) - .once - .and_call_original - - expect(project_variables_builder) - .to receive(:secret_variables) - .with(environment: 'scoped', protected_ref: false) - .once - .and_call_original - end - - 2.times do - expect(builder.secret_project_variables(ref: 'other', environment: nil)) - .to contain_exactly(unprotected_variable_item) - - expect(builder.secret_project_variables(ref: 'other', environment: 'scoped')) - .to contain_exactly(unprotected_variable_item, scoped_variable_item) - end - end - end + it 'overrides duplicate keys depending on resource hierarchy' do + expect(config_variables.to_hash).to match( + 'A' => '1', 'B' => '2', + 'C' => '3', 'D' => '4', + 'E' => '5', 'F' => '6', + 'G' => '7', 'H' => '7') end end end diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb index ebb5c91ebad..9b68ee2d6a2 100644 --- a/spec/lib/gitlab/ci/yaml_processor_spec.rb +++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb @@ -842,7 +842,7 @@ module Gitlab describe "Image and service handling" do context "when extended docker configuration is used" do it "returns image and service when defined" do - config = YAML.dump({ image: { name: "ruby:2.7", entrypoint: ["/usr/local/bin/init", "run"] }, + config = YAML.dump({ image: { name: "image:1.0", entrypoint: ["/usr/local/bin/init", "run"] }, services: ["mysql", { name: "docker:dind", alias: "docker", entrypoint: ["/usr/local/bin/init", "run"], command: ["/usr/local/bin/init", "run"] }], @@ -860,7 +860,7 @@ module Gitlab options: { before_script: ["pwd"], script: ["rspec"], - image: { name: 
"ruby:2.7", entrypoint: ["/usr/local/bin/init", "run"] }, + image: { name: "image:1.0", entrypoint: ["/usr/local/bin/init", "run"] }, services: [{ name: "mysql" }, { name: "docker:dind", alias: "docker", entrypoint: ["/usr/local/bin/init", "run"], command: ["/usr/local/bin/init", "run"] }] @@ -874,10 +874,10 @@ module Gitlab end it "returns image and service when overridden for job" do - config = YAML.dump({ image: "ruby:2.7", + config = YAML.dump({ image: "image:1.0", services: ["mysql"], before_script: ["pwd"], - rspec: { image: { name: "ruby:3.0", entrypoint: ["/usr/local/bin/init", "run"] }, + rspec: { image: { name: "image:1.0", entrypoint: ["/usr/local/bin/init", "run"] }, services: [{ name: "postgresql", alias: "db-pg", entrypoint: ["/usr/local/bin/init", "run"], command: ["/usr/local/bin/init", "run"] }, "docker:dind"], @@ -894,7 +894,7 @@ module Gitlab options: { before_script: ["pwd"], script: ["rspec"], - image: { name: "ruby:3.0", entrypoint: ["/usr/local/bin/init", "run"] }, + image: { name: "image:1.0", entrypoint: ["/usr/local/bin/init", "run"] }, services: [{ name: "postgresql", alias: "db-pg", entrypoint: ["/usr/local/bin/init", "run"], command: ["/usr/local/bin/init", "run"] }, { name: "docker:dind" }] @@ -910,7 +910,7 @@ module Gitlab context "when etended docker configuration is not used" do it "returns image and service when defined" do - config = YAML.dump({ image: "ruby:2.7", + config = YAML.dump({ image: "image:1.0", services: ["mysql", "docker:dind"], before_script: ["pwd"], rspec: { script: "rspec" } }) @@ -926,7 +926,7 @@ module Gitlab options: { before_script: ["pwd"], script: ["rspec"], - image: { name: "ruby:2.7" }, + image: { name: "image:1.0" }, services: [{ name: "mysql" }, { name: "docker:dind" }] }, allow_failure: false, @@ -938,10 +938,10 @@ module Gitlab end it "returns image and service when overridden for job" do - config = YAML.dump({ image: "ruby:2.7", + config = YAML.dump({ image: "image:1.0", services: ["mysql"], before_script: ["pwd"], - rspec: { image: "ruby:3.0", services: ["postgresql", "docker:dind"], script: "rspec" } }) + rspec: { image: "image:1.0", services: ["postgresql", "docker:dind"], script: "rspec" } }) config_processor = Gitlab::Ci::YamlProcessor.new(config).execute @@ -954,7 +954,7 @@ module Gitlab options: { before_script: ["pwd"], script: ["rspec"], - image: { name: "ruby:3.0" }, + image: { name: "image:1.0" }, services: [{ name: "postgresql" }, { name: "docker:dind" }] }, allow_failure: false, @@ -1557,7 +1557,7 @@ module Gitlab describe "Artifacts" do it "returns artifacts when defined" do config = YAML.dump({ - image: "ruby:2.7", + image: "image:1.0", services: ["mysql"], before_script: ["pwd"], rspec: { @@ -1583,7 +1583,7 @@ module Gitlab options: { before_script: ["pwd"], script: ["rspec"], - image: { name: "ruby:2.7" }, + image: { name: "image:1.0" }, services: [{ name: "mysql" }], artifacts: { name: "custom_name", @@ -2327,7 +2327,7 @@ module Gitlab context 'when hidden job have a script definition' do let(:config) do YAML.dump({ - '.hidden_job' => { image: 'ruby:2.7', script: 'test' }, + '.hidden_job' => { image: 'image:1.0', script: 'test' }, 'normal_job' => { script: 'test' } }) end @@ -2338,7 +2338,7 @@ module Gitlab context "when hidden job doesn't have a script definition" do let(:config) do YAML.dump({ - '.hidden_job' => { image: 'ruby:2.7' }, + '.hidden_job' => { image: 'image:1.0' }, 'normal_job' => { script: 'test' } }) end diff --git a/spec/lib/gitlab/config/loader/yaml_spec.rb 
b/spec/lib/gitlab/config/loader/yaml_spec.rb index be568a8e5f9..66ea931a42c 100644 --- a/spec/lib/gitlab/config/loader/yaml_spec.rb +++ b/spec/lib/gitlab/config/loader/yaml_spec.rb @@ -7,7 +7,7 @@ RSpec.describe Gitlab::Config::Loader::Yaml do let(:yml) do <<~YAML - image: 'ruby:2.7' + image: 'image:1.0' texts: nested_key: 'value1' more_text: @@ -34,7 +34,7 @@ RSpec.describe Gitlab::Config::Loader::Yaml do end context 'when yaml syntax is correct' do - let(:yml) { 'image: ruby:2.7' } + let(:yml) { 'image: image:1.0' } describe '#valid?' do it 'returns true' do @@ -44,7 +44,7 @@ RSpec.describe Gitlab::Config::Loader::Yaml do describe '#load!' do it 'returns a valid hash' do - expect(loader.load!).to eq(image: 'ruby:2.7') + expect(loader.load!).to eq(image: 'image:1.0') end end end @@ -164,7 +164,7 @@ RSpec.describe Gitlab::Config::Loader::Yaml do describe '#load_raw!' do it 'loads keys as strings' do expect(loader.load_raw!).to eq( - 'image' => 'ruby:2.7', + 'image' => 'image:1.0', 'texts' => { 'nested_key' => 'value1', 'more_text' => { @@ -178,7 +178,7 @@ RSpec.describe Gitlab::Config::Loader::Yaml do describe '#load!' do it 'symbolizes keys' do expect(loader.load!).to eq( - image: 'ruby:2.7', + image: 'image:1.0', texts: { nested_key: 'value1', more_text: { diff --git a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb index 08d29f7842c..44e2cb21677 100644 --- a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb +++ b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb @@ -107,24 +107,8 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do stub_env('CUSTOMER_PORTAL_URL', customer_portal_url) end - context 'when in production' do - before do - allow(Rails).to receive(:env).and_return(ActiveSupport::StringInquirer.new('production')) - end - - it 'does not add CUSTOMER_PORTAL_URL to CSP' do - expect(directives['frame_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.frame_src + " http://localhost/admin/ http://localhost/assets/ http://localhost/-/speedscope/index.html http://localhost/-/sandbox/mermaid") - end - end - - context 'when in development' do - before do - allow(Rails).to receive(:env).and_return(ActiveSupport::StringInquirer.new('development')) - end - - it 'adds CUSTOMER_PORTAL_URL to CSP' do - expect(directives['frame_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.frame_src + " http://localhost/rails/letter_opener/ https://customers.example.com http://localhost/admin/ http://localhost/assets/ http://localhost/-/speedscope/index.html http://localhost/-/sandbox/mermaid") - end + it 'adds CUSTOMER_PORTAL_URL to CSP' do + expect(directives['frame_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.frame_src + " http://localhost/admin/ http://localhost/assets/ http://localhost/-/speedscope/index.html http://localhost/-/sandbox/mermaid #{customer_portal_url}") end end diff --git a/spec/lib/gitlab/data_builder/deployment_spec.rb b/spec/lib/gitlab/data_builder/deployment_spec.rb index ab8c8a51694..e8fe80f75cb 100644 --- a/spec/lib/gitlab/data_builder/deployment_spec.rb +++ b/spec/lib/gitlab/data_builder/deployment_spec.rb @@ -46,5 +46,42 @@ RSpec.describe Gitlab::DataBuilder::Deployment do expect(data[:deployable_url]).to be_nil end + + context 'when commit does not exist in the repository' do + let_it_be(:project) { create(:project, :repository) } + let_it_be(:deployment) { create(:deployment, project: project) } + + subject(:data) { 
described_class.build(deployment, Time.current) } + + before(:all) do + project.repository.remove + end + + it 'returns nil for commit_url' do + expect(data[:commit_url]).to be_nil + end + + it 'returns nil for commit_title' do + expect(data[:commit_title]).to be_nil + end + end + + context 'when deployed_by is nil' do + let_it_be(:deployment) { create(:deployment, user: nil, deployable: nil) } + + subject(:data) { described_class.build(deployment, Time.current) } + + before(:all) do + deployment.user = nil + end + + it 'returns nil for user' do + expect(data[:user]).to be_nil + end + + it 'returns nil for user_url' do + expect(data[:user_url]).to be_nil + end + end end end diff --git a/spec/lib/gitlab/data_builder/note_spec.rb b/spec/lib/gitlab/data_builder/note_spec.rb index 90ca5430526..3fa535dd800 100644 --- a/spec/lib/gitlab/data_builder/note_spec.rb +++ b/spec/lib/gitlab/data_builder/note_spec.rb @@ -8,18 +8,22 @@ RSpec.describe Gitlab::DataBuilder::Note do let(:data) { described_class.build(note, user) } let(:fixed_time) { Time.at(1425600000) } # Avoid time precision errors - before do - expect(data).to have_key(:object_attributes) - expect(data[:object_attributes]).to have_key(:url) - expect(data[:object_attributes][:url]) - .to eq(Gitlab::UrlBuilder.build(note)) - expect(data[:object_kind]).to eq('note') - expect(data[:user]).to eq(user.hook_attrs) + shared_examples 'includes general data' do + specify do + expect(data).to have_key(:object_attributes) + expect(data[:object_attributes]).to have_key(:url) + expect(data[:object_attributes][:url]) + .to eq(Gitlab::UrlBuilder.build(note)) + expect(data[:object_kind]).to eq('note') + expect(data[:user]).to eq(user.hook_attrs) + end end describe 'When asking for a note on commit' do let(:note) { create(:note_on_commit, project: project) } + it_behaves_like 'includes general data' + it 'returns the note and commit-specific data' do expect(data).to have_key(:commit) end @@ -31,6 +35,8 @@ RSpec.describe Gitlab::DataBuilder::Note do describe 'When asking for a note on commit diff' do let(:note) { create(:diff_note_on_commit, project: project) } + it_behaves_like 'includes general data' + it 'returns the note and commit-specific data' do expect(data).to have_key(:commit) end @@ -51,22 +57,21 @@ RSpec.describe Gitlab::DataBuilder::Note do create(:note_on_issue, noteable: issue, project: project) end + it_behaves_like 'includes general data' + it 'returns the note and issue-specific data' do - without_timestamps = lambda { |label| label.except('created_at', 'updated_at') } - hook_attrs = issue.reload.hook_attrs + expect_next_instance_of(Gitlab::HookData::IssueBuilder) do |issue_data_builder| + expect(issue_data_builder).to receive(:build).and_return('Issue data') + end - expect(data).to have_key(:issue) - expect(data[:issue].except('updated_at', 'labels')) - .to eq(hook_attrs.except('updated_at', 'labels')) - expect(data[:issue]['updated_at']) - .to be >= hook_attrs['updated_at'] - expect(data[:issue]['labels'].map(&without_timestamps)) - .to eq(hook_attrs['labels'].map(&without_timestamps)) + expect(data[:issue]).to eq('Issue data') end context 'with confidential issue' do let(:issue) { create(:issue, project: project, confidential: true) } + it_behaves_like 'includes general data' + it 'sets event_type to confidential_note' do expect(data[:event_type]).to eq('confidential_note') end @@ -77,10 +82,12 @@ RSpec.describe Gitlab::DataBuilder::Note do end describe 'When asking for a note on merge request' do + let(:label) { create(:label, project: 
project) } let(:merge_request) do - create(:merge_request, created_at: fixed_time, + create(:labeled_merge_request, created_at: fixed_time, updated_at: fixed_time, - source_project: project) + source_project: project, + labels: [label]) end let(:note) do @@ -88,12 +95,14 @@ RSpec.describe Gitlab::DataBuilder::Note do project: project) end - it 'returns the note and merge request data' do - expect(data).to have_key(:merge_request) - expect(data[:merge_request].except('updated_at')) - .to eq(merge_request.reload.hook_attrs.except('updated_at')) - expect(data[:merge_request]['updated_at']) - .to be >= merge_request.hook_attrs['updated_at'] + it_behaves_like 'includes general data' + + it 'returns the merge request data' do + expect_next_instance_of(Gitlab::HookData::MergeRequestBuilder) do |mr_data_builder| + expect(mr_data_builder).to receive(:build).and_return('MR data') + end + + expect(data[:merge_request]).to eq('MR data') end include_examples 'project hook data' @@ -101,9 +110,11 @@ RSpec.describe Gitlab::DataBuilder::Note do end describe 'When asking for a note on merge request diff' do + let(:label) { create(:label, project: project) } let(:merge_request) do - create(:merge_request, created_at: fixed_time, updated_at: fixed_time, - source_project: project) + create(:labeled_merge_request, created_at: fixed_time, updated_at: fixed_time, + source_project: project, + labels: [label]) end let(:note) do @@ -111,12 +122,14 @@ RSpec.describe Gitlab::DataBuilder::Note do project: project) end - it 'returns the note and merge request diff data' do - expect(data).to have_key(:merge_request) - expect(data[:merge_request].except('updated_at')) - .to eq(merge_request.reload.hook_attrs.except('updated_at')) - expect(data[:merge_request]['updated_at']) - .to be >= merge_request.hook_attrs['updated_at'] + it_behaves_like 'includes general data' + + it 'returns the merge request data' do + expect_next_instance_of(Gitlab::HookData::MergeRequestBuilder) do |mr_data_builder| + expect(mr_data_builder).to receive(:build).and_return('MR data') + end + + expect(data[:merge_request]).to eq('MR data') end include_examples 'project hook data' @@ -134,6 +147,8 @@ RSpec.describe Gitlab::DataBuilder::Note do project: project) end + it_behaves_like 'includes general data' + it 'returns the note and project snippet data' do expect(data).to have_key(:snippet) expect(data[:snippet].except('updated_at')) diff --git a/spec/lib/gitlab/database/background_migration/batch_metrics_spec.rb b/spec/lib/gitlab/database/background_migration/batch_metrics_spec.rb index 66983733411..6db3081ca7e 100644 --- a/spec/lib/gitlab/database/background_migration/batch_metrics_spec.rb +++ b/spec/lib/gitlab/database/background_migration/batch_metrics_spec.rb @@ -10,7 +10,6 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchMetrics do expect(batch_metrics.timings).to be_empty expect(Gitlab::Metrics::System).to receive(:monotonic_time) - .exactly(6).times .and_return(0.0, 111.0, 200.0, 290.0, 300.0, 410.0) batch_metrics.time_operation(:my_label) do @@ -28,4 +27,33 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchMetrics do expect(batch_metrics.timings).to eq(my_label: [111.0, 110.0], my_other_label: [90.0]) end end + + describe '#instrument_operation' do + it 'tracks duration and affected rows' do + expect(batch_metrics.timings).to be_empty + expect(batch_metrics.affected_rows).to be_empty + + expect(Gitlab::Metrics::System).to receive(:monotonic_time) + .and_return(0.0, 111.0, 200.0, 290.0, 300.0, 410.0, 420.0, 450.0) + 
+ batch_metrics.instrument_operation(:my_label) do + 3 + end + + batch_metrics.instrument_operation(:my_other_label) do + 42 + end + + batch_metrics.instrument_operation(:my_label) do + 2 + end + + batch_metrics.instrument_operation(:my_other_label) do + :not_an_integer + end + + expect(batch_metrics.timings).to eq(my_label: [111.0, 110.0], my_other_label: [90.0, 30.0]) + expect(batch_metrics.affected_rows).to eq(my_label: [3, 2], my_other_label: [42]) + end + end end diff --git a/spec/lib/gitlab/database/background_migration/batched_job_spec.rb b/spec/lib/gitlab/database/background_migration/batched_job_spec.rb index 8c663ff9f8a..c39f6a78e93 100644 --- a/spec/lib/gitlab/database/background_migration/batched_job_spec.rb +++ b/spec/lib/gitlab/database/background_migration/batched_job_spec.rb @@ -21,7 +21,19 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d context 'when a job is running' do it 'logs the transition' do - expect(Gitlab::AppLogger).to receive(:info).with( { batched_job_id: job.id, message: 'BatchedJob transition', new_state: :running, previous_state: :failed } ) + expect(Gitlab::AppLogger).to receive(:info).with( + { + batched_job_id: job.id, + batched_migration_id: job.batched_background_migration_id, + exception_class: nil, + exception_message: nil, + job_arguments: job.batched_migration.job_arguments, + job_class_name: job.batched_migration.job_class_name, + message: 'BatchedJob transition', + new_state: :running, + previous_state: :failed + } + ) expect { job.run! }.to change(job, :started_at) end @@ -31,7 +43,19 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d let(:job) { create(:batched_background_migration_job, :running) } it 'logs the transition' do - expect(Gitlab::AppLogger).to receive(:info).with( { batched_job_id: job.id, message: 'BatchedJob transition', new_state: :succeeded, previous_state: :running } ) + expect(Gitlab::AppLogger).to receive(:info).with( + { + batched_job_id: job.id, + batched_migration_id: job.batched_background_migration_id, + exception_class: nil, + exception_message: nil, + job_arguments: job.batched_migration.job_arguments, + job_class_name: job.batched_migration.job_class_name, + message: 'BatchedJob transition', + new_state: :succeeded, + previous_state: :running + } + ) job.succeed! 
end @@ -89,7 +113,15 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d end it 'logs the error' do - expect(Gitlab::AppLogger).to receive(:error).with( { message: error_message, batched_job_id: job.id } ) + expect(Gitlab::AppLogger).to receive(:error).with( + { + batched_job_id: job.id, + batched_migration_id: job.batched_background_migration_id, + job_arguments: job.batched_migration.job_arguments, + job_class_name: job.batched_migration.job_class_name, + message: error_message + } + ) job.failure!(error: exception) end @@ -100,13 +132,32 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d let(:job) { create(:batched_background_migration_job, :running) } it 'logs the transition' do - expect(Gitlab::AppLogger).to receive(:info).with( { batched_job_id: job.id, message: 'BatchedJob transition', new_state: :failed, previous_state: :running } ) + expect(Gitlab::AppLogger).to receive(:info).with( + { + batched_job_id: job.id, + batched_migration_id: job.batched_background_migration_id, + exception_class: RuntimeError, + exception_message: 'error', + job_arguments: job.batched_migration.job_arguments, + job_class_name: job.batched_migration.job_class_name, + message: 'BatchedJob transition', + new_state: :failed, + previous_state: :running + } + ) - job.failure! + job.failure!(error: RuntimeError.new('error')) end it 'tracks the exception' do - expect(Gitlab::ErrorTracking).to receive(:track_exception).with(RuntimeError, { batched_job_id: job.id } ) + expect(Gitlab::ErrorTracking).to receive(:track_exception).with( + RuntimeError, + { + batched_job_id: job.id, + job_arguments: job.batched_migration.job_arguments, + job_class_name: job.batched_migration.job_class_name + } + ) job.failure!(error: RuntimeError.new) end @@ -130,13 +181,11 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d describe 'scopes' do let_it_be(:fixed_time) { Time.new(2021, 04, 27, 10, 00, 00, 00) } - let_it_be(:pending_job) { create(:batched_background_migration_job, :pending, updated_at: fixed_time) } - let_it_be(:running_job) { create(:batched_background_migration_job, :running, updated_at: fixed_time) } - let_it_be(:stuck_job) { create(:batched_background_migration_job, :pending, updated_at: fixed_time - described_class::STUCK_JOBS_TIMEOUT) } - let_it_be(:failed_job) { create(:batched_background_migration_job, :failed, attempts: 1) } - - let!(:max_attempts_failed_job) { create(:batched_background_migration_job, :failed, attempts: described_class::MAX_ATTEMPTS) } - let!(:succeeded_job) { create(:batched_background_migration_job, :succeeded) } + let_it_be(:pending_job) { create(:batched_background_migration_job, :pending, created_at: fixed_time - 2.days, updated_at: fixed_time) } + let_it_be(:running_job) { create(:batched_background_migration_job, :running, created_at: fixed_time - 2.days, updated_at: fixed_time) } + let_it_be(:stuck_job) { create(:batched_background_migration_job, :pending, created_at: fixed_time, updated_at: fixed_time - described_class::STUCK_JOBS_TIMEOUT) } + let_it_be(:failed_job) { create(:batched_background_migration_job, :failed, created_at: fixed_time, attempts: 1) } + let_it_be(:max_attempts_failed_job) { create(:batched_background_migration_job, :failed, created_at: fixed_time, attempts: described_class::MAX_ATTEMPTS) } before do travel_to fixed_time @@ -165,6 +214,12 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d expect(described_class.retriable).to 
contain_exactly(failed_job, stuck_job) end end + + describe '.created_since' do + it 'returns jobs since a given time' do + expect(described_class.created_since(fixed_time)).to contain_exactly(stuck_job, failed_job, max_attempts_failed_job) + end + end end describe 'delegated batched_migration attributes' do @@ -194,6 +249,12 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d expect(batched_job.migration_job_arguments).to eq(batched_migration.job_arguments) end end + + describe '#migration_job_class_name' do + it 'returns the migration job_class_name' do + expect(batched_job.migration_job_class_name).to eq(batched_migration.job_class_name) + end + end end describe '#can_split?' do diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb index 124d204cb62..f147e8204e6 100644 --- a/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb +++ b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb @@ -3,8 +3,16 @@ require 'spec_helper' RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do + let(:connection) { Gitlab::Database.database_base_models[:main].connection } let(:migration_wrapper) { double('test wrapper') } - let(:runner) { described_class.new(migration_wrapper) } + + let(:runner) { described_class.new(connection: connection, migration_wrapper: migration_wrapper) } + + around do |example| + Gitlab::Database::SharedModel.using_connection(connection) do + example.run + end + end describe '#run_migration_job' do shared_examples_for 'it has completed the migration' do @@ -86,6 +94,19 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do end end + context 'when the migration should stop' do + let(:migration) { create(:batched_background_migration, :active) } + + let!(:job) { create(:batched_background_migration_job, :failed, batched_migration: migration) } + + it 'changes the status to failure' do + expect(migration).to receive(:should_stop?).and_return(true) + expect(migration_wrapper).to receive(:perform).and_return(job) + + expect { runner.run_migration_job(migration) }.to change { migration.status_name }.from(:active).to(:failed) + end + end + context 'when the migration has previous jobs' do let!(:event1) { create(:event) } let!(:event2) { create(:event) } @@ -282,7 +303,9 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do end describe '#finalize' do - let(:migration_wrapper) { Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper.new } + let(:migration_wrapper) do + Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper.new(connection: connection) + end let(:migration_helpers) { ActiveRecord::Migration.new } let(:table_name) { :_test_batched_migrations_test_table } @@ -293,8 +316,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do let!(:batched_migration) do create( - :batched_background_migration, - status: migration_status, + :batched_background_migration, migration_status, max_value: 8, batch_size: 2, sub_batch_size: 1, @@ -339,7 +361,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do .with('CopyColumnUsingBackgroundMigrationJob', table_name, column_name, job_arguments) .and_return(batched_migration) - expect(batched_migration).to receive(:finalizing!).and_call_original + expect(batched_migration).to 
receive(:finalize!).and_call_original expect do runner.finalize( @@ -348,7 +370,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do column_name, job_arguments ) - end.to change { batched_migration.reload.status }.from('active').to('finished') + end.to change { batched_migration.reload.status_name }.from(:active).to(:finished) expect(batched_migration.batched_jobs).to all(be_succeeded) @@ -390,7 +412,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do expect(Gitlab::AppLogger).to receive(:warn) .with("Batched background migration for the given configuration is already finished: #{configuration}") - expect(batched_migration).not_to receive(:finalizing!) + expect(batched_migration).not_to receive(:finalize!) runner.finalize( batched_migration.job_class_name, @@ -417,7 +439,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do expect(Gitlab::AppLogger).to receive(:warn) .with("Could not find batched background migration for the given configuration: #{configuration}") - expect(batched_migration).not_to receive(:finalizing!) + expect(batched_migration).not_to receive(:finalize!) runner.finalize( batched_migration.job_class_name, @@ -431,8 +453,6 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do describe '.finalize' do context 'when the connection is passed' do - let(:connection) { double('connection') } - let(:table_name) { :_test_batched_migrations_test_table } let(:column_name) { :some_id } let(:job_arguments) { [:some, :other, :arguments] } diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb index 803123e8e34..7a433be0e2f 100644 --- a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb +++ b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb @@ -27,28 +27,46 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m it { is_expected.to validate_uniqueness_of(:job_arguments).scoped_to(:job_class_name, :table_name, :column_name) } context 'when there are failed jobs' do - let(:batched_migration) { create(:batched_background_migration, status: :active, total_tuple_count: 100) } + let(:batched_migration) { create(:batched_background_migration, :active, total_tuple_count: 100) } let!(:batched_job) { create(:batched_background_migration_job, :failed, batched_migration: batched_migration) } it 'raises an exception' do - expect { batched_migration.finished! }.to raise_error(ActiveRecord::RecordInvalid) + expect { batched_migration.finish! }.to raise_error(StateMachines::InvalidTransition) - expect(batched_migration.reload.status).to eql 'active' + expect(batched_migration.reload.status_name).to be :active end end context 'when the jobs are completed' do - let(:batched_migration) { create(:batched_background_migration, status: :active, total_tuple_count: 100) } + let(:batched_migration) { create(:batched_background_migration, :active, total_tuple_count: 100) } let!(:batched_job) { create(:batched_background_migration_job, :succeeded, batched_migration: batched_migration) } it 'finishes the migration' do - batched_migration.finished! + batched_migration.finish! 
- expect(batched_migration.status).to eql 'finished' + expect(batched_migration.status_name).to be :finished end end end + describe 'state machine' do + context 'when a migration is executed' do + let!(:batched_migration) { create(:batched_background_migration) } + + it 'updates the started_at' do + expect { batched_migration.execute! }.to change(batched_migration, :started_at).from(nil).to(Time) + end + end + end + + describe '.valid_status' do + valid_status = [:paused, :active, :finished, :failed, :finalizing] + + it 'returns valid status' do + expect(described_class.valid_status).to eq(valid_status) + end + end + describe '.queue_order' do let!(:migration1) { create(:batched_background_migration) } let!(:migration2) { create(:batched_background_migration) } @@ -61,12 +79,23 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m describe '.active_migration' do let!(:migration1) { create(:batched_background_migration, :finished) } - let!(:migration2) { create(:batched_background_migration, :active) } - let!(:migration3) { create(:batched_background_migration, :active) } - it 'returns the first active migration according to queue order' do - expect(described_class.active_migration).to eq(migration2) - create(:batched_background_migration_job, :succeeded, batched_migration: migration1, batch_size: 1000) + context 'without migrations on hold' do + let!(:migration2) { create(:batched_background_migration, :active) } + let!(:migration3) { create(:batched_background_migration, :active) } + + it 'returns the first active migration according to queue order' do + expect(described_class.active_migration).to eq(migration2) + end + end + + context 'with migrations are on hold' do + let!(:migration2) { create(:batched_background_migration, :active, on_hold_until: 10.minutes.from_now) } + let!(:migration3) { create(:batched_background_migration, :active, on_hold_until: 2.minutes.ago) } + + it 'returns the first active migration that is not on hold according to queue order' do + expect(described_class.active_migration).to eq(migration3) + end end end @@ -287,7 +316,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m it 'moves the status of the migration to active' do retry_failed_jobs - expect(batched_migration.status).to eql 'active' + expect(batched_migration.status_name).to be :active end it 'changes the number of attempts to 0' do @@ -301,8 +330,59 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m it 'moves the status of the migration to active' do retry_failed_jobs - expect(batched_migration.status).to eql 'active' + expect(batched_migration.status_name).to be :active + end + end + end + + describe '#should_stop?' do + subject(:should_stop?) { batched_migration.should_stop? 
} + + let(:batched_migration) { create(:batched_background_migration, started_at: started_at) } + + before do + stub_const('Gitlab::Database::BackgroundMigration::BatchedMigration::MINIMUM_JOBS', 1) + end + + context 'when the started_at is nil' do + let(:started_at) { nil } + + it { expect(should_stop?).to be_falsey } + end + + context 'when the number of jobs is lesser than the MINIMUM_JOBS' do + let(:started_at) { Time.zone.now - 6.days } + + before do + stub_const('Gitlab::Database::BackgroundMigration::BatchedMigration::MINIMUM_JOBS', 10) + stub_const('Gitlab::Database::BackgroundMigration::BatchedMigration::MAXIMUM_FAILED_RATIO', 0.70) + create_list(:batched_background_migration_job, 1, :succeeded, batched_migration: batched_migration) + create_list(:batched_background_migration_job, 3, :failed, batched_migration: batched_migration) + end + + it { expect(should_stop?).to be_falsey } + end + + context 'when the calculated value is greater than the threshold' do + let(:started_at) { Time.zone.now - 6.days } + + before do + stub_const('Gitlab::Database::BackgroundMigration::BatchedMigration::MAXIMUM_FAILED_RATIO', 0.70) + create_list(:batched_background_migration_job, 1, :succeeded, batched_migration: batched_migration) + create_list(:batched_background_migration_job, 3, :failed, batched_migration: batched_migration) + end + + it { expect(should_stop?).to be_truthy } + end + + context 'when the calculated value is lesser than the threshold' do + let(:started_at) { Time.zone.now - 6.days } + + before do + create_list(:batched_background_migration_job, 2, :succeeded, batched_migration: batched_migration) end + + it { expect(should_stop?).to be_falsey } end end @@ -449,6 +529,20 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m end end + describe '#hold!', :freeze_time do + subject { create(:batched_background_migration) } + + let(:time) { 5.minutes.from_now } + + it 'updates on_hold_until property' do + expect { subject.hold!(until_time: time) }.to change { subject.on_hold_until }.from(nil).to(time) + end + + it 'defaults to 10 minutes' do + expect { subject.hold! 
}.to change { subject.on_hold_until }.from(nil).to(10.minutes.from_now) + end + end + describe '.for_configuration' do let!(:migration) do create( diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb index d6c984c7adb..6a4ac317cad 100644 --- a/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb +++ b/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb @@ -3,8 +3,10 @@ require 'spec_helper' RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '#perform' do - subject { described_class.new.perform(job_record) } + subject { described_class.new(connection: connection, metrics: metrics_tracker).perform(job_record) } + let(:connection) { Gitlab::Database.database_base_models[:main].connection } + let(:metrics_tracker) { instance_double('::Gitlab::Database::BackgroundMigration::PrometheusMetrics', track: nil) } let(:job_class) { Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJob } let_it_be(:pause_ms) { 250 } @@ -19,6 +21,12 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, ' let(:job_instance) { double('job instance', batch_metrics: {}) } + around do |example| + Gitlab::Database::SharedModel.using_connection(connection) do + example.run + end + end + before do allow(job_class).to receive(:new).and_return(job_instance) end @@ -78,86 +86,6 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, ' end end - context 'reporting prometheus metrics' do - let(:labels) { job_record.batched_migration.prometheus_labels } - - before do - allow(job_instance).to receive(:perform) - end - - it 'reports batch_size' do - expect(described_class.metrics[:gauge_batch_size]).to receive(:set).with(labels, job_record.batch_size) - - subject - end - - it 'reports sub_batch_size' do - expect(described_class.metrics[:gauge_sub_batch_size]).to receive(:set).with(labels, job_record.sub_batch_size) - - subject - end - - it 'reports interval' do - expect(described_class.metrics[:gauge_interval]).to receive(:set).with(labels, job_record.batched_migration.interval) - - subject - end - - it 'reports updated tuples (currently based on batch_size)' do - expect(described_class.metrics[:counter_updated_tuples]).to receive(:increment).with(labels, job_record.batch_size) - - subject - end - - it 'reports migrated tuples' do - count = double - expect(job_record.batched_migration).to receive(:migrated_tuple_count).and_return(count) - expect(described_class.metrics[:gauge_migrated_tuples]).to receive(:set).with(labels, count) - - subject - end - - it 'reports summary of query timings' do - metrics = { 'timings' => { 'update_all' => [1, 2, 3, 4, 5] } } - - expect(job_instance).to receive(:batch_metrics).and_return(metrics) - - metrics['timings'].each do |key, timings| - summary_labels = labels.merge(operation: key) - timings.each do |timing| - expect(described_class.metrics[:histogram_timings]).to receive(:observe).with(summary_labels, timing) - end - end - - subject - end - - it 'reports job duration' do - freeze_time do - expect(Time).to receive(:current).and_return(Time.zone.now - 5.seconds).ordered - allow(Time).to receive(:current).and_call_original - - expect(described_class.metrics[:gauge_job_duration]).to receive(:set).with(labels, 5.seconds) - - subject - end - end - - it 'reports the total tuple count for the migration' do - 
expect(described_class.metrics[:gauge_total_tuple_count]).to receive(:set).with(labels, job_record.batched_migration.total_tuple_count) - - subject - end - - it 'reports last updated at timestamp' do - freeze_time do - expect(described_class.metrics[:gauge_last_update_time]).to receive(:set).with(labels, Time.current.to_i) - - subject - end - end - end - context 'when the migration job does not raise an error' do it 'marks the tracking record as succeeded' do expect(job_instance).to receive(:perform).with(1, 10, 'events', 'id', 1, pause_ms, 'id', 'other_id') @@ -171,6 +99,13 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, ' expect(reloaded_job_record.finished_at).to eq(Time.current) end end + + it 'tracks metrics of the execution' do + expect(job_instance).to receive(:perform) + expect(metrics_tracker).to receive(:track).with(job_record) + + subject + end end context 'when the migration job raises an error' do @@ -189,6 +124,13 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, ' expect(reloaded_job_record.finished_at).to eq(Time.current) end end + + it 'tracks metrics of the execution' do + expect(job_instance).to receive(:perform).and_raise(error_class) + expect(metrics_tracker).to receive(:track).with(job_record) + + expect { subject }.to raise_error(error_class) + end end it_behaves_like 'an error is raised', RuntimeError.new('Something broke!') @@ -203,7 +145,6 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, ' stub_const('Gitlab::BackgroundMigration::Foo', migration_class) end - let(:connection) { double(:connection) } let(:active_migration) { create(:batched_background_migration, :active, job_class_name: 'Foo') } let!(:job_record) { create(:batched_background_migration_job, batched_migration: active_migration) } @@ -212,12 +153,11 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, ' expect(job_instance).to receive(:perform) - described_class.new(connection: connection).perform(job_record) + subject end end context 'when the batched background migration inherits from BaseJob' do - let(:connection) { double(:connection) } let(:active_migration) { create(:batched_background_migration, :active, job_class_name: 'Foo') } let!(:job_record) { create(:batched_background_migration_job, batched_migration: active_migration) } @@ -232,7 +172,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, ' expect(job_instance).to receive(:perform) - described_class.new(connection: connection).perform(job_record) + subject end end end diff --git a/spec/lib/gitlab/database/background_migration/prometheus_metrics_spec.rb b/spec/lib/gitlab/database/background_migration/prometheus_metrics_spec.rb new file mode 100644 index 00000000000..1f256de35ec --- /dev/null +++ b/spec/lib/gitlab/database/background_migration/prometheus_metrics_spec.rb @@ -0,0 +1,118 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::BackgroundMigration::PrometheusMetrics, :prometheus do + describe '#track' do + let(:job_record) do + build(:batched_background_migration_job, :succeeded, + started_at: Time.current - 2.minutes, + finished_at: Time.current - 1.minute, + updated_at: Time.current, + metrics: { 'timings' => { 'update_all' => [0.05, 0.2, 0.4, 0.9, 4] } }) + end + + let(:labels) { job_record.batched_migration.prometheus_labels } + + subject(:track_job_record_metrics) { described_class.new.track(job_record) } + + it 'reports 
batch_size' do + track_job_record_metrics + + expect(metric_for_job_by_name(:gauge_batch_size)).to eq(job_record.batch_size) + end + + it 'reports sub_batch_size' do + track_job_record_metrics + + expect(metric_for_job_by_name(:gauge_sub_batch_size)).to eq(job_record.sub_batch_size) + end + + it 'reports interval' do + track_job_record_metrics + + expect(metric_for_job_by_name(:gauge_interval)).to eq(job_record.batched_migration.interval) + end + + it 'reports job duration' do + freeze_time do + track_job_record_metrics + + expect(metric_for_job_by_name(:gauge_job_duration)).to eq(1.minute) + end + end + + it 'increments updated tuples (currently based on batch_size)' do + expect(described_class.metrics[:counter_updated_tuples]).to receive(:increment) + .with(labels, job_record.batch_size) + .twice + .and_call_original + + track_job_record_metrics + + expect(metric_for_job_by_name(:counter_updated_tuples)).to eq(job_record.batch_size) + + described_class.new.track(job_record) + + expect(metric_for_job_by_name(:counter_updated_tuples)).to eq(job_record.batch_size * 2) + end + + it 'reports migrated tuples' do + expect(job_record.batched_migration).to receive(:migrated_tuple_count).and_return(20) + + track_job_record_metrics + + expect(metric_for_job_by_name(:gauge_migrated_tuples)).to eq(20) + end + + it 'reports the total tuple count for the migration' do + track_job_record_metrics + + expect(metric_for_job_by_name(:gauge_total_tuple_count)).to eq(job_record.batched_migration.total_tuple_count) + end + + it 'reports last updated at timestamp' do + freeze_time do + track_job_record_metrics + + expect(metric_for_job_by_name(:gauge_last_update_time)).to eq(Time.current.to_i) + end + end + + it 'reports summary of query timings' do + summary_labels = labels.merge(operation: 'update_all') + + job_record.metrics['timings']['update_all'].each do |timing| + expect(described_class.metrics[:histogram_timings]).to receive(:observe) + .with(summary_labels, timing) + .and_call_original + end + + track_job_record_metrics + + expect(metric_for_job_by_name(:histogram_timings, job_labels: summary_labels)) + .to eq({ 0.1 => 1.0, 0.25 => 2.0, 0.5 => 3.0, 1 => 4.0, 5 => 5.0 }) + end + + context 'when the tracking record does not have timing metrics' do + before do + job_record.metrics = {} + end + + it 'does not attempt to report query timings' do + summary_labels = labels.merge(operation: 'update_all') + + expect(described_class.metrics[:histogram_timings]).not_to receive(:observe) + + track_job_record_metrics + + expect(metric_for_job_by_name(:histogram_timings, job_labels: summary_labels)) + .to eq({ 0.1 => 0.0, 0.25 => 0.0, 0.5 => 0.0, 1 => 0.0, 5 => 0.0 }) + end + end + + def metric_for_job_by_name(name, job_labels: labels) + described_class.metrics[name].values[job_labels].get + end + end +end diff --git a/spec/lib/gitlab/database/consistency_checker_spec.rb b/spec/lib/gitlab/database/consistency_checker_spec.rb new file mode 100644 index 00000000000..2ff79d20786 --- /dev/null +++ b/spec/lib/gitlab/database/consistency_checker_spec.rb @@ -0,0 +1,189 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::ConsistencyChecker do + let(:batch_size) { 10 } + let(:max_batches) { 4 } + let(:max_runtime) { described_class::MAX_RUNTIME } + let(:metrics_counter) { Gitlab::Metrics.registry.get(:consistency_checks) } + + subject(:consistency_checker) do + described_class.new( + source_model: Namespace, + target_model: Ci::NamespaceMirror, + source_columns: %w[id
traversal_ids], + target_columns: %w[namespace_id traversal_ids] + ) + end + + before do + stub_const("#{described_class.name}::BATCH_SIZE", batch_size) + stub_const("#{described_class.name}::MAX_BATCHES", max_batches) + redis_shared_state_cleanup! # For Prometheus Counters + end + + after do + Gitlab::Metrics.reset_registry! + end + + describe '#over_time_limit?' do + before do + allow(consistency_checker).to receive(:start_time).and_return(0) + end + + it 'returns true only if the running time has exceeded MAX_RUNTIME' do + allow(consistency_checker).to receive(:monotonic_time).and_return(0, max_runtime - 1, max_runtime + 1) + expect(consistency_checker.monotonic_time).to eq(0) + expect(consistency_checker.send(:over_time_limit?)).to eq(false) + expect(consistency_checker.send(:over_time_limit?)).to eq(true) + end + end + + describe '#execute' do + context 'when empty tables' do + it 'returns an empty response' do + expected_result = { matches: 0, mismatches: 0, batches: 0, mismatches_details: [], next_start_id: nil } + expect(consistency_checker.execute(start_id: 1)).to eq(expected_result) + end + end + + context 'when the tables contain matching items' do + before do + create_list(:namespace, 50) # This will also create Ci::NameSpaceMirror objects + end + + it 'does not process more than MAX_BATCHES' do + max_batches = 3 + stub_const("#{described_class.name}::MAX_BATCHES", max_batches) + result = consistency_checker.execute(start_id: Namespace.minimum(:id)) + expect(result[:batches]).to eq(max_batches) + expect(result[:matches]).to eq(max_batches * batch_size) + end + + it 'does not exceed the MAX_RUNTIME' do + allow(consistency_checker).to receive(:monotonic_time).and_return(0, max_runtime - 1, max_runtime + 1) + result = consistency_checker.execute(start_id: Namespace.minimum(:id)) + expect(result[:batches]).to eq(1) + expect(result[:matches]).to eq(1 * batch_size) + end + + it 'returns the correct number of matches and batches checked' do + expected_result = { + next_start_id: Namespace.minimum(:id) + described_class::MAX_BATCHES * described_class::BATCH_SIZE, + batches: max_batches, + matches: max_batches * batch_size, + mismatches: 0, + mismatches_details: [] + } + expect(consistency_checker.execute(start_id: Namespace.minimum(:id))).to eq(expected_result) + end + + it 'returns the min_id as the next_start_id if the check reaches the last element' do + expect(Gitlab::Metrics).to receive(:counter).at_most(:once) + .with(:consistency_checks, "Consistency Check Results") + .and_call_original + + # Starting from the 5th last element + start_id = Namespace.all.order(id: :desc).limit(5).pluck(:id).last + expected_result = { + next_start_id: Namespace.first.id, + batches: 1, + matches: 5, + mismatches: 0, + mismatches_details: [] + } + expect(consistency_checker.execute(start_id: start_id)).to eq(expected_result) + + expect(metrics_counter.get(source_table: "namespaces", result: "mismatch")).to eq(0) + expect(metrics_counter.get(source_table: "namespaces", result: "match")).to eq(5) + end + end + + context 'when some items are missing from the first table' do + let(:missing_namespace) { Namespace.all.order(:id).limit(2).last } + + before do + create_list(:namespace, 50) # This will also create Ci::NameSpaceMirror objects + missing_namespace.delete + end + + it 'reports the missing elements' do + expected_result = { + next_start_id: Namespace.first.id + described_class::MAX_BATCHES * described_class::BATCH_SIZE, + batches: max_batches, + matches: 39, + mismatches: 1, +
mismatches_details: [{ + id: missing_namespace.id, + source_table: nil, + target_table: [missing_namespace.traversal_ids] + }] + } + expect(consistency_checker.execute(start_id: Namespace.first.id)).to eq(expected_result) + + expect(metrics_counter.get(source_table: "namespaces", result: "mismatch")).to eq(1) + expect(metrics_counter.get(source_table: "namespaces", result: "match")).to eq(39) + end + end + + context 'when some items are missing from the second table' do + let(:missing_ci_namespace_mirror) { Ci::NamespaceMirror.all.order(:id).limit(2).last } + + before do + create_list(:namespace, 50) # This will also create Ci::NameSpaceMirror objects + missing_ci_namespace_mirror.delete + end + + it 'reports the missing elements' do + expected_result = { + next_start_id: Namespace.first.id + described_class::MAX_BATCHES * described_class::BATCH_SIZE, + batches: 4, + matches: 39, + mismatches: 1, + mismatches_details: [{ + id: missing_ci_namespace_mirror.namespace_id, + source_table: [missing_ci_namespace_mirror.traversal_ids], + target_table: nil + }] + } + expect(consistency_checker.execute(start_id: Namespace.first.id)).to eq(expected_result) + + expect(metrics_counter.get(source_table: "namespaces", result: "mismatch")).to eq(1) + expect(metrics_counter.get(source_table: "namespaces", result: "match")).to eq(39) + end + end + + context 'when elements are different between the two tables' do + let(:different_namespaces) { Namespace.order(:id).limit(max_batches * batch_size).sample(3).sort_by(&:id) } + + before do + create_list(:namespace, 50) # This will also create Ci::NameSpaceMirror objects + + different_namespaces.each do |namespace| + namespace.update_attribute(:traversal_ids, []) + end + end + + it 'reports the difference between the two tables' do + expected_result = { + next_start_id: Namespace.first.id + described_class::MAX_BATCHES * described_class::BATCH_SIZE, + batches: 4, + matches: 37, + mismatches: 3, + mismatches_details: different_namespaces.map do |namespace| + { + id: namespace.id, + source_table: [[]], + target_table: [[namespace.id]] # old traversal_ids of the namespace + } + end + } + expect(consistency_checker.execute(start_id: Namespace.first.id)).to eq(expected_result) + + expect(metrics_counter.get(source_table: "namespaces", result: "mismatch")).to eq(3) + expect(metrics_counter.get(source_table: "namespaces", result: "match")).to eq(37) + end + end + end +end diff --git a/spec/lib/gitlab/database/each_database_spec.rb b/spec/lib/gitlab/database/each_database_spec.rb index d46c1ca8681..191f7017b4c 100644 --- a/spec/lib/gitlab/database/each_database_spec.rb +++ b/spec/lib/gitlab/database/each_database_spec.rb @@ -58,6 +58,15 @@ RSpec.describe Gitlab::Database::EachDatabase do end end end + + context 'when shared connections are not included' do + it 'only yields the unshared connections' do + expect(Gitlab::Database).to receive(:db_config_share_with).twice.and_return(nil, 'main') + + expect { |b| described_class.each_database_connection(include_shared: false, &b) } + .to yield_successive_args([ActiveRecord::Base.connection, 'main']) + end + end end describe '.each_model_connection' do diff --git a/spec/lib/gitlab/database/load_balancing/setup_spec.rb b/spec/lib/gitlab/database/load_balancing/setup_spec.rb index 4d565ce137a..c44637b8d06 100644 --- a/spec/lib/gitlab/database/load_balancing/setup_spec.rb +++ b/spec/lib/gitlab/database/load_balancing/setup_spec.rb @@ -10,7 +10,6 @@ RSpec.describe Gitlab::Database::LoadBalancing::Setup do expect(setup).to 
receive(:configure_connection) expect(setup).to receive(:setup_connection_proxy) expect(setup).to receive(:setup_service_discovery) - expect(setup).to receive(:setup_feature_flag_to_model_load_balancing) setup.setup end @@ -120,120 +119,46 @@ RSpec.describe Gitlab::Database::LoadBalancing::Setup do end end - describe '#setup_feature_flag_to_model_load_balancing', :reestablished_active_record_base do + context 'uses correct base models', :reestablished_active_record_base do using RSpec::Parameterized::TableSyntax where do { - "with model LB enabled it picks a dedicated CI connection" => { - env_GITLAB_USE_MODEL_LOAD_BALANCING: 'true', + "it picks a dedicated CI connection" => { env_GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci: nil, request_store_active: false, - ff_use_model_load_balancing: nil, ff_force_no_sharing_primary_model: false, expectations: { main: { read: 'main_replica', write: 'main' }, ci: { read: 'ci_replica', write: 'ci' } } }, - "with model LB enabled and re-use of primary connection it uses CI connection for reads" => { - env_GITLAB_USE_MODEL_LOAD_BALANCING: 'true', + "with re-use of primary connection it uses CI connection for reads" => { env_GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci: 'main', request_store_active: false, - ff_use_model_load_balancing: nil, ff_force_no_sharing_primary_model: false, expectations: { main: { read: 'main_replica', write: 'main' }, ci: { read: 'ci_replica', write: 'main' } } }, - "with model LB disabled it fallbacks to use main" => { - env_GITLAB_USE_MODEL_LOAD_BALANCING: 'false', - env_GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci: nil, - request_store_active: false, - ff_use_model_load_balancing: nil, - ff_force_no_sharing_primary_model: false, - expectations: { - main: { read: 'main_replica', write: 'main' }, - ci: { read: 'main_replica', write: 'main' } - } - }, - "with model LB disabled, but re-use configured it fallbacks to use main" => { - env_GITLAB_USE_MODEL_LOAD_BALANCING: 'false', + "with re-use and FF force_no_sharing_primary_model enabled with RequestStore it sticks FF and uses CI connection for reads and writes" => { env_GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci: 'main', - request_store_active: false, - ff_use_model_load_balancing: nil, - ff_force_no_sharing_primary_model: false, - expectations: { - main: { read: 'main_replica', write: 'main' }, - ci: { read: 'main_replica', write: 'main' } - } - }, - "with FF use_model_load_balancing disabled without RequestStore it uses main" => { - env_GITLAB_USE_MODEL_LOAD_BALANCING: nil, - env_GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci: nil, - request_store_active: false, - ff_use_model_load_balancing: false, - ff_force_no_sharing_primary_model: false, - expectations: { - main: { read: 'main_replica', write: 'main' }, - ci: { read: 'main_replica', write: 'main' } - } - }, - "with FF use_model_load_balancing enabled without RequestStore sticking of FF does not work, so it fallbacks to use main" => { - env_GITLAB_USE_MODEL_LOAD_BALANCING: nil, - env_GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci: nil, - request_store_active: false, - ff_use_model_load_balancing: true, - ff_force_no_sharing_primary_model: false, - expectations: { - main: { read: 'main_replica', write: 'main' }, - ci: { read: 'main_replica', write: 'main' } - } - }, - "with FF use_model_load_balancing disabled with RequestStore it uses main" => { - env_GITLAB_USE_MODEL_LOAD_BALANCING: nil, - env_GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci: nil, - request_store_active: true, - ff_use_model_load_balancing: false, - ff_force_no_sharing_primary_model: false, - 
expectations: { - main: { read: 'main_replica', write: 'main' }, - ci: { read: 'main_replica', write: 'main' } - } - }, - "with FF use_model_load_balancing enabled with RequestStore it sticks FF and uses CI connection" => { - env_GITLAB_USE_MODEL_LOAD_BALANCING: nil, - env_GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci: nil, request_store_active: true, - ff_use_model_load_balancing: true, - ff_force_no_sharing_primary_model: false, + ff_force_no_sharing_primary_model: true, expectations: { main: { read: 'main_replica', write: 'main' }, ci: { read: 'ci_replica', write: 'ci' } } }, - "with re-use and ff_use_model_load_balancing enabled and FF force_no_sharing_primary_model disabled with RequestStore it sticks FF and uses CI connection for reads" => { - env_GITLAB_USE_MODEL_LOAD_BALANCING: nil, + "with re-use and FF force_no_sharing_primary_model enabled without RequestStore it doesn't use FF and uses CI connection for reads only" => { env_GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci: 'main', request_store_active: true, - ff_use_model_load_balancing: true, ff_force_no_sharing_primary_model: false, expectations: { main: { read: 'main_replica', write: 'main' }, ci: { read: 'ci_replica', write: 'main' } } - }, - "with re-use and ff_use_model_load_balancing enabled and FF force_no_sharing_primary_model enabled with RequestStore it sticks FF and uses CI connection for reads" => { - env_GITLAB_USE_MODEL_LOAD_BALANCING: nil, - env_GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci: 'main', - request_store_active: true, - ff_use_model_load_balancing: true, - ff_force_no_sharing_primary_model: true, - expectations: { - main: { read: 'main_replica', write: 'main' }, - ci: { read: 'ci_replica', write: 'ci' } - } } } end @@ -285,9 +210,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::Setup do end end - stub_env('GITLAB_USE_MODEL_LOAD_BALANCING', env_GITLAB_USE_MODEL_LOAD_BALANCING) stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', env_GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci) - stub_feature_flags(use_model_load_balancing: ff_use_model_load_balancing) # Make load balancer to force init with a dedicated replicas connections models.each do |_, model| diff --git a/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb b/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb index ad9a3a6e257..e7b5bad8626 100644 --- a/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb +++ b/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb @@ -240,7 +240,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::RestrictGitlabSchema, query_a end def software_license_class - Class.new(ActiveRecord::Base) do + Class.new(Gitlab::Database::Migration[2.0]::MigrationRecord) do self.table_name = 'software_licenses' end end @@ -272,7 +272,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::RestrictGitlabSchema, query_a end def ci_instance_variables_class - Class.new(ActiveRecord::Base) do + Class.new(Gitlab::Database::Migration[2.0]::MigrationRecord) do self.table_name = 'ci_instance_variables' end end @@ -303,7 +303,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::RestrictGitlabSchema, query_a end def detached_partitions_class - Class.new(ActiveRecord::Base) do + Class.new(Gitlab::Database::Migration[2.0]::MigrationRecord) do self.table_name = 'detached_partitions' end end @@ -496,11 +496,16 @@ RSpec.describe Gitlab::Database::MigrationHelpers::RestrictGitlabSchema, query_a Gitlab::Database.database_base_models.each do |db_config_name, model| context "for 
db_config_name=#{db_config_name}" do around do |example| + verbose_was = ActiveRecord::Migration.verbose + ActiveRecord::Migration.verbose = false + with_reestablished_active_record_base do reconfigure_db_connection(model: ActiveRecord::Base, config_model: model) example.run end + ensure + ActiveRecord::Migration.verbose = verbose_was end before do @@ -543,8 +548,15 @@ RSpec.describe Gitlab::Database::MigrationHelpers::RestrictGitlabSchema, query_a expect { ignore_error(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas::DDLNotAllowedError) { migration_class.migrate(:down) } }.not_to raise_error when :skipped - expect { migration_class.migrate(:up) }.to raise_error(Gitlab::Database::MigrationHelpers::RestrictGitlabSchema::MigrationSkippedError) - expect { migration_class.migrate(:down) }.to raise_error(Gitlab::Database::MigrationHelpers::RestrictGitlabSchema::MigrationSkippedError) + expect_next_instance_of(migration_class) do |migration_object| + expect(migration_object).to receive(:migration_skipped).and_call_original + expect(migration_object).not_to receive(:up) + expect(migration_object).not_to receive(:down) + expect(migration_object).not_to receive(:change) + end + + migration_class.migrate(:up) + migration_class.migrate(:down) end end end diff --git a/spec/lib/gitlab/database/migration_helpers/v2_spec.rb b/spec/lib/gitlab/database/migration_helpers/v2_spec.rb index acf775b3538..5c054795697 100644 --- a/spec/lib/gitlab/database/migration_helpers/v2_spec.rb +++ b/spec/lib/gitlab/database/migration_helpers/v2_spec.rb @@ -96,6 +96,12 @@ RSpec.describe Gitlab::Database::MigrationHelpers::V2 do expect(new_record_1.reload).to have_attributes(status: 1, original: 'updated', renamed: 'updated') expect(new_record_2.reload).to have_attributes(status: 1, original: nil, renamed: nil) end + + it 'requires the helper to run in ddl mode' do + expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_ddl_mode!) + + migration.public_send(operation, :_test_table, :original, :renamed) + end end describe '#rename_column_concurrently' do diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb index 9505da8fd12..798eee0de3e 100644 --- a/spec/lib/gitlab/database/migration_helpers_spec.rb +++ b/spec/lib/gitlab/database/migration_helpers_spec.rb @@ -1390,6 +1390,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers do end it 'reverses the operations of cleanup_concurrent_column_type_change' do + expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_ddl_mode!) + expect(model).to receive(:check_trigger_permissions!).with(:users) expect(model).to receive(:create_column_from).with( @@ -1415,6 +1417,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers do end it 'passes the type_cast_function, batch_column_name and limit' do + expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_ddl_mode!) 
+ expect(model).to receive(:column_exists?).with(:users, :other_batch_column).and_return(true) expect(model).to receive(:check_trigger_permissions!).with(:users) @@ -2096,7 +2100,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do end end - let(:migration_relation) { Gitlab::Database::BackgroundMigration::BatchedMigration.active } + let(:migration_relation) { Gitlab::Database::BackgroundMigration::BatchedMigration.with_status(:active) } before do model.initialize_conversion_of_integer_to_bigint(table, columns) @@ -2218,7 +2222,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do subject(:ensure_batched_background_migration_is_finished) { model.ensure_batched_background_migration_is_finished(**configuration) } it 'raises an error when migration exists and is not marked as finished' do - create(:batched_background_migration, configuration.merge(status: :active)) + create(:batched_background_migration, :active, configuration) expect { ensure_batched_background_migration_is_finished } .to raise_error "Expected batched background migration for the given configuration to be marked as 'finished', but it is 'active':" \ @@ -2234,7 +2238,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do end it 'does not raise error when migration exists and is marked as finished' do - create(:batched_background_migration, configuration.merge(status: :finished)) + create(:batched_background_migration, :finished, configuration) expect { ensure_batched_background_migration_is_finished } .not_to raise_error @@ -2422,7 +2426,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers do def setup namespace = namespaces.create!(name: 'foo', path: 'foo', type: Namespaces::UserNamespace.sti_name) - projects.create!(namespace_id: namespace.id) + project_namespace = namespaces.create!(name: 'project-foo', path: 'project-foo', type: 'Project', parent_id: namespace.id, visibility_level: 20) + projects.create!(namespace_id: namespace.id, project_namespace_id: project_namespace.id) end it 'generates iids properly for models created after the migration' do diff --git a/spec/lib/gitlab/database/migration_spec.rb b/spec/lib/gitlab/database/migration_spec.rb index 287e738c24e..18bbc6c1dd3 100644 --- a/spec/lib/gitlab/database/migration_spec.rb +++ b/spec/lib/gitlab/database/migration_spec.rb @@ -32,7 +32,7 @@ RSpec.describe Gitlab::Database::Migration do # This breaks upon Rails upgrade. In that case, we'll add a new version in Gitlab::Database::Migration::MIGRATION_CLASSES, # bump .current_version and leave existing migrations and already defined versions of Gitlab::Database::Migration # untouched. 
- expect(described_class[described_class.current_version].superclass).to eq(ActiveRecord::Migration::Current) + expect(described_class[described_class.current_version]).to be < ActiveRecord::Migration::Current end end diff --git a/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb b/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb index 37efff165c7..f9347a174c4 100644 --- a/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb +++ b/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb @@ -75,7 +75,7 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d max_batch_size: 10000, sub_batch_size: 10, job_arguments: %w[], - status: 'active', + status_name: :active, total_tuple_count: pgclass_info.cardinality_estimate) end diff --git a/spec/lib/gitlab/database/migrations/instrumentation_spec.rb b/spec/lib/gitlab/database/migrations/instrumentation_spec.rb index fd8303c379c..c31244060ec 100644 --- a/spec/lib/gitlab/database/migrations/instrumentation_spec.rb +++ b/spec/lib/gitlab/database/migrations/instrumentation_spec.rb @@ -11,6 +11,10 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do describe '#observe' do subject { described_class.new(result_dir: result_dir) } + def load_observation(result_dir, migration_name) + Gitlab::Json.parse(File.read(File.join(result_dir, migration_name, described_class::STATS_FILENAME))) + end + let(:migration_name) { 'test' } let(:migration_version) { '12345' } @@ -87,7 +91,7 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do end context 'retrieving observations' do - subject { instance.observations.first } + subject { load_observation(result_dir, migration_name) } before do observe @@ -98,10 +102,10 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do end it 'records a valid observation', :aggregate_failures do - expect(subject.walltime).not_to be_nil - expect(subject.success).to be_falsey - expect(subject.version).to eq(migration_version) - expect(subject.name).to eq(migration_name) + expect(subject['walltime']).not_to be_nil + expect(subject['success']).to be_falsey + expect(subject['version']).to eq(migration_version) + expect(subject['name']).to eq(migration_name) end end end @@ -113,11 +117,18 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do let(:migration1) { double('migration1', call: nil) } let(:migration2) { double('migration2', call: nil) } + let(:migration_name_2) { 'other_migration' } + let(:migration_version_2) { '98765' } + it 'records observations for all migrations' do subject.observe(version: migration_version, name: migration_name, connection: connection) {} - subject.observe(version: migration_version, name: migration_name, connection: connection) { raise 'something went wrong' } rescue nil + subject.observe(version: migration_version_2, name: migration_name_2, connection: connection) { raise 'something went wrong' } rescue nil + + expect { load_observation(result_dir, migration_name) }.not_to raise_error + expect { load_observation(result_dir, migration_name_2) }.not_to raise_error - expect(subject.observations.size).to eq(2) + # Each observation is a subdirectory of the result_dir, so here we check that we didn't record an extra one + expect(Pathname(result_dir).children.map { |d| d.basename.to_s }).to contain_exactly(migration_name, migration_name_2) end end end diff --git a/spec/lib/gitlab/database/migrations/runner_spec.rb 
b/spec/lib/gitlab/database/migrations/runner_spec.rb index 84482e6b450..8b1ccf05eb1 100644 --- a/spec/lib/gitlab/database/migrations/runner_spec.rb +++ b/spec/lib/gitlab/database/migrations/runner_spec.rb @@ -124,4 +124,16 @@ RSpec.describe Gitlab::Database::Migrations::Runner do expect(metadata).to match('version' => described_class::SCHEMA_VERSION) end end + + describe '.background_migrations' do + it 'is a TestBackgroundRunner' do + expect(described_class.background_migrations).to be_a(Gitlab::Database::Migrations::TestBackgroundRunner) + end + + it 'is configured with a result dir of /background_migrations' do + runner = described_class.background_migrations + + expect(runner.result_dir).to eq(described_class::BASE_RESULT_DIR.join( 'background_migrations')) + end + end end diff --git a/spec/lib/gitlab/database/migrations/test_background_runner_spec.rb b/spec/lib/gitlab/database/migrations/test_background_runner_spec.rb index c6fe88a7c2d..9407efad91f 100644 --- a/spec/lib/gitlab/database/migrations/test_background_runner_spec.rb +++ b/spec/lib/gitlab/database/migrations/test_background_runner_spec.rb @@ -11,11 +11,17 @@ RSpec.describe Gitlab::Database::Migrations::TestBackgroundRunner, :redis do Sidekiq::Testing.disable! { ex.run } end + let(:result_dir) { Dir.mktmpdir } + + after do + FileUtils.rm_rf(result_dir) + end + context 'without jobs to run' do it 'returns immediately' do - runner = described_class.new + runner = described_class.new(result_dir: result_dir) expect(runner).not_to receive(:run_job) - described_class.new.run_jobs(for_duration: 1.second) + described_class.new(result_dir: result_dir).run_jobs(for_duration: 1.second) end end @@ -30,7 +36,7 @@ RSpec.describe Gitlab::Database::Migrations::TestBackgroundRunner, :redis do context 'finding pending background jobs' do it 'finds all the migrations' do - expect(described_class.new.traditional_background_migrations.to_a.size).to eq(5) + expect(described_class.new(result_dir: result_dir).traditional_background_migrations.to_a.size).to eq(5) end end @@ -53,12 +59,28 @@ RSpec.describe Gitlab::Database::Migrations::TestBackgroundRunner, :redis do end end + def expect_recorded_migration_runs(migrations_to_runs) + migrations_to_runs.each do |migration, runs| + path = File.join(result_dir, migration.name.demodulize) + num_subdirs = Pathname(path).children.count(&:directory?) 
+ expect(num_subdirs).to eq(runs) + end + end + + def expect_migration_runs(migrations_to_run_counts) + expect_migration_call_counts(migrations_to_run_counts) + + yield + + expect_recorded_migration_runs(migrations_to_run_counts) + end + it 'runs the migration class correctly' do calls = [] define_background_migration(migration_name) do |i| calls << i end - described_class.new.run_jobs(for_duration: 1.second) # Any time would work here as we do not advance time + described_class.new(result_dir: result_dir).run_jobs(for_duration: 1.second) # Any time would work here as we do not advance time expect(calls).to contain_exactly(1, 2, 3, 4, 5) end @@ -67,9 +89,9 @@ RSpec.describe Gitlab::Database::Migrations::TestBackgroundRunner, :redis do travel(1.minute) end - expect_migration_call_counts(migration => 3) - - described_class.new.run_jobs(for_duration: 3.minutes) + expect_migration_runs(migration => 3) do + described_class.new(result_dir: result_dir).run_jobs(for_duration: 3.minutes) + end end context 'with multiple migrations to run' do @@ -90,12 +112,12 @@ RSpec.describe Gitlab::Database::Migrations::TestBackgroundRunner, :redis do travel(2.minutes) end - expect_migration_call_counts( + expect_migration_runs( migration => 2, # 1 minute jobs for 90 seconds, can finish the first and start the second other_migration => 1 # 2 minute jobs for 90 seconds, past deadline after a single job - ) - - described_class.new.run_jobs(for_duration: 3.minutes) + ) do + described_class.new(result_dir: result_dir).run_jobs(for_duration: 3.minutes) + end end it 'does not give leftover time to extra migrations' do @@ -107,12 +129,13 @@ RSpec.describe Gitlab::Database::Migrations::TestBackgroundRunner, :redis do other_migration = define_background_migration(other_migration_name) do travel(1.minute) end - expect_migration_call_counts( + + expect_migration_runs( migration => 5, other_migration => 2 - ) - - described_class.new.run_jobs(for_duration: 3.minutes) + ) do + described_class.new(result_dir: result_dir).run_jobs(for_duration: 3.minutes) + end end end end diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb index 4f1d6302331..1026b4370a5 100644 --- a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb +++ b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb @@ -125,6 +125,17 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe expect_table_partitioned_by(partitioned_table, [partition_column]) end + it 'requires the migration helper to be run in DDL mode' do + expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_ddl_mode!) 
+ + migration.partition_table_by_date source_table, partition_column, min_date: min_date, max_date: max_date + + expect(connection.table_exists?(partitioned_table)).to be(true) + expect(connection.primary_key(partitioned_table)).to eq(new_primary_key) + + expect_table_partitioned_by(partitioned_table, [partition_column]) + end + it 'changes the primary key datatype to bigint' do migration.partition_table_by_date source_table, partition_column, min_date: min_date, max_date: max_date @@ -191,6 +202,8 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe end it 'creates a partition spanning over each month from the first record' do + expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:with_suppressed).and_yield + migration.partition_table_by_date source_table, partition_column, max_date: max_date expect_range_partitions_for(partitioned_table, { @@ -206,6 +219,8 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe context 'without data' do it 'creates the catchall partition plus two actual partition' do + expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:with_suppressed).and_yield + migration.partition_table_by_date source_table, partition_column, max_date: max_date expect_range_partitions_for(partitioned_table, { @@ -536,6 +551,16 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe migration.finalize_backfilling_partitioned_table source_table end + + it 'requires the migration helper to execute in DML mode' do + expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!) + + expect(Gitlab::BackgroundMigration).to receive(:steal) + .with(described_class::MIGRATION_CLASS_NAME) + .and_yield(background_job) + + migration.finalize_backfilling_partitioned_table source_table + end end context 'when there is missed data' do @@ -627,6 +652,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe allow(backfill).to receive(:perform).and_return(1) end + expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:with_suppressed).and_yield expect(migration).to receive(:disable_statement_timeout).and_call_original expect(migration).to receive(:execute).with("VACUUM FREEZE ANALYZE #{partitioned_table}") diff --git a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb index 86e74cf5177..b8c1ecd9089 100644 --- a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb +++ b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb @@ -17,7 +17,7 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_ana process_sql(ActiveRecord::Base, "SELECT 1 FROM projects") end - context 'properly observes all queries', :add_ci_connection do + context 'properly observes all queries', :add_ci_connection, :request_store do using RSpec::Parameterized::TableSyntax where do @@ -28,7 +28,8 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_ana expectations: { gitlab_schemas: "gitlab_main", db_config_name: "main" - } + }, + setup: nil }, "for query accessing gitlab_ci and gitlab_main" => { model: ApplicationRecord, @@ -36,7 +37,8 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_ana expectations: { gitlab_schemas: "gitlab_ci,gitlab_main", db_config_name: "main" - } + }, + setup: nil }, "for query 
accessing gitlab_ci and gitlab_main the gitlab_schemas is always ordered" => { model: ApplicationRecord, @@ -44,7 +46,8 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_ana expectations: { gitlab_schemas: "gitlab_ci,gitlab_main", db_config_name: "main" - } + }, + setup: nil }, "for query accessing CI database" => { model: Ci::ApplicationRecord, @@ -53,6 +56,62 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_ana gitlab_schemas: "gitlab_ci", db_config_name: "ci" } + }, + "for query accessing CI database with re-use and disabled sharing" => { + model: Ci::ApplicationRecord, + sql: "SELECT 1 FROM ci_builds", + expectations: { + gitlab_schemas: "gitlab_ci", + db_config_name: "ci", + ci_dedicated_primary_connection: true + }, + setup: ->(_) do + skip_if_multiple_databases_not_setup + stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', 'main') + stub_feature_flags(force_no_sharing_primary_model: true) + end + }, + "for query accessing CI database with re-use and enabled sharing" => { + model: Ci::ApplicationRecord, + sql: "SELECT 1 FROM ci_builds", + expectations: { + gitlab_schemas: "gitlab_ci", + db_config_name: "ci", + ci_dedicated_primary_connection: false + }, + setup: ->(_) do + skip_if_multiple_databases_not_setup + stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', 'main') + stub_feature_flags(force_no_sharing_primary_model: false) + end + }, + "for query accessing CI database without re-use and disabled sharing" => { + model: Ci::ApplicationRecord, + sql: "SELECT 1 FROM ci_builds", + expectations: { + gitlab_schemas: "gitlab_ci", + db_config_name: "ci", + ci_dedicated_primary_connection: true + }, + setup: ->(_) do + skip_if_multiple_databases_not_setup + stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', nil) + stub_feature_flags(force_no_sharing_primary_model: true) + end + }, + "for query accessing CI database without re-use and enabled sharing" => { + model: Ci::ApplicationRecord, + sql: "SELECT 1 FROM ci_builds", + expectations: { + gitlab_schemas: "gitlab_ci", + db_config_name: "ci", + ci_dedicated_primary_connection: true + }, + setup: ->(_) do + skip_if_multiple_databases_not_setup + stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', nil) + stub_feature_flags(force_no_sharing_primary_model: false) + end } } end @@ -63,8 +122,15 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_ana end it do + stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', nil) + + instance_eval(&setup) if setup + + allow(::Ci::ApplicationRecord.load_balancer).to receive(:configuration) + .and_return(Gitlab::Database::LoadBalancing::Configuration.for_model(::Ci::ApplicationRecord)) + expect(described_class.schemas_metrics).to receive(:increment) - .with(expectations).and_call_original + .with({ ci_dedicated_primary_connection: anything }.merge(expectations)).and_call_original process_sql(model, sql) end diff --git a/spec/lib/gitlab/database/reindexing/grafana_notifier_spec.rb b/spec/lib/gitlab/database/reindexing/grafana_notifier_spec.rb index e76718fe48a..34670696787 100644 --- a/spec/lib/gitlab/database/reindexing/grafana_notifier_spec.rb +++ b/spec/lib/gitlab/database/reindexing/grafana_notifier_spec.rb @@ -74,8 +74,28 @@ RSpec.describe Gitlab::Database::Reindexing::GrafanaNotifier do end describe '#notify_start' do - context 'additional tag is nil' do - subject { described_class.new(api_key, api_url, nil).notify_start(action) } + context 'when Grafana is configured using application settings' do + subject { 
described_class.new.notify_start(action) } + + let(:payload) do + { + time: (action.action_start.utc.to_f * 1000).to_i, + tags: ['reindex', additional_tag, action.index.tablename, action.index.name], + text: "Started reindexing of #{action.index.name} on #{action.index.tablename}" + } + end + + before do + stub_application_setting(database_grafana_api_key: api_key) + stub_application_setting(database_grafana_api_url: api_url) + stub_application_setting(database_grafana_tag: additional_tag) + end + + it_behaves_like 'interacting with Grafana annotations API' + end + + context 'when there is no additional tag' do + subject { described_class.new(api_key: api_key, api_url: api_url, additional_tag: '').notify_start(action) } let(:payload) do { @@ -88,8 +108,8 @@ RSpec.describe Gitlab::Database::Reindexing::GrafanaNotifier do it_behaves_like 'interacting with Grafana annotations API' end - context 'additional tag is not nil' do - subject { described_class.new(api_key, api_url, additional_tag).notify_start(action) } + context 'additional tag is provided' do + subject { described_class.new(api_key: api_key, api_url: api_url, additional_tag: additional_tag).notify_start(action) } let(:payload) do { @@ -104,8 +124,30 @@ RSpec.describe Gitlab::Database::Reindexing::GrafanaNotifier do end describe '#notify_end' do - context 'additional tag is nil' do - subject { described_class.new(api_key, api_url, nil).notify_end(action) } + context 'when Grafana is configured using application settings' do + subject { described_class.new.notify_end(action) } + + let(:payload) do + { + time: (action.action_start.utc.to_f * 1000).to_i, + tags: ['reindex', additional_tag, action.index.tablename, action.index.name], + text: "Finished reindexing of #{action.index.name} on #{action.index.tablename} (#{action.state})", + timeEnd: (action.action_end.utc.to_f * 1000).to_i, + isRegion: true + } + end + + before do + stub_application_setting(database_grafana_api_key: api_key) + stub_application_setting(database_grafana_api_url: api_url) + stub_application_setting(database_grafana_tag: additional_tag) + end + + it_behaves_like 'interacting with Grafana annotations API' + end + + context 'when there is no additional tag' do + subject { described_class.new(api_key: api_key, api_url: api_url, additional_tag: '').notify_end(action) } let(:payload) do { @@ -120,8 +162,8 @@ RSpec.describe Gitlab::Database::Reindexing::GrafanaNotifier do it_behaves_like 'interacting with Grafana annotations API' end - context 'additional tag is not nil' do - subject { described_class.new(api_key, api_url, additional_tag).notify_end(action) } + context 'additional tag is provided' do + subject { described_class.new(api_key: api_key, api_url: api_url, additional_tag: additional_tag).notify_end(action) } let(:payload) do { diff --git a/spec/lib/gitlab/database/schema_cache_with_renamed_table_spec.rb b/spec/lib/gitlab/database/schema_cache_with_renamed_table_spec.rb index 7caee414719..0bea348e6b4 100644 --- a/spec/lib/gitlab/database/schema_cache_with_renamed_table_spec.rb +++ b/spec/lib/gitlab/database/schema_cache_with_renamed_table_spec.rb @@ -68,8 +68,8 @@ RSpec.describe Gitlab::Database::SchemaCacheWithRenamedTable do describe 'when the table behind a model is actually a view' do let(:group) { create(:group) } - let(:project_attributes) { attributes_for(:project, namespace_id: group.id).except(:creator) } - let(:record) { old_model.create!(project_attributes) } + let(:attrs) { attributes_for(:project, namespace_id: group.id, project_namespace_id: 
group.id).except(:creator) } + let(:record) { old_model.create!(attrs) } it 'can persist records' do expect(record.reload.attributes).to eq(new_model.find(record.id).attributes) diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb index c58dba213ee..ac8616f84a7 100644 --- a/spec/lib/gitlab/database_spec.rb +++ b/spec/lib/gitlab/database_spec.rb @@ -185,16 +185,6 @@ RSpec.describe Gitlab::Database do end end - describe '.nulls_last_order' do - it { expect(described_class.nulls_last_order('column', 'ASC')).to eq 'column ASC NULLS LAST'} - it { expect(described_class.nulls_last_order('column', 'DESC')).to eq 'column DESC NULLS LAST'} - end - - describe '.nulls_first_order' do - it { expect(described_class.nulls_first_order('column', 'ASC')).to eq 'column ASC NULLS FIRST'} - it { expect(described_class.nulls_first_order('column', 'DESC')).to eq 'column DESC NULLS FIRST'} - end - describe '.db_config_for_connection' do context 'when the regular connection is used' do it 'returns db_config' do @@ -245,15 +235,32 @@ RSpec.describe Gitlab::Database do end end + describe '.db_config_names' do + let(:expected) { %w[foo bar] } + + it 'includes only main by default' do + allow(::ActiveRecord::Base).to receive(:configurations).and_return( + double(configs_for: %w[foo bar].map { |x| double(name: x) }) + ) + + expect(described_class.db_config_names).to eq(expected) + end + + it 'excludes geo when that is included' do + allow(::ActiveRecord::Base).to receive(:configurations).and_return( + double(configs_for: %w[foo bar geo].map { |x| double(name: x) }) + ) + + expect(described_class.db_config_names).to eq(expected) + end + end + describe '.gitlab_schemas_for_connection' do it 'does raise exception for invalid connection' do expect { described_class.gitlab_schemas_for_connection(:invalid) }.to raise_error /key not found: "unknown"/ end it 'does return a valid schema depending on a base model used', :request_store do - # This is currently required as otherwise the `Ci::Build.connection` == `Project.connection` - # ENV due to lib/gitlab/database/load_balancing/setup.rb:93 - stub_env('GITLAB_USE_MODEL_LOAD_BALANCING', '1') # FF due to lib/gitlab/database/load_balancing/configuration.rb:92 stub_feature_flags(force_no_sharing_primary_model: true) @@ -268,6 +275,47 @@ RSpec.describe Gitlab::Database do expect(described_class.gitlab_schemas_for_connection(ActiveRecord::Base.connection)).to include(:gitlab_ci, :gitlab_shared) end end + + context "when there's CI connection", :request_store do + before do + skip_if_multiple_databases_not_setup + + # FF due to lib/gitlab/database/load_balancing/configuration.rb:92 + # Requires usage of `:request_store` + stub_feature_flags(force_no_sharing_primary_model: true) + end + + context 'when CI uses database_tasks: false does indicate that ci: is subset of main:' do + before do + allow(Ci::ApplicationRecord.connection_db_config).to receive(:database_tasks?).and_return(false) + end + + it 'does return gitlab_ci when accessing via main: connection' do + expect(described_class.gitlab_schemas_for_connection(Project.connection)).to include(:gitlab_ci, :gitlab_main, :gitlab_shared) + end + + it 'does not return gitlab_main when accessing via ci: connection' do + expect(described_class.gitlab_schemas_for_connection(Ci::Build.connection)).to include(:gitlab_ci, :gitlab_shared) + expect(described_class.gitlab_schemas_for_connection(Ci::Build.connection)).not_to include(:gitlab_main) + end + end + + context 'when CI uses database_tasks: true does indicate 
that ci: has own database' do + before do + allow(Ci::ApplicationRecord.connection_db_config).to receive(:database_tasks?).and_return(true) + end + + it 'does not return gitlab_ci when accessing via main: connection' do + expect(described_class.gitlab_schemas_for_connection(Project.connection)).to include(:gitlab_main, :gitlab_shared) + expect(described_class.gitlab_schemas_for_connection(Project.connection)).not_to include(:gitlab_ci) + end + + it 'does not return gitlab_main when accessing via ci: connection' do + expect(described_class.gitlab_schemas_for_connection(Ci::Build.connection)).to include(:gitlab_ci, :gitlab_shared) + expect(described_class.gitlab_schemas_for_connection(Ci::Build.connection)).not_to include(:gitlab_main) + end + end + end end describe '#true_value' do diff --git a/spec/lib/gitlab/diff/custom_diff_spec.rb b/spec/lib/gitlab/diff/custom_diff_spec.rb index 246508d2e1e..77d2a6cbcd6 100644 --- a/spec/lib/gitlab/diff/custom_diff_spec.rb +++ b/spec/lib/gitlab/diff/custom_diff_spec.rb @@ -34,6 +34,59 @@ RSpec.describe Gitlab::Diff::CustomDiff do expect(described_class.transformed_for_diff?(blob)).to be_falsey end end + + context 'timeout' do + subject { described_class.preprocess_before_diff(ipynb_blob.path, nil, ipynb_blob) } + + it 'falls back to nil on timeout' do + allow(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception) + expect(Timeout).to receive(:timeout).and_raise(Timeout::Error) + + expect(subject).to be_nil + end + + context 'when in foreground' do + it 'utilizes timeout for web' do + expect(Timeout).to receive(:timeout).with(described_class::RENDERED_TIMEOUT_FOREGROUND).and_call_original + + expect(subject).not_to include('cells') + end + + it 'increments metrics' do + counter = Gitlab::Metrics.counter(:ipynb_semantic_diff_timeouts_total, 'desc') + + expect(Timeout).to receive(:timeout).and_raise(Timeout::Error) + expect { subject }.to change { counter.get(source: described_class::FOREGROUND_EXECUTION) }.by(1) + end + end + + context 'when in background' do + before do + allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true) + end + + it 'utilizes longer timeout for sidekiq' do + expect(Timeout).to receive(:timeout).with(described_class::RENDERED_TIMEOUT_BACKGROUND).and_call_original + + expect(subject).not_to include('cells') + end + + it 'increments metrics' do + counter = Gitlab::Metrics.counter(:ipynb_semantic_diff_timeouts_total, 'desc') + + expect(Timeout).to receive(:timeout).and_raise(Timeout::Error) + expect { subject }.to change { counter.get(source: described_class::BACKGROUND_EXECUTION) }.by(1) + end + end + end + + context 'when invalid ipynb' do + it 'returns nil' do + expect(ipynb_blob).to receive(:data).and_return('invalid ipynb') + + expect(described_class.preprocess_before_diff(ipynb_blob.path, nil, ipynb_blob)).to be_nil + end + end end describe '#transformed_blob_data' do diff --git a/spec/lib/gitlab/diff/file_spec.rb b/spec/lib/gitlab/diff/file_spec.rb index f2212ec9b09..0d7a183bb11 100644 --- a/spec/lib/gitlab/diff/file_spec.rb +++ b/spec/lib/gitlab/diff/file_spec.rb @@ -51,6 +51,54 @@ RSpec.describe Gitlab::Diff::File do project.commit(branch_name).diffs.diff_files.first end + describe '#initialize' do + let(:commit) { project.commit("532c837") } + + context 'when file is ipynb' do + let(:ipynb_semantic_diff) { false } + let(:rendered_diffs_viewer) { false } + + before do + stub_feature_flags(ipynb_semantic_diff: ipynb_semantic_diff, rendered_diffs_viewer: rendered_diffs_viewer) + end + + context 'when 
ipynb_semantic_diff is off, and rendered_viewer is off' do + it 'does not generate notebook diffs' do + expect(Gitlab::Diff::CustomDiff).not_to receive(:preprocess_before_diff) + expect(diff_file.rendered).to be_nil + end + end + + context 'when ipynb_semantic_diff is off, and rendered_viewer is on' do + let(:rendered_diffs_viewer) { true } + + it 'does not generate rendered diff' do + expect(Gitlab::Diff::CustomDiff).not_to receive(:preprocess_before_diff) + expect(diff_file.rendered).to be_nil + end + end + + context 'when ipynb_semantic_diff is on, and rendered_viewer is off' do + let(:ipynb_semantic_diff) { true } + + it 'transforms using custom diff CustomDiff' do + expect(Gitlab::Diff::CustomDiff).to receive(:preprocess_before_diff).and_call_original + expect(diff_file.rendered).to be_nil + end + end + + context 'when ipynb_semantic_diff is on, and rendered_viewer is on' do + let(:ipynb_semantic_diff) { true } + let(:rendered_diffs_viewer) { true } + + it 'transforms diff using NotebookDiffFile' do + expect(Gitlab::Diff::CustomDiff).not_to receive(:preprocess_before_diff) + expect(diff_file.rendered).not_to be_nil + end + end + end + end + describe '#has_renderable?' do context 'file is ipynb' do let(:commit) { project.commit("532c837") } @@ -66,14 +114,58 @@ RSpec.describe Gitlab::Diff::File do it 'does not have renderable viewer' do expect(diff_file.has_renderable?).to be_falsey end + + it 'does not create a Notebook DiffFile' do + expect(diff_file.rendered).to be_nil + + expect(::Gitlab::Diff::Rendered::Notebook::DiffFile).not_to receive(:new) + end end end describe '#rendered' do - let(:commit) { project.commit("532c837") } + context 'when not ipynb' do + it 'is nil' do + expect(diff_file.rendered).to be_nil + end + end + + context 'when ipynb' do + let(:commit) { project.commit("532c837") } + + it 'creates a NotebookDiffFile for rendering' do + expect(diff_file.rendered).to be_kind_of(Gitlab::Diff::Rendered::Notebook::DiffFile) + end + + context 'when too large' do + it 'is nil' do + expect(diff).to receive(:too_large?).and_return(true) + + expect(diff_file.rendered).to be_nil + end + end + + context 'when not modified' do + it 'is nil' do + expect(diff_file).to receive(:modified_file?).and_return(false) + + expect(diff_file.rendered).to be_nil + end + end + + context 'when semantic ipynb is off' do + before do + stub_feature_flags(ipynb_semantic_diff: false) + end + + it 'returns nil' do + expect(diff_file).not_to receive(:modified_file?) + expect(diff_file).not_to receive(:ipynb?) + expect(diff).not_to receive(:too_large?) 
- it 'creates a NotebookDiffFile for rendering' do - expect(diff_file.rendered).to be_kind_of(Gitlab::Diff::Rendered::Notebook::DiffFile) + expect(diff_file.rendered).to be_nil + end + end end end diff --git a/spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb b/spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb index 15edbc22460..89b284feee0 100644 --- a/spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb +++ b/spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb @@ -63,6 +63,28 @@ RSpec.describe Gitlab::Diff::Rendered::Notebook::DiffFile do expect(nb_file.diff).to be_nil end end + + context 'timeout' do + it 'utilizes timeout for web' do + expect(Timeout).to receive(:timeout).with(described_class::RENDERED_TIMEOUT_FOREGROUND).and_call_original + + nb_file.diff + end + + it 'falls back to nil on timeout' do + allow(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception) + expect(Timeout).to receive(:timeout).and_raise(Timeout::Error) + + expect(nb_file.diff).to be_nil + end + + it 'utilizes longer timeout for sidekiq' do + allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true) + expect(Timeout).to receive(:timeout).with(described_class::RENDERED_TIMEOUT_BACKGROUND).and_call_original + + nb_file.diff + end + end end describe '#has_renderable?' do diff --git a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb index 913e197708f..8d008986464 100644 --- a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb +++ b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb @@ -477,20 +477,6 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do end end - context 'when there is a reply-to address and a from address' do - let(:email_raw) { email_fixture('emails/service_desk_reply_to_and_from.eml') } - - it 'shows both from and reply-to addresses in the issue header' do - setup_attachment - - expect { receiver.execute }.to change { Issue.count }.by(1) - - new_issue = Issue.last - - expect(new_issue.external_author).to eq('finn@adventuretime.ooo (reply to: marceline@adventuretime.ooo)') - end - end - context 'when service desk is not enabled for project' do before do allow(Gitlab::ServiceDesk).to receive(:enabled?).and_return(false) diff --git a/spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb index 0521123f1ef..8bd873cf008 100644 --- a/spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb +++ b/spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb @@ -100,7 +100,6 @@ RSpec.describe Gitlab::Email::Message::InProductMarketing::Base do :trial | true :team | true :experience | true - :invite_team | false end with_them do diff --git a/spec/lib/gitlab/email/message/in_product_marketing/invite_team_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/invite_team_spec.rb deleted file mode 100644 index 8319560f594..00000000000 --- a/spec/lib/gitlab/email/message/in_product_marketing/invite_team_spec.rb +++ /dev/null @@ -1,39 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Email::Message::InProductMarketing::InviteTeam do - let_it_be(:group) { build(:group) } - let_it_be(:user) { build(:user) } - - let(:series) { 0 } - - subject(:message) { described_class.new(group: group, user: user, series: series) } - - describe 'initialize' do - context 'when series is valid' do - it 'does not raise error' do - expect { subject }.not_to 
raise_error(ArgumentError) - end - end - - context 'when series is invalid' do - let(:series) { 1 } - - it 'raises error' do - expect { subject }.to raise_error(ArgumentError) - end - end - end - - it 'contains the correct message', :aggregate_failures do - expect(message.subject_line).to eq 'Invite your teammates to GitLab' - expect(message.tagline).to be_empty - expect(message.title).to eq 'GitLab is better with teammates to help out!' - expect(message.subtitle).to be_empty - expect(message.body_line1).to eq 'Invite your teammates today and build better code together. You can even assign tasks to new teammates such as setting up CI/CD, to help get projects up and running.' - expect(message.body_line2).to be_empty - expect(message.cta_text).to eq 'Invite your teammates to help' - expect(message.logo_path).to eq 'mailers/in_product_marketing/team-0.png' - end -end diff --git a/spec/lib/gitlab/email/message/in_product_marketing_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing_spec.rb index 594df7440bb..40351bef8b9 100644 --- a/spec/lib/gitlab/email/message/in_product_marketing_spec.rb +++ b/spec/lib/gitlab/email/message/in_product_marketing_spec.rb @@ -18,7 +18,6 @@ RSpec.describe Gitlab::Email::Message::InProductMarketing do :trial | described_class::Trial :team | described_class::Team :experience | described_class::Experience - :invite_team | described_class::InviteTeam end with_them do diff --git a/spec/lib/gitlab/encoding_helper_spec.rb b/spec/lib/gitlab/encoding_helper_spec.rb index 98170ef437c..4c1fbb93c13 100644 --- a/spec/lib/gitlab/encoding_helper_spec.rb +++ b/spec/lib/gitlab/encoding_helper_spec.rb @@ -265,4 +265,14 @@ RSpec.describe Gitlab::EncodingHelper do end end end + + describe '#unquote_path' do + it do + expect(described_class.unquote_path('unquoted')).to eq('unquoted') + expect(described_class.unquote_path('"quoted"')).to eq('quoted') + expect(described_class.unquote_path('"\\311\\240\\304\\253\\305\\247\\305\\200\\310\\247\\306\\200"')).to eq('ɠīŧŀȧƀ') + expect(described_class.unquote_path('"\\\\303\\\\251"')).to eq('\303\251') + expect(described_class.unquote_path('"\a\b\e\f\n\r\t\v\""')).to eq("\a\b\e\f\n\r\t\v\"") + end + end end diff --git a/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb b/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb index 5b78acc3b1d..f878f02f410 100644 --- a/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb +++ b/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb @@ -67,7 +67,7 @@ RSpec.describe Gitlab::Gfm::UploadsRewriter do it 'does not rewrite plain links as embedded' do embedded_link = image_uploader.markdown_link - plain_image_link = embedded_link.sub(/\A!/, "") + plain_image_link = embedded_link.delete_prefix('!') text = "#{plain_image_link} and #{embedded_link}" moved_text = described_class.new(text, old_project, user).rewrite(new_project) diff --git a/spec/lib/gitlab/git/blame_spec.rb b/spec/lib/gitlab/git/blame_spec.rb index 495cb16ebab..7dd7460b142 100644 --- a/spec/lib/gitlab/git/blame_spec.rb +++ b/spec/lib/gitlab/git/blame_spec.rb @@ -4,71 +4,81 @@ require "spec_helper" RSpec.describe Gitlab::Git::Blame, :seed_helper do let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') } - let(:blame) do - Gitlab::Git::Blame.new(repository, SeedRepo::Commit::ID, "CONTRIBUTING.md") + + let(:sha) { SeedRepo::Commit::ID } + let(:path) { 'CONTRIBUTING.md' } + let(:range) { nil } + + subject(:blame) { Gitlab::Git::Blame.new(repository, sha, path, range: range) } + + let(:result) do + [].tap do |data| + 
blame.each do |commit, line, previous_path| + data << { commit: commit, line: line, previous_path: previous_path } + end + end end describe 'blaming a file' do - context "each count" do - it do - data = [] - blame.each do |commit, line| - data << { - commit: commit, - line: line - } - end + it 'has the right number of lines' do + expect(result.size).to eq(95) + expect(result.first[:commit]).to be_kind_of(Gitlab::Git::Commit) + expect(result.first[:line]).to eq("# Contribute to GitLab") + expect(result.first[:line]).to be_utf8 - expect(data.size).to eq(95) - expect(data.first[:commit]).to be_kind_of(Gitlab::Git::Commit) - expect(data.first[:line]).to eq("# Contribute to GitLab") - expect(data.first[:line]).to be_utf8 + context 'blaming a range' do + let(:range) { 2..4 } + + it 'only returns the range' do + expect(result.size).to eq(range.size) + expect(result.map {|r| r[:line] }).to eq(['', 'This guide details how contribute to GitLab.', '']) end end context "ISO-8859 encoding" do - let(:blame) do - Gitlab::Git::Blame.new(repository, SeedRepo::EncodingCommit::ID, "encoding/iso8859.txt") - end + let(:sha) { SeedRepo::EncodingCommit::ID } + let(:path) { 'encoding/iso8859.txt' } it 'converts to UTF-8' do - data = [] - blame.each do |commit, line| - data << { - commit: commit, - line: line - } - end - - expect(data.size).to eq(1) - expect(data.first[:commit]).to be_kind_of(Gitlab::Git::Commit) - expect(data.first[:line]).to eq("Ä ü") - expect(data.first[:line]).to be_utf8 + expect(result.size).to eq(1) + expect(result.first[:commit]).to be_kind_of(Gitlab::Git::Commit) + expect(result.first[:line]).to eq("Ä ü") + expect(result.first[:line]).to be_utf8 end end context "unknown encoding" do - let(:blame) do - Gitlab::Git::Blame.new(repository, SeedRepo::EncodingCommit::ID, "encoding/iso8859.txt") - end + let(:sha) { SeedRepo::EncodingCommit::ID } + let(:path) { 'encoding/iso8859.txt' } it 'converts to UTF-8' do expect_next_instance_of(CharlockHolmes::EncodingDetector) do |detector| expect(detector).to receive(:detect).and_return(nil) end - data = [] - blame.each do |commit, line| - data << { - commit: commit, - line: line - } - end + expect(result.size).to eq(1) + expect(result.first[:commit]).to be_kind_of(Gitlab::Git::Commit) + expect(result.first[:line]).to eq(" ") + expect(result.first[:line]).to be_utf8 + end + end + + context "renamed file" do + let(:project) { create(:project, :repository) } + let(:repository) { project.repository.raw_repository } + let(:commit) { project.commit('blame-on-renamed') } + let(:sha) { commit.id } + let(:path) { 'files/plain_text/renamed' } + + it 'includes the previous path' do + expect(result.size).to eq(5) - expect(data.size).to eq(1) - expect(data.first[:commit]).to be_kind_of(Gitlab::Git::Commit) - expect(data.first[:line]).to eq(" ") - expect(data.first[:line]).to be_utf8 + expect(result[0]).to include(line: 'Initial commit', previous_path: nil) + expect(result[1]).to include(line: 'Initial commit', previous_path: nil) + expect(result[2]).to include(line: 'Renamed as "filename"', previous_path: 'files/plain_text/initial-commit') + expect(result[3]).to include(line: 'Renamed as renamed', previous_path: 'files/plain_text/"filename"') + expect(result[4]).to include(line: 'Last edit, no rename', previous_path: path) end end end diff --git a/spec/lib/gitlab/git/diff_spec.rb b/spec/lib/gitlab/git/diff_spec.rb index 17bb83d0f2f..46f544797bb 100644 --- a/spec/lib/gitlab/git/diff_spec.rb +++ b/spec/lib/gitlab/git/diff_spec.rb @@ -161,6 +161,52 @@ EOT 
expect(diff).not_to have_binary_notice end end + + context 'when diff contains invalid characters' do + let(:bad_string) { [0xae].pack("C*") } + let(:bad_string_two) { [0x89].pack("C*") } + + let(:diff) { described_class.new(@raw_diff_hash.merge({ diff: bad_string })) } + let(:diff_two) { described_class.new(@raw_diff_hash.merge({ diff: bad_string_two })) } + + context 'when replace_invalid_utf8_chars is true' do + it 'will convert invalid characters and not cause an encoding error' do + expect(diff.diff).to include(Gitlab::EncodingHelper::UNICODE_REPLACEMENT_CHARACTER) + expect(diff_two.diff).to include(Gitlab::EncodingHelper::UNICODE_REPLACEMENT_CHARACTER) + + expect { Oj.dump(diff) }.not_to raise_error(EncodingError) + expect { Oj.dump(diff_two) }.not_to raise_error(EncodingError) + end + + context 'when the diff is binary' do + let(:project) { create(:project, :repository) } + + it 'will not try to replace characters' do + expect(Gitlab::EncodingHelper).not_to receive(:encode_utf8_with_replacement_character?) + expect(binary_diff(project).diff).not_to be_empty + end + end + + context 'when convert_diff_to_utf8_with_replacement_symbol feature flag is disabled' do + before do + stub_feature_flags(convert_diff_to_utf8_with_replacement_symbol: false) + end + + it 'will not try to convert invalid characters' do + expect(Gitlab::EncodingHelper).not_to receive(:encode_utf8_with_replacement_character?) + end + end + end + + context 'when replace_invalid_utf8_chars is false' do + let(:not_replaced_diff) { described_class.new(@raw_diff_hash.merge({ diff: bad_string, replace_invalid_utf8_chars: false }) ) } + let(:not_replaced_diff_two) { described_class.new(@raw_diff_hash.merge({ diff: bad_string_two, replace_invalid_utf8_chars: false }) ) } + + it 'will not try to convert invalid characters' do + expect(Gitlab::EncodingHelper).not_to receive(:encode_utf8_with_replacement_character?) 
+ end + end + end end describe 'straight diffs' do @@ -255,12 +301,11 @@ EOT let(:project) { create(:project, :repository) } it 'fake binary message when it detects binary' do - # Rugged will not detect this as binary, but we can fake it diff_message = "Binary files files/images/icn-time-tracking.pdf and files/images/icn-time-tracking.pdf differ\n" - binary_diff = described_class.between(project.repository, 'add-pdf-text-binary', 'add-pdf-text-binary^').first - expect(binary_diff.diff).not_to be_empty - expect(binary_diff.json_safe_diff).to eq(diff_message) + diff = binary_diff(project) + expect(diff.diff).not_to be_empty + expect(diff.json_safe_diff).to eq(diff_message) end it 'leave non-binary diffs as-is' do @@ -374,4 +419,9 @@ EOT expect(diff.line_count).to eq(0) end end + + def binary_diff(project) + # rugged will not detect this as binary, but we can fake it + described_class.between(project.repository, 'add-pdf-text-binary', 'add-pdf-text-binary^').first + end end diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb index ae6ca728573..47688c4b3e6 100644 --- a/spec/lib/gitlab/git/repository_spec.rb +++ b/spec/lib/gitlab/git/repository_spec.rb @@ -2448,7 +2448,7 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do it 'delegates to Gitaly' do expect_next_instance_of(Gitlab::GitalyClient::RepositoryService) do |svc| - expect(svc).to receive(:import_repository).with(url).and_return(nil) + expect(svc).to receive(:import_repository).with(url, http_authorization_header: '', mirror: false).and_return(nil) end repository.import_repository(url) diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb index 8d9ab5db886..50a0f20e775 100644 --- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb +++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb @@ -563,4 +563,39 @@ RSpec.describe Gitlab::GitalyClient::CommitService do expect(response).not_to have_key 'nonexistent' end end + + describe '#raw_blame' do + let(:project) { create(:project, :test_repo) } + let(:revision) { 'blame-on-renamed' } + let(:path) { 'files/plain_text/renamed' } + + let(:blame_headers) do + [ + '405a45736a75e439bb059e638afaa9a3c2eeda79 1 1 2', + '405a45736a75e439bb059e638afaa9a3c2eeda79 2 2', + 'bed1d1610ebab382830ee888288bf939c43873bb 3 3 1', + '3685515c40444faf92774e72835e1f9c0e809672 4 4 1', + '32c33da59f8a1a9f90bdeda570337888b00b244d 5 5 1' + ] + end + + subject(:blame) { client.raw_blame(revision, path, range: range).split("\n") } + + context 'without a range' do + let(:range) { nil } + + it 'blames a whole file' do + is_expected.to include(*blame_headers) + end + end + + context 'with a range' do + let(:range) { '3,4' } + + it 'blames part of a file' do + is_expected.to include(blame_headers[2], blame_headers[3]) + is_expected.not_to include(blame_headers[0], blame_headers[1], blame_headers[4]) + end + end + end end diff --git a/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb index 1c7b35ed928..6eb92cdeab9 100644 --- a/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb @@ -98,9 +98,9 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNotesImporter do .to receive(:each_object_to_import) .and_yield(github_comment) - expect(Gitlab::GithubImport::ImportDiffNoteWorker) - .to receive(:perform_async) - .with(project.id, 
an_instance_of(Hash), an_instance_of(String)) + expect(Gitlab::GithubImport::ImportDiffNoteWorker).to receive(:bulk_perform_in).with(1.second, [ + [project.id, an_instance_of(Hash), an_instance_of(String)] + ], batch_size: 1000, batch_delay: 1.minute) waiter = importer.parallel_import diff --git a/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb index 2c2b6a2aff0..6b807bdf098 100644 --- a/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb @@ -91,9 +91,9 @@ RSpec.describe Gitlab::GithubImport::Importer::IssuesImporter do .to receive(:each_object_to_import) .and_yield(github_issue) - expect(Gitlab::GithubImport::ImportIssueWorker) - .to receive(:perform_async) - .with(project.id, an_instance_of(Hash), an_instance_of(String)) + expect(Gitlab::GithubImport::ImportIssueWorker).to receive(:bulk_perform_in).with(1.second, [ + [project.id, an_instance_of(Hash), an_instance_of(String)] + ], batch_size: 1000, batch_delay: 1.minute) waiter = importer.parallel_import diff --git a/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb b/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb index a2c7d51214a..6dfd4424342 100644 --- a/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb @@ -118,9 +118,9 @@ RSpec.describe Gitlab::GithubImport::Importer::LfsObjectsImporter do expect(service).to receive(:execute).and_return([lfs_download_object]) end - expect(Gitlab::GithubImport::ImportLfsObjectWorker) - .to receive(:perform_async) - .with(project.id, an_instance_of(Hash), an_instance_of(String)) + expect(Gitlab::GithubImport::ImportLfsObjectWorker).to receive(:bulk_perform_in).with(1.second, [ + [project.id, an_instance_of(Hash), an_instance_of(String)] + ], batch_size: 1000, batch_delay: 1.minute) waiter = importer.parallel_import diff --git a/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb index 3782dab5ee3..3b4fe652da8 100644 --- a/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb @@ -84,9 +84,9 @@ RSpec.describe Gitlab::GithubImport::Importer::NotesImporter do .to receive(:each_object_to_import) .and_yield(github_comment) - expect(Gitlab::GithubImport::ImportNoteWorker) - .to receive(:perform_async) - .with(project.id, an_instance_of(Hash), an_instance_of(String)) + expect(Gitlab::GithubImport::ImportNoteWorker).to receive(:bulk_perform_in).with(1.second, [ + [project.id, an_instance_of(Hash), an_instance_of(String)] + ], batch_size: 1000, batch_delay: 1.minute) waiter = importer.parallel_import diff --git a/spec/lib/gitlab/github_import/object_counter_spec.rb b/spec/lib/gitlab/github_import/object_counter_spec.rb index c9e4ac67061..e522f74416c 100644 --- a/spec/lib/gitlab/github_import/object_counter_spec.rb +++ b/spec/lib/gitlab/github_import/object_counter_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe Gitlab::GithubImport::ObjectCounter, :clean_gitlab_redis_cache do - let_it_be(:project) { create(:project) } + let_it_be(:project) { create(:project, :import_started, import_type: 'github') } it 'validates the operation being incremented' do expect { described_class.increment(project, :issue, :unknown) } @@ -49,4 +49,12 @@ RSpec.describe 
Gitlab::GithubImport::ObjectCounter, :clean_gitlab_redis_cache do 'imported' => {} }) end + + it 'expires etag cache of relevant realtime change endpoints on increment' do + expect_next_instance_of(Gitlab::EtagCaching::Store) do |instance| + expect(instance).to receive(:touch).with(Gitlab::Routing.url_helpers.realtime_changes_import_github_path(format: :json)) + end + + described_class.increment(project, :issue, :fetched) + end end diff --git a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb index 6a19afbc60d..200898f8f03 100644 --- a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb +++ b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb @@ -22,10 +22,6 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling do def collection_method :issues end - - def parallel_import_batch - { size: 10, delay: 1.minute } - end end end @@ -261,7 +257,7 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling do let(:repr_class) { double(:representation) } let(:worker_class) { double(:worker) } let(:object) { double(:object) } - let(:batch_size) { 200 } + let(:batch_size) { 1000 } let(:batch_delay) { 1.minute } before do @@ -281,7 +277,6 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling do context 'with multiple objects' do before do - allow(importer).to receive(:parallel_import_batch) { { size: batch_size, delay: batch_delay } } expect(importer).to receive(:each_object_to_import).and_yield(object).and_yield(object).and_yield(object) end @@ -296,9 +291,9 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling do end end - context 'when FF is disabled' do + context 'when distribute_github_parallel_import feature flag is disabled' do before do - stub_feature_flags(spread_parallel_import: false) + stub_feature_flags(distribute_github_parallel_import: false) end it 'imports data in parallel' do diff --git a/spec/lib/gitlab/gon_helper_spec.rb b/spec/lib/gitlab/gon_helper_spec.rb index 047873d8237..28cb9125af1 100644 --- a/spec/lib/gitlab/gon_helper_spec.rb +++ b/spec/lib/gitlab/gon_helper_spec.rb @@ -64,6 +64,34 @@ RSpec.describe Gitlab::GonHelper do end end + describe '#push_force_frontend_feature_flag' do + let(:gon) { class_double('Gon') } + + before do + skip_feature_flags_yaml_validation + + allow(helper) + .to receive(:gon) + .and_return(gon) + end + + it 'pushes a feature flag to the frontend with the provided value' do + expect(gon) + .to receive(:push) + .with({ features: { 'myFeatureFlag' => true } }, true) + + helper.push_force_frontend_feature_flag(:my_feature_flag, true) + end + + it 'pushes a disabled feature flag if provided value is nil' do + expect(gon) + .to receive(:push) + .with({ features: { 'myFeatureFlag' => false } }, true) + + helper.push_force_frontend_feature_flag(:my_feature_flag, nil) + end + end + describe '#default_avatar_url' do it 'returns an absolute URL' do url = helper.default_avatar_url diff --git a/spec/lib/gitlab/graphql/known_operations_spec.rb b/spec/lib/gitlab/graphql/known_operations_spec.rb index 411c0876f82..3ebfefbb43c 100644 --- a/spec/lib/gitlab/graphql/known_operations_spec.rb +++ b/spec/lib/gitlab/graphql/known_operations_spec.rb @@ -19,7 +19,7 @@ RSpec.describe Gitlab::Graphql::KnownOperations do describe "#from_query" do where(:query_string, :expected) do - "query { helloWorld }" | described_class::ANONYMOUS + "query { helloWorld }" | described_class::UNKNOWN "query fuzzyyy { helloWorld }" | described_class::UNKNOWN "query foo { helloWorld }" | 
described_class::Operation.new("foo") end @@ -35,13 +35,13 @@ RSpec.describe Gitlab::Graphql::KnownOperations do describe "#operations" do it "returns array of known operations" do - expect(subject.operations.map(&:name)).to match_array(%w(anonymous unknown foo bar)) + expect(subject.operations.map(&:name)).to match_array(%w(unknown foo bar)) end end describe "Operation#to_caller_id" do where(:query_string, :expected) do - "query { helloWorld }" | "graphql:#{described_class::ANONYMOUS.name}" + "query { helloWorld }" | "graphql:#{described_class::UNKNOWN.name}" "query foo { helloWorld }" | "graphql:foo" end diff --git a/spec/lib/gitlab/graphql/pagination/active_record_array_connection_spec.rb b/spec/lib/gitlab/graphql/pagination/active_record_array_connection_spec.rb new file mode 100644 index 00000000000..320c6b52308 --- /dev/null +++ b/spec/lib/gitlab/graphql/pagination/active_record_array_connection_spec.rb @@ -0,0 +1,135 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Graphql::Pagination::ActiveRecordArrayConnection do + using RSpec::Parameterized::TableSyntax + + let_it_be(:items) { create_list(:package_build_info, 3) } + + let_it_be(:context) do + GraphQL::Query::Context.new( + query: GraphQL::Query.new(GitlabSchema, document: nil, context: {}, variables: {}), + values: {}, + object: nil + ) + end + + let(:first) { nil } + let(:last) { nil } + let(:after) { nil } + let(:before) { nil } + let(:max_page_size) { nil } + + let(:connection) do + described_class.new( + items, + context: context, + first: first, + last: last, + after: after, + before: before, + max_page_size: max_page_size + ) + end + + it_behaves_like 'a connection with collection methods' + + it_behaves_like 'a redactable connection' do + let(:unwanted) { items[1] } + end + + describe '#nodes' do + subject { connection.nodes } + + it { is_expected.to match_array(items) } + + context 'with first set' do + let(:first) { 2 } + + it { is_expected.to match_array([items[0], items[1]]) } + end + + context 'with last set' do + let(:last) { 2 } + + it { is_expected.to match_array([items[1], items[2]]) } + end + end + + describe '#next_page?' do + subject { connection.next_page? } + + where(:before, :first, :max_page_size, :result) do + nil | nil | nil | false + 1 | nil | nil | true + nil | 1 | nil | true + nil | 10 | nil | false + nil | 1 | 1 | true + nil | 1 | 10 | true + nil | 10 | 10 | false + end + + with_them do + it { is_expected.to eq(result) } + end + end + + describe '#previous_page?' do + subject { connection.previous_page? 
} + + where(:after, :last, :max_page_size, :result) do + nil | nil | nil | false + 1 | nil | nil | true + nil | 1 | nil | true + nil | 10 | nil | false + nil | 1 | 1 | true + nil | 1 | 10 | true + nil | 10 | 10 | false + end + + with_them do + it { is_expected.to eq(result) } + end + end + + describe '#cursor_for' do + let(:item) { items[0] } + let(:expected_result) do + GitlabSchema.cursor_encoder.encode( + Gitlab::Json.dump(id: item.id.to_s), + nonce: true + ) + end + + subject { connection.cursor_for(item) } + + it { is_expected.to eq(expected_result) } + + context 'with a BatchLoader::GraphQL item' do + let_it_be(:user) { create(:user) } + + let(:item) { ::Gitlab::Graphql::Loaders::BatchModelLoader.new(::User, user.id).find } + let(:expected_result) do + GitlabSchema.cursor_encoder.encode( + Gitlab::Json.dump(id: user.id.to_s), + nonce: true + ) + end + + it { is_expected.to eq(expected_result) } + end + end + + describe '#dup' do + subject { connection.dup } + + it 'properly handles items duplication' do + connection2 = subject + + connection2 << create(:package_build_info) + + expect(connection.items).not_to eq(connection2.items) + end + end +end diff --git a/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb index 0741088c915..86e7d4e344c 100644 --- a/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb +++ b/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb @@ -19,8 +19,8 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do Gitlab::Pagination::Keyset::ColumnOrderDefinition.new( attribute_name: 'last_repository_check_at', column_expression: Project.arel_table[:last_repository_check_at], - order_expression: Gitlab::Database.nulls_last_order('last_repository_check_at', :asc), - reversed_order_expression: Gitlab::Database.nulls_last_order('last_repository_check_at', :desc), + order_expression: Project.arel_table[:last_repository_check_at].asc.nulls_last, + reversed_order_expression: Project.arel_table[:last_repository_check_at].desc.nulls_last, order_direction: :asc, nullable: :nulls_last, distinct: false) @@ -30,8 +30,8 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do Gitlab::Pagination::Keyset::ColumnOrderDefinition.new( attribute_name: 'last_repository_check_at', column_expression: Project.arel_table[:last_repository_check_at], - order_expression: Gitlab::Database.nulls_last_order('last_repository_check_at', :desc), - reversed_order_expression: Gitlab::Database.nulls_last_order('last_repository_check_at', :asc), + order_expression: Project.arel_table[:last_repository_check_at].desc.nulls_last, + reversed_order_expression: Project.arel_table[:last_repository_check_at].asc.nulls_last, order_direction: :desc, nullable: :nulls_last, distinct: false) @@ -256,11 +256,6 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do end end - # rubocop: disable RSpec/EmptyExampleGroup - context 'when ordering uses LOWER' do - end - # rubocop: enable RSpec/EmptyExampleGroup - context 'when ordering by similarity' do let_it_be(:project1) { create(:project, name: 'test') } let_it_be(:project2) { create(:project, name: 'testing') } diff --git a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb index b511a294f97..f31ec6c09fd 100644 --- a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb +++ 
b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb @@ -77,6 +77,17 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do expect(decoded_cursor(cursor)).to eq('id' => project.id.to_s) end + context 'when SimpleOrderBuilder cannot build keyset paginated query' do + it 'increments the `old_keyset_pagination_usage` counter', :prometheus do + expect(Gitlab::Pagination::Keyset::SimpleOrderBuilder).to receive(:build).and_return([false, nil]) + + decoded_cursor(cursor) + + counter = Gitlab::Metrics.registry.get(:old_keyset_pagination_usage) + expect(counter.get(model: 'Project')).to eq(1) + end + end + context 'when an order is specified' do let(:nodes) { Project.order(:updated_at) } @@ -222,91 +233,97 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do end end - context 'when multiple orders with nil values are defined' do - let!(:project1) { create(:project, last_repository_check_at: 10.days.ago) } # Asc: project5 Desc: project3 - let!(:project2) { create(:project, last_repository_check_at: nil) } # Asc: project1 Desc: project1 - let!(:project3) { create(:project, last_repository_check_at: 5.days.ago) } # Asc: project3 Desc: project5 - let!(:project4) { create(:project, last_repository_check_at: nil) } # Asc: project2 Desc: project2 - let!(:project5) { create(:project, last_repository_check_at: 20.days.ago) } # Asc: project4 Desc: project4 + context 'when ordering uses LOWER' do + let!(:project1) { create(:project, name: 'A') } # Asc: project1 Desc: project4 + let!(:project2) { create(:project, name: 'c') } # Asc: project5 Desc: project2 + let!(:project3) { create(:project, name: 'b') } # Asc: project3 Desc: project3 + let!(:project4) { create(:project, name: 'd') } # Asc: project2 Desc: project5 + let!(:project5) { create(:project, name: 'a') } # Asc: project4 Desc: project1 context 'when ascending' do let(:nodes) do - Project.order(Arel.sql('projects.last_repository_check_at IS NULL')).order(last_repository_check_at: :asc).order(id: :asc) + Project.order(Arel::Table.new(:projects)['name'].lower.asc).order(id: :asc) end - let(:ascending_nodes) { [project5, project1, project3, project2, project4] } + let(:ascending_nodes) { [project1, project5, project3, project2, project4] } it_behaves_like 'nodes are in ascending order' - - context 'when before cursor value is NULL' do - let(:arguments) { { before: encoded_cursor(project4) } } - - it 'returns all projects before the cursor' do - expect(subject.sliced_nodes).to eq([project5, project1, project3, project2]) - end - end - - context 'when after cursor value is NULL' do - let(:arguments) { { after: encoded_cursor(project2) } } - - it 'returns all projects after the cursor' do - expect(subject.sliced_nodes).to eq([project4]) - end - end end context 'when descending' do let(:nodes) do - Project.order(Arel.sql('projects.last_repository_check_at IS NULL')).order(last_repository_check_at: :desc).order(id: :asc) + Project.order(Arel::Table.new(:projects)['name'].lower.desc).order(id: :desc) end - let(:descending_nodes) { [project3, project1, project5, project2, project4] } + let(:descending_nodes) { [project4, project2, project3, project5, project1] } it_behaves_like 'nodes are in descending order' + end + end - context 'when before cursor value is NULL' do - let(:arguments) { { before: encoded_cursor(project4) } } + context 'NULLS order' do + using RSpec::Parameterized::TableSyntax - it 'returns all projects before the cursor' do - expect(subject.sliced_nodes).to eq([project3, project1, project5, project2]) - 
end - end + let_it_be(:issue1) { create(:issue, relative_position: nil) } + let_it_be(:issue2) { create(:issue, relative_position: 100) } + let_it_be(:issue3) { create(:issue, relative_position: 200) } + let_it_be(:issue4) { create(:issue, relative_position: nil) } + let_it_be(:issue5) { create(:issue, relative_position: 300) } + + context 'when ascending NULLS LAST (ties broken by id DESC implicitly)' do + let(:ascending_nodes) { [issue2, issue3, issue5, issue4, issue1] } - context 'when after cursor value is NULL' do - let(:arguments) { { after: encoded_cursor(project2) } } + where(:nodes) do + [ + lazy { Issue.order(Issue.arel_table[:relative_position].asc.nulls_last) } + ] + end - it 'returns all projects after the cursor' do - expect(subject.sliced_nodes).to eq([project4]) - end + with_them do + it_behaves_like 'nodes are in ascending order' end end - end - context 'when ordering uses LOWER' do - let!(:project1) { create(:project, name: 'A') } # Asc: project1 Desc: project4 - let!(:project2) { create(:project, name: 'c') } # Asc: project5 Desc: project2 - let!(:project3) { create(:project, name: 'b') } # Asc: project3 Desc: project3 - let!(:project4) { create(:project, name: 'd') } # Asc: project2 Desc: project5 - let!(:project5) { create(:project, name: 'a') } # Asc: project4 Desc: project1 + context 'when descending NULLS LAST (ties broken by id DESC implicitly)' do + let(:descending_nodes) { [issue5, issue3, issue2, issue4, issue1] } - context 'when ascending' do - let(:nodes) do - Project.order(Arel::Table.new(:projects)['name'].lower.asc).order(id: :asc) + where(:nodes) do + [ + lazy { Issue.order(Issue.arel_table[:relative_position].desc.nulls_last) } +] end - let(:ascending_nodes) { [project1, project5, project3, project2, project4] } - - it_behaves_like 'nodes are in ascending order' + with_them do + it_behaves_like 'nodes are in descending order' + end end - context 'when descending' do - let(:nodes) do - Project.order(Arel::Table.new(:projects)['name'].lower.desc).order(id: :desc) + context 'when ascending NULLS FIRST with a tie breaker' do + let(:ascending_nodes) { [issue1, issue4, issue2, issue3, issue5] } + + where(:nodes) do + [ + lazy { Issue.order(Issue.arel_table[:relative_position].asc.nulls_first).order(id: :asc) } +] end - let(:descending_nodes) { [project4, project2, project3, project5, project1] } + with_them do + it_behaves_like 'nodes are in ascending order' + end + end - it_behaves_like 'nodes are in descending order' + context 'when descending NULLS FIRST with a tie breaker' do + let(:descending_nodes) { [issue1, issue4, issue5, issue3, issue2] } + + where(:nodes) do + [ + lazy { Issue.order(Issue.arel_table[:relative_position].desc.nulls_first).order(id: :asc) } +] + end + + with_them do + it_behaves_like 'nodes are in descending order' + end end end diff --git a/spec/lib/gitlab/hook_data/issuable_builder_spec.rb b/spec/lib/gitlab/hook_data/issuable_builder_spec.rb index f5ee8eba8bc..676396697fb 100644 --- a/spec/lib/gitlab/hook_data/issuable_builder_spec.rb +++ b/spec/lib/gitlab/hook_data/issuable_builder_spec.rb @@ -6,7 +6,7 @@ RSpec.describe Gitlab::HookData::IssuableBuilder do let_it_be(:user) { create(:user) } # This shared example requires a `builder` and `user` variable - shared_examples 'issuable hook data' do |kind| + shared_examples 'issuable hook data' do |kind, hook_data_issuable_builder_class| let(:data) { builder.build(user: user) } include_examples 'project hook data' do @@ -20,7 +20,7 @@ RSpec.describe Gitlab::HookData::IssuableBuilder do 
expect(data[:object_kind]).to eq(kind) expect(data[:user]).to eq(user.hook_attrs) expect(data[:project]).to eq(builder.issuable.project.hook_attrs) - expect(data[:object_attributes]).to eq(builder.issuable.hook_attrs) + expect(data[:object_attributes]).to eq(hook_data_issuable_builder_class.new(issuable).build) expect(data[:changes]).to eq({}) expect(data[:repository]).to eq(builder.issuable.project.hook_attrs.slice(:name, :url, :description, :homepage)) end @@ -95,12 +95,12 @@ RSpec.describe Gitlab::HookData::IssuableBuilder do end describe '#build' do - it_behaves_like 'issuable hook data', 'issue' do + it_behaves_like 'issuable hook data', 'issue', Gitlab::HookData::IssueBuilder do let(:issuable) { create(:issue, description: 'A description') } let(:builder) { described_class.new(issuable) } end - it_behaves_like 'issuable hook data', 'merge_request' do + it_behaves_like 'issuable hook data', 'merge_request', Gitlab::HookData::MergeRequestBuilder do let(:issuable) { create(:merge_request, description: 'A description') } let(:builder) { described_class.new(issuable) } end diff --git a/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb b/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb index ddd681f75f0..771fc0218e2 100644 --- a/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb +++ b/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb @@ -62,6 +62,7 @@ RSpec.describe Gitlab::HookData::MergeRequestBuilder do expect(data).to include(:human_time_estimate) expect(data).to include(:human_total_time_spent) expect(data).to include(:human_time_change) + expect(data).to include(:labels) end context 'when the MR has an image in the description' do diff --git a/spec/lib/gitlab/http_connection_adapter_spec.rb b/spec/lib/gitlab/http_connection_adapter_spec.rb index e9e517f1fe6..cde8376febd 100644 --- a/spec/lib/gitlab/http_connection_adapter_spec.rb +++ b/spec/lib/gitlab/http_connection_adapter_spec.rb @@ -27,16 +27,6 @@ RSpec.describe Gitlab::HTTPConnectionAdapter do end end - context 'with header_read_timeout_buffered_io feature disabled' do - before do - stub_feature_flags(header_read_timeout_buffered_io: false) - end - - it 'uses the regular Net::HTTP class' do - expect(connection).to be_a(Net::HTTP) - end - end - context 'when local requests are allowed' do let(:options) { { allow_local_requests: true } } diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml index 29a19e4cafd..730f9035293 100644 --- a/spec/lib/gitlab/import_export/all_models.yml +++ b/spec/lib/gitlab/import_export/all_models.yml @@ -665,6 +665,7 @@ protected_environments: - project - group - deploy_access_levels +- approval_rules deploy_access_levels: - protected_environment - user diff --git a/spec/lib/gitlab/import_export/command_line_util_spec.rb b/spec/lib/gitlab/import_export/command_line_util_spec.rb index 59d97357045..f47f1ab58a8 100644 --- a/spec/lib/gitlab/import_export/command_line_util_spec.rb +++ b/spec/lib/gitlab/import_export/command_line_util_spec.rb @@ -114,7 +114,7 @@ RSpec.describe Gitlab::ImportExport::CommandLineUtil do end end - %w[MOVED_PERMANENTLY FOUND TEMPORARY_REDIRECT].each do |code| + %w[MOVED_PERMANENTLY FOUND SEE_OTHER TEMPORARY_REDIRECT].each do |code| context "with a redirect status code #{code}" do let(:status) { HTTP::Status.const_get(code, false) } diff --git a/spec/lib/gitlab/import_export/duration_measuring_spec.rb b/spec/lib/gitlab/import_export/duration_measuring_spec.rb new file mode 100644 index 
00000000000..cf8b6060741 --- /dev/null +++ b/spec/lib/gitlab/import_export/duration_measuring_spec.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +require 'fast_spec_helper' + +RSpec.describe Gitlab::ImportExport::DurationMeasuring do + subject do + Class.new do + include Gitlab::ImportExport::DurationMeasuring + + def test + with_duration_measuring do + 'test' + end + end + end.new + end + + it 'measures method execution duration' do + subject.test + + expect(subject.duration_s).not_to be_nil + end + + describe '#with_duration_measuring' do + it 'yields control' do + expect { |block| subject.with_duration_measuring(&block) }.to yield_control + end + + it 'returns result of the yielded block' do + return_value = 'return_value' + + expect(subject.with_duration_measuring { return_value }).to eq(return_value) + end + end +end diff --git a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb index ba1cccf87ce..03f522ae490 100644 --- a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb +++ b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb @@ -115,7 +115,7 @@ RSpec.describe Gitlab::ImportExport::Json::StreamingSerializer do end it 'orders exported issues by custom column(relative_position)' do - expected_issues = exportable.issues.reorder(::Gitlab::Database.nulls_first_order('relative_position', 'DESC')).order(id: :desc).map(&:to_json) + expected_issues = exportable.issues.reorder(Issue.arel_table[:relative_position].desc.nulls_first).order(id: :desc).map(&:to_json) expect(json_writer).to receive(:write_relation_array).with(exportable_path, :issues, expected_issues) diff --git a/spec/lib/gitlab/import_export/version_checker_spec.rb b/spec/lib/gitlab/import_export/version_checker_spec.rb index 8b39330656f..9e69e04b17c 100644 --- a/spec/lib/gitlab/import_export/version_checker_spec.rb +++ b/spec/lib/gitlab/import_export/version_checker_spec.rb @@ -1,9 +1,10 @@ # frozen_string_literal: true require 'spec_helper' -include ImportExport::CommonUtil RSpec.describe Gitlab::ImportExport::VersionChecker do + include ImportExport::CommonUtil + let!(:shared) { Gitlab::ImportExport::Shared.new(nil) } describe 'bundle a project Git repo' do diff --git a/spec/lib/gitlab/insecure_key_fingerprint_spec.rb b/spec/lib/gitlab/insecure_key_fingerprint_spec.rb index 2f3489edcd8..3a281574563 100644 --- a/spec/lib/gitlab/insecure_key_fingerprint_spec.rb +++ b/spec/lib/gitlab/insecure_key_fingerprint_spec.rb @@ -10,16 +10,9 @@ RSpec.describe Gitlab::InsecureKeyFingerprint do 'Jw0=' end - let(:fingerprint) { "3f:a2:ee:de:b5:de:53:c3:aa:2f:9c:45:24:4c:47:7b" } let(:fingerprint_sha256) { "MQHWhS9nhzUezUdD42ytxubZoBKrZLbyBZzxCkmnxXc" } - describe "#fingerprint" do - it "generates the key's fingerprint" do - expect(described_class.new(key.split[1]).fingerprint_md5).to eq(fingerprint) - end - end - - describe "#fingerprint" do + describe '#fingerprint_sha256' do it "generates the key's fingerprint" do expect(described_class.new(key.split[1]).fingerprint_sha256).to eq(fingerprint_sha256) end diff --git a/spec/lib/gitlab/legacy_github_import/project_creator_spec.rb b/spec/lib/gitlab/legacy_github_import/project_creator_spec.rb index 02cc2eba4da..68f1c214cef 100644 --- a/spec/lib/gitlab/legacy_github_import/project_creator_spec.rb +++ b/spec/lib/gitlab/legacy_github_import/project_creator_spec.rb @@ -7,7 +7,7 @@ RSpec.describe Gitlab::LegacyGithubImport::ProjectCreator do let(:namespace) { create(:group) } let(:repo) do 
- OpenStruct.new( + ActiveSupport::InheritableOptions.new( login: 'vim', name: 'vim', full_name: 'asd/vim', @@ -21,7 +21,7 @@ RSpec.describe Gitlab::LegacyGithubImport::ProjectCreator do namespace.add_owner(user) expect_next_instance_of(Project) do |project| - expect(project).to receive(:add_import_job) + allow(project).to receive(:add_import_job) end end diff --git a/spec/lib/gitlab/metrics/rails_slis_spec.rb b/spec/lib/gitlab/metrics/rails_slis_spec.rb index 0c77dc9f582..2ba06316507 100644 --- a/spec/lib/gitlab/metrics/rails_slis_spec.rb +++ b/spec/lib/gitlab/metrics/rails_slis_spec.rb @@ -9,7 +9,7 @@ RSpec.describe Gitlab::Metrics::RailsSlis do end allow(Gitlab::RequestEndpoints).to receive(:all_api_endpoints).and_return([api_route]) - allow(Gitlab::RequestEndpoints).to receive(:all_controller_actions).and_return([[ProjectsController, 'show']]) + allow(Gitlab::RequestEndpoints).to receive(:all_controller_actions).and_return([[ProjectsController, 'index']]) allow(Gitlab::Graphql::KnownOperations).to receive(:default).and_return(Gitlab::Graphql::KnownOperations.new(%w(foo bar))) end @@ -22,13 +22,13 @@ RSpec.describe Gitlab::Metrics::RailsSlis do request_urgency: :default }, { - endpoint_id: "ProjectsController#show", + endpoint_id: "ProjectsController#index", feature_category: :projects, request_urgency: :default } ] - possible_graphql_labels = ['graphql:foo', 'graphql:bar', 'graphql:unknown', 'graphql:anonymous'].map do |endpoint_id| + possible_graphql_labels = ['graphql:foo', 'graphql:bar', 'graphql:unknown'].map do |endpoint_id| { endpoint_id: endpoint_id, feature_category: nil, diff --git a/spec/lib/gitlab/omniauth_initializer_spec.rb b/spec/lib/gitlab/omniauth_initializer_spec.rb index 8b959cf787f..c91b14a33ba 100644 --- a/spec/lib/gitlab/omniauth_initializer_spec.rb +++ b/spec/lib/gitlab/omniauth_initializer_spec.rb @@ -309,4 +309,16 @@ RSpec.describe Gitlab::OmniauthInitializer do subject.execute([conf]) end end + + describe '.full_host' do + subject { described_class.full_host.call({}) } + + let(:base_url) { 'http://localhost/test' } + + before do + allow(Settings).to receive(:gitlab).and_return({ 'base_url' => base_url }) + end + + it { is_expected.to eq(base_url) } + end end diff --git a/spec/lib/gitlab/pagination/keyset/column_order_definition_spec.rb b/spec/lib/gitlab/pagination/keyset/column_order_definition_spec.rb index 69384e0c501..778244677ef 100644 --- a/spec/lib/gitlab/pagination/keyset/column_order_definition_spec.rb +++ b/spec/lib/gitlab/pagination/keyset/column_order_definition_spec.rb @@ -140,8 +140,8 @@ RSpec.describe Gitlab::Pagination::Keyset::ColumnOrderDefinition do described_class.new( attribute_name: :name, column_expression: Project.arel_table[:name], - order_expression: Gitlab::Database.nulls_last_order('merge_request_metrics.merged_at', :desc), - reversed_order_expression: Gitlab::Database.nulls_first_order('merge_request_metrics.merged_at', :asc), + order_expression: MergeRequest::Metrics.arel_table[:merged_at].desc.nulls_last, + reversed_order_expression: MergeRequest::Metrics.arel_table[:merged_at].asc.nulls_first, order_direction: :desc, nullable: :nulls_last, # null values are always last distinct: false @@ -161,8 +161,8 @@ RSpec.describe Gitlab::Pagination::Keyset::ColumnOrderDefinition do described_class.new( attribute_name: :name, column_expression: Project.arel_table[:name], - order_expression: Gitlab::Database.nulls_last_order('merge_request_metrics.merged_at', :desc), - reversed_order_expression: 
Gitlab::Database.nulls_first_order('merge_request_metrics.merged_at', :asc), + order_expression: MergeRequest::Metrics.arel_table[:merged_at].desc.nulls_last, + reversed_order_expression: MergeRequest::Metrics.arel_table[:merged_at].asc.nulls_first, order_direction: :desc, nullable: true, distinct: false @@ -175,8 +175,8 @@ RSpec.describe Gitlab::Pagination::Keyset::ColumnOrderDefinition do described_class.new( attribute_name: :name, column_expression: Project.arel_table[:name], - order_expression: Gitlab::Database.nulls_last_order('merge_request_metrics.merged_at', :desc), - reversed_order_expression: Gitlab::Database.nulls_first_order('merge_request_metrics.merged_at', :asc), + order_expression: MergeRequest::Metrics.arel_table[:merged_at].desc.nulls_last, + reversed_order_expression: MergeRequest::Metrics.arel_table[:merged_at].asc.nulls_first, order_direction: :desc, nullable: :nulls_last, distinct: true @@ -191,8 +191,8 @@ RSpec.describe Gitlab::Pagination::Keyset::ColumnOrderDefinition do described_class.new( attribute_name: :name, column_expression: Project.arel_table[:name], - order_expression: Gitlab::Database.nulls_last_order('merge_request_metrics.merged_at', :desc), - reversed_order_expression: Gitlab::Database.nulls_first_order('merge_request_metrics.merged_at', :asc), + order_expression: MergeRequest::Metrics.arel_table[:merged_at].desc.nulls_last, + reversed_order_expression: MergeRequest::Metrics.arel_table[:merged_at].asc.nulls_first, order_direction: :desc, nullable: :nulls_last, # null values are always last distinct: false diff --git a/spec/lib/gitlab/pagination/keyset/in_operator_optimization/query_builder_spec.rb b/spec/lib/gitlab/pagination/keyset/in_operator_optimization/query_builder_spec.rb index 58db22e5a9c..9f2ac9a953d 100644 --- a/spec/lib/gitlab/pagination/keyset/in_operator_optimization/query_builder_spec.rb +++ b/spec/lib/gitlab/pagination/keyset/in_operator_optimization/query_builder_spec.rb @@ -24,12 +24,12 @@ RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::QueryBuilder let_it_be(:issues) do [ create(:issue, project: project_1, created_at: three_weeks_ago, relative_position: 5), - create(:issue, project: project_1, created_at: two_weeks_ago), + create(:issue, project: project_1, created_at: two_weeks_ago, relative_position: nil), create(:issue, project: project_2, created_at: two_weeks_ago, relative_position: 15), - create(:issue, project: project_2, created_at: two_weeks_ago), - create(:issue, project: project_3, created_at: four_weeks_ago), + create(:issue, project: project_2, created_at: two_weeks_ago, relative_position: nil), + create(:issue, project: project_3, created_at: four_weeks_ago, relative_position: nil), create(:issue, project: project_4, created_at: five_weeks_ago, relative_position: 10), - create(:issue, project: project_5, created_at: four_weeks_ago) + create(:issue, project: project_5, created_at: four_weeks_ago, relative_position: nil) ] end @@ -121,8 +121,8 @@ RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::QueryBuilder Gitlab::Pagination::Keyset::ColumnOrderDefinition.new( attribute_name: :relative_position, column_expression: Issue.arel_table[:relative_position], - order_expression: Gitlab::Database.nulls_last_order('relative_position', :desc), - reversed_order_expression: Gitlab::Database.nulls_first_order('relative_position', :asc), + order_expression: Issue.arel_table[:relative_position].desc.nulls_last, + reversed_order_expression: Issue.arel_table[:relative_position].asc.nulls_first, 
order_direction: :desc, nullable: :nulls_last, distinct: false @@ -155,6 +155,31 @@ RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::QueryBuilder it_behaves_like 'correct ordering examples' end + + context 'with condition "relative_position IS NULL"' do + let(:base_scope) { Issue.where(relative_position: nil) } + let(:scope) { base_scope.order(order) } + + let(:in_operator_optimization_options) do + { + array_scope: Project.where(namespace_id: top_level_group.self_and_descendants.select(:id)).select(:id), + array_mapping_scope: -> (id_expression) { Issue.merge(base_scope.dup).where(Issue.arel_table[:project_id].eq(id_expression)) }, + finder_query: -> (_relative_position_expression, id_expression) { Issue.where(Issue.arel_table[:id].eq(id_expression)) } + } + end + + context 'when iterating records one by one' do + let(:batch_size) { 1 } + + it_behaves_like 'correct ordering examples' + end + + context 'when iterating records with LIMIT 3' do + let(:batch_size) { 3 } + + it_behaves_like 'correct ordering examples' + end + end end context 'when ordering by issues.created_at DESC, issues.id ASC' do @@ -239,7 +264,7 @@ RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::QueryBuilder end it 'raises error when unsupported scope is passed' do - scope = Issue.order(Issue.arel_table[:id].lower.desc) + scope = Issue.order(Arel::Nodes::NamedFunction.new('UPPER', [Issue.arel_table[:id]])) options = { scope: scope, diff --git a/spec/lib/gitlab/pagination/keyset/iterator_spec.rb b/spec/lib/gitlab/pagination/keyset/iterator_spec.rb index 09cbca2c1cb..d62d20d2d2c 100644 --- a/spec/lib/gitlab/pagination/keyset/iterator_spec.rb +++ b/spec/lib/gitlab/pagination/keyset/iterator_spec.rb @@ -19,8 +19,8 @@ RSpec.describe Gitlab::Pagination::Keyset::Iterator do Gitlab::Pagination::Keyset::ColumnOrderDefinition.new( attribute_name: column, column_expression: klass.arel_table[column], - order_expression: ::Gitlab::Database.nulls_order(column, direction, nulls_position), - reversed_order_expression: ::Gitlab::Database.nulls_order(column, reverse_direction, reverse_nulls_position), + order_expression: klass.arel_table[column].public_send(direction).public_send(nulls_position), # rubocop:disable GitlabSecurity/PublicSend + reversed_order_expression: klass.arel_table[column].public_send(reverse_direction).public_send(reverse_nulls_position), # rubocop:disable GitlabSecurity/PublicSend order_direction: direction, nullable: nulls_position, distinct: false @@ -99,7 +99,7 @@ RSpec.describe Gitlab::Pagination::Keyset::Iterator do iterator.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:relative_position, :id)) } - expect(positions).to eq(project.issues.reorder(::Gitlab::Database.nulls_last_order('relative_position', 'ASC')).order(id: :asc).pluck(:relative_position, :id)) + expect(positions).to eq(project.issues.reorder(Issue.arel_table[:relative_position].asc.nulls_last).order(id: :asc).pluck(:relative_position, :id)) end end @@ -111,7 +111,7 @@ RSpec.describe Gitlab::Pagination::Keyset::Iterator do iterator.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:relative_position, :id)) } - expect(positions).to eq(project.issues.reorder(::Gitlab::Database.nulls_first_order('relative_position', 'DESC')).order(id: :desc).pluck(:relative_position, :id)) + expect(positions).to eq(project.issues.reorder(Issue.arel_table[:relative_position].desc.nulls_first).order(id: :desc).pluck(:relative_position, :id)) end end @@ -123,7 +123,7 @@ RSpec.describe Gitlab::Pagination::Keyset::Iterator do 
iterator.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:relative_position, :id)) } - expect(positions).to eq(project.issues.reorder(::Gitlab::Database.nulls_first_order('relative_position', 'ASC')).order(id: :asc).pluck(:relative_position, :id)) + expect(positions).to eq(project.issues.reorder(Issue.arel_table[:relative_position].asc.nulls_first).order(id: :asc).pluck(:relative_position, :id)) end end @@ -136,7 +136,7 @@ RSpec.describe Gitlab::Pagination::Keyset::Iterator do iterator.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:relative_position, :id)) } - expect(positions).to eq(project.issues.reorder(::Gitlab::Database.nulls_last_order('relative_position', 'DESC')).order(id: :desc).pluck(:relative_position, :id)) + expect(positions).to eq(project.issues.reorder(Issue.arel_table[:relative_position].desc.nulls_last).order(id: :desc).pluck(:relative_position, :id)) end end diff --git a/spec/lib/gitlab/pagination/keyset/order_spec.rb b/spec/lib/gitlab/pagination/keyset/order_spec.rb index 1bed8e542a2..abbb3a21cd4 100644 --- a/spec/lib/gitlab/pagination/keyset/order_spec.rb +++ b/spec/lib/gitlab/pagination/keyset/order_spec.rb @@ -262,8 +262,8 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do Gitlab::Pagination::Keyset::ColumnOrderDefinition.new( attribute_name: 'year', column_expression: table['year'], - order_expression: Gitlab::Database.nulls_last_order('year', :asc), - reversed_order_expression: Gitlab::Database.nulls_first_order('year', :desc), + order_expression: table[:year].asc.nulls_last, + reversed_order_expression: table[:year].desc.nulls_first, order_direction: :asc, nullable: :nulls_last, distinct: false @@ -271,8 +271,8 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do Gitlab::Pagination::Keyset::ColumnOrderDefinition.new( attribute_name: 'month', column_expression: table['month'], - order_expression: Gitlab::Database.nulls_last_order('month', :asc), - reversed_order_expression: Gitlab::Database.nulls_first_order('month', :desc), + order_expression: table[:month].asc.nulls_last, + reversed_order_expression: table[:month].desc.nulls_first, order_direction: :asc, nullable: :nulls_last, distinct: false @@ -328,8 +328,8 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do Gitlab::Pagination::Keyset::ColumnOrderDefinition.new( attribute_name: 'year', column_expression: table['year'], - order_expression: Gitlab::Database.nulls_first_order('year', :asc), - reversed_order_expression: Gitlab::Database.nulls_last_order('year', :desc), + order_expression: table[:year].asc.nulls_first, + reversed_order_expression: table[:year].desc.nulls_last, order_direction: :asc, nullable: :nulls_first, distinct: false @@ -337,9 +337,9 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do Gitlab::Pagination::Keyset::ColumnOrderDefinition.new( attribute_name: 'month', column_expression: table['month'], - order_expression: Gitlab::Database.nulls_first_order('month', :asc), + order_expression: table[:month].asc.nulls_first, order_direction: :asc, - reversed_order_expression: Gitlab::Database.nulls_last_order('month', :desc), + reversed_order_expression: table[:month].desc.nulls_last, nullable: :nulls_first, distinct: false ), @@ -441,6 +441,47 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do end end + context 'when ordering by the named function LOWER' do + let(:order) do + Gitlab::Pagination::Keyset::Order.build([ + Gitlab::Pagination::Keyset::ColumnOrderDefinition.new( + attribute_name: 'title', + column_expression: Arel::Nodes::NamedFunction.new("LOWER", 
[table['title'].desc]), + order_expression: table['title'].lower.desc, + nullable: :not_nullable, + distinct: false + ), + Gitlab::Pagination::Keyset::ColumnOrderDefinition.new( + attribute_name: 'id', + column_expression: table['id'], + order_expression: table['id'].desc, + nullable: :not_nullable, + distinct: true + ) + ]) + end + + let(:table_data) do + <<-SQL + VALUES (1, 'A') + SQL + end + + let(:query) do + <<-SQL + SELECT id, title + FROM (#{table_data}) my_table (id, title) + ORDER BY #{order}; + SQL + end + + subject { run_query(query) } + + it "uses downcased value for encoding and decoding a cursor" do + expect(order.cursor_attributes_for_node(subject.first)['title']).to eq("a") + end + end + context 'when the passed cursor values do not match with the order definition' do let(:order) do Gitlab::Pagination::Keyset::Order.build([ diff --git a/spec/lib/gitlab/pagination/keyset/simple_order_builder_spec.rb b/spec/lib/gitlab/pagination/keyset/simple_order_builder_spec.rb index 5af86cb2dc0..4f1d380ab0a 100644 --- a/spec/lib/gitlab/pagination/keyset/simple_order_builder_spec.rb +++ b/spec/lib/gitlab/pagination/keyset/simple_order_builder_spec.rb @@ -5,6 +5,7 @@ require 'spec_helper' RSpec.describe Gitlab::Pagination::Keyset::SimpleOrderBuilder do let(:ordered_scope) { described_class.build(scope).first } let(:order_object) { Gitlab::Pagination::Keyset::Order.extract_keyset_order_object(ordered_scope) } + let(:column_definition) { order_object.column_definitions.first } subject(:sql_with_order) { ordered_scope.to_sql } @@ -16,11 +17,25 @@ RSpec.describe Gitlab::Pagination::Keyset::SimpleOrderBuilder do end it 'sets the column definition distinct and not nullable' do - column_definition = order_object.column_definitions.first - expect(column_definition).to be_not_nullable expect(column_definition).to be_distinct end + + context "when the order scope's model uses default_scope" do + let(:scope) do + model = Class.new(ApplicationRecord) do + self.table_name = 'events' + + default_scope { reorder(nil) } # rubocop:disable Cop/DefaultScope + end + + model.reorder(nil) + end + + it 'orders by primary key' do + expect(sql_with_order).to end_with('ORDER BY "events"."id" DESC') + end + end end context 'when primary key order present' do @@ -39,8 +54,6 @@ RSpec.describe Gitlab::Pagination::Keyset::SimpleOrderBuilder do end it 'sets the column definition for created_at non-distinct and nullable' do - column_definition = order_object.column_definitions.first - expect(column_definition.attribute_name).to eq('created_at') expect(column_definition.nullable?).to eq(true) # be_nullable calls non_null? 
method for some reason expect(column_definition).not_to be_distinct @@ -59,14 +72,80 @@ RSpec.describe Gitlab::Pagination::Keyset::SimpleOrderBuilder do let(:scope) { Project.where(id: [1, 2, 3]).order(namespace_id: :asc, id: :asc) } it 'sets the column definition for namespace_id non-distinct and non-nullable' do - column_definition = order_object.column_definitions.first - expect(column_definition.attribute_name).to eq('namespace_id') expect(column_definition).to be_not_nullable expect(column_definition).not_to be_distinct end end + context 'when ordering by a column with the lower named function' do + let(:scope) { Project.where(id: [1, 2, 3]).order(Project.arel_table[:name].lower.desc) } + + it 'sets the column definition for name' do + expect(column_definition.attribute_name).to eq('name') + expect(column_definition.column_expression.expressions.first.name).to eq('name') + expect(column_definition.column_expression.name).to eq('LOWER') + end + + it 'adds extra primary key order as tie-breaker' do + expect(sql_with_order).to end_with('ORDER BY LOWER("projects"."name") DESC, "projects"."id" DESC') + end + end + + context "NULLS order given as as an Arel literal" do + context 'when NULLS LAST order is given without a tie-breaker' do + let(:scope) { Project.order(Project.arel_table[:created_at].asc.nulls_last) } + + it 'sets the column definition for created_at appropriately' do + expect(column_definition.attribute_name).to eq('created_at') + end + + it 'orders by primary key' do + expect(sql_with_order) + .to end_with('ORDER BY "projects"."created_at" ASC NULLS LAST, "projects"."id" DESC') + end + end + + context 'when NULLS FIRST order is given with a tie-breaker' do + let(:scope) { Issue.order(Issue.arel_table[:relative_position].desc.nulls_first).order(id: :asc) } + + it 'sets the column definition for created_at appropriately' do + expect(column_definition.attribute_name).to eq('relative_position') + end + + it 'orders by the given primary key' do + expect(sql_with_order) + .to end_with('ORDER BY "issues"."relative_position" DESC NULLS FIRST, "issues"."id" ASC') + end + end + end + + context "NULLS order given as as an Arel node" do + context 'when NULLS LAST order is given without a tie-breaker' do + let(:scope) { Project.order(Project.arel_table[:created_at].asc.nulls_last) } + + it 'sets the column definition for created_at appropriately' do + expect(column_definition.attribute_name).to eq('created_at') + end + + it 'orders by primary key' do + expect(sql_with_order).to end_with('ORDER BY "projects"."created_at" ASC NULLS LAST, "projects"."id" DESC') + end + end + + context 'when NULLS FIRST order is given with a tie-breaker' do + let(:scope) { Issue.order(Issue.arel_table[:relative_position].desc.nulls_first).order(id: :asc) } + + it 'sets the column definition for created_at appropriately' do + expect(column_definition.attribute_name).to eq('relative_position') + end + + it 'orders by the given primary key' do + expect(sql_with_order).to end_with('ORDER BY "issues"."relative_position" DESC NULLS FIRST, "issues"."id" ASC') + end + end + end + context 'return :unable_to_order symbol when order cannot be built' do subject(:success) { described_class.build(scope).last } @@ -76,10 +155,20 @@ RSpec.describe Gitlab::Pagination::Keyset::SimpleOrderBuilder do it { is_expected.to eq(false) } end - context 'when NULLS LAST order is given' do - let(:scope) { Project.order(::Gitlab::Database.nulls_last_order('created_at', 'ASC')) } + context 'when an invalid NULLS order is given' do + using 
RSpec::Parameterized::TableSyntax - it { is_expected.to eq(false) } + where(:scope) do + [ + lazy { Project.order(Arel.sql('projects.updated_at created_at Asc Nulls Last')) }, + lazy { Project.order(Arel.sql('projects.created_at ZZZ NULLS FIRST')) }, + lazy { Project.order(Arel.sql('projects.relative_position ASC NULLS LAST')) } + ] + end + + with_them do + it { is_expected.to eq(false) } + end end context 'when more than 2 columns are given for the order' do diff --git a/spec/lib/gitlab/pagination/offset_pagination_spec.rb b/spec/lib/gitlab/pagination/offset_pagination_spec.rb index f8d50fbc517..ebbd207cc11 100644 --- a/spec/lib/gitlab/pagination/offset_pagination_spec.rb +++ b/spec/lib/gitlab/pagination/offset_pagination_spec.rb @@ -66,70 +66,50 @@ RSpec.describe Gitlab::Pagination::OffsetPagination do let(:query) { base_query.merge(page: 1, per_page: 2) } - context 'when the api_kaminari_count_with_limit feature flag is unset' do - it_behaves_like 'paginated response' - it_behaves_like 'response with pagination headers' - end - - context 'when the api_kaminari_count_with_limit feature flag is disabled' do + context 'when resources count is less than MAX_COUNT_LIMIT' do before do - stub_feature_flags(api_kaminari_count_with_limit: false) + stub_const("::Kaminari::ActiveRecordRelationMethods::MAX_COUNT_LIMIT", 4) end it_behaves_like 'paginated response' it_behaves_like 'response with pagination headers' end - context 'when the api_kaminari_count_with_limit feature flag is enabled' do + context 'when resources count is more than MAX_COUNT_LIMIT' do before do - stub_feature_flags(api_kaminari_count_with_limit: true) - end - - context 'when resources count is less than MAX_COUNT_LIMIT' do - before do - stub_const("::Kaminari::ActiveRecordRelationMethods::MAX_COUNT_LIMIT", 4) - end - - it_behaves_like 'paginated response' - it_behaves_like 'response with pagination headers' + stub_const("::Kaminari::ActiveRecordRelationMethods::MAX_COUNT_LIMIT", 2) end - context 'when resources count is more than MAX_COUNT_LIMIT' do - before do - stub_const("::Kaminari::ActiveRecordRelationMethods::MAX_COUNT_LIMIT", 2) - end - - it_behaves_like 'paginated response' - - it 'does not return the X-Total and X-Total-Pages headers' do - expect_no_header('X-Total') - expect_no_header('X-Total-Pages') - expect_header('X-Per-Page', '2') - expect_header('X-Page', '1') - expect_header('X-Next-Page', '2') - expect_header('X-Prev-Page', '') - - expect_header('Link', anything) do |_key, val| - expect(val).to include(%Q(<#{incoming_api_projects_url}?#{query.merge(page: 1).to_query}>; rel="first")) - expect(val).to include(%Q(<#{incoming_api_projects_url}?#{query.merge(page: 2).to_query}>; rel="next")) - expect(val).not_to include('rel="last"') - expect(val).not_to include('rel="prev"') - end - - subject.paginate(resource) - end - end + it_behaves_like 'paginated response' - it 'does not return the total headers when excluding them' do + it 'does not return the X-Total and X-Total-Pages headers' do expect_no_header('X-Total') expect_no_header('X-Total-Pages') expect_header('X-Per-Page', '2') expect_header('X-Page', '1') + expect_header('X-Next-Page', '2') + expect_header('X-Prev-Page', '') - paginator.paginate(resource, exclude_total_headers: true) + expect_header('Link', anything) do |_key, val| + expect(val).to include(%Q(<#{incoming_api_projects_url}?#{query.merge(page: 1).to_query}>; rel="first")) + expect(val).to include(%Q(<#{incoming_api_projects_url}?#{query.merge(page: 2).to_query}>; rel="next")) + 
expect(val).not_to include('rel="last"') + expect(val).not_to include('rel="prev"') + end + + subject.paginate(resource) end end + it 'does not return the total headers when excluding them' do + expect_no_header('X-Total') + expect_no_header('X-Total-Pages') + expect_header('X-Per-Page', '2') + expect_header('X-Page', '1') + + paginator.paginate(resource, exclude_total_headers: true) + end + context 'when resource already paginated' do let(:resource) { Project.all.page(1).per(1) } diff --git a/spec/lib/gitlab/patch/legacy_database_config_spec.rb b/spec/lib/gitlab/patch/database_config_spec.rb index b87e16f31ae..d6f36ab86d5 100644 --- a/spec/lib/gitlab/patch/legacy_database_config_spec.rb +++ b/spec/lib/gitlab/patch/database_config_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Patch::LegacyDatabaseConfig do +RSpec.describe Gitlab::Patch::DatabaseConfig do it 'module is included' do expect(Rails::Application::Configuration).to include(described_class) end diff --git a/spec/lib/gitlab/path_regex_spec.rb b/spec/lib/gitlab/path_regex_spec.rb index 9876387512b..e5fa7538515 100644 --- a/spec/lib/gitlab/path_regex_spec.rb +++ b/spec/lib/gitlab/path_regex_spec.rb @@ -557,7 +557,7 @@ RSpec.describe Gitlab::PathRegex do end it 'does not match other non-word characters' do - expect(subject.match('ruby:2.7.0')[0]).to eq('ruby') + expect(subject.match('image:1.0.0')[0]).to eq('image') end end diff --git a/spec/lib/gitlab/project_template_spec.rb b/spec/lib/gitlab/project_template_spec.rb index 05417e721c7..0ef52b63bc6 100644 --- a/spec/lib/gitlab/project_template_spec.rb +++ b/spec/lib/gitlab/project_template_spec.rb @@ -8,7 +8,7 @@ RSpec.describe Gitlab::ProjectTemplate do expected = %w[ rails spring express iosswift dotnetcore android gomicro gatsby hugo jekyll plainhtml gitbook - hexo sse_middleman gitpod_spring_petclinic nfhugo + hexo middleman gitpod_spring_petclinic nfhugo nfjekyll nfplainhtml nfgitbook nfhexo salesforcedx serverless_framework tencent_serverless_framework jsonnet cluster_management kotlin_native_linux diff --git a/spec/lib/gitlab/quick_actions/command_definition_spec.rb b/spec/lib/gitlab/quick_actions/command_definition_spec.rb index 73629ce3da2..8362c07baca 100644 --- a/spec/lib/gitlab/quick_actions/command_definition_spec.rb +++ b/spec/lib/gitlab/quick_actions/command_definition_spec.rb @@ -26,7 +26,7 @@ RSpec.describe Gitlab::QuickActions::CommandDefinition do describe "#noop?" do context "when the command has an action block" do before do - subject.action_block = proc { } + subject.action_block = proc {} end it "returns false" do @@ -42,7 +42,7 @@ RSpec.describe Gitlab::QuickActions::CommandDefinition do end describe "#available?" 
do - let(:opts) { OpenStruct.new(go: false) } + let(:opts) { ActiveSupport::InheritableOptions.new(go: false) } context "when the command has a condition block" do before do @@ -104,7 +104,8 @@ RSpec.describe Gitlab::QuickActions::CommandDefinition do end describe "#execute" do - let(:context) { OpenStruct.new(run: false, commands_executed_count: nil) } + let(:fake_context) { Struct.new(:run, :commands_executed_count, :received_arg) } + let(:context) { fake_context.new(false, nil, nil) } context "when the command is a noop" do it "doesn't execute the command" do diff --git a/spec/lib/gitlab/search_context/builder_spec.rb b/spec/lib/gitlab/search_context/builder_spec.rb index 079477115bb..a09115f3f21 100644 --- a/spec/lib/gitlab/search_context/builder_spec.rb +++ b/spec/lib/gitlab/search_context/builder_spec.rb @@ -43,7 +43,6 @@ RSpec.describe Gitlab::SearchContext::Builder, type: :controller do def be_search_context(project: nil, group: nil, snippets: [], ref: nil) group = project ? project.group : group snippets.compact! - ref = ref have_attributes( project: project, diff --git a/spec/lib/gitlab/security/scan_configuration_spec.rb b/spec/lib/gitlab/security/scan_configuration_spec.rb index 2e8a11dfda3..1760796c5a0 100644 --- a/spec/lib/gitlab/security/scan_configuration_spec.rb +++ b/spec/lib/gitlab/security/scan_configuration_spec.rb @@ -47,6 +47,16 @@ RSpec.describe ::Gitlab::Security::ScanConfiguration do it { is_expected.to be_nil } end + describe '#meta_info_path' do + subject { scan.meta_info_path } + + let(:configured) { true } + let(:available) { true } + let(:type) { :dast } + + it { is_expected.to be_nil } + end + describe '#can_enable_by_merge_request?' do subject { scan.can_enable_by_merge_request? } diff --git a/spec/lib/gitlab/seeder_spec.rb b/spec/lib/gitlab/seeder_spec.rb index 71d0a41ef98..a22d47cbfb3 100644 --- a/spec/lib/gitlab/seeder_spec.rb +++ b/spec/lib/gitlab/seeder_spec.rb @@ -3,6 +3,24 @@ require 'spec_helper' RSpec.describe Gitlab::Seeder do + describe Namespace do + subject { described_class } + + it 'has not_mass_generated scope' do + expect { Namespace.not_mass_generated }.to raise_error(NoMethodError) + + Gitlab::Seeder.quiet do + expect { Namespace.not_mass_generated }.not_to raise_error + end + end + + it 'includes NamespaceSeed module' do + Gitlab::Seeder.quiet do + is_expected.to include_module(Gitlab::Seeder::NamespaceSeed) + end + end + end + describe '.quiet' do let(:database_base_models) do { @@ -50,4 +68,13 @@ RSpec.describe Gitlab::Seeder do notification_service.new_note(note) end end + + describe '.log_message' do + it 'prepends timestamp to the logged message' do + freeze_time do + message = "some message." + expect { described_class.log_message(message) }.to output(/#{Time.current}: #{message}/).to_stdout + end + end + end end diff --git a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb index 3fbd207c2e1..ffa92126cc9 100644 --- a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb +++ b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb @@ -292,7 +292,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do if category feature_category category else - feature_category_not_owned! 
+ feature_category :not_owned end def perform diff --git a/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb b/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb index b9a13fd697e..3baa0c6f967 100644 --- a/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb +++ b/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb @@ -28,7 +28,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::WorkerContext::Client do 'TestNotOwnedWithContextWorker' end - feature_category_not_owned! + feature_category :not_owned end end diff --git a/spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb b/spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb index 377ff6fd166..05b328e55d3 100644 --- a/spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb +++ b/spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb @@ -29,7 +29,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::WorkerContext::Server do "NotOwnedWorker" end - feature_category_not_owned! + feature_category :not_owned end end diff --git a/spec/lib/gitlab/ssh_public_key_spec.rb b/spec/lib/gitlab/ssh_public_key_spec.rb index cf5d2c3b455..422b6f925a1 100644 --- a/spec/lib/gitlab/ssh_public_key_spec.rb +++ b/spec/lib/gitlab/ssh_public_key_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::SSHPublicKey, lib: true do +RSpec.describe Gitlab::SSHPublicKey, lib: true, fips_mode: false do let(:key) { attributes_for(:rsa_key_2048)[:key] } let(:public_key) { described_class.new(key) } @@ -19,6 +19,17 @@ RSpec.describe Gitlab::SSHPublicKey, lib: true do it { expect(described_class.technology(name).name).to eq(name) } it { expect(described_class.technology(name.to_s).name).to eq(name) } end + + context 'FIPS mode', :fips_mode do + where(:name) do + [:rsa, :ecdsa, :ed25519, :ecdsa_sk, :ed25519_sk] + end + + with_them do + it { expect(described_class.technology(name).name).to eq(name) } + it { expect(described_class.technology(name.to_s).name).to eq(name) } + end + end end describe '.supported_types' do @@ -27,6 +38,14 @@ RSpec.describe Gitlab::SSHPublicKey, lib: true do [:rsa, :dsa, :ecdsa, :ed25519, :ecdsa_sk, :ed25519_sk] ) end + + context 'FIPS mode', :fips_mode do + it 'returns array with the names of supported technologies' do + expect(described_class.supported_types).to eq( + [:rsa, :dsa, :ecdsa, :ed25519, :ecdsa_sk, :ed25519_sk] + ) + end + end end describe '.supported_sizes(name)' do @@ -45,6 +64,24 @@ RSpec.describe Gitlab::SSHPublicKey, lib: true do it { expect(described_class.supported_sizes(name)).to eq(sizes) } it { expect(described_class.supported_sizes(name.to_s)).to eq(sizes) } end + + context 'FIPS mode', :fips_mode do + where(:name, :sizes) do + [ + [:rsa, [3072, 4096]], + [:dsa, []], + [:ecdsa, [256, 384, 521]], + [:ed25519, [256]], + [:ecdsa_sk, [256]], + [:ed25519_sk, [256]] + ] + end + + with_them do + it { expect(described_class.supported_sizes(name)).to eq(sizes) } + it { expect(described_class.supported_sizes(name.to_s)).to eq(sizes) } + end + end end describe '.supported_algorithms' do @@ -60,6 +97,21 @@ RSpec.describe Gitlab::SSHPublicKey, lib: true do ) ) end + + context 'FIPS mode', :fips_mode do + it 'returns all supported algorithms' do + expect(described_class.supported_algorithms).to eq( + %w( + ssh-rsa + ssh-dss + ecdsa-sha2-nistp256 ecdsa-sha2-nistp384 ecdsa-sha2-nistp521 + ssh-ed25519 + sk-ecdsa-sha2-nistp256@openssh.com + sk-ssh-ed25519@openssh.com + ) + ) + end + end end describe '.supported_algorithms_for_name' do @@ -80,6 +132,26 
@@ RSpec.describe Gitlab::SSHPublicKey, lib: true do expect(described_class.supported_algorithms_for_name(name.to_s)).to eq(algorithms) end end + + context 'FIPS mode', :fips_mode do + where(:name, :algorithms) do + [ + [:rsa, %w(ssh-rsa)], + [:dsa, %w(ssh-dss)], + [:ecdsa, %w(ecdsa-sha2-nistp256 ecdsa-sha2-nistp384 ecdsa-sha2-nistp521)], + [:ed25519, %w(ssh-ed25519)], + [:ecdsa_sk, %w(sk-ecdsa-sha2-nistp256@openssh.com)], + [:ed25519_sk, %w(sk-ssh-ed25519@openssh.com)] + ] + end + + with_them do + it "returns all supported algorithms for #{params[:name]}" do + expect(described_class.supported_algorithms_for_name(name)).to eq(algorithms) + expect(described_class.supported_algorithms_for_name(name.to_s)).to eq(algorithms) + end + end + end end describe '.sanitize(key_content)' do diff --git a/spec/lib/gitlab/suggestions/commit_message_spec.rb b/spec/lib/gitlab/suggestions/commit_message_spec.rb index 965960f0c3e..dcadc206715 100644 --- a/spec/lib/gitlab/suggestions/commit_message_spec.rb +++ b/spec/lib/gitlab/suggestions/commit_message_spec.rb @@ -3,7 +3,10 @@ require 'spec_helper' RSpec.describe Gitlab::Suggestions::CommitMessage do - def create_suggestion(file_path, new_line, to_content) + include ProjectForksHelper + using RSpec::Parameterized::TableSyntax + + def create_suggestion(merge_request, file_path, new_line, to_content) position = Gitlab::Diff::Position.new(old_path: file_path, new_path: file_path, old_line: nil, @@ -29,69 +32,111 @@ RSpec.describe Gitlab::Suggestions::CommitMessage do create(:project, :repository, path: 'project-1', name: 'Project_1') end - let_it_be(:merge_request) do + let_it_be(:forked_project) { fork_project(project, nil, repository: true) } + + let_it_be(:merge_request_same_project) do create(:merge_request, source_project: project, target_project: project) end - let_it_be(:suggestion_set) do - suggestion1 = create_suggestion('files/ruby/popen.rb', 9, '*** SUGGESTION 1 ***') - suggestion2 = create_suggestion('files/ruby/popen.rb', 13, '*** SUGGESTION 2 ***') - suggestion3 = create_suggestion('files/ruby/regex.rb', 22, '*** SUGGESTION 3 ***') + let_it_be(:merge_request_from_fork) do + create(:merge_request, source_project: forked_project, target_project: project) + end + + let_it_be(:suggestion_set_same_project) do + suggestion1 = create_suggestion(merge_request_same_project, 'files/ruby/popen.rb', 9, '*** SUGGESTION 1 ***') + suggestion2 = create_suggestion(merge_request_same_project, 'files/ruby/popen.rb', 13, '*** SUGGESTION 2 ***') + suggestion3 = create_suggestion(merge_request_same_project, 'files/ruby/regex.rb', 22, '*** SUGGESTION 3 ***') + + Gitlab::Suggestions::SuggestionSet.new([suggestion1, suggestion2, suggestion3]) + end + + let_it_be(:suggestion_set_forked_project) do + suggestion1 = create_suggestion(merge_request_from_fork, 'files/ruby/popen.rb', 9, '*** SUGGESTION 1 ***') + suggestion2 = create_suggestion(merge_request_from_fork, 'files/ruby/popen.rb', 13, '*** SUGGESTION 2 ***') + suggestion3 = create_suggestion(merge_request_from_fork, 'files/ruby/regex.rb', 22, '*** SUGGESTION 3 ***') Gitlab::Suggestions::SuggestionSet.new([suggestion1, suggestion2, suggestion3]) end describe '#message' do - before do - # Updating the suggestion_commit_message on a project shared across specs - # avoids recreating the repository for each spec. 
- project.update!(suggestion_commit_message: message) - end + where(:suggestion_set) { [ref(:suggestion_set_same_project), ref(:suggestion_set_forked_project)] } + + with_them do + before do + # Updating the suggestion_commit_message on a project shared across specs + # avoids recreating the repository for each spec. + project.update!(suggestion_commit_message: message) + forked_project.update!(suggestion_commit_message: fork_message) + end + + let(:fork_message) { nil } - context 'when a custom commit message is not specified' do - let(:expected_message) { 'Apply 3 suggestion(s) to 2 file(s)' } + context 'when a custom commit message is not specified' do + let(:expected_message) { 'Apply 3 suggestion(s) to 2 file(s)' } - context 'and is nil' do - let(:message) { nil } + context 'and is nil' do + let(:message) { nil } - it 'uses the default commit message' do - expect(described_class - .new(user, suggestion_set) - .message).to eq(expected_message) + it 'uses the default commit message' do + expect(described_class + .new(user, suggestion_set) + .message).to eq(expected_message) + end end - end - context 'and is an empty string' do - let(:message) { '' } + context 'and is an empty string' do + let(:message) { '' } - it 'uses the default commit message' do - expect(described_class - .new(user, suggestion_set) - .message).to eq(expected_message) + it 'uses the default commit message' do + expect(described_class + .new(user, suggestion_set) + .message).to eq(expected_message) + end end - end - end - context 'when a custom commit message is specified' do - let(:message) { "i'm a project message. a user's custom message takes precedence over me :(" } - let(:custom_message) { "hello there! i'm a cool custom commit message." } + context 'when a custom commit message is specified for forked project' do + let(:message) { nil } + let(:fork_message) { "I'm a sad message that will not be used :(" } - it 'shows the custom commit message' do - expect(Gitlab::Suggestions::CommitMessage - .new(user, suggestion_set, custom_message) - .message).to eq(custom_message) + it 'uses the default commit message' do + expect(described_class + .new(user, suggestion_set) + .message).to eq(expected_message) + end + end end - end - context 'is specified and includes all placeholders' do - let(:message) do - '*** %{branch_name} %{files_count} %{file_paths} %{project_name} %{project_path} %{user_full_name} %{username} %{suggestions_count} ***' + context 'when a custom commit message is specified' do + let(:message) { "i'm a project message. a user's custom message takes precedence over me :(" } + let(:custom_message) { "hello there! i'm a cool custom commit message." 
} + + it 'shows the custom commit message' do + expect(Gitlab::Suggestions::CommitMessage + .new(user, suggestion_set, custom_message) + .message).to eq(custom_message) + end end - it 'generates a custom commit message' do - expect(Gitlab::Suggestions::CommitMessage - .new(user, suggestion_set) - .message).to eq('*** master 2 files/ruby/popen.rb, files/ruby/regex.rb Project_1 project-1 Test User test.user 3 ***') + context 'is specified and includes all placeholders' do + let(:message) do + '*** %{branch_name} %{files_count} %{file_paths} %{project_name} %{project_path} %{user_full_name} %{username} %{suggestions_count} ***' + end + + it 'generates a custom commit message' do + expect(Gitlab::Suggestions::CommitMessage + .new(user, suggestion_set) + .message).to eq('*** master 2 files/ruby/popen.rb, files/ruby/regex.rb Project_1 project-1 Test User test.user 3 ***') + end + + context 'when a custom commit message is specified for forked project' do + let(:fork_message) { "I'm a sad message that will not be used :(" } + + it 'uses the target project commit message' do + expect(Gitlab::Suggestions::CommitMessage + .new(user, suggestion_set) + .message).to eq('*** master 2 files/ruby/popen.rb, files/ruby/regex.rb Project_1 project-1 Test User test.user 3 ***') + end + end end end end diff --git a/spec/lib/gitlab/suggestions/suggestion_set_spec.rb b/spec/lib/gitlab/suggestions/suggestion_set_spec.rb index 54d79a9d4ba..469646986e1 100644 --- a/spec/lib/gitlab/suggestions/suggestion_set_spec.rb +++ b/spec/lib/gitlab/suggestions/suggestion_set_spec.rb @@ -3,6 +3,9 @@ require 'spec_helper' RSpec.describe Gitlab::Suggestions::SuggestionSet do + include ProjectForksHelper + using RSpec::Parameterized::TableSyntax + def create_suggestion(file_path, new_line, to_content) position = Gitlab::Diff::Position.new(old_path: file_path, new_path: file_path, @@ -24,86 +27,99 @@ RSpec.describe Gitlab::Suggestions::SuggestionSet do let_it_be(:user) { create(:user) } let_it_be(:project) { create(:project, :repository) } + let_it_be(:forked_project) { fork_project(project, nil, repository: true) } - let_it_be(:merge_request) do + let_it_be(:merge_request_same_project) do create(:merge_request, source_project: project, target_project: project) end - let_it_be(:suggestion) { create(:suggestion)} - - let_it_be(:suggestion2) do - create_suggestion('files/ruby/popen.rb', 13, "*** SUGGESTION 2 ***") - end - - let_it_be(:suggestion3) do - create_suggestion('files/ruby/regex.rb', 22, "*** SUGGESTION 3 ***") + let_it_be(:merge_request_from_fork) do + create(:merge_request, source_project: forked_project, target_project: project) end - let_it_be(:unappliable_suggestion) { create(:suggestion, :unappliable) } + where(:merge_request) { [ref(:merge_request_same_project), ref(:merge_request_from_fork)] } + with_them do + let(:note) { create(:diff_note_on_merge_request, project: project, noteable: merge_request) } + let(:suggestion) { create(:suggestion, note: note) } - let(:suggestion_set) { described_class.new([suggestion]) } - - describe '#project' do - it 'returns the project associated with the suggestions' do - expected_project = suggestion.project + let(:suggestion2) do + create_suggestion('files/ruby/popen.rb', 13, "*** SUGGESTION 2 ***") + end - expect(suggestion_set.project).to be(expected_project) + let(:suggestion3) do + create_suggestion('files/ruby/regex.rb', 22, "*** SUGGESTION 3 ***") end - end - describe '#branch' do - it 'returns the branch associated with the suggestions' do - expected_branch = 
suggestion.branch + let(:unappliable_suggestion) { create(:suggestion, :unappliable) } + + let(:suggestion_set) { described_class.new([suggestion]) } - expect(suggestion_set.branch).to be(expected_branch) + describe '#source_project' do + it 'returns the source project associated with the suggestions' do + expect(suggestion_set.source_project).to be(merge_request.source_project) + end end - end - describe '#valid?' do - it 'returns true if no errors are found' do - expect(suggestion_set.valid?).to be(true) + describe '#target_project' do + it 'returns the target project associated with the suggestions' do + expect(suggestion_set.target_project).to be(project) + end end - it 'returns false if an error is found' do - suggestion_set = described_class.new([unappliable_suggestion]) + describe '#branch' do + it 'returns the branch associated with the suggestions' do + expected_branch = suggestion.branch - expect(suggestion_set.valid?).to be(false) + expect(suggestion_set.branch).to be(expected_branch) + end end - end - describe '#error_message' do - it 'returns an error message if an error is found' do - suggestion_set = described_class.new([unappliable_suggestion]) + describe '#valid?' do + it 'returns true if no errors are found' do + expect(suggestion_set.valid?).to be(true) + end - expect(suggestion_set.error_message).to be_a(String) + it 'returns false if an error is found' do + suggestion_set = described_class.new([unappliable_suggestion]) + + expect(suggestion_set.valid?).to be(false) + end end - it 'returns nil if no errors are found' do - expect(suggestion_set.error_message).to be(nil) + describe '#error_message' do + it 'returns an error message if an error is found' do + suggestion_set = described_class.new([unappliable_suggestion]) + + expect(suggestion_set.error_message).to be_a(String) + end + + it 'returns nil if no errors are found' do + expect(suggestion_set.error_message).to be(nil) + end end - end - describe '#actions' do - it 'returns an array of hashes with proper key/value pairs' do - first_action = suggestion_set.actions.first + describe '#actions' do + it 'returns an array of hashes with proper key/value pairs' do + first_action = suggestion_set.actions.first - file_suggestion = suggestion_set.send(:suggestions_per_file).first + file_suggestion = suggestion_set.send(:suggestions_per_file).first - expect(first_action[:action]).to be('update') - expect(first_action[:file_path]).to eq(file_suggestion.file_path) - expect(first_action[:content]).to eq(file_suggestion.new_content) + expect(first_action[:action]).to be('update') + expect(first_action[:file_path]).to eq(file_suggestion.file_path) + expect(first_action[:content]).to eq(file_suggestion.new_content) + end end - end - describe '#file_paths' do - it 'returns an array of unique file paths associated with the suggestions' do - suggestion_set = described_class.new([suggestion, suggestion2, suggestion3]) + describe '#file_paths' do + it 'returns an array of unique file paths associated with the suggestions' do + suggestion_set = described_class.new([suggestion, suggestion2, suggestion3]) - expected_paths = %w(files/ruby/popen.rb files/ruby/regex.rb) + expected_paths = %w(files/ruby/popen.rb files/ruby/regex.rb) - actual_paths = suggestion_set.file_paths + actual_paths = suggestion_set.file_paths - expect(actual_paths.sort).to eq(expected_paths) + expect(actual_paths.sort).to eq(expected_paths) + end end end end diff --git a/spec/lib/gitlab/tracking_spec.rb b/spec/lib/gitlab/tracking_spec.rb index cd83971aef9..cc973be8be9 
100644 --- a/spec/lib/gitlab/tracking_spec.rb +++ b/spec/lib/gitlab/tracking_spec.rb @@ -149,4 +149,42 @@ RSpec.describe Gitlab::Tracking do described_class.event(nil, 'some_action') end end + + describe '.definition' do + let(:namespace) { create(:namespace) } + + let_it_be(:definition_action) { 'definition_action' } + let_it_be(:definition_category) { 'definition_category' } + let_it_be(:label_description) { 'definition label description' } + let_it_be(:test_definition) {{ 'category': definition_category, 'action': definition_action }} + + before do + allow_next_instance_of(described_class) do |instance| + allow(instance).to receive(:event) + end + allow_next_instance_of(Gitlab::Tracking::Destinations::Snowplow) do |instance| + allow(instance).to receive(:event) + end + allow(YAML).to receive(:load_file).with(Rails.root.join('config/events/filename.yml')).and_return(test_definition) + end + + it 'dispatches the data to .event' do + project = build_stubbed(:project) + user = build_stubbed(:user) + + expect(described_class).to receive(:event) do |category, action, args| + expect(category).to eq(definition_category) + expect(action).to eq(definition_action) + expect(args[:label]).to eq('label') + expect(args[:property]).to eq('...') + expect(args[:project]).to eq(project) + expect(args[:user]).to eq(user) + expect(args[:namespace]).to eq(namespace) + expect(args[:extra_key_1]).to eq('extra value 1') + end + + described_class.definition('filename', category: nil, action: nil, label: 'label', property: '...', + project: project, user: user, namespace: namespace, extra_key_1: 'extra value 1') + end + end end diff --git a/spec/lib/gitlab/url_sanitizer_spec.rb b/spec/lib/gitlab/url_sanitizer_spec.rb index aba4ca109a9..0ffbf5f81e7 100644 --- a/spec/lib/gitlab/url_sanitizer_spec.rb +++ b/spec/lib/gitlab/url_sanitizer_spec.rb @@ -139,6 +139,23 @@ RSpec.describe Gitlab::UrlSanitizer do it { is_expected.to eq(credentials) } end end + + context 'with mixed credentials' do + where(:url, :credentials, :result) do + 'http://a@example.com' | { password: 'd' } | { user: 'a', password: 'd' } + 'http://a:b@example.com' | { password: 'd' } | { user: 'a', password: 'd' } + 'http://:b@example.com' | { password: 'd' } | { user: nil, password: 'd' } + 'http://a@example.com' | { user: 'c' } | { user: 'c', password: nil } + 'http://a:b@example.com' | { user: 'c' } | { user: 'c', password: 'b' } + 'http://a:b@example.com' | { user: '' } | { user: 'a', password: 'b' } + end + + with_them do + subject { described_class.new(url, credentials: credentials).credentials } + + it { is_expected.to eq(result) } + end + end end describe '#user' do diff --git a/spec/lib/gitlab/usage/service_ping_report_spec.rb b/spec/lib/gitlab/usage/service_ping_report_spec.rb index 1f62ddd0bbb..b6119ab52ec 100644 --- a/spec/lib/gitlab/usage/service_ping_report_spec.rb +++ b/spec/lib/gitlab/usage/service_ping_report_spec.rb @@ -7,119 +7,86 @@ RSpec.describe Gitlab::Usage::ServicePingReport, :use_clean_rails_memory_store_c let(:usage_data) { { uuid: "1111", counts: { issue: 0 } } } - context 'when feature merge_service_ping_instrumented_metrics enabled' do - before do - stub_feature_flags(merge_service_ping_instrumented_metrics: true) + before do + allow_next_instance_of(Gitlab::Usage::ServicePing::PayloadKeysProcessor) do |instance| + allow(instance).to receive(:missing_key_paths).and_return([]) + end - allow_next_instance_of(Gitlab::Usage::ServicePing::PayloadKeysProcessor) do |instance| - allow(instance).to
receive(:missing_key_paths).and_return([]) - end + allow_next_instance_of(Gitlab::Usage::ServicePing::InstrumentedPayload) do |instance| + allow(instance).to receive(:build).and_return({}) + end + end - allow_next_instance_of(Gitlab::Usage::ServicePing::InstrumentedPayload) do |instance| - allow(instance).to receive(:build).and_return({}) - end + context 'all_metrics_values' do + it 'generates the service ping when there are no missing values' do + expect(Gitlab::UsageData).to receive(:data).and_return(usage_data) + expect(described_class.for(output: :all_metrics_values)).to eq({ uuid: "1111", counts: { issue: 0 } }) end - context 'all_metrics_values' do - it 'generates the service ping when there are no missing values' do - expect(Gitlab::UsageData).to receive(:data).and_return(usage_data) - expect(described_class.for(output: :all_metrics_values)).to eq({ uuid: "1111", counts: { issue: 0 } }) + it 'generates the service ping with the missing values' do + expect_next_instance_of(Gitlab::Usage::ServicePing::PayloadKeysProcessor, usage_data) do |instance| + expect(instance).to receive(:missing_instrumented_metrics_key_paths).and_return(['counts.boards']) end - it 'generates the service ping with the missing values' do - expect_next_instance_of(Gitlab::Usage::ServicePing::PayloadKeysProcessor, usage_data) do |instance| - expect(instance).to receive(:missing_instrumented_metrics_key_paths).and_return(['counts.boards']) - end - - expect_next_instance_of(Gitlab::Usage::ServicePing::InstrumentedPayload, ['counts.boards'], :with_value) do |instance| - expect(instance).to receive(:build).and_return({ counts: { boards: 1 } }) - end - - expect(Gitlab::UsageData).to receive(:data).and_return(usage_data) - expect(described_class.for(output: :all_metrics_values)).to eq({ uuid: "1111", counts: { issue: 0, boards: 1 } }) + expect_next_instance_of(Gitlab::Usage::ServicePing::InstrumentedPayload, ['counts.boards'], :with_value) do |instance| + expect(instance).to receive(:build).and_return({ counts: { boards: 1 } }) end - end - - context 'for output: :metrics_queries' do - it 'generates the service ping' do - expect(Gitlab::UsageData).to receive(:data).and_return(usage_data) - described_class.for(output: :metrics_queries) - end + expect(Gitlab::UsageData).to receive(:data).and_return(usage_data) + expect(described_class.for(output: :all_metrics_values)).to eq({ uuid: "1111", counts: { issue: 0, boards: 1 } }) end + end - context 'for output: :non_sql_metrics_values' do - it 'generates the service ping' do - expect(Gitlab::UsageData).to receive(:data).and_return(usage_data) + context 'for output: :metrics_queries' do + it 'generates the service ping' do + expect(Gitlab::UsageData).to receive(:data).and_return(usage_data) - described_class.for(output: :non_sql_metrics_values) - end + described_class.for(output: :metrics_queries) end + end - context 'when using cached' do - context 'for cached: true' do - let(:new_usage_data) { { uuid: "1112" } } - - it 'caches the values' do - allow(Gitlab::UsageData).to receive(:data).and_return(usage_data, new_usage_data) + context 'for output: :non_sql_metrics_values' do + it 'generates the service ping' do + expect(Gitlab::UsageData).to receive(:data).and_return(usage_data) - expect(described_class.for(output: :all_metrics_values)).to eq(usage_data) - expect(described_class.for(output: :all_metrics_values, cached: true)).to eq(usage_data) + described_class.for(output: :non_sql_metrics_values) + end + end - expect(Rails.cache.fetch('usage_data')).to eq(usage_data) - end + 
context 'when using cached' do + context 'for cached: true' do + let(:new_usage_data) { { uuid: "1112" } } - it 'writes to cache and returns fresh data' do - allow(Gitlab::UsageData).to receive(:data).and_return(usage_data, new_usage_data) + it 'caches the values' do + allow(Gitlab::UsageData).to receive(:data).and_return(usage_data, new_usage_data) - expect(described_class.for(output: :all_metrics_values)).to eq(usage_data) - expect(described_class.for(output: :all_metrics_values)).to eq(new_usage_data) - expect(described_class.for(output: :all_metrics_values, cached: true)).to eq(new_usage_data) + expect(described_class.for(output: :all_metrics_values)).to eq(usage_data) + expect(described_class.for(output: :all_metrics_values, cached: true)).to eq(usage_data) - expect(Rails.cache.fetch('usage_data')).to eq(new_usage_data) - end + expect(Rails.cache.fetch('usage_data')).to eq(usage_data) end - context 'when no caching' do - let(:new_usage_data) { { uuid: "1112" } } - - it 'returns fresh data' do - allow(Gitlab::UsageData).to receive(:data).and_return(usage_data, new_usage_data) - - expect(described_class.for(output: :all_metrics_values)).to eq(usage_data) - expect(described_class.for(output: :all_metrics_values)).to eq(new_usage_data) - - expect(Rails.cache.fetch('usage_data')).to eq(new_usage_data) - end - end - end - end + it 'writes to cache and returns fresh data' do + allow(Gitlab::UsageData).to receive(:data).and_return(usage_data, new_usage_data) - context 'when feature merge_service_ping_instrumented_metrics disabled' do - before do - stub_feature_flags(merge_service_ping_instrumented_metrics: false) - end + expect(described_class.for(output: :all_metrics_values)).to eq(usage_data) + expect(described_class.for(output: :all_metrics_values)).to eq(new_usage_data) + expect(described_class.for(output: :all_metrics_values, cached: true)).to eq(new_usage_data) - context 'all_metrics_values' do - it 'generates the service ping when there are no missing values' do - expect(Gitlab::UsageData).to receive(:data).and_return(usage_data) - expect(described_class.for(output: :all_metrics_values)).to eq({ uuid: "1111", counts: { issue: 0 } }) + expect(Rails.cache.fetch('usage_data')).to eq(new_usage_data) end end - context 'for output: :metrics_queries' do - it 'generates the service ping' do - expect(Gitlab::UsageData).to receive(:data).and_return(usage_data) + context 'when no caching' do + let(:new_usage_data) { { uuid: "1112" } } - described_class.for(output: :metrics_queries) - end - end + it 'returns fresh data' do + allow(Gitlab::UsageData).to receive(:data).and_return(usage_data, new_usage_data) - context 'for output: :non_sql_metrics_values' do - it 'generates the service ping' do - expect(Gitlab::UsageData).to receive(:data).and_return(usage_data) + expect(described_class.for(output: :all_metrics_values)).to eq(usage_data) + expect(described_class.for(output: :all_metrics_values)).to eq(new_usage_data) - described_class.for(output: :non_sql_metrics_values) + expect(Rails.cache.fetch('usage_data')).to eq(new_usage_data) end end end diff --git a/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb index 222198a58ac..6a37bfd106d 100644 --- a/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb +++ b/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb @@ -5,30 +5,52 @@ require 'spec_helper' RSpec.describe Gitlab::UsageDataCounters::CiTemplateUniqueCounter do 
describe '.track_unique_project_event' do using RSpec::Parameterized::TableSyntax + include SnowplowHelpers - let(:project_id) { 1 } + let(:project) { build(:project) } + let(:user) { build(:user) } shared_examples 'tracks template' do + let(:subject) { described_class.track_unique_project_event(project: project, template: template_path, config_source: config_source, user: user) } + it "has an event defined for template" do expect do - described_class.track_unique_project_event( - project_id: project_id, - template: template_path, - config_source: config_source - ) + subject end.not_to raise_error end it "tracks template" do expanded_template_name = described_class.expand_template_name(template_path) expected_template_event_name = described_class.ci_template_event_name(expanded_template_name, config_source) - expect(Gitlab::UsageDataCounters::HLLRedisCounter).to(receive(:track_event)).with(expected_template_event_name, values: project_id) + expect(Gitlab::UsageDataCounters::HLLRedisCounter).to(receive(:track_event)).with(expected_template_event_name, values: project.id) + + subject + end + + context 'Snowplow' do + it 'event is not tracked if FF is disabled' do + stub_feature_flags(route_hll_to_snowplow: false) + + subject - described_class.track_unique_project_event(project_id: project_id, template: template_path, config_source: config_source) + expect_no_snowplow_event + end + + it 'tracks event' do + subject + + expect_snowplow_event( + category: described_class.to_s, + action: 'ci_templates_unique', + namespace: project.namespace, + user: user, + project: project + ) + end end end - context 'with explicit includes' do + context 'with explicit includes', :snowplow do let(:config_source) { :repository_source } (described_class.ci_templates - ['Verify/Browser-Performance.latest.gitlab-ci.yml', 'Verify/Browser-Performance.gitlab-ci.yml']).each do |template| @@ -40,7 +62,7 @@ RSpec.describe Gitlab::UsageDataCounters::CiTemplateUniqueCounter do end end - context 'with implicit includes' do + context 'with implicit includes', :snowplow do let(:config_source) { :auto_devops_source } [ @@ -60,7 +82,7 @@ RSpec.describe Gitlab::UsageDataCounters::CiTemplateUniqueCounter do it 'expands short template names' do expect do - described_class.track_unique_project_event(project_id: 1, template: 'Dependency-Scanning.gitlab-ci.yml', config_source: :repository_source) + described_class.track_unique_project_event(project: project, template: 'Dependency-Scanning.gitlab-ci.yml', config_source: :repository_source, user: user) end.not_to raise_error end end diff --git a/spec/lib/gitlab/usage_data_counters/gitlab_cli_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/gitlab_cli_activity_unique_counter_spec.rb new file mode 100644 index 00000000000..d6eb67e5c35 --- /dev/null +++ b/spec/lib/gitlab/usage_data_counters/gitlab_cli_activity_unique_counter_spec.rb @@ -0,0 +1,15 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::UsageDataCounters::GitLabCliActivityUniqueCounter, :clean_gitlab_redis_shared_state do # rubocop:disable RSpec/FilePath + let(:user1) { build(:user, id: 1) } + let(:user2) { build(:user, id: 2) } + let(:time) { Time.current } + let(:action) { described_class::GITLAB_CLI_API_REQUEST_ACTION } + let(:user_agent) { { user_agent: 'GLab - GitLab CLI' } } + + context 'when tracking a gitlab cli request' do + it_behaves_like 'a request from an extension' + end +end diff --git a/spec/lib/gitlab/usage_data_queries_spec.rb 
b/spec/lib/gitlab/usage_data_queries_spec.rb index c3ac9d7db90..88322e1b971 100644 --- a/spec/lib/gitlab/usage_data_queries_spec.rb +++ b/spec/lib/gitlab/usage_data_queries_spec.rb @@ -34,14 +34,14 @@ RSpec.describe Gitlab::UsageDataQueries do describe '.redis_usage_data' do subject(:redis_usage_data) { described_class.redis_usage_data { 42 } } - it 'returns a class for redis_usage_data with a counter call' do + it 'returns a stringified class for redis_usage_data with a counter call' do expect(described_class.redis_usage_data(Gitlab::UsageDataCounters::WikiPageCounter)) - .to eq(redis_usage_data_counter: Gitlab::UsageDataCounters::WikiPageCounter) + .to eq(redis_usage_data_counter: "Gitlab::UsageDataCounters::WikiPageCounter") end - it 'returns a stringified block for redis_usage_data with a block' do + it 'returns a placeholder string for redis_usage_data with a block' do is_expected.to include(:redis_usage_data_block) - expect(redis_usage_data[:redis_usage_data_block]).to start_with('#<Proc:') + expect(redis_usage_data[:redis_usage_data_block]).to eq('non-SQL usage data block') end end @@ -53,8 +53,8 @@ RSpec.describe Gitlab::UsageDataQueries do .to eq(alt_usage_data_value: 1) end - it 'returns a stringified block for alt_usage_data with a block' do - expect(alt_usage_data[:alt_usage_data_block]).to start_with('#<Proc:') + it 'returns a placeholder string for alt_usage_data with a block' do + expect(alt_usage_data[:alt_usage_data_block]).to eq('non-SQL usage data block') end end diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb index 958df7baf72..8a919a0a72e 100644 --- a/spec/lib/gitlab/usage_data_spec.rb +++ b/spec/lib/gitlab/usage_data_spec.rb @@ -166,7 +166,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do expect(described_class.usage_activity_by_stage_create({})).to include( deploy_keys: 2, keys: 2, - merge_requests: 2, projects_with_disable_overriding_approvers_per_merge_request: 2, projects_without_disable_overriding_approvers_per_merge_request: 6, remote_mirrors: 2, @@ -175,7 +174,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do expect(described_class.usage_activity_by_stage_create(described_class.monthly_time_range_db_params)).to include( deploy_keys: 1, keys: 1, - merge_requests: 1, projects_with_disable_overriding_approvers_per_merge_request: 1, projects_without_disable_overriding_approvers_per_merge_request: 3, remote_mirrors: 1, @@ -507,10 +505,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do end it 'gathers usage counts', :aggregate_failures do - stub_feature_flags(merge_service_ping_instrumented_metrics: false) - count_data = subject[:counts] - expect(count_data[:boards]).to eq(1) expect(count_data[:projects]).to eq(4) expect(count_data.keys).to include(*UsageDataHelpers::COUNTS_KEYS) expect(UsageDataHelpers::COUNTS_KEYS - count_data.keys).to be_empty diff --git a/spec/lib/gitlab/utils/delegator_override/validator_spec.rb b/spec/lib/gitlab/utils/delegator_override/validator_spec.rb index 4cd1b18de82..a58bc65c708 100644 --- a/spec/lib/gitlab/utils/delegator_override/validator_spec.rb +++ b/spec/lib/gitlab/utils/delegator_override/validator_spec.rb @@ -47,6 +47,15 @@ RSpec.describe Gitlab::Utils::DelegatorOverride::Validator do expect(validator.target_classes).to contain_exactly(target_class) end + + it 'adds all descendants of the target' do + child_class1 = Class.new(target_class) + child_class2 = Class.new(target_class) + grandchild_class = Class.new(child_class2) + validator.add_target(target_class) 
+ + expect(validator.target_classes).to contain_exactly(target_class, child_class1, child_class2, grandchild_class) + end end describe '#expand_on_ancestors' do diff --git a/spec/lib/gitlab/view/presenter/base_spec.rb b/spec/lib/gitlab/view/presenter/base_spec.rb index a7083bd2722..afb44c0d298 100644 --- a/spec/lib/gitlab/view/presenter/base_spec.rb +++ b/spec/lib/gitlab/view/presenter/base_spec.rb @@ -5,7 +5,7 @@ require 'spec_helper' RSpec.describe Gitlab::View::Presenter::Base do let(:project) { double(:project) } let(:presenter_class) do - Struct.new(:subject).include(described_class) + Struct.new(:__subject__).include(described_class) end describe '.presenter?' do @@ -17,17 +17,24 @@ RSpec.describe Gitlab::View::Presenter::Base do end describe '.presents' do - it 'exposes #subject with the given keyword' do - presenter_class.presents(Object, as: :foo) - presenter = presenter_class.new(project) - - expect(presenter.foo).to eq(project) - end - it 'raises an error when symbol is passed' do expect { presenter_class.presents(:foo) }.to raise_error(ArgumentError) end + context 'when the presenter class specifies a custom keyword' do + subject(:presenter) { presenter_class.new(project) } + + before do + presenter_class.class_eval do + presents Object, as: :foo + end + end + + it 'exposes the subject with the given keyword' do + expect(presenter.foo).to be(project) + end + end + context 'when the presenter class inherits Presenter::Delegated' do let(:presenter_class) do Class.new(::Gitlab::View::Presenter::Delegated) do @@ -50,13 +57,22 @@ RSpec.describe Gitlab::View::Presenter::Base do end it 'does not set the delegator target' do - expect(presenter_class).not_to receive(:delegator_target).with(Object) + expect(presenter_class).not_to receive(:delegator_target) presenter_class.presents(Object, as: :foo) end end end + describe '#__subject__' do + it 'returns the subject' do + subject = double + presenter = presenter_class.new(subject) + + expect(presenter.__subject__).to be(subject) + end + end + describe '#can?' 
do context 'user is not allowed' do it 'returns false' do diff --git a/spec/lib/gitlab/web_ide/config/entry/terminal_spec.rb b/spec/lib/gitlab/web_ide/config/entry/terminal_spec.rb index f8c4a28ed45..7d96adf95e8 100644 --- a/spec/lib/gitlab/web_ide/config/entry/terminal_spec.rb +++ b/spec/lib/gitlab/web_ide/config/entry/terminal_spec.rb @@ -132,7 +132,7 @@ RSpec.describe Gitlab::WebIde::Config::Entry::Terminal do { before_script: %w[ls pwd], script: 'sleep 100', tags: ['webide'], - image: 'ruby:3.0', + image: 'image:1.0', services: ['mysql'], variables: { KEY: 'value' } } end @@ -143,7 +143,7 @@ RSpec.describe Gitlab::WebIde::Config::Entry::Terminal do tag_list: ['webide'], job_variables: [{ key: 'KEY', value: 'value', public: true }], options: { - image: { name: "ruby:3.0" }, + image: { name: "image:1.0" }, services: [{ name: "mysql" }], before_script: %w[ls pwd], script: ['sleep 100'] diff --git a/spec/lib/gitlab/web_ide/config_spec.rb b/spec/lib/gitlab/web_ide/config_spec.rb index 7ee9d40410c..11ea6150719 100644 --- a/spec/lib/gitlab/web_ide/config_spec.rb +++ b/spec/lib/gitlab/web_ide/config_spec.rb @@ -11,7 +11,7 @@ RSpec.describe Gitlab::WebIde::Config do let(:yml) do <<-EOS terminal: - image: ruby:2.7 + image: image:1.0 before_script: - gem install rspec EOS @@ -21,7 +21,7 @@ RSpec.describe Gitlab::WebIde::Config do it 'returns hash created from string' do hash = { terminal: { - image: 'ruby:2.7', + image: 'image:1.0', before_script: ['gem install rspec'] } } diff --git a/spec/lib/gitlab/workhorse_spec.rb b/spec/lib/gitlab/workhorse_spec.rb index 3bab9aec454..91ab0a53c6c 100644 --- a/spec/lib/gitlab/workhorse_spec.rb +++ b/spec/lib/gitlab/workhorse_spec.rb @@ -448,6 +448,14 @@ RSpec.describe Gitlab::Workhorse do end end + + describe '.detect_content_type' do + subject { described_class.detect_content_type } + + it 'returns array setting detect content type in workhorse' do + expect(subject).to eq(%w[Gitlab-Workhorse-Detect-Content-Type true]) + end + end + describe '.send_git_blob' do include FakeBlobHelpers diff --git a/spec/lib/mattermost/session_spec.rb b/spec/lib/mattermost/session_spec.rb index e2e1b4c28c7..2158076e4b5 100644 --- a/spec/lib/mattermost/session_spec.rb +++ b/spec/lib/mattermost/session_spec.rb @@ -35,6 +35,12 @@ RSpec.describe Mattermost::Session, type: :request do it 'makes a request to the oauth uri' do expect { subject.with_session }.to raise_error(::Mattermost::NoSessionError) end + + it 'returns nil on calling a non-existing method on request' do + return_value = subject.request.method_missing("non_existing_method", "something") do + end + expect(return_value).to be(nil) + end end context 'with oauth_uri' do diff --git a/spec/lib/prometheus/cleanup_multiproc_dir_service_spec.rb b/spec/lib/prometheus/cleanup_multiproc_dir_service_spec.rb index db77a5d42d8..bdf9673a53f 100644 --- a/spec/lib/prometheus/cleanup_multiproc_dir_service_spec.rb +++ b/spec/lib/prometheus/cleanup_multiproc_dir_service_spec.rb @@ -1,32 +1,27 @@ # frozen_string_literal: true -require 'spec_helper' +require 'fast_spec_helper' RSpec.describe Prometheus::CleanupMultiprocDirService do - describe '.call' do - subject { described_class.new.execute } - + describe '#execute' do let(:metrics_multiproc_dir) { Dir.mktmpdir } let(:metrics_file_path) { File.join(metrics_multiproc_dir, 'counter_puma_master-0.db') } + subject(:service) { described_class.new(metrics_dir_arg).execute } + before do FileUtils.touch(metrics_file_path) end after do - FileUtils.rm_r(metrics_multiproc_dir) +
FileUtils.rm_rf(metrics_multiproc_dir) end context 'when `multiprocess_files_dir` is defined' do - before do - expect(Prometheus::Client.configuration) - .to receive(:multiprocess_files_dir) - .and_return(metrics_multiproc_dir) - .at_least(:once) - end + let(:metrics_dir_arg) { metrics_multiproc_dir } it 'removes old metrics' do - expect { subject } + expect { service } .to change { File.exist?(metrics_file_path) } .from(true) .to(false) @@ -34,15 +29,10 @@ RSpec.describe Prometheus::CleanupMultiprocDirService do end context 'when `multiprocess_files_dir` is not defined' do - before do - expect(Prometheus::Client.configuration) - .to receive(:multiprocess_files_dir) - .and_return(nil) - .at_least(:once) - end + let(:metrics_dir_arg) { nil } it 'does not remove any files' do - expect { subject } + expect { service } .not_to change { File.exist?(metrics_file_path) } .from(true) end diff --git a/spec/lib/sidebars/groups/menus/group_information_menu_spec.rb b/spec/lib/sidebars/groups/menus/group_information_menu_spec.rb index b68af6fb8ab..5f67ee11970 100644 --- a/spec/lib/sidebars/groups/menus/group_information_menu_spec.rb +++ b/spec/lib/sidebars/groups/menus/group_information_menu_spec.rb @@ -28,6 +28,20 @@ RSpec.describe Sidebars::Groups::Menus::GroupInformationMenu do end end + + describe '#sprite_icon' do + subject { described_class.new(context).sprite_icon } + + context 'when group is a root group' do + specify { is_expected.to eq 'group'} + end + + context 'when group is a child group' do + let(:group) { build(:group, parent: root_group) } + + specify { is_expected.to eq 'subgroup'} + end + end + describe 'Menu Items' do subject { described_class.new(context).renderable_items.index { |e| e.item_id == item_id } } diff --git a/spec/lib/sidebars/projects/menus/infrastructure_menu_spec.rb b/spec/lib/sidebars/projects/menus/infrastructure_menu_spec.rb index 8a6b0e4e95d..81114f5a0b3 100644 --- a/spec/lib/sidebars/projects/menus/infrastructure_menu_spec.rb +++ b/spec/lib/sidebars/projects/menus/infrastructure_menu_spec.rb @@ -112,6 +112,38 @@ RSpec.describe Sidebars::Projects::Menus::InfrastructureMenu do let(:item_id) { :google_cloud } it_behaves_like 'access rights checks' + + context 'when feature flag is turned off globally' do + before do + stub_feature_flags(incubation_5mp_google_cloud: false) + end + + it { is_expected.to be_nil } + + context 'when feature flag is enabled for specific project' do + before do + stub_feature_flags(incubation_5mp_google_cloud: project) + end + + it_behaves_like 'access rights checks' + end + + context 'when feature flag is enabled for specific group' do + before do + stub_feature_flags(incubation_5mp_google_cloud: project.group) + end + + it_behaves_like 'access rights checks' + end + + context 'when feature flag is enabled for specific user' do + before do + stub_feature_flags(incubation_5mp_google_cloud: user) + end + + it_behaves_like 'access rights checks' + end + end end end end diff --git a/spec/lib/sidebars/projects/panel_spec.rb b/spec/lib/sidebars/projects/panel_spec.rb index 7e69a2dfe52..ff253eedd08 100644 --- a/spec/lib/sidebars/projects/panel_spec.rb +++ b/spec/lib/sidebars/projects/panel_spec.rb @@ -17,16 +17,40 @@ RSpec.describe Sidebars::Projects::Panel do subject { described_class.new(context).instance_variable_get(:@menus) } context 'when integration is present and active' do - let_it_be(:confluence) { create(:confluence_integration, active: true) } + context 'confluence only' do + let_it_be(:confluence) { create(:confluence_integration,
active: true) } - let(:project) { confluence.project } + let(:project) { confluence.project } - it 'contains Confluence menu item' do - expect(subject.index { |i| i.is_a?(Sidebars::Projects::Menus::ConfluenceMenu) }).not_to be_nil + it 'contains Confluence menu item' do + expect(subject.index { |i| i.is_a?(Sidebars::Projects::Menus::ConfluenceMenu) }).not_to be_nil + end + + it 'does not contain Wiki menu item' do + expect(subject.index { |i| i.is_a?(Sidebars::Projects::Menus::WikiMenu) }).to be_nil + end end - it 'does not contain Wiki menu item' do - expect(subject.index { |i| i.is_a?(Sidebars::Projects::Menus::WikiMenu) }).to be_nil + context 'shimo only' do + let_it_be(:shimo) { create(:shimo_integration, active: true) } + + let(:project) { shimo.project } + + it 'contains Shimo menu item' do + expect(subject.index { |i| i.is_a?(Sidebars::Projects::Menus::ShimoMenu) }).not_to be_nil + end + end + + context 'confluence & shimo' do + let_it_be(:confluence) { create(:confluence_integration, active: true) } + let_it_be(:shimo) { create(:shimo_integration, active: true) } + + let(:project) { confluence.project } + + it 'contains Confluence menu item, not Shimo' do + expect(subject.index { |i| i.is_a?(Sidebars::Projects::Menus::ConfluenceMenu) }).not_to be_nil + expect(subject.index { |i| i.is_a?(Sidebars::Projects::Menus::ShimoMenu) }).to be_nil + end end end diff --git a/spec/lib/system_check/app/git_user_default_ssh_config_check_spec.rb b/spec/lib/system_check/app/git_user_default_ssh_config_check_spec.rb index 2c996635c36..7c9fbe152cc 100644 --- a/spec/lib/system_check/app/git_user_default_ssh_config_check_spec.rb +++ b/spec/lib/system_check/app/git_user_default_ssh_config_check_spec.rb @@ -71,7 +71,31 @@ RSpec.describe SystemCheck::App::GitUserDefaultSSHConfigCheck do end end + describe '#show_error' do + subject(:show_error) { described_class.new.show_error } + + before do + stub_user + stub_home_dir + stub_ssh_file(forbidden_file) + end + + it 'outputs error information' do + expected = %r{ + Try\ fixing\ it:\s+ + mkdir\ ~/gitlab-check-backup-(.+)\s+ + sudo\ mv\ (.+)\s+ + For\ more\ information\ see:\s+ + doc/user/ssh\.md\#overriding-ssh-settings-on-the-gitlab-server\s+ + Please\ fix\ the\ error\ above\ and\ rerun\ the\ checks + }x + + expect { show_error }.to output(expected).to_stdout + end + end + def stub_user + allow(File).to receive(:expand_path).and_call_original allow(File).to receive(:expand_path).with("~#{username}").and_return(home_dir) end |
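
Several of the reworked specs above (Gitlab::Suggestions::CommitMessage, Gitlab::Suggestions::SuggestionSet) run the same examples against both a same-project and a fork-based merge request by combining rspec-parameterized's where/with_them blocks with ref(). A minimal, self-contained sketch of that pattern follows; the describe label and the expectation in it are illustrative only and are not part of this changeset.

# frozen_string_literal: true

require 'spec_helper'

# Illustrative sketch of the where/with_them + ref() pattern used by the
# suggestion specs in this changeset to cover both merge request variants.
RSpec.describe 'running examples against both merge request variants' do
  include ProjectForksHelper
  using RSpec::Parameterized::TableSyntax

  let_it_be(:project) { create(:project, :repository) }
  let_it_be(:forked_project) { fork_project(project, nil, repository: true) }

  let_it_be(:merge_request_same_project) do
    create(:merge_request, source_project: project, target_project: project)
  end

  let_it_be(:merge_request_from_fork) do
    create(:merge_request, source_project: forked_project, target_project: project)
  end

  # ref() defers resolution of the let_it_be values until each row runs.
  where(:merge_request) { [ref(:merge_request_same_project), ref(:merge_request_from_fork)] }

  with_them do
    it 'always targets the upstream project' do
      expect(merge_request.target_project).to eq(project)
    end
  end
end

Keeping the fork and same-project variants as let_it_be definitions and selecting them with ref() avoids duplicating every example for each merge request source.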